Merge branch 'audio-federation' into 'develop'
Audio federation See merge request funkwhale/funkwhale!411
This commit is contained in:
commit
bc8dd56e25
|
@ -249,6 +249,7 @@ Then, in separate terminals, you can setup as many different instances as you
|
|||
need::
|
||||
|
||||
export COMPOSE_PROJECT_NAME=node2
|
||||
export VUE_PORT=1234 # this has to be unique for each instance
|
||||
docker-compose -f dev.yml run --rm api python manage.py migrate
|
||||
docker-compose -f dev.yml run --rm api python manage.py createsuperuser
|
||||
docker-compose -f dev.yml up nginx api front nginx api celeryworker
|
||||
|
|
|
@ -14,7 +14,7 @@ router.register(r"settings", GlobalPreferencesViewSet, base_name="settings")
|
|||
router.register(r"activity", activity_views.ActivityViewSet, "activity")
|
||||
router.register(r"tags", views.TagViewSet, "tags")
|
||||
router.register(r"tracks", views.TrackViewSet, "tracks")
|
||||
router.register(r"track-files", views.TrackFileViewSet, "trackfiles")
|
||||
router.register(r"uploads", views.UploadViewSet, "uploads")
|
||||
router.register(r"libraries", views.LibraryViewSet, "libraries")
|
||||
router.register(r"listen", views.ListenViewSet, "listen")
|
||||
router.register(r"artists", views.ArtistViewSet, "artists")
|
||||
|
|
|
@ -514,8 +514,14 @@ ACCOUNT_USERNAME_BLACKLIST = [
|
|||
"me",
|
||||
"ghost",
|
||||
"_",
|
||||
"-",
|
||||
"hello",
|
||||
"contact",
|
||||
"inbox",
|
||||
"outbox",
|
||||
"shared-inbox",
|
||||
"shared_inbox",
|
||||
"actor",
|
||||
] + env.list("ACCOUNT_USERNAME_BLACKLIST", default=[])
|
||||
|
||||
EXTERNAL_REQUESTS_VERIFY_SSL = env.bool("EXTERNAL_REQUESTS_VERIFY_SSL", default=True)
|
||||
|
|
|
@ -9,7 +9,9 @@ from funkwhale_api.common import preferences
|
|||
class ConditionalAuthentication(BasePermission):
|
||||
def has_permission(self, request, view):
|
||||
if preferences.get("common__api_authentication_required"):
|
||||
return request.user and request.user.is_authenticated
|
||||
return (request.user and request.user.is_authenticated) or (
|
||||
hasattr(request, "actor") and request.actor
|
||||
)
|
||||
return True
|
||||
|
||||
|
||||
|
|
|
@ -5,6 +5,12 @@ visibility.
|
|||
|
||||
Files without any import job will be bounded to a "default" library on the first
|
||||
superuser account found. This should now happen though.
|
||||
|
||||
XXX TODO:
|
||||
|
||||
- add followers url on actor
|
||||
- shared inbox url on actor
|
||||
- compute hash from files
|
||||
"""
|
||||
|
||||
from funkwhale_api.music import models
|
||||
|
@ -19,7 +25,7 @@ def main(command, **kwargs):
|
|||
command.stdout.write(
|
||||
"* {} users imported music on this instance".format(len(importers))
|
||||
)
|
||||
files = models.TrackFile.objects.filter(
|
||||
files = models.Upload.objects.filter(
|
||||
library__isnull=True, jobs__isnull=False
|
||||
).distinct()
|
||||
command.stdout.write(
|
||||
|
@ -39,7 +45,7 @@ def main(command, **kwargs):
|
|||
)
|
||||
user_files.update(library=library)
|
||||
|
||||
files = models.TrackFile.objects.filter(
|
||||
files = models.Upload.objects.filter(
|
||||
library__isnull=True, jobs__isnull=True
|
||||
).distinct()
|
||||
command.stdout.write(
|
||||
|
|
|
@ -64,3 +64,46 @@ class ChunkedPath(object):
|
|||
new_filename = "".join(chunks[3:]) + ".{}".format(ext)
|
||||
parts = chunks[:3] + [new_filename]
|
||||
return os.path.join(self.root, *parts)
|
||||
|
||||
|
||||
def chunk_queryset(source_qs, chunk_size):
|
||||
"""
|
||||
From https://github.com/peopledoc/django-chunkator/blob/master/chunkator/__init__.py
|
||||
"""
|
||||
pk = None
|
||||
# In django 1.9, _fields is always present and `None` if 'values()' is used
|
||||
# In Django 1.8 and below, _fields will only be present if using `values()`
|
||||
has_fields = hasattr(source_qs, "_fields") and source_qs._fields
|
||||
if has_fields:
|
||||
if "pk" not in source_qs._fields:
|
||||
raise ValueError("The values() call must include the `pk` field")
|
||||
|
||||
field = source_qs.model._meta.pk
|
||||
# set the correct field name:
|
||||
# for ForeignKeys, we want to use `model_id` field, and not `model`,
|
||||
# to bypass default ordering on related model
|
||||
order_by_field = field.attname
|
||||
|
||||
source_qs = source_qs.order_by(order_by_field)
|
||||
queryset = source_qs
|
||||
while True:
|
||||
if pk:
|
||||
queryset = source_qs.filter(pk__gt=pk)
|
||||
page = queryset[:chunk_size]
|
||||
page = list(page)
|
||||
nb_items = len(page)
|
||||
|
||||
if nb_items == 0:
|
||||
return
|
||||
|
||||
last_item = page[-1]
|
||||
# source_qs._fields exists *and* is not none when using "values()"
|
||||
if has_fields:
|
||||
pk = last_item["pk"]
|
||||
else:
|
||||
pk = last_item.pk
|
||||
|
||||
yield page
|
||||
|
||||
if nb_items < chunk_size:
|
||||
return
|
||||
|
|
|
@ -2,11 +2,12 @@ import uuid
|
|||
import logging
|
||||
|
||||
from django.db import transaction, IntegrityError
|
||||
from django.utils import timezone
|
||||
from django.db.models import Q
|
||||
|
||||
from funkwhale_api.common import channels
|
||||
from funkwhale_api.common import utils as funkwhale_utils
|
||||
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
PUBLIC_ADDRESS = "https://www.w3.org/ns/activitystreams#Public"
|
||||
|
||||
|
@ -83,18 +84,21 @@ def receive(activity, on_behalf_of):
|
|||
serializer.validated_data.get("id"),
|
||||
)
|
||||
return
|
||||
# we create inbox items for further delivery
|
||||
items = [
|
||||
models.InboxItem(activity=copy, actor=r, type="to")
|
||||
for r in serializer.validated_data["recipients"]["to"]
|
||||
if hasattr(r, "fid")
|
||||
]
|
||||
items += [
|
||||
models.InboxItem(activity=copy, actor=r, type="cc")
|
||||
for r in serializer.validated_data["recipients"]["cc"]
|
||||
if hasattr(r, "fid")
|
||||
]
|
||||
models.InboxItem.objects.bulk_create(items)
|
||||
|
||||
local_to_recipients = get_actors_from_audience(activity.get("to", []))
|
||||
local_to_recipients = local_to_recipients.exclude(user=None)
|
||||
|
||||
local_cc_recipients = get_actors_from_audience(activity.get("cc", []))
|
||||
local_cc_recipients = local_cc_recipients.exclude(user=None)
|
||||
|
||||
inbox_items = []
|
||||
for recipients, type in [(local_to_recipients, "to"), (local_cc_recipients, "cc")]:
|
||||
|
||||
for r in recipients.values_list("pk", flat=True):
|
||||
inbox_items.append(models.InboxItem(actor_id=r, type=type, activity=copy))
|
||||
|
||||
models.InboxItem.objects.bulk_create(inbox_items)
|
||||
|
||||
# at this point, we have the activity in database. Even if we crash, it's
|
||||
# okay, as we can retry later
|
||||
funkwhale_utils.on_commit(tasks.dispatch_inbox.delay, activity_id=copy.pk)
|
||||
|
@ -153,6 +157,16 @@ class InboxRouter(Router):
|
|||
inbox_items = context.get(
|
||||
"inbox_items", models.InboxItem.objects.none()
|
||||
)
|
||||
inbox_items = (
|
||||
inbox_items.select_related()
|
||||
.select_related("actor__user")
|
||||
.prefetch_related(
|
||||
"activity__object",
|
||||
"activity__target",
|
||||
"activity__related_object",
|
||||
)
|
||||
)
|
||||
|
||||
for ii in inbox_items:
|
||||
user = ii.actor.get_user()
|
||||
if not user:
|
||||
|
@ -169,7 +183,6 @@ class InboxRouter(Router):
|
|||
},
|
||||
},
|
||||
)
|
||||
inbox_items.update(is_delivered=True, last_delivery_date=timezone.now())
|
||||
return
|
||||
|
||||
|
||||
|
@ -185,73 +198,203 @@ class OutboxRouter(Router):
|
|||
from . import tasks
|
||||
|
||||
for route, handler in self.routes:
|
||||
if match_route(route, routing):
|
||||
activities_data = []
|
||||
for e in handler(context):
|
||||
# a route can yield zero, one or more activity payloads
|
||||
if e:
|
||||
activities_data.append(e)
|
||||
inbox_items_by_activity_uuid = {}
|
||||
prepared_activities = []
|
||||
for activity_data in activities_data:
|
||||
to = activity_data["payload"].pop("to", [])
|
||||
cc = activity_data["payload"].pop("cc", [])
|
||||
a = models.Activity(**activity_data)
|
||||
a.uuid = uuid.uuid4()
|
||||
to_items, new_to = prepare_inbox_items(to, "to")
|
||||
cc_items, new_cc = prepare_inbox_items(cc, "cc")
|
||||
if not to_items and not cc_items:
|
||||
continue
|
||||
inbox_items_by_activity_uuid[str(a.uuid)] = to_items + cc_items
|
||||
if new_to:
|
||||
a.payload["to"] = new_to
|
||||
if new_cc:
|
||||
a.payload["cc"] = new_cc
|
||||
prepared_activities.append(a)
|
||||
if not match_route(route, routing):
|
||||
continue
|
||||
|
||||
activities = models.Activity.objects.bulk_create(prepared_activities)
|
||||
activities_data = []
|
||||
for e in handler(context):
|
||||
# a route can yield zero, one or more activity payloads
|
||||
if e:
|
||||
activities_data.append(e)
|
||||
inbox_items_by_activity_uuid = {}
|
||||
deliveries_by_activity_uuid = {}
|
||||
prepared_activities = []
|
||||
for activity_data in activities_data:
|
||||
activity_data["payload"]["actor"] = activity_data["actor"].fid
|
||||
to = activity_data["payload"].pop("to", [])
|
||||
cc = activity_data["payload"].pop("cc", [])
|
||||
a = models.Activity(**activity_data)
|
||||
a.uuid = uuid.uuid4()
|
||||
to_inbox_items, to_deliveries, new_to = prepare_deliveries_and_inbox_items(
|
||||
to, "to"
|
||||
)
|
||||
cc_inbox_items, cc_deliveries, new_cc = prepare_deliveries_and_inbox_items(
|
||||
cc, "cc"
|
||||
)
|
||||
if not any(
|
||||
[to_inbox_items, to_deliveries, cc_inbox_items, cc_deliveries]
|
||||
):
|
||||
continue
|
||||
deliveries_by_activity_uuid[str(a.uuid)] = to_deliveries + cc_deliveries
|
||||
inbox_items_by_activity_uuid[str(a.uuid)] = (
|
||||
to_inbox_items + cc_inbox_items
|
||||
)
|
||||
if new_to:
|
||||
a.payload["to"] = new_to
|
||||
if new_cc:
|
||||
a.payload["cc"] = new_cc
|
||||
prepared_activities.append(a)
|
||||
|
||||
final_inbox_items = []
|
||||
for a in activities:
|
||||
try:
|
||||
prepared_inbox_items = inbox_items_by_activity_uuid[str(a.uuid)]
|
||||
except KeyError:
|
||||
continue
|
||||
activities = models.Activity.objects.bulk_create(prepared_activities)
|
||||
|
||||
for ii in prepared_inbox_items:
|
||||
ii.activity = a
|
||||
final_inbox_items.append(ii)
|
||||
for activity in activities:
|
||||
if str(activity.uuid) in deliveries_by_activity_uuid:
|
||||
for obj in deliveries_by_activity_uuid[str(a.uuid)]:
|
||||
obj.activity = activity
|
||||
|
||||
# create all inbox items, in bulk
|
||||
models.InboxItem.objects.bulk_create(final_inbox_items)
|
||||
if str(activity.uuid) in inbox_items_by_activity_uuid:
|
||||
for obj in inbox_items_by_activity_uuid[str(a.uuid)]:
|
||||
obj.activity = activity
|
||||
|
||||
for a in activities:
|
||||
funkwhale_utils.on_commit(
|
||||
tasks.dispatch_outbox.delay, activity_id=a.pk
|
||||
)
|
||||
return activities
|
||||
# create all deliveries and items, in bulk
|
||||
models.Delivery.objects.bulk_create(
|
||||
[
|
||||
obj
|
||||
for collection in deliveries_by_activity_uuid.values()
|
||||
for obj in collection
|
||||
]
|
||||
)
|
||||
models.InboxItem.objects.bulk_create(
|
||||
[
|
||||
obj
|
||||
for collection in inbox_items_by_activity_uuid.values()
|
||||
for obj in collection
|
||||
]
|
||||
)
|
||||
|
||||
for a in activities:
|
||||
funkwhale_utils.on_commit(tasks.dispatch_outbox.delay, activity_id=a.pk)
|
||||
return activities
|
||||
|
||||
|
||||
def recursive_gettattr(obj, key):
|
||||
"""
|
||||
Given a dictionary such as {'user': {'name': 'Bob'}} and
|
||||
a dotted string such as user.name, returns 'Bob'.
|
||||
|
||||
If the value is not present, returns None
|
||||
"""
|
||||
v = obj
|
||||
for k in key.split("."):
|
||||
v = v.get(k)
|
||||
if v is None:
|
||||
return
|
||||
|
||||
return v
|
||||
|
||||
|
||||
def match_route(route, payload):
|
||||
for key, value in route.items():
|
||||
if payload.get(key) != value:
|
||||
payload_value = recursive_gettattr(payload, key)
|
||||
if payload_value != value:
|
||||
return False
|
||||
|
||||
return True
|
||||
|
||||
|
||||
def prepare_inbox_items(recipient_list, type):
|
||||
def prepare_deliveries_and_inbox_items(recipient_list, type):
|
||||
"""
|
||||
Given a list of recipients (
|
||||
either actor instances, public adresses, a dictionnary with a "type" and "target"
|
||||
keys for followers collections)
|
||||
returns a list of deliveries, alist of inbox_items and a list
|
||||
of urls to persist in the activity in place of the initial recipient list.
|
||||
"""
|
||||
from . import models
|
||||
|
||||
items = []
|
||||
new_list = [] # we return a list of actors url instead
|
||||
local_recipients = set()
|
||||
remote_inbox_urls = set()
|
||||
urls = []
|
||||
|
||||
for r in recipient_list:
|
||||
if r != PUBLIC_ADDRESS:
|
||||
item = models.InboxItem(actor=r, type=type)
|
||||
items.append(item)
|
||||
new_list.append(r.fid)
|
||||
else:
|
||||
new_list.append(r)
|
||||
if isinstance(r, models.Actor):
|
||||
if r.is_local:
|
||||
local_recipients.add(r)
|
||||
else:
|
||||
remote_inbox_urls.add(r.shared_inbox_url or r.inbox_url)
|
||||
urls.append(r.fid)
|
||||
elif r == PUBLIC_ADDRESS:
|
||||
urls.append(r)
|
||||
elif isinstance(r, dict) and r["type"] == "followers":
|
||||
received_follows = (
|
||||
r["target"]
|
||||
.received_follows.filter(approved=True)
|
||||
.select_related("actor__user")
|
||||
)
|
||||
for follow in received_follows:
|
||||
actor = follow.actor
|
||||
if actor.is_local:
|
||||
local_recipients.add(actor)
|
||||
else:
|
||||
remote_inbox_urls.add(actor.shared_inbox_url or actor.inbox_url)
|
||||
urls.append(r["target"].followers_url)
|
||||
|
||||
return items, new_list
|
||||
deliveries = [models.Delivery(inbox_url=url) for url in remote_inbox_urls]
|
||||
inbox_items = [
|
||||
models.InboxItem(actor=actor, type=type) for actor in local_recipients
|
||||
]
|
||||
|
||||
return inbox_items, deliveries, urls
|
||||
|
||||
|
||||
def join_queries_or(left, right):
|
||||
if left:
|
||||
return left | right
|
||||
else:
|
||||
return right
|
||||
|
||||
|
||||
def get_actors_from_audience(urls):
|
||||
"""
|
||||
Given a list of urls such as [
|
||||
"https://hello.world/@bob/followers",
|
||||
"https://eldritch.cafe/@alice/followers",
|
||||
"https://funkwhale.demo/libraries/uuid/followers",
|
||||
]
|
||||
Returns a queryset of actors that are member of the collections
|
||||
listed in the given urls. The urls may contain urls referring
|
||||
to an actor, an actor followers collection or an library followers
|
||||
collection.
|
||||
|
||||
Urls that don't match anything are simply discarded
|
||||
"""
|
||||
from . import models
|
||||
|
||||
queries = {"followed": None, "actors": []}
|
||||
for url in urls:
|
||||
if url == PUBLIC_ADDRESS:
|
||||
continue
|
||||
queries["actors"].append(url)
|
||||
queries["followed"] = join_queries_or(
|
||||
queries["followed"], Q(target__followers_url=url)
|
||||
)
|
||||
final_query = None
|
||||
if queries["actors"]:
|
||||
final_query = join_queries_or(final_query, Q(fid__in=queries["actors"]))
|
||||
if queries["followed"]:
|
||||
actor_follows = models.Follow.objects.filter(queries["followed"], approved=True)
|
||||
final_query = join_queries_or(
|
||||
final_query, Q(pk__in=actor_follows.values_list("actor", flat=True))
|
||||
)
|
||||
|
||||
library_follows = models.LibraryFollow.objects.filter(
|
||||
queries["followed"], approved=True
|
||||
)
|
||||
final_query = join_queries_or(
|
||||
final_query, Q(pk__in=library_follows.values_list("actor", flat=True))
|
||||
)
|
||||
if not final_query:
|
||||
return models.Actor.objects.none()
|
||||
return models.Actor.objects.filter(final_query)
|
||||
|
||||
|
||||
def get_inbox_urls(actor_queryset):
|
||||
"""
|
||||
Given an actor queryset, returns a deduplicated set containing
|
||||
all inbox or shared inbox urls where we should deliver our payloads for
|
||||
those actors
|
||||
"""
|
||||
values = actor_queryset.values("inbox_url", "shared_inbox_url")
|
||||
|
||||
urls = set([actor["shared_inbox_url"] or actor["inbox_url"] for actor in values])
|
||||
return sorted(urls)
|
||||
|
|
|
@ -4,23 +4,21 @@ from . import models
|
|||
from . import tasks
|
||||
|
||||
|
||||
def redeliver_inbox_items(modeladmin, request, queryset):
|
||||
for id in set(
|
||||
queryset.filter(activity__actor__user__isnull=False).values_list(
|
||||
"activity", flat=True
|
||||
)
|
||||
):
|
||||
tasks.dispatch_outbox.delay(activity_id=id)
|
||||
def redeliver_deliveries(modeladmin, request, queryset):
|
||||
queryset.update(is_delivered=False)
|
||||
for delivery in queryset:
|
||||
tasks.deliver_to_remote.delay(delivery_id=delivery.pk)
|
||||
|
||||
|
||||
redeliver_inbox_items.short_description = "Redeliver"
|
||||
redeliver_deliveries.short_description = "Redeliver"
|
||||
|
||||
|
||||
def redeliver_activities(modeladmin, request, queryset):
|
||||
for id in set(
|
||||
queryset.filter(actor__user__isnull=False).values_list("id", flat=True)
|
||||
):
|
||||
tasks.dispatch_outbox.delay(activity_id=id)
|
||||
for activity in queryset.select_related("actor__user"):
|
||||
if activity.actor.is_local:
|
||||
tasks.dispatch_outbox.delay(activity_id=activity.pk)
|
||||
else:
|
||||
tasks.dispatch_inbox.delay(activity_id=activity.pk)
|
||||
|
||||
|
||||
redeliver_activities.short_description = "Redeliver"
|
||||
|
@ -67,14 +65,22 @@ class LibraryFollowAdmin(admin.ModelAdmin):
|
|||
|
||||
@admin.register(models.InboxItem)
|
||||
class InboxItemAdmin(admin.ModelAdmin):
|
||||
list_display = [
|
||||
"actor",
|
||||
"activity",
|
||||
"type",
|
||||
"last_delivery_date",
|
||||
"delivery_attempts",
|
||||
]
|
||||
list_filter = ["type"]
|
||||
list_display = ["actor", "activity", "type", "is_read"]
|
||||
list_filter = ["type", "activity__type", "is_read"]
|
||||
search_fields = ["actor__fid", "activity__fid"]
|
||||
list_select_related = True
|
||||
actions = [redeliver_inbox_items]
|
||||
|
||||
|
||||
@admin.register(models.Delivery)
|
||||
class DeliveryAdmin(admin.ModelAdmin):
|
||||
list_display = [
|
||||
"inbox_url",
|
||||
"activity",
|
||||
"last_attempt_date",
|
||||
"attempts",
|
||||
"is_delivered",
|
||||
]
|
||||
list_filter = ["activity__type", "is_delivered"]
|
||||
search_fields = ["inbox_url"]
|
||||
list_select_related = True
|
||||
actions = [redeliver_deliveries]
|
||||
|
|
|
@ -16,7 +16,7 @@ class NestedLibraryFollowSerializer(serializers.ModelSerializer):
|
|||
|
||||
class LibrarySerializer(serializers.ModelSerializer):
|
||||
actor = federation_serializers.APIActorSerializer()
|
||||
files_count = serializers.SerializerMethodField()
|
||||
uploads_count = serializers.SerializerMethodField()
|
||||
follow = serializers.SerializerMethodField()
|
||||
|
||||
class Meta:
|
||||
|
@ -28,13 +28,13 @@ class LibrarySerializer(serializers.ModelSerializer):
|
|||
"name",
|
||||
"description",
|
||||
"creation_date",
|
||||
"files_count",
|
||||
"uploads_count",
|
||||
"privacy_level",
|
||||
"follow",
|
||||
]
|
||||
|
||||
def get_files_count(self, o):
|
||||
return max(getattr(o, "_files_count", 0), o.files_count)
|
||||
def get_uploads_count(self, o):
|
||||
return max(getattr(o, "_uploads_count", 0), o.uploads_count)
|
||||
|
||||
def get_follow(self, o):
|
||||
try:
|
||||
|
|
|
@ -87,7 +87,7 @@ class LibraryViewSet(mixins.RetrieveModelMixin, viewsets.GenericViewSet):
|
|||
music_models.Library.objects.all()
|
||||
.order_by("-creation_date")
|
||||
.select_related("actor")
|
||||
.annotate(_files_count=Count("files"))
|
||||
.annotate(_uploads_count=Count("uploads"))
|
||||
)
|
||||
serializer_class = api_serializers.LibrarySerializer
|
||||
permission_classes = [permissions.IsAuthenticated]
|
||||
|
|
|
@ -76,6 +76,9 @@ class ActorFactory(factory.DjangoModelFactory):
|
|||
fid = factory.LazyAttribute(
|
||||
lambda o: "https://{}/users/{}".format(o.domain, o.preferred_username)
|
||||
)
|
||||
followers_url = factory.LazyAttribute(
|
||||
lambda o: "https://{}/users/{}followers".format(o.domain, o.preferred_username)
|
||||
)
|
||||
inbox_url = factory.LazyAttribute(
|
||||
lambda o: "https://{}/users/{}/inbox".format(o.domain, o.preferred_username)
|
||||
)
|
||||
|
@ -134,19 +137,12 @@ class MusicLibraryFactory(factory.django.DjangoModelFactory):
|
|||
privacy_level = "me"
|
||||
name = factory.Faker("sentence")
|
||||
description = factory.Faker("sentence")
|
||||
files_count = 0
|
||||
uploads_count = 0
|
||||
fid = factory.Faker("federation_url")
|
||||
|
||||
class Meta:
|
||||
model = "music.Library"
|
||||
|
||||
@factory.post_generation
|
||||
def fid(self, create, extracted, **kwargs):
|
||||
if not create:
|
||||
# Simple build, do nothing.
|
||||
return
|
||||
|
||||
self.fid = extracted or self.get_federation_id()
|
||||
|
||||
@factory.post_generation
|
||||
def followers_url(self, create, extracted, **kwargs):
|
||||
if not create:
|
||||
|
@ -160,7 +156,7 @@ class MusicLibraryFactory(factory.django.DjangoModelFactory):
|
|||
class LibraryScan(factory.django.DjangoModelFactory):
|
||||
library = factory.SubFactory(MusicLibraryFactory)
|
||||
actor = factory.SubFactory(ActorFactory)
|
||||
total_files = factory.LazyAttribute(lambda o: o.library.files_count)
|
||||
total_files = factory.LazyAttribute(lambda o: o.library.uploads_count)
|
||||
|
||||
class Meta:
|
||||
model = "music.LibraryScan"
|
||||
|
@ -169,7 +165,7 @@ class LibraryScan(factory.django.DjangoModelFactory):
|
|||
@registry.register
|
||||
class ActivityFactory(factory.django.DjangoModelFactory):
|
||||
actor = factory.SubFactory(ActorFactory)
|
||||
url = factory.Faker("url")
|
||||
url = factory.Faker("federation_url")
|
||||
payload = factory.LazyFunction(lambda: {"type": "Create"})
|
||||
|
||||
class Meta:
|
||||
|
@ -178,7 +174,7 @@ class ActivityFactory(factory.django.DjangoModelFactory):
|
|||
|
||||
@registry.register
|
||||
class InboxItemFactory(factory.django.DjangoModelFactory):
|
||||
actor = factory.SubFactory(ActorFactory)
|
||||
actor = factory.SubFactory(ActorFactory, local=True)
|
||||
activity = factory.SubFactory(ActivityFactory)
|
||||
type = "to"
|
||||
|
||||
|
@ -186,6 +182,15 @@ class InboxItemFactory(factory.django.DjangoModelFactory):
|
|||
model = "federation.InboxItem"
|
||||
|
||||
|
||||
@registry.register
|
||||
class DeliveryFactory(factory.django.DjangoModelFactory):
|
||||
activity = factory.SubFactory(ActivityFactory)
|
||||
inbox_url = factory.Faker("url")
|
||||
|
||||
class Meta:
|
||||
model = "federation.Delivery"
|
||||
|
||||
|
||||
@registry.register
|
||||
class LibraryFollowFactory(factory.DjangoModelFactory):
|
||||
target = factory.SubFactory(MusicLibraryFactory)
|
||||
|
@ -269,9 +274,9 @@ class AudioMetadataFactory(factory.Factory):
|
|||
@registry.register(name="federation.Audio")
|
||||
class AudioFactory(factory.Factory):
|
||||
type = "Audio"
|
||||
id = factory.Faker("url")
|
||||
id = factory.Faker("federation_url")
|
||||
published = factory.LazyFunction(lambda: timezone.now().isoformat())
|
||||
actor = factory.Faker("url")
|
||||
actor = factory.Faker("federation_url")
|
||||
url = factory.SubFactory(LinkFactory, audio=True)
|
||||
metadata = factory.SubFactory(LibraryTrackMetadataFactory)
|
||||
|
||||
|
|
|
@ -108,7 +108,7 @@ def get_library_page(library, page_url, actor):
|
|||
)
|
||||
serializer = serializers.CollectionPageSerializer(
|
||||
data=response.json(),
|
||||
context={"library": library, "item_serializer": serializers.AudioSerializer},
|
||||
context={"library": library, "item_serializer": serializers.UploadSerializer},
|
||||
)
|
||||
serializer.is_valid(raise_exception=True)
|
||||
return serializer.validated_data
|
||||
|
|
|
@ -0,0 +1,37 @@
|
|||
# Generated by Django 2.0.8 on 2018-09-20 18:03
|
||||
|
||||
from django.db import migrations, models
|
||||
import django.db.models.deletion
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('federation', '0011_auto_20180910_1902'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.CreateModel(
|
||||
name='Delivery',
|
||||
fields=[
|
||||
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
|
||||
('is_delivered', models.BooleanField(default=False)),
|
||||
('last_attempt_date', models.DateTimeField(blank=True, null=True)),
|
||||
('attempts', models.PositiveIntegerField(default=0)),
|
||||
('inbox_url', models.URLField(max_length=500)),
|
||||
('activity', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='deliveries', to='federation.Activity')),
|
||||
],
|
||||
),
|
||||
migrations.RemoveField(
|
||||
model_name='inboxitem',
|
||||
name='delivery_attempts',
|
||||
),
|
||||
migrations.RemoveField(
|
||||
model_name='inboxitem',
|
||||
name='is_delivered',
|
||||
),
|
||||
migrations.RemoveField(
|
||||
model_name='inboxitem',
|
||||
name='last_delivery_date',
|
||||
),
|
||||
]
|
|
@ -48,8 +48,8 @@ class ActorQuerySet(models.QuerySet):
|
|||
qs = qs.annotate(
|
||||
**{
|
||||
"_usage_{}".format(s): models.Sum(
|
||||
"libraries__files__size",
|
||||
filter=models.Q(libraries__files__import_status=s),
|
||||
"libraries__uploads__size",
|
||||
filter=models.Q(libraries__uploads__import_status=s),
|
||||
)
|
||||
}
|
||||
)
|
||||
|
@ -72,8 +72,8 @@ class Actor(models.Model):
|
|||
domain = models.CharField(max_length=1000)
|
||||
summary = models.CharField(max_length=500, null=True, blank=True)
|
||||
preferred_username = models.CharField(max_length=200, null=True, blank=True)
|
||||
public_key = models.CharField(max_length=5000, null=True, blank=True)
|
||||
private_key = models.CharField(max_length=5000, null=True, blank=True)
|
||||
public_key = models.TextField(max_length=5000, null=True, blank=True)
|
||||
private_key = models.TextField(max_length=5000, null=True, blank=True)
|
||||
creation_date = models.DateTimeField(default=timezone.now)
|
||||
last_fetch_date = models.DateTimeField(default=timezone.now)
|
||||
manually_approves_followers = models.NullBooleanField(default=None)
|
||||
|
@ -159,25 +159,34 @@ class Actor(models.Model):
|
|||
return data
|
||||
|
||||
|
||||
class InboxItemQuerySet(models.QuerySet):
|
||||
def local(self, include=True):
|
||||
return self.exclude(actor__user__isnull=include)
|
||||
|
||||
|
||||
class InboxItem(models.Model):
|
||||
"""
|
||||
Store activities binding to local actors, with read/unread status.
|
||||
"""
|
||||
|
||||
actor = models.ForeignKey(
|
||||
Actor, related_name="inbox_items", on_delete=models.CASCADE
|
||||
)
|
||||
activity = models.ForeignKey(
|
||||
"Activity", related_name="inbox_items", on_delete=models.CASCADE
|
||||
)
|
||||
is_delivered = models.BooleanField(default=False)
|
||||
type = models.CharField(max_length=10, choices=[("to", "to"), ("cc", "cc")])
|
||||
last_delivery_date = models.DateTimeField(null=True, blank=True)
|
||||
delivery_attempts = models.PositiveIntegerField(default=0)
|
||||
is_read = models.BooleanField(default=False)
|
||||
|
||||
objects = InboxItemQuerySet.as_manager()
|
||||
|
||||
class Delivery(models.Model):
|
||||
"""
|
||||
Store deliveries attempt to remote inboxes
|
||||
"""
|
||||
|
||||
is_delivered = models.BooleanField(default=False)
|
||||
last_attempt_date = models.DateTimeField(null=True, blank=True)
|
||||
attempts = models.PositiveIntegerField(default=0)
|
||||
inbox_url = models.URLField(max_length=500)
|
||||
|
||||
activity = models.ForeignKey(
|
||||
"Activity", related_name="deliveries", on_delete=models.CASCADE
|
||||
)
|
||||
|
||||
|
||||
class Activity(models.Model):
|
||||
|
|
|
@ -1,5 +1,7 @@
|
|||
import logging
|
||||
|
||||
from funkwhale_api.music import models as music_models
|
||||
|
||||
from . import activity
|
||||
from . import serializers
|
||||
|
||||
|
@ -90,3 +92,109 @@ def outbox_follow(context):
|
|||
"object": follow.target,
|
||||
"related_object": follow,
|
||||
}
|
||||
|
||||
|
||||
@outbox.register({"type": "Create", "object.type": "Audio"})
|
||||
def outbox_create_audio(context):
|
||||
upload = context["upload"]
|
||||
serializer = serializers.ActivitySerializer(
|
||||
{
|
||||
"type": "Create",
|
||||
"actor": upload.library.actor.fid,
|
||||
"object": serializers.UploadSerializer(upload).data,
|
||||
}
|
||||
)
|
||||
yield {
|
||||
"type": "Create",
|
||||
"actor": upload.library.actor,
|
||||
"payload": with_recipients(
|
||||
serializer.data, to=[{"type": "followers", "target": upload.library}]
|
||||
),
|
||||
"object": upload,
|
||||
"target": upload.library,
|
||||
}
|
||||
|
||||
|
||||
@inbox.register({"type": "Create", "object.type": "Audio"})
|
||||
def inbox_create_audio(payload, context):
|
||||
serializer = serializers.UploadSerializer(
|
||||
data=payload["object"],
|
||||
context={"activity": context.get("activity"), "actor": context["actor"]},
|
||||
)
|
||||
|
||||
if not serializer.is_valid(raise_exception=context.get("raise_exception", False)):
|
||||
logger.warn("Discarding invalid audio create")
|
||||
return
|
||||
|
||||
upload = serializer.save()
|
||||
|
||||
return {"object": upload, "target": upload.library}
|
||||
|
||||
|
||||
@inbox.register({"type": "Delete", "object.type": "Library"})
|
||||
def inbox_delete_library(payload, context):
|
||||
actor = context["actor"]
|
||||
library_id = payload["object"].get("id")
|
||||
if not library_id:
|
||||
logger.debug("Discarding deletion of empty library")
|
||||
return
|
||||
|
||||
try:
|
||||
library = actor.libraries.get(fid=library_id)
|
||||
except music_models.Library.DoesNotExist:
|
||||
logger.debug("Discarding deletion of unkwnown library %s", library_id)
|
||||
return
|
||||
|
||||
library.delete()
|
||||
|
||||
|
||||
@outbox.register({"type": "Delete", "object.type": "Library"})
|
||||
def outbox_delete_library(context):
|
||||
library = context["library"]
|
||||
serializer = serializers.ActivitySerializer(
|
||||
{"type": "Delete", "object": {"type": "Library", "id": library.fid}}
|
||||
)
|
||||
yield {
|
||||
"type": "Delete",
|
||||
"actor": library.actor,
|
||||
"payload": with_recipients(
|
||||
serializer.data, to=[{"type": "followers", "target": library}]
|
||||
),
|
||||
}
|
||||
|
||||
|
||||
@inbox.register({"type": "Delete", "object.type": "Audio"})
|
||||
def inbox_delete_audio(payload, context):
|
||||
actor = context["actor"]
|
||||
try:
|
||||
upload_fids = [i for i in payload["object"]["id"]]
|
||||
except TypeError:
|
||||
# we did not receive a list of Ids, so we can probably use the value directly
|
||||
upload_fids = [payload["object"]["id"]]
|
||||
|
||||
candidates = music_models.Upload.objects.filter(
|
||||
library__actor=actor, fid__in=upload_fids
|
||||
)
|
||||
|
||||
total = candidates.count()
|
||||
logger.info("Deleting %s uploads with ids %s", total, upload_fids)
|
||||
candidates.delete()
|
||||
|
||||
|
||||
@outbox.register({"type": "Delete", "object.type": "Audio"})
|
||||
def outbox_delete_audio(context):
|
||||
uploads = context["uploads"]
|
||||
library = uploads[0].library
|
||||
serializer = serializers.ActivitySerializer(
|
||||
{
|
||||
"type": "Delete",
|
||||
"object": {"type": "Audio", "id": [u.get_federation_id() for u in uploads]},
|
||||
}
|
||||
)
|
||||
yield {
|
||||
"type": "Delete",
|
||||
"actor": library.actor,
|
||||
"payload": with_recipients(
|
||||
serializer.data, to=[{"type": "followers", "target": library}]
|
||||
),
|
||||
}
|
||||
|
|
|
@ -4,6 +4,7 @@ import urllib.parse
|
|||
|
||||
from django.core.exceptions import ObjectDoesNotExist
|
||||
from django.core.paginator import Paginator
|
||||
from django.db.models import F, Q
|
||||
from rest_framework import serializers
|
||||
|
||||
from funkwhale_api.common import utils as funkwhale_utils
|
||||
|
@ -29,7 +30,7 @@ class ActorSerializer(serializers.Serializer):
|
|||
manuallyApprovesFollowers = serializers.NullBooleanField(required=False)
|
||||
name = serializers.CharField(required=False, max_length=200)
|
||||
summary = serializers.CharField(max_length=None, required=False)
|
||||
followers = serializers.URLField(max_length=500, required=False, allow_null=True)
|
||||
followers = serializers.URLField(max_length=500)
|
||||
following = serializers.URLField(max_length=500, required=False, allow_null=True)
|
||||
publicKey = serializers.JSONField(required=False)
|
||||
|
||||
|
@ -174,30 +175,6 @@ class BaseActivitySerializer(serializers.Serializer):
|
|||
"We cannot handle an activity with no recipient"
|
||||
)
|
||||
|
||||
matching = models.Actor.objects.filter(fid__in=to + cc)
|
||||
if self.context.get("local_recipients", False):
|
||||
matching = matching.local()
|
||||
|
||||
if not len(matching):
|
||||
raise serializers.ValidationError("No matching recipients found")
|
||||
|
||||
actors_by_fid = {a.fid: a for a in matching}
|
||||
|
||||
def match(recipients, actors):
|
||||
for r in recipients:
|
||||
if r == activity.PUBLIC_ADDRESS:
|
||||
yield r
|
||||
else:
|
||||
try:
|
||||
yield actors[r]
|
||||
except KeyError:
|
||||
pass
|
||||
|
||||
return {
|
||||
"to": list(match(to, actors_by_fid)),
|
||||
"cc": list(match(cc, actors_by_fid)),
|
||||
}
|
||||
|
||||
|
||||
class FollowSerializer(serializers.Serializer):
|
||||
id = serializers.URLField(max_length=500)
|
||||
|
@ -422,7 +399,8 @@ class ActivitySerializer(serializers.Serializer):
|
|||
actor = serializers.URLField(max_length=500)
|
||||
id = serializers.URLField(max_length=500, required=False)
|
||||
type = serializers.ChoiceField(choices=[(c, c) for c in activity.ACTIVITY_TYPES])
|
||||
object = serializers.JSONField()
|
||||
object = serializers.JSONField(required=False)
|
||||
target = serializers.JSONField(required=False)
|
||||
|
||||
def validate_object(self, value):
|
||||
try:
|
||||
|
@ -528,6 +506,7 @@ class LibrarySerializer(PaginatedCollectionSerializer):
|
|||
type = serializers.ChoiceField(choices=["Library"])
|
||||
name = serializers.CharField()
|
||||
summary = serializers.CharField(allow_blank=True, allow_null=True, required=False)
|
||||
followers = serializers.URLField(max_length=500)
|
||||
audience = serializers.ChoiceField(
|
||||
choices=["", None, "https://www.w3.org/ns/activitystreams#Public"],
|
||||
required=False,
|
||||
|
@ -542,7 +521,7 @@ class LibrarySerializer(PaginatedCollectionSerializer):
|
|||
"summary": library.description,
|
||||
"page_size": 100,
|
||||
"actor": library.actor,
|
||||
"items": library.files.filter(import_status="finished"),
|
||||
"items": library.uploads.filter(import_status="finished"),
|
||||
"type": "Library",
|
||||
}
|
||||
r = super().to_representation(conf)
|
||||
|
@ -551,6 +530,7 @@ class LibrarySerializer(PaginatedCollectionSerializer):
|
|||
if library.privacy_level == "public"
|
||||
else ""
|
||||
)
|
||||
r["followers"] = library.followers_url
|
||||
return r
|
||||
|
||||
def create(self, validated_data):
|
||||
|
@ -563,9 +543,10 @@ class LibrarySerializer(PaginatedCollectionSerializer):
|
|||
fid=validated_data["id"],
|
||||
actor=actor,
|
||||
defaults={
|
||||
"files_count": validated_data["totalItems"],
|
||||
"uploads_count": validated_data["totalItems"],
|
||||
"name": validated_data["name"],
|
||||
"description": validated_data["summary"],
|
||||
"followers_url": validated_data["followers"],
|
||||
"privacy_level": "everyone"
|
||||
if validated_data["audience"]
|
||||
== "https://www.w3.org/ns/activitystreams#Public"
|
||||
|
@ -639,43 +620,157 @@ class CollectionPageSerializer(serializers.Serializer):
|
|||
return d
|
||||
|
||||
|
||||
class ArtistMetadataSerializer(serializers.Serializer):
|
||||
musicbrainz_id = serializers.UUIDField(required=False, allow_null=True)
|
||||
name = serializers.CharField()
|
||||
class MusicEntitySerializer(serializers.Serializer):
|
||||
id = serializers.URLField(max_length=500)
|
||||
published = serializers.DateTimeField()
|
||||
musicbrainzId = serializers.UUIDField(allow_null=True, required=False)
|
||||
name = serializers.CharField(max_length=1000)
|
||||
|
||||
def create(self, validated_data):
|
||||
mbid = validated_data.get("musicbrainzId")
|
||||
candidates = self.model.objects.filter(
|
||||
Q(mbid=mbid) | Q(fid=validated_data["id"])
|
||||
).order_by(F("fid").desc(nulls_last=True))
|
||||
|
||||
existing = candidates.first()
|
||||
if existing:
|
||||
return existing
|
||||
|
||||
# nothing matching in our database, let's create a new object
|
||||
return self.model.objects.create(**self.get_create_data(validated_data))
|
||||
|
||||
def get_create_data(self, validated_data):
|
||||
return {
|
||||
"mbid": validated_data.get("musicbrainzId"),
|
||||
"fid": validated_data["id"],
|
||||
"name": validated_data["name"],
|
||||
"creation_date": validated_data["published"],
|
||||
"from_activity": self.context.get("activity"),
|
||||
}
|
||||
|
||||
|
||||
class ReleaseMetadataSerializer(serializers.Serializer):
|
||||
musicbrainz_id = serializers.UUIDField(required=False, allow_null=True)
|
||||
title = serializers.CharField()
|
||||
class ArtistSerializer(MusicEntitySerializer):
|
||||
model = music_models.Artist
|
||||
|
||||
def to_representation(self, instance):
|
||||
d = {
|
||||
"type": "Artist",
|
||||
"id": instance.fid,
|
||||
"name": instance.name,
|
||||
"published": instance.creation_date.isoformat(),
|
||||
"musicbrainzId": str(instance.mbid) if instance.mbid else None,
|
||||
}
|
||||
|
||||
if self.context.get("include_ap_context", self.parent is None):
|
||||
d["@context"] = AP_CONTEXT
|
||||
return d
|
||||
|
||||
|
||||
class RecordingMetadataSerializer(serializers.Serializer):
|
||||
musicbrainz_id = serializers.UUIDField(required=False, allow_null=True)
|
||||
title = serializers.CharField()
|
||||
class AlbumSerializer(MusicEntitySerializer):
|
||||
model = music_models.Album
|
||||
released = serializers.DateField(allow_null=True, required=False)
|
||||
artists = serializers.ListField(child=ArtistSerializer(), min_length=1)
|
||||
|
||||
def to_representation(self, instance):
|
||||
d = {
|
||||
"type": "Album",
|
||||
"id": instance.fid,
|
||||
"name": instance.title,
|
||||
"published": instance.creation_date.isoformat(),
|
||||
"musicbrainzId": str(instance.mbid) if instance.mbid else None,
|
||||
"released": instance.release_date.isoformat()
|
||||
if instance.release_date
|
||||
else None,
|
||||
"artists": [
|
||||
ArtistSerializer(
|
||||
instance.artist, context={"include_ap_context": False}
|
||||
).data
|
||||
],
|
||||
}
|
||||
if instance.cover:
|
||||
d["cover"] = {"type": "Image", "url": utils.full_url(instance.cover.url)}
|
||||
if self.context.get("include_ap_context", self.parent is None):
|
||||
d["@context"] = AP_CONTEXT
|
||||
return d
|
||||
|
||||
def get_create_data(self, validated_data):
|
||||
artist_data = validated_data["artists"][0]
|
||||
artist = ArtistSerializer(
|
||||
context={"activity": self.context.get("activity")}
|
||||
).create(artist_data)
|
||||
|
||||
return {
|
||||
"mbid": validated_data.get("musicbrainzId"),
|
||||
"fid": validated_data["id"],
|
||||
"title": validated_data["name"],
|
||||
"creation_date": validated_data["published"],
|
||||
"artist": artist,
|
||||
"release_date": validated_data.get("released"),
|
||||
"from_activity": self.context.get("activity"),
|
||||
}
|
||||
|
||||
|
||||
class AudioMetadataSerializer(serializers.Serializer):
|
||||
artist = ArtistMetadataSerializer()
|
||||
release = ReleaseMetadataSerializer()
|
||||
recording = RecordingMetadataSerializer()
|
||||
bitrate = serializers.IntegerField(required=False, allow_null=True, min_value=0)
|
||||
size = serializers.IntegerField(required=False, allow_null=True, min_value=0)
|
||||
length = serializers.IntegerField(required=False, allow_null=True, min_value=0)
|
||||
class TrackSerializer(MusicEntitySerializer):
|
||||
model = music_models.Track
|
||||
position = serializers.IntegerField(min_value=0, allow_null=True, required=False)
|
||||
artists = serializers.ListField(child=ArtistSerializer(), min_length=1)
|
||||
album = AlbumSerializer()
|
||||
|
||||
def to_representation(self, instance):
|
||||
d = {
|
||||
"type": "Track",
|
||||
"id": instance.fid,
|
||||
"name": instance.title,
|
||||
"published": instance.creation_date.isoformat(),
|
||||
"musicbrainzId": str(instance.mbid) if instance.mbid else None,
|
||||
"position": instance.position,
|
||||
"artists": [
|
||||
ArtistSerializer(
|
||||
instance.artist, context={"include_ap_context": False}
|
||||
).data
|
||||
],
|
||||
"album": AlbumSerializer(
|
||||
instance.album, context={"include_ap_context": False}
|
||||
).data,
|
||||
}
|
||||
|
||||
if self.context.get("include_ap_context", self.parent is None):
|
||||
d["@context"] = AP_CONTEXT
|
||||
return d
|
||||
|
||||
def get_create_data(self, validated_data):
|
||||
artist_data = validated_data["artists"][0]
|
||||
artist = ArtistSerializer(
|
||||
context={"activity": self.context.get("activity")}
|
||||
).create(artist_data)
|
||||
album = AlbumSerializer(
|
||||
context={"activity": self.context.get("activity")}
|
||||
).create(validated_data["album"])
|
||||
|
||||
return {
|
||||
"mbid": validated_data.get("musicbrainzId"),
|
||||
"fid": validated_data["id"],
|
||||
"title": validated_data["name"],
|
||||
"position": validated_data.get("position"),
|
||||
"creation_date": validated_data["published"],
|
||||
"artist": artist,
|
||||
"album": album,
|
||||
"from_activity": self.context.get("activity"),
|
||||
}
|
||||
|
||||
|
||||
class AudioSerializer(serializers.Serializer):
|
||||
type = serializers.CharField()
|
||||
class UploadSerializer(serializers.Serializer):
|
||||
type = serializers.ChoiceField(choices=["Audio"])
|
||||
id = serializers.URLField(max_length=500)
|
||||
library = serializers.URLField(max_length=500)
|
||||
url = serializers.JSONField()
|
||||
published = serializers.DateTimeField()
|
||||
updated = serializers.DateTimeField(required=False)
|
||||
metadata = AudioMetadataSerializer()
|
||||
updated = serializers.DateTimeField(required=False, allow_null=True)
|
||||
bitrate = serializers.IntegerField(min_value=0)
|
||||
size = serializers.IntegerField(min_value=0)
|
||||
duration = serializers.IntegerField(min_value=0)
|
||||
|
||||
def validate_type(self, v):
|
||||
if v != "Audio":
|
||||
raise serializers.ValidationError("Invalid type for audio")
|
||||
return v
|
||||
track = TrackSerializer(required=True)
|
||||
|
||||
def validate_url(self, v):
|
||||
try:
|
||||
|
@ -699,61 +794,64 @@ class AudioSerializer(serializers.Serializer):
|
|||
if lb.fid != v:
|
||||
raise serializers.ValidationError("Invalid library")
|
||||
return lb
|
||||
|
||||
actor = self.context.get("actor")
|
||||
kwargs = {}
|
||||
if actor:
|
||||
kwargs["actor"] = actor
|
||||
try:
|
||||
return music_models.Library.objects.get(fid=v)
|
||||
return music_models.Library.objects.get(fid=v, **kwargs)
|
||||
except music_models.Library.DoesNotExist:
|
||||
raise serializers.ValidationError("Invalid library")
|
||||
|
||||
def create(self, validated_data):
|
||||
defaults = {
|
||||
try:
|
||||
return music_models.Upload.objects.get(fid=validated_data["id"])
|
||||
except music_models.Upload.DoesNotExist:
|
||||
pass
|
||||
|
||||
track = TrackSerializer(
|
||||
context={"activity": self.context.get("activity")}
|
||||
).create(validated_data["track"])
|
||||
|
||||
data = {
|
||||
"fid": validated_data["id"],
|
||||
"mimetype": validated_data["url"]["mediaType"],
|
||||
"source": validated_data["url"]["href"],
|
||||
"creation_date": validated_data["published"],
|
||||
"modification_date": validated_data.get("updated"),
|
||||
"metadata": self.initial_data,
|
||||
"track": track,
|
||||
"duration": validated_data["duration"],
|
||||
"size": validated_data["size"],
|
||||
"bitrate": validated_data["bitrate"],
|
||||
"library": validated_data["library"],
|
||||
"from_activity": self.context.get("activity"),
|
||||
"import_status": "finished",
|
||||
}
|
||||
tf, created = validated_data["library"].files.update_or_create(
|
||||
fid=validated_data["id"], defaults=defaults
|
||||
)
|
||||
return tf
|
||||
return music_models.Upload.objects.create(**data)
|
||||
|
||||
def to_representation(self, instance):
|
||||
track = instance.track
|
||||
album = instance.track.album
|
||||
artist = instance.track.artist
|
||||
d = {
|
||||
"type": "Audio",
|
||||
"id": instance.get_federation_id(),
|
||||
"library": instance.library.get_federation_id(),
|
||||
"name": instance.track.full_name,
|
||||
"library": instance.library.fid,
|
||||
"name": track.full_name,
|
||||
"published": instance.creation_date.isoformat(),
|
||||
"metadata": {
|
||||
"artist": {
|
||||
"musicbrainz_id": str(artist.mbid) if artist.mbid else None,
|
||||
"name": artist.name,
|
||||
},
|
||||
"release": {
|
||||
"musicbrainz_id": str(album.mbid) if album.mbid else None,
|
||||
"title": album.title,
|
||||
},
|
||||
"recording": {
|
||||
"musicbrainz_id": str(track.mbid) if track.mbid else None,
|
||||
"title": track.title,
|
||||
},
|
||||
"bitrate": instance.bitrate,
|
||||
"size": instance.size,
|
||||
"length": instance.duration,
|
||||
},
|
||||
"bitrate": instance.bitrate,
|
||||
"size": instance.size,
|
||||
"duration": instance.duration,
|
||||
"url": {
|
||||
"href": utils.full_url(instance.listen_url),
|
||||
"type": "Link",
|
||||
"mediaType": instance.mimetype,
|
||||
},
|
||||
"track": TrackSerializer(track, context={"include_ap_context": False}).data,
|
||||
}
|
||||
if instance.modification_date:
|
||||
d["updated"] = instance.modification_date.isoformat()
|
||||
|
||||
if self.context.get("include_ap_context", True):
|
||||
if self.context.get("include_ap_context", self.parent is None):
|
||||
d["@context"] = AP_CONTEXT
|
||||
return d
|
||||
|
||||
|
|
|
@ -27,7 +27,7 @@ def clean_music_cache():
|
|||
limit = timezone.now() - datetime.timedelta(minutes=delay)
|
||||
|
||||
candidates = (
|
||||
music_models.TrackFile.objects.filter(
|
||||
music_models.Upload.objects.filter(
|
||||
Q(audio_file__isnull=False)
|
||||
& (Q(accessed_date__lt=limit) | Q(accessed_date=None))
|
||||
)
|
||||
|
@ -36,13 +36,13 @@ def clean_music_cache():
|
|||
.only("audio_file", "id")
|
||||
.order_by("id")
|
||||
)
|
||||
for tf in candidates:
|
||||
tf.audio_file.delete()
|
||||
for upload in candidates:
|
||||
upload.audio_file.delete()
|
||||
|
||||
# we also delete orphaned files, if any
|
||||
storage = models.LibraryTrack._meta.get_field("audio_file").storage
|
||||
files = get_files(storage, "federation_cache/tracks")
|
||||
existing = music_models.TrackFile.objects.filter(audio_file__in=files)
|
||||
existing = music_models.Upload.objects.filter(audio_file__in=files)
|
||||
missing = set(files) - set(existing.values_list("audio_file", flat=True))
|
||||
for m in missing:
|
||||
storage.delete(m)
|
||||
|
@ -70,61 +70,30 @@ def dispatch_inbox(activity):
|
|||
creation, etc.)
|
||||
"""
|
||||
|
||||
try:
|
||||
routes.inbox.dispatch(
|
||||
activity.payload,
|
||||
context={
|
||||
"activity": activity,
|
||||
"actor": activity.actor,
|
||||
"inbox_items": (
|
||||
activity.inbox_items.local()
|
||||
.select_related()
|
||||
.select_related("actor__user")
|
||||
.prefetch_related("activity__object", "activity__target")
|
||||
),
|
||||
},
|
||||
)
|
||||
except Exception:
|
||||
activity.inbox_items.local().update(
|
||||
delivery_attempts=F("delivery_attempts") + 1,
|
||||
last_delivery_date=timezone.now(),
|
||||
)
|
||||
raise
|
||||
else:
|
||||
activity.inbox_items.local().update(
|
||||
delivery_attempts=F("delivery_attempts") + 1,
|
||||
last_delivery_date=timezone.now(),
|
||||
is_delivered=True,
|
||||
)
|
||||
routes.inbox.dispatch(
|
||||
activity.payload,
|
||||
context={
|
||||
"activity": activity,
|
||||
"actor": activity.actor,
|
||||
"inbox_items": activity.inbox_items.filter(is_read=False),
|
||||
},
|
||||
)
|
||||
|
||||
|
||||
@celery.app.task(name="federation.dispatch_outbox")
|
||||
@celery.require_instance(models.Activity.objects.select_related(), "activity")
|
||||
def dispatch_outbox(activity):
|
||||
"""
|
||||
Deliver a local activity to its recipients
|
||||
Deliver a local activity to its recipients, both locally and remotely
|
||||
"""
|
||||
inbox_items = activity.inbox_items.all().select_related("actor")
|
||||
local_recipients_items = [ii for ii in inbox_items if ii.actor.is_local]
|
||||
if local_recipients_items:
|
||||
inbox_items = activity.inbox_items.filter(is_read=False).select_related()
|
||||
deliveries = activity.deliveries.filter(is_delivered=False)
|
||||
|
||||
if inbox_items.exists():
|
||||
dispatch_inbox.delay(activity_id=activity.pk)
|
||||
remote_recipients_items = [ii for ii in inbox_items if not ii.actor.is_local]
|
||||
|
||||
shared_inbox_urls = {
|
||||
ii.actor.shared_inbox_url
|
||||
for ii in remote_recipients_items
|
||||
if ii.actor.shared_inbox_url
|
||||
}
|
||||
inbox_urls = {
|
||||
ii.actor.inbox_url
|
||||
for ii in remote_recipients_items
|
||||
if not ii.actor.shared_inbox_url
|
||||
}
|
||||
for url in shared_inbox_urls:
|
||||
deliver_to_remote_inbox.delay(activity_id=activity.pk, shared_inbox_url=url)
|
||||
|
||||
for url in inbox_urls:
|
||||
deliver_to_remote_inbox.delay(activity_id=activity.pk, inbox_url=url)
|
||||
for id in deliveries.values_list("pk", flat=True):
|
||||
deliver_to_remote.delay(delivery_id=id)
|
||||
|
||||
|
||||
@celery.app.task(
|
||||
|
@ -133,22 +102,21 @@ def dispatch_outbox(activity):
|
|||
retry_backoff=30,
|
||||
max_retries=5,
|
||||
)
|
||||
@celery.require_instance(models.Activity.objects.select_related(), "activity")
|
||||
def deliver_to_remote_inbox(activity, inbox_url=None, shared_inbox_url=None):
|
||||
url = inbox_url or shared_inbox_url
|
||||
actor = activity.actor
|
||||
inbox_items = activity.inbox_items.filter(is_delivered=False)
|
||||
if inbox_url:
|
||||
inbox_items = inbox_items.filter(actor__inbox_url=inbox_url)
|
||||
else:
|
||||
inbox_items = inbox_items.filter(actor__shared_inbox_url=shared_inbox_url)
|
||||
logger.info("Preparing activity delivery to %s", url)
|
||||
@celery.require_instance(
|
||||
models.Delivery.objects.filter(is_delivered=False).select_related(
|
||||
"activity__actor"
|
||||
),
|
||||
"delivery",
|
||||
)
|
||||
def deliver_to_remote(delivery):
|
||||
actor = delivery.activity.actor
|
||||
logger.info("Preparing activity delivery to %s", delivery.inbox_url)
|
||||
auth = signing.get_auth(actor.private_key, actor.private_key_id)
|
||||
try:
|
||||
response = session.get_session().post(
|
||||
auth=auth,
|
||||
json=activity.payload,
|
||||
url=url,
|
||||
json=delivery.activity.payload,
|
||||
url=delivery.inbox_url,
|
||||
timeout=5,
|
||||
verify=settings.EXTERNAL_REQUESTS_VERIFY_SSL,
|
||||
headers={"Content-Type": "application/activity+json"},
|
||||
|
@ -156,10 +124,12 @@ def deliver_to_remote_inbox(activity, inbox_url=None, shared_inbox_url=None):
|
|||
logger.debug("Remote answered with %s", response.status_code)
|
||||
response.raise_for_status()
|
||||
except Exception:
|
||||
inbox_items.update(
|
||||
last_delivery_date=timezone.now(),
|
||||
delivery_attempts=F("delivery_attempts") + 1,
|
||||
)
|
||||
delivery.last_attempt_date = timezone.now()
|
||||
delivery.attempts = F("attempts") + 1
|
||||
delivery.save(update_fields=["last_attempt_date", "attempts"])
|
||||
raise
|
||||
else:
|
||||
inbox_items.update(last_delivery_date=timezone.now(), is_delivered=True)
|
||||
delivery.last_attempt_date = timezone.now()
|
||||
delivery.attempts = F("attempts") + 1
|
||||
delivery.is_delivered = True
|
||||
delivery.save(update_fields=["last_attempt_date", "attempts", "is_delivered"])
|
||||
|
|
|
@ -8,10 +8,15 @@ music_router = routers.SimpleRouter(trailing_slash=False)
|
|||
router.register(
|
||||
r"federation/instance/actors", views.InstanceActorViewSet, "instance-actors"
|
||||
)
|
||||
router.register(r"federation/shared", views.SharedViewSet, "shared")
|
||||
router.register(r"federation/actors", views.ActorViewSet, "actors")
|
||||
router.register(r".well-known", views.WellKnownViewSet, "well-known")
|
||||
|
||||
music_router.register(r"libraries", views.MusicLibraryViewSet, "libraries")
|
||||
music_router.register(r"uploads", views.MusicUploadViewSet, "uploads")
|
||||
music_router.register(r"artists", views.MusicArtistViewSet, "artists")
|
||||
music_router.register(r"albums", views.MusicAlbumViewSet, "albums")
|
||||
music_router.register(r"tracks", views.MusicTrackViewSet, "tracks")
|
||||
urlpatterns = router.urls + [
|
||||
url("federation/music/", include((music_router.urls, "music"), namespace="music"))
|
||||
]
|
||||
|
|
|
@ -27,6 +27,22 @@ class FederationMixin(object):
|
|||
return super().dispatch(request, *args, **kwargs)
|
||||
|
||||
|
||||
class SharedViewSet(FederationMixin, viewsets.GenericViewSet):
|
||||
permission_classes = []
|
||||
authentication_classes = [authentication.SignatureAuthentication]
|
||||
renderer_classes = [renderers.ActivityPubRenderer]
|
||||
|
||||
@list_route(methods=["post"])
|
||||
def inbox(self, request, *args, **kwargs):
|
||||
if request.method.lower() == "post" and request.actor is None:
|
||||
raise exceptions.AuthenticationFailed(
|
||||
"You need a valid signature to send an activity"
|
||||
)
|
||||
if request.method.lower() == "post":
|
||||
activity.receive(activity=request.data, on_behalf_of=request.actor)
|
||||
return response.Response({}, status=200)
|
||||
|
||||
|
||||
class ActorViewSet(FederationMixin, mixins.RetrieveModelMixin, viewsets.GenericViewSet):
|
||||
lookup_field = "preferred_username"
|
||||
authentication_classes = [authentication.SignatureAuthentication]
|
||||
|
@ -49,6 +65,18 @@ class ActorViewSet(FederationMixin, mixins.RetrieveModelMixin, viewsets.GenericV
|
|||
def outbox(self, request, *args, **kwargs):
|
||||
return response.Response({}, status=200)
|
||||
|
||||
@detail_route(methods=["get"])
|
||||
def followers(self, request, *args, **kwargs):
|
||||
self.get_object()
|
||||
# XXX to implement
|
||||
return response.Response({})
|
||||
|
||||
@detail_route(methods=["get"])
|
||||
def following(self, request, *args, **kwargs):
|
||||
self.get_object()
|
||||
# XXX to implement
|
||||
return response.Response({})
|
||||
|
||||
|
||||
class InstanceActorViewSet(FederationMixin, viewsets.GenericViewSet):
|
||||
lookup_field = "actor"
|
||||
|
@ -175,8 +203,8 @@ class MusicLibraryViewSet(
|
|||
"actor": lb.actor,
|
||||
"name": lb.name,
|
||||
"summary": lb.description,
|
||||
"items": lb.files.order_by("-creation_date"),
|
||||
"item_serializer": serializers.AudioSerializer,
|
||||
"items": lb.uploads.order_by("-creation_date"),
|
||||
"item_serializer": serializers.UploadSerializer,
|
||||
}
|
||||
page = request.GET.get("page")
|
||||
if page is None:
|
||||
|
@ -204,3 +232,49 @@ class MusicLibraryViewSet(
|
|||
return response.Response(status=404)
|
||||
|
||||
return response.Response(data)
|
||||
|
||||
@detail_route(methods=["get"])
|
||||
def followers(self, request, *args, **kwargs):
|
||||
self.get_object()
|
||||
# XXX Implement this
|
||||
return response.Response({})
|
||||
|
||||
|
||||
class MusicUploadViewSet(
|
||||
FederationMixin, mixins.RetrieveModelMixin, viewsets.GenericViewSet
|
||||
):
|
||||
authentication_classes = [authentication.SignatureAuthentication]
|
||||
permission_classes = []
|
||||
renderer_classes = [renderers.ActivityPubRenderer]
|
||||
queryset = music_models.Upload.objects.none()
|
||||
lookup_field = "uuid"
|
||||
|
||||
|
||||
class MusicArtistViewSet(
|
||||
FederationMixin, mixins.RetrieveModelMixin, viewsets.GenericViewSet
|
||||
):
|
||||
authentication_classes = [authentication.SignatureAuthentication]
|
||||
permission_classes = []
|
||||
renderer_classes = [renderers.ActivityPubRenderer]
|
||||
queryset = music_models.Artist.objects.none()
|
||||
lookup_field = "uuid"
|
||||
|
||||
|
||||
class MusicAlbumViewSet(
|
||||
FederationMixin, mixins.RetrieveModelMixin, viewsets.GenericViewSet
|
||||
):
|
||||
authentication_classes = [authentication.SignatureAuthentication]
|
||||
permission_classes = []
|
||||
renderer_classes = [renderers.ActivityPubRenderer]
|
||||
queryset = music_models.Album.objects.none()
|
||||
lookup_field = "uuid"
|
||||
|
||||
|
||||
class MusicTrackViewSet(
|
||||
FederationMixin, mixins.RetrieveModelMixin, viewsets.GenericViewSet
|
||||
):
|
||||
authentication_classes = [authentication.SignatureAuthentication]
|
||||
permission_classes = []
|
||||
renderer_classes = [renderers.ActivityPubRenderer]
|
||||
queryset = music_models.Track.objects.none()
|
||||
lookup_field = "uuid"
|
||||
|
|
|
@ -43,7 +43,7 @@ def get_artists():
|
|||
|
||||
|
||||
def get_music_duration():
|
||||
seconds = models.TrackFile.objects.aggregate(d=Sum("duration"))["d"]
|
||||
seconds = models.Upload.objects.aggregate(d=Sum("duration"))["d"]
|
||||
if seconds:
|
||||
return seconds / 3600
|
||||
return 0
|
||||
|
|
|
@ -6,7 +6,7 @@ from funkwhale_api.requests import models as requests_models
|
|||
from funkwhale_api.users import models as users_models
|
||||
|
||||
|
||||
class ManageTrackFileFilterSet(filters.FilterSet):
|
||||
class ManageUploadFilterSet(filters.FilterSet):
|
||||
q = fields.SearchFilter(
|
||||
search_fields=[
|
||||
"track__title",
|
||||
|
@ -17,7 +17,7 @@ class ManageTrackFileFilterSet(filters.FilterSet):
|
|||
)
|
||||
|
||||
class Meta:
|
||||
model = music_models.TrackFile
|
||||
model = music_models.Upload
|
||||
fields = ["q", "track__album", "track__artist", "track"]
|
||||
|
||||
|
||||
|
|
|
@ -10,14 +10,14 @@ from funkwhale_api.users import models as users_models
|
|||
from . import filters
|
||||
|
||||
|
||||
class ManageTrackFileArtistSerializer(serializers.ModelSerializer):
|
||||
class ManageUploadArtistSerializer(serializers.ModelSerializer):
|
||||
class Meta:
|
||||
model = music_models.Artist
|
||||
fields = ["id", "mbid", "creation_date", "name"]
|
||||
|
||||
|
||||
class ManageTrackFileAlbumSerializer(serializers.ModelSerializer):
|
||||
artist = ManageTrackFileArtistSerializer()
|
||||
class ManageUploadAlbumSerializer(serializers.ModelSerializer):
|
||||
artist = ManageUploadArtistSerializer()
|
||||
|
||||
class Meta:
|
||||
model = music_models.Album
|
||||
|
@ -32,20 +32,20 @@ class ManageTrackFileAlbumSerializer(serializers.ModelSerializer):
|
|||
)
|
||||
|
||||
|
||||
class ManageTrackFileTrackSerializer(serializers.ModelSerializer):
|
||||
artist = ManageTrackFileArtistSerializer()
|
||||
album = ManageTrackFileAlbumSerializer()
|
||||
class ManageUploadTrackSerializer(serializers.ModelSerializer):
|
||||
artist = ManageUploadArtistSerializer()
|
||||
album = ManageUploadAlbumSerializer()
|
||||
|
||||
class Meta:
|
||||
model = music_models.Track
|
||||
fields = ("id", "mbid", "title", "album", "artist", "creation_date", "position")
|
||||
|
||||
|
||||
class ManageTrackFileSerializer(serializers.ModelSerializer):
|
||||
track = ManageTrackFileTrackSerializer()
|
||||
class ManageUploadSerializer(serializers.ModelSerializer):
|
||||
track = ManageUploadTrackSerializer()
|
||||
|
||||
class Meta:
|
||||
model = music_models.TrackFile
|
||||
model = music_models.Upload
|
||||
fields = (
|
||||
"id",
|
||||
"path",
|
||||
|
@ -62,9 +62,9 @@ class ManageTrackFileSerializer(serializers.ModelSerializer):
|
|||
)
|
||||
|
||||
|
||||
class ManageTrackFileActionSerializer(common_serializers.ActionSerializer):
|
||||
class ManageUploadActionSerializer(common_serializers.ActionSerializer):
|
||||
actions = [common_serializers.Action("delete", allow_all=False)]
|
||||
filterset_class = filters.ManageTrackFileFilterSet
|
||||
filterset_class = filters.ManageUploadFilterSet
|
||||
|
||||
@transaction.atomic
|
||||
def handle_delete(self, objects):
|
||||
|
|
|
@ -4,7 +4,7 @@ from rest_framework import routers
|
|||
from . import views
|
||||
|
||||
library_router = routers.SimpleRouter()
|
||||
library_router.register(r"track-files", views.ManageTrackFileViewSet, "track-files")
|
||||
library_router.register(r"uploads", views.ManageUploadViewSet, "uploads")
|
||||
requests_router = routers.SimpleRouter()
|
||||
requests_router.register(
|
||||
r"import-requests", views.ManageImportRequestViewSet, "import-requests"
|
||||
|
|
|
@ -10,16 +10,16 @@ from funkwhale_api.users.permissions import HasUserPermission
|
|||
from . import filters, serializers
|
||||
|
||||
|
||||
class ManageTrackFileViewSet(
|
||||
class ManageUploadViewSet(
|
||||
mixins.ListModelMixin, mixins.RetrieveModelMixin, viewsets.GenericViewSet
|
||||
):
|
||||
queryset = (
|
||||
music_models.TrackFile.objects.all()
|
||||
music_models.Upload.objects.all()
|
||||
.select_related("track__artist", "track__album__artist")
|
||||
.order_by("-id")
|
||||
)
|
||||
serializer_class = serializers.ManageTrackFileSerializer
|
||||
filter_class = filters.ManageTrackFileFilterSet
|
||||
serializer_class = serializers.ManageUploadSerializer
|
||||
filter_class = filters.ManageUploadFilterSet
|
||||
permission_classes = (HasUserPermission,)
|
||||
required_permissions = ["library"]
|
||||
ordering_fields = [
|
||||
|
@ -35,7 +35,7 @@ class ManageTrackFileViewSet(
|
|||
@list_route(methods=["post"])
|
||||
def action(self, request, *args, **kwargs):
|
||||
queryset = self.get_queryset()
|
||||
serializer = serializers.ManageTrackFileActionSerializer(
|
||||
serializer = serializers.ManageUploadActionSerializer(
|
||||
request.data, queryset=queryset
|
||||
)
|
||||
serializer.is_valid(raise_exception=True)
|
||||
|
|
|
@ -33,8 +33,8 @@ class ImportBatchAdmin(admin.ModelAdmin):
|
|||
|
||||
@admin.register(models.ImportJob)
|
||||
class ImportJobAdmin(admin.ModelAdmin):
|
||||
list_display = ["source", "batch", "track_file", "status", "mbid"]
|
||||
list_select_related = ["track_file", "batch"]
|
||||
list_display = ["source", "batch", "upload", "status", "mbid"]
|
||||
list_select_related = ["upload", "batch"]
|
||||
search_fields = ["source", "batch__pk", "mbid"]
|
||||
list_filter = ["status"]
|
||||
|
||||
|
@ -55,8 +55,8 @@ class LyricsAdmin(admin.ModelAdmin):
|
|||
list_filter = ["work__language"]
|
||||
|
||||
|
||||
@admin.register(models.TrackFile)
|
||||
class TrackFileAdmin(admin.ModelAdmin):
|
||||
@admin.register(models.Upload)
|
||||
class UploadAdmin(admin.ModelAdmin):
|
||||
list_display = [
|
||||
"track",
|
||||
"audio_file",
|
||||
|
|
|
@ -17,6 +17,7 @@ SAMPLES_PATH = os.path.join(
|
|||
class ArtistFactory(factory.django.DjangoModelFactory):
|
||||
name = factory.Faker("name")
|
||||
mbid = factory.Faker("uuid4")
|
||||
fid = factory.Faker("federation_url")
|
||||
|
||||
class Meta:
|
||||
model = "music.Artist"
|
||||
|
@ -30,6 +31,7 @@ class AlbumFactory(factory.django.DjangoModelFactory):
|
|||
cover = factory.django.ImageField()
|
||||
artist = factory.SubFactory(ArtistFactory)
|
||||
release_group_id = factory.Faker("uuid4")
|
||||
fid = factory.Faker("federation_url")
|
||||
|
||||
class Meta:
|
||||
model = "music.Album"
|
||||
|
@ -37,6 +39,7 @@ class AlbumFactory(factory.django.DjangoModelFactory):
|
|||
|
||||
@registry.register
|
||||
class TrackFactory(factory.django.DjangoModelFactory):
|
||||
fid = factory.Faker("federation_url")
|
||||
title = factory.Faker("sentence", nb_words=3)
|
||||
mbid = factory.Faker("uuid4")
|
||||
album = factory.SubFactory(AlbumFactory)
|
||||
|
@ -49,7 +52,8 @@ class TrackFactory(factory.django.DjangoModelFactory):
|
|||
|
||||
|
||||
@registry.register
|
||||
class TrackFileFactory(factory.django.DjangoModelFactory):
|
||||
class UploadFactory(factory.django.DjangoModelFactory):
|
||||
fid = factory.Faker("federation_url")
|
||||
track = factory.SubFactory(TrackFactory)
|
||||
library = factory.SubFactory(federation_factories.MusicLibraryFactory)
|
||||
audio_file = factory.django.FileField(
|
||||
|
@ -62,7 +66,7 @@ class TrackFileFactory(factory.django.DjangoModelFactory):
|
|||
mimetype = "audio/ogg"
|
||||
|
||||
class Meta:
|
||||
model = "music.TrackFile"
|
||||
model = "music.Upload"
|
||||
|
||||
class Params:
|
||||
in_place = factory.Trait(audio_file=None)
|
||||
|
|
|
@ -14,7 +14,7 @@ def create_data(count=25):
|
|||
artist=artist, size=random.randint(1, 5)
|
||||
)
|
||||
for album in albums:
|
||||
factories.TrackFileFactory.create_batch(
|
||||
factories.UploadFactory.create_batch(
|
||||
track__album=album, size=random.randint(3, 18)
|
||||
)
|
||||
|
||||
|
|
|
@ -41,7 +41,7 @@ class TrackFilter(filters.FilterSet):
|
|||
return queryset.playable_by(actor, value)
|
||||
|
||||
|
||||
class TrackFileFilter(filters.FilterSet):
|
||||
class UploadFilter(filters.FilterSet):
|
||||
library = filters.CharFilter("library__uuid")
|
||||
track = filters.UUIDFilter("track__uuid")
|
||||
track_artist = filters.UUIDFilter("track__artist__uuid")
|
||||
|
@ -67,7 +67,7 @@ class TrackFileFilter(filters.FilterSet):
|
|||
)
|
||||
|
||||
class Meta:
|
||||
model = models.TrackFile
|
||||
model = models.Upload
|
||||
fields = [
|
||||
"playable",
|
||||
"import_status",
|
||||
|
|
|
@ -15,7 +15,7 @@ class Importer(object):
|
|||
# let's validate data, just in case
|
||||
instance = self.model(**cleaned_data)
|
||||
exclude = EXCLUDE_VALIDATION.get(self.model.__name__, [])
|
||||
instance.full_clean(exclude=["mbid", "uuid"] + exclude)
|
||||
instance.full_clean(exclude=["mbid", "uuid", "fid", "from_activity"] + exclude)
|
||||
m = self.model.objects.update_or_create(mbid=mbid, defaults=cleaned_data)[0]
|
||||
for hook in import_hooks:
|
||||
hook(m, cleaned_data, raw_data)
|
||||
|
|
|
@ -27,9 +27,9 @@ class Command(BaseCommand):
|
|||
@transaction.atomic
|
||||
def fix_mimetypes(self, dry_run, **kwargs):
|
||||
self.stdout.write("Fixing missing mimetypes...")
|
||||
matching = models.TrackFile.objects.filter(
|
||||
source__startswith="file://"
|
||||
).exclude(mimetype__startswith="audio/")
|
||||
matching = models.Upload.objects.filter(source__startswith="file://").exclude(
|
||||
mimetype__startswith="audio/"
|
||||
)
|
||||
self.stdout.write(
|
||||
"[mimetypes] {} entries found with bad or no mimetype".format(
|
||||
matching.count()
|
||||
|
@ -48,7 +48,7 @@ class Command(BaseCommand):
|
|||
|
||||
def fix_file_data(self, dry_run, **kwargs):
|
||||
self.stdout.write("Fixing missing bitrate or length...")
|
||||
matching = models.TrackFile.objects.filter(
|
||||
matching = models.Upload.objects.filter(
|
||||
Q(bitrate__isnull=True) | Q(duration__isnull=True)
|
||||
)
|
||||
total = matching.count()
|
||||
|
@ -57,41 +57,41 @@ class Command(BaseCommand):
|
|||
)
|
||||
if dry_run:
|
||||
return
|
||||
for i, tf in enumerate(matching.only("audio_file")):
|
||||
for i, upload in enumerate(matching.only("audio_file")):
|
||||
self.stdout.write(
|
||||
"[bitrate/length] {}/{} fixing file #{}".format(i + 1, total, tf.pk)
|
||||
"[bitrate/length] {}/{} fixing file #{}".format(i + 1, total, upload.pk)
|
||||
)
|
||||
|
||||
try:
|
||||
audio_file = tf.get_audio_file()
|
||||
audio_file = upload.get_audio_file()
|
||||
if audio_file:
|
||||
data = utils.get_audio_file_data(audio_file)
|
||||
tf.bitrate = data["bitrate"]
|
||||
tf.duration = data["length"]
|
||||
tf.save(update_fields=["duration", "bitrate"])
|
||||
upload.bitrate = data["bitrate"]
|
||||
upload.duration = data["length"]
|
||||
upload.save(update_fields=["duration", "bitrate"])
|
||||
else:
|
||||
self.stderr.write("[bitrate/length] no file found")
|
||||
except Exception as e:
|
||||
self.stderr.write(
|
||||
"[bitrate/length] error with file #{}: {}".format(tf.pk, str(e))
|
||||
"[bitrate/length] error with file #{}: {}".format(upload.pk, str(e))
|
||||
)
|
||||
|
||||
def fix_file_size(self, dry_run, **kwargs):
|
||||
self.stdout.write("Fixing missing size...")
|
||||
matching = models.TrackFile.objects.filter(size__isnull=True)
|
||||
matching = models.Upload.objects.filter(size__isnull=True)
|
||||
total = matching.count()
|
||||
self.stdout.write("[size] {} entries found with missing values".format(total))
|
||||
if dry_run:
|
||||
return
|
||||
for i, tf in enumerate(matching.only("size")):
|
||||
for i, upload in enumerate(matching.only("size")):
|
||||
self.stdout.write(
|
||||
"[size] {}/{} fixing file #{}".format(i + 1, total, tf.pk)
|
||||
"[size] {}/{} fixing file #{}".format(i + 1, total, upload.pk)
|
||||
)
|
||||
|
||||
try:
|
||||
tf.size = tf.get_file_size()
|
||||
tf.save(update_fields=["size"])
|
||||
upload.size = upload.get_file_size()
|
||||
upload.save(update_fields=["size"])
|
||||
except Exception as e:
|
||||
self.stderr.write(
|
||||
"[size] error with file #{}: {}".format(tf.pk, str(e))
|
||||
"[size] error with file #{}: {}".format(upload.pk, str(e))
|
||||
)
|
|
@ -0,0 +1,66 @@
|
|||
# Generated by Django 2.0.8 on 2018-09-14 20:07
|
||||
|
||||
from django.db import migrations, models
|
||||
import django.db.models.deletion
|
||||
import django.utils.timezone
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('federation', '0011_auto_20180910_1902'),
|
||||
('music', '0030_auto_20180825_1411'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AddField(
|
||||
model_name='album',
|
||||
name='fid',
|
||||
field=models.URLField(db_index=True, max_length=500, null=True, unique=True),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='album',
|
||||
name='from_activity',
|
||||
field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, to='federation.Activity'),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='artist',
|
||||
name='fid',
|
||||
field=models.URLField(db_index=True, max_length=500, null=True, unique=True),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='artist',
|
||||
name='from_activity',
|
||||
field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, to='federation.Activity'),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='track',
|
||||
name='fid',
|
||||
field=models.URLField(db_index=True, max_length=500, null=True, unique=True),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='track',
|
||||
name='from_activity',
|
||||
field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, to='federation.Activity'),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='trackfile',
|
||||
name='from_activity',
|
||||
field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, to='federation.Activity'),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='work',
|
||||
name='fid',
|
||||
field=models.URLField(db_index=True, max_length=500, null=True, unique=True),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='work',
|
||||
name='from_activity',
|
||||
field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, to='federation.Activity'),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='trackfile',
|
||||
name='modification_date',
|
||||
field=models.DateTimeField(default=django.utils.timezone.now, null=True),
|
||||
),
|
||||
]
|
|
@ -0,0 +1,40 @@
|
|||
# Generated by Django 2.0.8 on 2018-09-21 16:47
|
||||
|
||||
from django.db import migrations, models
|
||||
import django.db.models.deletion
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
dependencies = [("music", "0031_auto_20180914_2007")]
|
||||
|
||||
operations = [
|
||||
migrations.RenameModel("TrackFile", "Upload"),
|
||||
migrations.RenameField(
|
||||
model_name="importjob", old_name="track_file", new_name="upload"
|
||||
),
|
||||
migrations.RenameField(
|
||||
model_name="library", old_name="files_count", new_name="uploads_count"
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name="upload",
|
||||
name="library",
|
||||
field=models.ForeignKey(
|
||||
blank=True,
|
||||
null=True,
|
||||
on_delete=django.db.models.deletion.CASCADE,
|
||||
related_name="uploads",
|
||||
to="music.Library",
|
||||
),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name="upload",
|
||||
name="track",
|
||||
field=models.ForeignKey(
|
||||
blank=True,
|
||||
null=True,
|
||||
on_delete=django.db.models.deletion.CASCADE,
|
||||
related_name="uploads",
|
||||
to="music.Track",
|
||||
),
|
||||
),
|
||||
]
|
|
@ -32,8 +32,12 @@ def empty_dict():
|
|||
|
||||
|
||||
class APIModelMixin(models.Model):
|
||||
fid = models.URLField(unique=True, max_length=500, db_index=True, null=True)
|
||||
mbid = models.UUIDField(unique=True, db_index=True, null=True, blank=True)
|
||||
uuid = models.UUIDField(unique=True, db_index=True, default=uuid.uuid4)
|
||||
from_activity = models.ForeignKey(
|
||||
"federation.Activity", null=True, on_delete=models.SET_NULL
|
||||
)
|
||||
api_includes = []
|
||||
creation_date = models.DateTimeField(default=timezone.now)
|
||||
import_hooks = []
|
||||
|
@ -86,6 +90,23 @@ class APIModelMixin(models.Model):
|
|||
self.musicbrainz_model, self.mbid
|
||||
)
|
||||
|
||||
def get_federation_id(self):
|
||||
if self.fid:
|
||||
return self.fid
|
||||
|
||||
return federation_utils.full_url(
|
||||
reverse(
|
||||
"federation:music:{}-detail".format(self.federation_namespace),
|
||||
kwargs={"uuid": self.uuid},
|
||||
)
|
||||
)
|
||||
|
||||
def save(self, **kwargs):
|
||||
if not self.pk and not self.fid:
|
||||
self.fid = self.get_federation_id()
|
||||
|
||||
return super().save(**kwargs)
|
||||
|
||||
|
||||
class ArtistQuerySet(models.QuerySet):
|
||||
def with_albums_count(self):
|
||||
|
@ -116,7 +137,7 @@ class ArtistQuerySet(models.QuerySet):
|
|||
|
||||
class Artist(APIModelMixin):
|
||||
name = models.CharField(max_length=255)
|
||||
|
||||
federation_namespace = "artists"
|
||||
musicbrainz_model = "artist"
|
||||
musicbrainz_mapping = {
|
||||
"mbid": {"musicbrainz_field_name": "id"},
|
||||
|
@ -195,6 +216,7 @@ class Album(APIModelMixin):
|
|||
|
||||
api_includes = ["artist-credits", "recordings", "media", "release-groups"]
|
||||
api = musicbrainz.api.releases
|
||||
federation_namespace = "albums"
|
||||
musicbrainz_model = "release"
|
||||
musicbrainz_mapping = {
|
||||
"mbid": {"musicbrainz_field_name": "id"},
|
||||
|
@ -290,6 +312,8 @@ class Work(APIModelMixin):
|
|||
api = musicbrainz.api.works
|
||||
api_includes = ["url-rels", "recording-rels"]
|
||||
musicbrainz_model = "work"
|
||||
federation_namespace = "works"
|
||||
|
||||
musicbrainz_mapping = {
|
||||
"mbid": {"musicbrainz_field_name": "id"},
|
||||
"title": {"musicbrainz_field_name": "title"},
|
||||
|
@ -307,6 +331,12 @@ class Work(APIModelMixin):
|
|||
|
||||
return lyric
|
||||
|
||||
def get_federation_id(self):
|
||||
if self.fid:
|
||||
return self.fid
|
||||
|
||||
return None
|
||||
|
||||
|
||||
class Lyrics(models.Model):
|
||||
uuid = models.UUIDField(unique=True, db_index=True, default=uuid.uuid4)
|
||||
|
@ -332,7 +362,7 @@ class TrackQuerySet(models.QuerySet):
|
|||
|
||||
def annotate_playable_by_actor(self, actor):
|
||||
files = (
|
||||
TrackFile.objects.playable_by(actor)
|
||||
Upload.objects.playable_by(actor)
|
||||
.filter(track=models.OuterRef("id"))
|
||||
.order_by("id")
|
||||
.values("id")[:1]
|
||||
|
@ -341,11 +371,25 @@ class TrackQuerySet(models.QuerySet):
|
|||
return self.annotate(is_playable_by_actor=subquery)
|
||||
|
||||
def playable_by(self, actor, include=True):
|
||||
files = TrackFile.objects.playable_by(actor, include)
|
||||
files = Upload.objects.playable_by(actor, include)
|
||||
if include:
|
||||
return self.filter(files__in=files)
|
||||
return self.filter(uploads__in=files)
|
||||
else:
|
||||
return self.exclude(files__in=files)
|
||||
return self.exclude(uploads__in=files)
|
||||
|
||||
def annotate_duration(self):
|
||||
first_upload = Upload.objects.filter(track=models.OuterRef("pk")).order_by("pk")
|
||||
return self.annotate(
|
||||
duration=models.Subquery(first_upload.values("duration")[:1])
|
||||
)
|
||||
|
||||
def annotate_file_data(self):
|
||||
first_upload = Upload.objects.filter(track=models.OuterRef("pk")).order_by("pk")
|
||||
return self.annotate(
|
||||
bitrate=models.Subquery(first_upload.values("bitrate")[:1]),
|
||||
size=models.Subquery(first_upload.values("size")[:1]),
|
||||
mimetype=models.Subquery(first_upload.values("mimetype")[:1]),
|
||||
)
|
||||
|
||||
|
||||
def get_artist(release_list):
|
||||
|
@ -364,7 +408,7 @@ class Track(APIModelMixin):
|
|||
work = models.ForeignKey(
|
||||
Work, related_name="tracks", null=True, blank=True, on_delete=models.CASCADE
|
||||
)
|
||||
|
||||
federation_namespace = "tracks"
|
||||
musicbrainz_model = "recording"
|
||||
api = musicbrainz.api.recordings
|
||||
api_includes = ["artist-credits", "releases", "media", "tags", "work-rels"]
|
||||
|
@ -482,8 +526,10 @@ class Track(APIModelMixin):
|
|||
return reverse("api:v1:listen-detail", kwargs={"uuid": self.uuid})
|
||||
|
||||
|
||||
class TrackFileQuerySet(models.QuerySet):
|
||||
class UploadQuerySet(models.QuerySet):
|
||||
def playable_by(self, actor, include=True):
|
||||
from funkwhale_api.federation.models import LibraryFollow
|
||||
|
||||
if actor is None:
|
||||
libraries = Library.objects.filter(privacy_level="everyone")
|
||||
|
||||
|
@ -492,8 +538,14 @@ class TrackFileQuerySet(models.QuerySet):
|
|||
instance_query = models.Q(
|
||||
privacy_level="instance", actor__domain=actor.domain
|
||||
)
|
||||
followed_libraries = LibraryFollow.objects.filter(
|
||||
actor=actor, approved=True
|
||||
).values_list("target", flat=True)
|
||||
libraries = Library.objects.filter(
|
||||
me_query | instance_query | models.Q(privacy_level="everyone")
|
||||
me_query
|
||||
| instance_query
|
||||
| models.Q(privacy_level="everyone")
|
||||
| models.Q(pk__in=followed_libraries)
|
||||
)
|
||||
if include:
|
||||
return self.filter(library__in=libraries)
|
||||
|
@ -523,11 +575,11 @@ def get_import_reference():
|
|||
return str(uuid.uuid4())
|
||||
|
||||
|
||||
class TrackFile(models.Model):
|
||||
class Upload(models.Model):
|
||||
fid = models.URLField(unique=True, max_length=500, null=True, blank=True)
|
||||
uuid = models.UUIDField(unique=True, db_index=True, default=uuid.uuid4)
|
||||
track = models.ForeignKey(
|
||||
Track, related_name="files", on_delete=models.CASCADE, null=True, blank=True
|
||||
Track, related_name="uploads", on_delete=models.CASCADE, null=True, blank=True
|
||||
)
|
||||
audio_file = models.FileField(upload_to=get_file_path, max_length=255)
|
||||
source = models.CharField(
|
||||
|
@ -537,7 +589,7 @@ class TrackFile(models.Model):
|
|||
max_length=500,
|
||||
)
|
||||
creation_date = models.DateTimeField(default=timezone.now)
|
||||
modification_date = models.DateTimeField(auto_now=True)
|
||||
modification_date = models.DateTimeField(default=timezone.now, null=True)
|
||||
accessed_date = models.DateTimeField(null=True, blank=True)
|
||||
duration = models.IntegerField(null=True, blank=True)
|
||||
size = models.IntegerField(null=True, blank=True)
|
||||
|
@ -545,7 +597,11 @@ class TrackFile(models.Model):
|
|||
acoustid_track_id = models.UUIDField(null=True, blank=True)
|
||||
mimetype = models.CharField(null=True, blank=True, max_length=200)
|
||||
library = models.ForeignKey(
|
||||
"library", null=True, blank=True, related_name="files", on_delete=models.CASCADE
|
||||
"library",
|
||||
null=True,
|
||||
blank=True,
|
||||
related_name="uploads",
|
||||
on_delete=models.CASCADE,
|
||||
)
|
||||
|
||||
# metadata from federation
|
||||
|
@ -569,8 +625,11 @@ class TrackFile(models.Model):
|
|||
import_details = JSONField(
|
||||
default=empty_dict, max_length=50000, encoder=DjangoJSONEncoder
|
||||
)
|
||||
from_activity = models.ForeignKey(
|
||||
"federation.Activity", null=True, on_delete=models.SET_NULL
|
||||
)
|
||||
|
||||
objects = TrackFileQuerySet.as_manager()
|
||||
objects = UploadQuerySet.as_manager()
|
||||
|
||||
def download_audio_from_remote(self, user):
|
||||
from funkwhale_api.common import session
|
||||
|
@ -586,6 +645,7 @@ class TrackFile(models.Model):
|
|||
auth=auth,
|
||||
stream=True,
|
||||
timeout=20,
|
||||
headers={"Content-Type": "application/octet-stream"},
|
||||
verify=settings.EXTERNAL_REQUESTS_VERIFY_SSL,
|
||||
)
|
||||
with remote_response as r:
|
||||
|
@ -605,7 +665,9 @@ class TrackFile(models.Model):
|
|||
if self.fid:
|
||||
return self.fid
|
||||
|
||||
return federation_utils.full_url("/federation/music/file/{}".format(self.uuid))
|
||||
return federation_utils.full_url(
|
||||
reverse("federation:music:uploads-detail", kwargs={"uuid": self.uuid})
|
||||
)
|
||||
|
||||
@property
|
||||
def filename(self):
|
||||
|
@ -648,6 +710,8 @@ class TrackFile(models.Model):
|
|||
self.mimetype = utils.guess_mimetype(self.audio_file)
|
||||
if not self.size and self.audio_file:
|
||||
self.size = self.audio_file.size
|
||||
if not self.pk and not self.fid and self.library.actor.is_local:
|
||||
self.fid = self.get_federation_id()
|
||||
return super().save(**kwargs)
|
||||
|
||||
def get_metadata(self):
|
||||
|
@ -658,7 +722,7 @@ class TrackFile(models.Model):
|
|||
|
||||
@property
|
||||
def listen_url(self):
|
||||
return self.track.listen_url + "?file={}".format(self.uuid)
|
||||
return self.track.listen_url + "?upload={}".format(self.uuid)
|
||||
|
||||
|
||||
IMPORT_STATUS_CHOICES = (
|
||||
|
@ -734,8 +798,8 @@ class ImportJob(models.Model):
|
|||
batch = models.ForeignKey(
|
||||
ImportBatch, related_name="jobs", on_delete=models.CASCADE
|
||||
)
|
||||
track_file = models.ForeignKey(
|
||||
TrackFile, related_name="jobs", null=True, blank=True, on_delete=models.CASCADE
|
||||
upload = models.ForeignKey(
|
||||
Upload, related_name="jobs", null=True, blank=True, on_delete=models.CASCADE
|
||||
)
|
||||
source = models.CharField(max_length=500)
|
||||
mbid = models.UUIDField(editable=False, null=True, blank=True)
|
||||
|
@ -793,7 +857,7 @@ class Library(federation_models.FederationMixin):
|
|||
privacy_level = models.CharField(
|
||||
choices=LIBRARY_PRIVACY_LEVEL_CHOICES, default="me", max_length=25
|
||||
)
|
||||
files_count = models.PositiveIntegerField(default=0)
|
||||
uploads_count = models.PositiveIntegerField(default=0)
|
||||
objects = LibraryQuerySet.as_manager()
|
||||
|
||||
def get_federation_id(self):
|
||||
|
@ -822,7 +886,7 @@ class Library(federation_models.FederationMixin):
|
|||
if latest_scan and latest_scan.creation_date + delay_between_scans > now:
|
||||
return
|
||||
|
||||
scan = self.scans.create(total_files=self.files_count)
|
||||
scan = self.scans.create(total_files=self.uploads_count)
|
||||
from . import tasks
|
||||
|
||||
common_utils.on_commit(tasks.start_library_scan.delay, library_scan_id=scan.pk)
|
||||
|
|
|
@ -6,6 +6,7 @@ from versatileimagefield.serializers import VersatileImageFieldSerializer
|
|||
from funkwhale_api.activity import serializers as activity_serializers
|
||||
from funkwhale_api.common import serializers as common_serializers
|
||||
from funkwhale_api.common import utils as common_utils
|
||||
from funkwhale_api.federation import routes
|
||||
|
||||
from . import filters, models, tasks
|
||||
|
||||
|
@ -60,6 +61,7 @@ class AlbumTrackSerializer(serializers.ModelSerializer):
|
|||
artist = ArtistSimpleSerializer(read_only=True)
|
||||
is_playable = serializers.SerializerMethodField()
|
||||
listen_url = serializers.SerializerMethodField()
|
||||
duration = serializers.SerializerMethodField()
|
||||
|
||||
class Meta:
|
||||
model = models.Track
|
||||
|
@ -73,6 +75,7 @@ class AlbumTrackSerializer(serializers.ModelSerializer):
|
|||
"position",
|
||||
"is_playable",
|
||||
"listen_url",
|
||||
"duration",
|
||||
)
|
||||
|
||||
def get_is_playable(self, obj):
|
||||
|
@ -84,6 +87,12 @@ class AlbumTrackSerializer(serializers.ModelSerializer):
|
|||
def get_listen_url(self, obj):
|
||||
return obj.listen_url
|
||||
|
||||
def get_duration(self, obj):
|
||||
try:
|
||||
return obj.duration
|
||||
except AttributeError:
|
||||
return None
|
||||
|
||||
|
||||
class AlbumSerializer(serializers.ModelSerializer):
|
||||
tracks = serializers.SerializerMethodField()
|
||||
|
@ -142,6 +151,10 @@ class TrackSerializer(serializers.ModelSerializer):
|
|||
lyrics = serializers.SerializerMethodField()
|
||||
is_playable = serializers.SerializerMethodField()
|
||||
listen_url = serializers.SerializerMethodField()
|
||||
duration = serializers.SerializerMethodField()
|
||||
bitrate = serializers.SerializerMethodField()
|
||||
size = serializers.SerializerMethodField()
|
||||
mimetype = serializers.SerializerMethodField()
|
||||
|
||||
class Meta:
|
||||
model = models.Track
|
||||
|
@ -156,6 +169,10 @@ class TrackSerializer(serializers.ModelSerializer):
|
|||
"lyrics",
|
||||
"is_playable",
|
||||
"listen_url",
|
||||
"duration",
|
||||
"bitrate",
|
||||
"size",
|
||||
"mimetype",
|
||||
)
|
||||
|
||||
def get_lyrics(self, obj):
|
||||
|
@ -170,9 +187,33 @@ class TrackSerializer(serializers.ModelSerializer):
|
|||
except AttributeError:
|
||||
return None
|
||||
|
||||
def get_duration(self, obj):
|
||||
try:
|
||||
return obj.duration
|
||||
except AttributeError:
|
||||
return None
|
||||
|
||||
def get_bitrate(self, obj):
|
||||
try:
|
||||
return obj.bitrate
|
||||
except AttributeError:
|
||||
return None
|
||||
|
||||
def get_size(self, obj):
|
||||
try:
|
||||
return obj.size
|
||||
except AttributeError:
|
||||
return None
|
||||
|
||||
def get_mimetype(self, obj):
|
||||
try:
|
||||
return obj.mimetype
|
||||
except AttributeError:
|
||||
return None
|
||||
|
||||
|
||||
class LibraryForOwnerSerializer(serializers.ModelSerializer):
|
||||
files_count = serializers.SerializerMethodField()
|
||||
uploads_count = serializers.SerializerMethodField()
|
||||
size = serializers.SerializerMethodField()
|
||||
|
||||
class Meta:
|
||||
|
@ -183,20 +224,20 @@ class LibraryForOwnerSerializer(serializers.ModelSerializer):
|
|||
"name",
|
||||
"description",
|
||||
"privacy_level",
|
||||
"files_count",
|
||||
"uploads_count",
|
||||
"size",
|
||||
"creation_date",
|
||||
]
|
||||
read_only_fields = ["fid", "uuid", "creation_date", "actor"]
|
||||
|
||||
def get_files_count(self, o):
|
||||
return getattr(o, "_files_count", o.files_count)
|
||||
def get_uploads_count(self, o):
|
||||
return getattr(o, "_uploads_count", o.uploads_count)
|
||||
|
||||
def get_size(self, o):
|
||||
return getattr(o, "_size", 0)
|
||||
|
||||
|
||||
class TrackFileSerializer(serializers.ModelSerializer):
|
||||
class UploadSerializer(serializers.ModelSerializer):
|
||||
track = TrackSerializer(required=False, allow_null=True)
|
||||
library = common_serializers.RelatedField(
|
||||
"uuid",
|
||||
|
@ -206,7 +247,7 @@ class TrackFileSerializer(serializers.ModelSerializer):
|
|||
)
|
||||
|
||||
class Meta:
|
||||
model = models.TrackFile
|
||||
model = models.Upload
|
||||
fields = [
|
||||
"uuid",
|
||||
"filename",
|
||||
|
@ -235,9 +276,9 @@ class TrackFileSerializer(serializers.ModelSerializer):
|
|||
]
|
||||
|
||||
|
||||
class TrackFileForOwnerSerializer(TrackFileSerializer):
|
||||
class Meta(TrackFileSerializer.Meta):
|
||||
fields = TrackFileSerializer.Meta.fields + [
|
||||
class UploadForOwnerSerializer(UploadSerializer):
|
||||
class Meta(UploadSerializer.Meta):
|
||||
fields = UploadSerializer.Meta.fields + [
|
||||
"import_details",
|
||||
"import_metadata",
|
||||
"import_reference",
|
||||
|
@ -246,7 +287,7 @@ class TrackFileForOwnerSerializer(TrackFileSerializer):
|
|||
"audio_file",
|
||||
]
|
||||
write_only_fields = ["audio_file"]
|
||||
read_only_fields = TrackFileSerializer.Meta.read_only_fields + [
|
||||
read_only_fields = UploadSerializer.Meta.read_only_fields + [
|
||||
"import_details",
|
||||
"import_metadata",
|
||||
"metadata",
|
||||
|
@ -272,16 +313,26 @@ class TrackFileForOwnerSerializer(TrackFileSerializer):
|
|||
return f
|
||||
|
||||
|
||||
class TrackFileActionSerializer(common_serializers.ActionSerializer):
|
||||
class UploadActionSerializer(common_serializers.ActionSerializer):
|
||||
actions = [
|
||||
common_serializers.Action("delete", allow_all=True),
|
||||
common_serializers.Action("relaunch_import", allow_all=True),
|
||||
]
|
||||
filterset_class = filters.TrackFileFilter
|
||||
filterset_class = filters.UploadFilter
|
||||
pk_field = "uuid"
|
||||
|
||||
@transaction.atomic
|
||||
def handle_delete(self, objects):
|
||||
libraries = sorted(set(objects.values_list("library", flat=True)))
|
||||
for id in libraries:
|
||||
# we group deletes by library for easier federation
|
||||
uploads = objects.filter(library__pk=id).select_related("library__actor")
|
||||
for chunk in common_utils.chunk_queryset(uploads, 100):
|
||||
routes.outbox.dispatch(
|
||||
{"type": "Delete", "object": {"type": "Audio"}},
|
||||
context={"uploads": chunk},
|
||||
)
|
||||
|
||||
return objects.delete()
|
||||
|
||||
@transaction.atomic
|
||||
|
@ -290,7 +341,7 @@ class TrackFileActionSerializer(common_serializers.ActionSerializer):
|
|||
pks = list(qs.values_list("id", flat=True))
|
||||
qs.update(import_status="pending")
|
||||
for pk in pks:
|
||||
common_utils.on_commit(tasks.import_track_file.delay, track_file_id=pk)
|
||||
common_utils.on_commit(tasks.import_upload.delay, upload_id=pk)
|
||||
|
||||
|
||||
class TagSerializer(serializers.ModelSerializer):
|
||||
|
|
|
@ -1,5 +1,5 @@
|
|||
import django.dispatch
|
||||
|
||||
track_file_import_status_updated = django.dispatch.Signal(
|
||||
providing_args=["old_status", "new_status", "track_file"]
|
||||
upload_import_status_updated = django.dispatch.Signal(
|
||||
providing_args=["old_status", "new_status", "upload"]
|
||||
)
|
||||
|
|
|
@ -11,7 +11,7 @@ from requests.exceptions import RequestException
|
|||
|
||||
from funkwhale_api.common import channels
|
||||
from funkwhale_api.common import preferences
|
||||
from funkwhale_api.federation import activity, actors
|
||||
from funkwhale_api.federation import activity, actors, routes
|
||||
from funkwhale_api.federation import library as lb
|
||||
from funkwhale_api.federation import library as federation_serializers
|
||||
from funkwhale_api.providers.acoustid import get_acoustid_client
|
||||
|
@ -26,15 +26,15 @@ from . import serializers
|
|||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
@celery.app.task(name="acoustid.set_on_track_file")
|
||||
@celery.require_instance(models.TrackFile, "track_file")
|
||||
def set_acoustid_on_track_file(track_file):
|
||||
@celery.app.task(name="acoustid.set_on_upload")
|
||||
@celery.require_instance(models.Upload, "upload")
|
||||
def set_acoustid_on_upload(upload):
|
||||
client = get_acoustid_client()
|
||||
result = client.get_best_match(track_file.audio_file.path)
|
||||
result = client.get_best_match(upload.audio_file.path)
|
||||
|
||||
def update(id):
|
||||
track_file.acoustid_track_id = id
|
||||
track_file.save(update_fields=["acoustid_track_id"])
|
||||
upload.acoustid_track_id = id
|
||||
upload.save(update_fields=["acoustid_track_id"])
|
||||
return id
|
||||
|
||||
if result:
|
||||
|
@ -86,14 +86,14 @@ def import_track_from_remote(metadata):
|
|||
)[0]
|
||||
|
||||
|
||||
def update_album_cover(album, track_file, replace=False):
|
||||
def update_album_cover(album, upload, replace=False):
|
||||
if album.cover and not replace:
|
||||
return
|
||||
|
||||
if track_file:
|
||||
if upload:
|
||||
# maybe the file has a cover embedded?
|
||||
try:
|
||||
metadata = track_file.get_metadata()
|
||||
metadata = upload.get_metadata()
|
||||
except FileNotFoundError:
|
||||
metadata = None
|
||||
if metadata:
|
||||
|
@ -102,9 +102,9 @@ def update_album_cover(album, track_file, replace=False):
|
|||
# best case scenario, cover is embedded in the track
|
||||
logger.info("[Album %s] Using cover embedded in file", album.pk)
|
||||
return album.get_image(data=cover)
|
||||
if track_file.source and track_file.source.startswith("file://"):
|
||||
if upload.source and upload.source.startswith("file://"):
|
||||
# let's look for a cover in the same directory
|
||||
path = os.path.dirname(track_file.source.replace("file://", "", 1))
|
||||
path = os.path.dirname(upload.source.replace("file://", "", 1))
|
||||
logger.info("[Album %s] scanning covers from %s", album.pk, path)
|
||||
cover = get_cover_from_fs(path)
|
||||
if cover:
|
||||
|
@ -163,14 +163,14 @@ def import_batch_notify_followers(import_batch):
|
|||
library_actor = actors.SYSTEM_ACTORS["library"].get_actor_instance()
|
||||
followers = library_actor.get_approved_followers()
|
||||
jobs = import_batch.jobs.filter(
|
||||
status="finished", library_track__isnull=True, track_file__isnull=False
|
||||
).select_related("track_file__track__artist", "track_file__track__album__artist")
|
||||
track_files = [job.track_file for job in jobs]
|
||||
status="finished", library_track__isnull=True, upload__isnull=False
|
||||
).select_related("upload__track__artist", "upload__track__album__artist")
|
||||
uploads = [job.upload for job in jobs]
|
||||
collection = federation_serializers.CollectionSerializer(
|
||||
{
|
||||
"actor": library_actor,
|
||||
"id": import_batch.get_federation_id(),
|
||||
"items": track_files,
|
||||
"items": uploads,
|
||||
"item_serializer": federation_serializers.AudioSerializer,
|
||||
}
|
||||
).data
|
||||
|
@ -218,17 +218,17 @@ def start_library_scan(library_scan):
|
|||
)
|
||||
def scan_library_page(library_scan, page_url):
|
||||
data = lb.get_library_page(library_scan.library, page_url, library_scan.actor)
|
||||
tfs = []
|
||||
uploads = []
|
||||
|
||||
for item_serializer in data["items"]:
|
||||
tf = item_serializer.save(library=library_scan.library)
|
||||
if tf.import_status == "pending" and not tf.track:
|
||||
upload = item_serializer.save(library=library_scan.library)
|
||||
if upload.import_status == "pending" and not upload.track:
|
||||
# this track is not matched to any musicbrainz or other musical
|
||||
# metadata
|
||||
import_track_file.delay(track_file_id=tf.pk)
|
||||
tfs.append(tf)
|
||||
import_upload.delay(upload_id=upload.pk)
|
||||
uploads.append(upload)
|
||||
|
||||
library_scan.processed_files = F("processed_files") + len(tfs)
|
||||
library_scan.processed_files = F("processed_files") + len(uploads)
|
||||
library_scan.modification_date = timezone.now()
|
||||
update_fields = ["modification_date", "processed_files"]
|
||||
|
||||
|
@ -254,82 +254,82 @@ def getter(data, *keys):
|
|||
return v
|
||||
|
||||
|
||||
class TrackFileImportError(ValueError):
|
||||
class UploadImportError(ValueError):
|
||||
def __init__(self, code):
|
||||
self.code = code
|
||||
super().__init__(code)
|
||||
|
||||
|
||||
def fail_import(track_file, error_code):
|
||||
old_status = track_file.import_status
|
||||
track_file.import_status = "errored"
|
||||
track_file.import_details = {"error_code": error_code}
|
||||
track_file.import_date = timezone.now()
|
||||
track_file.save(update_fields=["import_details", "import_status", "import_date"])
|
||||
signals.track_file_import_status_updated.send(
|
||||
def fail_import(upload, error_code):
|
||||
old_status = upload.import_status
|
||||
upload.import_status = "errored"
|
||||
upload.import_details = {"error_code": error_code}
|
||||
upload.import_date = timezone.now()
|
||||
upload.save(update_fields=["import_details", "import_status", "import_date"])
|
||||
signals.upload_import_status_updated.send(
|
||||
old_status=old_status,
|
||||
new_status=track_file.import_status,
|
||||
track_file=track_file,
|
||||
new_status=upload.import_status,
|
||||
upload=upload,
|
||||
sender=None,
|
||||
)
|
||||
|
||||
|
||||
@celery.app.task(name="music.import_track_file")
|
||||
@celery.app.task(name="music.import_upload")
|
||||
@celery.require_instance(
|
||||
models.TrackFile.objects.filter(import_status="pending").select_related(
|
||||
models.Upload.objects.filter(import_status="pending").select_related(
|
||||
"library__actor__user"
|
||||
),
|
||||
"track_file",
|
||||
"upload",
|
||||
)
|
||||
def import_track_file(track_file):
|
||||
data = track_file.import_metadata or {}
|
||||
old_status = track_file.import_status
|
||||
def import_upload(upload):
|
||||
data = upload.import_metadata or {}
|
||||
old_status = upload.import_status
|
||||
try:
|
||||
track = get_track_from_import_metadata(track_file.import_metadata or {})
|
||||
if not track and track_file.audio_file:
|
||||
track = get_track_from_import_metadata(upload.import_metadata or {})
|
||||
if not track and upload.audio_file:
|
||||
# easy ways did not work. Now we have to be smart and use
|
||||
# metadata from the file itself if any
|
||||
track = import_track_data_from_file(track_file.audio_file.file, hints=data)
|
||||
if not track and track_file.metadata:
|
||||
track = import_track_data_from_file(upload.audio_file.file, hints=data)
|
||||
if not track and upload.metadata:
|
||||
# we can try to import using federation metadata
|
||||
track = import_track_from_remote(track_file.metadata)
|
||||
except TrackFileImportError as e:
|
||||
return fail_import(track_file, e.code)
|
||||
track = import_track_from_remote(upload.metadata)
|
||||
except UploadImportError as e:
|
||||
return fail_import(upload, e.code)
|
||||
except Exception:
|
||||
fail_import(track_file, "unknown_error")
|
||||
fail_import(upload, "unknown_error")
|
||||
raise
|
||||
# under some situations, we want to skip the import (
|
||||
# for instance if the user already owns the files)
|
||||
owned_duplicates = get_owned_duplicates(track_file, track)
|
||||
track_file.track = track
|
||||
owned_duplicates = get_owned_duplicates(upload, track)
|
||||
upload.track = track
|
||||
|
||||
if owned_duplicates:
|
||||
track_file.import_status = "skipped"
|
||||
track_file.import_details = {
|
||||
upload.import_status = "skipped"
|
||||
upload.import_details = {
|
||||
"code": "already_imported_in_owned_libraries",
|
||||
"duplicates": list(owned_duplicates),
|
||||
}
|
||||
track_file.import_date = timezone.now()
|
||||
track_file.save(
|
||||
upload.import_date = timezone.now()
|
||||
upload.save(
|
||||
update_fields=["import_details", "import_status", "import_date", "track"]
|
||||
)
|
||||
signals.track_file_import_status_updated.send(
|
||||
signals.upload_import_status_updated.send(
|
||||
old_status=old_status,
|
||||
new_status=track_file.import_status,
|
||||
track_file=track_file,
|
||||
new_status=upload.import_status,
|
||||
upload=upload,
|
||||
sender=None,
|
||||
)
|
||||
return
|
||||
|
||||
# all is good, let's finalize the import
|
||||
audio_data = track_file.get_audio_data()
|
||||
audio_data = upload.get_audio_data()
|
||||
if audio_data:
|
||||
track_file.duration = audio_data["duration"]
|
||||
track_file.size = audio_data["size"]
|
||||
track_file.bitrate = audio_data["bitrate"]
|
||||
track_file.import_status = "finished"
|
||||
track_file.import_date = timezone.now()
|
||||
track_file.save(
|
||||
upload.duration = audio_data["duration"]
|
||||
upload.size = audio_data["size"]
|
||||
upload.bitrate = audio_data["bitrate"]
|
||||
upload.import_status = "finished"
|
||||
upload.import_date = timezone.now()
|
||||
upload.save(
|
||||
update_fields=[
|
||||
"track",
|
||||
"import_status",
|
||||
|
@ -339,15 +339,17 @@ def import_track_file(track_file):
|
|||
"bitrate",
|
||||
]
|
||||
)
|
||||
signals.track_file_import_status_updated.send(
|
||||
signals.upload_import_status_updated.send(
|
||||
old_status=old_status,
|
||||
new_status=track_file.import_status,
|
||||
track_file=track_file,
|
||||
new_status=upload.import_status,
|
||||
upload=upload,
|
||||
sender=None,
|
||||
)
|
||||
|
||||
routes.outbox.dispatch(
|
||||
{"type": "Create", "object": {"type": "Audio"}}, context={"upload": upload}
|
||||
)
|
||||
if not track.album.cover:
|
||||
update_album_cover(track.album, track_file)
|
||||
update_album_cover(track.album, upload)
|
||||
|
||||
|
||||
def get_track_from_import_metadata(data):
|
||||
|
@ -363,19 +365,19 @@ def get_track_from_import_metadata(data):
|
|||
try:
|
||||
return models.Track.objects.get(uuid=track_uuid)
|
||||
except models.Track.DoesNotExist:
|
||||
raise TrackFileImportError(code="track_uuid_not_found")
|
||||
raise UploadImportError(code="track_uuid_not_found")
|
||||
|
||||
|
||||
def get_owned_duplicates(track_file, track):
|
||||
def get_owned_duplicates(upload, track):
|
||||
"""
|
||||
Ensure we skip duplicate tracks to avoid wasting user/instance storage
|
||||
"""
|
||||
owned_libraries = track_file.library.actor.libraries.all()
|
||||
owned_libraries = upload.library.actor.libraries.all()
|
||||
return (
|
||||
models.TrackFile.objects.filter(
|
||||
models.Upload.objects.filter(
|
||||
track__isnull=False, library__in=owned_libraries, track=track
|
||||
)
|
||||
.exclude(pk=track_file.pk)
|
||||
.exclude(pk=upload.pk)
|
||||
.values_list("uuid", flat=True)
|
||||
)
|
||||
|
||||
|
@ -422,11 +424,9 @@ def import_track_data_from_file(file, hints={}):
|
|||
return track
|
||||
|
||||
|
||||
@receiver(signals.track_file_import_status_updated)
|
||||
def broadcast_import_status_update_to_owner(
|
||||
old_status, new_status, track_file, **kwargs
|
||||
):
|
||||
user = track_file.library.actor.get_user()
|
||||
@receiver(signals.upload_import_status_updated)
|
||||
def broadcast_import_status_update_to_owner(old_status, new_status, upload, **kwargs):
|
||||
user = upload.library.actor.get_user()
|
||||
if not user:
|
||||
return
|
||||
group = "user.{}.imports".format(user.pk)
|
||||
|
@ -437,7 +437,7 @@ def broadcast_import_status_update_to_owner(
|
|||
"text": "",
|
||||
"data": {
|
||||
"type": "import.status_updated",
|
||||
"track_file": serializers.TrackFileForOwnerSerializer(track_file).data,
|
||||
"upload": serializers.UploadForOwnerSerializer(upload).data,
|
||||
"old_status": old_status,
|
||||
"new_status": new_status,
|
||||
},
|
||||
|
|
|
@ -3,7 +3,7 @@ import urllib
|
|||
|
||||
from django.conf import settings
|
||||
from django.db import transaction
|
||||
from django.db.models import Count, Prefetch, Sum
|
||||
from django.db.models import Count, Prefetch, Sum, F
|
||||
from django.db.models.functions import Length
|
||||
from django.utils import timezone
|
||||
|
||||
|
@ -19,6 +19,7 @@ from funkwhale_api.common import utils as common_utils
|
|||
from funkwhale_api.common import permissions as common_permissions
|
||||
from funkwhale_api.federation.authentication import SignatureAuthentication
|
||||
from funkwhale_api.federation import api_serializers as federation_api_serializers
|
||||
from funkwhale_api.federation import routes
|
||||
|
||||
from . import filters, models, serializers, tasks, utils
|
||||
|
||||
|
@ -44,6 +45,9 @@ class ArtistViewSet(viewsets.ReadOnlyModelViewSet):
|
|||
def get_queryset(self):
|
||||
queryset = super().get_queryset()
|
||||
albums = models.Album.objects.with_tracks_count()
|
||||
albums = albums.annotate_playable_by_actor(
|
||||
utils.get_actor_from_request(self.request)
|
||||
)
|
||||
return queryset.prefetch_related(Prefetch("albums", queryset=albums)).distinct()
|
||||
|
||||
|
||||
|
@ -61,6 +65,14 @@ class AlbumViewSet(viewsets.ReadOnlyModelViewSet):
|
|||
tracks = models.Track.objects.annotate_playable_by_actor(
|
||||
utils.get_actor_from_request(self.request)
|
||||
).select_related("artist")
|
||||
if (
|
||||
hasattr(self, "kwargs")
|
||||
and self.kwargs
|
||||
and self.request.method.lower() == "get"
|
||||
):
|
||||
# we are detailing a single album, so we can add the overhead
|
||||
# to fetch additional data
|
||||
tracks = tracks.annotate_duration()
|
||||
qs = queryset.prefetch_related(Prefetch("tracks", queryset=tracks))
|
||||
return qs.distinct()
|
||||
|
||||
|
@ -77,8 +89,8 @@ class LibraryViewSet(
|
|||
queryset = (
|
||||
models.Library.objects.all()
|
||||
.order_by("-creation_date")
|
||||
.annotate(_files_count=Count("files"))
|
||||
.annotate(_size=Sum("files__size"))
|
||||
.annotate(_uploads_count=Count("uploads"))
|
||||
.annotate(_size=Sum("uploads__size"))
|
||||
)
|
||||
serializer_class = serializers.LibraryForOwnerSerializer
|
||||
permission_classes = [
|
||||
|
@ -95,6 +107,14 @@ class LibraryViewSet(
|
|||
def perform_create(self, serializer):
|
||||
serializer.save(actor=self.request.user.actor)
|
||||
|
||||
@transaction.atomic
|
||||
def perform_destroy(self, instance):
|
||||
routes.outbox.dispatch(
|
||||
{"type": "Delete", "object": {"type": "Library"}},
|
||||
context={"library": instance},
|
||||
)
|
||||
instance.delete()
|
||||
|
||||
@detail_route(methods=["get"])
|
||||
@transaction.non_atomic_requests
|
||||
def follows(self, request, *args, **kwargs):
|
||||
|
@ -141,7 +161,15 @@ class TrackViewSet(TagViewSetMixin, viewsets.ReadOnlyModelViewSet):
|
|||
|
||||
queryset = queryset.annotate_playable_by_actor(
|
||||
utils.get_actor_from_request(self.request)
|
||||
)
|
||||
).annotate_duration()
|
||||
if (
|
||||
hasattr(self, "kwargs")
|
||||
and self.kwargs
|
||||
and self.request.method.lower() == "get"
|
||||
):
|
||||
# we are detailing a single track, so we can add the overhead
|
||||
# to fetch additional data
|
||||
queryset = queryset.annotate_file_data()
|
||||
return queryset.distinct()
|
||||
|
||||
@detail_route(methods=["get"])
|
||||
|
@ -201,8 +229,8 @@ def get_file_path(audio_file):
|
|||
return path.encode("utf-8")
|
||||
|
||||
|
||||
def handle_serve(track_file, user):
|
||||
f = track_file
|
||||
def handle_serve(upload, user):
|
||||
f = upload
|
||||
# we update the accessed_date
|
||||
f.accessed_date = timezone.now()
|
||||
f.save(update_fields=["accessed_date"])
|
||||
|
@ -261,19 +289,20 @@ class ListenViewSet(mixins.RetrieveModelMixin, viewsets.GenericViewSet):
|
|||
def retrieve(self, request, *args, **kwargs):
|
||||
track = self.get_object()
|
||||
actor = utils.get_actor_from_request(request)
|
||||
queryset = track.files.select_related("track__album__artist", "track__artist")
|
||||
explicit_file = request.GET.get("file")
|
||||
queryset = track.uploads.select_related("track__album__artist", "track__artist")
|
||||
explicit_file = request.GET.get("upload")
|
||||
if explicit_file:
|
||||
queryset = queryset.filter(uuid=explicit_file)
|
||||
queryset = queryset.playable_by(actor)
|
||||
tf = queryset.first()
|
||||
if not tf:
|
||||
queryset = queryset.order_by(F("audio_file").desc(nulls_last=True))
|
||||
upload = queryset.first()
|
||||
if not upload:
|
||||
return Response(status=404)
|
||||
|
||||
return handle_serve(tf, user=request.user)
|
||||
return handle_serve(upload, user=request.user)
|
||||
|
||||
|
||||
class TrackFileViewSet(
|
||||
class UploadViewSet(
|
||||
mixins.ListModelMixin,
|
||||
mixins.CreateModelMixin,
|
||||
mixins.RetrieveModelMixin,
|
||||
|
@ -282,18 +311,18 @@ class TrackFileViewSet(
|
|||
):
|
||||
lookup_field = "uuid"
|
||||
queryset = (
|
||||
models.TrackFile.objects.all()
|
||||
models.Upload.objects.all()
|
||||
.order_by("-creation_date")
|
||||
.select_related("library", "track__artist", "track__album__artist")
|
||||
)
|
||||
serializer_class = serializers.TrackFileForOwnerSerializer
|
||||
serializer_class = serializers.UploadForOwnerSerializer
|
||||
permission_classes = [
|
||||
permissions.IsAuthenticated,
|
||||
common_permissions.OwnerPermission,
|
||||
]
|
||||
owner_field = "library.actor.user"
|
||||
owner_checks = ["read", "write"]
|
||||
filter_class = filters.TrackFileFilter
|
||||
filter_class = filters.UploadFilter
|
||||
ordering_fields = (
|
||||
"creation_date",
|
||||
"import_date",
|
||||
|
@ -309,9 +338,7 @@ class TrackFileViewSet(
|
|||
@list_route(methods=["post"])
|
||||
def action(self, request, *args, **kwargs):
|
||||
queryset = self.get_queryset()
|
||||
serializer = serializers.TrackFileActionSerializer(
|
||||
request.data, queryset=queryset
|
||||
)
|
||||
serializer = serializers.UploadActionSerializer(request.data, queryset=queryset)
|
||||
serializer.is_valid(raise_exception=True)
|
||||
result = serializer.save()
|
||||
return Response(result, status=200)
|
||||
|
@ -322,8 +349,16 @@ class TrackFileViewSet(
|
|||
return context
|
||||
|
||||
def perform_create(self, serializer):
|
||||
tf = serializer.save()
|
||||
common_utils.on_commit(tasks.import_track_file.delay, track_file_id=tf.pk)
|
||||
upload = serializer.save()
|
||||
common_utils.on_commit(tasks.import_upload.delay, upload_id=upload.pk)
|
||||
|
||||
@transaction.atomic
|
||||
def perform_destroy(self, instance):
|
||||
routes.outbox.dispatch(
|
||||
{"type": "Delete", "object": {"type": "Audio"}},
|
||||
context={"uploads": [instance]},
|
||||
)
|
||||
instance.delete()
|
||||
|
||||
|
||||
class TagViewSet(viewsets.ReadOnlyModelViewSet):
|
||||
|
|
|
@ -12,7 +12,7 @@ class PlaylistQuerySet(models.QuerySet):
|
|||
|
||||
def with_duration(self):
|
||||
return self.annotate(
|
||||
duration=models.Sum("playlist_tracks__track__files__duration")
|
||||
duration=models.Sum("playlist_tracks__track__uploads__duration")
|
||||
)
|
||||
|
||||
def with_covers(self):
|
||||
|
@ -135,7 +135,7 @@ class PlaylistTrackQuerySet(models.QuerySet):
|
|||
self.select_related()
|
||||
.select_related("track__album__artist")
|
||||
.prefetch_related(
|
||||
"track__tags", "track__files", "track__artist__albums__tracks__tags"
|
||||
"track__tags", "track__uploads", "track__artist__albums__tracks__tags"
|
||||
)
|
||||
)
|
||||
|
||||
|
|
|
@ -198,8 +198,8 @@ class Command(BaseCommand):
|
|||
def filter_matching(self, matching):
|
||||
sources = ["file://{}".format(p) for p in matching]
|
||||
# we skip reimport for path that are already found
|
||||
# as a TrackFile.source
|
||||
existing = models.TrackFile.objects.filter(source__in=sources)
|
||||
# as a Upload.source
|
||||
existing = models.Upload.objects.filter(source__in=sources)
|
||||
existing = existing.values_list("source", flat=True)
|
||||
existing = set([p.replace("file://", "", 1) for p in existing])
|
||||
skipped = set(matching) & existing
|
||||
|
|
|
@ -43,8 +43,8 @@ class SessionRadio(SimpleRadio):
|
|||
return self.session
|
||||
|
||||
def get_queryset(self, **kwargs):
|
||||
qs = Track.objects.annotate(files_count=Count("files"))
|
||||
return qs.filter(files_count__gt=0)
|
||||
qs = Track.objects.annotate(uploads_count=Count("uploads"))
|
||||
return qs.filter(uploads_count__gt=0)
|
||||
|
||||
def get_queryset_kwargs(self):
|
||||
return {}
|
||||
|
|
|
@ -38,7 +38,7 @@ class GetArtistsSerializer(serializers.Serializer):
|
|||
|
||||
class GetArtistSerializer(serializers.Serializer):
|
||||
def to_representation(self, artist):
|
||||
albums = artist.albums.prefetch_related("tracks__files")
|
||||
albums = artist.albums.prefetch_related("tracks__uploads")
|
||||
payload = {
|
||||
"id": artist.pk,
|
||||
"name": artist.name,
|
||||
|
@ -62,7 +62,7 @@ class GetArtistSerializer(serializers.Serializer):
|
|||
return payload
|
||||
|
||||
|
||||
def get_track_data(album, track, tf):
|
||||
def get_track_data(album, track, upload):
|
||||
data = {
|
||||
"id": track.pk,
|
||||
"isDir": "false",
|
||||
|
@ -70,9 +70,9 @@ def get_track_data(album, track, tf):
|
|||
"album": album.title,
|
||||
"artist": album.artist.name,
|
||||
"track": track.position or 1,
|
||||
"contentType": tf.mimetype,
|
||||
"suffix": tf.extension or "",
|
||||
"duration": tf.duration or 0,
|
||||
"contentType": upload.mimetype,
|
||||
"suffix": upload.extension or "",
|
||||
"duration": upload.duration or 0,
|
||||
"created": track.creation_date,
|
||||
"albumId": album.pk,
|
||||
"artistId": album.artist.pk,
|
||||
|
@ -80,10 +80,10 @@ def get_track_data(album, track, tf):
|
|||
}
|
||||
if track.album.cover:
|
||||
data["coverArt"] = "al-{}".format(track.album.id)
|
||||
if tf.bitrate:
|
||||
data["bitrate"] = int(tf.bitrate / 1000)
|
||||
if tf.size:
|
||||
data["size"] = tf.size
|
||||
if upload.bitrate:
|
||||
data["bitrate"] = int(upload.bitrate / 1000)
|
||||
if upload.size:
|
||||
data["size"] = upload.size
|
||||
if album.release_date:
|
||||
data["year"] = album.release_date.year
|
||||
return data
|
||||
|
@ -103,7 +103,7 @@ def get_album2_data(album):
|
|||
try:
|
||||
payload["songCount"] = album._tracks_count
|
||||
except AttributeError:
|
||||
payload["songCount"] = len(album.tracks.prefetch_related("files"))
|
||||
payload["songCount"] = len(album.tracks.prefetch_related("uploads"))
|
||||
return payload
|
||||
|
||||
|
||||
|
@ -111,17 +111,17 @@ def get_song_list_data(tracks):
|
|||
songs = []
|
||||
for track in tracks:
|
||||
try:
|
||||
tf = [tf for tf in track.files.all()][0]
|
||||
uploads = [upload for upload in track.uploads.all()][0]
|
||||
except IndexError:
|
||||
continue
|
||||
track_data = get_track_data(track.album, track, tf)
|
||||
track_data = get_track_data(track.album, track, uploads)
|
||||
songs.append(track_data)
|
||||
return songs
|
||||
|
||||
|
||||
class GetAlbumSerializer(serializers.Serializer):
|
||||
def to_representation(self, album):
|
||||
tracks = album.tracks.prefetch_related("files").select_related("album")
|
||||
tracks = album.tracks.prefetch_related("uploads").select_related("album")
|
||||
payload = get_album2_data(album)
|
||||
if album.release_date:
|
||||
payload["year"] = album.release_date.year
|
||||
|
@ -132,10 +132,10 @@ class GetAlbumSerializer(serializers.Serializer):
|
|||
|
||||
class GetSongSerializer(serializers.Serializer):
|
||||
def to_representation(self, track):
|
||||
tf = track.files.all()
|
||||
if not len(tf):
|
||||
uploads = track.uploads.all()
|
||||
if not len(uploads):
|
||||
return {}
|
||||
return get_track_data(track.album, track, tf[0])
|
||||
return get_track_data(track.album, track, uploads[0])
|
||||
|
||||
|
||||
def get_starred_tracks_data(favorites):
|
||||
|
@ -143,16 +143,16 @@ def get_starred_tracks_data(favorites):
|
|||
tracks = (
|
||||
music_models.Track.objects.filter(pk__in=by_track_id.keys())
|
||||
.select_related("album__artist")
|
||||
.prefetch_related("files")
|
||||
.prefetch_related("uploads")
|
||||
)
|
||||
tracks = tracks.order_by("-creation_date")
|
||||
data = []
|
||||
for t in tracks:
|
||||
try:
|
||||
tf = [tf for tf in t.files.all()][0]
|
||||
uploads = [upload for upload in t.uploads.all()][0]
|
||||
except IndexError:
|
||||
continue
|
||||
td = get_track_data(t.album, t, tf)
|
||||
td = get_track_data(t.album, t, uploads)
|
||||
td["starred"] = by_track_id[t.pk].creation_date
|
||||
data.append(td)
|
||||
return data
|
||||
|
@ -178,26 +178,26 @@ def get_playlist_detail_data(playlist):
|
|||
data = get_playlist_data(playlist)
|
||||
qs = (
|
||||
playlist.playlist_tracks.select_related("track__album__artist")
|
||||
.prefetch_related("track__files")
|
||||
.prefetch_related("track__uploads")
|
||||
.order_by("index")
|
||||
)
|
||||
data["entry"] = []
|
||||
for plt in qs:
|
||||
try:
|
||||
tf = [tf for tf in plt.track.files.all()][0]
|
||||
uploads = [upload for upload in plt.track.uploads.all()][0]
|
||||
except IndexError:
|
||||
continue
|
||||
td = get_track_data(plt.track.album, plt.track, tf)
|
||||
td = get_track_data(plt.track.album, plt.track, uploads)
|
||||
data["entry"].append(td)
|
||||
return data
|
||||
|
||||
|
||||
def get_music_directory_data(artist):
|
||||
tracks = artist.tracks.select_related("album").prefetch_related("files")
|
||||
tracks = artist.tracks.select_related("album").prefetch_related("uploads")
|
||||
data = {"id": artist.pk, "parent": 1, "name": artist.name, "child": []}
|
||||
for track in tracks:
|
||||
try:
|
||||
tf = [tf for tf in track.files.all()][0]
|
||||
upload = [upload for upload in track.uploads.all()][0]
|
||||
except IndexError:
|
||||
continue
|
||||
album = track.album
|
||||
|
@ -209,19 +209,19 @@ def get_music_directory_data(artist):
|
|||
"artist": artist.name,
|
||||
"track": track.position or 1,
|
||||
"year": track.album.release_date.year if track.album.release_date else 0,
|
||||
"contentType": tf.mimetype,
|
||||
"suffix": tf.extension or "",
|
||||
"duration": tf.duration or 0,
|
||||
"contentType": upload.mimetype,
|
||||
"suffix": upload.extension or "",
|
||||
"duration": upload.duration or 0,
|
||||
"created": track.creation_date,
|
||||
"albumId": album.pk,
|
||||
"artistId": artist.pk,
|
||||
"parent": artist.id,
|
||||
"type": "music",
|
||||
}
|
||||
if tf.bitrate:
|
||||
td["bitrate"] = int(tf.bitrate / 1000)
|
||||
if tf.size:
|
||||
td["size"] = tf.size
|
||||
if upload.bitrate:
|
||||
td["bitrate"] = int(upload.bitrate / 1000)
|
||||
if upload.size:
|
||||
td["size"] = upload.size
|
||||
data["child"].append(td)
|
||||
return data
|
||||
|
||||
|
@ -229,9 +229,9 @@ def get_music_directory_data(artist):
|
|||
class ScrobbleSerializer(serializers.Serializer):
|
||||
submission = serializers.BooleanField(default=True, required=False)
|
||||
id = serializers.PrimaryKeyRelatedField(
|
||||
queryset=music_models.Track.objects.annotate(files_count=Count("files")).filter(
|
||||
files_count__gt=0
|
||||
)
|
||||
queryset=music_models.Track.objects.annotate(
|
||||
uploads_count=Count("uploads")
|
||||
).filter(uploads_count__gt=0)
|
||||
)
|
||||
|
||||
def create(self, data):
|
||||
|
|
|
@ -177,11 +177,11 @@ class SubsonicViewSet(viewsets.GenericViewSet):
|
|||
@find_object(music_models.Track.objects.all())
|
||||
def stream(self, request, *args, **kwargs):
|
||||
track = kwargs.pop("obj")
|
||||
queryset = track.files.select_related("track__album__artist", "track__artist")
|
||||
track_file = queryset.first()
|
||||
if not track_file:
|
||||
queryset = track.uploads.select_related("track__album__artist", "track__artist")
|
||||
upload = queryset.first()
|
||||
if not upload:
|
||||
return response.Response(status=404)
|
||||
return music_views.handle_serve(track_file=track_file, user=request.user)
|
||||
return music_views.handle_serve(upload=upload, user=request.user)
|
||||
|
||||
@list_route(methods=["get", "post"], url_name="star", url_path="star")
|
||||
@find_object(music_models.Track.objects.all())
|
||||
|
@ -265,9 +265,9 @@ class SubsonicViewSet(viewsets.GenericViewSet):
|
|||
"subsonic": "song",
|
||||
"search_fields": ["title"],
|
||||
"queryset": (
|
||||
music_models.Track.objects.prefetch_related("files").select_related(
|
||||
"album__artist"
|
||||
)
|
||||
music_models.Track.objects.prefetch_related(
|
||||
"uploads"
|
||||
).select_related("album__artist")
|
||||
),
|
||||
"serializer": serializers.get_song_list_data,
|
||||
},
|
||||
|
|
|
@ -261,7 +261,7 @@ def create_actor(user):
|
|||
reverse("federation:actors-detail", kwargs={"preferred_username": username})
|
||||
),
|
||||
"shared_inbox_url": federation_utils.full_url(
|
||||
reverse("federation:actors-inbox", kwargs={"preferred_username": username})
|
||||
reverse("federation:shared-inbox")
|
||||
),
|
||||
"inbox_url": federation_utils.full_url(
|
||||
reverse("federation:actors-inbox", kwargs={"preferred_username": username})
|
||||
|
@ -269,6 +269,16 @@ def create_actor(user):
|
|||
"outbox_url": federation_utils.full_url(
|
||||
reverse("federation:actors-outbox", kwargs={"preferred_username": username})
|
||||
),
|
||||
"followers_url": federation_utils.full_url(
|
||||
reverse(
|
||||
"federation:actors-followers", kwargs={"preferred_username": username}
|
||||
)
|
||||
),
|
||||
"following_url": federation_utils.full_url(
|
||||
reverse(
|
||||
"federation:actors-following", kwargs={"preferred_username": username}
|
||||
)
|
||||
),
|
||||
}
|
||||
args["private_key"] = private.decode("utf-8")
|
||||
args["public_key"] = public.decode("utf-8")
|
||||
|
|
|
@ -49,24 +49,24 @@ def test_migrate_to_user_libraries(factories, command):
|
|||
user1 = factories["users.User"](is_superuser=False, with_actor=True)
|
||||
user2 = factories["users.User"](is_superuser=True, with_actor=True)
|
||||
factories["users.User"](is_superuser=True)
|
||||
no_import_files = factories["music.TrackFile"].create_batch(size=5, library=None)
|
||||
no_import_files = factories["music.Upload"].create_batch(size=5, library=None)
|
||||
import_jobs = factories["music.ImportJob"].create_batch(
|
||||
batch__submitted_by=user1, size=5, finished=True
|
||||
)
|
||||
# we delete libraries that are created automatically
|
||||
for j in import_jobs:
|
||||
j.track_file.library = None
|
||||
j.track_file.save()
|
||||
j.upload.library = None
|
||||
j.upload.save()
|
||||
scripts.migrate_to_user_libraries.main(command)
|
||||
|
||||
# tracks with import jobs are bound to the importer's library
|
||||
library = user1.actor.libraries.get(name="default")
|
||||
assert list(library.files.order_by("id").values_list("id", flat=True)) == sorted(
|
||||
[ij.track_file.pk for ij in import_jobs]
|
||||
assert list(library.uploads.order_by("id").values_list("id", flat=True)) == sorted(
|
||||
[ij.upload.pk for ij in import_jobs]
|
||||
)
|
||||
|
||||
# tracks without import jobs are bound to first superuser
|
||||
library = user2.actor.libraries.get(name="default")
|
||||
assert list(library.files.order_by("id").values_list("id", flat=True)) == sorted(
|
||||
[tf.pk for tf in no_import_files]
|
||||
assert list(library.uploads.order_by("id").values_list("id", flat=True)) == sorted(
|
||||
[upload.pk for upload in no_import_files]
|
||||
)
|
||||
|
|
|
@ -0,0 +1,10 @@
|
|||
from funkwhale_api.common import utils
|
||||
|
||||
|
||||
def test_chunk_queryset(factories):
|
||||
actors = factories["federation.Actor"].create_batch(size=4)
|
||||
queryset = actors[0].__class__.objects.all()
|
||||
chunks = list(utils.chunk_queryset(queryset, 2))
|
||||
|
||||
assert list(chunks[0]) == actors[0:2]
|
||||
assert list(chunks[1]) == actors[2:4]
|
|
@ -6,7 +6,9 @@ import PIL
|
|||
import random
|
||||
import shutil
|
||||
import tempfile
|
||||
import uuid
|
||||
|
||||
from faker.providers import internet as internet_provider
|
||||
import factory
|
||||
import pytest
|
||||
import requests_mock
|
||||
|
@ -24,6 +26,25 @@ from funkwhale_api.activity import record
|
|||
from funkwhale_api.users.permissions import HasUserPermission
|
||||
|
||||
|
||||
class FunkwhaleProvider(internet_provider.Provider):
|
||||
"""
|
||||
Our own faker data generator, since built-in ones are sometimes
|
||||
not random enough
|
||||
"""
|
||||
|
||||
def federation_url(self, prefix=""):
|
||||
def path_generator():
|
||||
return "{}/{}".format(prefix, uuid.uuid4())
|
||||
|
||||
domain = self.domain_name()
|
||||
protocol = "https"
|
||||
path = path_generator()
|
||||
return "{}://{}/{}".format(protocol, domain, path)
|
||||
|
||||
|
||||
factory.Faker.add_provider(FunkwhaleProvider)
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def queryset_equal_queries():
|
||||
"""
|
||||
|
|
|
@ -1,21 +1,31 @@
|
|||
|
||||
import pytest
|
||||
import uuid
|
||||
|
||||
from funkwhale_api.federation import activity, api_serializers, serializers, tasks
|
||||
from django.db.models import Q
|
||||
from django.urls import reverse
|
||||
|
||||
from funkwhale_api.federation import (
|
||||
activity,
|
||||
models,
|
||||
api_serializers,
|
||||
serializers,
|
||||
tasks,
|
||||
)
|
||||
|
||||
|
||||
def test_receive_validates_basic_attributes_and_stores_activity(factories, now, mocker):
|
||||
mocked_dispatch = mocker.patch("funkwhale_api.common.utils.on_commit")
|
||||
local_actor = factories["users.User"]().create_actor()
|
||||
local_to_actor = factories["users.User"]().create_actor()
|
||||
local_cc_actor = factories["users.User"]().create_actor()
|
||||
remote_actor = factories["federation.Actor"]()
|
||||
another_actor = factories["federation.Actor"]()
|
||||
a = {
|
||||
"@context": [],
|
||||
"actor": remote_actor.fid,
|
||||
"type": "Noop",
|
||||
"id": "https://test.activity",
|
||||
"to": [local_actor.fid],
|
||||
"cc": [another_actor.fid, activity.PUBLIC_ADDRESS],
|
||||
"to": [local_to_actor.fid, remote_actor.fid],
|
||||
"cc": [local_cc_actor.fid, activity.PUBLIC_ADDRESS],
|
||||
}
|
||||
|
||||
copy = activity.receive(activity=a, on_behalf_of=remote_actor)
|
||||
|
@ -29,8 +39,60 @@ def test_receive_validates_basic_attributes_and_stores_activity(factories, now,
|
|||
tasks.dispatch_inbox.delay, activity_id=copy.pk
|
||||
)
|
||||
|
||||
inbox_item = copy.inbox_items.get(actor__fid=local_actor.fid)
|
||||
assert inbox_item.is_delivered is False
|
||||
assert models.InboxItem.objects.count() == 2
|
||||
for actor, t in [(local_to_actor, "to"), (local_cc_actor, "cc")]:
|
||||
ii = models.InboxItem.objects.get(actor=actor)
|
||||
assert ii.type == t
|
||||
assert ii.activity == copy
|
||||
assert ii.is_read is False
|
||||
|
||||
|
||||
def test_get_actors_from_audience_urls(settings, db):
|
||||
settings.FEDERATION_HOSTNAME = "federation.hostname"
|
||||
library_uuid1 = uuid.uuid4()
|
||||
library_uuid2 = uuid.uuid4()
|
||||
|
||||
urls = [
|
||||
"https://wrong.url",
|
||||
"https://federation.hostname"
|
||||
+ reverse("federation:actors-detail", kwargs={"preferred_username": "kevin"}),
|
||||
"https://federation.hostname"
|
||||
+ reverse("federation:actors-detail", kwargs={"preferred_username": "alice"}),
|
||||
"https://federation.hostname"
|
||||
+ reverse("federation:actors-detail", kwargs={"preferred_username": "bob"}),
|
||||
"https://federation.hostname"
|
||||
+ reverse("federation:music:libraries-detail", kwargs={"uuid": library_uuid1}),
|
||||
"https://federation.hostname"
|
||||
+ reverse("federation:music:libraries-detail", kwargs={"uuid": library_uuid2}),
|
||||
activity.PUBLIC_ADDRESS,
|
||||
]
|
||||
followed_query = Q(target__followers_url=urls[0])
|
||||
for url in urls[1:-1]:
|
||||
followed_query |= Q(target__followers_url=url)
|
||||
actor_follows = models.Follow.objects.filter(followed_query, approved=True)
|
||||
library_follows = models.LibraryFollow.objects.filter(followed_query, approved=True)
|
||||
expected = models.Actor.objects.filter(
|
||||
Q(fid__in=urls[0:-1])
|
||||
| Q(pk__in=actor_follows.values_list("actor", flat=True))
|
||||
| Q(pk__in=library_follows.values_list("actor", flat=True))
|
||||
)
|
||||
assert str(activity.get_actors_from_audience(urls).query) == str(expected.query)
|
||||
|
||||
|
||||
def test_get_inbox_urls(factories):
|
||||
a1 = factories["federation.Actor"](
|
||||
shared_inbox_url=None, inbox_url="https://a1.inbox"
|
||||
)
|
||||
a2 = factories["federation.Actor"](
|
||||
shared_inbox_url="https://shared.inbox", inbox_url="https://a2.inbox"
|
||||
)
|
||||
factories["federation.Actor"](
|
||||
shared_inbox_url="https://shared.inbox", inbox_url="https://a3.inbox"
|
||||
)
|
||||
|
||||
expected = sorted(set([a1.inbox_url, a2.shared_inbox_url]))
|
||||
|
||||
assert activity.get_inbox_urls(a1.__class__.objects.all()) == expected
|
||||
|
||||
|
||||
def test_receive_invalid_data(factories):
|
||||
|
@ -97,8 +159,6 @@ def test_inbox_routing_send_to_channel(factories, mocker):
|
|||
|
||||
ii.refresh_from_db()
|
||||
|
||||
assert ii.is_delivered is True
|
||||
|
||||
group_send.assert_called_once_with(
|
||||
"user.{}.inbox".format(ii.actor.user.pk),
|
||||
{
|
||||
|
@ -118,6 +178,16 @@ def test_inbox_routing_send_to_channel(factories, mocker):
|
|||
({"type": "Follow"}, {"type": "Follow"}, True),
|
||||
({"type": "Follow"}, {"type": "Noop"}, False),
|
||||
({"type": "Follow"}, {"type": "Follow", "id": "https://hello"}, True),
|
||||
(
|
||||
{"type": "Create", "object.type": "Audio"},
|
||||
{"type": "Create", "object": {"type": "Note"}},
|
||||
False,
|
||||
),
|
||||
(
|
||||
{"type": "Create", "object.type": "Audio"},
|
||||
{"type": "Create", "object": {"type": "Audio"}},
|
||||
True,
|
||||
),
|
||||
],
|
||||
)
|
||||
def test_route_matching(route, payload, expected):
|
||||
|
@ -126,7 +196,6 @@ def test_route_matching(route, payload, expected):
|
|||
|
||||
def test_outbox_router_dispatch(mocker, factories, now):
|
||||
router = activity.OutboxRouter()
|
||||
recipient = factories["federation.Actor"]()
|
||||
actor = factories["federation.Actor"]()
|
||||
r1 = factories["federation.Actor"]()
|
||||
r2 = factories["federation.Actor"]()
|
||||
|
@ -144,6 +213,9 @@ def test_outbox_router_dispatch(mocker, factories, now):
|
|||
"actor": actor,
|
||||
}
|
||||
|
||||
expected_deliveries_url = activity.get_inbox_urls(
|
||||
models.Actor.objects.filter(pk__in=[r1.pk, r2.pk])
|
||||
)
|
||||
router.connect({"type": "Noop"}, handler)
|
||||
activities = router.dispatch({"type": "Noop"}, {"summary": "hello"})
|
||||
a = activities[0]
|
||||
|
@ -163,9 +235,112 @@ def test_outbox_router_dispatch(mocker, factories, now):
|
|||
assert a.creation_date >= now
|
||||
assert a.uuid is not None
|
||||
|
||||
for recipient, type in [(r1, "to"), (r2, "cc")]:
|
||||
item = a.inbox_items.get(actor=recipient)
|
||||
assert item.is_delivered is False
|
||||
assert item.last_delivery_date is None
|
||||
assert item.delivery_attempts == 0
|
||||
assert item.type == type
|
||||
assert a.deliveries.count() == 2
|
||||
for url in expected_deliveries_url:
|
||||
delivery = a.deliveries.get(inbox_url=url)
|
||||
assert delivery.is_delivered is False
|
||||
|
||||
|
||||
def test_prepare_deliveries_and_inbox_items(factories):
|
||||
local_actor1 = factories["federation.Actor"](
|
||||
local=True, shared_inbox_url="https://testlocal.inbox"
|
||||
)
|
||||
local_actor2 = factories["federation.Actor"](
|
||||
local=True, shared_inbox_url=local_actor1.shared_inbox_url
|
||||
)
|
||||
local_actor3 = factories["federation.Actor"](local=True, shared_inbox_url=None)
|
||||
|
||||
remote_actor1 = factories["federation.Actor"](
|
||||
shared_inbox_url="https://testremote.inbox"
|
||||
)
|
||||
remote_actor2 = factories["federation.Actor"](
|
||||
shared_inbox_url=remote_actor1.shared_inbox_url
|
||||
)
|
||||
remote_actor3 = factories["federation.Actor"](shared_inbox_url=None)
|
||||
|
||||
library = factories["music.Library"]()
|
||||
library_follower_local = factories["federation.LibraryFollow"](
|
||||
target=library, actor__local=True, approved=True
|
||||
).actor
|
||||
library_follower_remote = factories["federation.LibraryFollow"](
|
||||
target=library, actor__local=False, approved=True
|
||||
).actor
|
||||
# follow not approved
|
||||
factories["federation.LibraryFollow"](
|
||||
target=library, actor__local=False, approved=False
|
||||
)
|
||||
|
||||
followed_actor = factories["federation.Actor"]()
|
||||
actor_follower_local = factories["federation.Follow"](
|
||||
target=followed_actor, actor__local=True, approved=True
|
||||
).actor
|
||||
actor_follower_remote = factories["federation.Follow"](
|
||||
target=followed_actor, actor__local=False, approved=True
|
||||
).actor
|
||||
# follow not approved
|
||||
factories["federation.Follow"](
|
||||
target=followed_actor, actor__local=False, approved=False
|
||||
)
|
||||
|
||||
recipients = [
|
||||
local_actor1,
|
||||
local_actor2,
|
||||
local_actor3,
|
||||
remote_actor1,
|
||||
remote_actor2,
|
||||
remote_actor3,
|
||||
activity.PUBLIC_ADDRESS,
|
||||
{"type": "followers", "target": library},
|
||||
{"type": "followers", "target": followed_actor},
|
||||
]
|
||||
|
||||
inbox_items, deliveries, urls = activity.prepare_deliveries_and_inbox_items(
|
||||
recipients, "to"
|
||||
)
|
||||
expected_inbox_items = sorted(
|
||||
[
|
||||
models.InboxItem(actor=local_actor1, type="to"),
|
||||
models.InboxItem(actor=local_actor2, type="to"),
|
||||
models.InboxItem(actor=local_actor3, type="to"),
|
||||
models.InboxItem(actor=library_follower_local, type="to"),
|
||||
models.InboxItem(actor=actor_follower_local, type="to"),
|
||||
],
|
||||
key=lambda v: v.actor.pk,
|
||||
)
|
||||
|
||||
expected_deliveries = sorted(
|
||||
[
|
||||
models.Delivery(inbox_url=remote_actor1.shared_inbox_url),
|
||||
models.Delivery(inbox_url=remote_actor3.inbox_url),
|
||||
models.Delivery(inbox_url=library_follower_remote.inbox_url),
|
||||
models.Delivery(inbox_url=actor_follower_remote.inbox_url),
|
||||
],
|
||||
key=lambda v: v.inbox_url,
|
||||
)
|
||||
|
||||
expected_urls = [
|
||||
local_actor1.fid,
|
||||
local_actor2.fid,
|
||||
local_actor3.fid,
|
||||
remote_actor1.fid,
|
||||
remote_actor2.fid,
|
||||
remote_actor3.fid,
|
||||
activity.PUBLIC_ADDRESS,
|
||||
library.followers_url,
|
||||
followed_actor.followers_url,
|
||||
]
|
||||
|
||||
assert urls == expected_urls
|
||||
assert len(expected_inbox_items) == len(inbox_items)
|
||||
assert len(expected_deliveries) == len(deliveries)
|
||||
|
||||
for delivery, expected_delivery in zip(
|
||||
sorted(deliveries, key=lambda v: v.inbox_url), expected_deliveries
|
||||
):
|
||||
assert delivery.inbox_url == expected_delivery.inbox_url
|
||||
|
||||
for inbox_item, expected_inbox_item in zip(
|
||||
sorted(inbox_items, key=lambda v: v.actor.pk), expected_inbox_items
|
||||
):
|
||||
assert inbox_item.actor == expected_inbox_item.actor
|
||||
assert inbox_item.type == "to"
|
||||
|
|
|
@ -3,7 +3,7 @@ from funkwhale_api.federation import serializers
|
|||
|
||||
|
||||
def test_library_serializer(factories):
|
||||
library = factories["music.Library"](files_count=5678)
|
||||
library = factories["music.Library"](uploads_count=5678)
|
||||
expected = {
|
||||
"fid": library.fid,
|
||||
"uuid": str(library.uuid),
|
||||
|
@ -11,7 +11,7 @@ def test_library_serializer(factories):
|
|||
"name": library.name,
|
||||
"description": library.description,
|
||||
"creation_date": library.creation_date.isoformat().split("+")[0] + "Z",
|
||||
"files_count": library.files_count,
|
||||
"uploads_count": library.uploads_count,
|
||||
"privacy_level": library.privacy_level,
|
||||
"follow": None,
|
||||
}
|
||||
|
@ -22,7 +22,7 @@ def test_library_serializer(factories):
|
|||
|
||||
|
||||
def test_library_serializer_with_follow(factories):
|
||||
library = factories["music.Library"](files_count=5678)
|
||||
library = factories["music.Library"](uploads_count=5678)
|
||||
follow = factories["federation.LibraryFollow"](target=library)
|
||||
|
||||
setattr(library, "_follows", [follow])
|
||||
|
@ -33,7 +33,7 @@ def test_library_serializer_with_follow(factories):
|
|||
"name": library.name,
|
||||
"description": library.description,
|
||||
"creation_date": library.creation_date.isoformat().split("+")[0] + "Z",
|
||||
"files_count": library.files_count,
|
||||
"uploads_count": library.uploads_count,
|
||||
"privacy_level": library.privacy_level,
|
||||
"follow": api_serializers.NestedLibraryFollowSerializer(follow).data,
|
||||
}
|
||||
|
@ -53,7 +53,7 @@ def test_library_serializer_validates_existing_follow(factories):
|
|||
assert "target" in serializer.errors
|
||||
|
||||
|
||||
def test_manage_track_file_action_read(factories):
|
||||
def test_manage_upload_action_read(factories):
|
||||
ii = factories["federation.InboxItem"]()
|
||||
s = api_serializers.InboxItemActionSerializer(queryset=None)
|
||||
|
||||
|
|
|
@ -11,6 +11,7 @@ def test_authenticate(factories, mocker, api_request):
|
|||
"type": "Person",
|
||||
"outbox": "https://test.com",
|
||||
"inbox": "https://test.com",
|
||||
"followers": "https://test.com",
|
||||
"preferredUsername": "test",
|
||||
"publicKey": {
|
||||
"publicKeyPem": public.decode("utf-8"),
|
||||
|
|
|
@ -27,25 +27,25 @@ def test_follow_federation_url(factories):
|
|||
|
||||
def test_actor_get_quota(factories):
|
||||
library = factories["music.Library"]()
|
||||
factories["music.TrackFile"](
|
||||
factories["music.Upload"](
|
||||
library=library,
|
||||
import_status="pending",
|
||||
audio_file__from_path=None,
|
||||
audio_file__data=b"a",
|
||||
)
|
||||
factories["music.TrackFile"](
|
||||
factories["music.Upload"](
|
||||
library=library,
|
||||
import_status="skipped",
|
||||
audio_file__from_path=None,
|
||||
audio_file__data=b"aa",
|
||||
)
|
||||
factories["music.TrackFile"](
|
||||
factories["music.Upload"](
|
||||
library=library,
|
||||
import_status="errored",
|
||||
audio_file__from_path=None,
|
||||
audio_file__data=b"aaa",
|
||||
)
|
||||
factories["music.TrackFile"](
|
||||
factories["music.Upload"](
|
||||
library=library,
|
||||
import_status="finished",
|
||||
audio_file__from_path=None,
|
||||
|
|
|
@ -8,6 +8,9 @@ from funkwhale_api.federation import routes, serializers
|
|||
[
|
||||
({"type": "Follow"}, routes.inbox_follow),
|
||||
({"type": "Accept"}, routes.inbox_accept),
|
||||
({"type": "Create", "object.type": "Audio"}, routes.inbox_create_audio),
|
||||
({"type": "Delete", "object.type": "Library"}, routes.inbox_delete_library),
|
||||
({"type": "Delete", "object.type": "Audio"}, routes.inbox_delete_audio),
|
||||
],
|
||||
)
|
||||
def test_inbox_routes(route, handler):
|
||||
|
@ -24,6 +27,9 @@ def test_inbox_routes(route, handler):
|
|||
[
|
||||
({"type": "Accept"}, routes.outbox_accept),
|
||||
({"type": "Follow"}, routes.outbox_follow),
|
||||
({"type": "Create", "object.type": "Audio"}, routes.outbox_create_audio),
|
||||
({"type": "Delete", "object.type": "Library"}, routes.outbox_delete_library),
|
||||
({"type": "Delete", "object.type": "Audio"}, routes.outbox_delete_audio),
|
||||
],
|
||||
)
|
||||
def test_outbox_routes(route, handler):
|
||||
|
@ -155,3 +161,153 @@ def test_outbox_follow_library(factories, mocker):
|
|||
assert activity["payload"] == expected
|
||||
assert activity["actor"] == follow.actor
|
||||
assert activity["object"] == follow.target
|
||||
|
||||
|
||||
def test_outbox_create_audio(factories, mocker):
|
||||
upload = factories["music.Upload"]()
|
||||
activity = list(routes.outbox_create_audio({"upload": upload}))[0]
|
||||
serializer = serializers.ActivitySerializer(
|
||||
{
|
||||
"type": "Create",
|
||||
"object": serializers.UploadSerializer(upload).data,
|
||||
"actor": upload.library.actor.fid,
|
||||
}
|
||||
)
|
||||
expected = serializer.data
|
||||
expected["to"] = [{"type": "followers", "target": upload.library}]
|
||||
|
||||
assert dict(activity["payload"]) == dict(expected)
|
||||
assert activity["actor"] == upload.library.actor
|
||||
assert activity["target"] == upload.library
|
||||
assert activity["object"] == upload
|
||||
|
||||
|
||||
def test_inbox_create_audio(factories, mocker):
|
||||
activity = factories["federation.Activity"]()
|
||||
upload = factories["music.Upload"](bitrate=42, duration=55)
|
||||
payload = {
|
||||
"type": "Create",
|
||||
"actor": upload.library.actor.fid,
|
||||
"object": serializers.UploadSerializer(upload).data,
|
||||
}
|
||||
library = upload.library
|
||||
upload.delete()
|
||||
init = mocker.spy(serializers.UploadSerializer, "__init__")
|
||||
save = mocker.spy(serializers.UploadSerializer, "save")
|
||||
assert library.uploads.count() == 0
|
||||
result = routes.inbox_create_audio(
|
||||
payload,
|
||||
context={"actor": library.actor, "raise_exception": True, "activity": activity},
|
||||
)
|
||||
assert library.uploads.count() == 1
|
||||
assert result == {"object": library.uploads.latest("id"), "target": library}
|
||||
|
||||
assert init.call_count == 1
|
||||
args = init.call_args
|
||||
assert args[1]["data"] == payload["object"]
|
||||
assert args[1]["context"] == {"activity": activity, "actor": library.actor}
|
||||
assert save.call_count == 1
|
||||
|
||||
|
||||
def test_inbox_delete_library(factories):
|
||||
activity = factories["federation.Activity"]()
|
||||
|
||||
library = factories["music.Library"]()
|
||||
payload = {
|
||||
"type": "Delete",
|
||||
"actor": library.actor.fid,
|
||||
"object": {"type": "Library", "id": library.fid},
|
||||
}
|
||||
|
||||
routes.inbox_delete_library(
|
||||
payload,
|
||||
context={"actor": library.actor, "raise_exception": True, "activity": activity},
|
||||
)
|
||||
|
||||
with pytest.raises(library.__class__.DoesNotExist):
|
||||
library.refresh_from_db()
|
||||
|
||||
|
||||
def test_inbox_delete_library_impostor(factories):
|
||||
activity = factories["federation.Activity"]()
|
||||
impostor = factories["federation.Actor"]()
|
||||
library = factories["music.Library"]()
|
||||
payload = {
|
||||
"type": "Delete",
|
||||
"actor": library.actor.fid,
|
||||
"object": {"type": "Library", "id": library.fid},
|
||||
}
|
||||
|
||||
routes.inbox_delete_library(
|
||||
payload,
|
||||
context={"actor": impostor, "raise_exception": True, "activity": activity},
|
||||
)
|
||||
|
||||
# not deleted, should still be here
|
||||
library.refresh_from_db()
|
||||
|
||||
|
||||
def test_outbox_delete_library(factories):
|
||||
library = factories["music.Library"]()
|
||||
activity = list(routes.outbox_delete_library({"library": library}))[0]
|
||||
expected = serializers.ActivitySerializer(
|
||||
{"type": "Delete", "object": {"type": "Library", "id": library.fid}}
|
||||
).data
|
||||
|
||||
expected["to"] = [{"type": "followers", "target": library}]
|
||||
|
||||
assert dict(activity["payload"]) == dict(expected)
|
||||
assert activity["actor"] == library.actor
|
||||
|
||||
|
||||
def test_inbox_delete_audio(factories):
|
||||
activity = factories["federation.Activity"]()
|
||||
|
||||
upload = factories["music.Upload"]()
|
||||
library = upload.library
|
||||
payload = {
|
||||
"type": "Delete",
|
||||
"actor": library.actor.fid,
|
||||
"object": {"type": "Audio", "id": [upload.fid]},
|
||||
}
|
||||
|
||||
routes.inbox_delete_audio(
|
||||
payload,
|
||||
context={"actor": library.actor, "raise_exception": True, "activity": activity},
|
||||
)
|
||||
|
||||
with pytest.raises(upload.__class__.DoesNotExist):
|
||||
upload.refresh_from_db()
|
||||
|
||||
|
||||
def test_inbox_delete_audio_impostor(factories):
|
||||
activity = factories["federation.Activity"]()
|
||||
impostor = factories["federation.Actor"]()
|
||||
upload = factories["music.Upload"]()
|
||||
library = upload.library
|
||||
payload = {
|
||||
"type": "Delete",
|
||||
"actor": library.actor.fid,
|
||||
"object": {"type": "Audio", "id": [upload.fid]},
|
||||
}
|
||||
|
||||
routes.inbox_delete_audio(
|
||||
payload,
|
||||
context={"actor": impostor, "raise_exception": True, "activity": activity},
|
||||
)
|
||||
|
||||
# not deleted, should still be here
|
||||
upload.refresh_from_db()
|
||||
|
||||
|
||||
def test_outbox_delete_audio(factories):
|
||||
upload = factories["music.Upload"]()
|
||||
activity = list(routes.outbox_delete_audio({"uploads": [upload]}))[0]
|
||||
expected = serializers.ActivitySerializer(
|
||||
{"type": "Delete", "object": {"type": "Audio", "id": [upload.fid]}}
|
||||
).data
|
||||
|
||||
expected["to"] = [{"type": "followers", "target": upload.library}]
|
||||
|
||||
assert dict(activity["payload"]) == dict(expected)
|
||||
assert activity["actor"] == upload.library.actor
|
||||
|
|
|
@ -1,7 +1,10 @@
|
|||
import pytest
|
||||
from django.core.paginator import Paginator
|
||||
import uuid
|
||||
|
||||
from funkwhale_api.federation import activity, models, serializers, utils
|
||||
from django.core.paginator import Paginator
|
||||
from django.utils import timezone
|
||||
|
||||
from funkwhale_api.federation import models, serializers, utils
|
||||
|
||||
|
||||
def test_actor_serializer_from_ap(db):
|
||||
|
@ -336,13 +339,13 @@ def test_undo_follow_serializer_validates_on_context(factories):
|
|||
|
||||
|
||||
def test_paginated_collection_serializer(factories):
|
||||
tfs = factories["music.TrackFile"].create_batch(size=5)
|
||||
uploads = factories["music.Upload"].create_batch(size=5)
|
||||
actor = factories["federation.Actor"](local=True)
|
||||
|
||||
conf = {
|
||||
"id": "https://test.federation/test",
|
||||
"items": tfs,
|
||||
"item_serializer": serializers.AudioSerializer,
|
||||
"items": uploads,
|
||||
"item_serializer": serializers.UploadSerializer,
|
||||
"actor": actor,
|
||||
"page_size": 2,
|
||||
}
|
||||
|
@ -355,7 +358,7 @@ def test_paginated_collection_serializer(factories):
|
|||
"type": "Collection",
|
||||
"id": conf["id"],
|
||||
"actor": actor.fid,
|
||||
"totalItems": len(tfs),
|
||||
"totalItems": len(uploads),
|
||||
"current": conf["id"] + "?page=1",
|
||||
"last": conf["id"] + "?page=3",
|
||||
"first": conf["id"] + "?page=1",
|
||||
|
@ -425,7 +428,7 @@ def test_collection_page_serializer_can_validate_child():
|
|||
}
|
||||
|
||||
serializer = serializers.CollectionPageSerializer(
|
||||
data=data, context={"item_serializer": serializers.AudioSerializer}
|
||||
data=data, context={"item_serializer": serializers.UploadSerializer}
|
||||
)
|
||||
|
||||
# child are validated but not included in data if not valid
|
||||
|
@ -434,14 +437,14 @@ def test_collection_page_serializer_can_validate_child():
|
|||
|
||||
|
||||
def test_collection_page_serializer(factories):
|
||||
tfs = factories["music.TrackFile"].create_batch(size=5)
|
||||
uploads = factories["music.Upload"].create_batch(size=5)
|
||||
actor = factories["federation.Actor"](local=True)
|
||||
|
||||
conf = {
|
||||
"id": "https://test.federation/test",
|
||||
"item_serializer": serializers.AudioSerializer,
|
||||
"item_serializer": serializers.UploadSerializer,
|
||||
"actor": actor,
|
||||
"page": Paginator(tfs, 2).page(2),
|
||||
"page": Paginator(uploads, 2).page(2),
|
||||
}
|
||||
expected = {
|
||||
"@context": [
|
||||
|
@ -452,7 +455,7 @@ def test_collection_page_serializer(factories):
|
|||
"type": "CollectionPage",
|
||||
"id": conf["id"] + "?page=2",
|
||||
"actor": actor.fid,
|
||||
"totalItems": len(tfs),
|
||||
"totalItems": len(uploads),
|
||||
"partOf": conf["id"],
|
||||
"prev": conf["id"] + "?page=1",
|
||||
"next": conf["id"] + "?page=3",
|
||||
|
@ -471,38 +474,12 @@ def test_collection_page_serializer(factories):
|
|||
assert serializer.data == expected
|
||||
|
||||
|
||||
def test_activity_pub_audio_serializer_to_library_track_no_duplicate(factories):
|
||||
remote_library = factories["music.Library"]()
|
||||
tf = factories["music.TrackFile"].build(library=remote_library)
|
||||
data = serializers.AudioSerializer(tf).data
|
||||
serializer1 = serializers.AudioSerializer(data=data)
|
||||
serializer2 = serializers.AudioSerializer(data=data)
|
||||
|
||||
assert serializer1.is_valid(raise_exception=True) is True
|
||||
assert serializer2.is_valid(raise_exception=True) is True
|
||||
|
||||
tf1 = serializer1.save()
|
||||
tf2 = serializer2.save()
|
||||
|
||||
assert tf1 == tf2
|
||||
|
||||
assert tf1.library == remote_library
|
||||
assert tf1.source == utils.full_url(tf.listen_url)
|
||||
assert tf1.mimetype == tf.mimetype
|
||||
assert tf1.bitrate == tf.bitrate
|
||||
assert tf1.duration == tf.duration
|
||||
assert tf1.size == tf.size
|
||||
assert tf1.metadata == data
|
||||
assert tf1.fid == tf.get_federation_id()
|
||||
assert not tf1.audio_file
|
||||
|
||||
|
||||
def test_music_library_serializer_to_ap(factories):
|
||||
library = factories["music.Library"]()
|
||||
# pending, errored and skippednot included
|
||||
factories["music.TrackFile"](import_status="pending")
|
||||
factories["music.TrackFile"](import_status="errored")
|
||||
factories["music.TrackFile"](import_status="finished")
|
||||
factories["music.Upload"](import_status="pending")
|
||||
factories["music.Upload"](import_status="errored")
|
||||
factories["music.Upload"](import_status="finished")
|
||||
serializer = serializers.LibrarySerializer(library)
|
||||
expected = {
|
||||
"@context": [
|
||||
|
@ -520,6 +497,7 @@ def test_music_library_serializer_to_ap(factories):
|
|||
"current": library.fid + "?page=1",
|
||||
"last": library.fid + "?page=1",
|
||||
"first": library.fid + "?page=1",
|
||||
"followers": library.followers_url,
|
||||
}
|
||||
|
||||
assert serializer.data == expected
|
||||
|
@ -541,6 +519,7 @@ def test_music_library_serializer_from_public(factories, mocker):
|
|||
"summary": "World",
|
||||
"type": "Library",
|
||||
"id": "https://library.id",
|
||||
"followers": "https://library.id/followers",
|
||||
"actor": actor.fid,
|
||||
"totalItems": 12,
|
||||
"first": "https://library.id?page=1",
|
||||
|
@ -554,10 +533,12 @@ def test_music_library_serializer_from_public(factories, mocker):
|
|||
|
||||
assert library.actor == actor
|
||||
assert library.fid == data["id"]
|
||||
assert library.files_count == data["totalItems"]
|
||||
assert library.uploads_count == data["totalItems"]
|
||||
assert library.privacy_level == "everyone"
|
||||
assert library.name == "Hello"
|
||||
assert library.description == "World"
|
||||
assert library.followers_url == data["followers"]
|
||||
|
||||
retrieve.assert_called_once_with(
|
||||
actor.fid,
|
||||
queryset=actor.__class__,
|
||||
|
@ -581,6 +562,7 @@ def test_music_library_serializer_from_private(factories, mocker):
|
|||
"summary": "World",
|
||||
"type": "Library",
|
||||
"id": "https://library.id",
|
||||
"followers": "https://library.id/followers",
|
||||
"actor": actor.fid,
|
||||
"totalItems": 12,
|
||||
"first": "https://library.id?page=1",
|
||||
|
@ -594,10 +576,11 @@ def test_music_library_serializer_from_private(factories, mocker):
|
|||
|
||||
assert library.actor == actor
|
||||
assert library.fid == data["id"]
|
||||
assert library.files_count == data["totalItems"]
|
||||
assert library.uploads_count == data["totalItems"]
|
||||
assert library.privacy_level == "me"
|
||||
assert library.name == "Hello"
|
||||
assert library.description == "World"
|
||||
assert library.followers_url == data["followers"]
|
||||
retrieve.assert_called_once_with(
|
||||
actor.fid,
|
||||
queryset=actor.__class__,
|
||||
|
@ -605,75 +588,349 @@ def test_music_library_serializer_from_private(factories, mocker):
|
|||
)
|
||||
|
||||
|
||||
@pytest.mark.parametrize(
|
||||
"model,serializer_class",
|
||||
[
|
||||
("music.Artist", serializers.ArtistSerializer),
|
||||
("music.Album", serializers.AlbumSerializer),
|
||||
("music.Track", serializers.TrackSerializer),
|
||||
],
|
||||
)
|
||||
def test_music_entity_serializer_create_existing_mbid(
|
||||
model, serializer_class, factories
|
||||
):
|
||||
entity = factories[model]()
|
||||
data = {"musicbrainzId": str(entity.mbid), "id": "https://noop"}
|
||||
serializer = serializer_class()
|
||||
|
||||
assert serializer.create(data) == entity
|
||||
|
||||
|
||||
@pytest.mark.parametrize(
|
||||
"model,serializer_class",
|
||||
[
|
||||
("music.Artist", serializers.ArtistSerializer),
|
||||
("music.Album", serializers.AlbumSerializer),
|
||||
("music.Track", serializers.TrackSerializer),
|
||||
],
|
||||
)
|
||||
def test_music_entity_serializer_create_existing_fid(
|
||||
model, serializer_class, factories
|
||||
):
|
||||
entity = factories[model](fid="https://entity.url")
|
||||
data = {"musicbrainzId": None, "id": "https://entity.url"}
|
||||
serializer = serializer_class()
|
||||
|
||||
assert serializer.create(data) == entity
|
||||
|
||||
|
||||
def test_activity_pub_artist_serializer_to_ap(factories):
|
||||
artist = factories["music.Artist"]()
|
||||
expected = {
|
||||
"@context": serializers.AP_CONTEXT,
|
||||
"type": "Artist",
|
||||
"id": artist.fid,
|
||||
"name": artist.name,
|
||||
"musicbrainzId": artist.mbid,
|
||||
"published": artist.creation_date.isoformat(),
|
||||
}
|
||||
serializer = serializers.ArtistSerializer(artist)
|
||||
|
||||
assert serializer.data == expected
|
||||
|
||||
|
||||
def test_activity_pub_artist_serializer_from_ap(factories):
|
||||
activity = factories["federation.Activity"]()
|
||||
|
||||
published = timezone.now()
|
||||
data = {
|
||||
"type": "Artist",
|
||||
"id": "http://hello.artist",
|
||||
"name": "John Smith",
|
||||
"musicbrainzId": str(uuid.uuid4()),
|
||||
"published": published.isoformat(),
|
||||
}
|
||||
serializer = serializers.ArtistSerializer(data=data, context={"activity": activity})
|
||||
|
||||
assert serializer.is_valid(raise_exception=True)
|
||||
|
||||
artist = serializer.save()
|
||||
|
||||
assert artist.from_activity == activity
|
||||
assert artist.name == data["name"]
|
||||
assert artist.fid == data["id"]
|
||||
assert str(artist.mbid) == data["musicbrainzId"]
|
||||
assert artist.creation_date == published
|
||||
|
||||
|
||||
def test_activity_pub_album_serializer_to_ap(factories):
|
||||
album = factories["music.Album"]()
|
||||
|
||||
expected = {
|
||||
"@context": serializers.AP_CONTEXT,
|
||||
"type": "Album",
|
||||
"id": album.fid,
|
||||
"name": album.title,
|
||||
"cover": {"type": "Image", "url": utils.full_url(album.cover.url)},
|
||||
"musicbrainzId": album.mbid,
|
||||
"published": album.creation_date.isoformat(),
|
||||
"released": album.release_date.isoformat(),
|
||||
"artists": [
|
||||
serializers.ArtistSerializer(
|
||||
album.artist, context={"include_ap_context": False}
|
||||
).data
|
||||
],
|
||||
}
|
||||
serializer = serializers.AlbumSerializer(album)
|
||||
|
||||
assert serializer.data == expected
|
||||
|
||||
|
||||
def test_activity_pub_album_serializer_from_ap(factories):
|
||||
activity = factories["federation.Activity"]()
|
||||
|
||||
published = timezone.now()
|
||||
released = timezone.now().date()
|
||||
data = {
|
||||
"type": "Album",
|
||||
"id": "http://hello.album",
|
||||
"name": "Purple album",
|
||||
"musicbrainzId": str(uuid.uuid4()),
|
||||
"published": published.isoformat(),
|
||||
"released": released.isoformat(),
|
||||
"artists": [
|
||||
{
|
||||
"type": "Artist",
|
||||
"id": "http://hello.artist",
|
||||
"name": "John Smith",
|
||||
"musicbrainzId": str(uuid.uuid4()),
|
||||
"published": published.isoformat(),
|
||||
}
|
||||
],
|
||||
}
|
||||
serializer = serializers.AlbumSerializer(data=data, context={"activity": activity})
|
||||
|
||||
assert serializer.is_valid(raise_exception=True)
|
||||
|
||||
album = serializer.save()
|
||||
artist = album.artist
|
||||
|
||||
assert album.from_activity == activity
|
||||
assert album.title == data["name"]
|
||||
assert album.fid == data["id"]
|
||||
assert str(album.mbid) == data["musicbrainzId"]
|
||||
assert album.creation_date == published
|
||||
assert album.release_date == released
|
||||
|
||||
assert artist.from_activity == activity
|
||||
assert artist.name == data["artists"][0]["name"]
|
||||
assert artist.fid == data["artists"][0]["id"]
|
||||
assert str(artist.mbid) == data["artists"][0]["musicbrainzId"]
|
||||
assert artist.creation_date == published
|
||||
|
||||
|
||||
def test_activity_pub_track_serializer_to_ap(factories):
|
||||
track = factories["music.Track"]()
|
||||
expected = {
|
||||
"@context": serializers.AP_CONTEXT,
|
||||
"published": track.creation_date.isoformat(),
|
||||
"type": "Track",
|
||||
"musicbrainzId": track.mbid,
|
||||
"id": track.fid,
|
||||
"name": track.title,
|
||||
"position": track.position,
|
||||
"artists": [
|
||||
serializers.ArtistSerializer(
|
||||
track.artist, context={"include_ap_context": False}
|
||||
).data
|
||||
],
|
||||
"album": serializers.AlbumSerializer(
|
||||
track.album, context={"include_ap_context": False}
|
||||
).data,
|
||||
}
|
||||
serializer = serializers.TrackSerializer(track)
|
||||
|
||||
assert serializer.data == expected
|
||||
|
||||
|
||||
def test_activity_pub_track_serializer_from_ap(factories):
|
||||
activity = factories["federation.Activity"]()
|
||||
published = timezone.now()
|
||||
released = timezone.now().date()
|
||||
data = {
|
||||
"type": "Track",
|
||||
"id": "http://hello.track",
|
||||
"published": published.isoformat(),
|
||||
"musicbrainzId": str(uuid.uuid4()),
|
||||
"name": "Black in back",
|
||||
"position": 5,
|
||||
"album": {
|
||||
"type": "Album",
|
||||
"id": "http://hello.album",
|
||||
"name": "Purple album",
|
||||
"musicbrainzId": str(uuid.uuid4()),
|
||||
"published": published.isoformat(),
|
||||
"released": released.isoformat(),
|
||||
"artists": [
|
||||
{
|
||||
"type": "Artist",
|
||||
"id": "http://hello.artist",
|
||||
"name": "John Smith",
|
||||
"musicbrainzId": str(uuid.uuid4()),
|
||||
"published": published.isoformat(),
|
||||
}
|
||||
],
|
||||
},
|
||||
"artists": [
|
||||
{
|
||||
"type": "Artist",
|
||||
"id": "http://hello.trackartist",
|
||||
"name": "Bob Smith",
|
||||
"musicbrainzId": str(uuid.uuid4()),
|
||||
"published": published.isoformat(),
|
||||
}
|
||||
],
|
||||
}
|
||||
serializer = serializers.TrackSerializer(data=data, context={"activity": activity})
|
||||
assert serializer.is_valid(raise_exception=True)
|
||||
|
||||
track = serializer.save()
|
||||
album = track.album
|
||||
artist = track.artist
|
||||
|
||||
assert track.from_activity == activity
|
||||
assert track.fid == data["id"]
|
||||
assert track.title == data["name"]
|
||||
assert track.position == data["position"]
|
||||
assert track.creation_date == published
|
||||
assert str(track.mbid) == data["musicbrainzId"]
|
||||
|
||||
assert album.from_activity == activity
|
||||
|
||||
assert album.title == data["album"]["name"]
|
||||
assert album.fid == data["album"]["id"]
|
||||
assert str(album.mbid) == data["album"]["musicbrainzId"]
|
||||
assert album.creation_date == published
|
||||
assert album.release_date == released
|
||||
|
||||
assert artist.from_activity == activity
|
||||
assert artist.name == data["artists"][0]["name"]
|
||||
assert artist.fid == data["artists"][0]["id"]
|
||||
assert str(artist.mbid) == data["artists"][0]["musicbrainzId"]
|
||||
assert artist.creation_date == published
|
||||
|
||||
|
||||
def test_activity_pub_upload_serializer_from_ap(factories, mocker):
|
||||
activity = factories["federation.Activity"]()
|
||||
library = factories["music.Library"]()
|
||||
|
||||
published = timezone.now()
|
||||
updated = timezone.now()
|
||||
released = timezone.now().date()
|
||||
data = {
|
||||
"@context": serializers.AP_CONTEXT,
|
||||
"type": "Audio",
|
||||
"id": "https://track.file",
|
||||
"name": "Ignored",
|
||||
"published": published.isoformat(),
|
||||
"updated": updated.isoformat(),
|
||||
"duration": 43,
|
||||
"bitrate": 42,
|
||||
"size": 66,
|
||||
"url": {"href": "https://audio.file", "type": "Link", "mediaType": "audio/mp3"},
|
||||
"library": library.fid,
|
||||
"track": {
|
||||
"type": "Track",
|
||||
"id": "http://hello.track",
|
||||
"published": published.isoformat(),
|
||||
"musicbrainzId": str(uuid.uuid4()),
|
||||
"name": "Black in back",
|
||||
"position": 5,
|
||||
"album": {
|
||||
"type": "Album",
|
||||
"id": "http://hello.album",
|
||||
"name": "Purple album",
|
||||
"musicbrainzId": str(uuid.uuid4()),
|
||||
"published": published.isoformat(),
|
||||
"released": released.isoformat(),
|
||||
"artists": [
|
||||
{
|
||||
"type": "Artist",
|
||||
"id": "http://hello.artist",
|
||||
"name": "John Smith",
|
||||
"musicbrainzId": str(uuid.uuid4()),
|
||||
"published": published.isoformat(),
|
||||
}
|
||||
],
|
||||
},
|
||||
"artists": [
|
||||
{
|
||||
"type": "Artist",
|
||||
"id": "http://hello.trackartist",
|
||||
"name": "Bob Smith",
|
||||
"musicbrainzId": str(uuid.uuid4()),
|
||||
"published": published.isoformat(),
|
||||
}
|
||||
],
|
||||
},
|
||||
}
|
||||
|
||||
serializer = serializers.UploadSerializer(data=data, context={"activity": activity})
|
||||
assert serializer.is_valid(raise_exception=True)
|
||||
track_create = mocker.spy(serializers.TrackSerializer, "create")
|
||||
upload = serializer.save()
|
||||
|
||||
assert upload.track.from_activity == activity
|
||||
assert upload.from_activity == activity
|
||||
assert track_create.call_count == 1
|
||||
assert upload.fid == data["id"]
|
||||
assert upload.track.fid == data["track"]["id"]
|
||||
assert upload.duration == data["duration"]
|
||||
assert upload.size == data["size"]
|
||||
assert upload.bitrate == data["bitrate"]
|
||||
assert upload.source == data["url"]["href"]
|
||||
assert upload.mimetype == data["url"]["mediaType"]
|
||||
assert upload.creation_date == published
|
||||
assert upload.import_status == "finished"
|
||||
assert upload.modification_date == updated
|
||||
|
||||
|
||||
def test_activity_pub_upload_serializer_validtes_library_actor(factories, mocker):
|
||||
library = factories["music.Library"]()
|
||||
usurpator = factories["federation.Actor"]()
|
||||
|
||||
serializer = serializers.UploadSerializer(data={}, context={"actor": usurpator})
|
||||
|
||||
with pytest.raises(serializers.serializers.ValidationError):
|
||||
serializer.validate_library(library.fid)
|
||||
|
||||
|
||||
def test_activity_pub_audio_serializer_to_ap(factories):
|
||||
tf = factories["music.TrackFile"](
|
||||
upload = factories["music.Upload"](
|
||||
mimetype="audio/mp3", bitrate=42, duration=43, size=44
|
||||
)
|
||||
expected = {
|
||||
"@context": serializers.AP_CONTEXT,
|
||||
"type": "Audio",
|
||||
"id": tf.get_federation_id(),
|
||||
"name": tf.track.full_name,
|
||||
"published": tf.creation_date.isoformat(),
|
||||
"updated": tf.modification_date.isoformat(),
|
||||
"metadata": {
|
||||
"artist": {
|
||||
"musicbrainz_id": tf.track.artist.mbid,
|
||||
"name": tf.track.artist.name,
|
||||
},
|
||||
"release": {
|
||||
"musicbrainz_id": tf.track.album.mbid,
|
||||
"title": tf.track.album.title,
|
||||
},
|
||||
"recording": {"musicbrainz_id": tf.track.mbid, "title": tf.track.title},
|
||||
"size": tf.size,
|
||||
"length": tf.duration,
|
||||
"bitrate": tf.bitrate,
|
||||
},
|
||||
"id": upload.fid,
|
||||
"name": upload.track.full_name,
|
||||
"published": upload.creation_date.isoformat(),
|
||||
"updated": upload.modification_date.isoformat(),
|
||||
"duration": upload.duration,
|
||||
"bitrate": upload.bitrate,
|
||||
"size": upload.size,
|
||||
"url": {
|
||||
"href": utils.full_url(tf.listen_url),
|
||||
"href": utils.full_url(upload.listen_url),
|
||||
"type": "Link",
|
||||
"mediaType": "audio/mp3",
|
||||
},
|
||||
"library": tf.library.get_federation_id(),
|
||||
"library": upload.library.fid,
|
||||
"track": serializers.TrackSerializer(
|
||||
upload.track, context={"include_ap_context": False}
|
||||
).data,
|
||||
}
|
||||
|
||||
serializer = serializers.AudioSerializer(tf)
|
||||
|
||||
assert serializer.data == expected
|
||||
|
||||
|
||||
def test_activity_pub_audio_serializer_to_ap_no_mbid(factories):
|
||||
tf = factories["music.TrackFile"](
|
||||
mimetype="audio/mp3",
|
||||
track__mbid=None,
|
||||
track__album__mbid=None,
|
||||
track__album__artist__mbid=None,
|
||||
)
|
||||
expected = {
|
||||
"@context": serializers.AP_CONTEXT,
|
||||
"type": "Audio",
|
||||
"id": tf.get_federation_id(),
|
||||
"name": tf.track.full_name,
|
||||
"published": tf.creation_date.isoformat(),
|
||||
"updated": tf.modification_date.isoformat(),
|
||||
"metadata": {
|
||||
"artist": {"name": tf.track.artist.name, "musicbrainz_id": None},
|
||||
"release": {"title": tf.track.album.title, "musicbrainz_id": None},
|
||||
"recording": {"title": tf.track.title, "musicbrainz_id": None},
|
||||
"size": tf.size,
|
||||
"length": None,
|
||||
"bitrate": None,
|
||||
},
|
||||
"url": {
|
||||
"href": utils.full_url(tf.listen_url),
|
||||
"type": "Link",
|
||||
"mediaType": "audio/mp3",
|
||||
},
|
||||
"library": tf.library.fid,
|
||||
}
|
||||
|
||||
serializer = serializers.AudioSerializer(tf)
|
||||
serializer = serializers.UploadSerializer(upload)
|
||||
|
||||
assert serializer.data == expected
|
||||
|
||||
|
@ -731,7 +988,7 @@ def test_local_actor_serializer_to_ap(factories):
|
|||
assert serializer.data == expected
|
||||
|
||||
|
||||
def test_activity_serializer_clean_recipients_empty(db):
|
||||
def test_activity_serializer_validate_recipients_empty(db):
|
||||
s = serializers.BaseActivitySerializer()
|
||||
|
||||
with pytest.raises(serializers.serializers.ValidationError):
|
||||
|
@ -742,32 +999,3 @@ def test_activity_serializer_clean_recipients_empty(db):
|
|||
|
||||
with pytest.raises(serializers.serializers.ValidationError):
|
||||
s.validate_recipients({"cc": []})
|
||||
|
||||
with pytest.raises(serializers.serializers.ValidationError):
|
||||
s.validate_recipients({"to": ["nope"]})
|
||||
|
||||
with pytest.raises(serializers.serializers.ValidationError):
|
||||
s.validate_recipients({"cc": ["nope"]})
|
||||
|
||||
|
||||
def test_activity_serializer_clean_recipients(factories):
|
||||
r1, r2, r3 = factories["federation.Actor"].create_batch(size=3)
|
||||
|
||||
s = serializers.BaseActivitySerializer()
|
||||
|
||||
expected = {"to": [r1, r2], "cc": [r3, activity.PUBLIC_ADDRESS]}
|
||||
|
||||
assert (
|
||||
s.validate_recipients(
|
||||
{"to": [r1.fid, r2.fid], "cc": [r3.fid, activity.PUBLIC_ADDRESS]}
|
||||
)
|
||||
== expected
|
||||
)
|
||||
|
||||
|
||||
def test_activity_serializer_clean_recipients_local(factories):
|
||||
r = factories["federation.Actor"]()
|
||||
|
||||
s = serializers.BaseActivitySerializer(context={"local_recipients": True})
|
||||
with pytest.raises(serializers.serializers.ValidationError):
|
||||
s.validate_recipients({"to": [r]})
|
||||
|
|
|
@ -11,27 +11,27 @@ from funkwhale_api.federation import tasks
|
|||
def test_clean_federation_music_cache_if_no_listen(preferences, factories):
|
||||
preferences["federation__music_cache_duration"] = 60
|
||||
remote_library = factories["music.Library"]()
|
||||
tf1 = factories["music.TrackFile"](
|
||||
upload1 = factories["music.Upload"](
|
||||
library=remote_library, accessed_date=timezone.now()
|
||||
)
|
||||
tf2 = factories["music.TrackFile"](
|
||||
upload2 = factories["music.Upload"](
|
||||
library=remote_library,
|
||||
accessed_date=timezone.now() - datetime.timedelta(minutes=61),
|
||||
)
|
||||
tf3 = factories["music.TrackFile"](library=remote_library, accessed_date=None)
|
||||
path1 = tf1.audio_file.path
|
||||
path2 = tf2.audio_file.path
|
||||
path3 = tf3.audio_file.path
|
||||
upload3 = factories["music.Upload"](library=remote_library, accessed_date=None)
|
||||
path1 = upload1.audio_file.path
|
||||
path2 = upload2.audio_file.path
|
||||
path3 = upload3.audio_file.path
|
||||
|
||||
tasks.clean_music_cache()
|
||||
|
||||
tf1.refresh_from_db()
|
||||
tf2.refresh_from_db()
|
||||
tf3.refresh_from_db()
|
||||
upload1.refresh_from_db()
|
||||
upload2.refresh_from_db()
|
||||
upload3.refresh_from_db()
|
||||
|
||||
assert bool(tf1.audio_file) is True
|
||||
assert bool(tf2.audio_file) is False
|
||||
assert bool(tf3.audio_file) is False
|
||||
assert bool(upload1.audio_file) is True
|
||||
assert bool(upload2.audio_file) is False
|
||||
assert bool(upload3.audio_file) is False
|
||||
assert os.path.exists(path1) is True
|
||||
assert os.path.exists(path2) is False
|
||||
assert os.path.exists(path3) is False
|
||||
|
@ -46,16 +46,16 @@ def test_clean_federation_music_cache_orphaned(settings, preferences, factories)
|
|||
os.makedirs(os.path.dirname(remove_path), exist_ok=True)
|
||||
pathlib.Path(keep_path).touch()
|
||||
pathlib.Path(remove_path).touch()
|
||||
tf = factories["music.TrackFile"](
|
||||
upload = factories["music.Upload"](
|
||||
accessed_date=timezone.now(), audio_file__path=keep_path
|
||||
)
|
||||
|
||||
tasks.clean_music_cache()
|
||||
|
||||
tf.refresh_from_db()
|
||||
upload.refresh_from_db()
|
||||
|
||||
assert bool(tf.audio_file) is True
|
||||
assert os.path.exists(tf.audio_file.path) is True
|
||||
assert bool(upload.audio_file) is True
|
||||
assert os.path.exists(upload.audio_file.path) is True
|
||||
assert os.path.exists(remove_path) is False
|
||||
|
||||
|
||||
|
@ -73,168 +73,47 @@ def test_handle_in(factories, mocker, now, queryset_equal_list):
|
|||
a.payload, context={"actor": a.actor, "activity": a, "inbox_items": [ii1, ii2]}
|
||||
)
|
||||
|
||||
ii1.refresh_from_db()
|
||||
ii2.refresh_from_db()
|
||||
|
||||
assert ii1.is_delivered is True
|
||||
assert ii2.is_delivered is True
|
||||
assert ii1.last_delivery_date == now
|
||||
assert ii2.last_delivery_date == now
|
||||
|
||||
|
||||
def test_handle_in_error(factories, mocker, now):
|
||||
mocker.patch(
|
||||
"funkwhale_api.federation.routes.inbox.dispatch", side_effect=Exception()
|
||||
)
|
||||
r1 = factories["users.User"](with_actor=True).actor
|
||||
r2 = factories["users.User"](with_actor=True).actor
|
||||
|
||||
a = factories["federation.Activity"](payload={"hello": "world"})
|
||||
factories["federation.InboxItem"](activity=a, actor=r1)
|
||||
factories["federation.InboxItem"](activity=a, actor=r2)
|
||||
|
||||
with pytest.raises(Exception):
|
||||
tasks.dispatch_inbox(activity_id=a.pk)
|
||||
|
||||
assert a.inbox_items.filter(is_delivered=False).count() == 2
|
||||
|
||||
|
||||
def test_dispatch_outbox_to_inbox(factories, mocker):
|
||||
def test_dispatch_outbox(factories, mocker):
|
||||
mocked_inbox = mocker.patch("funkwhale_api.federation.tasks.dispatch_inbox.delay")
|
||||
mocked_deliver_to_remote_inbox = mocker.patch(
|
||||
"funkwhale_api.federation.tasks.deliver_to_remote_inbox.delay"
|
||||
mocked_deliver_to_remote = mocker.patch(
|
||||
"funkwhale_api.federation.tasks.deliver_to_remote.delay"
|
||||
)
|
||||
activity = factories["federation.Activity"](actor__local=True)
|
||||
factories["federation.InboxItem"](activity=activity, actor__local=True)
|
||||
remote_ii = factories["federation.InboxItem"](
|
||||
activity=activity,
|
||||
actor__shared_inbox_url=None,
|
||||
actor__inbox_url="https://test.inbox",
|
||||
)
|
||||
factories["federation.InboxItem"](activity=activity)
|
||||
delivery = factories["federation.Delivery"](activity=activity)
|
||||
tasks.dispatch_outbox(activity_id=activity.pk)
|
||||
mocked_inbox.assert_called_once_with(activity_id=activity.pk)
|
||||
mocked_deliver_to_remote_inbox.assert_called_once_with(
|
||||
activity_id=activity.pk, inbox_url=remote_ii.actor.inbox_url
|
||||
)
|
||||
mocked_deliver_to_remote.assert_called_once_with(delivery_id=delivery.pk)
|
||||
|
||||
|
||||
def test_dispatch_outbox_to_shared_inbox_url(factories, mocker):
|
||||
mocked_deliver_to_remote_inbox = mocker.patch(
|
||||
"funkwhale_api.federation.tasks.deliver_to_remote_inbox.delay"
|
||||
)
|
||||
activity = factories["federation.Activity"](actor__local=True)
|
||||
# shared inbox
|
||||
remote_ii_shared1 = factories["federation.InboxItem"](
|
||||
activity=activity, actor__shared_inbox_url="https://shared.inbox"
|
||||
)
|
||||
# another on the same shared inbox
|
||||
factories["federation.InboxItem"](
|
||||
activity=activity, actor__shared_inbox_url="https://shared.inbox"
|
||||
)
|
||||
# one on a dedicated inbox
|
||||
remote_ii_single = factories["federation.InboxItem"](
|
||||
activity=activity,
|
||||
actor__shared_inbox_url=None,
|
||||
actor__inbox_url="https://single.inbox",
|
||||
)
|
||||
tasks.dispatch_outbox(activity_id=activity.pk)
|
||||
def test_deliver_to_remote_success_mark_as_delivered(factories, r_mock, now):
|
||||
delivery = factories["federation.Delivery"]()
|
||||
r_mock.post(delivery.inbox_url)
|
||||
tasks.deliver_to_remote(delivery_id=delivery.pk)
|
||||
|
||||
assert mocked_deliver_to_remote_inbox.call_count == 2
|
||||
mocked_deliver_to_remote_inbox.assert_any_call(
|
||||
activity_id=activity.pk,
|
||||
shared_inbox_url=remote_ii_shared1.actor.shared_inbox_url,
|
||||
)
|
||||
mocked_deliver_to_remote_inbox.assert_any_call(
|
||||
activity_id=activity.pk, inbox_url=remote_ii_single.actor.inbox_url
|
||||
)
|
||||
|
||||
|
||||
def test_deliver_to_remote_inbox_inbox_url(factories, r_mock):
|
||||
activity = factories["federation.Activity"]()
|
||||
url = "https://test.shared/"
|
||||
r_mock.post(url)
|
||||
|
||||
tasks.deliver_to_remote_inbox(activity_id=activity.pk, inbox_url=url)
|
||||
delivery.refresh_from_db()
|
||||
|
||||
request = r_mock.request_history[0]
|
||||
|
||||
assert delivery.is_delivered is True
|
||||
assert delivery.attempts == 1
|
||||
assert delivery.last_attempt_date == now
|
||||
assert r_mock.called is True
|
||||
assert r_mock.call_count == 1
|
||||
assert request.url == url
|
||||
assert request.url == delivery.inbox_url
|
||||
assert request.headers["content-type"] == "application/activity+json"
|
||||
assert request.json() == activity.payload
|
||||
assert request.json() == delivery.activity.payload
|
||||
|
||||
|
||||
def test_deliver_to_remote_inbox_shared_inbox_url(factories, r_mock):
|
||||
activity = factories["federation.Activity"]()
|
||||
url = "https://test.shared/"
|
||||
r_mock.post(url)
|
||||
def test_deliver_to_remote_error(factories, r_mock, now):
|
||||
delivery = factories["federation.Delivery"]()
|
||||
r_mock.post(delivery.inbox_url, status_code=404)
|
||||
|
||||
tasks.deliver_to_remote_inbox(activity_id=activity.pk, shared_inbox_url=url)
|
||||
|
||||
request = r_mock.request_history[0]
|
||||
|
||||
assert r_mock.called is True
|
||||
assert r_mock.call_count == 1
|
||||
assert request.url == url
|
||||
assert request.headers["content-type"] == "application/activity+json"
|
||||
assert request.json() == activity.payload
|
||||
|
||||
|
||||
def test_deliver_to_remote_inbox_success_shared_inbox_marks_inbox_items_as_delivered(
|
||||
factories, r_mock, now
|
||||
):
|
||||
activity = factories["federation.Activity"]()
|
||||
url = "https://test.shared/"
|
||||
r_mock.post(url)
|
||||
ii = factories["federation.InboxItem"](
|
||||
activity=activity, actor__shared_inbox_url=url
|
||||
)
|
||||
other_ii = factories["federation.InboxItem"](
|
||||
activity=activity, actor__shared_inbox_url="https://other.url"
|
||||
)
|
||||
tasks.deliver_to_remote_inbox(activity_id=activity.pk, shared_inbox_url=url)
|
||||
|
||||
ii.refresh_from_db()
|
||||
other_ii.refresh_from_db()
|
||||
|
||||
assert ii.is_delivered is True
|
||||
assert ii.last_delivery_date == now
|
||||
assert other_ii.is_delivered is False
|
||||
assert other_ii.last_delivery_date is None
|
||||
|
||||
|
||||
def test_deliver_to_remote_inbox_success_single_inbox_marks_inbox_items_as_delivered(
|
||||
factories, r_mock, now
|
||||
):
|
||||
activity = factories["federation.Activity"]()
|
||||
url = "https://test.single/"
|
||||
r_mock.post(url)
|
||||
ii = factories["federation.InboxItem"](activity=activity, actor__inbox_url=url)
|
||||
other_ii = factories["federation.InboxItem"](
|
||||
activity=activity, actor__inbox_url="https://other.url"
|
||||
)
|
||||
tasks.deliver_to_remote_inbox(activity_id=activity.pk, inbox_url=url)
|
||||
|
||||
ii.refresh_from_db()
|
||||
other_ii.refresh_from_db()
|
||||
|
||||
assert ii.is_delivered is True
|
||||
assert ii.last_delivery_date == now
|
||||
assert other_ii.is_delivered is False
|
||||
assert other_ii.last_delivery_date is None
|
||||
|
||||
|
||||
def test_deliver_to_remote_inbox_error(factories, r_mock, now):
|
||||
activity = factories["federation.Activity"]()
|
||||
url = "https://test.single/"
|
||||
r_mock.post(url, status_code=404)
|
||||
ii = factories["federation.InboxItem"](activity=activity, actor__inbox_url=url)
|
||||
with pytest.raises(tasks.RequestException):
|
||||
tasks.deliver_to_remote_inbox(activity_id=activity.pk, inbox_url=url)
|
||||
tasks.deliver_to_remote(delivery_id=delivery.pk)
|
||||
|
||||
ii.refresh_from_db()
|
||||
delivery.refresh_from_db()
|
||||
|
||||
assert ii.is_delivered is False
|
||||
assert ii.last_delivery_date == now
|
||||
assert ii.delivery_attempts == 1
|
||||
assert delivery.is_delivered is False
|
||||
assert delivery.attempts == 1
|
||||
assert delivery.last_attempt_date == now
|
||||
|
|
|
@ -109,6 +109,17 @@ def test_local_actor_inbox_post(factories, api_client, mocker, authenticated_act
|
|||
)
|
||||
|
||||
|
||||
def test_shared_inbox_post(factories, api_client, mocker, authenticated_actor):
|
||||
patched_receive = mocker.patch("funkwhale_api.federation.activity.receive")
|
||||
url = reverse("federation:shared-inbox")
|
||||
response = api_client.post(url, {"hello": "world"}, format="json")
|
||||
|
||||
assert response.status_code == 200
|
||||
patched_receive.assert_called_once_with(
|
||||
activity={"hello": "world"}, on_behalf_of=authenticated_actor
|
||||
)
|
||||
|
||||
|
||||
def test_wellknown_webfinger_local(factories, api_client, settings, mocker):
|
||||
user = factories["users.User"](with_actor=True)
|
||||
url = reverse("federation:well-known-webfinger")
|
||||
|
@ -138,14 +149,14 @@ def test_music_library_retrieve(factories, api_client, privacy_level):
|
|||
|
||||
def test_music_library_retrieve_page_public(factories, api_client):
|
||||
library = factories["music.Library"](privacy_level="everyone")
|
||||
tf = factories["music.TrackFile"](library=library)
|
||||
upload = factories["music.Upload"](library=library)
|
||||
id = library.get_federation_id()
|
||||
expected = serializers.CollectionPageSerializer(
|
||||
{
|
||||
"id": id,
|
||||
"item_serializer": serializers.AudioSerializer,
|
||||
"item_serializer": serializers.UploadSerializer,
|
||||
"actor": library.actor,
|
||||
"page": Paginator([tf], 1).page(1),
|
||||
"page": Paginator([upload], 1).page(1),
|
||||
"name": library.name,
|
||||
"summary": library.description,
|
||||
}
|
||||
|
|
|
@ -8,7 +8,7 @@ def test_get_users(mocker):
|
|||
|
||||
|
||||
def test_get_music_duration(factories):
|
||||
factories["music.TrackFile"].create_batch(size=5, duration=360)
|
||||
factories["music.Upload"].create_batch(size=5, duration=360)
|
||||
|
||||
# duration is in hours
|
||||
assert stats.get_music_duration() == 0.5
|
||||
|
|
|
@ -1,13 +1,13 @@
|
|||
from funkwhale_api.manage import serializers
|
||||
|
||||
|
||||
def test_manage_track_file_action_delete(factories):
|
||||
tfs = factories["music.TrackFile"](size=5)
|
||||
s = serializers.ManageTrackFileActionSerializer(queryset=None)
|
||||
def test_manage_upload_action_delete(factories):
|
||||
uploads = factories["music.Upload"](size=5)
|
||||
s = serializers.ManageUploadActionSerializer(queryset=None)
|
||||
|
||||
s.handle_delete(tfs.__class__.objects.all())
|
||||
s.handle_delete(uploads.__class__.objects.all())
|
||||
|
||||
assert tfs.__class__.objects.count() == 0
|
||||
assert uploads.__class__.objects.count() == 0
|
||||
|
||||
|
||||
def test_user_update_permission(factories):
|
||||
|
|
|
@ -7,7 +7,7 @@ from funkwhale_api.manage import serializers, views
|
|||
@pytest.mark.parametrize(
|
||||
"view,permissions,operator",
|
||||
[
|
||||
(views.ManageTrackFileViewSet, ["library"], "and"),
|
||||
(views.ManageUploadViewSet, ["library"], "and"),
|
||||
(views.ManageUserViewSet, ["settings"], "and"),
|
||||
(views.ManageInvitationViewSet, ["settings"], "and"),
|
||||
(views.ManageImportRequestViewSet, ["library"], "and"),
|
||||
|
@ -18,17 +18,17 @@ def test_permissions(assert_user_permission, view, permissions, operator):
|
|||
|
||||
|
||||
@pytest.mark.skip(reason="Refactoring in progress")
|
||||
def test_track_file_view(factories, superuser_api_client):
|
||||
tfs = factories["music.TrackFile"].create_batch(size=5)
|
||||
qs = tfs[0].__class__.objects.order_by("-creation_date")
|
||||
url = reverse("api:v1:manage:library:track-files-list")
|
||||
def test_upload_view(factories, superuser_api_client):
|
||||
uploads = factories["music.Upload"].create_batch(size=5)
|
||||
qs = uploads[0].__class__.objects.order_by("-creation_date")
|
||||
url = reverse("api:v1:manage:library:uploads-list")
|
||||
|
||||
response = superuser_api_client.get(url, {"sort": "-creation_date"})
|
||||
expected = serializers.ManageTrackFileSerializer(
|
||||
expected = serializers.ManageUploadSerializer(
|
||||
qs, many=True, context={"request": response.wsgi_request}
|
||||
).data
|
||||
|
||||
assert response.data["count"] == len(tfs)
|
||||
assert response.data["count"] == len(uploads)
|
||||
assert response.data["results"] == expected
|
||||
|
||||
|
||||
|
|
|
@ -14,14 +14,14 @@ def test_get_track_activity_url_no_mbid(settings, factories):
|
|||
assert track.get_activity_url() == expected
|
||||
|
||||
|
||||
def test_track_file_import_status_updated_broadcast(factories, mocker):
|
||||
def test_upload_import_status_updated_broadcast(factories, mocker):
|
||||
group_send = mocker.patch("funkwhale_api.common.channels.group_send")
|
||||
user = factories["users.User"]()
|
||||
tf = factories["music.TrackFile"](
|
||||
upload = factories["music.Upload"](
|
||||
import_status="finished", library__actor__user=user
|
||||
)
|
||||
signals.track_file_import_status_updated.send(
|
||||
sender=None, track_file=tf, old_status="pending", new_status="finished"
|
||||
signals.upload_import_status_updated.send(
|
||||
sender=None, upload=upload, old_status="pending", new_status="finished"
|
||||
)
|
||||
group_send.assert_called_once_with(
|
||||
"user.{}.imports".format(user.pk),
|
||||
|
@ -32,7 +32,7 @@ def test_track_file_import_status_updated_broadcast(factories, mocker):
|
|||
"type": "import.status_updated",
|
||||
"old_status": "pending",
|
||||
"new_status": "finished",
|
||||
"track_file": serializers.TrackFileForOwnerSerializer(tf).data,
|
||||
"upload": serializers.UploadForOwnerSerializer(upload).data,
|
||||
},
|
||||
},
|
||||
)
|
||||
|
|
|
@ -25,26 +25,26 @@ def test_can_restrict_api_views_to_authenticated_users(
|
|||
assert response.status_code == 401
|
||||
|
||||
|
||||
def test_track_file_url_is_restricted_to_authenticated_users(
|
||||
def test_upload_url_is_restricted_to_authenticated_users(
|
||||
api_client, factories, preferences
|
||||
):
|
||||
preferences["common__api_authentication_required"] = True
|
||||
tf = factories["music.TrackFile"](library__privacy_level="instance")
|
||||
assert tf.audio_file is not None
|
||||
url = tf.track.listen_url
|
||||
upload = factories["music.Upload"](library__privacy_level="instance")
|
||||
assert upload.audio_file is not None
|
||||
url = upload.track.listen_url
|
||||
response = api_client.get(url)
|
||||
assert response.status_code == 401
|
||||
|
||||
|
||||
def test_track_file_url_is_accessible_to_authenticated_users(
|
||||
def test_upload_url_is_accessible_to_authenticated_users(
|
||||
logged_in_api_client, factories, preferences
|
||||
):
|
||||
actor = logged_in_api_client.user.create_actor()
|
||||
preferences["common__api_authentication_required"] = True
|
||||
tf = factories["music.TrackFile"](library__actor=actor)
|
||||
assert tf.audio_file is not None
|
||||
url = tf.track.listen_url
|
||||
upload = factories["music.Upload"](library__actor=actor)
|
||||
assert upload.audio_file is not None
|
||||
url = upload.track.listen_url
|
||||
response = logged_in_api_client.get(url)
|
||||
|
||||
assert response.status_code == 200
|
||||
assert response["X-Accel-Redirect"] == "/_protected{}".format(tf.audio_file.url)
|
||||
assert response["X-Accel-Redirect"] == "/_protected{}".format(upload.audio_file.url)
|
||||
|
|
|
@ -1,14 +1,14 @@
|
|||
import os
|
||||
|
||||
from funkwhale_api.music.management.commands import fix_track_files
|
||||
from funkwhale_api.music.management.commands import fix_uploads
|
||||
|
||||
DATA_DIR = os.path.dirname(os.path.abspath(__file__))
|
||||
|
||||
|
||||
def test_fix_track_files_bitrate_length(factories, mocker):
|
||||
tf1 = factories["music.TrackFile"](bitrate=1, duration=2)
|
||||
tf2 = factories["music.TrackFile"](bitrate=None, duration=None)
|
||||
c = fix_track_files.Command()
|
||||
def test_fix_uploads_bitrate_length(factories, mocker):
|
||||
upload1 = factories["music.Upload"](bitrate=1, duration=2)
|
||||
upload2 = factories["music.Upload"](bitrate=None, duration=None)
|
||||
c = fix_uploads.Command()
|
||||
|
||||
mocker.patch(
|
||||
"funkwhale_api.music.utils.get_audio_file_data",
|
||||
|
@ -17,59 +17,59 @@ def test_fix_track_files_bitrate_length(factories, mocker):
|
|||
|
||||
c.fix_file_data(dry_run=False)
|
||||
|
||||
tf1.refresh_from_db()
|
||||
tf2.refresh_from_db()
|
||||
upload1.refresh_from_db()
|
||||
upload2.refresh_from_db()
|
||||
|
||||
# not updated
|
||||
assert tf1.bitrate == 1
|
||||
assert tf1.duration == 2
|
||||
assert upload1.bitrate == 1
|
||||
assert upload1.duration == 2
|
||||
|
||||
# updated
|
||||
assert tf2.bitrate == 42
|
||||
assert tf2.duration == 43
|
||||
assert upload2.bitrate == 42
|
||||
assert upload2.duration == 43
|
||||
|
||||
|
||||
def test_fix_track_files_size(factories, mocker):
|
||||
tf1 = factories["music.TrackFile"]()
|
||||
tf2 = factories["music.TrackFile"]()
|
||||
tf1.__class__.objects.filter(pk=tf1.pk).update(size=1)
|
||||
tf2.__class__.objects.filter(pk=tf2.pk).update(size=None)
|
||||
c = fix_track_files.Command()
|
||||
def test_fix_uploads_size(factories, mocker):
|
||||
upload1 = factories["music.Upload"]()
|
||||
upload2 = factories["music.Upload"]()
|
||||
upload1.__class__.objects.filter(pk=upload1.pk).update(size=1)
|
||||
upload2.__class__.objects.filter(pk=upload2.pk).update(size=None)
|
||||
c = fix_uploads.Command()
|
||||
|
||||
mocker.patch("funkwhale_api.music.models.TrackFile.get_file_size", return_value=2)
|
||||
mocker.patch("funkwhale_api.music.models.Upload.get_file_size", return_value=2)
|
||||
|
||||
c.fix_file_size(dry_run=False)
|
||||
|
||||
tf1.refresh_from_db()
|
||||
tf2.refresh_from_db()
|
||||
upload1.refresh_from_db()
|
||||
upload2.refresh_from_db()
|
||||
|
||||
# not updated
|
||||
assert tf1.size == 1
|
||||
assert upload1.size == 1
|
||||
|
||||
# updated
|
||||
assert tf2.size == 2
|
||||
assert upload2.size == 2
|
||||
|
||||
|
||||
def test_fix_track_files_mimetype(factories, mocker):
|
||||
def test_fix_uploads_mimetype(factories, mocker):
|
||||
mp3_path = os.path.join(DATA_DIR, "test.mp3")
|
||||
ogg_path = os.path.join(DATA_DIR, "test.ogg")
|
||||
tf1 = factories["music.TrackFile"](
|
||||
upload1 = factories["music.Upload"](
|
||||
audio_file__from_path=mp3_path,
|
||||
source="file://{}".format(mp3_path),
|
||||
mimetype="application/x-empty",
|
||||
)
|
||||
|
||||
# this one already has a mimetype set, to it should not be updated
|
||||
tf2 = factories["music.TrackFile"](
|
||||
upload2 = factories["music.Upload"](
|
||||
audio_file__from_path=ogg_path,
|
||||
source="file://{}".format(ogg_path),
|
||||
mimetype="audio/something",
|
||||
)
|
||||
c = fix_track_files.Command()
|
||||
c = fix_uploads.Command()
|
||||
c.fix_mimetypes(dry_run=False)
|
||||
|
||||
tf1.refresh_from_db()
|
||||
tf2.refresh_from_db()
|
||||
upload1.refresh_from_db()
|
||||
upload2.refresh_from_db()
|
||||
|
||||
assert tf1.mimetype == "audio/mpeg"
|
||||
assert tf2.mimetype == "audio/something"
|
||||
assert upload1.mimetype == "audio/mpeg"
|
||||
assert upload2.mimetype == "audio/something"
|
||||
|
|
|
@ -3,8 +3,10 @@ import os
|
|||
import pytest
|
||||
|
||||
from django.utils import timezone
|
||||
from django.urls import reverse
|
||||
|
||||
from funkwhale_api.music import importers, models, tasks
|
||||
from funkwhale_api.federation import utils as federation_utils
|
||||
|
||||
DATA_DIR = os.path.dirname(os.path.abspath(__file__))
|
||||
|
||||
|
@ -157,33 +159,33 @@ def test_audio_track_mime_type(extention, mimetype, factories):
|
|||
|
||||
name = ".".join(["test", extention])
|
||||
path = os.path.join(DATA_DIR, name)
|
||||
tf = factories["music.TrackFile"](audio_file__from_path=path, mimetype=None)
|
||||
upload = factories["music.Upload"](audio_file__from_path=path, mimetype=None)
|
||||
|
||||
assert tf.mimetype == mimetype
|
||||
assert upload.mimetype == mimetype
|
||||
|
||||
|
||||
def test_track_file_file_name(factories):
|
||||
def test_upload_file_name(factories):
|
||||
name = "test.mp3"
|
||||
path = os.path.join(DATA_DIR, name)
|
||||
tf = factories["music.TrackFile"](audio_file__from_path=path)
|
||||
upload = factories["music.Upload"](audio_file__from_path=path)
|
||||
|
||||
assert tf.filename == tf.track.full_name + ".mp3"
|
||||
assert upload.filename == upload.track.full_name + ".mp3"
|
||||
|
||||
|
||||
def test_track_get_file_size(factories):
|
||||
name = "test.mp3"
|
||||
path = os.path.join(DATA_DIR, name)
|
||||
tf = factories["music.TrackFile"](audio_file__from_path=path)
|
||||
upload = factories["music.Upload"](audio_file__from_path=path)
|
||||
|
||||
assert tf.get_file_size() == 297745
|
||||
assert upload.get_file_size() == 297745
|
||||
|
||||
|
||||
def test_track_get_file_size_in_place(factories):
|
||||
name = "test.mp3"
|
||||
path = os.path.join(DATA_DIR, name)
|
||||
tf = factories["music.TrackFile"](in_place=True, source="file://{}".format(path))
|
||||
upload = factories["music.Upload"](in_place=True, source="file://{}".format(path))
|
||||
|
||||
assert tf.get_file_size() == 297745
|
||||
assert upload.get_file_size() == 297745
|
||||
|
||||
|
||||
def test_album_get_image_content(factories):
|
||||
|
@ -202,7 +204,7 @@ def test_library(factories):
|
|||
)
|
||||
|
||||
assert library.creation_date >= now
|
||||
assert library.files.count() == 0
|
||||
assert library.uploads.count() == 0
|
||||
assert library.uuid is not None
|
||||
|
||||
|
||||
|
@ -210,9 +212,9 @@ def test_library(factories):
|
|||
"privacy_level,expected", [("me", True), ("instance", True), ("everyone", True)]
|
||||
)
|
||||
def test_playable_by_correct_actor(privacy_level, expected, factories):
|
||||
tf = factories["music.TrackFile"](library__privacy_level=privacy_level)
|
||||
queryset = tf.library.files.playable_by(tf.library.actor)
|
||||
match = tf in list(queryset)
|
||||
upload = factories["music.Upload"](library__privacy_level=privacy_level)
|
||||
queryset = upload.library.uploads.playable_by(upload.library.actor)
|
||||
match = upload in list(queryset)
|
||||
assert match is expected
|
||||
|
||||
|
||||
|
@ -220,10 +222,10 @@ def test_playable_by_correct_actor(privacy_level, expected, factories):
|
|||
"privacy_level,expected", [("me", False), ("instance", True), ("everyone", True)]
|
||||
)
|
||||
def test_playable_by_instance_actor(privacy_level, expected, factories):
|
||||
tf = factories["music.TrackFile"](library__privacy_level=privacy_level)
|
||||
instance_actor = factories["federation.Actor"](domain=tf.library.actor.domain)
|
||||
queryset = tf.library.files.playable_by(instance_actor)
|
||||
match = tf in list(queryset)
|
||||
upload = factories["music.Upload"](library__privacy_level=privacy_level)
|
||||
instance_actor = factories["federation.Actor"](domain=upload.library.actor.domain)
|
||||
queryset = upload.library.uploads.playable_by(instance_actor)
|
||||
match = upload in list(queryset)
|
||||
assert match is expected
|
||||
|
||||
|
||||
|
@ -231,9 +233,22 @@ def test_playable_by_instance_actor(privacy_level, expected, factories):
|
|||
"privacy_level,expected", [("me", False), ("instance", False), ("everyone", True)]
|
||||
)
|
||||
def test_playable_by_anonymous(privacy_level, expected, factories):
|
||||
tf = factories["music.TrackFile"](library__privacy_level=privacy_level)
|
||||
queryset = tf.library.files.playable_by(None)
|
||||
match = tf in list(queryset)
|
||||
upload = factories["music.Upload"](library__privacy_level=privacy_level)
|
||||
queryset = upload.library.uploads.playable_by(None)
|
||||
match = upload in list(queryset)
|
||||
assert match is expected
|
||||
|
||||
|
||||
@pytest.mark.parametrize("approved", [True, False])
|
||||
def test_playable_by_follower(approved, factories):
|
||||
upload = factories["music.Upload"](library__privacy_level="me")
|
||||
actor = factories["federation.Actor"](local=True)
|
||||
factories["federation.LibraryFollow"](
|
||||
target=upload.library, actor=actor, approved=approved
|
||||
)
|
||||
queryset = upload.library.uploads.playable_by(actor)
|
||||
match = upload in list(queryset)
|
||||
expected = approved
|
||||
assert match is expected
|
||||
|
||||
|
||||
|
@ -241,11 +256,11 @@ def test_playable_by_anonymous(privacy_level, expected, factories):
|
|||
"privacy_level,expected", [("me", True), ("instance", True), ("everyone", True)]
|
||||
)
|
||||
def test_track_playable_by_correct_actor(privacy_level, expected, factories):
|
||||
tf = factories["music.TrackFile"]()
|
||||
upload = factories["music.Upload"]()
|
||||
queryset = models.Track.objects.playable_by(
|
||||
tf.library.actor
|
||||
).annotate_playable_by_actor(tf.library.actor)
|
||||
match = tf.track in list(queryset)
|
||||
upload.library.actor
|
||||
).annotate_playable_by_actor(upload.library.actor)
|
||||
match = upload.track in list(queryset)
|
||||
assert match is expected
|
||||
if expected:
|
||||
assert bool(queryset.first().is_playable_by_actor) is expected
|
||||
|
@ -255,12 +270,12 @@ def test_track_playable_by_correct_actor(privacy_level, expected, factories):
|
|||
"privacy_level,expected", [("me", False), ("instance", True), ("everyone", True)]
|
||||
)
|
||||
def test_track_playable_by_instance_actor(privacy_level, expected, factories):
|
||||
tf = factories["music.TrackFile"](library__privacy_level=privacy_level)
|
||||
instance_actor = factories["federation.Actor"](domain=tf.library.actor.domain)
|
||||
upload = factories["music.Upload"](library__privacy_level=privacy_level)
|
||||
instance_actor = factories["federation.Actor"](domain=upload.library.actor.domain)
|
||||
queryset = models.Track.objects.playable_by(
|
||||
instance_actor
|
||||
).annotate_playable_by_actor(instance_actor)
|
||||
match = tf.track in list(queryset)
|
||||
match = upload.track in list(queryset)
|
||||
assert match is expected
|
||||
if expected:
|
||||
assert bool(queryset.first().is_playable_by_actor) is expected
|
||||
|
@ -270,9 +285,9 @@ def test_track_playable_by_instance_actor(privacy_level, expected, factories):
|
|||
"privacy_level,expected", [("me", False), ("instance", False), ("everyone", True)]
|
||||
)
|
||||
def test_track_playable_by_anonymous(privacy_level, expected, factories):
|
||||
tf = factories["music.TrackFile"](library__privacy_level=privacy_level)
|
||||
upload = factories["music.Upload"](library__privacy_level=privacy_level)
|
||||
queryset = models.Track.objects.playable_by(None).annotate_playable_by_actor(None)
|
||||
match = tf.track in list(queryset)
|
||||
match = upload.track in list(queryset)
|
||||
assert match is expected
|
||||
if expected:
|
||||
assert bool(queryset.first().is_playable_by_actor) is expected
|
||||
|
@ -282,12 +297,12 @@ def test_track_playable_by_anonymous(privacy_level, expected, factories):
|
|||
"privacy_level,expected", [("me", True), ("instance", True), ("everyone", True)]
|
||||
)
|
||||
def test_album_playable_by_correct_actor(privacy_level, expected, factories):
|
||||
tf = factories["music.TrackFile"]()
|
||||
upload = factories["music.Upload"]()
|
||||
|
||||
queryset = models.Album.objects.playable_by(
|
||||
tf.library.actor
|
||||
).annotate_playable_by_actor(tf.library.actor)
|
||||
match = tf.track.album in list(queryset)
|
||||
upload.library.actor
|
||||
).annotate_playable_by_actor(upload.library.actor)
|
||||
match = upload.track.album in list(queryset)
|
||||
assert match is expected
|
||||
if expected:
|
||||
assert bool(queryset.first().is_playable_by_actor) is expected
|
||||
|
@ -297,12 +312,12 @@ def test_album_playable_by_correct_actor(privacy_level, expected, factories):
|
|||
"privacy_level,expected", [("me", False), ("instance", True), ("everyone", True)]
|
||||
)
|
||||
def test_album_playable_by_instance_actor(privacy_level, expected, factories):
|
||||
tf = factories["music.TrackFile"](library__privacy_level=privacy_level)
|
||||
instance_actor = factories["federation.Actor"](domain=tf.library.actor.domain)
|
||||
upload = factories["music.Upload"](library__privacy_level=privacy_level)
|
||||
instance_actor = factories["federation.Actor"](domain=upload.library.actor.domain)
|
||||
queryset = models.Album.objects.playable_by(
|
||||
instance_actor
|
||||
).annotate_playable_by_actor(instance_actor)
|
||||
match = tf.track.album in list(queryset)
|
||||
match = upload.track.album in list(queryset)
|
||||
assert match is expected
|
||||
if expected:
|
||||
assert bool(queryset.first().is_playable_by_actor) is expected
|
||||
|
@ -312,9 +327,9 @@ def test_album_playable_by_instance_actor(privacy_level, expected, factories):
|
|||
"privacy_level,expected", [("me", False), ("instance", False), ("everyone", True)]
|
||||
)
|
||||
def test_album_playable_by_anonymous(privacy_level, expected, factories):
|
||||
tf = factories["music.TrackFile"](library__privacy_level=privacy_level)
|
||||
upload = factories["music.Upload"](library__privacy_level=privacy_level)
|
||||
queryset = models.Album.objects.playable_by(None).annotate_playable_by_actor(None)
|
||||
match = tf.track.album in list(queryset)
|
||||
match = upload.track.album in list(queryset)
|
||||
assert match is expected
|
||||
if expected:
|
||||
assert bool(queryset.first().is_playable_by_actor) is expected
|
||||
|
@ -324,12 +339,12 @@ def test_album_playable_by_anonymous(privacy_level, expected, factories):
|
|||
"privacy_level,expected", [("me", True), ("instance", True), ("everyone", True)]
|
||||
)
|
||||
def test_artist_playable_by_correct_actor(privacy_level, expected, factories):
|
||||
tf = factories["music.TrackFile"]()
|
||||
upload = factories["music.Upload"]()
|
||||
|
||||
queryset = models.Artist.objects.playable_by(
|
||||
tf.library.actor
|
||||
).annotate_playable_by_actor(tf.library.actor)
|
||||
match = tf.track.artist in list(queryset)
|
||||
upload.library.actor
|
||||
).annotate_playable_by_actor(upload.library.actor)
|
||||
match = upload.track.artist in list(queryset)
|
||||
assert match is expected
|
||||
if expected:
|
||||
assert bool(queryset.first().is_playable_by_actor) is expected
|
||||
|
@ -339,12 +354,12 @@ def test_artist_playable_by_correct_actor(privacy_level, expected, factories):
|
|||
"privacy_level,expected", [("me", False), ("instance", True), ("everyone", True)]
|
||||
)
|
||||
def test_artist_playable_by_instance_actor(privacy_level, expected, factories):
|
||||
tf = factories["music.TrackFile"](library__privacy_level=privacy_level)
|
||||
instance_actor = factories["federation.Actor"](domain=tf.library.actor.domain)
|
||||
upload = factories["music.Upload"](library__privacy_level=privacy_level)
|
||||
instance_actor = factories["federation.Actor"](domain=upload.library.actor.domain)
|
||||
queryset = models.Artist.objects.playable_by(
|
||||
instance_actor
|
||||
).annotate_playable_by_actor(instance_actor)
|
||||
match = tf.track.artist in list(queryset)
|
||||
match = upload.track.artist in list(queryset)
|
||||
assert match is expected
|
||||
if expected:
|
||||
assert bool(queryset.first().is_playable_by_actor) is expected
|
||||
|
@ -354,24 +369,24 @@ def test_artist_playable_by_instance_actor(privacy_level, expected, factories):
|
|||
"privacy_level,expected", [("me", False), ("instance", False), ("everyone", True)]
|
||||
)
|
||||
def test_artist_playable_by_anonymous(privacy_level, expected, factories):
|
||||
tf = factories["music.TrackFile"](library__privacy_level=privacy_level)
|
||||
upload = factories["music.Upload"](library__privacy_level=privacy_level)
|
||||
queryset = models.Artist.objects.playable_by(None).annotate_playable_by_actor(None)
|
||||
match = tf.track.artist in list(queryset)
|
||||
match = upload.track.artist in list(queryset)
|
||||
assert match is expected
|
||||
if expected:
|
||||
assert bool(queryset.first().is_playable_by_actor) is expected
|
||||
|
||||
|
||||
def test_track_file_listen_url(factories):
|
||||
tf = factories["music.TrackFile"]()
|
||||
expected = tf.track.listen_url + "?file={}".format(tf.uuid)
|
||||
def test_upload_listen_url(factories):
|
||||
upload = factories["music.Upload"]()
|
||||
expected = upload.track.listen_url + "?upload={}".format(upload.uuid)
|
||||
|
||||
assert tf.listen_url == expected
|
||||
assert upload.listen_url == expected
|
||||
|
||||
|
||||
def test_library_schedule_scan(factories, now, mocker):
|
||||
on_commit = mocker.patch("funkwhale_api.common.utils.on_commit")
|
||||
library = factories["music.Library"](files_count=5)
|
||||
library = factories["music.Library"](uploads_count=5)
|
||||
|
||||
scan = library.schedule_scan()
|
||||
|
||||
|
@ -397,9 +412,9 @@ def test_library_schedule_scan_too_recent(factories, now):
|
|||
|
||||
|
||||
def test_get_audio_data(factories):
|
||||
tf = factories["music.TrackFile"]()
|
||||
upload = factories["music.Upload"]()
|
||||
|
||||
result = tf.get_audio_data()
|
||||
result = upload.get_audio_data()
|
||||
|
||||
assert result == {"duration": 229, "bitrate": 128000, "size": 3459481}
|
||||
|
||||
|
@ -419,3 +434,43 @@ def test_library_queryset_with_follows(factories):
|
|||
l2 = list(qs)[1]
|
||||
assert l1._follows == []
|
||||
assert l2._follows == [follow]
|
||||
|
||||
|
||||
def test_annotate_duration(factories):
|
||||
tf = factories["music.Upload"](duration=32)
|
||||
|
||||
track = models.Track.objects.annotate_duration().get(pk=tf.track.pk)
|
||||
|
||||
assert track.duration == 32
|
||||
|
||||
|
||||
def test_annotate_file_data(factories):
|
||||
tf = factories["music.Upload"](size=42, bitrate=55, mimetype="audio/ogg")
|
||||
|
||||
track = models.Track.objects.annotate_file_data().get(pk=tf.track.pk)
|
||||
|
||||
assert track.size == 42
|
||||
assert track.bitrate == 55
|
||||
assert track.mimetype == "audio/ogg"
|
||||
|
||||
|
||||
@pytest.mark.parametrize(
|
||||
"model,factory_args,namespace",
|
||||
[
|
||||
(
|
||||
"music.Upload",
|
||||
{"library__actor__local": True},
|
||||
"federation:music:uploads-detail",
|
||||
),
|
||||
("music.Library", {"actor__local": True}, "federation:music:libraries-detail"),
|
||||
("music.Artist", {}, "federation:music:artists-detail"),
|
||||
("music.Album", {}, "federation:music:albums-detail"),
|
||||
("music.Track", {}, "federation:music:tracks-detail"),
|
||||
],
|
||||
)
|
||||
def test_fid_is_populated(factories, model, factory_args, namespace):
|
||||
instance = factories[model](**factory_args, fid=None)
|
||||
|
||||
assert instance.fid == federation_utils.full_url(
|
||||
reverse(namespace, kwargs={"uuid": instance.uuid})
|
||||
)
|
||||
|
|
|
@ -2,6 +2,7 @@ import datetime
|
|||
|
||||
import pytest
|
||||
|
||||
from funkwhale_api.federation import utils as federation_utils
|
||||
from funkwhale_api.music import models
|
||||
|
||||
|
||||
|
@ -17,6 +18,9 @@ def test_can_create_artist_from_api(artists, mocker, db):
|
|||
assert data["id"], "62c3befb-6366-4585-b256-809472333801"
|
||||
assert artist.mbid, data["id"]
|
||||
assert artist.name, "Adhesive Wombat"
|
||||
assert artist.fid == federation_utils.full_url(
|
||||
"/federation/music/artists/{}".format(artist.uuid)
|
||||
)
|
||||
|
||||
|
||||
def test_can_create_album_from_api(artists, albums, mocker, db):
|
||||
|
@ -41,6 +45,9 @@ def test_can_create_album_from_api(artists, albums, mocker, db):
|
|||
assert album.release_date, datetime.date(2005, 1, 1)
|
||||
assert album.artist.name, "System of a Down"
|
||||
assert album.artist.mbid, data["artist-credit"][0]["artist"]["id"]
|
||||
assert album.fid == federation_utils.full_url(
|
||||
"/federation/music/albums/{}".format(album.uuid)
|
||||
)
|
||||
|
||||
|
||||
def test_can_create_track_from_api(artists, albums, tracks, mocker, db):
|
||||
|
@ -66,6 +73,9 @@ def test_can_create_track_from_api(artists, albums, tracks, mocker, db):
|
|||
assert track.artist.name == "Adhesive Wombat"
|
||||
assert str(track.album.mbid) == "a50d2a81-2a50-484d-9cb4-b9f6833f583e"
|
||||
assert track.album.title == "Marsupial Madness"
|
||||
assert track.fid == federation_utils.full_url(
|
||||
"/federation/music/tracks/{}".format(track.uuid)
|
||||
)
|
||||
|
||||
|
||||
def test_can_create_track_from_api_with_corresponding_tags(
|
||||
|
|
|
@ -46,8 +46,8 @@ def test_artist_with_albums_serializer(factories, to_api_date):
|
|||
|
||||
|
||||
def test_album_track_serializer(factories, to_api_date):
|
||||
tf = factories["music.TrackFile"]()
|
||||
track = tf.track
|
||||
upload = factories["music.Upload"]()
|
||||
track = upload.track
|
||||
|
||||
expected = {
|
||||
"id": track.id,
|
||||
|
@ -59,33 +59,34 @@ def test_album_track_serializer(factories, to_api_date):
|
|||
"is_playable": None,
|
||||
"creation_date": to_api_date(track.creation_date),
|
||||
"listen_url": track.listen_url,
|
||||
"duration": None,
|
||||
}
|
||||
serializer = serializers.AlbumTrackSerializer(track)
|
||||
assert serializer.data == expected
|
||||
|
||||
|
||||
def test_track_file_serializer(factories, to_api_date):
|
||||
tf = factories["music.TrackFile"]()
|
||||
def test_upload_serializer(factories, to_api_date):
|
||||
upload = factories["music.Upload"]()
|
||||
|
||||
expected = {
|
||||
"uuid": str(tf.uuid),
|
||||
"filename": tf.filename,
|
||||
"track": serializers.TrackSerializer(tf.track).data,
|
||||
"duration": tf.duration,
|
||||
"mimetype": tf.mimetype,
|
||||
"bitrate": tf.bitrate,
|
||||
"size": tf.size,
|
||||
"library": serializers.LibraryForOwnerSerializer(tf.library).data,
|
||||
"creation_date": tf.creation_date.isoformat().split("+")[0] + "Z",
|
||||
"uuid": str(upload.uuid),
|
||||
"filename": upload.filename,
|
||||
"track": serializers.TrackSerializer(upload.track).data,
|
||||
"duration": upload.duration,
|
||||
"mimetype": upload.mimetype,
|
||||
"bitrate": upload.bitrate,
|
||||
"size": upload.size,
|
||||
"library": serializers.LibraryForOwnerSerializer(upload.library).data,
|
||||
"creation_date": upload.creation_date.isoformat().split("+")[0] + "Z",
|
||||
"import_date": None,
|
||||
"import_status": "pending",
|
||||
}
|
||||
serializer = serializers.TrackFileSerializer(tf)
|
||||
serializer = serializers.UploadSerializer(upload)
|
||||
assert serializer.data == expected
|
||||
|
||||
|
||||
def test_track_file_owner_serializer(factories, to_api_date):
|
||||
tf = factories["music.TrackFile"](
|
||||
def test_upload_owner_serializer(factories, to_api_date):
|
||||
upload = factories["music.Upload"](
|
||||
import_status="success",
|
||||
import_details={"hello": "world"},
|
||||
import_metadata={"import": "metadata"},
|
||||
|
@ -95,15 +96,15 @@ def test_track_file_owner_serializer(factories, to_api_date):
|
|||
)
|
||||
|
||||
expected = {
|
||||
"uuid": str(tf.uuid),
|
||||
"filename": tf.filename,
|
||||
"track": serializers.TrackSerializer(tf.track).data,
|
||||
"duration": tf.duration,
|
||||
"mimetype": tf.mimetype,
|
||||
"bitrate": tf.bitrate,
|
||||
"size": tf.size,
|
||||
"library": serializers.LibraryForOwnerSerializer(tf.library).data,
|
||||
"creation_date": tf.creation_date.isoformat().split("+")[0] + "Z",
|
||||
"uuid": str(upload.uuid),
|
||||
"filename": upload.filename,
|
||||
"track": serializers.TrackSerializer(upload.track).data,
|
||||
"duration": upload.duration,
|
||||
"mimetype": upload.mimetype,
|
||||
"bitrate": upload.bitrate,
|
||||
"size": upload.size,
|
||||
"library": serializers.LibraryForOwnerSerializer(upload.library).data,
|
||||
"creation_date": upload.creation_date.isoformat().split("+")[0] + "Z",
|
||||
"metadata": {"test": "metadata"},
|
||||
"import_metadata": {"import": "metadata"},
|
||||
"import_date": None,
|
||||
|
@ -112,7 +113,7 @@ def test_track_file_owner_serializer(factories, to_api_date):
|
|||
"source": "upload://test",
|
||||
"import_reference": "ref",
|
||||
}
|
||||
serializer = serializers.TrackFileForOwnerSerializer(tf)
|
||||
serializer = serializers.UploadForOwnerSerializer(upload)
|
||||
assert serializer.data == expected
|
||||
|
||||
|
||||
|
@ -142,8 +143,8 @@ def test_album_serializer(factories, to_api_date):
|
|||
|
||||
|
||||
def test_track_serializer(factories, to_api_date):
|
||||
tf = factories["music.TrackFile"]()
|
||||
track = tf.track
|
||||
upload = factories["music.Upload"]()
|
||||
track = upload.track
|
||||
|
||||
expected = {
|
||||
"id": track.id,
|
||||
|
@ -156,6 +157,10 @@ def test_track_serializer(factories, to_api_date):
|
|||
"creation_date": to_api_date(track.creation_date),
|
||||
"lyrics": track.get_lyrics_url(),
|
||||
"listen_url": track.listen_url,
|
||||
"duration": None,
|
||||
"size": None,
|
||||
"bitrate": None,
|
||||
"mimetype": None,
|
||||
}
|
||||
serializer = serializers.TrackSerializer(track)
|
||||
assert serializer.data == expected
|
||||
|
@ -165,7 +170,7 @@ def test_user_cannot_bind_file_to_a_not_owned_library(factories):
|
|||
user = factories["users.User"]()
|
||||
library = factories["music.Library"]()
|
||||
|
||||
s = serializers.TrackFileForOwnerSerializer(
|
||||
s = serializers.UploadForOwnerSerializer(
|
||||
data={"library": library.uuid, "source": "upload://test"},
|
||||
context={"user": user},
|
||||
)
|
||||
|
@ -176,7 +181,7 @@ def test_user_cannot_bind_file_to_a_not_owned_library(factories):
|
|||
def test_user_can_create_file_in_own_library(factories, uploaded_audio_file):
|
||||
user = factories["users.User"]()
|
||||
library = factories["music.Library"](actor__user=user)
|
||||
s = serializers.TrackFileForOwnerSerializer(
|
||||
s = serializers.UploadForOwnerSerializer(
|
||||
data={
|
||||
"library": library.uuid,
|
||||
"source": "upload://test",
|
||||
|
@ -185,9 +190,9 @@ def test_user_can_create_file_in_own_library(factories, uploaded_audio_file):
|
|||
context={"user": user},
|
||||
)
|
||||
assert s.is_valid(raise_exception=True) is True
|
||||
tf = s.save()
|
||||
upload = s.save()
|
||||
|
||||
assert tf.library == library
|
||||
assert upload.library == library
|
||||
|
||||
|
||||
def test_create_file_checks_for_user_quota(
|
||||
|
@ -199,7 +204,7 @@ def test_create_file_checks_for_user_quota(
|
|||
)
|
||||
user = factories["users.User"]()
|
||||
library = factories["music.Library"](actor__user=user)
|
||||
s = serializers.TrackFileForOwnerSerializer(
|
||||
s = serializers.UploadForOwnerSerializer(
|
||||
data={
|
||||
"library": library.uuid,
|
||||
"source": "upload://test",
|
||||
|
@ -211,34 +216,46 @@ def test_create_file_checks_for_user_quota(
|
|||
assert s.errors["non_field_errors"] == ["upload_quota_reached"]
|
||||
|
||||
|
||||
def test_manage_track_file_action_delete(factories):
|
||||
tfs = factories["music.TrackFile"](size=5)
|
||||
s = serializers.TrackFileActionSerializer(queryset=None)
|
||||
def test_manage_upload_action_delete(factories, queryset_equal_list, mocker):
|
||||
dispatch = mocker.patch("funkwhale_api.federation.routes.outbox.dispatch")
|
||||
library1 = factories["music.Library"]()
|
||||
library2 = factories["music.Library"]()
|
||||
library1_uploads = factories["music.Upload"].create_batch(size=3, library=library1)
|
||||
library2_uploads = factories["music.Upload"].create_batch(size=3, library=library2)
|
||||
s = serializers.UploadActionSerializer(queryset=None)
|
||||
|
||||
s.handle_delete(tfs.__class__.objects.all())
|
||||
s.handle_delete(library1_uploads[0].__class__.objects.all())
|
||||
|
||||
assert tfs.__class__.objects.count() == 0
|
||||
assert library1_uploads[0].__class__.objects.count() == 0
|
||||
dispatch.assert_any_call(
|
||||
{"type": "Delete", "object": {"type": "Audio"}},
|
||||
context={"uploads": library1_uploads},
|
||||
)
|
||||
dispatch.assert_any_call(
|
||||
{"type": "Delete", "object": {"type": "Audio"}},
|
||||
context={"uploads": library2_uploads},
|
||||
)
|
||||
|
||||
|
||||
def test_manage_track_file_action_relaunch_import(factories, mocker):
|
||||
def test_manage_upload_action_relaunch_import(factories, mocker):
|
||||
m = mocker.patch("funkwhale_api.common.utils.on_commit")
|
||||
|
||||
# this one is finished and should stay as is
|
||||
finished = factories["music.TrackFile"](import_status="finished")
|
||||
finished = factories["music.Upload"](import_status="finished")
|
||||
|
||||
to_relaunch = [
|
||||
factories["music.TrackFile"](import_status="pending"),
|
||||
factories["music.TrackFile"](import_status="skipped"),
|
||||
factories["music.TrackFile"](import_status="errored"),
|
||||
factories["music.Upload"](import_status="pending"),
|
||||
factories["music.Upload"](import_status="skipped"),
|
||||
factories["music.Upload"](import_status="errored"),
|
||||
]
|
||||
s = serializers.TrackFileActionSerializer(queryset=None)
|
||||
s = serializers.UploadActionSerializer(queryset=None)
|
||||
|
||||
s.handle_relaunch_import(models.TrackFile.objects.all())
|
||||
s.handle_relaunch_import(models.Upload.objects.all())
|
||||
|
||||
for obj in to_relaunch:
|
||||
obj.refresh_from_db()
|
||||
assert obj.import_status == "pending"
|
||||
m.assert_any_call(tasks.import_track_file.delay, track_file_id=obj.pk)
|
||||
m.assert_any_call(tasks.import_upload.delay, upload_id=obj.pk)
|
||||
|
||||
finished.refresh_from_db()
|
||||
assert finished.import_status == "finished"
|
||||
|
|
|
@ -11,7 +11,7 @@ from funkwhale_api.music import signals, tasks
|
|||
DATA_DIR = os.path.dirname(os.path.abspath(__file__))
|
||||
|
||||
|
||||
# DATA_DIR = os.path.join(os.path.dirname(os.path.abspath(__file__)), "files")
|
||||
# DATA_DIR = os.path.join(os.path.dirname(os.path.abspath(__file__)), "uploads")
|
||||
|
||||
|
||||
def test_can_create_track_from_file_metadata_no_mbid(db, mocker):
|
||||
|
@ -89,64 +89,68 @@ def test_can_create_track_from_file_metadata_mbid(factories, mocker):
|
|||
assert track.artist == artist
|
||||
|
||||
|
||||
def test_track_file_import_mbid(now, factories, temp_signal):
|
||||
def test_upload_import_mbid(now, factories, temp_signal, mocker):
|
||||
outbox = mocker.patch("funkwhale_api.federation.routes.outbox.dispatch")
|
||||
track = factories["music.Track"]()
|
||||
tf = factories["music.TrackFile"](
|
||||
upload = factories["music.Upload"](
|
||||
track=None, import_metadata={"track": {"mbid": track.mbid}}
|
||||
)
|
||||
|
||||
with temp_signal(signals.track_file_import_status_updated) as handler:
|
||||
tasks.import_track_file(track_file_id=tf.pk)
|
||||
with temp_signal(signals.upload_import_status_updated) as handler:
|
||||
tasks.import_upload(upload_id=upload.pk)
|
||||
|
||||
tf.refresh_from_db()
|
||||
upload.refresh_from_db()
|
||||
|
||||
assert tf.track == track
|
||||
assert tf.import_status == "finished"
|
||||
assert tf.import_date == now
|
||||
assert upload.track == track
|
||||
assert upload.import_status == "finished"
|
||||
assert upload.import_date == now
|
||||
handler.assert_called_once_with(
|
||||
track_file=tf,
|
||||
upload=upload,
|
||||
old_status="pending",
|
||||
new_status="finished",
|
||||
sender=None,
|
||||
signal=signals.track_file_import_status_updated,
|
||||
signal=signals.upload_import_status_updated,
|
||||
)
|
||||
outbox.assert_called_once_with(
|
||||
{"type": "Create", "object": {"type": "Audio"}}, context={"upload": upload}
|
||||
)
|
||||
|
||||
|
||||
def test_track_file_import_get_audio_data(factories, mocker):
|
||||
def test_upload_import_get_audio_data(factories, mocker):
|
||||
mocker.patch(
|
||||
"funkwhale_api.music.models.TrackFile.get_audio_data",
|
||||
"funkwhale_api.music.models.Upload.get_audio_data",
|
||||
return_value={"size": 23, "duration": 42, "bitrate": 66},
|
||||
)
|
||||
track = factories["music.Track"]()
|
||||
tf = factories["music.TrackFile"](
|
||||
upload = factories["music.Upload"](
|
||||
track=None, import_metadata={"track": {"mbid": track.mbid}}
|
||||
)
|
||||
|
||||
tasks.import_track_file(track_file_id=tf.pk)
|
||||
tasks.import_upload(upload_id=upload.pk)
|
||||
|
||||
tf.refresh_from_db()
|
||||
assert tf.size == 23
|
||||
assert tf.duration == 42
|
||||
assert tf.bitrate == 66
|
||||
upload.refresh_from_db()
|
||||
assert upload.size == 23
|
||||
assert upload.duration == 42
|
||||
assert upload.bitrate == 66
|
||||
|
||||
|
||||
def test_track_file_import_skip_existing_track_in_own_library(factories, temp_signal):
|
||||
def test_upload_import_skip_existing_track_in_own_library(factories, temp_signal):
|
||||
track = factories["music.Track"]()
|
||||
library = factories["music.Library"]()
|
||||
existing = factories["music.TrackFile"](
|
||||
existing = factories["music.Upload"](
|
||||
track=track,
|
||||
import_status="finished",
|
||||
library=library,
|
||||
import_metadata={"track": {"mbid": track.mbid}},
|
||||
)
|
||||
duplicate = factories["music.TrackFile"](
|
||||
duplicate = factories["music.Upload"](
|
||||
track=track,
|
||||
import_status="pending",
|
||||
library=library,
|
||||
import_metadata={"track": {"mbid": track.mbid}},
|
||||
)
|
||||
with temp_signal(signals.track_file_import_status_updated) as handler:
|
||||
tasks.import_track_file(track_file_id=duplicate.pk)
|
||||
with temp_signal(signals.upload_import_status_updated) as handler:
|
||||
tasks.import_upload(upload_id=duplicate.pk)
|
||||
|
||||
duplicate.refresh_from_db()
|
||||
|
||||
|
@ -157,78 +161,80 @@ def test_track_file_import_skip_existing_track_in_own_library(factories, temp_si
|
|||
}
|
||||
|
||||
handler.assert_called_once_with(
|
||||
track_file=duplicate,
|
||||
upload=duplicate,
|
||||
old_status="pending",
|
||||
new_status="skipped",
|
||||
sender=None,
|
||||
signal=signals.track_file_import_status_updated,
|
||||
signal=signals.upload_import_status_updated,
|
||||
)
|
||||
|
||||
|
||||
def test_track_file_import_track_uuid(now, factories):
|
||||
def test_upload_import_track_uuid(now, factories):
|
||||
track = factories["music.Track"]()
|
||||
tf = factories["music.TrackFile"](
|
||||
upload = factories["music.Upload"](
|
||||
track=None, import_metadata={"track": {"uuid": track.uuid}}
|
||||
)
|
||||
|
||||
tasks.import_track_file(track_file_id=tf.pk)
|
||||
tasks.import_upload(upload_id=upload.pk)
|
||||
|
||||
tf.refresh_from_db()
|
||||
upload.refresh_from_db()
|
||||
|
||||
assert tf.track == track
|
||||
assert tf.import_status == "finished"
|
||||
assert tf.import_date == now
|
||||
assert upload.track == track
|
||||
assert upload.import_status == "finished"
|
||||
assert upload.import_date == now
|
||||
|
||||
|
||||
def test_track_file_import_error(factories, now, temp_signal):
|
||||
tf = factories["music.TrackFile"](import_metadata={"track": {"uuid": uuid.uuid4()}})
|
||||
with temp_signal(signals.track_file_import_status_updated) as handler:
|
||||
tasks.import_track_file(track_file_id=tf.pk)
|
||||
tf.refresh_from_db()
|
||||
def test_upload_import_error(factories, now, temp_signal):
|
||||
upload = factories["music.Upload"](
|
||||
import_metadata={"track": {"uuid": uuid.uuid4()}}
|
||||
)
|
||||
with temp_signal(signals.upload_import_status_updated) as handler:
|
||||
tasks.import_upload(upload_id=upload.pk)
|
||||
upload.refresh_from_db()
|
||||
|
||||
assert tf.import_status == "errored"
|
||||
assert tf.import_date == now
|
||||
assert tf.import_details == {"error_code": "track_uuid_not_found"}
|
||||
assert upload.import_status == "errored"
|
||||
assert upload.import_date == now
|
||||
assert upload.import_details == {"error_code": "track_uuid_not_found"}
|
||||
handler.assert_called_once_with(
|
||||
track_file=tf,
|
||||
upload=upload,
|
||||
old_status="pending",
|
||||
new_status="errored",
|
||||
sender=None,
|
||||
signal=signals.track_file_import_status_updated,
|
||||
signal=signals.upload_import_status_updated,
|
||||
)
|
||||
|
||||
|
||||
def test_track_file_import_updates_cover_if_no_cover(factories, mocker, now):
|
||||
def test_upload_import_updates_cover_if_no_cover(factories, mocker, now):
|
||||
mocked_update = mocker.patch("funkwhale_api.music.tasks.update_album_cover")
|
||||
album = factories["music.Album"](cover="")
|
||||
track = factories["music.Track"](album=album)
|
||||
tf = factories["music.TrackFile"](
|
||||
upload = factories["music.Upload"](
|
||||
track=None, import_metadata={"track": {"uuid": track.uuid}}
|
||||
)
|
||||
tasks.import_track_file(track_file_id=tf.pk)
|
||||
mocked_update.assert_called_once_with(album, tf)
|
||||
tasks.import_upload(upload_id=upload.pk)
|
||||
mocked_update.assert_called_once_with(album, upload)
|
||||
|
||||
|
||||
def test_update_album_cover_mbid(factories, mocker):
|
||||
album = factories["music.Album"](cover="")
|
||||
|
||||
mocked_get = mocker.patch("funkwhale_api.music.models.Album.get_image")
|
||||
tasks.update_album_cover(album=album, track_file=None)
|
||||
tasks.update_album_cover(album=album, upload=None)
|
||||
|
||||
mocked_get.assert_called_once_with()
|
||||
|
||||
|
||||
def test_update_album_cover_file_data(factories, mocker):
|
||||
album = factories["music.Album"](cover="", mbid=None)
|
||||
tf = factories["music.TrackFile"](track__album=album)
|
||||
upload = factories["music.Upload"](track__album=album)
|
||||
|
||||
mocked_get = mocker.patch("funkwhale_api.music.models.Album.get_image")
|
||||
mocker.patch(
|
||||
"funkwhale_api.music.metadata.Metadata.get_picture",
|
||||
return_value={"hello": "world"},
|
||||
)
|
||||
tasks.update_album_cover(album=album, track_file=tf)
|
||||
tf.get_metadata()
|
||||
tasks.update_album_cover(album=album, upload=upload)
|
||||
upload.get_metadata()
|
||||
mocked_get.assert_called_once_with(data={"hello": "world"})
|
||||
|
||||
|
||||
|
@ -239,12 +245,14 @@ def test_update_album_cover_file_cover_separate_file(ext, mimetype, factories, m
|
|||
with open(image_path, "rb") as f:
|
||||
image_content = f.read()
|
||||
album = factories["music.Album"](cover="", mbid=None)
|
||||
tf = factories["music.TrackFile"](track__album=album, source="file://" + image_path)
|
||||
upload = factories["music.Upload"](
|
||||
track__album=album, source="file://" + image_path
|
||||
)
|
||||
|
||||
mocked_get = mocker.patch("funkwhale_api.music.models.Album.get_image")
|
||||
mocker.patch("funkwhale_api.music.metadata.Metadata.get_picture", return_value=None)
|
||||
tasks.update_album_cover(album=album, track_file=tf)
|
||||
tf.get_metadata()
|
||||
tasks.update_album_cover(album=album, upload=upload)
|
||||
upload.get_metadata()
|
||||
mocked_get.assert_called_once_with(
|
||||
data={"mimetype": mimetype, "content": image_content}
|
||||
)
|
||||
|
@ -275,17 +283,23 @@ def test_scan_library_fetches_page_and_calls_scan_page(now, mocker, factories, r
|
|||
|
||||
def test_scan_page_fetches_page_and_creates_tracks(now, mocker, factories, r_mock):
|
||||
scan_page = mocker.patch("funkwhale_api.music.tasks.scan_library_page.delay")
|
||||
import_tf = mocker.patch("funkwhale_api.music.tasks.import_track_file.delay")
|
||||
scan = factories["music.LibraryScan"](status="scanning", total_files=5)
|
||||
tfs = factories["music.TrackFile"].build_batch(size=5, library=scan.library)
|
||||
for i, tf in enumerate(tfs):
|
||||
tf.fid = "https://track.test/{}".format(i)
|
||||
uploads = [
|
||||
factories["music.Upload"].build(
|
||||
fid="https://track.test/{}".format(i),
|
||||
size=42,
|
||||
bitrate=66,
|
||||
duration=99,
|
||||
library=scan.library,
|
||||
)
|
||||
for i in range(5)
|
||||
]
|
||||
|
||||
page_conf = {
|
||||
"actor": scan.library.actor,
|
||||
"id": scan.library.fid,
|
||||
"page": Paginator(tfs, 3).page(1),
|
||||
"item_serializer": federation_serializers.AudioSerializer,
|
||||
"page": Paginator(uploads, 3).page(1),
|
||||
"item_serializer": federation_serializers.UploadSerializer,
|
||||
}
|
||||
page = federation_serializers.CollectionPageSerializer(page_conf)
|
||||
r_mock.get(page.data["id"], json=page.data)
|
||||
|
@ -293,12 +307,11 @@ def test_scan_page_fetches_page_and_creates_tracks(now, mocker, factories, r_moc
|
|||
tasks.scan_library_page(library_scan_id=scan.pk, page_url=page.data["id"])
|
||||
|
||||
scan.refresh_from_db()
|
||||
lts = list(scan.library.files.all().order_by("-creation_date"))
|
||||
lts = list(scan.library.uploads.all().order_by("-creation_date"))
|
||||
|
||||
assert len(lts) == 3
|
||||
for tf in tfs[:3]:
|
||||
new_tf = scan.library.files.get(fid=tf.get_federation_id())
|
||||
import_tf.assert_any_call(track_file_id=new_tf.pk)
|
||||
for upload in uploads[:3]:
|
||||
scan.library.uploads.get(fid=upload.fid)
|
||||
|
||||
assert scan.status == "scanning"
|
||||
assert scan.processed_files == 3
|
||||
|
@ -312,12 +325,12 @@ def test_scan_page_fetches_page_and_creates_tracks(now, mocker, factories, r_moc
|
|||
def test_scan_page_trigger_next_page_scan_skip_if_same(mocker, factories, r_mock):
|
||||
patched_scan = mocker.patch("funkwhale_api.music.tasks.scan_library_page.delay")
|
||||
scan = factories["music.LibraryScan"](status="scanning", total_files=5)
|
||||
tfs = factories["music.TrackFile"].build_batch(size=5, library=scan.library)
|
||||
uploads = factories["music.Upload"].build_batch(size=5, library=scan.library)
|
||||
page_conf = {
|
||||
"actor": scan.library.actor,
|
||||
"id": scan.library.fid,
|
||||
"page": Paginator(tfs, 3).page(1),
|
||||
"item_serializer": federation_serializers.AudioSerializer,
|
||||
"page": Paginator(uploads, 3).page(1),
|
||||
"item_serializer": federation_serializers.UploadSerializer,
|
||||
}
|
||||
page = federation_serializers.CollectionPageSerializer(page_conf)
|
||||
data = page.data
|
||||
|
|
|
@ -9,7 +9,7 @@ DATA_DIR = os.path.dirname(os.path.abspath(__file__))
|
|||
|
||||
def test_guess_mimetype_try_using_extension(factories, mocker):
|
||||
mocker.patch("magic.from_buffer", return_value="audio/mpeg")
|
||||
f = factories["music.TrackFile"].build(audio_file__filename="test.ogg")
|
||||
f = factories["music.Upload"].build(audio_file__filename="test.ogg")
|
||||
|
||||
assert utils.guess_mimetype(f.audio_file) == "audio/mpeg"
|
||||
|
||||
|
@ -17,7 +17,7 @@ def test_guess_mimetype_try_using_extension(factories, mocker):
|
|||
@pytest.mark.parametrize("wrong", ["application/octet-stream", "application/x-empty"])
|
||||
def test_guess_mimetype_try_using_extension_if_fail(wrong, factories, mocker):
|
||||
mocker.patch("magic.from_buffer", return_value=wrong)
|
||||
f = factories["music.TrackFile"].build(audio_file__filename="test.mp3")
|
||||
f = factories["music.Upload"].build(audio_file__filename="test.mp3")
|
||||
|
||||
assert utils.guess_mimetype(f.audio_file) == "audio/mpeg"
|
||||
|
||||
|
|
|
@ -12,7 +12,7 @@ DATA_DIR = os.path.dirname(os.path.abspath(__file__))
|
|||
|
||||
|
||||
def test_artist_list_serializer(api_request, factories, logged_in_api_client):
|
||||
track = factories["music.TrackFile"](library__privacy_level="everyone").track
|
||||
track = factories["music.Upload"](library__privacy_level="everyone").track
|
||||
artist = track.artist
|
||||
request = api_request.get("/")
|
||||
qs = artist.__class__.objects.with_albums()
|
||||
|
@ -20,6 +20,9 @@ def test_artist_list_serializer(api_request, factories, logged_in_api_client):
|
|||
qs, many=True, context={"request": request}
|
||||
)
|
||||
expected = {"count": 1, "next": None, "previous": None, "results": serializer.data}
|
||||
for artist in serializer.data:
|
||||
for album in artist["albums"]:
|
||||
album["is_playable"] = True
|
||||
url = reverse("api:v1:artists-list")
|
||||
response = logged_in_api_client.get(url)
|
||||
|
||||
|
@ -28,7 +31,7 @@ def test_artist_list_serializer(api_request, factories, logged_in_api_client):
|
|||
|
||||
|
||||
def test_album_list_serializer(api_request, factories, logged_in_api_client):
|
||||
track = factories["music.TrackFile"](library__privacy_level="everyone").track
|
||||
track = factories["music.Upload"](library__privacy_level="everyone").track
|
||||
album = track.album
|
||||
request = api_request.get("/")
|
||||
qs = album.__class__.objects.all()
|
||||
|
@ -46,7 +49,7 @@ def test_album_list_serializer(api_request, factories, logged_in_api_client):
|
|||
|
||||
|
||||
def test_track_list_serializer(api_request, factories, logged_in_api_client):
|
||||
track = factories["music.TrackFile"](library__privacy_level="everyone").track
|
||||
track = factories["music.Upload"](library__privacy_level="everyone").track
|
||||
request = api_request.get("/")
|
||||
qs = track.__class__.objects.all()
|
||||
serializer = serializers.TrackSerializer(
|
||||
|
@ -65,7 +68,7 @@ def test_track_list_serializer(api_request, factories, logged_in_api_client):
|
|||
def test_artist_view_filter_playable(param, expected, factories, api_request):
|
||||
artists = {
|
||||
"empty": factories["music.Artist"](),
|
||||
"full": factories["music.TrackFile"](
|
||||
"full": factories["music.Upload"](
|
||||
library__privacy_level="everyone"
|
||||
).track.artist,
|
||||
}
|
||||
|
@ -84,7 +87,7 @@ def test_artist_view_filter_playable(param, expected, factories, api_request):
|
|||
def test_album_view_filter_playable(param, expected, factories, api_request):
|
||||
artists = {
|
||||
"empty": factories["music.Album"](),
|
||||
"full": factories["music.TrackFile"](
|
||||
"full": factories["music.Upload"](
|
||||
library__privacy_level="everyone"
|
||||
).track.album,
|
||||
}
|
||||
|
@ -99,32 +102,32 @@ def test_album_view_filter_playable(param, expected, factories, api_request):
|
|||
assert list(queryset) == expected
|
||||
|
||||
|
||||
def test_can_serve_track_file_as_remote_library(
|
||||
def test_can_serve_upload_as_remote_library(
|
||||
factories, authenticated_actor, logged_in_api_client, settings, preferences
|
||||
):
|
||||
preferences["common__api_authentication_required"] = True
|
||||
track_file = factories["music.TrackFile"](library__privacy_level="everyone")
|
||||
library_actor = track_file.library.actor
|
||||
upload = factories["music.Upload"](library__privacy_level="everyone")
|
||||
library_actor = upload.library.actor
|
||||
factories["federation.Follow"](
|
||||
approved=True, actor=authenticated_actor, target=library_actor
|
||||
)
|
||||
|
||||
response = logged_in_api_client.get(track_file.track.listen_url)
|
||||
response = logged_in_api_client.get(upload.track.listen_url)
|
||||
|
||||
assert response.status_code == 200
|
||||
assert response["X-Accel-Redirect"] == "{}{}".format(
|
||||
settings.PROTECT_FILES_PATH, track_file.audio_file.url
|
||||
settings.PROTECT_FILES_PATH, upload.audio_file.url
|
||||
)
|
||||
|
||||
|
||||
def test_can_serve_track_file_as_remote_library_deny_not_following(
|
||||
def test_can_serve_upload_as_remote_library_deny_not_following(
|
||||
factories, authenticated_actor, settings, api_client, preferences
|
||||
):
|
||||
preferences["common__api_authentication_required"] = True
|
||||
track_file = factories["music.TrackFile"](library__privacy_level="everyone")
|
||||
response = api_client.get(track_file.track.listen_url)
|
||||
upload = factories["music.Upload"](library__privacy_level="instance")
|
||||
response = api_client.get(upload.track.listen_url)
|
||||
|
||||
assert response.status_code == 403
|
||||
assert response.status_code == 404
|
||||
|
||||
|
||||
@pytest.mark.parametrize(
|
||||
|
@ -145,12 +148,12 @@ def test_serve_file_in_place(
|
|||
settings.REVERSE_PROXY_TYPE = proxy
|
||||
settings.MUSIC_DIRECTORY_PATH = "/app/music"
|
||||
settings.MUSIC_DIRECTORY_SERVE_PATH = serve_path
|
||||
tf = factories["music.TrackFile"](
|
||||
upload = factories["music.Upload"](
|
||||
in_place=True,
|
||||
source="file:///app/music/hello/world.mp3",
|
||||
library__privacy_level="everyone",
|
||||
)
|
||||
response = api_client.get(tf.track.listen_url)
|
||||
response = api_client.get(upload.track.listen_url)
|
||||
|
||||
assert response.status_code == 200
|
||||
assert response[headers[proxy]] == expected
|
||||
|
@ -199,9 +202,11 @@ def test_serve_file_media(
|
|||
settings.MUSIC_DIRECTORY_PATH = "/app/music"
|
||||
settings.MUSIC_DIRECTORY_SERVE_PATH = serve_path
|
||||
|
||||
tf = factories["music.TrackFile"](library__privacy_level="everyone")
|
||||
tf.__class__.objects.filter(pk=tf.pk).update(audio_file="tracks/hello/world.mp3")
|
||||
response = api_client.get(tf.track.listen_url)
|
||||
upload = factories["music.Upload"](library__privacy_level="everyone")
|
||||
upload.__class__.objects.filter(pk=upload.pk).update(
|
||||
audio_file="tracks/hello/world.mp3"
|
||||
)
|
||||
response = api_client.get(upload.track.listen_url)
|
||||
|
||||
assert response.status_code == 200
|
||||
assert response[headers[proxy]] == expected
|
||||
|
@ -210,32 +215,32 @@ def test_serve_file_media(
|
|||
def test_can_proxy_remote_track(factories, settings, api_client, r_mock, preferences):
|
||||
preferences["common__api_authentication_required"] = False
|
||||
url = "https://file.test"
|
||||
track_file = factories["music.TrackFile"](
|
||||
upload = factories["music.Upload"](
|
||||
library__privacy_level="everyone", audio_file="", source=url
|
||||
)
|
||||
|
||||
r_mock.get(url, body=io.BytesIO(b"test"))
|
||||
response = api_client.get(track_file.track.listen_url)
|
||||
track_file.refresh_from_db()
|
||||
response = api_client.get(upload.track.listen_url)
|
||||
upload.refresh_from_db()
|
||||
|
||||
assert response.status_code == 200
|
||||
assert response["X-Accel-Redirect"] == "{}{}".format(
|
||||
settings.PROTECT_FILES_PATH, track_file.audio_file.url
|
||||
settings.PROTECT_FILES_PATH, upload.audio_file.url
|
||||
)
|
||||
assert track_file.audio_file.read() == b"test"
|
||||
assert upload.audio_file.read() == b"test"
|
||||
|
||||
|
||||
def test_serve_updates_access_date(factories, settings, api_client, preferences):
|
||||
preferences["common__api_authentication_required"] = False
|
||||
track_file = factories["music.TrackFile"](library__privacy_level="everyone")
|
||||
upload = factories["music.Upload"](library__privacy_level="everyone")
|
||||
now = timezone.now()
|
||||
assert track_file.accessed_date is None
|
||||
assert upload.accessed_date is None
|
||||
|
||||
response = api_client.get(track_file.track.listen_url)
|
||||
track_file.refresh_from_db()
|
||||
response = api_client.get(upload.track.listen_url)
|
||||
upload.refresh_from_db()
|
||||
|
||||
assert response.status_code == 200
|
||||
assert track_file.accessed_date > now
|
||||
assert upload.accessed_date > now
|
||||
|
||||
|
||||
def test_listen_no_track(factories, logged_in_api_client):
|
||||
|
@ -254,8 +259,8 @@ def test_listen_no_file(factories, logged_in_api_client):
|
|||
|
||||
|
||||
def test_listen_no_available_file(factories, logged_in_api_client):
|
||||
tf = factories["music.TrackFile"]()
|
||||
url = reverse("api:v1:listen-detail", kwargs={"uuid": tf.track.uuid})
|
||||
upload = factories["music.Upload"]()
|
||||
url = reverse("api:v1:listen-detail", kwargs={"uuid": upload.track.uuid})
|
||||
response = logged_in_api_client.get(url)
|
||||
|
||||
assert response.status_code == 404
|
||||
|
@ -263,10 +268,10 @@ def test_listen_no_available_file(factories, logged_in_api_client):
|
|||
|
||||
def test_listen_correct_access(factories, logged_in_api_client):
|
||||
logged_in_api_client.user.create_actor()
|
||||
tf = factories["music.TrackFile"](
|
||||
upload = factories["music.Upload"](
|
||||
library__actor=logged_in_api_client.user.actor, library__privacy_level="me"
|
||||
)
|
||||
url = reverse("api:v1:listen-detail", kwargs={"uuid": tf.track.uuid})
|
||||
url = reverse("api:v1:listen-detail", kwargs={"uuid": upload.track.uuid})
|
||||
response = logged_in_api_client.get(url)
|
||||
|
||||
assert response.status_code == 200
|
||||
|
@ -274,15 +279,15 @@ def test_listen_correct_access(factories, logged_in_api_client):
|
|||
|
||||
def test_listen_explicit_file(factories, logged_in_api_client, mocker):
|
||||
mocked_serve = mocker.spy(views, "handle_serve")
|
||||
tf1 = factories["music.TrackFile"](library__privacy_level="everyone")
|
||||
tf2 = factories["music.TrackFile"](
|
||||
library__privacy_level="everyone", track=tf1.track
|
||||
upload1 = factories["music.Upload"](library__privacy_level="everyone")
|
||||
upload2 = factories["music.Upload"](
|
||||
library__privacy_level="everyone", track=upload1.track
|
||||
)
|
||||
url = reverse("api:v1:listen-detail", kwargs={"uuid": tf2.track.uuid})
|
||||
response = logged_in_api_client.get(url, {"file": tf2.uuid})
|
||||
url = reverse("api:v1:listen-detail", kwargs={"uuid": upload2.track.uuid})
|
||||
response = logged_in_api_client.get(url, {"upload": upload2.uuid})
|
||||
|
||||
assert response.status_code == 200
|
||||
mocked_serve.assert_called_once_with(tf2, user=logged_in_api_client.user)
|
||||
mocked_serve.assert_called_once_with(upload2, user=logged_in_api_client.user)
|
||||
|
||||
|
||||
def test_user_can_create_library(factories, logged_in_api_client):
|
||||
|
@ -327,42 +332,60 @@ def test_user_cannot_delete_other_actors_library(factories, logged_in_api_client
|
|||
assert response.status_code == 404
|
||||
|
||||
|
||||
def test_user_cannot_get_other_actors_files(factories, logged_in_api_client):
|
||||
logged_in_api_client.user.create_actor()
|
||||
track_file = factories["music.TrackFile"]()
|
||||
def test_library_delete_via_api_triggers_outbox(factories, mocker):
|
||||
dispatch = mocker.patch("funkwhale_api.federation.routes.outbox.dispatch")
|
||||
library = factories["music.Library"]()
|
||||
view = views.LibraryViewSet()
|
||||
view.perform_destroy(library)
|
||||
dispatch.assert_called_once_with(
|
||||
{"type": "Delete", "object": {"type": "Library"}}, context={"library": library}
|
||||
)
|
||||
|
||||
url = reverse("api:v1:trackfiles-detail", kwargs={"uuid": track_file.uuid})
|
||||
|
||||
def test_user_cannot_get_other_actors_uploads(factories, logged_in_api_client):
|
||||
logged_in_api_client.user.create_actor()
|
||||
upload = factories["music.Upload"]()
|
||||
|
||||
url = reverse("api:v1:uploads-detail", kwargs={"uuid": upload.uuid})
|
||||
response = logged_in_api_client.get(url)
|
||||
|
||||
assert response.status_code == 404
|
||||
|
||||
|
||||
def test_user_cannot_delete_other_actors_files(factories, logged_in_api_client):
|
||||
def test_user_cannot_delete_other_actors_uploads(factories, logged_in_api_client):
|
||||
logged_in_api_client.user.create_actor()
|
||||
track_file = factories["music.TrackFile"]()
|
||||
upload = factories["music.Upload"]()
|
||||
|
||||
url = reverse("api:v1:trackfiles-detail", kwargs={"uuid": track_file.uuid})
|
||||
url = reverse("api:v1:uploads-detail", kwargs={"uuid": upload.uuid})
|
||||
response = logged_in_api_client.delete(url)
|
||||
|
||||
assert response.status_code == 404
|
||||
|
||||
|
||||
def test_user_cannot_list_other_actors_files(factories, logged_in_api_client):
|
||||
logged_in_api_client.user.create_actor()
|
||||
factories["music.TrackFile"]()
|
||||
def test_upload_delete_via_api_triggers_outbox(factories, mocker):
|
||||
dispatch = mocker.patch("funkwhale_api.federation.routes.outbox.dispatch")
|
||||
upload = factories["music.Upload"]()
|
||||
view = views.UploadViewSet()
|
||||
view.perform_destroy(upload)
|
||||
dispatch.assert_called_once_with(
|
||||
{"type": "Delete", "object": {"type": "Audio"}}, context={"uploads": [upload]}
|
||||
)
|
||||
|
||||
url = reverse("api:v1:trackfiles-list")
|
||||
|
||||
def test_user_cannot_list_other_actors_uploads(factories, logged_in_api_client):
|
||||
logged_in_api_client.user.create_actor()
|
||||
factories["music.Upload"]()
|
||||
|
||||
url = reverse("api:v1:uploads-list")
|
||||
response = logged_in_api_client.get(url)
|
||||
|
||||
assert response.status_code == 200
|
||||
assert response.data["count"] == 0
|
||||
|
||||
|
||||
def test_user_can_create_track_file(
|
||||
logged_in_api_client, factories, mocker, audio_file
|
||||
):
|
||||
def test_user_can_create_upload(logged_in_api_client, factories, mocker, audio_file):
|
||||
library = factories["music.Library"](actor__user=logged_in_api_client.user)
|
||||
url = reverse("api:v1:trackfiles-list")
|
||||
url = reverse("api:v1:uploads-list")
|
||||
m = mocker.patch("funkwhale_api.common.utils.on_commit")
|
||||
|
||||
response = logged_in_api_client.post(
|
||||
|
@ -377,14 +400,14 @@ def test_user_can_create_track_file(
|
|||
|
||||
assert response.status_code == 201
|
||||
|
||||
tf = library.files.latest("id")
|
||||
upload = library.uploads.latest("id")
|
||||
|
||||
audio_file.seek(0)
|
||||
assert tf.audio_file.read() == audio_file.read()
|
||||
assert tf.source == "upload://test"
|
||||
assert tf.import_reference == "test"
|
||||
assert tf.track is None
|
||||
m.assert_called_once_with(tasks.import_track_file.delay, track_file_id=tf.pk)
|
||||
assert upload.audio_file.read() == audio_file.read()
|
||||
assert upload.source == "upload://test"
|
||||
assert upload.import_reference == "test"
|
||||
assert upload.track is None
|
||||
m.assert_called_once_with(tasks.import_upload.delay, upload_id=upload.pk)
|
||||
|
||||
|
||||
def test_user_can_list_own_library_follows(factories, logged_in_api_client):
|
||||
|
|
|
@ -93,9 +93,9 @@ def test_playlist_serializer_include_covers(factories, api_request):
|
|||
|
||||
def test_playlist_serializer_include_duration(factories, api_request):
|
||||
playlist = factories["playlists.Playlist"]()
|
||||
tf1 = factories["music.TrackFile"](duration=15)
|
||||
tf2 = factories["music.TrackFile"](duration=30)
|
||||
playlist.insert_many([tf1.track, tf2.track])
|
||||
upload1 = factories["music.Upload"](duration=15)
|
||||
upload2 = factories["music.Upload"](duration=30)
|
||||
playlist.insert_many([upload1.track, upload2.track])
|
||||
qs = playlist.__class__.objects.with_duration().with_tracks_count()
|
||||
|
||||
serializer = serializers.PlaylistSerializer(qs.get())
|
||||
|
|
|
@ -48,7 +48,7 @@ def test_can_pick_by_weight():
|
|||
|
||||
|
||||
def test_can_get_choices_for_favorites_radio(factories):
|
||||
files = factories["music.TrackFile"].create_batch(10)
|
||||
files = factories["music.Upload"].create_batch(10)
|
||||
tracks = [f.track for f in files]
|
||||
user = factories["users.User"]()
|
||||
for i in range(5):
|
||||
|
@ -69,9 +69,9 @@ def test_can_get_choices_for_favorites_radio(factories):
|
|||
|
||||
def test_can_get_choices_for_custom_radio(factories):
|
||||
artist = factories["music.Artist"]()
|
||||
files = factories["music.TrackFile"].create_batch(5, track__artist=artist)
|
||||
files = factories["music.Upload"].create_batch(5, track__artist=artist)
|
||||
tracks = [f.track for f in files]
|
||||
factories["music.TrackFile"].create_batch(5)
|
||||
factories["music.Upload"].create_batch(5)
|
||||
|
||||
session = factories["radios.CustomRadioSession"](
|
||||
custom_radio__config=[{"type": "artist", "ids": [artist.pk]}]
|
||||
|
@ -110,19 +110,19 @@ def test_can_start_custom_radio_from_api(logged_in_client, factories):
|
|||
|
||||
|
||||
def test_can_use_radio_session_to_filter_choices(factories):
|
||||
factories["music.TrackFile"].create_batch(30)
|
||||
factories["music.Upload"].create_batch(10)
|
||||
user = factories["users.User"]()
|
||||
radio = radios.RandomRadio()
|
||||
session = radio.start_session(user)
|
||||
|
||||
for i in range(30):
|
||||
for i in range(10):
|
||||
radio.pick()
|
||||
|
||||
# ensure 30 differents tracks have been suggested
|
||||
# ensure 10 differents tracks have been suggested
|
||||
tracks_id = [
|
||||
session_track.track.pk for session_track in session.session_tracks.all()
|
||||
]
|
||||
assert len(set(tracks_id)) == 30
|
||||
assert len(set(tracks_id)) == 10
|
||||
|
||||
|
||||
def test_can_restore_radio_from_previous_session(factories):
|
||||
|
@ -143,7 +143,7 @@ def test_can_start_radio_for_logged_in_user(logged_in_client):
|
|||
|
||||
|
||||
def test_can_get_track_for_session_from_api(factories, logged_in_client):
|
||||
files = factories["music.TrackFile"].create_batch(1)
|
||||
files = factories["music.Upload"].create_batch(1)
|
||||
tracks = [f.track for f in files]
|
||||
url = reverse("api:v1:radios:sessions-list")
|
||||
response = logged_in_client.post(url, {"radio_type": "random"})
|
||||
|
@ -156,7 +156,7 @@ def test_can_get_track_for_session_from_api(factories, logged_in_client):
|
|||
assert data["track"]["id"] == tracks[0].id
|
||||
assert data["position"] == 1
|
||||
|
||||
next_track = factories["music.TrackFile"]().track
|
||||
next_track = factories["music.Upload"]().track
|
||||
response = logged_in_client.post(url, {"session": session.pk})
|
||||
data = json.loads(response.content.decode("utf-8"))
|
||||
|
||||
|
@ -180,8 +180,8 @@ def test_related_object_radio_validate_related_object(factories):
|
|||
def test_can_start_artist_radio(factories):
|
||||
user = factories["users.User"]()
|
||||
artist = factories["music.Artist"]()
|
||||
factories["music.TrackFile"].create_batch(5)
|
||||
good_files = factories["music.TrackFile"].create_batch(5, track__artist=artist)
|
||||
factories["music.Upload"].create_batch(5)
|
||||
good_files = factories["music.Upload"].create_batch(5, track__artist=artist)
|
||||
good_tracks = [f.track for f in good_files]
|
||||
|
||||
radio = radios.ArtistRadio()
|
||||
|
@ -194,8 +194,8 @@ def test_can_start_artist_radio(factories):
|
|||
def test_can_start_tag_radio(factories):
|
||||
user = factories["users.User"]()
|
||||
tag = factories["taggit.Tag"]()
|
||||
factories["music.TrackFile"].create_batch(5)
|
||||
good_files = factories["music.TrackFile"].create_batch(5, track__tags=[tag])
|
||||
factories["music.Upload"].create_batch(5)
|
||||
good_files = factories["music.Upload"].create_batch(5, track__tags=[tag])
|
||||
good_tracks = [f.track for f in good_files]
|
||||
|
||||
radio = radios.TagRadio()
|
||||
|
@ -223,10 +223,10 @@ def test_can_start_artist_radio_from_api(logged_in_api_client, preferences, fact
|
|||
|
||||
def test_can_start_less_listened_radio(factories):
|
||||
user = factories["users.User"]()
|
||||
wrong_files = factories["music.TrackFile"].create_batch(5)
|
||||
wrong_files = factories["music.Upload"].create_batch(5)
|
||||
for f in wrong_files:
|
||||
factories["history.Listening"](track=f.track, user=user)
|
||||
good_files = factories["music.TrackFile"].create_batch(5)
|
||||
good_files = factories["music.Upload"].create_batch(5)
|
||||
good_tracks = [f.track for f in good_files]
|
||||
radio = radios.LessListenedRadio()
|
||||
radio.start_session(user)
|
||||
|
|
|
@ -65,7 +65,7 @@ def test_get_album_serializer(factories):
|
|||
artist = factories["music.Artist"]()
|
||||
album = factories["music.Album"](artist=artist)
|
||||
track = factories["music.Track"](album=album)
|
||||
tf = factories["music.TrackFile"](track=track, bitrate=42000, duration=43, size=44)
|
||||
upload = factories["music.Upload"](track=track, bitrate=42000, duration=43, size=44)
|
||||
|
||||
expected = {
|
||||
"id": album.pk,
|
||||
|
@ -86,8 +86,8 @@ def test_get_album_serializer(factories):
|
|||
"artist": artist.name,
|
||||
"track": track.position,
|
||||
"year": track.album.release_date.year,
|
||||
"contentType": tf.mimetype,
|
||||
"suffix": tf.extension or "",
|
||||
"contentType": upload.mimetype,
|
||||
"suffix": upload.extension or "",
|
||||
"bitrate": 42,
|
||||
"duration": 43,
|
||||
"size": 44,
|
||||
|
@ -106,9 +106,9 @@ def test_starred_tracks2_serializer(factories):
|
|||
artist = factories["music.Artist"]()
|
||||
album = factories["music.Album"](artist=artist)
|
||||
track = factories["music.Track"](album=album)
|
||||
tf = factories["music.TrackFile"](track=track)
|
||||
upload = factories["music.Upload"](track=track)
|
||||
favorite = factories["favorites.TrackFavorite"](track=track)
|
||||
expected = [serializers.get_track_data(album, track, tf)]
|
||||
expected = [serializers.get_track_data(album, track, upload)]
|
||||
expected[0]["starred"] = favorite.creation_date
|
||||
data = serializers.get_starred_tracks_data([favorite])
|
||||
assert data == expected
|
||||
|
@ -147,7 +147,7 @@ def test_playlist_serializer(factories):
|
|||
|
||||
def test_playlist_detail_serializer(factories):
|
||||
plt = factories["playlists.PlaylistTrack"]()
|
||||
tf = factories["music.TrackFile"](track=plt.track)
|
||||
upload = factories["music.Upload"](track=plt.track)
|
||||
playlist = plt.playlist
|
||||
qs = music_models.Album.objects.with_tracks_count().order_by("pk")
|
||||
expected = {
|
||||
|
@ -158,7 +158,7 @@ def test_playlist_detail_serializer(factories):
|
|||
"songCount": 1,
|
||||
"duration": 0,
|
||||
"created": playlist.creation_date,
|
||||
"entry": [serializers.get_track_data(plt.track.album, plt.track, tf)],
|
||||
"entry": [serializers.get_track_data(plt.track.album, plt.track, upload)],
|
||||
}
|
||||
qs = playlist.__class__.objects.with_tracks_count()
|
||||
data = serializers.get_playlist_detail_data(qs.first())
|
||||
|
@ -167,7 +167,7 @@ def test_playlist_detail_serializer(factories):
|
|||
|
||||
def test_directory_serializer_artist(factories):
|
||||
track = factories["music.Track"]()
|
||||
tf = factories["music.TrackFile"](track=track, bitrate=42000, duration=43, size=44)
|
||||
upload = factories["music.Upload"](track=track, bitrate=42000, duration=43, size=44)
|
||||
album = track.album
|
||||
artist = track.artist
|
||||
|
||||
|
@ -184,8 +184,8 @@ def test_directory_serializer_artist(factories):
|
|||
"artist": artist.name,
|
||||
"track": track.position,
|
||||
"year": track.album.release_date.year,
|
||||
"contentType": tf.mimetype,
|
||||
"suffix": tf.extension or "",
|
||||
"contentType": upload.mimetype,
|
||||
"suffix": upload.extension or "",
|
||||
"bitrate": 42,
|
||||
"duration": 43,
|
||||
"size": 44,
|
||||
|
@ -202,8 +202,8 @@ def test_directory_serializer_artist(factories):
|
|||
|
||||
|
||||
def test_scrobble_serializer(factories):
|
||||
tf = factories["music.TrackFile"]()
|
||||
track = tf.track
|
||||
upload = factories["music.Upload"]()
|
||||
track = upload.track
|
||||
user = factories["users.User"]()
|
||||
payload = {"id": track.pk, "submission": True}
|
||||
serializer = serializers.ScrobbleSerializer(data=payload, context={"user": user})
|
||||
|
|
|
@ -147,11 +147,13 @@ def test_get_song(f, db, logged_in_api_client, factories):
|
|||
artist = factories["music.Artist"]()
|
||||
album = factories["music.Album"](artist=artist)
|
||||
track = factories["music.Track"](album=album)
|
||||
tf = factories["music.TrackFile"](track=track)
|
||||
upload = factories["music.Upload"](track=track)
|
||||
response = logged_in_api_client.get(url, {"f": f, "id": track.pk})
|
||||
|
||||
assert response.status_code == 200
|
||||
assert response.data == {"song": serializers.get_track_data(track.album, track, tf)}
|
||||
assert response.data == {
|
||||
"song": serializers.get_track_data(track.album, track, upload)
|
||||
}
|
||||
|
||||
|
||||
@pytest.mark.parametrize("f", ["xml", "json"])
|
||||
|
@ -162,10 +164,10 @@ def test_stream(f, db, logged_in_api_client, factories, mocker):
|
|||
artist = factories["music.Artist"]()
|
||||
album = factories["music.Album"](artist=artist)
|
||||
track = factories["music.Track"](album=album)
|
||||
tf = factories["music.TrackFile"](track=track)
|
||||
upload = factories["music.Upload"](track=track)
|
||||
response = logged_in_api_client.get(url, {"f": f, "id": track.pk})
|
||||
|
||||
mocked_serve.assert_called_once_with(track_file=tf, user=logged_in_api_client.user)
|
||||
mocked_serve.assert_called_once_with(upload=upload, user=logged_in_api_client.user)
|
||||
assert response.status_code == 200
|
||||
|
||||
|
||||
|
@ -412,8 +414,8 @@ def test_get_cover_art_album(factories, logged_in_api_client):
|
|||
|
||||
|
||||
def test_scrobble(factories, logged_in_api_client):
|
||||
tf = factories["music.TrackFile"]()
|
||||
track = tf.track
|
||||
upload = factories["music.Upload"]()
|
||||
track = upload.track
|
||||
url = reverse("api:subsonic-scrobble")
|
||||
assert url.endswith("scrobble") is True
|
||||
response = logged_in_api_client.get(url, {"id": track.pk, "submission": True})
|
||||
|
|
|
@ -6,9 +6,10 @@ from django.core.management.base import CommandError
|
|||
|
||||
from funkwhale_api.music.models import ImportJob
|
||||
|
||||
DATA_DIR = os.path.join(os.path.dirname(os.path.abspath(__file__)), "files")
|
||||
DATA_DIR = os.path.join(os.path.dirname(os.path.abspath(__file__)), "uploads")
|
||||
|
||||
|
||||
@pytest.mark.skip("XXX : wip")
|
||||
def test_management_command_requires_a_valid_username(factories, mocker):
|
||||
path = os.path.join(DATA_DIR, "dummy_file.ogg")
|
||||
factories["users.User"](username="me")
|
||||
|
@ -31,6 +32,7 @@ def test_in_place_import_only_from_music_dir(factories, settings):
|
|||
)
|
||||
|
||||
|
||||
@pytest.mark.skip("XXX : wip")
|
||||
def test_import_with_multiple_argument(factories, mocker):
|
||||
factories["users.User"](username="me")
|
||||
path1 = os.path.join(DATA_DIR, "dummy_file.ogg")
|
||||
|
@ -78,10 +80,11 @@ def test_import_files_creates_a_batch_and_job(factories, mocker):
|
|||
m.assert_called_once_with(import_job_id=job.pk, use_acoustid=False)
|
||||
|
||||
|
||||
@pytest.mark.skip("XXX : wip")
|
||||
def test_import_files_skip_if_path_already_imported(factories, mocker):
|
||||
user = factories["users.User"](username="me")
|
||||
path = os.path.join(DATA_DIR, "dummy_file.ogg")
|
||||
factories["music.TrackFile"](source="file://{}".format(path))
|
||||
factories["music.Upload"](source="file://{}".format(path))
|
||||
|
||||
call_command("import_files", path, username="me", async=False, interactive=False)
|
||||
assert user.imports.count() == 0
|
||||
|
@ -119,5 +122,5 @@ def test_import_files_in_place(factories, mocker, settings):
|
|||
|
||||
|
||||
def test_storage_rename_utf_8_files(factories):
|
||||
tf = factories["music.TrackFile"](audio_file__filename="été.ogg")
|
||||
assert tf.audio_file.name.endswith("ete.ogg")
|
||||
upload = factories["music.Upload"](audio_file__filename="été.ogg")
|
||||
assert upload.audio_file.name.endswith("ete.ogg")
|
||||
|
|
|
@ -148,10 +148,7 @@ def test_creating_actor_from_user(factories, settings):
|
|||
)
|
||||
)
|
||||
assert actor.shared_inbox_url == federation_utils.full_url(
|
||||
reverse(
|
||||
"federation:actors-inbox",
|
||||
kwargs={"preferred_username": actor.preferred_username},
|
||||
)
|
||||
reverse("federation:shared-inbox")
|
||||
)
|
||||
assert actor.inbox_url == federation_utils.full_url(
|
||||
reverse(
|
||||
|
@ -165,6 +162,18 @@ def test_creating_actor_from_user(factories, settings):
|
|||
kwargs={"preferred_username": actor.preferred_username},
|
||||
)
|
||||
)
|
||||
assert actor.followers_url == federation_utils.full_url(
|
||||
reverse(
|
||||
"federation:actors-followers",
|
||||
kwargs={"preferred_username": actor.preferred_username},
|
||||
)
|
||||
)
|
||||
assert actor.following_url == federation_utils.full_url(
|
||||
reverse(
|
||||
"federation:actors-following",
|
||||
kwargs={"preferred_username": actor.preferred_username},
|
||||
)
|
||||
)
|
||||
|
||||
|
||||
def test_get_channels_groups(factories):
|
||||
|
|
1
dev.yml
1
dev.yml
|
@ -8,6 +8,7 @@ services:
|
|||
- .env
|
||||
environment:
|
||||
- "HOST=0.0.0.0"
|
||||
- "VUE_PORT=${VUE_PORT-8080}"
|
||||
ports:
|
||||
- "${VUE_PORT-8080}:${VUE_PORT-8080}"
|
||||
volumes:
|
||||
|
|
|
@ -202,7 +202,7 @@ similar issues before doing that, and use the issue tracker only to report bugs,
|
|||
If you ever need to share screenshots or urls with someone else, ensure those do not include your personnal token.
|
||||
This token is binded to your account and can be used to connect and use your account.
|
||||
|
||||
Urls that includes your token looks like: ``https://your.instance/api/v1/trackfiles/42/serve/?jwt=yoursecrettoken``
|
||||
Urls that includes your token looks like: ``https://your.instance/api/v1/uploads/42/serve/?jwt=yoursecrettoken``
|
||||
|
||||
Improving this documentation
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
|
|
|
@ -34,8 +34,8 @@
|
|||
{{ track.album.title }}
|
||||
</router-link>
|
||||
</td>
|
||||
<td colspan="4" v-if="file && file.duration">
|
||||
{{ time.parse(file.duration) }}
|
||||
<td colspan="4" v-if="track.duration">
|
||||
{{ time.parse(track.duration) }}
|
||||
</td>
|
||||
<td colspan="4" v-else>
|
||||
<translate>N/A</translate>
|
||||
|
@ -79,9 +79,6 @@ export default {
|
|||
} else {
|
||||
return this.track.album.artist
|
||||
}
|
||||
},
|
||||
file () {
|
||||
return this.track.files[0]
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@ -23,7 +23,7 @@
|
|||
</h2>
|
||||
<div class="ui hidden divider"></div>
|
||||
<radio-button type="artist" :object-id="artist.id"></radio-button>
|
||||
<play-button class="orange" :artist="artist.id">
|
||||
<play-button :is-playable="isPlayable" class="orange" :artist="artist.id">
|
||||
<translate>Play all albums</translate>
|
||||
</play-button>
|
||||
|
||||
|
@ -135,6 +135,11 @@ export default {
|
|||
return a + b
|
||||
}) + this.tracks.length
|
||||
},
|
||||
isPlayable () {
|
||||
return this.artist.albums.filter((a) => {
|
||||
return a.is_playable
|
||||
}).length > 0
|
||||
},
|
||||
wikipediaUrl () {
|
||||
return 'https://en.wikipedia.org/w/index.php?search=' + this.artist.name
|
||||
},
|
||||
|
|
|
@ -22,7 +22,7 @@
|
|||
<div v-else-if="processableFiles > processedFilesCount" class="ui yellow label">
|
||||
{{ processedFilesCount }}/{{ processableFiles }}
|
||||
</div>
|
||||
<div v-else :class="['ui', {'green': trackFiles.errored === 0}, {'red': trackFiles.errored > 0}, 'label']">
|
||||
<div v-else :class="['ui', {'green': uploads.errored === 0}, {'red': uploads.errored > 0}, 'label']">
|
||||
{{ processedFilesCount }}/{{ processableFiles }}
|
||||
</div>
|
||||
</a>
|
||||
|
@ -116,7 +116,7 @@
|
|||
<library-files-table
|
||||
:key="String(processTimestamp)"
|
||||
:filters="{import_reference: importReference}"
|
||||
:custom-objects="Object.values(trackFiles.objects)"></library-files-table>
|
||||
:custom-objects="Object.values(uploads.objects)"></library-files-table>
|
||||
</div>
|
||||
</div>
|
||||
</template>
|
||||
|
@ -141,9 +141,9 @@ export default {
|
|||
return {
|
||||
files: [],
|
||||
currentTab: 'summary',
|
||||
uploadUrl: '/api/v1/track-files/',
|
||||
uploadUrl: '/api/v1/uploads/',
|
||||
importReference,
|
||||
trackFiles: {
|
||||
uploads: {
|
||||
pending: 0,
|
||||
finished: 0,
|
||||
skipped: 0,
|
||||
|
@ -183,14 +183,14 @@ export default {
|
|||
let self = this
|
||||
let statuses = ['pending', 'errored', 'skipped', 'finished']
|
||||
statuses.forEach((status) => {
|
||||
axios.get('track-files/', {params: {import_reference: self.importReference, import_status: status, page_size: 1}}).then((response) => {
|
||||
self.trackFiles[status] = response.data.count
|
||||
axios.get('uploads/', {params: {import_reference: self.importReference, import_status: status, page_size: 1}}).then((response) => {
|
||||
self.uploads[status] = response.data.count
|
||||
})
|
||||
})
|
||||
},
|
||||
updateProgressBar () {
|
||||
$(this.$el).find('.progress').progress({
|
||||
total: this.files.length * 2,
|
||||
total: this.uploads.length * 2,
|
||||
value: this.uploadedFilesCount + this.finishedJobs
|
||||
})
|
||||
},
|
||||
|
@ -219,13 +219,13 @@ export default {
|
|||
},
|
||||
handleImportEvent (event) {
|
||||
let self = this
|
||||
if (event.track_file.import_reference != self.importReference) {
|
||||
if (event.upload.import_reference != self.importReference) {
|
||||
return
|
||||
}
|
||||
this.$nextTick(() => {
|
||||
self.trackFiles[event.old_status] -= 1
|
||||
self.trackFiles[event.new_status] += 1
|
||||
self.trackFiles.objects[event.track_file.uuid] = event.track_file
|
||||
self.uploads[event.old_status] -= 1
|
||||
self.uploads[event.new_status] += 1
|
||||
self.uploads.objects[event.track_file.uuid] = event.track_file
|
||||
self.triggerReload()
|
||||
})
|
||||
},
|
||||
|
@ -264,10 +264,10 @@ export default {
|
|||
}).length
|
||||
},
|
||||
processableFiles () {
|
||||
return this.trackFiles.pending + this.trackFiles.skipped + this.trackFiles.errored + this.trackFiles.finished + this.uploadedFilesCount
|
||||
return this.uploads.pending + this.uploads.skipped + this.uploads.errored + this.uploads.finished + this.uploadedFilesCount
|
||||
},
|
||||
processedFilesCount () {
|
||||
return this.trackFiles.skipped + this.trackFiles.errored + this.trackFiles.finished
|
||||
return this.uploads.skipped + this.uploads.errored + this.uploads.finished
|
||||
},
|
||||
uploadData: function () {
|
||||
return {
|
||||
|
|
|
@ -44,13 +44,13 @@
|
|||
<i class="external icon"></i>
|
||||
<translate>View on MusicBrainz</translate>
|
||||
</a>
|
||||
<a v-if="downloadUrl" :href="downloadUrl" target="_blank" class="ui button">
|
||||
<a v-if="track.is_playable" :href="downloadUrl" target="_blank" class="ui button">
|
||||
<i class="download icon"></i>
|
||||
<translate>Download</translate>
|
||||
</a>
|
||||
</div>
|
||||
</div>
|
||||
<div v-if="file" class="ui vertical stripe center aligned segment">
|
||||
<div class="ui vertical stripe center aligned segment">
|
||||
<h2 class="ui header"><translate>Track information</translate></h2>
|
||||
<table class="ui very basic collapsing celled center aligned table">
|
||||
<tbody>
|
||||
|
@ -58,8 +58,8 @@
|
|||
<td>
|
||||
<translate>Duration</translate>
|
||||
</td>
|
||||
<td v-if="file.duration">
|
||||
{{ time.parse(file.duration) }}
|
||||
<td v-if="track.duration">
|
||||
{{ time.parse(track.duration) }}
|
||||
</td>
|
||||
<td v-else>
|
||||
<translate>N/A</translate>
|
||||
|
@ -69,8 +69,8 @@
|
|||
<td>
|
||||
<translate>Size</translate>
|
||||
</td>
|
||||
<td v-if="file.size">
|
||||
{{ file.size | humanSize }}
|
||||
<td v-if="track.size">
|
||||
{{ track.size | humanSize }}
|
||||
</td>
|
||||
<td v-else>
|
||||
<translate>N/A</translate>
|
||||
|
@ -80,8 +80,8 @@
|
|||
<td>
|
||||
<translate>Bitrate</translate>
|
||||
</td>
|
||||
<td v-if="file.bitrate">
|
||||
{{ file.bitrate | humanSize }}/s
|
||||
<td v-if="track.bitrate">
|
||||
{{ track.bitrate | humanSize }}/s
|
||||
</td>
|
||||
<td v-else>
|
||||
<translate>N/A</translate>
|
||||
|
@ -91,8 +91,8 @@
|
|||
<td>
|
||||
<translate>Type</translate>
|
||||
</td>
|
||||
<td v-if="file.mimetype">
|
||||
{{ file.mimetype }}
|
||||
<td v-if="track.mimetype">
|
||||
{{ track.mimetype }}
|
||||
</td>
|
||||
<td v-else>
|
||||
<translate>N/A</translate>
|
||||
|
@ -192,16 +192,11 @@ export default {
|
|||
return 'https://musicbrainz.org/recording/' + this.track.mbid
|
||||
},
|
||||
downloadUrl () {
|
||||
if (this.track.files.length > 0) {
|
||||
let u = this.$store.getters['instance/absoluteUrl'](this.track.files[0].path)
|
||||
if (this.$store.state.auth.authenticated) {
|
||||
u = url.updateQueryString(u, 'jwt', this.$store.state.auth.token)
|
||||
}
|
||||
return u
|
||||
let u = this.$store.getters['instance/absoluteUrl'](this.track.listen_url)
|
||||
if (this.$store.state.auth.authenticated) {
|
||||
u = url.updateQueryString(u, 'jwt', encodeURI(this.$store.state.auth.token))
|
||||
}
|
||||
},
|
||||
file () {
|
||||
return this.track.files[0]
|
||||
return u
|
||||
},
|
||||
lyricsSearchUrl () {
|
||||
let base = 'http://lyrics.wikia.com/wiki/Special:Search?query='
|
||||
|
|
|
@ -32,7 +32,7 @@
|
|||
@action-launched="fetchData"
|
||||
:objects-data="result"
|
||||
:actions="actions"
|
||||
:action-url="'manage/library/track-files/action/'"
|
||||
:action-url="'manage/library/uploads/action/'"
|
||||
:filters="actionFilters">
|
||||
<template slot="header-cells">
|
||||
<th><translate>Title</translate></th>
|
||||
|
@ -157,7 +157,7 @@ export default {
|
|||
let self = this
|
||||
self.isLoading = true
|
||||
self.checked = []
|
||||
axios.get('/manage/library/track-files/', {params: params}).then((response) => {
|
||||
axios.get('/manage/library/uploads/', {params: params}).then((response) => {
|
||||
self.result = response.data
|
||||
self.isLoading = false
|
||||
}, error => {
|
||||
|
|
|
@ -32,7 +32,7 @@
|
|||
@action-launched="fetchData"
|
||||
:objects-data="result"
|
||||
:actions="actions"
|
||||
:action-url="'manage/library/track-files/action/'"
|
||||
:action-url="'manage/library/uploads/action/'"
|
||||
:filters="actionFilters">
|
||||
<template slot="header-cells">
|
||||
<th><translate>Username</translate></th>
|
||||
|
|
|
@ -79,6 +79,7 @@ export default new Vuex.Store({
|
|||
id: track.id,
|
||||
title: track.title,
|
||||
mbid: track.mbid,
|
||||
listen_url: track.listen_url,
|
||||
album: {
|
||||
id: track.album.id,
|
||||
title: track.album.title,
|
||||
|
@ -86,8 +87,7 @@ export default new Vuex.Store({
|
|||
cover: track.album.cover,
|
||||
artist: artist
|
||||
},
|
||||
artist: artist,
|
||||
files: track.files
|
||||
artist: artist
|
||||
}
|
||||
})
|
||||
}
|
||||
|
|
|
@ -37,7 +37,7 @@
|
|||
{{ library.size | humanSize }}
|
||||
</span>
|
||||
<i class="music icon"></i>
|
||||
<translate :translate-params="{count: library.files_count}" :translate-n="library.files_count" translate-plural="%{ count } tracks">1 tracks</translate>
|
||||
<translate :translate-params="{count: library.uploads_count}" :translate-n="library.uploads_count" translate-plural="%{ count } tracks">1 tracks</translate>
|
||||
</div>
|
||||
</div>
|
||||
<div class="ui bottom basic attached buttons">
|
||||
|
|
|
@ -46,7 +46,7 @@
|
|||
:objects-data="result"
|
||||
:custom-objects="customObjects"
|
||||
:actions="actions"
|
||||
:action-url="'track-files/action/'"
|
||||
:action-url="'uploads/action/'"
|
||||
:filters="actionFilters">
|
||||
<template slot="header-cells">
|
||||
<th><translate>Title</translate></th>
|
||||
|
@ -207,7 +207,7 @@ export default {
|
|||
let self = this
|
||||
self.isLoading = true
|
||||
self.checked = []
|
||||
axios.get('/track-files/', {params: params}).then((response) => {
|
||||
axios.get('/uploads/', {params: params}).then((response) => {
|
||||
self.result = response.data
|
||||
self.isLoading = false
|
||||
}, error => {
|
||||
|
|
|
@ -132,7 +132,7 @@ export default {
|
|||
import_status: status
|
||||
}
|
||||
}
|
||||
axios.post('track-files/action/', payload).then((response) => {
|
||||
axios.post('uploads/action/', payload).then((response) => {
|
||||
self.fetch()
|
||||
})
|
||||
},
|
||||
|
|
|
@ -24,7 +24,7 @@
|
|||
</div>
|
||||
<div class="content">
|
||||
<i class="music icon"></i>
|
||||
<translate :translate-params="{count: library.files_count}" :translate-n="library.files_count" translate-plural="%{ count } tracks">1 tracks</translate>
|
||||
<translate :translate-params="{count: library.uploads_count}" :translate-n="library.uploads_count" translate-plural="%{ count } tracks">1 tracks</translate>
|
||||
</div>
|
||||
</div>
|
||||
<div class="extra content">
|
||||
|
|
Loading…
Reference in New Issue