See #170: updates to upload API to support channels publishing

commit c94d9214ec (parent: e80eb3eb3e)
@@ -68,7 +68,7 @@ class ActorQuerySet(models.QuerySet):

     def with_current_usage(self):
         qs = self
-        for s in ["pending", "skipped", "errored", "finished"]:
+        for s in ["draft", "pending", "skipped", "errored", "finished"]:
             uploads_query = models.Q(
                 libraries__uploads__import_status=s,
                 libraries__uploads__audio_file__isnull=False,
@@ -247,7 +247,7 @@ class Actor(models.Model):
     def get_current_usage(self):
         actor = self.__class__.objects.filter(pk=self.pk).with_current_usage().get()
         data = {}
-        for s in ["pending", "skipped", "errored", "finished"]:
+        for s in ["draft", "pending", "skipped", "errored", "finished"]:
            data[s] = getattr(actor, "_usage_{}".format(s)) or 0

         data["total"] = sum(data.values())
@@ -173,7 +173,7 @@ class ManageDomainActionSerializer(common_serializers.ActionSerializer):

     @transaction.atomic
     def handle_purge(self, objects):
-        ids = objects.values_list("pk", flat=True)
+        ids = objects.values_list("pk", flat=True).order_by("pk")
         common_utils.on_commit(federation_tasks.purge_actors.delay, domains=list(ids))

     @transaction.atomic
@@ -11,6 +11,7 @@ import pydub
 from django.conf import settings
 from django.contrib.contenttypes.fields import GenericRelation
 from django.contrib.postgres.fields import JSONField
+from django.core.exceptions import ObjectDoesNotExist
 from django.core.files.base import ContentFile
 from django.core.serializers.json import DjangoJSONEncoder
 from django.db import models, transaction
@@ -643,6 +644,7 @@ class UploadQuerySet(common_models.NullsLastQuerySet):


 TRACK_FILE_IMPORT_STATUS_CHOICES = (
+    ("draft", "Draft"),
     ("pending", "Pending"),
     ("finished", "Finished"),
     ("errored", "Errored"),
@@ -1139,6 +1141,12 @@ class Library(federation_models.FederationMixin):
         common_utils.on_commit(tasks.start_library_scan.delay, library_scan_id=scan.pk)
         return scan

+    def get_channel(self):
+        try:
+            return self.channel
+        except ObjectDoesNotExist:
+            return None
+

 SCAN_STATUS = [
     ("pending", "pending"),
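Note (illustrative, not part of the diff): `Library.get_channel()` is the helper the rest of this commit uses to tell a channel's hidden library apart from a regular one, for example as a guard before forcing the channel's artist onto an upload. A minimal sketch of that calling pattern, with a hypothetical helper name:

    def resolve_forced_artist(library):
        # When the library is backed by a channel, imported tracks must be
        # attributed to the channel's artist; otherwise nothing is forced.
        channel = library.get_channel()
        return channel.artist if channel else None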
@@ -6,12 +6,14 @@ from django.conf import settings
 from rest_framework import serializers

 from funkwhale_api.activity import serializers as activity_serializers
+from funkwhale_api.audio import serializers as audio_serializers
 from funkwhale_api.common import serializers as common_serializers
 from funkwhale_api.common import utils as common_utils
 from funkwhale_api.federation import routes
 from funkwhale_api.federation import utils as federation_utils
 from funkwhale_api.playlists import models as playlists_models
 from funkwhale_api.tags.models import Tag
+from funkwhale_api.tags import serializers as tags_serializers

 from . import filters, models, tasks

@@ -61,6 +63,9 @@ class LicenseSerializer(serializers.Serializer):
     def get_id(self, obj):
         return obj["identifiers"][0]

+    class Meta:
+        model = models.License
+

 class ArtistAlbumSerializer(serializers.Serializer):
     tracks_count = serializers.SerializerMethodField()
@@ -295,9 +300,15 @@ class UploadSerializer(serializers.ModelSerializer):
     library = common_serializers.RelatedField(
         "uuid",
         LibraryForOwnerSerializer(),
-        required=True,
+        required=False,
         filters=lambda context: {"actor": context["user"].actor},
     )
+    channel = common_serializers.RelatedField(
+        "uuid",
+        audio_serializers.ChannelSerializer(),
+        required=False,
+        filters=lambda context: {"attributed_to": context["user"].actor},
+    )

     class Meta:
         model = models.Upload
@@ -308,6 +319,7 @@ class UploadSerializer(serializers.ModelSerializer):
             "mimetype",
             "track",
             "library",
+            "channel",
             "duration",
             "mimetype",
             "bitrate",
@@ -325,11 +337,34 @@ class UploadSerializer(serializers.ModelSerializer):
             "size",
             "track",
             "import_date",
-            "import_status",
         ]


+class ImportMetadataSerializer(serializers.Serializer):
+    title = serializers.CharField(max_length=500, required=True)
+    mbid = serializers.UUIDField(required=False, allow_null=True)
+    copyright = serializers.CharField(max_length=500, required=False, allow_null=True)
+    position = serializers.IntegerField(min_value=1, required=False, allow_null=True)
+    tags = tags_serializers.TagsListField(required=False)
+    license = common_serializers.RelatedField(
+        "code", LicenseSerializer(), required=False, allow_null=True
+    )
+
+
+class ImportMetadataField(serializers.JSONField):
+    def to_internal_value(self, v):
+        v = super().to_internal_value(v)
+        s = ImportMetadataSerializer(data=v)
+        s.is_valid(raise_exception=True)
+        return v
+
+
 class UploadForOwnerSerializer(UploadSerializer):
+    import_status = serializers.ChoiceField(
+        choices=["draft", "pending"], default="pending"
+    )
+    import_metadata = ImportMetadataField(required=False)
+
     class Meta(UploadSerializer.Meta):
         fields = UploadSerializer.Meta.fields + [
             "import_details",
@@ -342,7 +377,6 @@ class UploadForOwnerSerializer(UploadSerializer):
         write_only_fields = ["audio_file"]
         read_only_fields = UploadSerializer.Meta.read_only_fields + [
             "import_details",
-            "import_metadata",
             "metadata",
         ]

@@ -353,9 +387,27 @@ class UploadForOwnerSerializer(UploadSerializer):
         return r

     def validate(self, validated_data):
+        if (
+            not self.instance
+            and "library" not in validated_data
+            and "channel" not in validated_data
+        ):
+            raise serializers.ValidationError(
+                "You need to specify a channel or a library"
+            )
+        if (
+            not self.instance
+            and "library" in validated_data
+            and "channel" in validated_data
+        ):
+            raise serializers.ValidationError(
+                "You may specify a channel or a library, not both"
+            )
         if "audio_file" in validated_data:
             self.validate_upload_quota(validated_data["audio_file"])

+        if "channel" in validated_data:
+            validated_data["library"] = validated_data.pop("channel").library
         return super().validate(validated_data)

     def validate_upload_quota(self, f):
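Note (illustrative, not part of the diff): the `validate()` logic above requires exactly one of `library`/`channel` on creation and silently maps a channel to its underlying library. A minimal usage sketch mirroring the serializer tests added later in this commit; `channel`, `user` and `uploaded_audio_file` are assumed to exist and the user must own the channel:

    serializer = serializers.UploadForOwnerSerializer(
        data={
            "channel": channel.uuid,
            "audio_file": uploaded_audio_file,
            "import_status": "draft",
        },
        context={"user": user},
    )
    serializer.is_valid(raise_exception=True)
    upload = serializer.save()
    assert upload.library == channel.library  # the channel's hidden library is used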
@@ -390,7 +442,7 @@ class UploadActionSerializer(common_serializers.ActionSerializer):

     @transaction.atomic
     def handle_relaunch_import(self, objects):
-        qs = objects.exclude(import_status="finished")
+        qs = objects.filter(import_status__in=["pending", "skipped", "errored"])
         pks = list(qs.values_list("id", flat=True))
         qs.update(import_status="pending")
         for pk in pks:
@@ -165,12 +165,26 @@ def fail_import(upload, error_code, detail=None, **fields):
 @celery.app.task(name="music.process_upload")
 @celery.require_instance(
     models.Upload.objects.filter(import_status="pending").select_related(
-        "library__actor__user"
+        "library__actor__user", "library__channel__artist",
     ),
     "upload",
 )
 def process_upload(upload, update_denormalization=True):
+    from . import serializers
+
     import_metadata = upload.import_metadata or {}
+    internal_config = {"funkwhale": import_metadata.get("funkwhale", {})}
+    forced_values_serializer = serializers.ImportMetadataSerializer(
+        data=import_metadata
+    )
+    if forced_values_serializer.is_valid():
+        forced_values = forced_values_serializer.validated_data
+    else:
+        forced_values = {}
+
+    if upload.library.get_channel():
+        # ensure the upload is associated with the channel artist
+        forced_values["artist"] = upload.library.channel.artist
     old_status = upload.import_status
     audio_file = upload.get_audio_file()
     additional_data = {}
@@ -193,12 +207,12 @@ def process_upload(upload, update_denormalization=True):
     )

     final_metadata = collections.ChainMap(
-        additional_data, serializer.validated_data, import_metadata
+        additional_data, serializer.validated_data, internal_config
     )
     additional_data["upload_source"] = upload.source
     try:
         track = get_track_from_import_metadata(
-            final_metadata, attributed_to=upload.library.actor
+            final_metadata, attributed_to=upload.library.actor, **forced_values
         )
     except UploadImportError as e:
         return fail_import(upload, e.code)
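Note (illustrative, not part of the diff): `collections.ChainMap` looks keys up left to right, so data computed during processing wins over tag-derived metadata, which in turn wins over the internal `funkwhale` configuration. A tiny standalone illustration of that precedence, with made-up values:

    import collections

    additional_data = {"upload_source": "upload://test"}
    tag_metadata = {"title": "From the audio tags", "upload_source": "ignored"}
    internal_config = {"funkwhale": {"config": {"broadcast": False}}}

    final = collections.ChainMap(additional_data, tag_metadata, internal_config)
    assert final["upload_source"] == "upload://test"  # first mapping wins
    assert final["title"] == "From the audio tags"
    assert final["funkwhale"]["config"]["broadcast"] is False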
@@ -264,7 +278,7 @@ def process_upload(upload, update_denormalization=True):
     )

     broadcast = getter(
-        import_metadata, "funkwhale", "config", "broadcast", default=True
+        internal_config, "funkwhale", "config", "broadcast", default=True
     )
     if broadcast:
         signals.upload_import_status_updated.send(
@@ -274,7 +288,7 @@ def process_upload(upload, update_denormalization=True):
         sender=None,
     )
     dispatch_outbox = getter(
-        import_metadata, "funkwhale", "config", "dispatch_outbox", default=True
+        internal_config, "funkwhale", "config", "dispatch_outbox", default=True
     )
     if dispatch_outbox:
         routes.outbox.dispatch(
@@ -401,8 +415,10 @@ def sort_candidates(candidates, important_fields):


 @transaction.atomic
-def get_track_from_import_metadata(data, update_cover=False, attributed_to=None):
-    track = _get_track(data, attributed_to=attributed_to)
+def get_track_from_import_metadata(
+    data, update_cover=False, attributed_to=None, **forced_values
+):
+    track = _get_track(data, attributed_to=attributed_to, **forced_values)
     if update_cover and track and not track.album.attachment_cover:
         update_album_cover(
             track.album,
@@ -418,7 +434,7 @@ def truncate(v, length):
     return v[:length]


-def _get_track(data, attributed_to=None):
+def _get_track(data, attributed_to=None, **forced_values):
     track_uuid = getter(data, "funkwhale", "track", "uuid")

     if track_uuid:
@@ -432,7 +448,9 @@ def _get_track(data, attributed_to=None):
         return track

     from_activity_id = data.get("from_activity_id", None)
-    track_mbid = data.get("mbid", None)
+    track_mbid = (
+        forced_values["mbid"] if "mbid" in forced_values else data.get("mbid", None)
+    )
     album_mbid = getter(data, "album", "mbid")
     track_fid = getter(data, "fid")

@@ -455,98 +473,128 @@ def _get_track(data, attributed_to=None):
             pass

     # get / create artist and album artist
-    artists = getter(data, "artists", default=[])
-    artist_data = artists[0]
-    artist_mbid = artist_data.get("mbid", None)
-    artist_fid = artist_data.get("fid", None)
-    artist_name = truncate(artist_data["name"], models.MAX_LENGTHS["ARTIST_NAME"])
-
-    if artist_mbid:
-        query = Q(mbid=artist_mbid)
-    else:
-        query = Q(name__iexact=artist_name)
-    if artist_fid:
-        query |= Q(fid=artist_fid)
-    defaults = {
-        "name": artist_name,
-        "mbid": artist_mbid,
-        "fid": artist_fid,
-        "from_activity_id": from_activity_id,
-        "attributed_to": artist_data.get("attributed_to", attributed_to),
-    }
-    if artist_data.get("fdate"):
-        defaults["creation_date"] = artist_data.get("fdate")
-
-    artist, created = get_best_candidate_or_create(
-        models.Artist, query, defaults=defaults, sort_fields=["mbid", "fid"]
-    )
-    if created:
-        tags_models.add_tags(artist, *artist_data.get("tags", []))
-
-    album_artists = getter(data, "album", "artists", default=artists) or artists
-    album_artist_data = album_artists[0]
-    album_artist_name = truncate(
-        album_artist_data.get("name"), models.MAX_LENGTHS["ARTIST_NAME"]
-    )
-    if album_artist_name == artist_name:
-        album_artist = artist
-    else:
-        query = Q(name__iexact=album_artist_name)
-        album_artist_mbid = album_artist_data.get("mbid", None)
-        album_artist_fid = album_artist_data.get("fid", None)
-        if album_artist_mbid:
-            query |= Q(mbid=album_artist_mbid)
-        if album_artist_fid:
-            query |= Q(fid=album_artist_fid)
-        defaults = {
-            "name": album_artist_name,
-            "mbid": album_artist_mbid,
-            "fid": album_artist_fid,
-            "from_activity_id": from_activity_id,
-            "attributed_to": album_artist_data.get("attributed_to", attributed_to),
-        }
-        if album_artist_data.get("fdate"):
-            defaults["creation_date"] = album_artist_data.get("fdate")
-
-        album_artist, created = get_best_candidate_or_create(
-            models.Artist, query, defaults=defaults, sort_fields=["mbid", "fid"]
-        )
-        if created:
-            tags_models.add_tags(album_artist, *album_artist_data.get("tags", []))
-
-    # get / create album
-    album_data = data["album"]
-    album_title = truncate(album_data["title"], models.MAX_LENGTHS["ALBUM_TITLE"])
-    album_fid = album_data.get("fid", None)
-
-    if album_mbid:
-        query = Q(mbid=album_mbid)
-    else:
-        query = Q(title__iexact=album_title, artist=album_artist)
-    if album_fid:
-        query |= Q(fid=album_fid)
-    defaults = {
-        "title": album_title,
-        "artist": album_artist,
-        "mbid": album_mbid,
-        "release_date": album_data.get("release_date"),
-        "fid": album_fid,
-        "from_activity_id": from_activity_id,
-        "attributed_to": album_data.get("attributed_to", attributed_to),
-    }
-    if album_data.get("fdate"):
-        defaults["creation_date"] = album_data.get("fdate")
-
-    album, created = get_best_candidate_or_create(
-        models.Album, query, defaults=defaults, sort_fields=["mbid", "fid"]
-    )
-    if created:
-        tags_models.add_tags(album, *album_data.get("tags", []))
+    if "artist" in forced_values:
+        artist = forced_values["artist"]
+    else:
+        artists = getter(data, "artists", default=[])
+        artist_data = artists[0]
+        artist_mbid = artist_data.get("mbid", None)
+        artist_fid = artist_data.get("fid", None)
+        artist_name = truncate(artist_data["name"], models.MAX_LENGTHS["ARTIST_NAME"])
+
+        if artist_mbid:
+            query = Q(mbid=artist_mbid)
+        else:
+            query = Q(name__iexact=artist_name)
+        if artist_fid:
+            query |= Q(fid=artist_fid)
+        defaults = {
+            "name": artist_name,
+            "mbid": artist_mbid,
+            "fid": artist_fid,
+            "from_activity_id": from_activity_id,
+            "attributed_to": artist_data.get("attributed_to", attributed_to),
+        }
+        if artist_data.get("fdate"):
+            defaults["creation_date"] = artist_data.get("fdate")
+
+        artist, created = get_best_candidate_or_create(
+            models.Artist, query, defaults=defaults, sort_fields=["mbid", "fid"]
+        )
+        if created:
+            tags_models.add_tags(artist, *artist_data.get("tags", []))
+
+    if "album" in forced_values:
+        album = forced_values["album"]
+    else:
+        album_artists = getter(data, "album", "artists", default=artists) or artists
+        album_artist_data = album_artists[0]
+        album_artist_name = truncate(
+            album_artist_data.get("name"), models.MAX_LENGTHS["ARTIST_NAME"]
+        )
+        if album_artist_name == artist_name:
+            album_artist = artist
+        else:
+            query = Q(name__iexact=album_artist_name)
+            album_artist_mbid = album_artist_data.get("mbid", None)
+            album_artist_fid = album_artist_data.get("fid", None)
+            if album_artist_mbid:
+                query |= Q(mbid=album_artist_mbid)
+            if album_artist_fid:
+                query |= Q(fid=album_artist_fid)
+            defaults = {
+                "name": album_artist_name,
+                "mbid": album_artist_mbid,
+                "fid": album_artist_fid,
+                "from_activity_id": from_activity_id,
+                "attributed_to": album_artist_data.get("attributed_to", attributed_to),
+            }
+            if album_artist_data.get("fdate"):
+                defaults["creation_date"] = album_artist_data.get("fdate")
+
+            album_artist, created = get_best_candidate_or_create(
+                models.Artist, query, defaults=defaults, sort_fields=["mbid", "fid"]
+            )
+            if created:
+                tags_models.add_tags(album_artist, *album_artist_data.get("tags", []))
+
+        # get / create album
+        album_data = data["album"]
+        album_title = truncate(album_data["title"], models.MAX_LENGTHS["ALBUM_TITLE"])
+        album_fid = album_data.get("fid", None)
+
+        if album_mbid:
+            query = Q(mbid=album_mbid)
+        else:
+            query = Q(title__iexact=album_title, artist=album_artist)
+
+        if album_fid:
+            query |= Q(fid=album_fid)
+        defaults = {
+            "title": album_title,
+            "artist": album_artist,
+            "mbid": album_mbid,
+            "release_date": album_data.get("release_date"),
+            "fid": album_fid,
+            "from_activity_id": from_activity_id,
+            "attributed_to": album_data.get("attributed_to", attributed_to),
+        }
+        if album_data.get("fdate"):
+            defaults["creation_date"] = album_data.get("fdate")
+
+        album, created = get_best_candidate_or_create(
+            models.Album, query, defaults=defaults, sort_fields=["mbid", "fid"]
+        )
+        if created:
+            tags_models.add_tags(album, *album_data.get("tags", []))

     # get / create track
-    track_title = truncate(data["title"], models.MAX_LENGTHS["TRACK_TITLE"])
-    position = data.get("position", 1)
+    track_title = (
+        forced_values["title"]
+        if "title" in forced_values
+        else truncate(data["title"], models.MAX_LENGTHS["TRACK_TITLE"])
+    )
+    position = (
+        forced_values["position"]
+        if "position" in forced_values
+        else data.get("position", 1)
+    )
+    disc_number = (
+        forced_values["disc_number"]
+        if "disc_number" in forced_values
+        else data.get("disc_number")
+    )
+    license = (
+        forced_values["license"]
+        if "license" in forced_values
+        else licenses.match(data.get("license"), data.get("copyright"))
+    )
+    copyright = (
+        forced_values["copyright"]
+        if "copyright" in forced_values
+        else truncate(data.get("copyright"), models.MAX_LENGTHS["COPYRIGHT"])
+    )

     query = Q(title__iexact=track_title, artist=artist, album=album, position=position)
     if track_mbid:
         query |= Q(mbid=track_mbid)
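Note (illustrative, not part of the diff): the same "forced value wins over tag-derived value" selection repeats above for mbid, title, position, disc_number, license and copyright. For the plain fields it is equivalent to a small hypothetical helper like this; license and copyright additionally go through `licenses.match()`/`truncate()` when falling back:

    def pick(field, forced_values, data, default=None):
        # A value supplied through import_metadata (or forced by a channel)
        # takes precedence over whatever was read from the audio file's tags.
        if field in forced_values:
            return forced_values[field]
        return data.get(field, default)

    # e.g. track_mbid = pick("mbid", forced_values, data)
    #      position = pick("position", forced_values, data, default=1)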
@@ -558,12 +606,12 @@ def _get_track(data, attributed_to=None):
         "mbid": track_mbid,
         "artist": artist,
         "position": position,
-        "disc_number": data.get("disc_number"),
+        "disc_number": disc_number,
         "fid": track_fid,
         "from_activity_id": from_activity_id,
         "attributed_to": data.get("attributed_to", attributed_to),
-        "license": licenses.match(data.get("license"), data.get("copyright")),
-        "copyright": truncate(data.get("copyright"), models.MAX_LENGTHS["COPYRIGHT"]),
+        "license": license,
+        "copyright": copyright,
     }
     if data.get("fdate"):
         defaults["creation_date"] = data.get("fdate")
@@ -573,7 +621,10 @@ def _get_track(data, attributed_to=None):
     )

     if created:
-        tags_models.add_tags(track, *data.get("tags", []))
+        tags = (
+            forced_values["tags"] if "tags" in forced_values else data.get("tags", [])
+        )
+        tags_models.add_tags(track, *tags)
     return track

@@ -1,3 +1,4 @@
+import base64
 import datetime
 import logging
 import urllib.parse
@@ -505,6 +506,7 @@ class UploadViewSet(
     mixins.ListModelMixin,
     mixins.CreateModelMixin,
     mixins.RetrieveModelMixin,
+    mixins.UpdateModelMixin,
     mixins.DestroyModelMixin,
     viewsets.GenericViewSet,
 ):
@@ -534,8 +536,32 @@ class UploadViewSet(

     def get_queryset(self):
         qs = super().get_queryset()
+        if self.action in ["update", "partial_update"]:
+            # prevent updating an upload that is already processed
+            qs = qs.filter(import_status="draft")
         return qs.filter(library__actor=self.request.user.actor)

+    @action(methods=["get"], detail=True, url_path="audio-file-metadata")
+    def audio_file_metadata(self, request, *args, **kwargs):
+        upload = self.get_object()
+        try:
+            m = tasks.metadata.Metadata(upload.get_audio_file())
+        except FileNotFoundError:
+            return Response({"detail": "File not found"}, status=500)
+        serializer = tasks.metadata.TrackMetadataSerializer(data=m)
+        if not serializer.is_valid():
+            return Response(serializer.errors, status=500)
+        payload = serializer.validated_data
+        if (
+            "cover_data" in payload
+            and payload["cover_data"]
+            and "content" in payload["cover_data"]
+        ):
+            payload["cover_data"]["content"] = base64.b64encode(
+                payload["cover_data"]["content"]
+            )
+        return Response(payload, status=200)
+
     @action(methods=["post"], detail=False)
     def action(self, request, *args, **kwargs):
         queryset = self.get_queryset()
@@ -551,7 +577,13 @@ class UploadViewSet(

     def perform_create(self, serializer):
         upload = serializer.save()
-        common_utils.on_commit(tasks.process_upload.delay, upload_id=upload.pk)
+        if upload.import_status == "pending":
+            common_utils.on_commit(tasks.process_upload.delay, upload_id=upload.pk)
+
+    def perform_update(self, serializer):
+        upload = serializer.save()
+        if upload.import_status == "pending":
+            common_utils.on_commit(tasks.process_upload.delay, upload_id=upload.pk)

     @transaction.atomic
     def perform_destroy(self, instance):
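Note (illustrative, not part of the diff): `perform_create()`/`perform_update()` only schedule `process_upload` once an upload reaches the `pending` status, which is what enables the two-step draft workflow documented in the swagger changes below. A rough client-side sketch of that workflow, assuming an already-authenticated `requests` session and that the create response exposes the upload's `uuid`:

    import requests

    API = "https://funkwhale.example/api/v1"  # hypothetical instance
    session = requests.Session()
    session.headers["Authorization"] = "Bearer <token>"  # assumed auth scheme

    # 1. Create a draft upload attached to a channel: nothing is processed yet.
    with open("episode.ogg", "rb") as f:
        upload = session.post(
            f"{API}/uploads/",
            data={"channel": "<channel uuid>", "import_status": "draft"},
            files={"audio_file": f},
        ).json()

    # 2. Adjust metadata while the upload is still a draft.
    session.patch(
        f"{API}/uploads/{upload['uuid']}/",
        json={"import_metadata": {"title": "Episode 1", "position": 1}},
    )

    # 3. Flip the status to "pending" to trigger processing.
    session.patch(f"{API}/uploads/{upload['uuid']}/", json={"import_status": "pending"})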
@@ -245,6 +245,7 @@ class User(AbstractUser):
             "max": max_,
             "remaining": max(max_ - (data["total"] / 1000 / 1000), 0),
             "current": data["total"] / 1000 / 1000,
+            "draft": data["draft"] / 1000 / 1000,
             "skipped": data["skipped"] / 1000 / 1000,
             "pending": data["pending"] / 1000 / 1000,
             "finished": data["finished"] / 1000 / 1000,
@@ -54,6 +54,12 @@ def test_actor_get_quota(factories):
         audio_file__from_path=None,
         audio_file__data=b"aaaa",
     )
+    factories["music.Upload"](
+        library=library,
+        import_status="draft",
+        audio_file__from_path=None,
+        audio_file__data=b"aaaaa",
+    )

     # this one is imported in place and don't count
     factories["music.Upload"](
@@ -72,7 +78,14 @@ def test_actor_get_quota(factories):
         audio_file__data=b"aaaa",
     )

-    expected = {"total": 14, "pending": 1, "skipped": 2, "errored": 3, "finished": 8}
+    expected = {
+        "total": 19,
+        "pending": 1,
+        "skipped": 2,
+        "errored": 3,
+        "finished": 8,
+        "draft": 5,
+    }

     assert library.actor.get_current_usage() == expected

@@ -1,4 +1,5 @@
 import pytest
+import uuid

 from funkwhale_api.common import serializers as common_serializers
 from funkwhale_api.federation import serializers as federation_serializers
@@ -297,6 +298,7 @@ def test_manage_upload_action_relaunch_import(factories, mocker):

     # this one is finished and should stay as is
     finished = factories["music.Upload"](import_status="finished")
+    draft = factories["music.Upload"](import_status="draft")

     to_relaunch = [
         factories["music.Upload"](import_status="pending"),
@@ -314,6 +316,8 @@ def test_manage_upload_action_relaunch_import(factories, mocker):

     finished.refresh_from_db()
     assert finished.import_status == "finished"
+    draft.refresh_from_db()
+    assert draft.import_status == "draft"
     assert m.call_count == 3

@@ -357,3 +361,143 @@ def test_update_library_privacy_level_broadcasts_to_followers(
     dispatch.assert_called_once_with(
         {"type": "Update", "object": {"type": "Library"}}, context={"library": library}
     )
+
+
+def test_upload_with_channel(factories, uploaded_audio_file):
+    channel = factories["audio.Channel"](attributed_to__local=True)
+    user = channel.attributed_to.user
+    data = {
+        "channel": channel.uuid,
+        "audio_file": uploaded_audio_file,
+        "import_status": "draft",
+    }
+    serializer = serializers.UploadForOwnerSerializer(
+        data=data, context={"user": user},
+    )
+    assert serializer.is_valid(raise_exception=True) is True
+    upload = serializer.save()
+
+    assert upload.library == channel.library
+
+
+def test_upload_with_not_owned_channel_fails(factories, uploaded_audio_file):
+    channel = factories["audio.Channel"]()
+    user = factories["users.User"]()
+    data = {
+        "channel": channel.uuid,
+        "audio_file": uploaded_audio_file,
+    }
+    serializer = serializers.UploadForOwnerSerializer(
+        data=data, context={"user": user},
+    )
+    assert serializer.is_valid() is False
+    assert "channel" in serializer.errors
+
+
+def test_upload_with_not_owned_library_fails(factories, uploaded_audio_file):
+    library = factories["music.Library"]()
+    user = factories["users.User"]()
+    data = {
+        "library": library.uuid,
+        "audio_file": uploaded_audio_file,
+    }
+    serializer = serializers.UploadForOwnerSerializer(
+        data=data, context={"user": user},
+    )
+    assert serializer.is_valid() is False
+    assert "library" in serializer.errors
+
+
+def test_upload_requires_library_or_channel(factories, uploaded_audio_file):
+    user = factories["users.User"]()
+    data = {
+        "audio_file": uploaded_audio_file,
+    }
+    serializer = serializers.UploadForOwnerSerializer(
+        data=data, context={"user": user},
+    )
+
+    with pytest.raises(
+        serializers.serializers.ValidationError,
+        match=r"You need to specify a channel or a library",
+    ):
+        serializer.is_valid(raise_exception=True)
+
+
+def test_upload_requires_library_or_channel_but_not_both(
+    factories, uploaded_audio_file
+):
+    channel = factories["audio.Channel"](attributed_to__local=True)
+    library = channel.library
+    user = channel.attributed_to.user
+    data = {
+        "audio_file": uploaded_audio_file,
+        "library": library.uuid,
+        "channel": channel.uuid,
+    }
+    serializer = serializers.UploadForOwnerSerializer(
+        data=data, context={"user": user},
+    )
+    with pytest.raises(
+        serializers.serializers.ValidationError,
+        match=r"You may specify a channel or a library, not both",
+    ):
+        serializer.is_valid(raise_exception=True)
+
+
+def test_upload_import_metadata_serializer_simple():
+    serializer = serializers.ImportMetadataSerializer(data={"title": "hello"})
+
+    assert serializer.is_valid(raise_exception=True) is True
+    assert serializer.validated_data == {"title": "hello"}
+
+
+def test_upload_import_metadata_serializer_full():
+    licenses.load(licenses.LICENSES)
+    data = {
+        "title": "hello",
+        "mbid": "3220fd02-5237-4952-8394-b7e64b0204a6",
+        "tags": ["politics", "gender"],
+        "license": "cc-by-sa-4.0",
+        "copyright": "My work",
+        "position": 42,
+    }
+    expected = data.copy()
+    expected["license"] = models.License.objects.get(code=data["license"])
+    expected["mbid"] = uuid.UUID(data["mbid"])
+    serializer = serializers.ImportMetadataSerializer(data=data)
+
+    assert serializer.is_valid(raise_exception=True) is True
+    assert serializer.validated_data == expected
+
+
+def test_upload_with_channel_keeps_import_metadata(factories, uploaded_audio_file):
+    channel = factories["audio.Channel"](attributed_to__local=True)
+    user = channel.attributed_to.user
+    data = {
+        "channel": channel.uuid,
+        "audio_file": uploaded_audio_file,
+        "import_metadata": {"title": "hello"},
+    }
+    serializer = serializers.UploadForOwnerSerializer(
+        data=data, context={"user": user},
+    )
+    assert serializer.is_valid(raise_exception=True) is True
+    upload = serializer.save()
+
+    assert upload.import_metadata == data["import_metadata"]
+
+
+def test_upload_with_channel_validates_import_metadata(factories, uploaded_audio_file):
+    channel = factories["audio.Channel"](attributed_to__local=True)
+    user = channel.attributed_to.user
+    data = {
+        "channel": channel.uuid,
+        "audio_file": uploaded_audio_file,
+        "import_metadata": {"title": None},
+    }
+    serializer = serializers.UploadForOwnerSerializer(
+        data=data, context={"user": user},
+    )
+    with pytest.raises(serializers.serializers.ValidationError):
+        assert serializer.is_valid(raise_exception=True)
@@ -432,6 +432,14 @@ def test_upload_import_skip_existing_track_in_own_library(factories, temp_signal
     )


+@pytest.mark.parametrize("import_status", ["draft", "errored", "finished"])
+def test_process_upload_picks_ignore_non_pending_uploads(import_status, factories):
+    upload = factories["music.Upload"](import_status=import_status)
+
+    with pytest.raises(upload.DoesNotExist):
+        tasks.process_upload(upload_id=upload.pk)
+
+
 def test_upload_import_track_uuid(now, factories):
     track = factories["music.Track"]()
     upload = factories["music.Upload"](
@@ -911,3 +919,133 @@ def test_get_cover_from_fs_ignored(name, tmpdir):
     f.write(content)

     assert tasks.get_cover_from_fs(tmpdir) is None
+
+
+def test_get_track_from_import_metadata_with_forced_values(factories, mocker, faker):
+    actor = factories["federation.Actor"]()
+    forced_values = {
+        "title": "Real title",
+        "artist": factories["music.Artist"](),
+        "album": None,
+        "license": factories["music.License"](),
+        "position": 3,
+        "copyright": "Real copyright",
+        "mbid": faker.uuid4(),
+        "attributed_to": actor,
+        "tags": ["hello", "world"],
+    }
+    metadata = {
+        "title": "Test track",
+        "artists": [{"name": "Test artist"}],
+        "album": {"title": "Test album", "release_date": datetime.date(2012, 8, 15)},
+        "position": 4,
+        "disc_number": 2,
+        "copyright": "2018 Someone",
+        "tags": ["foo", "bar"],
+    }
+
+    track = tasks.get_track_from_import_metadata(metadata, **forced_values)
+
+    assert track.title == forced_values["title"]
+    assert track.mbid == forced_values["mbid"]
+    assert track.position == forced_values["position"]
+    assert track.disc_number == metadata["disc_number"]
+    assert track.copyright == forced_values["copyright"]
+    assert track.album == forced_values["album"]
+    assert track.artist == forced_values["artist"]
+    assert track.attributed_to == forced_values["attributed_to"]
+    assert track.license == forced_values["license"]
+    assert (
+        sorted(track.tagged_items.values_list("tag__name", flat=True))
+        == forced_values["tags"]
+    )
+
+
+def test_process_channel_upload_forces_artist_and_attributed_to(
+    factories, mocker, faker
+):
+    track = factories["music.Track"]()
+    channel = factories["audio.Channel"]()
+    import_metadata = {
+        "title": "Real title",
+        "position": 3,
+        "copyright": "Real copyright",
+        "tags": ["hello", "world"],
+    }
+
+    expected_forced_values = import_metadata.copy()
+    expected_forced_values["artist"] = channel.artist
+    expected_forced_values["attributed_to"] = channel.attributed_to
+    upload = factories["music.Upload"](
+        track=None, import_metadata=import_metadata, library=channel.library
+    )
+    get_track_from_import_metadata = mocker.patch.object(
+        tasks, "get_track_from_import_metadata", return_value=track
+    )
+
+    tasks.process_upload(upload_id=upload.pk)
+
+    upload.refresh_from_db()
+    serializer = tasks.metadata.TrackMetadataSerializer(
+        data=tasks.metadata.Metadata(upload.get_audio_file())
+    )
+    assert serializer.is_valid() is True
+    audio_metadata = serializer.validated_data
+
+    expected_final_metadata = tasks.collections.ChainMap(
+        {"upload_source": None}, audio_metadata, {"funkwhale": {}},
+    )
+    assert upload.import_status == "finished"
+    get_track_from_import_metadata.assert_called_once_with(
+        expected_final_metadata, **expected_forced_values
+    )
+
+
+def test_process_upload_uses_import_metadata_if_valid(factories, mocker):
+    track = factories["music.Track"]()
+    import_metadata = {"title": "hello", "funkwhale": {"foo": "bar"}}
+    upload = factories["music.Upload"](track=None, import_metadata=import_metadata)
+    get_track_from_import_metadata = mocker.patch.object(
+        tasks, "get_track_from_import_metadata", return_value=track
+    )
+    tasks.process_upload(upload_id=upload.pk)
+
+    serializer = tasks.metadata.TrackMetadataSerializer(
+        data=tasks.metadata.Metadata(upload.get_audio_file())
+    )
+    assert serializer.is_valid() is True
+    audio_metadata = serializer.validated_data
+
+    expected_final_metadata = tasks.collections.ChainMap(
+        {"upload_source": None},
+        audio_metadata,
+        {"funkwhale": import_metadata["funkwhale"]},
+    )
+    get_track_from_import_metadata.assert_called_once_with(
+        expected_final_metadata, attributed_to=upload.library.actor, title="hello"
+    )
+
+
+def test_process_upload_skips_import_metadata_if_invalid(factories, mocker):
+    track = factories["music.Track"]()
+    import_metadata = {"title": None, "funkwhale": {"foo": "bar"}}
+    upload = factories["music.Upload"](track=None, import_metadata=import_metadata)
+    get_track_from_import_metadata = mocker.patch.object(
+        tasks, "get_track_from_import_metadata", return_value=track
+    )
+    tasks.process_upload(upload_id=upload.pk)
+
+    serializer = tasks.metadata.TrackMetadataSerializer(
+        data=tasks.metadata.Metadata(upload.get_audio_file())
+    )
+    assert serializer.is_valid() is True
+    audio_metadata = serializer.validated_data
+
+    expected_final_metadata = tasks.collections.ChainMap(
+        {"upload_source": None},
+        audio_metadata,
+        {"funkwhale": import_metadata["funkwhale"]},
+    )
+    get_track_from_import_metadata.assert_called_once_with(
+        expected_final_metadata, attributed_to=upload.library.actor
+    )
@@ -681,10 +681,105 @@ def test_user_can_create_upload(logged_in_api_client, factories, mocker, audio_file
     assert upload.audio_file.read() == audio_file.read()
     assert upload.source == "upload://test"
     assert upload.import_reference == "test"
+    assert upload.import_status == "pending"
     assert upload.track is None
     m.assert_called_once_with(tasks.process_upload.delay, upload_id=upload.pk)


+def test_user_can_create_draft_upload(
+    logged_in_api_client, factories, mocker, audio_file
+):
+    library = factories["music.Library"](actor__user=logged_in_api_client.user)
+    url = reverse("api:v1:uploads-list")
+    m = mocker.patch("funkwhale_api.common.utils.on_commit")
+
+    response = logged_in_api_client.post(
+        url,
+        {
+            "audio_file": audio_file,
+            "source": "upload://test",
+            "import_reference": "test",
+            "import_status": "draft",
+            "library": library.uuid,
+        },
+    )
+
+    assert response.status_code == 201
+
+    upload = library.uploads.latest("id")
+
+    audio_file.seek(0)
+    assert upload.audio_file.read() == audio_file.read()
+    assert upload.source == "upload://test"
+    assert upload.import_reference == "test"
+    assert upload.import_status == "draft"
+    assert upload.track is None
+    m.assert_not_called()
+
+
+def test_user_can_patch_draft_upload(
+    logged_in_api_client, factories, mocker, audio_file
+):
+    actor = logged_in_api_client.user.create_actor()
+    library = factories["music.Library"](actor=actor)
+    upload = factories["music.Upload"](library__actor=actor, import_status="draft")
+    url = reverse("api:v1:uploads-detail", kwargs={"uuid": upload.uuid})
+    m = mocker.patch("funkwhale_api.common.utils.on_commit")
+
+    response = logged_in_api_client.patch(
+        url,
+        {
+            "audio_file": audio_file,
+            "source": "upload://test",
+            "import_reference": "test",
+            "library": library.uuid,
+        },
+    )
+
+    assert response.status_code == 200
+
+    upload.refresh_from_db()
+
+    audio_file.seek(0)
+    assert upload.audio_file.read() == audio_file.read()
+    assert upload.source == "upload://test"
+    assert upload.import_reference == "test"
+    assert upload.import_status == "draft"
+    assert upload.library == library
+    m.assert_not_called()
+
+
+@pytest.mark.parametrize("import_status", ["pending", "errored", "skipped", "finished"])
+def test_user_cannot_patch_non_draft_upload(
+    import_status, logged_in_api_client, factories
+):
+    actor = logged_in_api_client.user.create_actor()
+    upload = factories["music.Upload"](
+        library__actor=actor, import_status=import_status
+    )
+    url = reverse("api:v1:uploads-detail", kwargs={"uuid": upload.uuid})
+    response = logged_in_api_client.patch(url, {"import_reference": "test"})
+
+    assert response.status_code == 404
+
+
+def test_user_can_patch_draft_upload_status_triggers_processing(
+    logged_in_api_client, factories, mocker
+):
+    actor = logged_in_api_client.user.create_actor()
+    upload = factories["music.Upload"](library__actor=actor, import_status="draft")
+    url = reverse("api:v1:uploads-detail", kwargs={"uuid": upload.uuid})
+    m = mocker.patch("funkwhale_api.common.utils.on_commit")
+
+    response = logged_in_api_client.patch(url, {"import_status": "pending"})
+
+    upload.refresh_from_db()
+
+    assert response.status_code == 200
+    assert upload.import_status == "pending"
+    m.assert_called_once_with(tasks.process_upload.delay, upload_id=upload.pk)
+
+
 def test_user_can_list_own_library_follows(factories, logged_in_api_client):
     actor = logged_in_api_client.user.create_actor()
     library = factories["music.Library"](actor=actor)
@@ -1062,3 +1157,17 @@ def test_track_list_exclude_channels(params, expected, factories, logged_in_api_client
 def test_strip_absolute_media_url(media_url, input, expected, settings):
     settings.MEDIA_URL = media_url
     assert views.strip_absolute_media_url(input) == expected
+
+
+def test_get_upload_audio_metadata(logged_in_api_client, factories):
+    actor = logged_in_api_client.user.create_actor()
+    upload = factories["music.Upload"](library__actor=actor)
+    metadata = tasks.metadata.Metadata(upload.get_audio_file())
+    serializer = tasks.metadata.TrackMetadataSerializer(data=metadata)
+    url = reverse("api:v1:uploads-audio-file-metadata", kwargs={"uuid": upload.uuid})
+
+    response = logged_in_api_client.get(url)
+
+    assert response.status_code == 200
+    assert serializer.is_valid(raise_exception=True) is True
+    assert response.data == serializer.validated_data
@@ -204,21 +204,23 @@ def test_user_get_quota_status(factories, preferences, mocker):
     mocker.patch(
         "funkwhale_api.federation.models.Actor.get_current_usage",
        return_value={
-            "total": 10 * 1000 * 1000,
+            "total": 15 * 1000 * 1000,
             "pending": 1 * 1000 * 1000,
             "skipped": 2 * 1000 * 1000,
             "errored": 3 * 1000 * 1000,
             "finished": 4 * 1000 * 1000,
+            "draft": 5 * 1000 * 1000,
         },
     )
     assert user.get_quota_status() == {
         "max": 66,
-        "remaining": 56,
-        "current": 10,
+        "remaining": 51,
+        "current": 15,
         "pending": 1,
         "skipped": 2,
         "errored": 3,
         "finished": 4,
+        "draft": 5,
     }
docs/swagger.yml (245 changed lines)
@@ -849,6 +849,89 @@ paths:
         204:
           $ref: "#/responses/204"

+  /api/v1/channels/:
+    get:
+      summary: List channels
+      tags:
+        - "Uploads and audio content"
+      parameters:
+        - $ref: "#/parameters/PageNumber"
+        - $ref: "#/parameters/PageSize"
+        - $ref: "#/parameters/Scope"
+      responses:
+        200:
+          content:
+            application/json:
+              schema:
+                allOf:
+                  - $ref: "#/definitions/ResultPage"
+                  - type: "object"
+                    properties:
+                      results:
+                        type: "array"
+                        items:
+                          $ref: "#/definitions/Channel"
+    post:
+      summary: Create a new channel
+      tags:
+        - "Uploads and audio content"
+      responses:
+        201:
+          $ref: "#/responses/201"
+        400:
+          $ref: "#/responses/400"
+      requestBody:
+        required: true
+        content:
+          application/json:
+            schema:
+              $ref: "#/definitions/ChannelCreate"
+
+  /api/v1/channels/{uuid}/:
+    parameters:
+      - name: uuid
+        in: path
+        required: true
+        schema:
+          type: "string"
+          format: "uuid"
+    get:
+      summary: Retrieve a channel
+      tags:
+        - "Uploads and audio content"
+      responses:
+        200:
+          content:
+            application/json:
+              schema:
+                $ref: "#/definitions/Channel"
+    post:
+      summary: Update a channel
+      tags:
+        - "Uploads and audio content"
+      requestBody:
+        required: true
+        content:
+          application/json:
+            schema:
+              $ref: "#/definitions/ChannelUpdate"
+      responses:
+        201:
+          content:
+            application/json:
+              schema:
+                $ref: "#/definitions/Channel"
+    delete:
+      summary: Delete a channel and all associated uploads
+      description: |
+        This will delete the channel, all associated uploads, follows, and broadcast
+        the event on the federation.
+      tags:
+        - "Uploads and audio content"
+      responses:
+        204:
+          $ref: "#/responses/204"
+
   /api/v1/uploads/:
     get:
       summary: List owned uploads
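Note (illustrative, not part of the diff): a request against the new channel endpoints could look like the sketch below, using only fields from the `ChannelCreate` schema referenced above; the instance URL, token and field values are invented:

    import requests

    response = requests.post(
        "https://funkwhale.example/api/v1/channels/",
        json={
            "name": "Alice & Bob's podcast",
            "username": "aliceandbob",  # federation handle, cannot be changed later
            "summary": "Weekly conversations about free software",
            "tags": ["podcast", "floss"],
        },
        headers={"Authorization": "Bearer <token>"},  # assumed auth scheme
    )
    response.raise_for_status()  # 201 on success, 400 on validation errors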
@@ -909,6 +992,17 @@ paths:
               audio_file:
                 type: string
                 format: binary
+              import_status:
+                type: string
+                description: "Setting import_status to draft will prevent processing, but allow further modifications to audio and metadata. Once ready, use the PATCH method to set import_status to pending. Defaults to `pending` if unspecified."
+                default: "pending"
+                enum:
+                  - "draft"
+                  - "pending"
+              import_metadata:
+                required: false
+                $ref: "#/definitions/ImportMetadata"
+

   /api/v1/uploads/{uuid}/:
     parameters:
@@ -920,6 +1014,24 @@ paths:
           format: "uuid"
     get:
       summary: Retrieve an upload
+      tags:
+        - "Uploads and audio content"
+      responses:
+        200:
+          content:
+            application/json:
+              schema:
+                $ref: "#/definitions/OwnedUpload"
+    patch:
+      summary: Update a draft upload
+      description: |
+        This will update a draft upload, before it is processed.
+
+        All fields supported for `POST /api/v1/uploads` can be updated here.
+
+        Setting `import_status` to `pending` will trigger processing, and make future
+        modifications impossible.
+
       tags:
         - "Uploads and audio content"
       responses:
@@ -939,6 +1051,26 @@ paths:
         204:
           $ref: "#/responses/204"

+  /api/v1/uploads/{uuid}/audio-file-metadata:
+    parameters:
+      - name: uuid
+        in: path
+        required: true
+        schema:
+          type: "string"
+          format: "uuid"
+    get:
+      summary: Retrieve the tags embedded in the audio file
+      tags:
+        - "Uploads and audio content"
+      responses:
+        200:
+          content:
+            application/json:
+              schema:
+                type: "object"
+                properties: []
+
   /api/v1/favorites/tracks/:
     get:
       tags:
@@ -1122,6 +1254,17 @@ parameters:
     required: false
     type: "boolean"

+  Scope:
+    name: "scope"
+    in: "query"
+    default: "all"
+    description: "Limit the results relative to the user making the request. `me` restricts to owned objects, `all` applies no restriction."
+    schema:
+      required: false
+      type: "string"
+      enum:
+        - "me"
+        - "all"
+
 responses:
   200:
     description: Success
@@ -1174,11 +1317,13 @@ properties:
     type: string
     example: "finished"
     enum:
+      - "draft"
       - "pending"
       - "finished"
      - "errored"
       - "skipped"
     description: |
+      * `draft`: waiting for further modifications from the owner
       * `pending`: waiting to be processed by the server
       * `finished`: successfully processed by the server
       * `errored`: couldn't be processed by the server (e.g because of a tagging issue)
@@ -1190,6 +1335,13 @@ properties:
       - "ogg"
       - "mp3"

+  tags:
+    type: array
+    description: A list of hashtags associated with a resource
+    items:
+      type: string
+      example: "Rock"
+
 definitions:
   OAuthApplication:
     type: "object"
@@ -1404,6 +1556,57 @@ definitions:
         type: "integer"
         format: "int64"
         example: 16
+
+  ChannelCreate:
+    type: "object"
+    properties:
+      name:
+        type: "string"
+        example: "A short, public name for the channel"
+        maxLength: 255
+      username:
+        type: "string"
+        example: "aliceandbob"
+        description: "The username to associate with the channel, for use over federation. This cannot be changed afterwards."
+      summary:
+        required: false
+        type: "string"
+        example: "A short, public description for the channel"
+        maxLength: 500
+      tags:
+        $ref: "#/properties/tags"
+
+  ChannelUpdate:
+    type: "object"
+    properties:
+      name:
+        type: "string"
+        example: "A short, public name for the channel"
+        maxLength: 255
+      summary:
+        required: false
+        type: "string"
+        example: "A short, public description for the channel"
+        maxLength: 500
+      tags:
+        $ref: "#/properties/tags"
+
+  Channel:
+    type: "object"
+    properties:
+      uuid:
+        type: "string"
+        format: "uuid"
+      creation_date:
+        $ref: "#/properties/creation_date"
+      artist:
+        $ref: "#/definitions/BaseArtist"
+      attributed_to:
+        $ref: "#/definitions/Actor"
+        description: User account owning the channel
+      actor:
+        $ref: "#/definitions/Actor"
+        description: Actor representing the channel over federation
   Library:
     type: "object"
     properties:
@@ -1601,8 +1804,8 @@ definitions:
   OwnedLibraryCreate:
     type: "object"
     properties:
-      password:
-        type: "name"
+      name:
+        type: "string"
         example: "My new library"
       description:
         required: false
@@ -1657,6 +1860,39 @@ definitions:
       import_reference:
         type: "string"
         example: "Import launched via web UI on 03/18"
+      import_metadata:
+        $ref: "#/definitions/ImportMetadata"
+
+  ImportMetadata:
+    type: "object"
+    description: "Import metadata to override values from ID3/embedded audio tags"
+    properties:
+      title:
+        type: "string"
+        example: "My Track"
+        required: true
+      mbid:
+        $ref: "#/properties/mbid"
+        required: false
+      copyright:
+        type: "string"
+        example: "Alice, 2018"
+        description: "Copyright information"
+        required: false
+      license:
+        type: "string"
+        example: "cc-by-sa-4.0"
+        required: false
+        description: A license code, as returned by /api/v1/licenses
+      tags:
+        $ref: "#/properties/tags"
+        required: false
+      position:
+        description: "Position of the track in the album or channel"
+        type: "number"
+        minimum: 1
+        example: 1
+
   TrackFavorite:
     type: "object"
     properties:
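Note (illustrative, not part of the diff): the `ImportMetadata` definition mirrors the API-side `ImportMetadataSerializer`. An example payload it accepts, reusing the values from the serializer test added earlier in this commit:

    import_metadata = {
        "title": "hello",
        "mbid": "3220fd02-5237-4952-8394-b7e64b0204a6",
        "tags": ["politics", "gender"],
        "license": "cc-by-sa-4.0",  # a license code, as returned by /api/v1/licenses
        "copyright": "My work",
        "position": 42,
    }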
@@ -1773,6 +2009,11 @@ definitions:
         format: "int64"
         description: Storage space occupied by uploads with "pending" import status, in MB
         example: 15
+      draft:
+        type: "integer"
+        format: "int64"
+        description: Storage space occupied by uploads with "draft" import status, in MB
+        example: 8
       errored:
         type: "integer"
         format: "int64"