refactor: upgrade code to >=python3.7 (pre-commit)
Part-of: <https://dev.funkwhale.audio/funkwhale/funkwhale/-/merge_requests/2189>
Parent: 7768ea77a4
Commit: 8d9946d35a
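The hunks below all apply the same handful of pyupgrade-style rewrites for Python >= 3.7: `str.format()` and `%`-interpolation become f-strings, `class X(object):` becomes `class X:`, two-argument `super()` becomes zero-argument `super()`, and Python 2 compatibility boilerplate is deleted. A minimal before/after sketch (the class and names here are hypothetical, for illustration only, not taken from the patch):

# Before: Python 2-compatible idioms
class Greeter(object):                         # explicit object base class
    def __init__(self, name):
        super(Greeter, self).__init__()        # two-argument super()
        self.name = name

    def greet(self):
        return "Hello, {}!".format(self.name)  # str.format() interpolation

# After: Python >= 3.7 idioms, as applied by this commit
class Greeter:                                 # implicit object base class
    def __init__(self, name):
        super().__init__()                     # zero-argument super()
        self.name = name

    def greet(self):
        return f"Hello, {self.name}!"          # f-string interpolation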
@@ -28,7 +28,7 @@ _filters = {}
 _hooks = {}
 
 
-class PluginCache(object):
+class PluginCache:
     def __init__(self, prefix):
         self.prefix = prefix
 

@@ -81,7 +81,7 @@ def load_settings(name, settings):
         "text": django_settings.ENV,
     }
     values = {}
-    prefix = "FUNKWHALE_PLUGIN_{}".format(name.upper())
+    prefix = f"FUNKWHALE_PLUGIN_{name.upper()}"
     for s in settings:
         key = "_".join([prefix, s["name"].upper()])
         value = mapping[s["type"]](key, default=s.get("default", None))

@@ -262,7 +262,7 @@ def get_serializer_from_conf_template(conf, source=False, user=None):
             self.fields["library"] = LibraryField(actor=user.actor)
 
     for vname, v in validators.items():
-        setattr(Serializer, "validate_{}".format(vname), v)
+        setattr(Serializer, f"validate_{vname}", v)
     return Serializer
@@ -1 +0,0 @@
-# -*- coding: utf-8 -*-

@@ -1,6 +1,3 @@
-# -*- coding: utf-8 -*-
-from __future__ import absolute_import, unicode_literals
-
 import logging.config
 import sys
 from collections import OrderedDict

@@ -170,7 +167,7 @@ else:
 
 FUNKWHALE_PROTOCOL = FUNKWHALE_PROTOCOL.lower()
 FUNKWHALE_HOSTNAME = FUNKWHALE_HOSTNAME.lower()
-FUNKWHALE_URL = "{}://{}".format(FUNKWHALE_PROTOCOL, FUNKWHALE_HOSTNAME)
+FUNKWHALE_URL = f"{FUNKWHALE_PROTOCOL}://{FUNKWHALE_HOSTNAME}"
 FUNKWHALE_SPA_HTML_ROOT = env(
     "FUNKWHALE_SPA_HTML_ROOT", default=FUNKWHALE_URL + "/front/"
 )

@@ -336,7 +333,7 @@ FIXTURE_DIRS = (str(APPS_DIR.path("fixtures")),)
 # EMAIL
 # ------------------------------------------------------------------------------
 DEFAULT_FROM_EMAIL = env(
-    "DEFAULT_FROM_EMAIL", default="Funkwhale <noreply@{}>".format(FUNKWHALE_HOSTNAME)
+    "DEFAULT_FROM_EMAIL", default=f"Funkwhale <noreply@{FUNKWHALE_HOSTNAME}>"
 )
 """
 The name and email address used to send system emails.
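The `# -*- coding: utf-8 -*-` and `from __future__` deletions in the settings hunks above are safe no-ops on Python 3: UTF-8 is the default source encoding (PEP 3120), and absolute imports plus unicode literals are the only behavior available, so the corresponding `__future__` flags do nothing. A quick way to confirm this, as a sketch:

import __future__

# Both features report as mandatory since the 3.0 release, i.e. always on:
print(__future__.absolute_import.getMandatoryRelease())
print(__future__.unicode_literals.getMandatoryRelease())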
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
 """
 Local settings
 

@@ -102,7 +101,7 @@ CSRF_TRUSTED_ORIGINS = [o for o in ALLOWED_HOSTS]
 REST_FRAMEWORK["DEFAULT_SCHEMA_CLASS"] = "funkwhale_api.schema.CustomAutoSchema"
 SPECTACULAR_SETTINGS = {
     "TITLE": "Funkwhale API",
-    "DESCRIPTION": open("Readme.md", "r").read(),
+    "DESCRIPTION": open("Readme.md").read(),
     "VERSION": funkwhale_version,
     "SCHEMA_PATH_PREFIX": "/api/(v[0-9])?",
     "OAUTH_FLOWS": ["authorizationCode"],

@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
 """
 Production Configurations
 

@@ -9,7 +8,6 @@ Production Configurations
 
 """
-from __future__ import absolute_import, unicode_literals
 
 from .common import *  # noqa
 
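One hunk above drops the explicit "r" argument from `open("Readme.md", "r")`. This relies on "r" (text read) being the default mode of the built-in `open()`, so the change is behavior-preserving; a minimal sketch:

# equivalent calls; text-read mode is the default
with open("Readme.md") as f:
    description = f.read()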
@@ -1,6 +1,3 @@
-# -*- coding: utf-8 -*-
-from __future__ import unicode_literals
-
 from django.conf import settings
 from django.conf.urls import url
 from django.conf.urls.static import static

@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
 __version__ = "1.2.8"
 __version_info__ = tuple(
     [

@@ -7,7 +7,7 @@ class ActivityConfig(AppConfig):
     name = "funkwhale_api.activity"
 
     def ready(self):
-        super(ActivityConfig, self).ready()
+        super().ready()
 
         app_names = [app.name for app in apps.app_configs.values()]
         record.registry.autodiscover(app_names)

@@ -17,7 +17,7 @@ def combined_recent(limit, **kwargs):
     _qs_list = list(querysets.values())
     union_qs = _qs_list[0].union(*_qs_list[1:])
     records = []
-    for row in union_qs.order_by("-{}".format(datetime_field))[:limit]:
+    for row in union_qs.order_by(f"-{datetime_field}")[:limit]:
         records.append(
             {"type": row["__type"], "when": row[datetime_field], "pk": row["pk"]}
         )
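The `super()` rewrite in the ActivityConfig hunk above is more than sugar: since PEP 3135, the zero-argument form resolves the enclosing class through the compiler-provided `__class__` cell, so it keeps working if the class is renamed. A self-contained sketch with hypothetical names:

class Base:
    def ready(self):
        print("base ready")

class Config(Base):
    def ready(self):
        # resolves to Base.ready via the implicit __class__ cell
        super().ready()

Config().ready()  # prints "base ready"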
@@ -15,7 +15,7 @@ def set_actor(o):
 
 
 def get_rss_channel_name():
-    return "rssfeed-{}".format(uuid.uuid4())
+    return f"rssfeed-{uuid.uuid4()}"
 
 
 @registry.register

@@ -93,7 +93,7 @@ class Channel(models.Model):
             suffix = self.actor.preferred_username
         else:
             suffix = self.actor.full_username
-        return federation_utils.full_url("/channels/{}".format(suffix))
+        return federation_utils.full_url(f"/channels/{suffix}")
 
     def get_rss_url(self):
         if not self.artist.is_local or self.is_external_rss:

@@ -62,7 +62,7 @@ class ChannelMetadataSerializer(serializers.Serializer):
 
         if child not in categories.ITUNES_CATEGORIES[parent]:
             raise serializers.ValidationError(
-                '"{}" is not a valid subcategory for "{}"'.format(child, parent)
+                f'"{child}" is not a valid subcategory for "{parent}"'
             )
 
         return child

@@ -319,7 +319,7 @@ def retrieve_feed(url):
     except requests.exceptions.HTTPError as e:
         if e.response:
             raise FeedFetchException(
-                "Error while fetching feed: HTTP {}".format(e.response.status_code)
+                f"Error while fetching feed: HTTP {e.response.status_code}"
             )
         raise FeedFetchException("Error while fetching feed: unknown error")
     except requests.exceptions.Timeout:

@@ -327,9 +327,9 @@ def retrieve_feed(url):
     except requests.exceptions.ConnectionError:
         raise FeedFetchException("Error while fetching feed: connection error")
     except requests.RequestException as e:
-        raise FeedFetchException("Error while fetching feed: {}".format(e))
+        raise FeedFetchException(f"Error while fetching feed: {e}")
     except Exception as e:
-        raise FeedFetchException("Error while fetching feed: {}".format(e))
+        raise FeedFetchException(f"Error while fetching feed: {e}")
 
     return response
 

@@ -348,7 +348,7 @@ def get_channel_from_rss_url(url, raise_exception=False):
     parsed_feed = feedparser.parse(response.text)
     serializer = RssFeedSerializer(data=parsed_feed["feed"])
     if not serializer.is_valid(raise_exception=raise_exception):
-        raise FeedFetchException("Invalid xml content: {}".format(serializer.errors))
+        raise FeedFetchException(f"Invalid xml content: {serializer.errors}")
 
     # second mrf check with validated data
     urls_to_check = set()

@@ -516,7 +516,7 @@ class RssFeedSerializer(serializers.Serializer):
         else:
             artist_kwargs = {"pk": None}
             actor_kwargs = {"pk": None}
-            preferred_username = "rssfeed-{}".format(uuid.uuid4())
+            preferred_username = f"rssfeed-{uuid.uuid4()}"
             actor_defaults = {
                 "preferred_username": preferred_username,
                 "type": "Application",

@@ -594,7 +594,7 @@ class ItunesDurationField(serializers.CharField):
             try:
                 int_parts.append(int(part))
             except (ValueError, TypeError):
-                raise serializers.ValidationError("Invalid duration {}".format(v))
+                raise serializers.ValidationError(f"Invalid duration {v}")
 
         if len(int_parts) == 2:
             hours = 0

@@ -602,7 +602,7 @@ class ItunesDurationField(serializers.CharField):
         elif len(int_parts) == 3:
             hours, minutes, seconds = int_parts
         else:
-            raise serializers.ValidationError("Invalid duration {}".format(v))
+            raise serializers.ValidationError(f"Invalid duration {v}")
 
         return (hours * 3600) + (minutes * 60) + seconds
 

@@ -782,8 +782,8 @@ class RssFeedItemSerializer(serializers.Serializer):
         # update or create, so we restore the cache by hand
         if existing_track:
             for field in ["attachment_cover", "description"]:
-                cached_id_value = getattr(existing_track, "{}_id".format(field))
-                new_id_value = getattr(track, "{}_id".format(field))
+                cached_id_value = getattr(existing_track, f"{field}_id")
+                new_id_value = getattr(track, f"{field}_id")
                 if new_id_value and cached_id_value == new_id_value:
                     setattr(track, field, getattr(existing_track, field))
 
@@ -61,7 +61,7 @@ def channel_detail(query, redirect_to_ap):
             "rel": "alternate",
             "type": "application/rss+xml",
             "href": obj.get_rss_url(),
-            "title": "{} - RSS Podcast Feed".format(obj.artist.name),
+            "title": f"{obj.artist.name} - RSS Podcast Feed",
         },
     )
 

@@ -73,7 +73,7 @@ def channel_detail(query, redirect_to_ap):
             "type": "application/json+oembed",
             "href": (
                 utils.join_url(settings.FUNKWHALE_URL, reverse("api:v1:oembed"))
-                + "?format=json&url={}".format(urllib.parse.quote_plus(obj_url))
+                + f"?format=json&url={urllib.parse.quote_plus(obj_url)}"
             ),
         }
     )

@@ -31,7 +31,7 @@ ARTIST_PREFETCH_QS = (
 )
 
 
-class ChannelsMixin(object):
+class ChannelsMixin:
     def dispatch(self, request, *args, **kwargs):
         if not preferences.get("audio__channels_enabled"):
             return http.HttpResponse(status=405)
@@ -20,7 +20,7 @@ def handler_add_tags_from_tracks(
     if result is None:
         click.echo(" No relevant tags found")
     else:
-        click.echo(" Relevant tags added to {} objects".format(len(result)))
+        click.echo(f" Relevant tags added to {len(result)} objects")
 
 
 @base.cli.group()

@@ -16,7 +16,7 @@ def invoke():
     except ValidationError as e:
         click.secho("Invalid data:", fg="red")
         for field, errors in e.detail.items():
-            click.secho(" {}:".format(field), fg="red")
+            click.secho(f" {field}:", fg="red")
             for error in errors:
-                click.secho(" - {}".format(error), fg="red")
+                click.secho(f" - {error}", fg="red")
         sys.exit(1)

@@ -39,19 +39,15 @@ def generate_thumbnails(delete):
         (Attachment, "file", "attachment_square"),
     ]
     for model, attribute, key_set in MODELS:
-        click.echo(
-            "Generating thumbnails for {}.{}…".format(model._meta.label, attribute)
-        )
-        qs = model.objects.exclude(**{"{}__isnull".format(attribute): True})
+        click.echo(f"Generating thumbnails for {model._meta.label}.{attribute}…")
+        qs = model.objects.exclude(**{f"{attribute}__isnull": True})
         qs = qs.exclude(**{attribute: ""})
         cache_key = "*{}{}*".format(
             settings.MEDIA_URL, vif_settings.VERSATILEIMAGEFIELD_SIZED_DIRNAME
         )
         entries = cache.keys(cache_key)
         if entries:
-            click.echo(
-                " Clearing {} cache entries: {}…".format(len(entries), cache_key)
-            )
+            click.echo(f" Clearing {len(entries)} cache entries: {cache_key}…")
             for keys in common_utils.batch(iter(entries)):
                 cache.delete_many(keys)
         warmer = VersatileImageFieldWarmer(

@@ -62,6 +58,4 @@ def generate_thumbnails(delete):
         )
         click.echo(" Creating images")
         num_created, failed_to_create = warmer.warm()
-        click.echo(
-            " {} created, {} in error".format(num_created, len(failed_to_create))
-        )
+        click.echo(f" {num_created} created, {len(failed_to_create)} in error")
@@ -7,7 +7,7 @@ from funkwhale_api.users import models, serializers, tasks
 from . import base, utils
 
 
-class FakeRequest(object):
+class FakeRequest:
     def __init__(self, session={}):
         self.session = session
 

@@ -44,7 +44,7 @@ def handler_create_user(
     for permission in permissions:
         if permission in models.PERMISSIONS:
             utils.logger.debug("Setting %s permission to True", permission)
-            setattr(user, "permission_{}".format(permission), True)
+            setattr(user, f"permission_{permission}", True)
         else:
             utils.logger.warn("Unknown permission %s", permission)
     utils.logger.debug("Creating actor…")

@@ -56,7 +56,7 @@ def handler_create_user(
 @transaction.atomic
 def handler_delete_user(usernames, soft=True):
     for username in usernames:
-        click.echo("Deleting {}…".format(username))
+        click.echo(f"Deleting {username}…")
         actor = None
         user = None
         try:

@@ -178,9 +178,9 @@ def create(username, password, email, superuser, staff, permission, upload_quota
         permissions=permission,
         upload_quota=upload_quota,
     )
-    click.echo("User {} created!".format(user.username))
+    click.echo(f"User {user.username} created!")
    if generated_password:
-        click.echo(" Generated password: {}".format(generated_password))
+        click.echo(f" Generated password: {generated_password}")
 
 
 @base.delete_command(group=users, id_var="username")
@@ -16,7 +16,7 @@ class UnverifiedEmail(Exception):
 
 def resend_confirmation_email(request, user):
     THROTTLE_DELAY = 500
-    cache_key = "auth:resent-email-confirmation:{}".format(user.pk)
+    cache_key = f"auth:resent-email-confirmation:{user.pk}"
     if cache.get(cache_key):
         return False
 

@@ -34,7 +34,7 @@ class OAuth2Authentication(BaseOAuth2Authentication):
             resend_confirmation_email(request, e.user)
 
 
-class ApplicationTokenAuthentication(object):
+class ApplicationTokenAuthentication:
     def authenticate(self, request):
         try:
             header = request.headers["Authorization"]
@@ -24,9 +24,9 @@ def privacy_level_query(user, lookup_field="privacy_level", user_field="user"):
     if user.is_anonymous:
         return models.Q(**{lookup_field: "everyone"})
 
-    return models.Q(
-        **{"{}__in".format(lookup_field): ["instance", "everyone"]}
-    ) | models.Q(**{lookup_field: "me", user_field: user})
+    return models.Q(**{f"{lookup_field}__in": ["instance", "everyone"]}) | models.Q(
+        **{lookup_field: "me", user_field: user}
+    )
 
 
 class SearchFilter(django_filters.CharFilter):

@@ -97,7 +97,7 @@ def get_generic_filter_query(value, relation_name, choices):
             obj = related_queryset.get(obj_query)
         except related_queryset.model.DoesNotExist:
             raise forms.ValidationError("Invalid object")
-        filter_query &= models.Q(**{"{}_id".format(relation_name): obj.id})
+        filter_query &= models.Q(**{f"{relation_name}_id": obj.id})
 
     return filter_query
 

@@ -163,7 +163,7 @@ class GenericRelation(serializers.JSONField):
             id_value = v[id_attr]
             id_value = id_field.to_internal_value(id_value)
         except (TypeError, KeyError, serializers.ValidationError):
-            raise serializers.ValidationError("Invalid {}".format(id_attr))
+            raise serializers.ValidationError(f"Invalid {id_attr}")
 
         query_getter = conf.get(
             "get_query", lambda attr, value: models.Q(**{attr: value})
@@ -7,7 +7,7 @@ from drf_spectacular.utils import extend_schema_field
 from . import fields, models, search, utils
 
 
-class NoneObject(object):
+class NoneObject:
     def __eq__(self, other):
         return other.__class__ == NoneObject
 

@@ -46,7 +46,7 @@ class CoerceChoiceField(forms.ChoiceField):
         try:
             return [b for a, b in self.choices if v == a][0]
         except IndexError:
-            raise forms.ValidationError("Invalid value {}".format(value))
+            raise forms.ValidationError(f"Invalid value {value}")
 
 
 @extend_schema_field(bool)

@@ -63,9 +63,7 @@ class NullBooleanFilter(filters.ChoiceFilter):
             return qs
         if value == NONE:
             value = None
-        qs = self.get_method(qs)(
-            **{"%s__%s" % (self.field_name, self.lookup_expr): value}
-        )
+        qs = self.get_method(qs)(**{f"{self.field_name}__{self.lookup_expr}": value})
         return qs.distinct() if self.distinct else qs
 
 

@@ -217,7 +215,7 @@ class ActorScopeFilter(filters.CharFilter):
             if not self.library_field:
                 predicate = "pk__in"
             else:
-                predicate = "{}__in".format(self.library_field)
+                predicate = f"{self.library_field}__in"
             return Q(**{predicate: followed_libraries})
 
         elif scope.startswith("actor:"):

@@ -234,7 +232,7 @@ class ActorScopeFilter(filters.CharFilter):
             return Q(**{self.actor_field: actor})
         elif scope.startswith("domain:"):
             domain = scope.split("domain:", 1)[1]
-            return Q(**{"{}__domain_id".format(self.actor_field): domain})
+            return Q(**{f"{self.actor_field}__domain_id": domain})
         else:
             raise EmptyQuerySet()
 
@@ -50,7 +50,7 @@ class Command(BaseCommand):
             self.stdout.write(self.style.SUCCESS(name))
             self.stdout.write("")
             for line in script["help"].splitlines():
-                self.stdout.write(" {}".format(line))
+                self.stdout.write(f" {line}")
             self.stdout.write("")
 
     def get_scripts(self):
@@ -78,7 +78,7 @@ def serve_spa(request):
         # We add the style add the end of the body to ensure it has the highest
         # priority (since it will come after other stylesheets)
         body, tail = tail.split("</body>", 1)
-        css = "<style>{}</style>".format(css)
+        css = f"<style>{css}</style>"
         tail = body + "\n" + css + "\n</body>" + tail
 
     # set a csrf token so that visitor can login / query API if needed

@@ -93,13 +93,13 @@ TITLE_REGEX = re.compile(r"<title>.*</title>")
 
 
 def replace_manifest_url(head, new_url):
-    replacement = '<link rel=manifest href="{}">'.format(new_url)
+    replacement = f'<link rel=manifest href="{new_url}">'
     head = MANIFEST_LINK_REGEX.sub(replacement, head)
     return head
 
 
 def replace_title(head, new_title):
-    replacement = "<title>{}</title>".format(html.escape(new_title))
+    replacement = f"<title>{html.escape(new_title)}</title>"
     head = TITLE_REGEX.sub(replacement, head)
     return head
 

@@ -117,7 +117,7 @@ def get_spa_file(spa_url, name):
         # we try to open a local file
         with open(path, "rb") as f:
             return f.read().decode("utf-8")
-    cache_key = "spa-file:{}:{}".format(spa_url, name)
+    cache_key = f"spa-file:{spa_url}:{name}"
     cached = caches["local"].get(cache_key)
     if cached:
         return cached

@@ -170,11 +170,7 @@ def render_tags(tags):
         yield "<{tag} {attrs} />".format(
             tag=tag.pop("tag"),
             attrs=" ".join(
-                [
-                    '{}="{}"'.format(a, html.escape(str(v)))
-                    for a, v in sorted(tag.items())
-                    if v
-                ]
+                [f'{a}="{html.escape(str(v))}"' for a, v in sorted(tag.items()) if v]
             ),
         )
 
@@ -3,7 +3,7 @@ from django.shortcuts import get_object_or_404
 from rest_framework import serializers
 
 
-class MultipleLookupDetailMixin(object):
+class MultipleLookupDetailMixin:
     lookup_value_regex = "[^/]+"
     lookup_field = "composite"
 
@@ -36,7 +36,7 @@ class NotEqual(Lookup):
         lhs, lhs_params = self.process_lhs(compiler, connection)
         rhs, rhs_params = self.process_rhs(compiler, connection)
         params = lhs_params + rhs_params
-        return "%s <> %s" % (lhs, rhs), params
+        return f"{lhs} <> {rhs}", params
 
 
 class NullsLastSQLCompiler(SQLCompiler):

@@ -77,8 +77,8 @@ class NullsLastQuerySet(models.QuerySet):
 class LocalFromFidQuerySet:
     def local(self, include=True):
         host = settings.FEDERATION_HOSTNAME
-        query = models.Q(fid__startswith="http://{}/".format(host)) | models.Q(
-            fid__startswith="https://{}/".format(host)
+        query = models.Q(fid__startswith=f"http://{host}/") | models.Q(
+            fid__startswith=f"https://{host}/"
         )
         if include:
             return self.filter(query)

@@ -362,7 +362,7 @@ CONTENT_FKS = {
 def remove_attached_content(sender, instance, **kwargs):
     fk_fields = CONTENT_FKS.get(instance._meta.label, [])
     for field in fk_fields:
-        if getattr(instance, "{}_id".format(field)):
+        if getattr(instance, f"{field}_id"):
             try:
                 getattr(instance, field).delete()
             except Content.DoesNotExist:
@@ -43,7 +43,7 @@ class Registry(persisting_theory.Registry):
 
     def has_perm(self, perm, type, obj, actor):
         if perm not in ["approve", "suggest"]:
-            raise ValueError("Invalid permission {}".format(perm))
+            raise ValueError(f"Invalid permission {perm}")
         conf = self.get_conf(type, obj)
         checker = conf["perm_checkers"].get(perm)
         if not checker:

@@ -54,7 +54,7 @@ class Registry(persisting_theory.Registry):
         try:
             type_conf = self[type]
         except KeyError:
-            raise ConfNotFound("{} is not a registered mutation".format(type))
+            raise ConfNotFound(f"{type} is not a registered mutation")
 
         try:
             conf = type_conf[obj.__class__]

@@ -63,7 +63,7 @@ class Registry(persisting_theory.Registry):
             conf = type_conf[None]
         except KeyError:
             raise ConfNotFound(
-                "No mutation configuration found for {}".format(obj.__class__)
+                f"No mutation configuration found for {obj.__class__}"
             )
         return conf
 
@@ -7,7 +7,7 @@ from dynamic_preferences import serializers, types
 from dynamic_preferences.registries import global_preferences_registry
 
 
-class DefaultFromSettingMixin(object):
+class DefaultFromSettingMixin:
     def get_default(self):
         return getattr(settings, self.setting)
 

@@ -38,7 +38,7 @@ class StringListSerializer(serializers.BaseSerializer):
 
         if type(value) not in [list, tuple]:
             raise cls.exception(
-                "Cannot serialize, value {} is not a list or a tuple".format(value)
+                f"Cannot serialize, value {value} is not a list or a tuple"
             )
 
         if cls.sort:

@@ -57,7 +57,7 @@ class StringListPreference(types.BasePreferenceType):
     field_class = forms.MultipleChoiceField
 
     def get_api_additional_data(self):
-        d = super(StringListPreference, self).get_api_additional_data()
+        d = super().get_api_additional_data()
         d["choices"] = self.get("choices")
         return d
 

@@ -72,14 +72,14 @@ class JSONSerializer(serializers.BaseSerializer):
             data_serializer = cls.data_serializer_class(data=value)
             if not data_serializer.is_valid():
                 raise cls.exception(
-                    "{} is not a valid value: {}".format(value, data_serializer.errors)
+                    f"{value} is not a valid value: {data_serializer.errors}"
                 )
             value = data_serializer.validated_data
         try:
             return json.dumps(value, sort_keys=True)
         except TypeError:
             raise cls.exception(
-                "Cannot serialize, value {} is not JSON serializable".format(value)
+                f"Cannot serialize, value {value} is not JSON serializable"
            )
 
     @classmethod
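As the JSONSerializer hunk above shows, f-strings accept arbitrary expressions, so chained attribute access such as `data_serializer.errors` moves straight into the literal. A self-contained sketch (names hypothetical):

errors = {"field": ["required"]}
value = 42
# the expressions inside the braces are evaluated, then str()-formatted
message = f"{value} is not a valid value: {errors}"
assert message == "42 is not a valid value: {'field': ['required']}"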
@@ -9,15 +9,13 @@ from funkwhale_api.users.models import User, create_actor
 def main(command, **kwargs):
     qs = User.objects.filter(actor__isnull=True).order_by("username")
     total = len(qs)
-    command.stdout.write("{} users found without actors".format(total))
+    command.stdout.write(f"{total} users found without actors")
     for i, user in enumerate(qs):
-        command.stdout.write(
-            "{}/{} creating actor for {}".format(i + 1, total, user.username)
-        )
+        command.stdout.write(f"{i + 1}/{total} creating actor for {user.username}")
         try:
             user.actor = create_actor(user)
         except IntegrityError as e:
             # somehow, an actor with the the url exists in the database
-            command.stderr.write("Error while creating actor: {}".format(str(e)))
+            command.stderr.write(f"Error while creating actor: {str(e)}")
             continue
         user.save(update_fields=["actor"])
@@ -13,7 +13,7 @@ MODELS = [
 
 def main(command, **kwargs):
     for model, attribute, key_set in MODELS:
-        qs = model.objects.exclude(**{"{}__isnull".format(attribute): True})
+        qs = model.objects.exclude(**{f"{attribute}__isnull": True})
         qs = qs.exclude(**{attribute: ""})
         warmer = VersatileImageFieldWarmer(
             instance_or_queryset=qs,

@@ -21,10 +21,8 @@ def main(command, **kwargs):
             image_attr=attribute,
             verbose=True,
         )
-        command.stdout.write(
-            "Creating images for {} / {}".format(model.__name__, attribute)
-        )
+        command.stdout.write(f"Creating images for {model.__name__} / {attribute}")
         num_created, failed_to_create = warmer.warm()
         command.stdout.write(
-            " {} created, {} in error".format(num_created, len(failed_to_create))
+            f" {num_created} created, {len(failed_to_create)} in error"
         )
@@ -10,5 +10,5 @@ def main(command, **kwargs):
         source__startswith="http", source__contains="/federation/music/file/"
     ).exclude(source__contains="youtube")
     total = queryset.count()
-    command.stdout.write("{} uploads found".format(total))
+    command.stdout.write(f"{total} uploads found")
     queryset.delete()

@@ -23,6 +23,6 @@ def main(command, **kwargs):
     total = users.count()
 
     command.stdout.write(
-        "Updating {} users with {} permission...".format(total, user_permission)
+        f"Updating {total} users with {user_permission} permission..."
     )
-    users.update(**{"permission_{}".format(user_permission): True})
+    users.update(**{f"permission_{user_permission}": True})
@@ -36,9 +36,7 @@ def create_libraries(open_api, stdout):
         )
         libraries_by_user[library.actor.user.pk] = library.pk
         if created:
-            stdout.write(
-                " * Created library {} for user {}".format(library.pk, a.user.pk)
-            )
+            stdout.write(f" * Created library {library.pk} for user {a.user.pk}")
         else:
             stdout.write(
                 " * Found existing library {} for user {}".format(

@@ -60,13 +58,9 @@ def update_uploads(libraries_by_user, stdout):
         )
         total = candidates.update(library=library_id, import_status="finished")
         if total:
-            stdout.write(
-                " * Assigned {} uploads to user {}'s library".format(total, user_id)
-            )
+            stdout.write(f" * Assigned {total} uploads to user {user_id}'s library")
         else:
-            stdout.write(
-                " * No uploads to assign to user {}'s library".format(user_id)
-            )
+            stdout.write(f" * No uploads to assign to user {user_id}'s library")
 
 
 def update_orphan_uploads(open_api, stdout):

@@ -105,14 +99,12 @@ def update_orphan_uploads(open_api, stdout):
 def set_fid(queryset, path, stdout):
     model = queryset.model._meta.label
     qs = queryset.filter(fid=None)
-    base_url = "{}{}".format(settings.FUNKWHALE_URL, path)
-    stdout.write(
-        "* Assigning federation ids to {} entries (path: {})".format(model, base_url)
-    )
+    base_url = f"{settings.FUNKWHALE_URL}{path}"
+    stdout.write(f"* Assigning federation ids to {model} entries (path: {base_url})")
     new_fid = functions.Concat(Value(base_url), F("uuid"), output_field=CharField())
     total = qs.update(fid=new_fid)
 
-    stdout.write(" * {} entries updated".format(total))
+    stdout.write(f" * {total} entries updated")
 
 
 def update_shared_inbox_url(stdout):

@@ -123,16 +115,16 @@ def update_shared_inbox_url(stdout):
 
 
 def generate_actor_urls(part, stdout):
-    field = "{}_url".format(part)
-    stdout.write("* Update {} for local actors...".format(field))
+    field = f"{part}_url"
+    stdout.write(f"* Update {field} for local actors...")
 
     queryset = federation_models.Actor.objects.local().filter(**{field: None})
-    base_url = "{}/federation/actors/".format(settings.FUNKWHALE_URL)
+    base_url = f"{settings.FUNKWHALE_URL}/federation/actors/"
 
     new_field = functions.Concat(
         Value(base_url),
         F("preferred_username"),
-        Value("/{}".format(part)),
+        Value(f"/{part}"),
         output_field=CharField(),
     )
 
@@ -72,7 +72,7 @@ def get_fts_query(query_string, fts_fields=["body_text"], model=None):
     else:
         query_string = remove_chars(query_string, ['"', "&", "(", ")", "!", "'"])
         parts = query_string.replace(":", "").split(" ")
-        parts = ["{}:*".format(p) for p in parts if p]
+        parts = [f"{p}:*" for p in parts if p]
         if not parts:
             return Q(pk=None)
 

@@ -97,7 +97,7 @@ def get_fts_query(query_string, fts_fields=["body_text"], model=None):
                 )
             }
         ).values_list("pk", flat=True)
-        new_query = Q(**{"{}__in".format(fk_field_name): list(subquery)})
+        new_query = Q(**{f"{fk_field_name}__in": list(subquery)})
     else:
         new_query = Q(
             **{

@@ -180,7 +180,7 @@ class SearchConfig:
             except KeyError:
                 # no cleaning to apply
                 value = token["value"]
-            q = Q(**{"{}__icontains".format(to): value})
+            q = Q(**{f"{to}__icontains": value})
             if not specific_field_query:
                 specific_field_query = q
             else:
@@ -82,14 +82,14 @@ class RelatedField(serializers.RelatedField):
         )
 
 
-class Action(object):
+class Action:
     def __init__(self, name, allow_all=False, qs_filter=None):
         self.name = name
         self.allow_all = allow_all
         self.qs_filter = qs_filter
 
     def __repr__(self):
-        return "<Action {}>".format(self.name)
+        return f"<Action {self.name}>"
 
 
 class ActionSerializer(serializers.Serializer):

@@ -113,7 +113,7 @@ class ActionSerializer(serializers.Serializer):
         )
 
         for action in self.actions_by_name.keys():
-            handler_name = "handle_{}".format(action)
+            handler_name = f"handle_{action}"
             assert hasattr(self, handler_name), "{} miss a {} method".format(
                 self.__class__.__name__, handler_name
             )

@@ -133,9 +133,9 @@ class ActionSerializer(serializers.Serializer):
         if value == "all":
             return self.queryset.all().order_by("id")
         if type(value) in [list, tuple]:
-            return self.queryset.filter(
-                **{"{}__in".format(self.pk_field): value}
-            ).order_by(self.pk_field)
+            return self.queryset.filter(**{f"{self.pk_field}__in": value}).order_by(
+                self.pk_field
+            )
 
         raise serializers.ValidationError(
             "{} is not a valid value for objects. You must provide either a "

@@ -281,7 +281,7 @@ class APIMutationSerializer(serializers.ModelSerializer):
 
     def validate_type(self, value):
         if value not in self.context["registry"]:
-            raise serializers.ValidationError("Invalid mutation type {}".format(value))
+            raise serializers.ValidationError(f"Invalid mutation type {value}")
         return value
 
 

@@ -321,7 +321,7 @@ class ContentSerializer(serializers.Serializer):
         return utils.render_html(o.text, o.content_type)
 
 
-class NullToEmptDict(object):
+class NullToEmptDict:
     def get_attribute(self, o):
         attr = super().get_attribute(o)
         if attr is None:
@@ -36,7 +36,7 @@ def rename_file(instance, field_name, new_name, allow_missing_file=False):
     field = getattr(instance, field_name)
     current_name, extension = os.path.splitext(field.name)
 
-    new_name_with_extension = "{}{}".format(new_name, extension)
+    new_name_with_extension = f"{new_name}{extension}"
     try:
         shutil.move(field.path, new_name_with_extension)
     except FileNotFoundError:

@@ -71,7 +71,7 @@ def set_query_parameter(url, **kwargs):
 
 
 @deconstructible
-class ChunkedPath(object):
+class ChunkedPath:
     def sanitize_filename(self, filename):
         return filename.replace("/", "-")
 

@@ -88,7 +88,7 @@ class ChunkedPath(object):
             parts = chunks[:3] + [filename]
         else:
             ext = os.path.splitext(filename)[1][1:].lower()
-            new_filename = "".join(chunks[3:]) + ".{}".format(ext)
+            new_filename = "".join(chunks[3:]) + f".{ext}"
             parts = chunks[:3] + [new_filename]
         return os.path.join(self.root, *parts)
 

@@ -227,7 +227,7 @@ def replace_prefix(queryset, field, old, new):
 
     on a whole table with a single query.
     """
-    qs = queryset.filter(**{"{}__startswith".format(field): old})
+    qs = queryset.filter(**{f"{field}__startswith": old})
     # we extract the part after the old prefix, and Concat it with our new prefix
     update = models.functions.Concat(
         models.Value(new),

@@ -353,7 +353,7 @@ def attach_content(obj, field, content_data):
     from . import models
 
     content_data = content_data or {}
-    existing = getattr(obj, "{}_id".format(field))
+    existing = getattr(obj, f"{field}_id")
 
     if existing:
         if same_content(getattr(obj, field), **content_data):

@@ -378,7 +378,7 @@ def attach_content(obj, field, content_data):
 def attach_file(obj, field, file_data, fetch=False):
     from . import models, tasks
 
-    existing = getattr(obj, "{}_id".format(field))
+    existing = getattr(obj, f"{field}_id")
     if existing:
         getattr(obj, field).delete()
 

@@ -395,7 +395,7 @@ def attach_file(obj, field, file_data, fetch=False):
         name = [
             getattr(obj, field) for field in name_fields if getattr(obj, field, None)
         ][0]
-        filename = "{}-{}.{}".format(field, name, extension)
+        filename = f"{field}-{name}.{extension}"
         if "url" in file_data:
             attachment.url = file_data["url"]
         else:

@@ -487,4 +487,4 @@ def get_file_hash(file, algo=None, chunk_size=None, full_read=False):
         # sometimes, it's useful to only hash the beginning of the file, e.g
         # to avoid a lot of I/O when crawling large libraries
         hash.update(file.read(chunk_size))
-    return "{}:{}".format(algo, hash.hexdigest())
+    return f"{algo}:{hash.hexdigest()}"
@@ -72,7 +72,7 @@ class ImageDimensionsValidator:
 
 
 @deconstructible
-class FileValidator(object):
+class FileValidator:
     """
     Taken from https://gist.github.com/jrosebr1/2140738
     Validator for files, checking the size, extension and mimetype.

@@ -163,5 +163,5 @@ class DomainValidator(validators.URLValidator):
 
         If it fails, we know the domain is not valid.
         """
-        super().__call__("http://{}".format(value))
+        super().__call__(f"http://{value}")
         return value

@@ -1 +0,0 @@
-# -*- coding: utf-8 -*-
@@ -68,7 +68,7 @@ class Track:
     }
 
     def __repr__(self):
-        return "Track(%s, %s)" % (self.artist_name, self.track_name)
+        return f"Track({self.artist_name}, {self.track_name})"
 
 
 class ListenBrainzClient:

@@ -127,7 +127,7 @@ class ListenBrainzClient:
             response_data = response_text
 
         self._handle_ratelimit(response)
-        log_msg = "Response %s: %r" % (response.status, response_data)
+        log_msg = f"Response {response.status}: {response_data!r}"
         if response.status == 429 and retry < 5:  # Too Many Requests
             self.logger.warning(log_msg)
             return self._submit(listen_type, payload, retry + 1)
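The ListenBrainz hunks above also replace printf-style `%s`/`%r` templates; the f-string conversion specifier `!r` reproduces `%r` by calling repr() on the value. A quick equivalence check as a sketch:

status, data = 429, "rate limited"
old = "Response %s: %r" % (status, data)   # printf-style, %r -> repr()
new = f"Response {status}: {data!r}"       # f-string, !r -> repr()
assert old == new == "Response 429: 'rate limited'"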
@@ -84,16 +84,16 @@ def get_scrobble_payload(track, date, suffix="[0]"):
     """
     upload = track.uploads.filter(duration__gte=0).first()
     data = {
-        "a{}".format(suffix): track.artist.name,
-        "t{}".format(suffix): track.title,
-        "l{}".format(suffix): upload.duration if upload else 0,
-        "b{}".format(suffix): (track.album.title if track.album else "") or "",
-        "n{}".format(suffix): track.position or "",
-        "m{}".format(suffix): str(track.mbid or ""),
-        "o{}".format(suffix): "P",  # Source: P = chosen by user
+        f"a{suffix}": track.artist.name,
+        f"t{suffix}": track.title,
+        f"l{suffix}": upload.duration if upload else 0,
+        f"b{suffix}": (track.album.title if track.album else "") or "",
+        f"n{suffix}": track.position or "",
+        f"m{suffix}": str(track.mbid or ""),
+        f"o{suffix}": "P",  # Source: P = chosen by user
     }
     if date:
-        data["i{}".format(suffix)] = int(date.timestamp())
+        data[f"i{suffix}"] = int(date.timestamp())
     return data
 

@@ -1 +0,0 @@
-# -*- coding: utf-8 -*-
@@ -316,16 +316,16 @@ class FunkwhaleProvider(internet_provider.Provider):
 
     def federation_url(self, prefix="", local=False):
        def path_generator():
-            return "{}/{}".format(prefix, uuid.uuid4())
+            return f"{prefix}/{uuid.uuid4()}"
 
         domain = settings.FEDERATION_HOSTNAME if local else self.domain_name()
         protocol = "https"
         path = path_generator()
-        return "{}://{}/{}".format(protocol, domain, path)
+        return f"{protocol}://{domain}/{path}"
 
     def user_name(self):
         u = super().user_name()
-        return "{}{}".format(u, random.randint(10, 999))
+        return f"{u}{random.randint(10, 999)}"
 
     def music_genre(self):
         return random.choice(TAGS_DATA["genre"])

@@ -23,4 +23,4 @@ class TrackFavorite(models.Model):
         return favorite
 
     def get_activity_url(self):
-        return "{}/favorites/tracks/{}".format(self.user.get_activity_url(), self.pk)
+        return f"{self.user.get_activity_url()}/favorites/tracks/{self.pk}"
@@ -241,8 +241,8 @@ class InboxRouter(Router):
             for k in r.keys():
                 if k in ["object", "target", "related_object"]:
                     update_fields += [
-                        "{}_id".format(k),
-                        "{}_content_type".format(k),
+                        f"{k}_id",
+                        f"{k}_content_type",
                     ]
                 else:
                     update_fields.append(k)

@@ -264,7 +264,7 @@ class InboxRouter(Router):
             user = ii.actor.get_user()
             if not user:
                 continue
-            group = "user.{}.inbox".format(user.pk)
+            group = f"user.{user.pk}.inbox"
             channels.group_send(
                 group,
                 {
@@ -22,7 +22,7 @@ def get_actor_data(actor_url):
     try:
         return response.json()
     except Exception:
-        raise ValueError("Invalid actor payload: {}".format(response.text))
+        raise ValueError(f"Invalid actor payload: {response.text}")
 
 
 def get_actor(fid, skip_cache=False):

@@ -216,7 +216,7 @@ class FetchSerializer(serializers.ModelSerializer):
         except validators.ValidationError:
             return value
 
-        return "webfinger://{}".format(value)
+        return f"webfinger://{value}"
 
     def create(self, validated_data):
         check_duplicates = not validated_data.get("force", False)
@@ -190,12 +190,12 @@ class LibraryViewSet(mixins.RetrieveModelMixin, viewsets.GenericViewSet):
             )
         except requests.exceptions.RequestException as e:
             return response.Response(
-                {"detail": "Error while fetching the library: {}".format(str(e))},
+                {"detail": f"Error while fetching the library: {str(e)}"},
                 status=400,
             )
         except serializers.serializers.ValidationError as e:
             return response.Response(
-                {"detail": "Invalid data in remote library: {}".format(str(e))},
+                {"detail": f"Invalid data in remote library: {str(e)}"},
                 status=400,
             )
         serializer = self.serializer_class(library)
@@ -362,14 +362,14 @@ class NS:
     def __getattr__(self, key):
         if key not in self.conf["document"]["@context"]:
             raise AttributeError(
-                "{} is not a valid property of context {}".format(key, self.baseUrl)
+                f"{key} is not a valid property of context {self.baseUrl}"
             )
         return self.baseUrl + key
 
 
 class NoopContext:
     def __getattr__(self, key):
-        return "_:{}".format(key)
+        return f"_:{key}"
 
 
 NOOP = NoopContext()
@@ -106,7 +106,7 @@ class ActorFactory(NoUpdateOnCreate, factory.django.DjangoModelFactory):
     summary = factory.Faker("paragraph")
     domain = factory.SubFactory(DomainFactory)
     fid = factory.LazyAttribute(
-        lambda o: "https://{}/users/{}".format(o.domain.name, o.preferred_username)
+        lambda o: f"https://{o.domain.name}/users/{o.preferred_username}"
     )
     followers_url = factory.LazyAttribute(
         lambda o: "https://{}/users/{}followers".format(

@@ -142,7 +142,7 @@ class ActorFactory(NoUpdateOnCreate, factory.django.DjangoModelFactory):
             self.domain = models.Domain.objects.get_or_create(
                 name=settings.FEDERATION_HOSTNAME
             )[0]
-            self.fid = "https://{}/actors/{}".format(self.domain, self.preferred_username)
+            self.fid = f"https://{self.domain}/actors/{self.preferred_username}"
             self.save(update_fields=["domain", "fid"])
         if not create:
             if extracted and hasattr(extracted, "pk"):

@@ -300,13 +300,13 @@ class NoteFactory(factory.Factory):
 @registry.register(name="federation.AudioMetadata")
 class AudioMetadataFactory(factory.Factory):
     recording = factory.LazyAttribute(
-        lambda o: "https://musicbrainz.org/recording/{}".format(uuid.uuid4())
+        lambda o: f"https://musicbrainz.org/recording/{uuid.uuid4()}"
     )
     artist = factory.LazyAttribute(
-        lambda o: "https://musicbrainz.org/artist/{}".format(uuid.uuid4())
+        lambda o: f"https://musicbrainz.org/artist/{uuid.uuid4()}"
     )
     release = factory.LazyAttribute(
-        lambda o: "https://musicbrainz.org/release/{}".format(uuid.uuid4())
+        lambda o: f"https://musicbrainz.org/release/{uuid.uuid4()}"
     )
     bitrate = 42
     length = 43
@@ -257,7 +257,7 @@ class JsonLdSerializer(serializers.Serializer):
                 data = expand(data)
             except ValueError as e:
                 raise serializers.ValidationError(
-                    "{} is not a valid jsonld document: {}".format(data, e)
+                    f"{data} is not a valid jsonld document: {e}"
                 )
         try:
             config = self.Meta.jsonld_mapping

@@ -21,7 +21,7 @@ def get_library_data(library_url, actor):
     elif scode == 403:
         return {"errors": ["Permission denied while scanning library"]}
     elif scode >= 400:
-        return {"errors": ["Error {} while fetching the library".format(scode)]}
+        return {"errors": [f"Error {scode} while fetching the library"]}
     serializer = serializers.LibrarySerializer(data=response.json())
     if not serializer.is_valid():
         return {"errors": ["Invalid ActivityPub response from remote library"]}
@@ -67,9 +67,7 @@ class Command(BaseCommand):
         for kls, fields in MODELS:
             results[kls] = {}
             for field in fields:
-                candidates = kls.objects.filter(
-                    **{"{}__startswith".format(field): old_prefix}
-                )
+                candidates = kls.objects.filter(**{f"{field}__startswith": old_prefix})
                 results[kls][field] = candidates.count()
 
         total = sum([t for k in results.values() for t in k.values()])

@@ -92,9 +90,7 @@ class Command(BaseCommand):
             )
 
         else:
-            self.stdout.write(
-                "No objects found with prefix {}, exiting.".format(old_prefix)
-            )
+            self.stdout.write(f"No objects found with prefix {old_prefix}, exiting.")
             return
         if options["dry_run"]:
             self.stdout.write(

@@ -112,9 +108,7 @@ class Command(BaseCommand):
 
         for kls, fields in results.items():
             for field, count in fields.items():
-                self.stdout.write(
-                    "Replacing {} on {} {}…".format(field, count, kls._meta.label)
-                )
+                self.stdout.write(f"Replacing {field} on {count} {kls._meta.label}…")
                 candidates = kls.objects.all()
                 utils.replace_prefix(candidates, field, old=old_prefix, new=new_prefix)
         self.stdout.write("")
@@ -80,7 +80,7 @@ class ActorQuerySet(models.QuerySet):
             )
             qs = qs.annotate(
                 **{
-                    "_usage_{}".format(s): models.Sum(
+                    f"_usage_{s}": models.Sum(
                         "libraries__uploads__size", filter=uploads_query
                     )
                 }

@@ -226,22 +226,22 @@ class Actor(models.Model):
         verbose_name = "Account"
 
     def get_moderation_url(self):
-        return "/manage/moderation/accounts/{}".format(self.full_username)
+        return f"/manage/moderation/accounts/{self.full_username}"
 
     @property
     def webfinger_subject(self):
-        return "{}@{}".format(self.preferred_username, settings.FEDERATION_HOSTNAME)
+        return f"{self.preferred_username}@{settings.FEDERATION_HOSTNAME}"
 
     @property
     def private_key_id(self):
-        return "{}#main-key".format(self.fid)
+        return f"{self.fid}#main-key"
 
     @property
     def full_username(self) -> str:
-        return "{}@{}".format(self.preferred_username, self.domain_id)
+        return f"{self.preferred_username}@{self.domain_id}"
 
     def __str__(self):
-        return "{}@{}".format(self.preferred_username, self.domain_id)
+        return f"{self.preferred_username}@{self.domain_id}"
 
     @property
     def is_local(self) -> bool:

@@ -270,14 +270,14 @@ class Actor(models.Model):
 
     def get_absolute_url(self):
         if self.is_local:
-            return federation_utils.full_url("/@{}".format(self.preferred_username))
+            return federation_utils.full_url(f"/@{self.preferred_username}")
         return self.url or self.fid
 
     def get_current_usage(self):
         actor = self.__class__.objects.filter(pk=self.pk).with_current_usage().get()
         data = {}
         for s in ["draft", "pending", "skipped", "errored", "finished"]:
-            data[s] = getattr(actor, "_usage_{}".format(s)) or 0
+            data[s] = getattr(actor, f"_usage_{s}") or 0
 
         data["total"] = sum(data.values())
         return data

@@ -341,8 +341,8 @@ class Actor(models.Model):
         # matches, we consider the actor has the permission to manage
         # the object
         domain = self.domain_id
-        return obj.fid.startswith("http://{}/".format(domain)) or obj.fid.startswith(
-            "https://{}/".format(domain)
+        return obj.fid.startswith(f"http://{domain}/") or obj.fid.startswith(
+            f"https://{domain}/"
         )
 
     @property

@@ -498,9 +498,7 @@ class AbstractFollow(models.Model):
         abstract = True
 
     def get_federation_id(self):
-        return federation_utils.full_url(
-            "{}#follows/{}".format(self.actor.fid, self.uuid)
-        )
+        return federation_utils.full_url(f"{self.actor.fid}#follows/{self.uuid}")
 
 
 class Follow(AbstractFollow):

@@ -594,7 +592,7 @@ class LibraryTrack(models.Model):
         remote_response.raise_for_status()
         extension = music_utils.get_ext_from_type(self.audio_mimetype)
         title = " - ".join([self.title, self.album_title, self.artist_name])
-        filename = "{}.{}".format(title, extension)
+        filename = f"{title}.{extension}"
         tmp_file = tempfile.TemporaryFile()
         for chunk in r.iter_content(chunk_size=512):
             tmp_file.write(chunk)
@@ -116,7 +116,7 @@ class MediaSerializer(jsonld.JsonLdSerializer):
 
         if not is_mimetype(v, self.allowed_mimetypes):
             raise serializers.ValidationError(
-                "Invalid mimetype {}. Allowed: {}".format(v, self.allowed_mimetypes)
+                f"Invalid mimetype {v}. Allowed: {self.allowed_mimetypes}"
             )
         return v
 

@@ -371,7 +371,7 @@ class ActorSerializer(jsonld.JsonLdSerializer):
         ret["publicKey"] = {
             "owner": instance.fid,
             "publicKeyPem": instance.public_key,
-            "id": "{}#main-key".format(instance.fid),
+            "id": f"{instance.fid}#main-key",
         }
         ret["endpoints"] = {}
 

@@ -453,7 +453,7 @@ class ActorSerializer(jsonld.JsonLdSerializer):
             actor,
             rss_url=rss_url,
             attributed_to_fid=attributed_to,
-            **self.validated_data
+            **self.validated_data,
         )
         return actor
 

@@ -736,9 +736,7 @@ class FollowActionSerializer(serializers.Serializer):
                 .get()
             )
         except follow_class.DoesNotExist:
-            raise serializers.ValidationError(
-                "No follow to {}".format(self.action_type)
-            )
+            raise serializers.ValidationError(f"No follow to {self.action_type}")
         return validated_data
 
     def to_representation(self, instance):

@@ -749,7 +747,7 @@ class FollowActionSerializer(serializers.Serializer):
 
         return {
             "@context": jsonld.get_default_context(),
-            "id": instance.get_federation_id() + "/{}".format(self.action_type),
+            "id": instance.get_federation_id() + f"/{self.action_type}",
             "type": self.action_type.title(),
             "actor": actor.fid,
             "object": FollowSerializer(instance).data,

@@ -855,7 +853,7 @@ class ActorWebfingerSerializer(serializers.Serializer):
 
     def to_representation(self, instance):
         data = {}
-        data["subject"] = "acct:{}".format(instance.webfinger_subject)
+        data["subject"] = f"acct:{instance.webfinger_subject}"
         data["links"] = [
             {"rel": "self", "href": instance.fid, "type": "application/activity+json"}
         ]

@@ -881,7 +879,7 @@ class ActivitySerializer(serializers.Serializer):
         try:
             object_serializer = OBJECT_SERIALIZERS[type]
         except KeyError:
-            raise serializers.ValidationError("Unsupported type {}".format(type))
+            raise serializers.ValidationError(f"Unsupported type {type}")
 
         serializer = object_serializer(data=value)
         serializer.is_valid(raise_exception=True)

@@ -1165,7 +1163,7 @@ MUSIC_ENTITY_JSONLD_MAPPING = {
 
 
 def repr_tag(tag_name):
-    return {"type": "Hashtag", "name": "#{}".format(tag_name)}
+    return {"type": "Hashtag", "name": f"#{tag_name}"}
 
 
 def include_content(repr, content_obj):

@@ -1704,9 +1702,7 @@ class FlagSerializer(jsonld.JsonLdSerializer):
         try:
             return utils.get_object_by_fid(v, local=True)
         except ObjectDoesNotExist:
-            raise serializers.ValidationError(
-                "Unknown id {} for reported object".format(v)
-            )
+            raise serializers.ValidationError(f"Unknown id {v} for reported object")
 
     def validate_type(self, tags):
         if tags:

@@ -1918,7 +1914,7 @@ class ChannelUploadSerializer(jsonld.JsonLdSerializer):
         tags = [item.tag.name for item in upload.get_all_tagged_items()]
         if tags:
             data["tag"] = [repr_tag(name) for name in sorted(set(tags))]
-            data["summary"] = " ".join(["#{}".format(name) for name in tags])
+            data["summary"] = " ".join([f"#{name}" for name in tags])
 
         if self.context.get("include_ap_context", True):
             data["@context"] = jsonld.get_default_context()

@@ -2039,7 +2035,7 @@ class DeleteSerializer(jsonld.JsonLdSerializer):
         try:
             obj = utils.get_object_by_fid(url)
         except utils.ObjectDoesNotExist:
-            raise serializers.ValidationError("No object matching {}".format(url))
+            raise serializers.ValidationError(f"No object matching {url}")
         if isinstance(obj, music_models.Upload):
             obj = obj.track
 
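One hunk in the serializers above is not an f-string change: the trailing comma added after `**self.validated_data`. A trailing comma after `**kwargs` in a call has been legal since the PEP 448 call-syntax generalization (Python 3.5), and it keeps future diffs to one line when another argument is appended. A sketch with hypothetical names:

def update_or_create(**kwargs):
    return kwargs

data = {"name": "example"}
update_or_create(
    pk=1,
    **data,  # trailing comma after ** unpacking is valid Python 3.5+ syntax
)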
@@ -31,7 +31,7 @@ def verify_date(raw_date):
     now = timezone.now()
     if dt < now - delta or dt > now + delta:
         raise forms.ValidationError(
-            "Request Date {} is too far in the future or in the past".format(raw_date)
+            f"Request Date {raw_date} is too far in the future or in the past"
         )
 
     return dt

@@ -70,10 +70,10 @@ def verify_django(django_request, public_key):
         signature = headers["Signature"]
     except KeyError:
         raise exceptions.MissingSignature
-    url = "http://noop{}".format(django_request.path)
+    url = f"http://noop{django_request.path}"
     query = django_request.META["QUERY_STRING"]
     if query:
-        url += "?{}".format(query)
+        url += f"?{query}"
     signature_headers = signature.split('headers="')[1].split('",')[0]
     expected = signature_headers.split(" ")
     logger.debug("Signature expected headers: %s", expected)
@@ -170,7 +170,7 @@ def deliver_to_remote(delivery):
 
 def fetch_nodeinfo(domain_name):
     s = session.get_session()
-    wellknown_url = "https://{}/.well-known/nodeinfo".format(domain_name)
+    wellknown_url = f"https://{domain_name}/.well-known/nodeinfo"
     response = s.get(url=wellknown_url)
     response.raise_for_status()
     serializer = serializers.NodeInfoSerializer(data=response.json())
@@ -122,10 +122,8 @@ def get_domain_query_from_url(domain, url_field="fid"):
     to match objects that have this domain in the given field.
     """
 
-    query = Q(**{"{}__startswith".format(url_field): "http://{}/".format(domain)})
-    query = query | Q(
-        **{"{}__startswith".format(url_field): "https://{}/".format(domain)}
-    )
+    query = Q(**{f"{url_field}__startswith": f"http://{domain}/"})
+    query = query | Q(**{f"{url_field}__startswith": f"https://{domain}/"})
     return query
 
 

@@ -143,9 +141,7 @@ def is_local(url) -> bool:
         return True
 
     d = settings.FEDERATION_HOSTNAME
-    return url.startswith("http://{}/".format(d)) or url.startswith(
-        "https://{}/".format(d)
-    )
+    return url.startswith(f"http://{d}/") or url.startswith(f"https://{d}/")
 
 
 def get_actor_data_from_username(username):

@@ -164,8 +160,8 @@ def get_actor_from_username_data_query(field, data):
     if field:
         return Q(
             **{
-                "{}__preferred_username__iexact".format(field): data["username"],
-                "{}__domain__name__iexact".format(field): data["domain"],
+                f"{field}__preferred_username__iexact": data["username"],
+                f"{field}__domain__name__iexact": data["domain"],
             }
         )
     else:
@@ -68,7 +68,7 @@ class AuthenticatedIfAllowListEnabled(permissions.BasePermission):
         return bool(request.actor)
 
 
-class FederationMixin(object):
+class FederationMixin:
     permission_classes = [AuthenticatedIfAllowListEnabled]
 
     def dispatch(self, request, *args, **kwargs):

@@ -223,9 +223,9 @@ class WellKnownViewSet(viewsets.GenericViewSet):
             return HttpResponse(status=405)
         try:
             resource_type, resource = webfinger.clean_resource(request.GET["resource"])
-            cleaner = getattr(webfinger, "clean_{}".format(resource_type))
+            cleaner = getattr(webfinger, f"clean_{resource_type}")
             result = cleaner(resource)
-            handler = getattr(self, "handler_{}".format(resource_type))
+            handler = getattr(self, f"handler_{resource_type}")
             data = handler(result)
         except forms.ValidationError as e:
             return response.Response({"errors": {"resource": e.message}}, status=400)
@@ -30,7 +30,7 @@ def clean_acct(acct_string, ensure_local=True):
         raise forms.ValidationError("Invalid format")
 
     if ensure_local and hostname.lower() != settings.FEDERATION_HOSTNAME:
-        raise forms.ValidationError("Invalid hostname {}".format(hostname))
+        raise forms.ValidationError(f"Invalid hostname {hostname}")
 
     return username, hostname
 

@@ -22,4 +22,4 @@ class Listening(models.Model):
         ordering = ("-creation_date",)
 
     def get_activity_url(self):
-        return "{}/listenings/tracks/{}".format(self.user.get_activity_url(), self.pk)
+        return f"{self.user.get_activity_url()}/listenings/tracks/{self.pk}"
@@ -85,10 +85,8 @@ class ManageUserSerializer(serializers.ModelSerializer):
         permissions = validated_data.pop("permissions", {})
         if permissions:
             for p, value in permissions.items():
-                setattr(instance, "permission_{}".format(p), value)
-            instance.save(
-                update_fields=["permission_{}".format(p) for p in permissions.keys()]
-            )
+                setattr(instance, f"permission_{p}", value)
+            instance.save(update_fields=[f"permission_{p}" for p in permissions.keys()])
         return instance
 
     @extend_schema_field(OpenApiTypes.OBJECT)

@@ -19,7 +19,7 @@ def get_filtered_content_query(config, user):
     query = None
     ids = user.content_filters.values_list(filter_field, flat=True)
     for model_field in model_fields:
-        q = Q(**{"{}__in".format(model_field): ids})
+        q = Q(**{f"{model_field}__in": ids})
         if query:
             query |= q
         else:
@ -71,7 +71,7 @@ class Command(BaseCommand):
|
|||
)
|
||||
)
|
||||
for name in registry.keys():
|
||||
self.stdout.write("- {}".format(name))
|
||||
self.stdout.write(f"- {name}")
|
||||
return
|
||||
raw_content = None
|
||||
content = None
|
||||
|
|
|
@ -29,13 +29,11 @@ def check_allow_list(payload, **kwargs):
|
|||
utils.recursive_getattr(payload, "object.id", permissive=True),
|
||||
]
|
||||
|
||||
relevant_domains = set(
|
||||
[
|
||||
domain
|
||||
for domain in [urllib.parse.urlparse(i).hostname for i in relevant_ids if i]
|
||||
if domain
|
||||
]
|
||||
)
|
||||
relevant_domains = {
|
||||
domain
|
||||
for domain in [urllib.parse.urlparse(i).hostname for i in relevant_ids if i]
|
||||
if domain
|
||||
}
|
||||
|
||||
if relevant_domains - allowed_domains:
|
||||
|
||||
|
|
|
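The check_allow_list hunk swaps set([...]) for a set comprehension; both build the same set, the comprehension just skips the throwaway intermediate list. A runnable sketch with made-up IDs:

    import urllib.parse

    relevant_ids = ["https://a.example/obj/1", None, "https://b.example/x"]
    hostnames = [urllib.parse.urlparse(i).hostname for i in relevant_ids if i]

    old_style = set([domain for domain in hostnames if domain])
    new_style = {domain for domain in hostnames if domain}
    assert old_style == new_style == {"a.example", "b.example"}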
@@ -61,7 +61,7 @@ class UserFilterSerializer(serializers.ModelSerializer):
 state_serializers = persisting_theory.Registry()


-class DescriptionStateMixin(object):
+class DescriptionStateMixin:
     def get_description(self, o):
         if o.description:
             return o.description.text

@@ -64,9 +64,7 @@ def send_new_report_email_to_moderators(report):
     subject = "[{} moderation - {}] New report from {}".format(
         settings.FUNKWHALE_HOSTNAME, report.get_type_display(), submitter_repr
     )
-    detail_url = federation_utils.full_url(
-        "/manage/moderation/reports/{}".format(report.uuid)
-    )
+    detail_url = federation_utils.full_url(f"/manage/moderation/reports/{report.uuid}")
     unresolved_reports_url = federation_utils.full_url(
         "/manage/moderation/reports?q=resolved:no"
     )

@@ -97,7 +95,7 @@ def send_new_report_email_to_moderators(report):

     body += [
         "",
-        "- To handle this report, please visit {}".format(detail_url),
+        f"- To handle this report, please visit {detail_url}",
         "- To view all unresolved reports (currently {}), please visit {}".format(
             unresolved_reports, unresolved_reports_url
         ),

@@ -173,9 +171,7 @@ def notify_mods_signup_request_pending(obj):
     subject = "[{} moderation] New sign-up request from {}".format(
         settings.FUNKWHALE_HOSTNAME, submitter_repr
     )
-    detail_url = federation_utils.full_url(
-        "/manage/moderation/requests/{}".format(obj.uuid)
-    )
+    detail_url = federation_utils.full_url(f"/manage/moderation/requests/{obj.uuid}")
     unresolved_requests_url = federation_utils.full_url(
         "/manage/moderation/requests?q=status:pending"
     )

@@ -185,7 +181,7 @@ def notify_mods_signup_request_pending(obj):
             submitter_repr
         ),
         "",
-        "- To handle this request, please visit {}".format(detail_url),
+        f"- To handle this request, please visit {detail_url}",
         "- To view all unresolved requests (currently {}), please visit {}".format(
             unresolved_requests, unresolved_requests_url
         ),

@@ -217,10 +213,10 @@ def notify_submitter_signup_request_approved(user_request):
     if not submitter_email:
         logger.warning("User %s has no e-mail address configured", submitter_repr)
         return
-    subject = "Welcome to {}, {}!".format(settings.FUNKWHALE_HOSTNAME, submitter_repr)
+    subject = f"Welcome to {settings.FUNKWHALE_HOSTNAME}, {submitter_repr}!"
     login_url = federation_utils.full_url("/login")
     body = [
-        "Hi {} and welcome,".format(submitter_repr),
+        f"Hi {submitter_repr} and welcome,",
         "",
         "Our moderation team has approved your account request and you can now start "
         "using the service. Please visit {} to get started.".format(login_url),

@@ -246,7 +242,7 @@ def notify_submitter_signup_request_refused(user_request):
         settings.FUNKWHALE_HOSTNAME
     )
     body = [
-        "Hi {},".format(submitter_repr),
+        f"Hi {submitter_repr},",
         "",
         "You recently submitted an account request on our service. However, our "
         "moderation team has refused it, and as a result, you won't be able to use "
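Note that the subject lines above still use .format(): the rewriter only converts calls it can rewrite safely in place, and presumably leaves these multi-line invocations alone. Either spelling produces the same string, as this sketch with stand-in values shows:

    hostname, kind, submitter = "demo.test", "takedown", "alice@example.com"

    subject_old = "[{} moderation - {}] New report from {}".format(hostname, kind, submitter)
    subject_new = f"[{hostname} moderation - {kind}] New report from {submitter}"
    assert subject_old == subject_new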
@@ -6,7 +6,7 @@ def load(model, *args, **kwargs):
 EXCLUDE_VALIDATION = {"Track": ["artist"]}


-class Importer(object):
+class Importer:
     def __init__(self, model):
         self.model = model

@@ -22,7 +22,7 @@ class Importer(object):
         return m


-class Mapping(object):
+class Mapping:
     """Cast musicbrainz data to funkwhale data and vice-versa"""

     def __init__(self, musicbrainz_mapping):

@@ -70,7 +70,7 @@ def match(*values):
             value,
         )
         if not urls:
-            logger.debug('Impossible to guess license from string "{}"'.format(value))
+            logger.debug(f'Impossible to guess license from string "{value}"')
             continue
         url = urls[0]
         if _cache:

@@ -122,7 +122,7 @@ def get_cc_license(version, perks, country=None, country_name=None):
     )
     if country:
         code_parts.append(country)
-        name += " {}".format(country_name)
+        name += f" {country_name}"
         url += country + "/"
     data = {
         "name": name,

@@ -13,7 +13,7 @@ def progress(buffer, count, total, status=""):

     bar = "=" * filled_len + "-" * (bar_len - filled_len)

-    buffer.write("[%s] %s/%s ...%s\r" % (bar, count, total, status))
+    buffer.write(f"[{bar}] {count}/{total} ...{status}\r")
     buffer.flush()

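The progress hunk is the one %-interpolation in this diff; each %s slot maps one-to-one onto an f-string placeholder. A self-contained version of the function (bar_len and the call site are assumed for the sketch):

    import sys

    def progress(buffer, count, total, status=""):
        bar_len = 20  # assumed width
        filled_len = int(round(bar_len * count / float(total)))
        bar = "=" * filled_len + "-" * (bar_len - filled_len)
        buffer.write(f"[{bar}] {count}/{total} ...{status}\r")
        buffer.flush()

    progress(sys.stdout, 5, 20, "importing")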
@@ -43,7 +43,7 @@ class Command(BaseCommand):
         candidates = models.Upload.objects.filter(source__startswith="file://")
         candidates = candidates.filter(audio_file__in=["", None])
         total = candidates.count()
-        self.stdout.write("Checking {} in-place imported files…".format(total))
+        self.stdout.write(f"Checking {total} in-place imported files…")

         missing = []
         for i, row in enumerate(candidates.values("id", "source").iterator()):

@@ -54,7 +54,7 @@ class Command(BaseCommand):

         if missing:
             for path, _ in missing:
-                self.stdout.write(" {}".format(path))
+                self.stdout.write(f" {path}")
             self.stdout.write(
                 "The previous {} paths are referenced in database, but not found on disk!".format(
                     len(missing)

@@ -71,5 +71,5 @@ class Command(BaseCommand):
                 "Nothing was deleted, rerun this command with --no-dry-run to apply the changes"
             )
         else:
-            self.stdout.write("Deleting {} uploads…".format(to_delete.count()))
+            self.stdout.write(f"Deleting {to_delete.count()} uploads…")
             to_delete.delete()

@@ -21,7 +21,7 @@ class Command(BaseCommand):
                 errored.append((data, response))

         if errored:
-            self.stdout.write("{} licenses were not reachable!".format(len(errored)))
+            self.stdout.write(f"{len(errored)} licenses were not reachable!")
             for row, response in errored:
                 self.stdout.write(
                     "- {}: error {} at url {}".format(

@@ -73,13 +73,11 @@ class Command(BaseCommand):
             Q(source__startswith="file://") | Q(source__startswith="upload://")
         ).exclude(mimetype__startswith="audio/")
         total = matching.count()
-        self.stdout.write(
-            "[mimetypes] {} entries found with bad or no mimetype".format(total)
-        )
+        self.stdout.write(f"[mimetypes] {total} entries found with bad or no mimetype")
         if not total:
             return
         for extension, mimetype in utils.EXTENSION_TO_MIMETYPE.items():
-            qs = matching.filter(source__endswith=".{}".format(extension))
+            qs = matching.filter(source__endswith=f".{extension}")
             self.stdout.write(
                 "[mimetypes] setting {} {} files to {}".format(
                     qs.count(), extension, mimetype

@@ -95,9 +93,7 @@ class Command(BaseCommand):
             Q(bitrate__isnull=True) | Q(duration__isnull=True)
         )
         total = matching.count()
-        self.stdout.write(
-            "[bitrate/length] {} entries found with missing values".format(total)
-        )
+        self.stdout.write(f"[bitrate/length] {total} entries found with missing values")
         if dry_run:
             return

@@ -135,7 +131,7 @@ class Command(BaseCommand):
         self.stdout.write("Fixing missing size...")
         matching = models.Upload.objects.filter(size__isnull=True)
         total = matching.count()
-        self.stdout.write("[size] {} entries found with missing values".format(total))
+        self.stdout.write(f"[size] {total} entries found with missing values")
         if dry_run:
             return

@@ -148,16 +144,12 @@ class Command(BaseCommand):
             for upload in chunk:
                 handled += 1

-                self.stdout.write(
-                    "[size] {}/{} fixing file #{}".format(handled, total, upload.pk)
-                )
+                self.stdout.write(f"[size] {handled}/{total} fixing file #{upload.pk}")

                 try:
                     upload.size = upload.get_file_size()
                 except Exception as e:
-                    self.stderr.write(
-                        "[size] error with file #{}: {}".format(upload.pk, str(e))
-                    )
+                    self.stderr.write(f"[size] error with file #{upload.pk}: {str(e)}")
                 else:
                     updated.append(upload)
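One detail the rewriter does not touch: str(e) survives inside the new f-strings even though interpolation already calls str() on the value, so the two spellings render identically:

    try:
        raise OSError("disk full")  # hypothetical failure
    except OSError as e:
        assert f"[size] error: {str(e)}" == f"[size] error: {e}"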
@@ -170,9 +162,7 @@ class Command(BaseCommand):
             & (Q(audio_file__isnull=False) | Q(source__startswith="file://"))
         )
         total = matching.count()
-        self.stdout.write(
-            "[checksum] {} entries found with missing values".format(total)
-        )
+        self.stdout.write(f"[checksum] {total} entries found with missing values")
         if dry_run:
             return
         chunks = common_utils.chunk_queryset(

@@ -184,7 +174,7 @@ class Command(BaseCommand):
             for upload in chunk:
                 handled += 1
                 self.stdout.write(
-                    "[checksum] {}/{} fixing file #{}".format(handled, total, upload.pk)
+                    f"[checksum] {handled}/{total} fixing file #{upload.pk}"
                 )

                 try:

@@ -193,7 +183,7 @@ class Command(BaseCommand):
                     )
                 except Exception as e:
                     self.stderr.write(
-                        "[checksum] error with file #{}: {}".format(upload.pk, str(e))
+                        f"[checksum] error with file #{upload.pk}: {str(e)}"
                     )
                 else:
                     updated.append(upload)

@@ -31,7 +31,7 @@ def crawl_dir(dir, extensions, recursive=True, ignored=[]):
     try:
         scanner = os.scandir(dir)
     except Exception as e:
-        m = "Error while reading {}: {} {}\n".format(dir, e.__class__.__name__, e)
+        m = f"Error while reading {dir}: {e.__class__.__name__} {e}\n"
         sys.stderr.write(m)
         return
     try:

@@ -39,7 +39,7 @@ def crawl_dir(dir, extensions, recursive=True, ignored=[]):
         try:
             if entry.is_file():
                 for e in extensions:
-                    if entry.name.lower().endswith(".{}".format(e.lower())):
+                    if entry.name.lower().endswith(f".{e.lower()}"):
                         if entry.path not in ignored:
                             yield entry.path
             elif recursive and entry.is_dir():

@@ -260,7 +260,7 @@ class Command(BaseCommand):
             raise CommandError("Invalid library id")

         if not library.actor.get_user():
-            raise CommandError("Library {} is not a local library".format(library.uuid))
+            raise CommandError(f"Library {library.uuid} is not a local library")

         if options["in_place"]:
             self.stdout.write(

@@ -282,7 +282,7 @@ class Command(BaseCommand):
                     "Culprit: {}".format(p, import_path)
                 )

-        reference = options["reference"] or "cli-{}".format(timezone.now().isoformat())
+        reference = options["reference"] or f"cli-{timezone.now().isoformat()}"

         import_url = "{}://{}/library/{}/upload?{}"
         import_url = import_url.format(

@@ -393,10 +393,10 @@ class Command(BaseCommand):
             message.format(total - len(errors), int(time.time() - start_time))
         )
         if len(errors) > 0:
-            self.stderr.write("{} tracks could not be imported:".format(len(errors)))
+            self.stderr.write(f"{len(errors)} tracks could not be imported:")

             for path, error in errors:
-                self.stderr.write("- {}: {}".format(path, error))
+                self.stderr.write(f"- {path}: {error}")

             self.stdout.write(
                 "For details, please refer to import reference '{}' or URL {}".format(

@@ -485,12 +485,12 @@ class Command(BaseCommand):
         return errors

     def filter_matching(self, matching, library):
-        sources = ["file://{}".format(p) for p in matching]
+        sources = [f"file://{p}" for p in matching]
         # we skip reimport for path that are already found
         # as a Upload.source
         existing = library.uploads.filter(source__in=sources, import_status="finished")
         existing = existing.values_list("source", flat=True)
-        existing = set([p.replace("file://", "", 1) for p in existing])
+        existing = {p.replace("file://", "", 1) for p in existing}
         skipped = set(matching) & existing
         result = {
             "initial": matching,

@@ -530,7 +530,7 @@ class Command(BaseCommand):
                 path, e.__class__.__name__, e
             )
             self.stderr.write(m)
-            errors.append((path, "{} {}".format(e.__class__.__name__, e)))
+            errors.append((path, f"{e.__class__.__name__} {e}"))
         return errors

     def setup_watcher(self, path, extensions, recursive, **kwargs):

@@ -544,7 +544,7 @@ class Command(BaseCommand):
         worker.start()

         # setup watchdog to monitor directory for trigger files
-        patterns = ["*.{}".format(e) for e in extensions]
+        patterns = [f"*.{e}" for e in extensions]
         event_handler = Watcher(
             stdout=self.stdout,
             queue=watchdog_queue,

@@ -556,9 +556,7 @@ class Command(BaseCommand):

         try:
             while True:
-                self.stdout.write(
-                    "Watching for changes at {}…".format(path), ending="\r"
-                )
+                self.stdout.write(f"Watching for changes at {path}…", ending="\r")
                 time.sleep(10)
                 if kwargs["prune"] and GLOBAL["need_pruning"]:
                     self.stdout.write("Some files were deleted, pruning library…")
@@ -728,7 +726,7 @@ def handle_modified(event, stdout, library, in_place, **kwargs):
         try:
             tasks.update_track_metadata(audio_metadata, to_update.track)
         except serializers.ValidationError as e:
-            stdout.write(" Invalid metadata: {}".format(e))
+            stdout.write(f" Invalid metadata: {e}")
         else:
             to_update.checksum = checksum
             to_update.save(update_fields=["checksum"])

@@ -765,7 +763,7 @@ def handle_moved(event, stdout, library, in_place, **kwargs):
     existing_candidates = existing_candidates.in_place().filter(source=old_source)
     existing = existing_candidates.first()
     if existing:
-        stdout.write(" Updating path of existing file #{}".format(existing.pk))
+        stdout.write(f" Updating path of existing file #{existing.pk}")
         existing.source = new_source
         existing.save(update_fields=["source"])

@@ -794,15 +792,14 @@ def check_updates(stdout, library, extensions, paths, batch_size):
     for path in paths:
         for ext in extensions:
             queries.append(
-                Q(source__startswith="file://{}".format(path))
-                & Q(source__endswith=".{}".format(ext))
+                Q(source__startswith=f"file://{path}") & Q(source__endswith=f".{ext}")
             )
     query, remainder = queries[0], queries[1:]
     for q in remainder:
         query = q | query
     existing = existing.filter(query)
     total = existing.count()
-    stdout.write("Found {} files to check in database!".format(total))
+    stdout.write(f"Found {total} files to check in database!")
     uploads = existing.order_by("source")
     for i, rows in enumerate(batch(uploads.iterator(), batch_size)):
         stdout.write(

@@ -849,7 +846,7 @@ def check_upload(stdout, upload):
     try:
         tasks.update_track_metadata(upload.get_metadata(), track)
     except serializers.ValidationError as e:
-        stdout.write(" Invalid metadata: {}".format(e))
+        stdout.write(f" Invalid metadata: {e}")
         return
     except IntegrityError:
         stdout.write(

@@ -101,11 +101,9 @@ class Command(BaseCommand):
             pruned_total = prunable.count()
             total = models.Track.objects.count()
             if options["dry_run"]:
-                self.stdout.write(
-                    "Would prune {}/{} tracks".format(pruned_total, total)
-                )
+                self.stdout.write(f"Would prune {pruned_total}/{total} tracks")
             else:
-                self.stdout.write("Deleting {}/{} tracks…".format(pruned_total, total))
+                self.stdout.write(f"Deleting {pruned_total}/{total} tracks…")
                 prunable.delete()

         if options["prune_albums"]:

@@ -113,11 +111,9 @@ class Command(BaseCommand):
             pruned_total = prunable.count()
             total = models.Album.objects.count()
             if options["dry_run"]:
-                self.stdout.write(
-                    "Would prune {}/{} albums".format(pruned_total, total)
-                )
+                self.stdout.write(f"Would prune {pruned_total}/{total} albums")
             else:
-                self.stdout.write("Deleting {}/{} albums…".format(pruned_total, total))
+                self.stdout.write(f"Deleting {pruned_total}/{total} albums…")
                 prunable.delete()

         if options["prune_artists"]:

@@ -125,11 +121,9 @@ class Command(BaseCommand):
             pruned_total = prunable.count()
             total = models.Artist.objects.count()
             if options["dry_run"]:
-                self.stdout.write(
-                    "Would prune {}/{} artists".format(pruned_total, total)
-                )
+                self.stdout.write(f"Would prune {pruned_total}/{total} artists")
             else:
-                self.stdout.write("Deleting {}/{} artists…".format(pruned_total, total))
+                self.stdout.write(f"Deleting {pruned_total}/{total} artists…")
                 prunable.delete()

         self.stdout.write("")

@@ -355,15 +355,15 @@ class Metadata(Mapping):
     def __init__(self, filething, kind=mutagen.File):
         self._file = kind(filething)
         if self._file is None:
-            raise ValueError("Cannot parse metadata from {}".format(filething))
+            raise ValueError(f"Cannot parse metadata from {filething}")
         if len(self._file) == 0:
-            raise ValueError("No tags found in {}".format(filething))
+            raise ValueError(f"No tags found in {filething}")
         self.fallback = self.load_fallback(filething, self._file)
         ft = self.get_file_type(self._file)
         try:
             self._conf = CONF[ft]
         except KeyError:
-            raise ValueError("Unsupported format {}".format(ft))
+            raise ValueError(f"Unsupported format {ft}")

     def get_file_type(self, f):
         return f.__class__.__name__

@@ -420,7 +420,7 @@ class Metadata(Mapping):
         try:
             field_conf = self._conf["fields"][key]
         except KeyError:
-            raise UnsupportedTag("{} is not supported for this file format".format(key))
+            raise UnsupportedTag(f"{key} is not supported for this file format")
         real_key = field_conf.get("field", key)
         try:
             getter = field_conf.get("getter", self._conf["getter"])

@@ -467,8 +467,7 @@ class Metadata(Mapping):
         return 1

     def __iter__(self):
-        for field in self._conf["fields"]:
-            yield field
+        yield from self._conf["fields"]


 class ArtistField(serializers.Field):
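The __iter__ hunk replaces an explicit for/yield loop with yield from, which delegates straight to the underlying iterator. A stripped-down sketch (the _conf layout is simplified from the real Metadata class):

    class FieldsOnly:
        _conf = {"fields": {"title": None, "artist": None}}

        def __iter__(self):
            # delegate to the dict's own iterator over its keys
            yield from self._conf["fields"]

    print(list(FieldsOnly()))  # -> ['title', 'artist']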
@@ -85,9 +85,7 @@ class APIModelMixin(models.Model):
                 cls.musicbrainz_model
             ]
         else:
-            raw_data = cls.api.search(**kwargs)[
-                "{0}-list".format(cls.musicbrainz_model)
-            ][0]
+            raw_data = cls.api.search(**kwargs)[f"{cls.musicbrainz_model}-list"][0]
         cleaned_data = cls.clean_musicbrainz_data(raw_data)
         return importers.load(cls, cleaned_data, raw_data, cls.import_hooks)

@@ -116,7 +114,7 @@ class APIModelMixin(models.Model):

         return federation_utils.full_url(
             reverse(
-                "federation:music:{}-detail".format(self.federation_namespace),
+                f"federation:music:{self.federation_namespace}-detail",
                 kwargs={"uuid": self.uuid},
             )
         )

@@ -252,10 +250,10 @@ class Artist(APIModelMixin):
         return self.name

     def get_absolute_url(self):
-        return "/library/artists/{}".format(self.pk)
+        return f"/library/artists/{self.pk}"

     def get_moderation_url(self):
-        return "/manage/library/artists/{}".format(self.pk)
+        return f"/manage/library/artists/{self.pk}"

     @classmethod
     def get_or_create_from_name(cls, name, **kwargs):

@@ -396,10 +394,10 @@ class Album(APIModelMixin):
         return self.title

     def get_absolute_url(self):
-        return "/library/albums/{}".format(self.pk)
+        return f"/library/albums/{self.pk}"

     def get_moderation_url(self):
-        return "/manage/library/albums/{}".format(self.pk)
+        return f"/manage/library/albums/{self.pk}"

     @classmethod
     def get_or_create_from_title(cls, title, **kwargs):

@@ -557,10 +555,10 @@ class Track(APIModelMixin):
         return self.title

     def get_absolute_url(self):
-        return "/library/tracks/{}".format(self.pk)
+        return f"/library/tracks/{self.pk}"

     def get_moderation_url(self):
-        return "/manage/library/tracks/{}".format(self.pk)
+        return f"/manage/library/tracks/{self.pk}"

     def save(self, **kwargs):
         try:

@@ -572,9 +570,9 @@ class Track(APIModelMixin):
     @property
     def full_name(self):
         try:
-            return "{} - {} - {}".format(self.artist.name, self.album.title, self.title)
+            return f"{self.artist.name} - {self.album.title} - {self.title}"
         except AttributeError:
-            return "{} - {}".format(self.artist.name, self.title)
+            return f"{self.artist.name} - {self.title}"

     @property
     def cover(self):

@@ -582,8 +580,8 @@ class Track(APIModelMixin):

     def get_activity_url(self):
         if self.mbid:
-            return "https://musicbrainz.org/recording/{}".format(self.mbid)
-        return settings.FUNKWHALE_URL + "/tracks/{}".format(self.pk)
+            return f"https://musicbrainz.org/recording/{self.mbid}"
+        return settings.FUNKWHALE_URL + f"/tracks/{self.pk}"

     @classmethod
     def get_or_create_from_title(cls, title, **kwargs):

@@ -643,7 +641,7 @@ class Track(APIModelMixin):
     @property
     def listen_url(self) -> str:
         # Not using reverse because this is slow
-        return "/api/v1/listen/{}/".format(self.uuid)
+        return f"/api/v1/listen/{self.uuid}/"

     @property
     def local_license(self):

@@ -807,7 +805,7 @@ class Upload(models.Model):
             title_parts.append(self.track.artist.name)

         title = " - ".join(title_parts)
-        filename = "{}.{}".format(title, extension)
+        filename = f"{title}.{extension}"
         tmp_file = tempfile.TemporaryFile()
         for chunk in r.iter_content(chunk_size=512):
             tmp_file.write(chunk)

@@ -824,7 +822,7 @@ class Upload(models.Model):

     @property
     def filename(self) -> str:
-        return "{}.{}".format(self.track.full_name, self.extension)
+        return f"{self.track.full_name}.{self.extension}"

     @property
     def extension(self):

@@ -900,12 +898,12 @@ class Upload(models.Model):

     @property
     def listen_url(self) -> str:
-        return self.track.listen_url + "?upload={}".format(self.uuid)
+        return self.track.listen_url + f"?upload={self.uuid}"

     def get_listen_url(self, to=None, download=True) -> str:
         url = self.listen_url
         if to:
-            url += "&to={}".format(to)
+            url += f"&to={to}"
         if not download:
             url += "&download=false"

@@ -946,9 +944,9 @@ class Upload(models.Model):
         bitrate = min(bitrate or 320000, self.bitrate or 320000)
         version = self.versions.create(mimetype=mimetype, bitrate=bitrate, size=0)
         # we keep the same name, but we update the extension
-        new_name = os.path.splitext(os.path.basename(self.audio_file.name))[
-            0
-        ] + ".{}".format(format)
+        new_name = (
+            os.path.splitext(os.path.basename(self.audio_file.name))[0] + f".{format}"
+        )
         version.audio_file.save(new_name, f)
         utils.transcode_audio(
             audio=self.get_audio_segment(),
@@ -1091,9 +1089,7 @@ class ImportBatch(models.Model):
         tasks.import_batch_notify_followers.delay(import_batch_id=self.pk)

     def get_federation_id(self):
-        return federation_utils.full_url(
-            "/federation/music/import/batch/{}".format(self.uuid)
-        )
+        return federation_utils.full_url(f"/federation/music/import/batch/{self.uuid}")


 class ImportJob(models.Model):

@@ -1204,7 +1200,7 @@ class Library(federation_models.FederationMixin):
         return self.name

     def get_moderation_url(self) -> str:
-        return "/manage/library/libraries/{}".format(self.uuid)
+        return f"/manage/library/libraries/{self.uuid}"

     def get_federation_id(self) -> str:
         return federation_utils.full_url(

@@ -1212,7 +1208,7 @@ class Library(federation_models.FederationMixin):
         )

     def get_absolute_url(self) -> str:
-        return "/library/{}".format(self.uuid)
+        return f"/library/{self.uuid}"

     def save(self, **kwargs):
         if not self.pk and not self.fid and self.actor.is_local:

@@ -40,7 +40,7 @@ class CoverField(common_serializers.AttachmentSerializer):
 cover_field = CoverField()


-class OptionalDescriptionMixin(object):
+class OptionalDescriptionMixin:
     def to_representation(self, obj):
         repr = super().to_representation(obj)
         if self.context.get("description", False):

@@ -579,7 +579,7 @@ class TrackActivitySerializer(activity_serializers.ModelSerializer):


 def get_embed_url(type, id):
-    return settings.FUNKWHALE_EMBED_URL + "?type={}&id={}".format(type, id)
+    return settings.FUNKWHALE_EMBED_URL + f"?type={type}&id={id}"


 class OembedSerializer(serializers.Serializer):

@@ -619,7 +619,7 @@ class OembedSerializer(serializers.Serializer):
             )
             embed_type = "track"
             embed_id = track.pk
-            data["title"] = "{} by {}".format(track.title, track.artist.name)
+            data["title"] = f"{track.title} by {track.artist.name}"
             if track.attachment_cover:
                 data[
                     "thumbnail_url"

@@ -658,8 +658,8 @@ class OembedSerializer(serializers.Serializer):
                 ] = album.attachment_cover.download_url_medium_square_crop
                 data["thumbnail_width"] = 200
                 data["thumbnail_height"] = 200
-            data["title"] = "{} by {}".format(album.title, album.artist.name)
-            data["description"] = "{} by {}".format(album.title, album.artist.name)
+            data["title"] = f"{album.title} by {album.artist.name}"
+            data["description"] = f"{album.title} by {album.artist.name}"
             data["author_name"] = album.artist.name
             data["height"] = 400
             data["author_url"] = federation_utils.full_url(

@@ -109,7 +109,7 @@ def library_track(request, pk, redirect_to_ap):
             "type": "application/json+oembed",
             "href": (
                 utils.join_url(settings.FUNKWHALE_URL, reverse("api:v1:oembed"))
-                + "?format=json&url={}".format(urllib.parse.quote_plus(track_url))
+                + f"?format=json&url={urllib.parse.quote_plus(track_url)}"
             ),
         }
     )

@@ -181,7 +181,7 @@ def library_album(request, pk, redirect_to_ap):
             "type": "application/json+oembed",
             "href": (
                 utils.join_url(settings.FUNKWHALE_URL, reverse("api:v1:oembed"))
-                + "?format=json&url={}".format(urllib.parse.quote_plus(album_url))
+                + f"?format=json&url={urllib.parse.quote_plus(album_url)}"
             ),
         }
     )

@@ -245,7 +245,7 @@ def library_artist(request, pk, redirect_to_ap):
             "type": "application/json+oembed",
             "href": (
                 utils.join_url(settings.FUNKWHALE_URL, reverse("api:v1:oembed"))
-                + "?format=json&url={}".format(urllib.parse.quote_plus(artist_url))
+                + f"?format=json&url={urllib.parse.quote_plus(artist_url)}"
             ),
         }
     )

@@ -297,7 +297,7 @@ def library_playlist(request, pk, redirect_to_ap):
             "type": "application/json+oembed",
             "href": (
                 utils.join_url(settings.FUNKWHALE_URL, reverse("api:v1:oembed"))
-                + "?format=json&url={}".format(urllib.parse.quote_plus(obj_url))
+                + f"?format=json&url={urllib.parse.quote_plus(obj_url)}"
             ),
         }
     )
@@ -66,7 +66,7 @@ def get_cover_from_fs(dir_path):
     if os.path.exists(dir_path):
         for name in FOLDER_IMAGE_NAMES:
             for e, m in IMAGE_TYPES:
-                cover_path = os.path.join(dir_path, "{}.{}".format(name, e))
+                cover_path = os.path.join(dir_path, f"{name}.{e}")
                 if not os.path.exists(cover_path):
                     logger.debug("Cover %s does not exists", cover_path)
                     continue

@@ -764,7 +764,7 @@ def broadcast_import_status_update_to_owner(old_status, new_status, upload, **kw

     from . import serializers

-    group = "user.{}.imports".format(user.pk)
+    group = f"user.{user.pk}.imports"
     channels.group_send(
         group,
         {

@@ -788,7 +788,7 @@ def clean_transcoding_cache():
     limit = timezone.now() - datetime.timedelta(minutes=delay)
     candidates = (
         models.UploadVersion.objects.filter(
-            (Q(accessed_date__lt=limit) | Q(accessed_date=None))
+            Q(accessed_date__lt=limit) | Q(accessed_date=None)
         )
         .only("audio_file", "id")
         .order_by("id")

@@ -67,9 +67,7 @@ AUDIO_EXTENSIONS_AND_MIMETYPE = [
 EXTENSION_TO_MIMETYPE = {ext: mt for ext, mt in AUDIO_EXTENSIONS_AND_MIMETYPE}
 MIMETYPE_TO_EXTENSION = {mt: ext for ext, mt in AUDIO_EXTENSIONS_AND_MIMETYPE}

-SUPPORTED_EXTENSIONS = list(
-    sorted(set([ext for ext, _ in AUDIO_EXTENSIONS_AND_MIMETYPE]))
-)
+SUPPORTED_EXTENSIONS = list(sorted({ext for ext, _ in AUDIO_EXTENSIONS_AND_MIMETYPE}))


 def get_ext_from_type(mimetype):

@@ -101,7 +101,7 @@ def refetch_obj(obj, queryset):
     return obj


-class HandleInvalidSearch(object):
+class HandleInvalidSearch:
     def list(self, *args, **kwargs):
         try:
             return super().list(*args, **kwargs)

@@ -532,8 +532,8 @@ def should_transcode(upload, format, max_bitrate=None):


 def get_content_disposition(filename):
-    filename = "filename*=UTF-8''{}".format(urllib.parse.quote(filename))
-    return "attachment; {}".format(filename)
+    filename = f"filename*=UTF-8''{urllib.parse.quote(filename)}"
+    return f"attachment; {filename}"


 def record_downloads(f):

@@ -16,10 +16,10 @@ def clean_artist_search(query, **kwargs):
     return _api.search_artists(query, **cleaned_kwargs)


-class API(object):
+class API:
     _api = _api

-    class artists(object):
+    class artists:
         search = cache_memoize(
             settings.MUSICBRAINZ_CACHE_DURATION,
             prefix="memoize:musicbrainz:clean_artist_search",

@@ -29,13 +29,13 @@ class API(object):
             prefix="memoize:musicbrainz:get_artist_by_id",
         )(_api.get_artist_by_id)

-    class images(object):
+    class images:
         get_front = cache_memoize(
             settings.MUSICBRAINZ_CACHE_DURATION,
             prefix="memoize:musicbrainz:get_image_front",
         )(_api.get_image_front)

-    class recordings(object):
+    class recordings:
         search = cache_memoize(
             settings.MUSICBRAINZ_CACHE_DURATION,
             prefix="memoize:musicbrainz:search_recordings",

@@ -45,7 +45,7 @@ class API(object):
             prefix="memoize:musicbrainz:get_recording_by_id",
         )(_api.get_recording_by_id)

-    class releases(object):
+    class releases:
         search = cache_memoize(
             settings.MUSICBRAINZ_CACHE_DURATION,
             prefix="memoize:musicbrainz:search_releases",

@@ -60,7 +60,7 @@ class API(object):
         )(_api.browse_releases)
         # get_image_front = _api.get_image_front

-    class release_groups(object):
+    class release_groups:
         search = cache_memoize(
             settings.MUSICBRAINZ_CACHE_DURATION,
             prefix="memoize:musicbrainz:search_release_groups",
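All the class X(object) rewrites in this diff are behavior-neutral: on Python 3 every class is new-style and inherits from object implicitly, which is why dropping the explicit base is safe:

    class API:  # identical to `class API(object):` on Python 3
        pass

    assert API.__mro__ == (API, object)  # object is still the base class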
@@ -82,7 +82,7 @@ class Playlist(models.Model):
         return self.name

     def get_absolute_url(self):
-        return "/library/playlists/{}".format(self.pk)
+        return f"/library/playlists/{self.pk}"

     @transaction.atomic
     def insert(self, plt, index=None, allow_duplicates=True):

@@ -151,7 +151,7 @@ class Playlist(models.Model):
         max_tracks = preferences.get("playlists__max_tracks")
         if existing.count() + len(tracks) > max_tracks:
             raise exceptions.ValidationError(
-                "Playlist would reach the maximum of {} tracks".format(max_tracks)
+                f"Playlist would reach the maximum of {max_tracks} tracks"
             )

         if not allow_duplicates:

@@ -66,7 +66,7 @@ def clean_config(filter_config):
     return f.clean_config(filter_config)


-class RadioFilter(object):
+class RadioFilter:
     help_text = None
     label = None
     fields = []

@@ -114,7 +114,7 @@ class GroupFilter(RadioFilter):
         elif operator == "or":
             final_query |= query
         else:
-            raise ValueError('Invalid query operator "{}"'.format(operator))
+            raise ValueError(f'Invalid query operator "{operator}"')
         return final_query

     def validate(self, config):

@@ -171,7 +171,7 @@ class ArtistFilter(RadioFilter):
         except KeyError:
             raise ValidationError("You must provide an id")
         except AssertionError:
-            raise ValidationError('No artist matching ids "{}"'.format(diff))
+            raise ValidationError(f'No artist matching ids "{diff}"')


 @registry.register

@@ -226,7 +226,7 @@ class TagFilter(RadioFilter):
         except KeyError:
             raise ValidationError("You must provide a name")
         except AssertionError:
-            raise ValidationError('No tag matching names "{}"'.format(diff))
+            raise ValidationError(f'No tag matching names "{diff}"')


 @registry.register
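The ValidationError hunks keep the single-quoted f'...' delimiters so the embedded double quotes around the placeholder need no escaping, the same choice any string literal would make:

    operator = "xor"  # hypothetical invalid value
    message = f'Invalid query operator "{operator}"'
    print(message)  # -> Invalid query operator "xor"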
@@ -20,7 +20,7 @@ from .registries import registry
 logger = logging.getLogger(__name__)


-class SimpleRadio(object):
+class SimpleRadio:
     related_object_field = None

     def clean(self, instance):

@@ -20,7 +20,7 @@ ET._original_serialize_xml = ET._serialize_xml

 def _serialize_xml(write, elem, qnames, namespaces, **kwargs):
     if elem.tag == "![CDATA[":
-        write("<%s%s]]>" % (elem.tag, elem.text))
+        write(f"<{elem.tag}{elem.text}]]>")
         return
     return ET._original_serialize_xml(write, elem, qnames, namespaces, **kwargs)

@@ -39,7 +39,7 @@ def get_track_path(track, suffix):
         parts.append(get_valid_filepart(track.album.title))
     track_part = get_valid_filepart(track.title) + "." + suffix
     if track.position:
-        track_part = "{} - {}".format(track.position, track_part)
+        track_part = f"{track.position} - {track_part}"
     parts.append(track_part)
     return "/".join(parts)

@@ -84,7 +84,7 @@ class GetArtistSerializer(serializers.Serializer):
         "album": [],
     }
     if artist.attachment_cover_id:
-        payload["coverArt"] = "ar-{}".format(artist.id)
+        payload["coverArt"] = f"ar-{artist.id}"
     for album in albums:
         album_data = {
             "id": album.id,

@@ -95,7 +95,7 @@ class GetArtistSerializer(serializers.Serializer):
             "songCount": len(album.tracks.all()),
         }
         if album.attachment_cover_id:
-            album_data["coverArt"] = "al-{}".format(album.id)
+            album_data["coverArt"] = f"al-{album.id}"
         if album.release_date:
             album_data["year"] = album.release_date.year
         payload["album"].append(album_data)

@@ -128,7 +128,7 @@ def get_track_data(album, track, upload):
         "type": "music",
     }
     if album and album.attachment_cover_id:
-        data["coverArt"] = "al-{}".format(album.id)
+        data["coverArt"] = f"al-{album.id}"
     if upload.bitrate:
         data["bitrate"] = int(upload.bitrate / 1000)
     if upload.size:

@@ -151,7 +151,7 @@ def get_album2_data(album):
         "playCount": album.tracks.aggregate(l=Count("listenings"))["l"] or 0,
     }
     if album.attachment_cover_id:
-        payload["coverArt"] = "al-{}".format(album.id)
+        payload["coverArt"] = f"al-{album.id}"
     if album.tagged_items:
         # exposes only first genre since the specification uses singular noun
         first_genre = album.tagged_items.first()

@@ -308,7 +308,7 @@ def get_channel_data(channel, uploads):
         "description": channel.artist.description.as_plain_text
         if channel.artist.description
         else "",
-        "coverArt": "at-{}".format(channel.artist.attachment_cover.uuid)
+        "coverArt": f"at-{channel.artist.attachment_cover.uuid}"
         if channel.artist.attachment_cover
         else "",
         "originalImageUrl": channel.artist.attachment_cover.url

@@ -333,7 +333,7 @@ def get_channel_episode_data(upload, channel_id):
         "description": upload.track.description.as_plain_text
         if upload.track.description
         else "",
-        "coverArt": "at-{}".format(upload.track.attachment_cover.uuid)
+        "coverArt": f"at-{upload.track.attachment_cover.uuid}"
         if upload.track.attachment_cover
         else "",
         "isDir": "false",
@@ -67,7 +67,7 @@ def find_object(
         {
             "error": {
                 "code": 0,
-                "message": 'For input string "{}"'.format(raw_value),
+                "message": f'For input string "{raw_value}"',
             }
         }
     )

@@ -86,7 +86,7 @@ def find_object(
         {
             "error": {
                 "code": 70,
-                "message": "{} not found".format(qs.model.__name__),
+                "message": f"{qs.model.__name__} not found",
             }
         }
     )

@@ -904,7 +904,7 @@ class SubsonicViewSet(viewsets.GenericViewSet):
         {
             "error": {
                 "code": 0,
-                "message": "Error while fetching url: {}".format(e),
+                "message": f"Error while fetching url: {e}",
             }
         }
     )

@@ -14,7 +14,7 @@ class TagNameField(serializers.CharField):
     def to_internal_value(self, value):
         value = super().to_internal_value(value)
         if not models.TAG_REGEX.match(value):
-            raise serializers.ValidationError('Invalid tag "{}"'.format(value))
+            raise serializers.ValidationError(f'Invalid tag "{value}"')
         return value


@@ -14,16 +14,14 @@ def get_tags_from_foreign_key(
     """
     data = {}
     objs = foreign_key_model.objects.filter(
-        **{"{}__pk__in".format(foreign_key_attr): ids}
+        **{f"{foreign_key_attr}__pk__in": ids}
     ).order_by("-id")
-    objs = objs.only("id", "{}_id".format(foreign_key_attr)).prefetch_related(
-        tagged_items_attr
-    )
+    objs = objs.only("id", f"{foreign_key_attr}_id").prefetch_related(tagged_items_attr)

     for obj in objs.iterator():
         # loop on all objects, store the objs tags + counter on the corresponding foreign key
         row_data = data.setdefault(
-            getattr(obj, "{}_id".format(foreign_key_attr)),
+            getattr(obj, f"{foreign_key_attr}_id"),
             {"total_objs": 0, "tags": []},
         )
         row_data["total_objs"] += 1

@@ -1,5 +1,3 @@
-from __future__ import absolute_import
-
 import functools
 import logging
 import os
@@ -22,7 +20,7 @@ app = celery.Celery("funkwhale_api")

 @celery.signals.task_failure.connect
 def process_failure(sender, task_id, exception, args, kwargs, traceback, einfo, **kw):
-    print("[celery] Error during task {}: {}".format(task_id, einfo.exception))
+    print(f"[celery] Error during task {task_id}: {einfo.exception}")
     tb.print_exc()

@@ -1 +0,0 @@
-# -*- coding: utf-8 -*-

@@ -1,6 +1,3 @@
-# -*- coding: utf-8 -*-
-from __future__ import absolute_import, unicode_literals
-
 from django import forms
 from django.contrib.auth.admin import UserAdmin as AuthUserAdmin
 from django.contrib.auth.forms import UserChangeForm, UserCreationForm

@@ -9,7 +9,7 @@ from .oauth import scopes as available_scopes

 def generate_scoped_token(user_id, user_secret, scopes):
     if set(scopes) & set(available_scopes.SCOPES_BY_ID) != set(scopes):
-        raise ValueError("{} contains invalid scopes".format(scopes))
+        raise ValueError(f"{scopes} contains invalid scopes")

     return signing.dumps(
         {

@@ -10,7 +10,7 @@ from . import models

 @registry.register
 class GroupFactory(NoUpdateOnCreate, factory.django.DjangoModelFactory):
-    name = factory.Sequence(lambda n: "group-{0}".format(n))
+    name = factory.Sequence(lambda n: f"group-{n}")

     class Meta:
         model = "auth.Group"

@@ -1,6 +1,3 @@
-# -*- coding: utf-8 -*-
-from __future__ import absolute_import, unicode_literals
-
 import datetime
 import os
 import random

@@ -33,7 +30,7 @@ def get_token(length=5):
     wordlist_path = os.path.join(
         os.path.dirname(os.path.abspath(__file__)), "wordlist.txt"
     )
-    with open(wordlist_path, "r") as f:
+    with open(wordlist_path) as f:
         words = f.readlines()
     phrase = "".join(random.choice(words) for i in range(length))
     return phrase.replace("\n", "-").rstrip("-")
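Dropping the explicit "r" in get_token changes nothing: open() defaults to text-mode reading. A runnable sketch that fakes the wordlist with a temp file:

    import os
    import random
    import tempfile

    with tempfile.NamedTemporaryFile("w", suffix=".txt", delete=False) as tmp:
        tmp.write("alpha\nbravo\ncharlie\n")  # stand-in wordlist

    with open(tmp.name) as f:  # same as open(tmp.name, "r")
        words = f.readlines()

    print("".join(random.choice(words) for _ in range(3)).replace("\n", "-").rstrip("-"))
    os.remove(tmp.name)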
@@ -201,11 +198,7 @@ class User(AbstractUser):
         defaults = defaults or preferences.get("users__default_permissions")
         perms = {}
         for p in PERMISSIONS:
-            v = (
-                self.is_superuser
-                or getattr(self, "permission_{}".format(p))
-                or p in defaults
-            )
+            v = self.is_superuser or getattr(self, f"permission_{p}") or p in defaults
             perms[p] = v
         return perms

@@ -226,7 +219,7 @@ class User(AbstractUser):
     def has_permissions(self, *perms, **kwargs):
         operator = kwargs.pop("operator", "and")
         if operator not in ["and", "or"]:
-            raise ValueError("Invalid operator {}".format(operator))
+            raise ValueError(f"Invalid operator {operator}")
         permissions = self.get_permissions()
         checker = all if operator == "and" else any
         return checker([permissions[p] for p in perms])

@@ -249,7 +242,7 @@ class User(AbstractUser):
             self.update_subsonic_api_token()

     def get_activity_url(self):
-        return settings.FUNKWHALE_URL + "/@{}".format(self.username)
+        return settings.FUNKWHALE_URL + f"/@{self.username}"

     def record_activity(self):
         """

@@ -292,16 +285,16 @@ class User(AbstractUser):

     def get_channels_groups(self):
         groups = ["imports", "inbox"]
-        groups = ["user.{}.{}".format(self.pk, g) for g in groups]
+        groups = [f"user.{self.pk}.{g}" for g in groups]

         for permission, value in self.all_permissions.items():
             if value:
-                groups.append("admin.{}".format(permission))
+                groups.append(f"admin.{permission}")

         return groups

     def full_username(self) -> str:
-        return "{}@{}".format(self.username, settings.FEDERATION_HOSTNAME)
+        return f"{self.username}@{settings.FEDERATION_HOSTNAME}"

     def get_avatar(self):
         if not self.actor:
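get_permissions leans on the same computed-name trick as the serializers earlier: the permission columns follow a permission_<name> convention, so getattr with an f-string reads them generically. A sketch with a fake user object (names are illustrative):

    class FakeUser:
        is_superuser = False
        permission_library = True
        permission_moderation = False

    PERMISSIONS = ["library", "moderation"]
    user = FakeUser()
    perms = {p: user.is_superuser or getattr(user, f"permission_{p}") for p in PERMISSIONS}
    print(perms)  # -> {'library': True, 'moderation': False}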
@@ -55,12 +55,12 @@ class ScopePermission(permissions.BasePermission):
         anonymous_policy = getattr(view, "anonymous_policy", False)
         if anonymous_policy not in [True, False, "setting"]:
             raise ImproperlyConfigured(
-                "{} is not a valid value for anonymous_policy".format(anonymous_policy)
+                f"{anonymous_policy} is not a valid value for anonymous_policy"
             )
         if isinstance(scope_config, str):
             scope_config = {
-                "read": "read:{}".format(scope_config),
-                "write": "write:{}".format(scope_config),
+                "read": f"read:{scope_config}",
+                "write": f"write:{scope_config}",
             }
         action = METHOD_SCOPE_MAPPING[request.method.lower()]
         required_scope = scope_config[action]
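The ScopePermission hunk expands a bare scope name into a read/write pair; extracted into a plain function, the logic looks like this (the function name is ours, not the project's):

    def expand_scope_config(scope_config):
        if isinstance(scope_config, str):
            scope_config = {
                "read": f"read:{scope_config}",
                "write": f"write:{scope_config}",
            }
        return scope_config

    print(expand_scope_config("libraries"))
    # -> {'read': 'read:libraries', 'write': 'write:libraries'}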
@@ -5,7 +5,7 @@ class Scope:
         self.children = children or []

     def copy(self, prefix):
-        return Scope("{}:{}".format(prefix, self.id))
+        return Scope(f"{prefix}:{self.id}")


 BASE_SCOPES = [

@@ -665,7 +665,7 @@ def test_rss_feed_item_serializer_create(factories):

     expected_uuid = uuid.uuid3(
         uuid.NAMESPACE_URL,
-        "rss://{}-16f66fff-41ae-4a1c-9101-2746218c4f32".format(channel.pk),
+        f"rss://{channel.pk}-16f66fff-41ae-4a1c-9101-2746218c4f32",
     )
     assert upload.library == channel.library
     assert upload.import_status == "finished"

@@ -692,7 +692,7 @@ def test_rss_feed_item_serializer_update(factories):
     channel = factories["audio.Channel"](rss_url=rss_url, external=True)
     expected_uuid = uuid.uuid3(
         uuid.NAMESPACE_URL,
-        "rss://{}-16f66fff-41ae-4a1c-9101-2746218c4f32".format(channel.pk),
+        f"rss://{channel.pk}-16f66fff-41ae-4a1c-9101-2746218c4f32",
     )
     upload = factories["music.Upload"](
         track__uuid=expected_uuid,
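The RSS tests can predict the upload's uuid because uuid3 is a deterministic hash of a namespace plus a name; the f-string only changes how that name is spelled. A quick demonstration with a hypothetical primary key:

    import uuid

    channel_pk = 42  # hypothetical primary key
    name = f"rss://{channel_pk}-16f66fff-41ae-4a1c-9101-2746218c4f32"
    assert uuid.uuid3(uuid.NAMESPACE_URL, name) == uuid.uuid3(uuid.NAMESPACE_URL, name)
    print(uuid.uuid3(uuid.NAMESPACE_URL, name))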
@@ -14,8 +14,8 @@ def test_channel_detail(attribute, spa_html, no_api_auth, client, factories, set
         library__privacy_level="everyone", artist__with_cover=True
     )
     factories["music.Upload"](playable=True, library=channel.library)
-    url = "/channels/{}".format(utils.recursive_getattr(channel, attribute))
-    detail_url = "/channels/{}".format(channel.actor.full_username)
+    url = f"/channels/{utils.recursive_getattr(channel, attribute)}"
+    detail_url = f"/channels/{channel.actor.full_username}"

     response = client.get(url)

@@ -44,7 +44,7 @@ def test_channel_detail(attribute, spa_html, no_api_auth, client, factories, set
             "rel": "alternate",
             "type": "application/rss+xml",
             "href": channel.get_rss_url(),
-            "title": "{} - RSS Podcast Feed".format(channel.artist.name),
+            "title": f"{channel.artist.name} - RSS Podcast Feed",
         },
         {
             "tag": "link",

@@ -81,8 +81,8 @@ def test_oembed_channel(factories, no_api_auth, api_client, settings):
     channel = factories["audio.Channel"](artist__with_cover=True)
     artist = channel.artist
     url = reverse("api:v1:oembed")
-    obj_url = "https://test.com/channels/{}".format(channel.uuid)
-    iframe_src = "http://embed?type=channel&id={}".format(channel.uuid)
+    obj_url = f"https://test.com/channels/{channel.uuid}"
+    iframe_src = f"http://embed?type=channel&id={channel.uuid}"
     expected = {
         "version": "1.0",
         "type": "rich",

@@ -442,7 +442,7 @@ def test_can_filter_channels_through_api_scope(factories, logged_in_api_client):
     factories["audio.Channel"]()
     url = reverse("api:v1:channels-list")
     response = logged_in_api_client.get(
-        url, {"scope": "actor:{}".format(channel.attributed_to.full_username)}
+        url, {"scope": f"actor:{channel.attributed_to.full_username}"}
     )

     assert response.status_code == 200

@@ -34,7 +34,7 @@ def test_should_verify_email(
 def test_app_token_authentication(factories, api_request):
     user = factories["users.User"]()
     app = factories["users.Application"](user=user, scope="read write")
-    request = api_request.get("/", HTTP_AUTHORIZATION="Bearer {}".format(app.token))
+    request = api_request.get("/", HTTP_AUTHORIZATION=f"Bearer {app.token}")

     auth = authentication.ApplicationTokenAuthentication()
     assert auth.authenticate(request)[0] == app.user