Merge branch 'develop'

commit 62f401ed5a

.env.dev
@@ -12,6 +12,7 @@ MUSIC_DIRECTORY_PATH=/music
BROWSABLE_API_ENABLED=True
FORWARDED_PROTO=http
LDAP_ENABLED=False
FUNKWHALE_SPA_HTML_ROOT=http://nginx/front/

# Uncomment this if you're using traefik/https
# FORCE_HTTPS_URLS=True

@@ -114,7 +114,7 @@ black:
  before_script:
    - pip install black
  script:
    - black --exclude "/(\.git|\.hg|\.mypy_cache|\.tox|\.venv|_build|buck-out|build|dist|migrations)/" --check --diff api/
    - black --check --diff api/

flake8:
  image: python:3.6

@@ -281,6 +281,7 @@ build_api:
    paths:
      - api
  script:
    - rm -rf api/tests
    - (if [ "$CI_COMMIT_REF_NAME" == "develop" ]; then ./scripts/set-api-build-metadata.sh $(echo $CI_COMMIT_SHA | cut -c 1-8); fi);
    - chmod -R 750 api
    - echo Done!

@@ -1,6 +1,9 @@
<!--
Hi there! You are reporting a bug on this project, and we want to thank you!

If it's the first time you post here, please take a moment to read our Code of Conduct
(https://funkwhale.audio/code-of-conduct/) and ensure your issue respects our guidelines.

To ensure your bug report is as useful as possible, please try to stick
to the following structure. You can leave the text between the `<!- ->`
markers untouched; it won't be displayed in your final message.

@@ -1,6 +1,10 @@
<!--
Hi there! You are about to share a feature request or an idea, and we want to thank you!

If it's the first time you post here, please take a moment to read our Code of Conduct
(https://funkwhale.audio/code-of-conduct/) and ensure your issue respects our guidelines.

To ensure we can deal with your idea or request, please try to stick
to the following structure. You can leave the text between the `<!- ->`
markers untouched; it won't be displayed in your final message.

@@ -172,6 +172,10 @@ and metadata.
Launch all services
^^^^^^^^^^^^^^^^^^^

Before the first Funkwhale launch, you need to run this::

    docker-compose -f dev.yml run --rm front yarn run i18n-compile

Then you can run everything with::

    docker-compose -f dev.yml up front api nginx celeryworker

@@ -276,7 +280,8 @@ When working on federation with traefik, ensure you have this in your ``env``::
    EXTERNAL_REQUESTS_VERIFY_SSL=false
    # this ensures you don't have incorrect URLs pointing to http resources
    FUNKWHALE_PROTOCOL=https

    # Disable host ports binding for the nginx container, as traefik is serving everything
    NGINX_PORTS_MAPPING=80

Typical workflow for a contribution
-----------------------------------

@@ -513,13 +518,15 @@ It's possible to nest multiple component parts to reach a higher level of detail
- ``Content/*/Form.Help text``

Collecting translatable strings
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^

If you want to ensure your translatable strings are correctly marked for translation,
you can try to extract them.

Extraction is done by calling ``yarn run i18n-extract``, which
will pull all the strings from source files and put them in a PO file.
will pull all the strings from source files and put them in PO files.

You can then inspect the PO files to ensure everything is fine (but don't commit them, as it's not needed).

Contributing to the API
-----------------------

@@ -31,4 +31,9 @@ are outlined in `CONTRIBUTING <CONTRIBUTING.rst>`_.
Translate
^^^^^^^^^

Translators willing to help can refer to `TRANSLATORS <TRANSLATORS>`_ for instructions.
Translators willing to help can refer to `TRANSLATORS <TRANSLATORS.rst>`_ for instructions.

Code of Conduct
---------------

`Our Code of Conduct <https://funkwhale.audio/code-of-conduct/>`_ applies to all the community spaces, including our GitLab instance. Please take a moment to read it.

@@ -22,5 +22,6 @@ fi
if [ -d "frontend" ]; then
    mkdir -p /frontend
    cp -r frontend/* /frontend/
    export FUNKWHALE_SPA_HTML_ROOT=/frontend/index.html
fi
exec "$@"

@@ -5,6 +5,7 @@ from rest_framework.urlpatterns import format_suffix_patterns
from rest_framework_jwt import views as jwt_views

from funkwhale_api.activity import views as activity_views
from funkwhale_api.common import views as common_views
from funkwhale_api.music import views
from funkwhale_api.playlists import views as playlists_views
from funkwhale_api.subsonic.views import SubsonicViewSet

@@ -24,6 +25,7 @@ router.register(r"playlists", playlists_views.PlaylistViewSet, "playlists")
router.register(
    r"playlist-tracks", playlists_views.PlaylistTrackViewSet, "playlist-tracks"
)
router.register(r"mutations", common_views.MutationViewSet, "mutations")
v1_patterns = router.urls

subsonic_router = routers.SimpleRouter(trailing_slash=False)

@@ -40,6 +42,12 @@ v1_patterns += [
        r"^manage/",
        include(("funkwhale_api.manage.urls", "manage"), namespace="manage"),
    ),
    url(
        r"^moderation/",
        include(
            ("funkwhale_api.moderation.urls", "moderation"), namespace="moderation"
        ),
    ),
    url(
        r"^federation/",
        include(

@@ -67,6 +75,10 @@ v1_patterns += [
        r"^users/",
        include(("funkwhale_api.users.api_urls", "users"), namespace="users"),
    ),
    url(
        r"^oauth/",
        include(("funkwhale_api.users.oauth.urls", "oauth"), namespace="oauth"),
    ),
    url(r"^token/$", jwt_views.obtain_jwt_token, name="token"),
    url(r"^token/refresh/$", jwt_views.refresh_jwt_token, name="token_refresh"),
]

@@ -1,9 +1,9 @@
import os

import django
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "config.settings.production")

import django  # noqa

django.setup()

from .routing import application  # noqa

os.environ.setdefault("DJANGO_SETTINGS_MODULE", "config.settings.production")

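A note on the reordering above: django.setup() resolves DJANGO_SETTINGS_MODULE when it runs, so the environment variable has to be exported before Django is imported and initialised. A minimal sketch of the required ordering (it assumes the project's ``config.settings.production`` module is importable)::

    import os

    # Export the settings module before anything touches Django machinery.
    os.environ.setdefault("DJANGO_SETTINGS_MODULE", "config.settings.production")

    import django  # noqa: E402

    # Safe now: the app registry can locate the settings declared above.
    django.setup()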
@@ -29,7 +29,6 @@ env_file = env("ENV_FILE", default=None)
if env_file:
    # we have an explicitly specified env file
    # so we try to load it, and fail loudly if it does not exist
    print("ENV_FILE", env_file)
    env.read_env(env_file)
else:
    # we try to load from .env and config/.env

@@ -79,7 +78,7 @@ FUNKWHALE_SPA_HTML_CACHE_DURATION = env.int(
    "FUNKWHALE_SPA_HTML_CACHE_DURATION", default=60 * 15
)
FUNKWHALE_EMBED_URL = env(
    "FUNKWHALE_EMBED_URL", default=FUNKWHALE_SPA_HTML_ROOT + "embed.html"
    "FUNKWHALE_EMBED_URL", default=FUNKWHALE_URL + "/front/embed.html"
)
APP_NAME = "Funkwhale"

@@ -94,6 +93,9 @@ FEDERATION_MUSIC_NEEDS_APPROVAL = env.bool(
)
# XXX: deprecated, see #186
FEDERATION_ACTOR_FETCH_DELAY = env.int("FEDERATION_ACTOR_FETCH_DELAY", default=60 * 12)
FEDERATION_SERVICE_ACTOR_USERNAME = env(
    "FEDERATION_SERVICE_ACTOR_USERNAME", default="service"
)
ALLOWED_HOSTS = env.list("DJANGO_ALLOWED_HOSTS", default=[]) + [FUNKWHALE_HOSTNAME]

# APP CONFIGURATION

@@ -119,6 +121,7 @@ THIRD_PARTY_APPS = (
    "allauth.account",  # registration
    "allauth.socialaccount",  # registration
    "corsheaders",
    "oauth2_provider",
    "rest_framework",
    "rest_framework.authtoken",
    "taggit",

@@ -147,9 +150,10 @@ if RAVEN_ENABLED:

# Apps specific for this project go here.
LOCAL_APPS = (
    "funkwhale_api.common",
    "funkwhale_api.common.apps.CommonConfig",
    "funkwhale_api.activity.apps.ActivityConfig",
    "funkwhale_api.users",  # custom users app
    "funkwhale_api.users.oauth",
    # Your stuff: custom apps go here
    "funkwhale_api.instance",
    "funkwhale_api.music",

@@ -181,10 +185,6 @@ MIDDLEWARE = (
    "funkwhale_api.users.middleware.RecordActivityMiddleware",
)

# MIGRATIONS CONFIGURATION
# ------------------------------------------------------------------------------
MIGRATION_MODULES = {"sites": "funkwhale_api.contrib.sites.migrations"}

# DEBUG
# ------------------------------------------------------------------------------
# See: https://docs.djangoproject.com/en/dev/ref/settings/#debug

@@ -220,6 +220,16 @@ DATABASES = {
    "default": env.db("DATABASE_URL")
}
DATABASES["default"]["ATOMIC_REQUESTS"] = True
DATABASES["default"]["CONN_MAX_AGE"] = env("DB_CONN_MAX_AGE", default=60 * 60)

MIGRATION_MODULES = {
    # see https://github.com/jazzband/django-oauth-toolkit/issues/634
    # swappable models are badly designed in oauth2_provider
    # ignore migrations and provide our own models.
    "oauth2_provider": None,
    "sites": "funkwhale_api.contrib.sites.migrations",
}

#
# DATABASES = {
#     'default': {

@@ -296,6 +306,25 @@ STATIC_ROOT = env("STATIC_ROOT", default=str(ROOT_DIR("staticfiles")))
STATIC_URL = env("STATIC_URL", default="/staticfiles/")
DEFAULT_FILE_STORAGE = "funkwhale_api.common.storage.ASCIIFileSystemStorage"

PROXY_MEDIA = env.bool("PROXY_MEDIA", default=True)
AWS_DEFAULT_ACL = None
AWS_QUERYSTRING_AUTH = env.bool("AWS_QUERYSTRING_AUTH", default=not PROXY_MEDIA)
AWS_S3_MAX_MEMORY_SIZE = env.int(
    "AWS_S3_MAX_MEMORY_SIZE", default=1000 * 1000 * 1000 * 20
)
AWS_QUERYSTRING_EXPIRE = env.int("AWS_QUERYSTRING_EXPIRE", default=3600)
AWS_ACCESS_KEY_ID = env("AWS_ACCESS_KEY_ID", default=None)

if AWS_ACCESS_KEY_ID:
    AWS_ACCESS_KEY_ID = AWS_ACCESS_KEY_ID
    AWS_SECRET_ACCESS_KEY = env("AWS_SECRET_ACCESS_KEY")
    AWS_STORAGE_BUCKET_NAME = env("AWS_STORAGE_BUCKET_NAME")
    AWS_S3_ENDPOINT_URL = env("AWS_S3_ENDPOINT_URL", default=None)
    AWS_S3_REGION_NAME = env("AWS_S3_REGION_NAME", default=None)
    AWS_S3_SIGNATURE_VERSION = "s3v4"
    AWS_LOCATION = env("AWS_LOCATION", default="")
    DEFAULT_FILE_STORAGE = "storages.backends.s3boto3.S3Boto3Storage"

# See: https://docs.djangoproject.com/en/dev/ref/contrib/staticfiles/#std:setting-STATICFILES_DIRS
STATICFILES_DIRS = (str(APPS_DIR.path("static")),)

@@ -341,6 +370,23 @@ AUTH_USER_MODEL = "users.User"
LOGIN_REDIRECT_URL = "users:redirect"
LOGIN_URL = "account_login"

# OAuth configuration
from funkwhale_api.users.oauth import scopes  # noqa

OAUTH2_PROVIDER = {
    "SCOPES": {s.id: s.label for s in scopes.SCOPES_BY_ID.values()},
    "ALLOWED_REDIRECT_URI_SCHEMES": ["http", "https", "urn"],
    # we keep expired tokens for 15 days, for traceability
    "REFRESH_TOKEN_EXPIRE_SECONDS": 3600 * 24 * 15,
    "AUTHORIZATION_CODE_EXPIRE_SECONDS": 5 * 60,
    "ACCESS_TOKEN_EXPIRE_SECONDS": 60 * 60 * 10,
    "OAUTH2_SERVER_CLASS": "funkwhale_api.users.oauth.server.OAuth2Server",
}
OAUTH2_PROVIDER_APPLICATION_MODEL = "users.Application"
OAUTH2_PROVIDER_ACCESS_TOKEN_MODEL = "users.AccessToken"
OAUTH2_PROVIDER_GRANT_MODEL = "users.Grant"
OAUTH2_PROVIDER_REFRESH_TOKEN_MODEL = "users.RefreshToken"

# LDAP AUTHENTICATION CONFIGURATION
# ------------------------------------------------------------------------------
AUTH_LDAP_ENABLED = env.bool("LDAP_ENABLED", default=False)

@@ -448,16 +494,28 @@ CELERY_TASK_TIME_LIMIT = 300
CELERY_BEAT_SCHEDULE = {
    "federation.clean_music_cache": {
        "task": "federation.clean_music_cache",
        "schedule": crontab(hour="*/2"),
        "schedule": crontab(minute="0", hour="*/2"),
        "options": {"expires": 60 * 2},
    },
    "music.clean_transcoding_cache": {
        "task": "music.clean_transcoding_cache",
        "schedule": crontab(hour="*"),
        "schedule": crontab(minute="0", hour="*"),
        "options": {"expires": 60 * 2},
    },
    "oauth.clear_expired_tokens": {
        "task": "oauth.clear_expired_tokens",
        "schedule": crontab(minute="0", hour="0"),
        "options": {"expires": 60 * 60 * 24},
    },
    "federation.refresh_nodeinfo_known_nodes": {
        "task": "federation.refresh_nodeinfo_known_nodes",
        "schedule": crontab(minute="0", hour="*"),
        "options": {"expires": 60 * 60},
    },
}

NODEINFO_REFRESH_DELAY = env.int("NODEINFO_REFRESH_DELAY", default=3600 * 24)

JWT_AUTH = {
    "JWT_ALLOW_REFRESH": True,
    "JWT_EXPIRATION_DELTA": datetime.timedelta(days=7),

@@ -475,7 +533,6 @@ CORS_ORIGIN_ALLOW_ALL = True
CORS_ALLOW_CREDENTIALS = True

REST_FRAMEWORK = {
    "DEFAULT_PERMISSION_CLASSES": ("rest_framework.permissions.IsAuthenticated",),
    "DEFAULT_PAGINATION_CLASS": "funkwhale_api.common.pagination.FunkwhalePagination",
    "PAGE_SIZE": 25,
    "DEFAULT_PARSER_CLASSES": (

@@ -485,11 +542,15 @@ REST_FRAMEWORK = {
        "funkwhale_api.federation.parsers.ActivityParser",
    ),
    "DEFAULT_AUTHENTICATION_CLASSES": (
        "oauth2_provider.contrib.rest_framework.OAuth2Authentication",
        "funkwhale_api.common.authentication.JSONWebTokenAuthenticationQS",
        "funkwhale_api.common.authentication.BearerTokenHeaderAuth",
        "funkwhale_api.common.authentication.JSONWebTokenAuthentication",
        "rest_framework.authentication.SessionAuthentication",
        "rest_framework.authentication.BasicAuthentication",
        "rest_framework.authentication.SessionAuthentication",
    ),
    "DEFAULT_PERMISSION_CLASSES": (
        "funkwhale_api.users.oauth.permissions.ScopePermission",
    ),
    "DEFAULT_FILTER_BACKENDS": (
        "rest_framework.filters.OrderingFilter",

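A quick check of the CELERY_BEAT_SCHEDULE change above, using Celery's own scheduler primitives (an illustrative sketch; it only assumes Celery is installed)::

    from celery.schedules import crontab

    # Without an explicit minute, crontab() defaults to minute="*", so the
    # old entry was due every minute of every second hour.
    old_schedule = crontab(hour="*/2")

    # Pinning minute="0" makes the task due once per two-hour window.
    new_schedule = crontab(minute="0", hour="*/2")

    print(sorted(old_schedule.minute)[:5])  # [0, 1, 2, 3, 4] -> every minute matches
    print(sorted(new_schedule.minute))      # [0] -> only minute 0 matches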
@@ -62,19 +62,6 @@ CELERY_TASK_ALWAYS_EAGER = False

# Your local stuff: Below this line define 3rd party library settings

LOGGING = {
    "version": 1,
    "handlers": {"console": {"level": "DEBUG", "class": "logging.StreamHandler"}},
    "loggers": {
        "django.request": {
            "handlers": ["console"],
            "propagate": True,
            "level": "DEBUG",
        },
        "django_auth_ldap": {"handlers": ["console"], "level": "DEBUG"},
        "": {"level": "DEBUG", "handlers": ["console"]},
    },
}
CSRF_TRUSTED_ORIGINS = [o for o in ALLOWED_HOSTS]

@@ -1,5 +1,5 @@
# -*- coding: utf-8 -*-
__version__ = "0.18.3"
__version__ = "0.19.0-rc2"
__version_info__ = tuple(
    [
        int(num) if num.isdigit() else num

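The version bump above feeds __version_info__; the diff truncates the comprehension, but assuming it iterates over the dotted version string as in the rest of the file, the parsing works like this sketch::

    __version__ = "0.19.0-rc2"

    # Split "0.19.0-rc2" into numeric and textual parts.
    __version_info__ = tuple(
        int(num) if num.isdigit() else num
        for num in __version__.replace("-", ".", 1).split(".")
    )

    print(__version_info__)  # (0, 19, 0, 'rc2')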
@ -1,6 +1,9 @@
|
|||
from django.contrib.admin import register as initial_register, site, ModelAdmin # noqa
|
||||
from django.db.models.fields.related import RelatedField
|
||||
|
||||
from . import models
|
||||
from . import tasks
|
||||
|
||||
|
||||
def register(model):
|
||||
"""
|
||||
|
@ -17,3 +20,28 @@ def register(model):
|
|||
return initial_register(model)(modeladmin)
|
||||
|
||||
return decorator
|
||||
|
||||
|
||||
def apply(modeladmin, request, queryset):
|
||||
queryset.update(is_approved=True)
|
||||
for id in queryset.values_list("id", flat=True):
|
||||
tasks.apply_mutation.delay(mutation_id=id)
|
||||
|
||||
|
||||
apply.short_description = "Approve and apply"
|
||||
|
||||
|
||||
@register(models.Mutation)
|
||||
class MutationAdmin(ModelAdmin):
|
||||
list_display = [
|
||||
"uuid",
|
||||
"type",
|
||||
"created_by",
|
||||
"creation_date",
|
||||
"applied_date",
|
||||
"is_approved",
|
||||
"is_applied",
|
||||
]
|
||||
search_fields = ["created_by__preferred_username"]
|
||||
list_filter = ["type", "is_approved", "is_applied"]
|
||||
actions = [apply]
|
||||
|
|
|
@ -0,0 +1,13 @@
|
|||
from django.apps import AppConfig, apps
|
||||
|
||||
from . import mutations
|
||||
|
||||
|
||||
class CommonConfig(AppConfig):
|
||||
name = "funkwhale_api.common"
|
||||
|
||||
def ready(self):
|
||||
super().ready()
|
||||
|
||||
app_names = [app.name for app in apps.app_configs.values()]
|
||||
mutations.registry.autodiscover(app_names)
|
|
@ -1,5 +1,17 @@
|
|||
from rest_framework import response
|
||||
from django.db import transaction
|
||||
|
||||
from rest_framework import decorators
|
||||
from rest_framework import exceptions
|
||||
from rest_framework import response
|
||||
from rest_framework import status
|
||||
|
||||
from . import filters
|
||||
from . import models
|
||||
from . import mutations as common_mutations
|
||||
from . import serializers
|
||||
from . import signals
|
||||
from . import tasks
|
||||
from . import utils
|
||||
|
||||
|
||||
def action_route(serializer_class):
|
||||
|
@ -12,3 +24,69 @@ def action_route(serializer_class):
|
|||
return response.Response(result, status=200)
|
||||
|
||||
return action
|
||||
|
||||
|
||||
def mutations_route(types):
|
||||
"""
|
||||
Given a queryset and a list of mutation types, return a view
|
||||
that can be included in any viewset, and serve:
|
||||
|
||||
GET /{id}/mutations/ - list of mutations for the given object
|
||||
POST /{id}/mutations/ - create a mutation for the given object
|
||||
"""
|
||||
|
||||
@transaction.atomic
|
||||
def mutations(self, request, *args, **kwargs):
|
||||
obj = self.get_object()
|
||||
if request.method == "GET":
|
||||
queryset = models.Mutation.objects.get_for_target(obj).filter(
|
||||
type__in=types
|
||||
)
|
||||
queryset = queryset.order_by("-creation_date")
|
||||
filterset = filters.MutationFilter(request.GET, queryset=queryset)
|
||||
page = self.paginate_queryset(filterset.qs)
|
||||
if page is not None:
|
||||
serializer = serializers.APIMutationSerializer(page, many=True)
|
||||
return self.get_paginated_response(serializer.data)
|
||||
|
||||
serializer = serializers.APIMutationSerializer(queryset, many=True)
|
||||
return response.Response(serializer.data)
|
||||
if request.method == "POST":
|
||||
if not request.user.is_authenticated:
|
||||
raise exceptions.NotAuthenticated()
|
||||
serializer = serializers.APIMutationSerializer(
|
||||
data=request.data, context={"registry": common_mutations.registry}
|
||||
)
|
||||
serializer.is_valid(raise_exception=True)
|
||||
if not common_mutations.registry.has_perm(
|
||||
actor=request.user.actor,
|
||||
type=serializer.validated_data["type"],
|
||||
obj=obj,
|
||||
perm="approve"
|
||||
if serializer.validated_data.get("is_approved", False)
|
||||
else "suggest",
|
||||
):
|
||||
raise exceptions.PermissionDenied()
|
||||
|
||||
final_payload = common_mutations.registry.get_validated_payload(
|
||||
type=serializer.validated_data["type"],
|
||||
payload=serializer.validated_data["payload"],
|
||||
obj=obj,
|
||||
)
|
||||
mutation = serializer.save(
|
||||
created_by=request.user.actor,
|
||||
target=obj,
|
||||
payload=final_payload,
|
||||
is_approved=serializer.validated_data.get("is_approved", None),
|
||||
)
|
||||
if mutation.is_approved:
|
||||
utils.on_commit(tasks.apply_mutation.delay, mutation_id=mutation.pk)
|
||||
|
||||
utils.on_commit(
|
||||
signals.mutation_created.send, sender=None, mutation=mutation
|
||||
)
|
||||
return response.Response(serializer.data, status=status.HTTP_201_CREATED)
|
||||
|
||||
return decorators.action(
|
||||
methods=["get", "post"], detail=True, required_scope="edits"
|
||||
)(mutations)
|
||||
|
|
|
@ -0,0 +1,25 @@
|
|||
import factory
|
||||
|
||||
from funkwhale_api.factories import registry, NoUpdateOnCreate
|
||||
|
||||
from funkwhale_api.federation import factories as federation_factories
|
||||
|
||||
|
||||
@registry.register
|
||||
class MutationFactory(NoUpdateOnCreate, factory.django.DjangoModelFactory):
|
||||
fid = factory.Faker("federation_url")
|
||||
uuid = factory.Faker("uuid4")
|
||||
created_by = factory.SubFactory(federation_factories.ActorFactory)
|
||||
summary = factory.Faker("paragraph")
|
||||
type = "update"
|
||||
|
||||
class Meta:
|
||||
model = "common.Mutation"
|
||||
|
||||
@factory.post_generation
|
||||
def target(self, create, extracted, **kwargs):
|
||||
if not create:
|
||||
# Simple build, do nothing.
|
||||
return
|
||||
self.target = extracted
|
||||
self.save()
|
|
@ -1,4 +1,5 @@
|
|||
import django_filters
|
||||
from django import forms
|
||||
from django.db import models
|
||||
|
||||
from . import search
|
||||
|
@ -46,5 +47,8 @@ class SmartSearchFilter(django_filters.CharFilter):
|
|||
def filter(self, qs, value):
|
||||
if not value:
|
||||
return qs
|
||||
cleaned = self.config.clean(value)
|
||||
try:
|
||||
cleaned = self.config.clean(value)
|
||||
except (forms.ValidationError):
|
||||
return qs.none()
|
||||
return search.apply(qs, cleaned)
|
||||
|
|
|
@ -0,0 +1,152 @@
|
|||
from django import forms
|
||||
from django.db.models import Q
|
||||
|
||||
from django_filters import widgets
|
||||
from django_filters import rest_framework as filters
|
||||
|
||||
from . import fields
|
||||
from . import models
|
||||
from . import search
|
||||
|
||||
|
||||
class NoneObject(object):
|
||||
def __eq__(self, other):
|
||||
return other.__class__ == NoneObject
|
||||
|
||||
|
||||
NONE = NoneObject()
|
||||
NULL_BOOLEAN_CHOICES = [
|
||||
(True, True),
|
||||
("true", True),
|
||||
("True", True),
|
||||
("1", True),
|
||||
("yes", True),
|
||||
(False, False),
|
||||
("false", False),
|
||||
("False", False),
|
||||
("0", False),
|
||||
("no", False),
|
||||
("None", NONE),
|
||||
("none", NONE),
|
||||
("Null", NONE),
|
||||
("null", NONE),
|
||||
]
|
||||
|
||||
|
||||
class CoerceChoiceField(forms.ChoiceField):
|
||||
"""
|
||||
Same as forms.ChoiceField but will return the second value
|
||||
in the choices tuple instead of the user provided one
|
||||
"""
|
||||
|
||||
def clean(self, value):
|
||||
if value is None:
|
||||
return value
|
||||
v = super().clean(value)
|
||||
try:
|
||||
return [b for a, b in self.choices if v == a][0]
|
||||
except IndexError:
|
||||
raise forms.ValidationError("Invalid value {}".format(value))
|
||||
|
||||
|
||||
class NullBooleanFilter(filters.ChoiceFilter):
|
||||
field_class = CoerceChoiceField
|
||||
|
||||
def __init__(self, *args, **kwargs):
|
||||
self.choices = NULL_BOOLEAN_CHOICES
|
||||
kwargs["choices"] = self.choices
|
||||
super().__init__(*args, **kwargs)
|
||||
|
||||
def filter(self, qs, value):
|
||||
if value in ["", None]:
|
||||
return qs
|
||||
if value == NONE:
|
||||
value = None
|
||||
qs = self.get_method(qs)(
|
||||
**{"%s__%s" % (self.field_name, self.lookup_expr): value}
|
||||
)
|
||||
return qs.distinct() if self.distinct else qs
|
||||
|
||||
|
||||
def clean_null_boolean_filter(v):
|
||||
v = CoerceChoiceField(choices=NULL_BOOLEAN_CHOICES).clean(v)
|
||||
if v == NONE:
|
||||
v = None
|
||||
|
||||
return v
|
||||
|
||||
|
||||
def get_null_boolean_filter(name):
|
||||
return {"handler": lambda v: Q(**{name: clean_null_boolean_filter(v)})}
|
||||
|
||||
|
||||
class DummyTypedMultipleChoiceField(forms.TypedMultipleChoiceField):
|
||||
def valid_value(self, value):
|
||||
return True
|
||||
|
||||
|
||||
class QueryArrayWidget(widgets.QueryArrayWidget):
|
||||
"""
|
||||
Until https://github.com/carltongibson/django-filter/issues/1047 is fixed
|
||||
"""
|
||||
|
||||
def value_from_datadict(self, data, files, name):
|
||||
data = data.copy()
|
||||
return super().value_from_datadict(data, files, name)
|
||||
|
||||
|
||||
class MultipleQueryFilter(filters.TypedMultipleChoiceFilter):
|
||||
field_class = DummyTypedMultipleChoiceField
|
||||
|
||||
def __init__(self, *args, **kwargs):
|
||||
kwargs["widget"] = QueryArrayWidget()
|
||||
super().__init__(*args, **kwargs)
|
||||
self.lookup_expr = "in"
|
||||
|
||||
|
||||
def filter_target(value):
|
||||
|
||||
config = {
|
||||
"artist": ["artist", "target_id", int],
|
||||
"album": ["album", "target_id", int],
|
||||
"track": ["track", "target_id", int],
|
||||
}
|
||||
parts = value.lower().split(" ")
|
||||
if parts[0].strip() not in config:
|
||||
raise forms.ValidationError("Improper target")
|
||||
|
||||
conf = config[parts[0].strip()]
|
||||
|
||||
query = Q(target_content_type__model=conf[0])
|
||||
if len(parts) > 1:
|
||||
_, lookup_field, validator = conf
|
||||
try:
|
||||
lookup_value = validator(parts[1].strip())
|
||||
except TypeError:
|
||||
raise forms.ValidationError("Imparsable target id")
|
||||
return query & Q(**{lookup_field: lookup_value})
|
||||
|
||||
return query
|
||||
|
||||
|
||||
class MutationFilter(filters.FilterSet):
|
||||
is_approved = NullBooleanFilter("is_approved")
|
||||
q = fields.SmartSearchFilter(
|
||||
config=search.SearchConfig(
|
||||
search_fields={
|
||||
"summary": {"to": "summary"},
|
||||
"fid": {"to": "fid"},
|
||||
"type": {"to": "type"},
|
||||
},
|
||||
filter_fields={
|
||||
"domain": {"to": "created_by__domain__name__iexact"},
|
||||
"is_approved": get_null_boolean_filter("is_approved"),
|
||||
"target": {"handler": filter_target},
|
||||
"is_applied": {"to": "is_applied"},
|
||||
},
|
||||
)
|
||||
)
|
||||
|
||||
class Meta:
|
||||
model = models.Mutation
|
||||
fields = ["is_approved", "is_applied", "type"]
|
|
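To illustrate the null-boolean coercion defined above, here is a hedged usage sketch (it assumes a configured Django project where ``funkwhale_api.common.filters`` is importable)::

    from funkwhale_api.common.filters import clean_null_boolean_filter

    # String aliases are coerced to real Python values before filtering.
    assert clean_null_boolean_filter("true") is True
    assert clean_null_boolean_filter("no") is False
    assert clean_null_boolean_filter("null") is None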
@ -0,0 +1,91 @@
|
|||
# Generated by Django 2.1.5 on 2019-01-31 15:44
|
||||
|
||||
import django.contrib.postgres.fields.jsonb
|
||||
from django.db import migrations, models
|
||||
import django.db.models.deletion
|
||||
import django.utils.timezone
|
||||
import uuid
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
initial = True
|
||||
|
||||
dependencies = [
|
||||
("federation", "0017_auto_20190130_0926"),
|
||||
("contenttypes", "0002_remove_content_type_name"),
|
||||
("common", "0001_initial"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.CreateModel(
|
||||
name="Mutation",
|
||||
fields=[
|
||||
(
|
||||
"id",
|
||||
models.AutoField(
|
||||
auto_created=True,
|
||||
primary_key=True,
|
||||
serialize=False,
|
||||
verbose_name="ID",
|
||||
),
|
||||
),
|
||||
("fid", models.URLField(db_index=True, max_length=500, unique=True)),
|
||||
(
|
||||
"uuid",
|
||||
models.UUIDField(db_index=True, default=uuid.uuid4, unique=True),
|
||||
),
|
||||
("type", models.CharField(db_index=True, max_length=100)),
|
||||
("is_approved", models.NullBooleanField(default=None)),
|
||||
("is_applied", models.NullBooleanField(default=None)),
|
||||
(
|
||||
"creation_date",
|
||||
models.DateTimeField(
|
||||
db_index=True, default=django.utils.timezone.now
|
||||
),
|
||||
),
|
||||
(
|
||||
"applied_date",
|
||||
models.DateTimeField(blank=True, db_index=True, null=True),
|
||||
),
|
||||
("summary", models.TextField(max_length=2000, blank=True, null=True)),
|
||||
("payload", django.contrib.postgres.fields.jsonb.JSONField()),
|
||||
(
|
||||
"previous_state",
|
||||
django.contrib.postgres.fields.jsonb.JSONField(
|
||||
null=True, default=None
|
||||
),
|
||||
),
|
||||
("target_id", models.IntegerField(null=True)),
|
||||
(
|
||||
"approved_by",
|
||||
models.ForeignKey(
|
||||
blank=True,
|
||||
null=True,
|
||||
on_delete=django.db.models.deletion.SET_NULL,
|
||||
related_name="approved_mutations",
|
||||
to="federation.Actor",
|
||||
),
|
||||
),
|
||||
(
|
||||
"created_by",
|
||||
models.ForeignKey(
|
||||
blank=True,
|
||||
null=True,
|
||||
on_delete=django.db.models.deletion.SET_NULL,
|
||||
related_name="created_mutations",
|
||||
to="federation.Actor",
|
||||
),
|
||||
),
|
||||
(
|
||||
"target_content_type",
|
||||
models.ForeignKey(
|
||||
null=True,
|
||||
on_delete=django.db.models.deletion.CASCADE,
|
||||
related_name="targeting_mutations",
|
||||
to="contenttypes.ContentType",
|
||||
),
|
||||
),
|
||||
],
|
||||
)
|
||||
]
|
|
@ -1,5 +1,18 @@
|
|||
import uuid
|
||||
|
||||
from django.contrib.postgres.fields import JSONField
|
||||
from django.contrib.contenttypes.fields import GenericForeignKey
|
||||
from django.contrib.contenttypes.models import ContentType
|
||||
from django.conf import settings
|
||||
from django.core.serializers.json import DjangoJSONEncoder
|
||||
from django.db import connections, models, transaction
|
||||
from django.db.models import Lookup
|
||||
from django.db.models.fields import Field
|
||||
from django.db.models.sql.compiler import SQLCompiler
|
||||
from django.utils import timezone
|
||||
from django.urls import reverse
|
||||
|
||||
from funkwhale_api.federation import utils as federation_utils
|
||||
|
||||
|
||||
@Field.register_lookup
|
||||
|
@ -11,3 +24,129 @@ class NotEqual(Lookup):
|
|||
rhs, rhs_params = self.process_rhs(compiler, connection)
|
||||
params = lhs_params + rhs_params
|
||||
return "%s <> %s" % (lhs, rhs), params
|
||||
|
||||
|
||||
class NullsLastSQLCompiler(SQLCompiler):
|
||||
def get_order_by(self):
|
||||
result = super().get_order_by()
|
||||
if result and self.connection.vendor == "postgresql":
|
||||
return [
|
||||
(
|
||||
expr,
|
||||
(
|
||||
sql + " NULLS LAST" if not sql.endswith(" NULLS LAST") else sql,
|
||||
params,
|
||||
is_ref,
|
||||
),
|
||||
)
|
||||
for (expr, (sql, params, is_ref)) in result
|
||||
]
|
||||
return result
|
||||
|
||||
|
||||
class NullsLastQuery(models.sql.query.Query):
|
||||
"""Use a custom compiler to inject 'NULLS LAST' (for PostgreSQL)."""
|
||||
|
||||
def get_compiler(self, using=None, connection=None):
|
||||
if using is None and connection is None:
|
||||
raise ValueError("Need either using or connection")
|
||||
if using:
|
||||
connection = connections[using]
|
||||
return NullsLastSQLCompiler(self, connection, using)
|
||||
|
||||
|
||||
class NullsLastQuerySet(models.QuerySet):
|
||||
def __init__(self, model=None, query=None, using=None, hints=None):
|
||||
super().__init__(model, query, using, hints)
|
||||
self.query = query or NullsLastQuery(self.model)
|
||||
|
||||
|
||||
class LocalFromFidQuerySet:
|
||||
def local(self, include=True):
|
||||
host = settings.FEDERATION_HOSTNAME
|
||||
query = models.Q(fid__startswith="http://{}/".format(host)) | models.Q(
|
||||
fid__startswith="https://{}/".format(host)
|
||||
)
|
||||
if include:
|
||||
return self.filter(query)
|
||||
else:
|
||||
return self.filter(~query)
|
||||
|
||||
|
||||
class MutationQuerySet(models.QuerySet):
|
||||
def get_for_target(self, target):
|
||||
content_type = ContentType.objects.get_for_model(target)
|
||||
return self.filter(target_content_type=content_type, target_id=target.pk)
|
||||
|
||||
|
||||
class Mutation(models.Model):
|
||||
fid = models.URLField(unique=True, max_length=500, db_index=True)
|
||||
uuid = models.UUIDField(unique=True, db_index=True, default=uuid.uuid4)
|
||||
created_by = models.ForeignKey(
|
||||
"federation.Actor",
|
||||
related_name="created_mutations",
|
||||
on_delete=models.SET_NULL,
|
||||
null=True,
|
||||
blank=True,
|
||||
)
|
||||
approved_by = models.ForeignKey(
|
||||
"federation.Actor",
|
||||
related_name="approved_mutations",
|
||||
on_delete=models.SET_NULL,
|
||||
null=True,
|
||||
blank=True,
|
||||
)
|
||||
|
||||
type = models.CharField(max_length=100, db_index=True)
|
||||
# None = no choice, True = approved, False = refused
|
||||
is_approved = models.NullBooleanField(default=None)
|
||||
|
||||
# None = not applied, True = applied, False = failed
|
||||
is_applied = models.NullBooleanField(default=None)
|
||||
creation_date = models.DateTimeField(default=timezone.now, db_index=True)
|
||||
applied_date = models.DateTimeField(null=True, blank=True, db_index=True)
|
||||
summary = models.TextField(max_length=2000, null=True, blank=True)
|
||||
|
||||
payload = JSONField(encoder=DjangoJSONEncoder)
|
||||
previous_state = JSONField(null=True, default=None, encoder=DjangoJSONEncoder)
|
||||
|
||||
target_id = models.IntegerField(null=True)
|
||||
target_content_type = models.ForeignKey(
|
||||
ContentType,
|
||||
null=True,
|
||||
on_delete=models.CASCADE,
|
||||
related_name="targeting_mutations",
|
||||
)
|
||||
target = GenericForeignKey("target_content_type", "target_id")
|
||||
|
||||
objects = MutationQuerySet.as_manager()
|
||||
|
||||
def get_federation_id(self):
|
||||
if self.fid:
|
||||
return self.fid
|
||||
|
||||
return federation_utils.full_url(
|
||||
reverse("federation:edits-detail", kwargs={"uuid": self.uuid})
|
||||
)
|
||||
|
||||
def save(self, **kwargs):
|
||||
if not self.pk and not self.fid:
|
||||
self.fid = self.get_federation_id()
|
||||
|
||||
return super().save(**kwargs)
|
||||
|
||||
@transaction.atomic
|
||||
def apply(self):
|
||||
from . import mutations
|
||||
|
||||
if self.is_applied:
|
||||
raise ValueError("Mutation was already applied")
|
||||
|
||||
previous_state = mutations.registry.apply(
|
||||
type=self.type, obj=self.target, payload=self.payload
|
||||
)
|
||||
self.previous_state = previous_state
|
||||
self.is_applied = True
|
||||
self.applied_date = timezone.now()
|
||||
self.save(update_fields=["is_applied", "applied_date", "previous_state"])
|
||||
return previous_state
|
||||
|
|
|
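A hedged sketch of how the Mutation model above is meant to be used from a shell (the Track import and primary key are illustrative assumptions, not part of this diff)::

    from funkwhale_api.common.models import Mutation
    from funkwhale_api.music.models import Track

    track = Track.objects.get(pk=1)  # illustrative lookup

    # All approved-but-unapplied suggestions targeting this track.
    pending = Mutation.objects.get_for_target(track).filter(
        is_approved=True, is_applied=None
    )
    for mutation in pending:
        # apply() records previous_state, then flags is_applied and applied_date.
        mutation.apply()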
@ -0,0 +1,164 @@
|
|||
import persisting_theory
|
||||
|
||||
from rest_framework import serializers
|
||||
|
||||
from django.db import models, transaction
|
||||
|
||||
|
||||
class ConfNotFound(KeyError):
|
||||
pass
|
||||
|
||||
|
||||
class Registry(persisting_theory.Registry):
|
||||
look_into = "mutations"
|
||||
|
||||
def connect(self, type, klass, perm_checkers=None):
|
||||
def decorator(serializer_class):
|
||||
t = self.setdefault(type, {})
|
||||
t[klass] = {
|
||||
"serializer_class": serializer_class,
|
||||
"perm_checkers": perm_checkers or {},
|
||||
}
|
||||
return serializer_class
|
||||
|
||||
return decorator
|
||||
|
||||
@transaction.atomic
|
||||
def apply(self, type, obj, payload):
|
||||
conf = self.get_conf(type, obj)
|
||||
serializer = conf["serializer_class"](obj, data=payload)
|
||||
serializer.is_valid(raise_exception=True)
|
||||
previous_state = serializer.get_previous_state(obj, serializer.validated_data)
|
||||
serializer.apply(obj, serializer.validated_data)
|
||||
return previous_state
|
||||
|
||||
def is_valid(self, type, obj, payload):
|
||||
conf = self.get_conf(type, obj)
|
||||
serializer = conf["serializer_class"](obj, data=payload)
|
||||
return serializer.is_valid(raise_exception=True)
|
||||
|
||||
def get_validated_payload(self, type, obj, payload):
|
||||
conf = self.get_conf(type, obj)
|
||||
serializer = conf["serializer_class"](obj, data=payload)
|
||||
serializer.is_valid(raise_exception=True)
|
||||
return serializer.payload_serialize(serializer.validated_data)
|
||||
|
||||
def has_perm(self, perm, type, obj, actor):
|
||||
if perm not in ["approve", "suggest"]:
|
||||
raise ValueError("Invalid permission {}".format(perm))
|
||||
conf = self.get_conf(type, obj)
|
||||
checker = conf["perm_checkers"].get(perm)
|
||||
if not checker:
|
||||
return False
|
||||
return checker(obj=obj, actor=actor)
|
||||
|
||||
def get_conf(self, type, obj):
|
||||
try:
|
||||
type_conf = self[type]
|
||||
except KeyError:
|
||||
raise ConfNotFound("{} is not a registered mutation".format(type))
|
||||
|
||||
try:
|
||||
conf = type_conf[obj.__class__]
|
||||
except KeyError:
|
||||
try:
|
||||
conf = type_conf[None]
|
||||
except KeyError:
|
||||
raise ConfNotFound(
|
||||
"No mutation configuration found for {}".format(obj.__class__)
|
||||
)
|
||||
return conf
|
||||
|
||||
|
||||
class MutationSerializer(serializers.Serializer):
|
||||
def apply(self, obj, validated_data):
|
||||
raise NotImplementedError()
|
||||
|
||||
def post_apply(self, obj, validated_data):
|
||||
pass
|
||||
|
||||
def get_previous_state(self, obj, validated_data):
|
||||
return
|
||||
|
||||
def payload_serialize(self, data):
|
||||
return data
|
||||
|
||||
|
||||
class UpdateMutationSerializer(serializers.ModelSerializer, MutationSerializer):
|
||||
serialized_relations = {}
|
||||
|
||||
def __init__(self, *args, **kwargs):
|
||||
# we force partial mode, because update mutations are partial
|
||||
kwargs.setdefault("partial", True)
|
||||
super().__init__(*args, **kwargs)
|
||||
|
||||
@transaction.atomic
|
||||
def apply(self, obj, validated_data):
|
||||
r = self.update(obj, validated_data)
|
||||
self.post_apply(r, validated_data)
|
||||
return r
|
||||
|
||||
def validate(self, validated_data):
|
||||
if not validated_data:
|
||||
raise serializers.ValidationError("You must update at least one field")
|
||||
|
||||
return super().validate(validated_data)
|
||||
|
||||
def db_serialize(self, validated_data):
|
||||
data = {}
|
||||
# ensure model fields are serialized properly
|
||||
for key, value in list(validated_data.items()):
|
||||
if not isinstance(value, models.Model):
|
||||
data[key] = value
|
||||
continue
|
||||
field = self.serialized_relations[key]
|
||||
data[key] = getattr(value, field)
|
||||
return data
|
||||
|
||||
def payload_serialize(self, data):
|
||||
data = super().payload_serialize(data)
|
||||
# we use our serialized_relations configuration
|
||||
# to ensure we store ids instead of model instances in our json
|
||||
# payload
|
||||
for field, attr in self.serialized_relations.items():
|
||||
try:
|
||||
obj = data[field]
|
||||
except KeyError:
|
||||
continue
|
||||
if obj is None:
|
||||
data[field] = None
|
||||
else:
|
||||
data[field] = getattr(obj, attr)
|
||||
return data
|
||||
|
||||
def create(self, validated_data):
|
||||
validated_data = self.db_serialize(validated_data)
|
||||
return super().create(validated_data)
|
||||
|
||||
def get_previous_state(self, obj, validated_data):
|
||||
return get_update_previous_state(
|
||||
obj,
|
||||
*list(validated_data.keys()),
|
||||
serialized_relations=self.serialized_relations
|
||||
)
|
||||
|
||||
|
||||
def get_update_previous_state(obj, *fields, serialized_relations={}):
|
||||
if not fields:
|
||||
raise ValueError("You need to provide at least one field")
|
||||
|
||||
state = {}
|
||||
for field in fields:
|
||||
value = getattr(obj, field)
|
||||
if isinstance(value, models.Model):
|
||||
# we store the related object id and repr for better UX
|
||||
id_field = serialized_relations[field]
|
||||
related_value = getattr(value, id_field)
|
||||
state[field] = {"value": related_value, "repr": str(value)}
|
||||
else:
|
||||
state[field] = {"value": value}
|
||||
|
||||
return state
|
||||
|
||||
|
||||
registry = Registry()
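For context, here is a hedged sketch of how a mutation type gets registered against the registry defined above (the Track model and the single "title" field are illustrative; without perm_checkers, has_perm() simply returns False)::

    from funkwhale_api.common import mutations
    from funkwhale_api.music.models import Track

    @mutations.registry.connect("update", Track)
    class TrackUpdateSketch(mutations.UpdateMutationSerializer):
        # Partial-update serializer: only the listed fields can be suggested.
        class Meta:
            model = Track
            fields = ["title"]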

@@ -1,6 +1,29 @@
from rest_framework.pagination import PageNumberPagination
from rest_framework.pagination import PageNumberPagination, _positive_int


class FunkwhalePagination(PageNumberPagination):
    page_size_query_param = "page_size"
    max_page_size = 50
    default_max_page_size = 50
    default_page_size = None
    view = None

    def paginate_queryset(self, queryset, request, view=None):
        self.view = view
        return super().paginate_queryset(queryset, request, view)

    def get_page_size(self, request):
        max_page_size = (
            getattr(self.view, "max_page_size", 0) or self.default_max_page_size
        )
        page_size = getattr(self.view, "default_page_size", 0) or max_page_size
        if self.page_size_query_param:
            try:
                return _positive_int(
                    request.query_params[self.page_size_query_param],
                    strict=True,
                    cutoff=max_page_size,
                )
            except (KeyError, ValueError):
                pass

        return page_size

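A small sketch of the page-size clamping implemented above, reusing DRF's _positive_int helper that the module now imports (assumes djangorestframework is installed)::

    from rest_framework.pagination import _positive_int

    def resolve_page_size(query_params, default=25, cutoff=50):
        # Mirrors FunkwhalePagination.get_page_size(): invalid or missing
        # values fall back to the default, large values are capped at cutoff.
        try:
            return _positive_int(query_params["page_size"], strict=True, cutoff=cutoff)
        except (KeyError, ValueError):
            return default

    print(resolve_page_size({"page_size": "500"}))  # 50, capped at the cutoff
    print(resolve_page_size({"page_size": "abc"}))  # 25, invalid input ignored
    print(resolve_page_size({}))                    # 25, parameter missing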
@@ -47,6 +47,6 @@ class OwnerPermission(BasePermission):

        owner_field = getattr(view, "owner_field", "user")
        owner = operator.attrgetter(owner_field)(obj)
        if owner != request.user:
        if not owner or not request.user.is_authenticated or owner != request.user:
            raise Http404
        return True

@ -65,6 +65,9 @@ def apply(qs, config_data):
|
|||
q = config_data.get(k)
|
||||
if q:
|
||||
qs = qs.filter(q)
|
||||
distinct = config_data.get("distinct", False)
|
||||
if distinct:
|
||||
qs = qs.distinct()
|
||||
return qs
|
||||
|
||||
|
||||
|
@ -77,13 +80,28 @@ class SearchConfig:
|
|||
def clean(self, query):
|
||||
tokens = parse_query(query)
|
||||
cleaned_data = {}
|
||||
|
||||
cleaned_data["types"] = self.clean_types(filter_tokens(tokens, ["is"]))
|
||||
cleaned_data["search_query"] = self.clean_search_query(
|
||||
filter_tokens(tokens, [None, "in"])
|
||||
filter_tokens(tokens, [None, "in"] + list(self.search_fields.keys()))
|
||||
)
|
||||
unhandled_tokens = [t for t in tokens if t["key"] not in [None, "is", "in"]]
|
||||
cleaned_data["filter_query"] = self.clean_filter_query(unhandled_tokens)
|
||||
unhandled_tokens = [
|
||||
t
|
||||
for t in tokens
|
||||
if t["key"] not in [None, "is", "in"] + list(self.search_fields.keys())
|
||||
]
|
||||
cleaned_data["filter_query"], matching_filters = self.clean_filter_query(
|
||||
unhandled_tokens
|
||||
)
|
||||
if matching_filters:
|
||||
cleaned_data["distinct"] = any(
|
||||
[
|
||||
self.filter_fields[k].get("distinct", False)
|
||||
for k in matching_filters
|
||||
if k in self.filter_fields
|
||||
]
|
||||
)
|
||||
else:
|
||||
cleaned_data["distinct"] = False
|
||||
return cleaned_data
|
||||
|
||||
def clean_search_query(self, tokens):
|
||||
|
@ -95,24 +113,67 @@ class SearchConfig:
|
|||
} or set(self.search_fields.keys())
|
||||
fields_subset = set(self.search_fields.keys()) & fields_subset
|
||||
to_fields = [self.search_fields[k]["to"] for k in fields_subset]
|
||||
|
||||
specific_field_query = None
|
||||
for token in tokens:
|
||||
if token["key"] not in self.search_fields:
|
||||
continue
|
||||
to = self.search_fields[token["key"]]["to"]
|
||||
try:
|
||||
field = token["field"]
|
||||
value = field.clean(token["value"])
|
||||
except KeyError:
|
||||
# no cleaning to apply
|
||||
value = token["value"]
|
||||
q = Q(**{"{}__icontains".format(to): value})
|
||||
if not specific_field_query:
|
||||
specific_field_query = q
|
||||
else:
|
||||
specific_field_query &= q
|
||||
query_string = " ".join([t["value"] for t in filter_tokens(tokens, [None])])
|
||||
return get_query(query_string, sorted(to_fields))
|
||||
unhandled_tokens_query = get_query(query_string, sorted(to_fields))
|
||||
|
||||
if specific_field_query and unhandled_tokens_query:
|
||||
return unhandled_tokens_query & specific_field_query
|
||||
elif specific_field_query:
|
||||
return specific_field_query
|
||||
elif unhandled_tokens_query:
|
||||
return unhandled_tokens_query
|
||||
return None
|
||||
|
||||
def clean_filter_query(self, tokens):
|
||||
if not self.filter_fields or not tokens:
|
||||
return
|
||||
return None, []
|
||||
|
||||
matching = [t for t in tokens if t["key"] in self.filter_fields]
|
||||
queries = [
|
||||
Q(**{self.filter_fields[t["key"]]["to"]: t["value"]}) for t in matching
|
||||
]
|
||||
queries = [self.get_filter_query(token) for token in matching]
|
||||
query = None
|
||||
for q in queries:
|
||||
if not query:
|
||||
query = q
|
||||
else:
|
||||
query = query & q
|
||||
return query
|
||||
return query, [m["key"] for m in matching]
|
||||
|
||||
def get_filter_query(self, token):
|
||||
raw_value = token["value"]
|
||||
try:
|
||||
field = self.filter_fields[token["key"]]["field"]
|
||||
value = field.clean(raw_value)
|
||||
except KeyError:
|
||||
# no cleaning to apply
|
||||
value = raw_value
|
||||
try:
|
||||
query_field = self.filter_fields[token["key"]]["to"]
|
||||
return Q(**{query_field: value})
|
||||
except KeyError:
|
||||
pass
|
||||
|
||||
# we don't have a basic filter -> field mapping, this likely means we
|
||||
# have a dynamic handler in the config
|
||||
handler = self.filter_fields[token["key"]]["handler"]
|
||||
value = handler(value)
|
||||
return value
|
||||
|
||||
def clean_types(self, tokens):
|
||||
if not self.types:
|
||||
|
|
|
@ -10,6 +10,8 @@ from django.core.files.uploadedfile import SimpleUploadedFile
|
|||
from django.utils.encoding import smart_text
|
||||
from django.utils.translation import ugettext_lazy as _
|
||||
|
||||
from . import models
|
||||
|
||||
|
||||
class RelatedField(serializers.RelatedField):
|
||||
default_error_messages = {
|
||||
|
@ -216,3 +218,57 @@ class StripExifImageField(serializers.ImageField):
|
|||
return SimpleUploadedFile(
|
||||
file_obj.name, content, content_type=file_obj.content_type
|
||||
)
|
||||
|
||||
|
||||
from funkwhale_api.federation import serializers as federation_serializers # noqa
|
||||
|
||||
TARGET_ID_TYPE_MAPPING = {
|
||||
"music.Track": ("id", "track"),
|
||||
"music.Artist": ("id", "artist"),
|
||||
"music.Album": ("id", "album"),
|
||||
}
|
||||
|
||||
|
||||
class APIMutationSerializer(serializers.ModelSerializer):
|
||||
created_by = federation_serializers.APIActorSerializer(read_only=True)
|
||||
target = serializers.SerializerMethodField()
|
||||
|
||||
class Meta:
|
||||
model = models.Mutation
|
||||
fields = [
|
||||
"fid",
|
||||
"uuid",
|
||||
"type",
|
||||
"creation_date",
|
||||
"applied_date",
|
||||
"is_approved",
|
||||
"is_applied",
|
||||
"created_by",
|
||||
"approved_by",
|
||||
"summary",
|
||||
"payload",
|
||||
"previous_state",
|
||||
"target",
|
||||
]
|
||||
read_only_fields = [
|
||||
"uuid",
|
||||
"creation_date",
|
||||
"fid",
|
||||
"is_applied",
|
||||
"created_by",
|
||||
"approved_by",
|
||||
"previous_state",
|
||||
]
|
||||
|
||||
def get_target(self, obj):
|
||||
target = obj.target
|
||||
if not target:
|
||||
return
|
||||
|
||||
id_field, type = TARGET_ID_TYPE_MAPPING[target._meta.label]
|
||||
return {"type": type, "id": getattr(target, id_field), "repr": str(target)}
|
||||
|
||||
def validate_type(self, value):
|
||||
if value not in self.context["registry"]:
|
||||
raise serializers.ValidationError("Invalid mutation type {}".format(value))
|
||||
return value
|
||||
|
|
|
@@ -0,0 +1,6 @@
import django.dispatch

mutation_created = django.dispatch.Signal(providing_args=["mutation"])
mutation_updated = django.dispatch.Signal(
    providing_args=["mutation", "old_is_approved", "new_is_approved"]
)

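The two signals above are plain Django signals, so any app can subscribe to them; a minimal, illustrative receiver could look like this (the logging behaviour is an example, not something the commit adds)::

    from django.dispatch import receiver

    from funkwhale_api.common import signals

    @receiver(signals.mutation_created)
    def log_new_mutation(mutation, **kwargs):
        # Called whenever the mutations endpoint shown earlier emits the signal.
        print("New mutation suggested:", mutation.uuid, mutation.type)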
@ -0,0 +1,59 @@
|
|||
from django.db import transaction
|
||||
from django.dispatch import receiver
|
||||
|
||||
|
||||
from funkwhale_api.common import channels
|
||||
from funkwhale_api.taskapp import celery
|
||||
|
||||
from . import models
|
||||
from . import serializers
|
||||
from . import signals
|
||||
|
||||
|
||||
@celery.app.task(name="common.apply_mutation")
|
||||
@transaction.atomic
|
||||
@celery.require_instance(
|
||||
models.Mutation.objects.exclude(is_applied=True).select_for_update(), "mutation"
|
||||
)
|
||||
def apply_mutation(mutation):
|
||||
mutation.apply()
|
||||
|
||||
|
||||
@receiver(signals.mutation_created)
|
||||
def broadcast_mutation_created(mutation, **kwargs):
|
||||
group = "instance_activity"
|
||||
channels.group_send(
|
||||
group,
|
||||
{
|
||||
"type": "event.send",
|
||||
"text": "",
|
||||
"data": {
|
||||
"type": "mutation.created",
|
||||
"mutation": serializers.APIMutationSerializer(mutation).data,
|
||||
"pending_review_count": models.Mutation.objects.filter(
|
||||
is_approved=None
|
||||
).count(),
|
||||
},
|
||||
},
|
||||
)
|
||||
|
||||
|
||||
@receiver(signals.mutation_updated)
|
||||
def broadcast_mutation_update(mutation, old_is_approved, new_is_approved, **kwargs):
|
||||
group = "instance_activity"
|
||||
channels.group_send(
|
||||
group,
|
||||
{
|
||||
"type": "event.send",
|
||||
"text": "",
|
||||
"data": {
|
||||
"type": "mutation.updated",
|
||||
"mutation": serializers.APIMutationSerializer(mutation).data,
|
||||
"pending_review_count": models.Mutation.objects.filter(
|
||||
is_approved=None
|
||||
).count(),
|
||||
"old_is_approved": old_is_approved,
|
||||
"new_is_approved": new_is_approved,
|
||||
},
|
||||
},
|
||||
)
|
|
@ -149,6 +149,27 @@ def order_for_search(qs, field):
|
|||
return qs.annotate(__size=models.functions.Length(field)).order_by("__size")
|
||||
|
||||
|
||||
def recursive_getattr(obj, key, permissive=False):
|
||||
"""
|
||||
Given a dictionary such as {'user': {'name': 'Bob'}} and
|
||||
a dotted string such as user.name, returns 'Bob'.
|
||||
|
||||
If the value is not present, returns None
|
||||
"""
|
||||
v = obj
|
||||
for k in key.split("."):
|
||||
try:
|
||||
v = v.get(k)
|
||||
except (TypeError, AttributeError):
|
||||
if not permissive:
|
||||
raise
|
||||
return
|
||||
if v is None:
|
||||
return
|
||||
|
||||
return v
|
||||
|
||||
|
||||
def replace_prefix(queryset, field, old, new):
|
||||
"""
|
||||
Given a queryset of objects and a field name, will find objects
|
||||
|
@ -172,3 +193,38 @@ def replace_prefix(queryset, field, old, new):
|
|||
models.functions.Substr(field, len(old) + 1, output_field=models.CharField()),
|
||||
)
|
||||
return qs.update(**{field: update})
|
||||
|
||||
|
||||
def concat_dicts(*dicts):
|
||||
n = {}
|
||||
for d in dicts:
|
||||
n.update(d)
|
||||
|
||||
return n
|
||||
|
||||
|
||||
def get_updated_fields(conf, data, obj):
|
||||
"""
|
||||
Given a list of fields, a dict and an object, will return the dict keys/values
|
||||
that differ from the corresponding fields on the object.
|
||||
"""
|
||||
final_conf = []
|
||||
for c in conf:
|
||||
if isinstance(c, str):
|
||||
final_conf.append((c, c))
|
||||
else:
|
||||
final_conf.append(c)
|
||||
|
||||
final_data = {}
|
||||
|
||||
for data_field, obj_field in final_conf:
|
||||
try:
|
||||
data_value = data[data_field]
|
||||
except KeyError:
|
||||
continue
|
||||
|
||||
obj_value = getattr(obj, obj_field)
|
||||
if obj_value != data_value:
|
||||
final_data[obj_field] = data_value
|
||||
|
||||
return final_data
|
||||
|
|
|
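A hedged usage sketch for the recursive_getattr helper above (the payload is illustrative; importing it assumes the funkwhale_api package is on the path)::

    from funkwhale_api.common.utils import recursive_getattr

    payload = {"object": {"type": "Audio"}}

    recursive_getattr(payload, "object.type")                  # "Audio"
    recursive_getattr(payload, "actor.type", permissive=True)  # None, no exception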
@ -0,0 +1,123 @@
|
|||
from django.db import transaction
|
||||
|
||||
from rest_framework.decorators import action
|
||||
from rest_framework import exceptions
|
||||
from rest_framework import mixins
|
||||
from rest_framework import permissions
|
||||
from rest_framework import response
|
||||
from rest_framework import viewsets
|
||||
|
||||
from . import filters
|
||||
from . import models
|
||||
from . import mutations
|
||||
from . import serializers
|
||||
from . import signals
|
||||
from . import tasks
|
||||
from . import utils
|
||||
|
||||
|
||||
class SkipFilterForGetObject:
|
||||
def get_object(self, *args, **kwargs):
|
||||
setattr(self.request, "_skip_filters", True)
|
||||
return super().get_object(*args, **kwargs)
|
||||
|
||||
def filter_queryset(self, queryset):
|
||||
if getattr(self.request, "_skip_filters", False):
|
||||
return queryset
|
||||
return super().filter_queryset(queryset)
|
||||
|
||||
|
||||
class MutationViewSet(
|
||||
mixins.ListModelMixin,
|
||||
mixins.RetrieveModelMixin,
|
||||
mixins.DestroyModelMixin,
|
||||
viewsets.GenericViewSet,
|
||||
):
|
||||
lookup_field = "uuid"
|
||||
queryset = (
|
||||
models.Mutation.objects.all()
|
||||
.exclude(target_id=None)
|
||||
.order_by("-creation_date")
|
||||
.select_related("created_by", "approved_by")
|
||||
.prefetch_related("target")
|
||||
)
|
||||
serializer_class = serializers.APIMutationSerializer
|
||||
permission_classes = [permissions.IsAuthenticated]
|
||||
ordering_fields = ("creation_date",)
|
||||
filterset_class = filters.MutationFilter
|
||||
|
||||
def perform_destroy(self, instance):
|
||||
if instance.is_applied:
|
||||
raise exceptions.PermissionDenied("You cannot delete an applied mutation")
|
||||
|
||||
actor = self.request.user.actor
|
||||
is_owner = actor == instance.created_by
|
||||
|
||||
if not any(
|
||||
[
|
||||
is_owner,
|
||||
mutations.registry.has_perm(
|
||||
perm="approve", type=instance.type, obj=instance.target, actor=actor
|
||||
),
|
||||
]
|
||||
):
|
||||
raise exceptions.PermissionDenied()
|
||||
|
||||
return super().perform_destroy(instance)
|
||||
|
||||
@action(detail=True, methods=["post"])
|
||||
@transaction.atomic
|
||||
def approve(self, request, *args, **kwargs):
|
||||
instance = self.get_object()
|
||||
if instance.is_applied:
|
||||
return response.Response(
|
||||
{"error": "This mutation was already applied"}, status=403
|
||||
)
|
||||
actor = self.request.user.actor
|
||||
can_approve = mutations.registry.has_perm(
|
||||
perm="approve", type=instance.type, obj=instance.target, actor=actor
|
||||
)
|
||||
|
||||
if not can_approve:
|
||||
raise exceptions.PermissionDenied()
|
||||
previous_is_approved = instance.is_approved
|
||||
instance.approved_by = actor
|
||||
instance.is_approved = True
|
||||
instance.save(update_fields=["approved_by", "is_approved"])
|
||||
utils.on_commit(tasks.apply_mutation.delay, mutation_id=instance.id)
|
||||
utils.on_commit(
|
||||
signals.mutation_updated.send,
|
||||
sender=None,
|
||||
mutation=instance,
|
||||
old_is_approved=previous_is_approved,
|
||||
new_is_approved=instance.is_approved,
|
||||
)
|
||||
return response.Response({}, status=200)
|
||||
|
||||
@action(detail=True, methods=["post"])
|
||||
@transaction.atomic
|
||||
def reject(self, request, *args, **kwargs):
|
||||
instance = self.get_object()
|
||||
if instance.is_applied:
|
||||
return response.Response(
|
||||
{"error": "This mutation was already applied"}, status=403
|
||||
)
|
||||
actor = self.request.user.actor
|
||||
can_approve = mutations.registry.has_perm(
|
||||
perm="approve", type=instance.type, obj=instance.target, actor=actor
|
||||
)
|
||||
|
||||
if not can_approve:
|
||||
raise exceptions.PermissionDenied()
|
||||
previous_is_approved = instance.is_approved
|
||||
instance.approved_by = actor
|
||||
instance.is_approved = False
|
||||
instance.save(update_fields=["approved_by", "is_approved"])
|
||||
utils.on_commit(
|
||||
signals.mutation_updated.send,
|
||||
sender=None,
|
||||
mutation=instance,
|
||||
old_is_approved=previous_is_approved,
|
||||
new_is_approved=instance.is_approved,
|
||||
)
|
||||
return response.Response({}, status=200)
|
|
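A hypothetical client-side sketch of the approve action defined above; the instance URL, the mutations route prefix and the token are illustrative assumptions rather than values taken from this diff::

    import requests

    response = requests.post(
        "https://funkwhale.example/api/v1/mutations/<uuid>/approve/",
        headers={"Authorization": "Bearer <access token>"},
    )
    # 200 on success; 403 when the mutation was already applied or when the
    # actor lacks the "approve" permission checker for this mutation type.
    print(response.status_code)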
@ -1,6 +1,11 @@
|
|||
import uuid
|
||||
import factory
|
||||
import persisting_theory
|
||||
|
||||
from django.conf import settings
|
||||
|
||||
from faker.providers import internet as internet_provider
|
||||
|
||||
|
||||
class FactoriesRegistry(persisting_theory.Registry):
|
||||
look_into = "factories"
|
||||
|
@ -39,3 +44,22 @@ class NoUpdateOnCreate:
|
|||
@classmethod
|
||||
def _after_postgeneration(cls, instance, create, results=None):
|
||||
return
|
||||
|
||||
|
||||
class FunkwhaleProvider(internet_provider.Provider):
|
||||
"""
|
||||
Our own faker data generator, since built-in ones are sometimes
|
||||
not random enough
|
||||
"""
|
||||
|
||||
def federation_url(self, prefix="", local=False):
|
||||
def path_generator():
|
||||
return "{}/{}".format(prefix, uuid.uuid4())
|
||||
|
||||
domain = settings.FEDERATION_HOSTNAME if local else self.domain_name()
|
||||
protocol = "https"
|
||||
path = path_generator()
|
||||
return "{}://{}/{}".format(protocol, domain, path)
|
||||
|
||||
|
||||
factory.Faker.add_provider(FunkwhaleProvider)
|
||||
|
|
|
@ -1,11 +1,10 @@
|
|||
from django_filters import rest_framework as filters
|
||||
|
||||
from funkwhale_api.common import fields
|
||||
from funkwhale_api.moderation import filters as moderation_filters
|
||||
|
||||
from . import models
|
||||
|
||||
|
||||
class TrackFavoriteFilter(filters.FilterSet):
|
||||
class TrackFavoriteFilter(moderation_filters.HiddenContentFilterSet):
|
||||
q = fields.SearchFilter(
|
||||
search_fields=["track__title", "track__artist__name", "track__album__title"]
|
||||
)
|
||||
|
@ -13,3 +12,6 @@ class TrackFavoriteFilter(filters.FilterSet):
|
|||
class Meta:
|
||||
model = models.TrackFavorite
|
||||
fields = ["user", "q"]
|
||||
hidden_content_fields_mapping = moderation_filters.USER_FILTER_CONFIG[
|
||||
"TRACK_FAVORITE"
|
||||
]
|
||||
|
|
|
@ -1,6 +1,5 @@
|
|||
from rest_framework import mixins, status, viewsets
|
||||
from rest_framework.decorators import action
|
||||
from rest_framework.permissions import IsAuthenticatedOrReadOnly
|
||||
from rest_framework.response import Response
|
||||
|
||||
from django.db.models import Prefetch
|
||||
|
@ -9,6 +8,7 @@ from funkwhale_api.activity import record
|
|||
from funkwhale_api.common import fields, permissions
|
||||
from funkwhale_api.music.models import Track
|
||||
from funkwhale_api.music import utils as music_utils
|
||||
from funkwhale_api.users.oauth import permissions as oauth_permissions
|
||||
|
||||
from . import filters, models, serializers
|
||||
|
||||
|
@ -24,10 +24,11 @@ class TrackFavoriteViewSet(
|
|||
serializer_class = serializers.UserTrackFavoriteSerializer
|
||||
queryset = models.TrackFavorite.objects.all().select_related("user")
|
||||
permission_classes = [
|
||||
permissions.ConditionalAuthentication,
|
||||
oauth_permissions.ScopePermission,
|
||||
permissions.OwnerPermission,
|
||||
IsAuthenticatedOrReadOnly,
|
||||
]
|
||||
required_scope = "favorites"
|
||||
anonymous_policy = "setting"
|
||||
owner_checks = ["write"]
|
||||
|
||||
def get_serializer_class(self):
|
||||
|
|
|
@@ -9,9 +9,13 @@ from django.db.models import Q
from funkwhale_api.common import channels
from funkwhale_api.common import utils as funkwhale_utils

from . import contexts

recursive_getattr = funkwhale_utils.recursive_getattr


logger = logging.getLogger(__name__)
PUBLIC_ADDRESS = "https://www.w3.org/ns/activitystreams#Public"
PUBLIC_ADDRESS = contexts.AS.Public

ACTIVITY_TYPES = [
    "Accept",
@@ -82,16 +86,19 @@ OBJECT_TYPES = (
BROADCAST_TO_USER_ACTIVITIES = ["Follow", "Accept"]


def should_reject(id, actor_id=None, payload={}):
def should_reject(fid, actor_id=None, payload={}):
    if fid is None and actor_id is None:
        return False

    from funkwhale_api.moderation import models as moderation_models

    policies = moderation_models.InstancePolicy.objects.active()

    media_types = ["Audio", "Artist", "Album", "Track", "Library", "Image"]
    relevant_values = [
        recursive_gettattr(payload, "type", permissive=True),
        recursive_gettattr(payload, "object.type", permissive=True),
        recursive_gettattr(payload, "target.type", permissive=True),
        recursive_getattr(payload, "type", permissive=True),
        recursive_getattr(payload, "object.type", permissive=True),
        recursive_getattr(payload, "target.type", permissive=True),
    ]
    # if one of the payload types match our internal media types, then
    # we apply policies that reject media
@@ -100,9 +107,12 @@ def should_reject(id, actor_id=None, payload={}):
    else:
        policy_type = Q(block_all=True)

    query = policies.matching_url_query(id) & policy_type
    if actor_id:
    if fid:
        query = policies.matching_url_query(fid) & policy_type
        if fid and actor_id:
            query |= policies.matching_url_query(actor_id) & policy_type
    elif actor_id:
        query = policies.matching_url_query(actor_id) & policy_type
    return policies.filter(query).exists()


@@ -111,6 +121,7 @@ def receive(activity, on_behalf_of):
    from . import models
    from . import serializers
    from . import tasks
    from .routes import inbox

    # we ensure the activity has the bare minimum structure before storing
    # it in our database
@@ -118,8 +129,12 @@ def receive(activity, on_behalf_of):
        data=activity, context={"actor": on_behalf_of, "local_recipients": True}
    )
    serializer.is_valid(raise_exception=True)
    if not inbox.get_matching_handlers(activity):
        # discard unhandlable activity
        return

    if should_reject(
        id=serializer.validated_data["id"],
        fid=serializer.validated_data.get("id"),
        actor_id=serializer.validated_data["actor"].fid,
        payload=activity,
    ):
@@ -350,30 +365,9 @@ class OutboxRouter(Router):
        return activities


def recursive_gettattr(obj, key, permissive=False):
    """
    Given a dictionary such as {'user': {'name': 'Bob'}} and
    a dotted string such as user.name, returns 'Bob'.

    If the value is not present, returns None
    """
    v = obj
    for k in key.split("."):
        try:
            v = v.get(k)
        except (TypeError, AttributeError):
            if not permissive:
                raise
            return
        if v is None:
            return

    return v


def match_route(route, payload):
    for key, value in route.items():
        payload_value = recursive_gettattr(payload, key)
        payload_value = recursive_getattr(payload, key, permissive=True)
        if payload_value != value:
            return False
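As a quick illustration of the dotted-path lookup that should_reject and match_route now delegate to funkwhale_api.common.utils.recursive_getattr, here is a minimal, self-contained sketch; the helper is restated locally so the snippet runs on its own, and it mirrors, rather than reproduces, the project's implementation:

def recursive_getattr(obj, key, permissive=False):
    # Walk a dotted path ("object.type") through nested dictionaries.
    v = obj
    for k in key.split("."):
        try:
            v = v.get(k)
        except (TypeError, AttributeError):
            if not permissive:
                raise
            return None
        if v is None:
            return None
    return v


def match_route(route, payload):
    # A route matches when every dotted key resolves to the expected value.
    return all(
        recursive_getattr(payload, key, permissive=True) == value
        for key, value in route.items()
    )


payload = {"type": "Update", "object": {"type": "Track"}}
assert recursive_getattr(payload, "object.type", permissive=True) == "Track"
assert match_route({"type": "Update", "object.type": "Track"}, payload)
assert not match_route({"type": "Delete"}, payload)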
@ -417,6 +411,27 @@ def prepare_deliveries_and_inbox_items(recipient_list, type):
|
|||
remote_inbox_urls.add(actor.shared_inbox_url or actor.inbox_url)
|
||||
urls.append(r["target"].followers_url)
|
||||
|
||||
elif isinstance(r, dict) and r["type"] == "instances_with_followers":
|
||||
# we want to broadcast the activity to other instances service actors
|
||||
# when we have at least one follower from this instance
|
||||
follows = (
|
||||
models.LibraryFollow.objects.filter(approved=True)
|
||||
.exclude(actor__domain_id=settings.FEDERATION_HOSTNAME)
|
||||
.exclude(actor__domain=None)
|
||||
.union(
|
||||
models.Follow.objects.filter(approved=True)
|
||||
.exclude(actor__domain_id=settings.FEDERATION_HOSTNAME)
|
||||
.exclude(actor__domain=None)
|
||||
)
|
||||
)
|
||||
actors = models.Actor.objects.filter(
|
||||
managed_domains__name__in=follows.values_list(
|
||||
"actor__domain_id", flat=True
|
||||
)
|
||||
)
|
||||
values = actors.values("shared_inbox_url", "inbox_url")
|
||||
for v in values:
|
||||
remote_inbox_urls.add(v["shared_inbox_url"] or v["inbox_url"])
|
||||
deliveries = [models.Delivery(inbox_url=url) for url in remote_inbox_urls]
|
||||
inbox_items = [
|
||||
models.InboxItem(actor=actor, type=type) for actor in local_recipients
|
||||
|
|
|
@@ -5,8 +5,9 @@ from django.conf import settings
from django.utils import timezone

from funkwhale_api.common import preferences, session
from funkwhale_api.users import models as users_models

from . import models, serializers
from . import keys, models, serializers

logger = logging.getLogger(__name__)

@@ -28,7 +29,7 @@ def get_actor_data(actor_url):
def get_actor(fid, skip_cache=False):
    if not skip_cache:
        try:
            actor = models.Actor.objects.get(fid=fid)
            actor = models.Actor.objects.select_related().get(fid=fid)
        except models.Actor.DoesNotExist:
            actor = None
        fetch_delta = datetime.timedelta(
@@ -42,3 +43,23 @@ def get_actor(fid, skip_cache=False):
    serializer.is_valid(raise_exception=True)

    return serializer.save(last_fetch_date=timezone.now())


def get_service_actor():
    name, domain = (
        settings.FEDERATION_SERVICE_ACTOR_USERNAME,
        settings.FEDERATION_HOSTNAME,
    )
    try:
        return models.Actor.objects.select_related().get(
            preferred_username=name, domain__name=domain
        )
    except models.Actor.DoesNotExist:
        pass

    args = users_models.get_actor_data(name)
    private, public = keys.get_key_pair()
    args["private_key"] = private.decode("utf-8")
    args["public_key"] = public.decode("utf-8")
    args["type"] = "Service"
    return models.Actor.objects.create(**args)
@ -30,6 +30,14 @@ class DomainAdmin(admin.ModelAdmin):
|
|||
search_fields = ["name"]
|
||||
|
||||
|
||||
@admin.register(models.Fetch)
|
||||
class FetchAdmin(admin.ModelAdmin):
|
||||
list_display = ["url", "actor", "status", "creation_date", "fetch_date", "detail"]
|
||||
search_fields = ["url", "actor__username"]
|
||||
list_filter = ["status"]
|
||||
list_select_related = True
|
||||
|
||||
|
||||
@admin.register(models.Activity)
|
||||
class ActivityAdmin(admin.ModelAdmin):
|
||||
list_display = ["type", "fid", "url", "actor", "creation_date"]
|
||||
|
|
|
@ -144,3 +144,19 @@ class InboxItemActionSerializer(common_serializers.ActionSerializer):
|
|||
|
||||
def handle_read(self, objects):
|
||||
return objects.update(is_read=True)
|
||||
|
||||
|
||||
class FetchSerializer(serializers.ModelSerializer):
|
||||
actor = federation_serializers.APIActorSerializer()
|
||||
|
||||
class Meta:
|
||||
model = models.Fetch
|
||||
fields = [
|
||||
"id",
|
||||
"url",
|
||||
"actor",
|
||||
"status",
|
||||
"detail",
|
||||
"creation_date",
|
||||
"fetch_date",
|
||||
]
|
||||
|
|
|
@ -3,6 +3,7 @@ from rest_framework import routers
|
|||
from . import api_views
|
||||
|
||||
router = routers.SimpleRouter()
|
||||
router.register(r"fetches", api_views.FetchViewSet, "fetches")
|
||||
router.register(r"follows/library", api_views.LibraryFollowViewSet, "library-follows")
|
||||
router.register(r"inbox", api_views.InboxItemViewSet, "inbox")
|
||||
router.register(r"libraries", api_views.LibraryViewSet, "libraries")
|
||||
|
|
|
@ -10,6 +10,7 @@ from rest_framework import response
|
|||
from rest_framework import viewsets
|
||||
|
||||
from funkwhale_api.music import models as music_models
|
||||
from funkwhale_api.users.oauth import permissions as oauth_permissions
|
||||
|
||||
from . import activity
|
||||
from . import api_serializers
|
||||
|
@ -43,7 +44,8 @@ class LibraryFollowViewSet(
|
|||
.select_related("actor", "target__actor")
|
||||
)
|
||||
serializer_class = api_serializers.LibraryFollowSerializer
|
||||
permission_classes = [permissions.IsAuthenticated]
|
||||
permission_classes = [oauth_permissions.ScopePermission]
|
||||
required_scope = "follows"
|
||||
filterset_class = filters.LibraryFollowFilter
|
||||
ordering_fields = ("creation_date",)
|
||||
|
||||
|
@ -100,7 +102,8 @@ class LibraryViewSet(mixins.RetrieveModelMixin, viewsets.GenericViewSet):
|
|||
.annotate(_uploads_count=Count("uploads"))
|
||||
)
|
||||
serializer_class = api_serializers.LibrarySerializer
|
||||
permission_classes = [permissions.IsAuthenticated]
|
||||
permission_classes = [oauth_permissions.ScopePermission]
|
||||
required_scope = "libraries"
|
||||
|
||||
def get_queryset(self):
|
||||
qs = super().get_queryset()
|
||||
|
@ -132,6 +135,7 @@ class LibraryViewSet(mixins.RetrieveModelMixin, viewsets.GenericViewSet):
|
|||
try:
|
||||
library = utils.retrieve_ap_object(
|
||||
fid,
|
||||
actor=request.user.actor,
|
||||
queryset=self.queryset,
|
||||
serializer_class=serializers.LibrarySerializer,
|
||||
)
|
||||
|
@ -168,7 +172,8 @@ class InboxItemViewSet(
|
|||
.order_by("-activity__creation_date")
|
||||
)
|
||||
serializer_class = api_serializers.InboxItemSerializer
|
||||
permission_classes = [permissions.IsAuthenticated]
|
||||
permission_classes = [oauth_permissions.ScopePermission]
|
||||
required_scope = "notifications"
|
||||
filterset_class = filters.InboxItemFilter
|
||||
ordering_fields = ("activity__creation_date",)
|
||||
|
||||
|
@ -185,3 +190,10 @@ class InboxItemViewSet(
|
|||
serializer.is_valid(raise_exception=True)
|
||||
result = serializer.save()
|
||||
return response.Response(result, status=200)
|
||||
|
||||
|
||||
class FetchViewSet(mixins.RetrieveModelMixin, viewsets.GenericViewSet):
|
||||
|
||||
queryset = models.Fetch.objects.select_related("actor")
|
||||
serializer_class = api_serializers.FetchSerializer
|
||||
permission_classes = [permissions.IsAuthenticated]
|
||||
|
|
|
@@ -1,11 +1,13 @@
import cryptography
import logging
import datetime

from django.contrib.auth.models import AnonymousUser
from rest_framework import authentication, exceptions as rest_exceptions
from django.utils import timezone

from rest_framework import authentication, exceptions as rest_exceptions
from funkwhale_api.moderation import models as moderation_models
from . import actors, exceptions, keys, signing, utils
from . import actors, exceptions, keys, signing, tasks, utils


logger = logging.getLogger(__name__)
@@ -57,6 +59,15 @@ class SignatureAuthentication(authentication.BaseAuthentication):
        actor = actors.get_actor(actor_url, skip_cache=True)
        signing.verify_django(request, actor.public_key.encode("utf-8"))

        # we trigger a nodeinfo update on the actor's domain, if needed
        fetch_delay = 24 * 3600
        now = timezone.now()
        last_fetch = actor.domain.nodeinfo_fetch_date
        if not last_fetch or (
            last_fetch < (now - datetime.timedelta(seconds=fetch_delay))
        ):
            tasks.update_domain_nodeinfo(domain_name=actor.domain.name)
            actor.domain.refresh_from_db()
        return actor

    def authenticate(self, request):
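The block added above refreshes a domain's nodeinfo at most once every 24 hours. A standalone sketch of that staleness check, with illustrative names that are not part of the diff:

import datetime


def nodeinfo_is_stale(last_fetch, now=None, max_age_seconds=24 * 3600):
    # True when the domain was never fetched, or when the last fetch is
    # older than the allowed window (24 hours above).
    now = now or datetime.datetime.now(datetime.timezone.utc)
    return not last_fetch or last_fetch < now - datetime.timedelta(
        seconds=max_age_seconds
    )


now = datetime.datetime.now(datetime.timezone.utc)
assert nodeinfo_is_stale(None)
assert nodeinfo_is_stale(now - datetime.timedelta(days=2))
assert not nodeinfo_is_stale(now - datetime.timedelta(hours=1))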
@ -0,0 +1,333 @@
|
|||
CONTEXTS = [
|
||||
{
|
||||
"shortId": "LDP",
|
||||
"contextUrl": None,
|
||||
"documentUrl": "http://www.w3.org/ns/ldp",
|
||||
"document": {
|
||||
"@context": {
|
||||
"ldp": "http://www.w3.org/ns/ldp#",
|
||||
"id": "@id",
|
||||
"type": "@type",
|
||||
"Container": "ldp:Container",
|
||||
"BasicContainer": "ldp:BasicContainer",
|
||||
"DirectContainer": "ldp:DirectContainer",
|
||||
"IndirectContainer": "ldp:IndirectContainer",
|
||||
"hasMemberRelation": {"@id": "ldp:hasMemberRelation", "@type": "@id"},
|
||||
"isMemberOfRelation": {"@id": "ldp:isMemberOfRelation", "@type": "@id"},
|
||||
"membershipResource": {"@id": "ldp:membershipResource", "@type": "@id"},
|
||||
"insertedContentRelation": {
|
||||
"@id": "ldp:insertedContentRelation",
|
||||
"@type": "@id",
|
||||
},
|
||||
"contains": {"@id": "ldp:contains", "@type": "@id"},
|
||||
"member": {"@id": "ldp:member", "@type": "@id"},
|
||||
"constrainedBy": {"@id": "ldp:constrainedBy", "@type": "@id"},
|
||||
"Resource": "ldp:Resource",
|
||||
"RDFSource": "ldp:RDFSource",
|
||||
"NonRDFSource": "ldp:NonRDFSource",
|
||||
"MemberSubject": "ldp:MemberSubject",
|
||||
"PreferContainment": "ldp:PreferContainment",
|
||||
"PreferMembership": "ldp:PreferMembership",
|
||||
"PreferMinimalContainer": "ldp:PreferMinimalContainer",
|
||||
"PageSortCriterion": "ldp:PageSortCriterion",
|
||||
"pageSortCriteria": {
|
||||
"@id": "ldp:pageSortCriteria",
|
||||
"@type": "@id",
|
||||
"@container": "@list",
|
||||
},
|
||||
"pageSortPredicate": {"@id": "ldp:pageSortPredicate", "@type": "@id"},
|
||||
"pageSortOrder": {"@id": "ldp:pageSortOrder", "@type": "@id"},
|
||||
"pageSortCollation": {"@id": "ldp:pageSortCollation", "@type": "@id"},
|
||||
"Ascending": "ldp:Ascending",
|
||||
"Descending": "ldp:Descending",
|
||||
"Page": "ldp:Page",
|
||||
"pageSequence": {"@id": "ldp:pageSequence", "@type": "@id"},
|
||||
"inbox": {"@id": "ldp:inbox", "@type": "@id"},
|
||||
}
|
||||
},
|
||||
},
|
||||
{
|
||||
"shortId": "AS",
|
||||
"contextUrl": None,
|
||||
"documentUrl": "https://www.w3.org/ns/activitystreams",
|
||||
"document": {
|
||||
"@context": {
|
||||
"@vocab": "_:",
|
||||
"xsd": "http://www.w3.org/2001/XMLSchema#",
|
||||
"as": "https://www.w3.org/ns/activitystreams#",
|
||||
"ldp": "http://www.w3.org/ns/ldp#",
|
||||
"id": "@id",
|
||||
"type": "@type",
|
||||
"Accept": "as:Accept",
|
||||
"Activity": "as:Activity",
|
||||
"IntransitiveActivity": "as:IntransitiveActivity",
|
||||
"Add": "as:Add",
|
||||
"Announce": "as:Announce",
|
||||
"Application": "as:Application",
|
||||
"Arrive": "as:Arrive",
|
||||
"Article": "as:Article",
|
||||
"Audio": "as:Audio",
|
||||
"Block": "as:Block",
|
||||
"Collection": "as:Collection",
|
||||
"CollectionPage": "as:CollectionPage",
|
||||
"Relationship": "as:Relationship",
|
||||
"Create": "as:Create",
|
||||
"Delete": "as:Delete",
|
||||
"Dislike": "as:Dislike",
|
||||
"Document": "as:Document",
|
||||
"Event": "as:Event",
|
||||
"Follow": "as:Follow",
|
||||
"Flag": "as:Flag",
|
||||
"Group": "as:Group",
|
||||
"Ignore": "as:Ignore",
|
||||
"Image": "as:Image",
|
||||
"Invite": "as:Invite",
|
||||
"Join": "as:Join",
|
||||
"Leave": "as:Leave",
|
||||
"Like": "as:Like",
|
||||
"Link": "as:Link",
|
||||
"Mention": "as:Mention",
|
||||
"Note": "as:Note",
|
||||
"Object": "as:Object",
|
||||
"Offer": "as:Offer",
|
||||
"OrderedCollection": "as:OrderedCollection",
|
||||
"OrderedCollectionPage": "as:OrderedCollectionPage",
|
||||
"Organization": "as:Organization",
|
||||
"Page": "as:Page",
|
||||
"Person": "as:Person",
|
||||
"Place": "as:Place",
|
||||
"Profile": "as:Profile",
|
||||
"Question": "as:Question",
|
||||
"Reject": "as:Reject",
|
||||
"Remove": "as:Remove",
|
||||
"Service": "as:Service",
|
||||
"TentativeAccept": "as:TentativeAccept",
|
||||
"TentativeReject": "as:TentativeReject",
|
||||
"Tombstone": "as:Tombstone",
|
||||
"Undo": "as:Undo",
|
||||
"Update": "as:Update",
|
||||
"Video": "as:Video",
|
||||
"View": "as:View",
|
||||
"Listen": "as:Listen",
|
||||
"Read": "as:Read",
|
||||
"Move": "as:Move",
|
||||
"Travel": "as:Travel",
|
||||
"IsFollowing": "as:IsFollowing",
|
||||
"IsFollowedBy": "as:IsFollowedBy",
|
||||
"IsContact": "as:IsContact",
|
||||
"IsMember": "as:IsMember",
|
||||
"subject": {"@id": "as:subject", "@type": "@id"},
|
||||
"relationship": {"@id": "as:relationship", "@type": "@id"},
|
||||
"actor": {"@id": "as:actor", "@type": "@id"},
|
||||
"attributedTo": {"@id": "as:attributedTo", "@type": "@id"},
|
||||
"attachment": {"@id": "as:attachment", "@type": "@id"},
|
||||
"bcc": {"@id": "as:bcc", "@type": "@id"},
|
||||
"bto": {"@id": "as:bto", "@type": "@id"},
|
||||
"cc": {"@id": "as:cc", "@type": "@id"},
|
||||
"context": {"@id": "as:context", "@type": "@id"},
|
||||
"current": {"@id": "as:current", "@type": "@id"},
|
||||
"first": {"@id": "as:first", "@type": "@id"},
|
||||
"generator": {"@id": "as:generator", "@type": "@id"},
|
||||
"icon": {"@id": "as:icon", "@type": "@id"},
|
||||
"image": {"@id": "as:image", "@type": "@id"},
|
||||
"inReplyTo": {"@id": "as:inReplyTo", "@type": "@id"},
|
||||
"items": {"@id": "as:items", "@type": "@id"},
|
||||
"instrument": {"@id": "as:instrument", "@type": "@id"},
|
||||
"orderedItems": {
|
||||
"@id": "as:items",
|
||||
"@type": "@id",
|
||||
"@container": "@list",
|
||||
},
|
||||
"last": {"@id": "as:last", "@type": "@id"},
|
||||
"location": {"@id": "as:location", "@type": "@id"},
|
||||
"next": {"@id": "as:next", "@type": "@id"},
|
||||
"object": {"@id": "as:object", "@type": "@id"},
|
||||
"oneOf": {"@id": "as:oneOf", "@type": "@id"},
|
||||
"anyOf": {"@id": "as:anyOf", "@type": "@id"},
|
||||
"closed": {"@id": "as:closed", "@type": "xsd:dateTime"},
|
||||
"origin": {"@id": "as:origin", "@type": "@id"},
|
||||
"accuracy": {"@id": "as:accuracy", "@type": "xsd:float"},
|
||||
"prev": {"@id": "as:prev", "@type": "@id"},
|
||||
"preview": {"@id": "as:preview", "@type": "@id"},
|
||||
"replies": {"@id": "as:replies", "@type": "@id"},
|
||||
"result": {"@id": "as:result", "@type": "@id"},
|
||||
"audience": {"@id": "as:audience", "@type": "@id"},
|
||||
"partOf": {"@id": "as:partOf", "@type": "@id"},
|
||||
"tag": {"@id": "as:tag", "@type": "@id"},
|
||||
"target": {"@id": "as:target", "@type": "@id"},
|
||||
"to": {"@id": "as:to", "@type": "@id"},
|
||||
"url": {"@id": "as:url", "@type": "@id"},
|
||||
"altitude": {"@id": "as:altitude", "@type": "xsd:float"},
|
||||
"content": "as:content",
|
||||
"contentMap": {"@id": "as:content", "@container": "@language"},
|
||||
"name": "as:name",
|
||||
"nameMap": {"@id": "as:name", "@container": "@language"},
|
||||
"duration": {"@id": "as:duration", "@type": "xsd:duration"},
|
||||
"endTime": {"@id": "as:endTime", "@type": "xsd:dateTime"},
|
||||
"height": {"@id": "as:height", "@type": "xsd:nonNegativeInteger"},
|
||||
"href": {"@id": "as:href", "@type": "@id"},
|
||||
"hreflang": "as:hreflang",
|
||||
"latitude": {"@id": "as:latitude", "@type": "xsd:float"},
|
||||
"longitude": {"@id": "as:longitude", "@type": "xsd:float"},
|
||||
"mediaType": "as:mediaType",
|
||||
"published": {"@id": "as:published", "@type": "xsd:dateTime"},
|
||||
"radius": {"@id": "as:radius", "@type": "xsd:float"},
|
||||
"rel": "as:rel",
|
||||
"startIndex": {
|
||||
"@id": "as:startIndex",
|
||||
"@type": "xsd:nonNegativeInteger",
|
||||
},
|
||||
"startTime": {"@id": "as:startTime", "@type": "xsd:dateTime"},
|
||||
"summary": "as:summary",
|
||||
"summaryMap": {"@id": "as:summary", "@container": "@language"},
|
||||
"totalItems": {
|
||||
"@id": "as:totalItems",
|
||||
"@type": "xsd:nonNegativeInteger",
|
||||
},
|
||||
"units": "as:units",
|
||||
"updated": {"@id": "as:updated", "@type": "xsd:dateTime"},
|
||||
"width": {"@id": "as:width", "@type": "xsd:nonNegativeInteger"},
|
||||
"describes": {"@id": "as:describes", "@type": "@id"},
|
||||
"formerType": {"@id": "as:formerType", "@type": "@id"},
|
||||
"deleted": {"@id": "as:deleted", "@type": "xsd:dateTime"},
|
||||
"inbox": {"@id": "ldp:inbox", "@type": "@id"},
|
||||
"outbox": {"@id": "as:outbox", "@type": "@id"},
|
||||
"following": {"@id": "as:following", "@type": "@id"},
|
||||
"followers": {"@id": "as:followers", "@type": "@id"},
|
||||
"streams": {"@id": "as:streams", "@type": "@id"},
|
||||
"preferredUsername": "as:preferredUsername",
|
||||
"endpoints": {"@id": "as:endpoints", "@type": "@id"},
|
||||
"uploadMedia": {"@id": "as:uploadMedia", "@type": "@id"},
|
||||
"proxyUrl": {"@id": "as:proxyUrl", "@type": "@id"},
|
||||
"liked": {"@id": "as:liked", "@type": "@id"},
|
||||
"oauthAuthorizationEndpoint": {
|
||||
"@id": "as:oauthAuthorizationEndpoint",
|
||||
"@type": "@id",
|
||||
},
|
||||
"oauthTokenEndpoint": {"@id": "as:oauthTokenEndpoint", "@type": "@id"},
|
||||
"provideClientKey": {"@id": "as:provideClientKey", "@type": "@id"},
|
||||
"signClientKey": {"@id": "as:signClientKey", "@type": "@id"},
|
||||
"sharedInbox": {"@id": "as:sharedInbox", "@type": "@id"},
|
||||
"Public": {"@id": "as:Public", "@type": "@id"},
|
||||
"source": "as:source",
|
||||
"likes": {"@id": "as:likes", "@type": "@id"},
|
||||
"shares": {"@id": "as:shares", "@type": "@id"},
|
||||
# Added manually
|
||||
"manuallyApprovesFollowers": "as:manuallyApprovesFollowers",
|
||||
}
|
||||
},
|
||||
},
|
||||
{
|
||||
"shortId": "SEC",
|
||||
"contextUrl": None,
|
||||
"documentUrl": "https://w3id.org/security/v1",
|
||||
"document": {
|
||||
"@context": {
|
||||
"id": "@id",
|
||||
"type": "@type",
|
||||
"dc": "http://purl.org/dc/terms/",
|
||||
"sec": "https://w3id.org/security#",
|
||||
"xsd": "http://www.w3.org/2001/XMLSchema#",
|
||||
"EcdsaKoblitzSignature2016": "sec:EcdsaKoblitzSignature2016",
|
||||
"Ed25519Signature2018": "sec:Ed25519Signature2018",
|
||||
"EncryptedMessage": "sec:EncryptedMessage",
|
||||
"GraphSignature2012": "sec:GraphSignature2012",
|
||||
"LinkedDataSignature2015": "sec:LinkedDataSignature2015",
|
||||
"LinkedDataSignature2016": "sec:LinkedDataSignature2016",
|
||||
"CryptographicKey": "sec:Key",
|
||||
"authenticationTag": "sec:authenticationTag",
|
||||
"canonicalizationAlgorithm": "sec:canonicalizationAlgorithm",
|
||||
"cipherAlgorithm": "sec:cipherAlgorithm",
|
||||
"cipherData": "sec:cipherData",
|
||||
"cipherKey": "sec:cipherKey",
|
||||
"created": {"@id": "dc:created", "@type": "xsd:dateTime"},
|
||||
"creator": {"@id": "dc:creator", "@type": "@id"},
|
||||
"digestAlgorithm": "sec:digestAlgorithm",
|
||||
"digestValue": "sec:digestValue",
|
||||
"domain": "sec:domain",
|
||||
"encryptionKey": "sec:encryptionKey",
|
||||
"expiration": {"@id": "sec:expiration", "@type": "xsd:dateTime"},
|
||||
"expires": {"@id": "sec:expiration", "@type": "xsd:dateTime"},
|
||||
"initializationVector": "sec:initializationVector",
|
||||
"iterationCount": "sec:iterationCount",
|
||||
"nonce": "sec:nonce",
|
||||
"normalizationAlgorithm": "sec:normalizationAlgorithm",
|
||||
"owner": {"@id": "sec:owner", "@type": "@id"},
|
||||
"password": "sec:password",
|
||||
"privateKey": {"@id": "sec:privateKey", "@type": "@id"},
|
||||
"privateKeyPem": "sec:privateKeyPem",
|
||||
"publicKey": {"@id": "sec:publicKey", "@type": "@id"},
|
||||
"publicKeyBase58": "sec:publicKeyBase58",
|
||||
"publicKeyPem": "sec:publicKeyPem",
|
||||
"publicKeyWif": "sec:publicKeyWif",
|
||||
"publicKeyService": {"@id": "sec:publicKeyService", "@type": "@id"},
|
||||
"revoked": {"@id": "sec:revoked", "@type": "xsd:dateTime"},
|
||||
"salt": "sec:salt",
|
||||
"signature": "sec:signature",
|
||||
"signatureAlgorithm": "sec:signingAlgorithm",
|
||||
"signatureValue": "sec:signatureValue",
|
||||
}
|
||||
},
|
||||
},
|
||||
{
|
||||
"shortId": "FW",
|
||||
"contextUrl": None,
|
||||
"documentUrl": "https://funkwhale.audio/ns",
|
||||
"document": {
|
||||
"@context": {
|
||||
"id": "@id",
|
||||
"type": "@type",
|
||||
"as": "https://www.w3.org/ns/activitystreams#",
|
||||
"fw": "https://funkwhale.audio/ns#",
|
||||
"xsd": "http://www.w3.org/2001/XMLSchema#",
|
||||
"Album": "fw:Album",
|
||||
"Track": "fw:Track",
|
||||
"Artist": "fw:Artist",
|
||||
"Library": "fw:Library",
|
||||
"bitrate": {"@id": "fw:bitrate", "@type": "xsd:nonNegativeInteger"},
|
||||
"size": {"@id": "fw:size", "@type": "xsd:nonNegativeInteger"},
|
||||
"position": {"@id": "fw:position", "@type": "xsd:nonNegativeInteger"},
|
||||
"disc": {"@id": "fw:disc", "@type": "xsd:nonNegativeInteger"},
|
||||
"library": {"@id": "fw:library", "@type": "@id"},
|
||||
"track": {"@id": "fw:track", "@type": "@id"},
|
||||
"cover": {"@id": "fw:cover", "@type": "as:Link"},
|
||||
"album": {"@id": "fw:album", "@type": "@id"},
|
||||
"artists": {"@id": "fw:artists", "@type": "@id", "@container": "@list"},
|
||||
"released": {"@id": "fw:released", "@type": "xsd:date"},
|
||||
"musicbrainzId": "fw:musicbrainzId",
|
||||
"license": {"@id": "fw:license", "@type": "@id"},
|
||||
"copyright": "fw:copyright",
|
||||
}
|
||||
},
|
||||
},
|
||||
]

CONTEXTS_BY_ID = {c["shortId"]: c for c in CONTEXTS}


class NS:
    def __init__(self, conf):
        self.conf = conf
        self.baseUrl = self.conf["document"]["@context"][self.conf["shortId"].lower()]

    def __repr__(self):
        return "<{}: {}>".format(self.conf["shortId"], self.baseUrl)

    def __getattr__(self, key):
        if key not in self.conf["document"]["@context"]:
            raise AttributeError(
                "{} is not a valid property of context {}".format(key, self.baseUrl)
            )
        return self.baseUrl + key


class NoopContext:
    def __getattr__(self, key):
        return "_:{}".format(key)


NOOP = NoopContext()
AS = NS(CONTEXTS_BY_ID["AS"])
LDP = NS(CONTEXTS_BY_ID["LDP"])
SEC = NS(CONTEXTS_BY_ID["SEC"])
FW = NS(CONTEXTS_BY_ID["FW"])
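The NS wrapper above simply turns the short term names of a bundled JSON-LD context into fully qualified IRIs. A self-contained sketch of that behaviour, using a trimmed, illustrative context rather than the full documents added above:

class NS:
    # Minimal copy of the helper above, kept here so the example runs alone.
    def __init__(self, conf):
        self.conf = conf
        self.baseUrl = conf["document"]["@context"][conf["shortId"].lower()]

    def __getattr__(self, key):
        if key not in self.conf["document"]["@context"]:
            raise AttributeError(key)
        return self.baseUrl + key


AS = NS(
    {
        "shortId": "AS",
        "document": {
            "@context": {
                "as": "https://www.w3.org/ns/activitystreams#",
                "Public": "as:Public",
            }
        },
    }
)
assert AS.Public == "https://www.w3.org/ns/activitystreams#Public"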
@ -0,0 +1,49 @@
|
|||
from django.db import transaction
|
||||
|
||||
from rest_framework import decorators
|
||||
from rest_framework import permissions
|
||||
from rest_framework import response
|
||||
from rest_framework import status
|
||||
|
||||
from funkwhale_api.common import utils as common_utils
|
||||
|
||||
from . import api_serializers
|
||||
from . import filters
|
||||
from . import models
|
||||
from . import tasks
|
||||
from . import utils
|
||||
|
||||
|
||||
def fetches_route():
|
||||
@transaction.atomic
|
||||
def fetches(self, request, *args, **kwargs):
|
||||
obj = self.get_object()
|
||||
if request.method == "GET":
|
||||
queryset = models.Fetch.objects.get_for_object(obj).select_related("actor")
|
||||
queryset = queryset.order_by("-creation_date")
|
||||
filterset = filters.FetchFilter(request.GET, queryset=queryset)
|
||||
page = self.paginate_queryset(filterset.qs)
|
||||
if page is not None:
|
||||
serializer = api_serializers.FetchSerializer(page, many=True)
|
||||
return self.get_paginated_response(serializer.data)
|
||||
|
||||
serializer = api_serializers.FetchSerializer(queryset, many=True)
|
||||
return response.Response(serializer.data)
|
||||
if request.method == "POST":
|
||||
if utils.is_local(obj.fid):
|
||||
return response.Response(
|
||||
{"detail": "Cannot fetch a local object"}, status=400
|
||||
)
|
||||
|
||||
fetch = models.Fetch.objects.create(
|
||||
url=obj.fid, actor=request.user.actor, object=obj
|
||||
)
|
||||
common_utils.on_commit(tasks.fetch.delay, fetch_id=fetch.pk)
|
||||
serializer = api_serializers.FetchSerializer(fetch)
|
||||
return response.Response(serializer.data, status=status.HTTP_201_CREATED)
|
||||
|
||||
return decorators.action(
|
||||
methods=["get", "post"],
|
||||
detail=True,
|
||||
permission_classes=[permissions.IsAuthenticated],
|
||||
)(fetches)
|
|
@ -69,11 +69,21 @@ def create_user(actor):
|
|||
@registry.register
|
||||
class DomainFactory(NoUpdateOnCreate, factory.django.DjangoModelFactory):
|
||||
name = factory.Faker("domain_name")
|
||||
nodeinfo_fetch_date = factory.LazyFunction(lambda: timezone.now())
|
||||
|
||||
class Meta:
|
||||
model = "federation.Domain"
|
||||
django_get_or_create = ("name",)
|
||||
|
||||
@factory.post_generation
|
||||
def with_service_actor(self, create, extracted, **kwargs):
|
||||
if not create or not extracted:
|
||||
return
|
||||
|
||||
self.service_actor = ActorFactory(domain=self)
|
||||
self.save(update_fields=["service_actor"])
|
||||
return self.service_actor
|
||||
|
||||
|
||||
@registry.register
|
||||
class ActorFactory(NoUpdateOnCreate, factory.DjangoModelFactory):
|
||||
|
@ -156,7 +166,7 @@ class MusicLibraryFactory(NoUpdateOnCreate, factory.django.DjangoModelFactory):
|
|||
|
||||
|
||||
@registry.register
|
||||
class LibraryScan(NoUpdateOnCreate, factory.django.DjangoModelFactory):
|
||||
class LibraryScanFactory(NoUpdateOnCreate, factory.django.DjangoModelFactory):
|
||||
library = factory.SubFactory(MusicLibraryFactory)
|
||||
actor = factory.SubFactory(ActorFactory)
|
||||
total_files = factory.LazyAttribute(lambda o: o.library.uploads_count)
|
||||
|
@ -165,6 +175,14 @@ class LibraryScan(NoUpdateOnCreate, factory.django.DjangoModelFactory):
|
|||
model = "music.LibraryScan"
|
||||
|
||||
|
||||
@registry.register
|
||||
class FetchFactory(NoUpdateOnCreate, factory.django.DjangoModelFactory):
|
||||
actor = factory.SubFactory(ActorFactory)
|
||||
|
||||
class Meta:
|
||||
model = "federation.Fetch"
|
||||
|
||||
|
||||
@registry.register
|
||||
class ActivityFactory(NoUpdateOnCreate, factory.django.DjangoModelFactory):
|
||||
actor = factory.SubFactory(ActorFactory)
|
||||
|
|
|
@ -1,6 +1,9 @@
|
|||
import django_filters
|
||||
|
||||
from rest_framework import serializers
|
||||
|
||||
from . import models
|
||||
from . import utils
|
||||
|
||||
|
||||
class ActorRelatedField(serializers.EmailField):
|
||||
|
@ -16,3 +19,15 @@ class ActorRelatedField(serializers.EmailField):
|
|||
)
|
||||
except models.Actor.DoesNotExist:
|
||||
raise serializers.ValidationError("Invalid actor name")
|
||||
|
||||
|
||||
class DomainFromURLFilter(django_filters.CharFilter):
|
||||
def __init__(self, *args, **kwargs):
|
||||
self.url_field = kwargs.pop("url_field", "fid")
|
||||
super().__init__(*args, **kwargs)
|
||||
|
||||
def filter(self, qs, value):
|
||||
if not value:
|
||||
return qs
|
||||
query = utils.get_domain_query_from_url(value, self.url_field)
|
||||
return qs.filter(query)
|
||||
|
|
|
@ -46,3 +46,14 @@ class InboxItemFilter(django_filters.FilterSet):
|
|||
|
||||
def filter_before(self, queryset, field_name, value):
|
||||
return queryset.filter(pk__lte=value)
|
||||
|
||||
|
||||
class FetchFilter(django_filters.FilterSet):
|
||||
ordering = django_filters.OrderingFilter(
|
||||
# tuple-mapping retains order
|
||||
fields=(("creation_date", "creation_date"), ("fetch_date", "fetch_date"))
|
||||
)
|
||||
|
||||
class Meta:
|
||||
model = models.Fetch
|
||||
fields = ["status", "object_id", "url"]
|
||||
|
|
|
@ -0,0 +1,287 @@
|
|||
import aiohttp
|
||||
import asyncio
|
||||
import functools
|
||||
|
||||
import pyld.jsonld
|
||||
from django.conf import settings
|
||||
import pyld.documentloader.requests
|
||||
from rest_framework import serializers
|
||||
from rest_framework.fields import empty
|
||||
from . import contexts
|
||||
|
||||
|
||||
def cached_contexts(loader):
|
||||
functools.wraps(loader)
|
||||
|
||||
def load(url, *args, **kwargs):
|
||||
for cached in contexts.CONTEXTS:
|
||||
if url == cached["documentUrl"]:
|
||||
return cached
|
||||
return loader(url, *args, **kwargs)
|
||||
|
||||
return load
|
||||
|
||||
|
||||
def get_document_loader():
|
||||
loader = pyld.documentloader.requests.requests_document_loader(
|
||||
verify=settings.EXTERNAL_REQUESTS_VERIFY_SSL
|
||||
)
|
||||
return cached_contexts(loader)
|
||||
|
||||
|
||||
def expand(doc, options=None, insert_fw_context=True):
|
||||
options = options or {}
|
||||
options.setdefault("documentLoader", get_document_loader())
|
||||
if isinstance(doc, str):
|
||||
doc = options["documentLoader"](doc)["document"]
|
||||
if insert_fw_context:
|
||||
fw = contexts.CONTEXTS_BY_ID["FW"]["documentUrl"]
|
||||
try:
|
||||
insert_context(fw, doc)
|
||||
except KeyError:
|
||||
# probably an already expanded document
|
||||
pass
|
||||
result = pyld.jsonld.expand(doc, options=options)
|
||||
try:
|
||||
# jsonld.expand returns a list, which is useless for us
|
||||
return result[0]
|
||||
except IndexError:
|
||||
raise ValueError("Impossible to expand this jsonld document")
|
||||
|
||||
|
||||
def insert_context(ctx, doc):
|
||||
"""
|
||||
In some situations, we may want to add a default context to an existing document.
|
||||
This function enable that (this will mutate the original document)
|
||||
"""
|
||||
existing = doc["@context"]
|
||||
if isinstance(existing, list):
|
||||
if ctx not in existing:
|
||||
existing = existing[:]
|
||||
existing.append(ctx)
|
||||
doc["@context"] = existing
|
||||
else:
|
||||
doc["@context"] = [existing, ctx]
|
||||
return doc
|
||||
|
||||
|
||||
def get_session():
|
||||
return aiohttp.ClientSession(raise_for_status=True)
|
||||
|
||||
|
||||
async def fetch_json(url, session, cache=None, lock=None):
|
||||
async with session.get(url) as response:
|
||||
response.raise_for_status()
|
||||
return url, await response.json()
|
||||
|
||||
|
||||
async def fetch_many(*ids, references=None):
|
||||
"""
|
||||
Given a list of object ids, will fetch the remote
|
||||
representations for those objects, expand them
|
||||
and return a dictionnary with id as the key and expanded document as the values
|
||||
"""
|
||||
ids = set(ids)
|
||||
results = references if references is not None else {}
|
||||
|
||||
if not ids:
|
||||
return results
|
||||
|
||||
async with get_session() as session:
|
||||
tasks = [fetch_json(url, session) for url in ids if url not in results]
|
||||
tasks_results = await asyncio.gather(*tasks)
|
||||
|
||||
for url, payload in tasks_results:
|
||||
results[url] = payload
|
||||
|
||||
return results
|
||||
|
||||
|
||||
DEFAULT_PREPARE_CONFIG = {
|
||||
"type": {"property": "@type", "keep": "first"},
|
||||
"id": {"property": "@id"},
|
||||
}
|
||||
|
||||
|
||||
def dereference(value, references):
|
||||
"""
|
||||
Given a payload and a dictonary containing ids and objects, will replace
|
||||
all the matching objects in the payload by the one in the references dictionary.
|
||||
"""
|
||||
|
||||
def replace(obj, id):
|
||||
try:
|
||||
matching = references[id]
|
||||
except KeyError:
|
||||
return
|
||||
# we clear the current dict, and replace its content by the matching obj
|
||||
obj.clear()
|
||||
obj.update(matching)
|
||||
|
||||
if isinstance(value, dict):
|
||||
if "@id" in value:
|
||||
replace(value, value["@id"])
|
||||
else:
|
||||
for attr in value.values():
|
||||
dereference(attr, references)
|
||||
|
||||
elif isinstance(value, list):
|
||||
# we loop on nested objects and trigger dereferencing
|
||||
for obj in value:
|
||||
dereference(obj, references)
|
||||
|
||||
return value
|
||||
|
||||
|
||||
def get_value(value, keep=None, attr=None):
|
||||
|
||||
if keep == "first":
|
||||
value = value[0]
|
||||
if attr:
|
||||
value = value[attr]
|
||||
|
||||
elif attr:
|
||||
value = [obj[attr] for obj in value if attr in obj]
|
||||
|
||||
return value
|
||||
|
||||
|
||||
def prepare_for_serializer(payload, config, fallbacks={}):
|
||||
"""
|
||||
Json-ld payloads, as returned by expand are quite complex to handle, because
|
||||
every attr is basically a list of dictionnaries. To make code simpler,
|
||||
we use this function to clean the payload a little bit, base on the config object.
|
||||
|
||||
Config is a dictionnary, with keys being serializer field names, and values
|
||||
being dictionaries describing how to handle this field.
|
||||
"""
|
||||
final_payload = {}
|
||||
final_config = {}
|
||||
final_config.update(DEFAULT_PREPARE_CONFIG)
|
||||
final_config.update(config)
|
||||
for field, field_config in final_config.items():
|
||||
try:
|
||||
value = get_value(
|
||||
payload[field_config["property"]],
|
||||
keep=field_config.get("keep"),
|
||||
attr=field_config.get("attr"),
|
||||
)
|
||||
except (IndexError, KeyError):
|
||||
aliases = field_config.get("aliases", [])
|
||||
noop = object()
|
||||
value = noop
|
||||
if not aliases:
|
||||
continue
|
||||
|
||||
for a in aliases:
|
||||
try:
|
||||
value = get_value(
|
||||
payload[a],
|
||||
keep=field_config.get("keep"),
|
||||
attr=field_config.get("attr"),
|
||||
)
|
||||
except (IndexError, KeyError):
|
||||
continue
|
||||
|
||||
break
|
||||
|
||||
if value is noop:
|
||||
continue
|
||||
|
||||
final_payload[field] = value
|
||||
|
||||
for key, choices in fallbacks.items():
|
||||
if key in final_payload:
|
||||
# initial attr was found, no need to rely on fallbacks
|
||||
continue
|
||||
|
||||
for choice in choices:
|
||||
if choice not in final_payload:
|
||||
continue
|
||||
|
||||
final_payload[key] = final_payload[choice]
|
||||
|
||||
return final_payload
|
||||
|
||||
|
||||
def get_ids(v):
|
||||
if isinstance(v, dict) and "@id" in v:
|
||||
yield v["@id"]
|
||||
|
||||
if isinstance(v, list):
|
||||
for obj in v:
|
||||
yield from get_ids(obj)
|
||||
|
||||
|
||||
def get_default_context():
|
||||
return ["https://www.w3.org/ns/activitystreams", "https://w3id.org/security/v1", {}]
|
||||
|
||||
|
||||
def get_default_context_fw():
|
||||
return [
|
||||
"https://www.w3.org/ns/activitystreams",
|
||||
"https://w3id.org/security/v1",
|
||||
{},
|
||||
"https://funkwhale.audio/ns",
|
||||
]
|
||||
|
||||
|
||||
class JsonLdSerializer(serializers.Serializer):
|
||||
def run_validation(self, data=empty):
|
||||
if data and data is not empty and self.context.get("expand", True):
|
||||
try:
|
||||
data = expand(data)
|
||||
except ValueError:
|
||||
raise serializers.ValidationError(
|
||||
"{} is not a valid jsonld document".format(data)
|
||||
)
|
||||
try:
|
||||
config = self.Meta.jsonld_mapping
|
||||
except AttributeError:
|
||||
config = {}
|
||||
try:
|
||||
fallbacks = self.Meta.jsonld_fallbacks
|
||||
except AttributeError:
|
||||
fallbacks = {}
|
||||
data = prepare_for_serializer(data, config, fallbacks=fallbacks)
|
||||
dereferenced_fields = [
|
||||
k
|
||||
for k, c in config.items()
|
||||
if k in data and c.get("dereference", False)
|
||||
]
|
||||
dereferenced_ids = set()
|
||||
for field in dereferenced_fields:
|
||||
for i in get_ids(data[field]):
|
||||
dereferenced_ids.add(i)
|
||||
|
||||
if dereferenced_ids:
|
||||
try:
|
||||
loop = asyncio.get_event_loop()
|
||||
except RuntimeError:
|
||||
loop = asyncio.new_event_loop()
|
||||
references = self.context.setdefault("references", {})
|
||||
loop.run_until_complete(
|
||||
fetch_many(*dereferenced_ids, references=references)
|
||||
)
|
||||
data = dereference(data, references)
|
||||
return super().run_validation(data)
|
||||
|
||||
|
||||
def first_attr(property, attr, aliases=[]):
|
||||
return {"property": property, "keep": "first", "attr": attr, "aliases": aliases}
|
||||
|
||||
|
||||
def first_val(property, aliases=[]):
|
||||
return first_attr(property, "@value", aliases=aliases)
|
||||
|
||||
|
||||
def first_id(property, aliases=[]):
|
||||
return first_attr(property, "@id", aliases=aliases)
|
||||
|
||||
|
||||
def first_obj(property, aliases=[]):
|
||||
return {"property": property, "keep": "first", "aliases": aliases}
|
||||
|
||||
|
||||
def raw(property, aliases=[]):
|
||||
return {"property": property, "aliases": aliases}
|
|
@ -0,0 +1,36 @@
|
|||
# Generated by Django 2.1.5 on 2019-01-30 09:26
|
||||
|
||||
import django.contrib.postgres.fields.jsonb
|
||||
from django.db import migrations, models
|
||||
import django.db.models.deletion
|
||||
import funkwhale_api.common.validators
|
||||
import funkwhale_api.federation.models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('federation', '0016_auto_20181227_1605'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.RemoveField(
|
||||
model_name='actor',
|
||||
name='old_domain',
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='domain',
|
||||
name='service_actor',
|
||||
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='managed_domains', to='federation.Actor'),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='domain',
|
||||
name='name',
|
||||
field=models.CharField(max_length=255, primary_key=True, serialize=False, validators=[funkwhale_api.common.validators.DomainValidator()]),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='domain',
|
||||
name='nodeinfo',
|
||||
field=django.contrib.postgres.fields.jsonb.JSONField(blank=True, default=funkwhale_api.federation.models.empty_dict, max_length=50000),
|
||||
),
|
||||
]
|
|
@ -0,0 +1,33 @@
|
|||
# Generated by Django 2.1.7 on 2019-04-17 14:57
|
||||
|
||||
import django.contrib.postgres.fields.jsonb
|
||||
import django.core.serializers.json
|
||||
from django.db import migrations, models
|
||||
import django.db.models.deletion
|
||||
import django.utils.timezone
|
||||
import funkwhale_api.federation.models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('contenttypes', '0002_remove_content_type_name'),
|
||||
('federation', '0017_auto_20190130_0926'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.CreateModel(
|
||||
name='Fetch',
|
||||
fields=[
|
||||
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
|
||||
('url', models.URLField(db_index=True, max_length=500)),
|
||||
('creation_date', models.DateTimeField(default=django.utils.timezone.now)),
|
||||
('fetch_date', models.DateTimeField(blank=True, null=True)),
|
||||
('object_id', models.IntegerField(null=True)),
|
||||
('status', models.CharField(choices=[('pending', 'Pending'), ('errored', 'Errored'), ('finished', 'Finished'), ('skipped', 'Skipped')], default='pending', max_length=20)),
|
||||
('detail', django.contrib.postgres.fields.jsonb.JSONField(default=funkwhale_api.federation.models.empty_dict, encoder=django.core.serializers.json.DjangoJSONEncoder, max_length=50000)),
|
||||
('actor', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='fetches', to='federation.Actor')),
|
||||
('object_content_type', models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, to='contenttypes.ContentType')),
|
||||
],
|
||||
),
|
||||
]
|
|
@@ -1,4 +1,5 @@
import tempfile
import urllib.parse
import uuid

from django.conf import settings
@@ -43,10 +44,24 @@ class FederationMixin(models.Model):
    class Meta:
        abstract = True

    @property
    def is_local(self):
        return federation_utils.is_local(self.fid)

    @property
    def domain_name(self):
        if not self.fid:
            return

        parsed = urllib.parse.urlparse(self.fid)
        return parsed.hostname


class ActorQuerySet(models.QuerySet):
    def local(self, include=True):
        return self.exclude(user__isnull=include)
        if include:
            return self.filter(domain__name=settings.FEDERATION_HOSTNAME)
        return self.exclude(domain__name=settings.FEDERATION_HOSTNAME)

    def with_current_usage(self):
        qs = self
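A standalone check of the domain_name logic added above; the URL and hostname are made up for illustration, and the comparison at the end only suggests what federation_utils.is_local presumably does, since its implementation is not shown in this diff:

import urllib.parse

# Illustrative values, not taken from the diff
FEDERATION_HOSTNAME = "pod.example"


def domain_name(fid):
    # Same logic as FederationMixin.domain_name above.
    if not fid:
        return None
    return urllib.parse.urlparse(fid).hostname


assert domain_name("https://pod.example/federation/actors/alice") == "pod.example"
# is_local presumably reduces to a comparison along these lines:
assert domain_name("https://pod.example/x") == FEDERATION_HOSTNAME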
|
@ -96,7 +111,13 @@ class Domain(models.Model):
|
|||
creation_date = models.DateTimeField(default=timezone.now)
|
||||
nodeinfo_fetch_date = models.DateTimeField(default=None, null=True, blank=True)
|
||||
nodeinfo = JSONField(default=empty_dict, max_length=50000, blank=True)
|
||||
|
||||
service_actor = models.ForeignKey(
|
||||
"Actor",
|
||||
related_name="managed_domains",
|
||||
on_delete=models.SET_NULL,
|
||||
null=True,
|
||||
blank=True,
|
||||
)
|
||||
objects = DomainQuerySet.as_manager()
|
||||
|
||||
def __str__(self):
|
||||
|
@ -143,6 +164,10 @@ class Domain(models.Model):
|
|||
)
|
||||
return data
|
||||
|
||||
@property
|
||||
def is_local(self):
|
||||
return self.name == settings.FEDERATION_HOSTNAME
|
||||
|
||||
|
||||
class Actor(models.Model):
|
||||
ap_type = "Actor"
|
||||
|
@ -256,6 +281,76 @@ class Actor(models.Model):
|
|||
self.private_key = v[0].decode("utf-8")
|
||||
self.public_key = v[1].decode("utf-8")
|
||||
|
||||
def can_manage(self, obj):
|
||||
attributed_to = getattr(obj, "attributed_to_id", None)
|
||||
if attributed_to is not None and attributed_to == self.pk:
|
||||
# easiest case, the obj is attributed to the actor
|
||||
return True
|
||||
|
||||
if self.domain.service_actor_id != self.pk:
|
||||
# actor is not system actor, so there is no way the actor can manage
|
||||
# the object
|
||||
return False
|
||||
|
||||
# actor is service actor of its domain, so if the fid domain
|
||||
# matches, we consider the actor has the permission to manage
|
||||
# the object
|
||||
domain = self.domain_id
|
||||
return obj.fid.startswith("http://{}/".format(domain)) or obj.fid.startswith(
|
||||
"https://{}/".format(domain)
|
||||
)
|
||||
|
||||
|
||||
FETCH_STATUSES = [
|
||||
("pending", "Pending"),
|
||||
("errored", "Errored"),
|
||||
("finished", "Finished"),
|
||||
("skipped", "Skipped"),
|
||||
]
|
||||
|
||||
|
||||
class FetchQuerySet(models.QuerySet):
|
||||
def get_for_object(self, object):
|
||||
content_type = ContentType.objects.get_for_model(object)
|
||||
return self.filter(object_content_type=content_type, object_id=object.pk)
|
||||
|
||||
|
||||
class Fetch(models.Model):
|
||||
url = models.URLField(max_length=500, db_index=True)
|
||||
creation_date = models.DateTimeField(default=timezone.now)
|
||||
fetch_date = models.DateTimeField(null=True, blank=True)
|
||||
object_id = models.IntegerField(null=True)
|
||||
object_content_type = models.ForeignKey(
|
||||
ContentType, null=True, on_delete=models.CASCADE
|
||||
)
|
||||
object = GenericForeignKey("object_content_type", "object_id")
|
||||
status = models.CharField(default="pending", choices=FETCH_STATUSES, max_length=20)
|
||||
detail = JSONField(
|
||||
default=empty_dict, max_length=50000, encoder=DjangoJSONEncoder, blank=True
|
||||
)
|
||||
actor = models.ForeignKey(Actor, related_name="fetches", on_delete=models.CASCADE)
|
||||
|
||||
objects = FetchQuerySet.as_manager()
|
||||
|
||||
def save(self, **kwargs):
|
||||
if not self.url and self.object:
|
||||
self.url = self.object.fid
|
||||
|
||||
super().save(**kwargs)
|
||||
|
||||
@property
|
||||
def serializers(self):
|
||||
from . import contexts
|
||||
from . import serializers
|
||||
|
||||
return {
|
||||
contexts.FW.Artist: serializers.ArtistSerializer,
|
||||
contexts.FW.Album: serializers.AlbumSerializer,
|
||||
contexts.FW.Track: serializers.TrackSerializer,
|
||||
contexts.AS.Audio: serializers.UploadSerializer,
|
||||
contexts.FW.Library: serializers.LibrarySerializer,
|
||||
}
|
||||
|
||||
|
||||
class InboxItem(models.Model):
|
||||
"""
|
||||
|
@ -297,7 +392,9 @@ class Activity(models.Model):
|
|||
uuid = models.UUIDField(default=uuid.uuid4, unique=True)
|
||||
fid = models.URLField(unique=True, max_length=500, null=True, blank=True)
|
||||
url = models.URLField(max_length=500, null=True, blank=True)
|
||||
payload = JSONField(default=empty_dict, max_length=50000, encoder=DjangoJSONEncoder)
|
||||
payload = JSONField(
|
||||
default=empty_dict, max_length=50000, encoder=DjangoJSONEncoder, blank=True
|
||||
)
|
||||
creation_date = models.DateTimeField(default=timezone.now, db_index=True)
|
||||
type = models.CharField(db_index=True, null=True, max_length=100)
|
||||
|
||||
|
@ -413,7 +510,7 @@ class LibraryTrack(models.Model):
|
|||
album_title = models.CharField(max_length=500)
|
||||
title = models.CharField(max_length=500)
|
||||
metadata = JSONField(
|
||||
default=empty_dict, max_length=10000, encoder=DjangoJSONEncoder
|
||||
default=empty_dict, max_length=10000, encoder=DjangoJSONEncoder, blank=True
|
||||
)
|
||||
|
||||
@property
|
||||
|
|
|
@@ -1,8 +1,17 @@
from rest_framework.renderers import JSONRenderer


class ActivityPubRenderer(JSONRenderer):
    media_type = "application/activity+json"
def get_ap_renderers():
    MEDIA_TYPES = [
        ("APActivity", "application/activity+json"),
        ("APLD", "application/ld+json"),
        ("APJSON", "application/json"),
    ]

    return [
        type(name, (JSONRenderer,), {"media_type": media_type})
        for name, media_type in MEDIA_TYPES
    ]


class WebfingerRenderer(JSONRenderer):
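The classes produced by get_ap_renderers() are ordinary Django REST framework renderers, so a view can list them directly. A hedged sketch; the view name and the import path are assumptions, not part of the diff:

from rest_framework import generics

# Assumed import path for the module patched above
from funkwhale_api.federation.renderers import get_ap_renderers


class ExampleActorView(generics.RetrieveAPIView):
    # Content negotiation will now accept application/activity+json,
    # application/ld+json and plain application/json.
    renderer_classes = get_ap_renderers()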
|
@ -3,6 +3,7 @@ import logging
|
|||
from funkwhale_api.music import models as music_models
|
||||
|
||||
from . import activity
|
||||
from . import actors
|
||||
from . import serializers
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
@ -269,3 +270,113 @@ def outbox_delete_audio(context):
|
|||
serializer.data, to=[{"type": "followers", "target": library}]
|
||||
),
|
||||
}
|
||||
|
||||
|
||||
def handle_library_entry_update(payload, context, queryset, serializer_class):
|
||||
actor = context["actor"]
|
||||
obj_id = payload["object"].get("id")
|
||||
if not obj_id:
|
||||
logger.debug("Discarding update of empty obj")
|
||||
return
|
||||
|
||||
try:
|
||||
obj = queryset.select_related("attributed_to").get(fid=obj_id)
|
||||
except queryset.model.DoesNotExist:
|
||||
logger.debug("Discarding update of unkwnown obj %s", obj_id)
|
||||
return
|
||||
if not actor.can_manage(obj):
|
||||
logger.debug(
|
||||
"Discarding unauthorize update of obj %s from %s", obj_id, actor.fid
|
||||
)
|
||||
return
|
||||
|
||||
serializer = serializer_class(obj, data=payload["object"])
|
||||
if serializer.is_valid():
|
||||
serializer.save()
|
||||
else:
|
||||
logger.debug(
|
||||
"Discarding update of obj %s because of payload errors: %s",
|
||||
obj_id,
|
||||
serializer.errors,
|
||||
)
|
||||
|
||||
|
||||
@inbox.register({"type": "Update", "object.type": "Track"})
|
||||
def inbox_update_track(payload, context):
|
||||
return handle_library_entry_update(
|
||||
payload,
|
||||
context,
|
||||
queryset=music_models.Track.objects.all(),
|
||||
serializer_class=serializers.TrackSerializer,
|
||||
)
|
||||
|
||||
|
||||
@inbox.register({"type": "Update", "object.type": "Artist"})
|
||||
def inbox_update_artist(payload, context):
|
||||
return handle_library_entry_update(
|
||||
payload,
|
||||
context,
|
||||
queryset=music_models.Artist.objects.all(),
|
||||
serializer_class=serializers.ArtistSerializer,
|
||||
)
|
||||
|
||||
|
||||
@inbox.register({"type": "Update", "object.type": "Album"})
|
||||
def inbox_update_album(payload, context):
|
||||
return handle_library_entry_update(
|
||||
payload,
|
||||
context,
|
||||
queryset=music_models.Album.objects.all(),
|
||||
serializer_class=serializers.AlbumSerializer,
|
||||
)
|
||||
|
||||
|
||||
@outbox.register({"type": "Update", "object.type": "Track"})
|
||||
def outbox_update_track(context):
|
||||
track = context["track"]
|
||||
serializer = serializers.ActivitySerializer(
|
||||
{"type": "Update", "object": serializers.TrackSerializer(track).data}
|
||||
)
|
||||
|
||||
yield {
|
||||
"type": "Update",
|
||||
"actor": actors.get_service_actor(),
|
||||
"payload": with_recipients(
|
||||
serializer.data,
|
||||
to=[activity.PUBLIC_ADDRESS, {"type": "instances_with_followers"}],
|
||||
),
|
||||
}
|
||||
|
||||
|
||||
@outbox.register({"type": "Update", "object.type": "Album"})
|
||||
def outbox_update_album(context):
|
||||
album = context["album"]
|
||||
serializer = serializers.ActivitySerializer(
|
||||
{"type": "Update", "object": serializers.AlbumSerializer(album).data}
|
||||
)
|
||||
|
||||
yield {
|
||||
"type": "Update",
|
||||
"actor": actors.get_service_actor(),
|
||||
"payload": with_recipients(
|
||||
serializer.data,
|
||||
to=[activity.PUBLIC_ADDRESS, {"type": "instances_with_followers"}],
|
||||
),
|
||||
}
|
||||
|
||||
|
||||
@outbox.register({"type": "Update", "object.type": "Artist"})
|
||||
def outbox_update_artist(context):
|
||||
artist = context["artist"]
|
||||
serializer = serializers.ActivitySerializer(
|
||||
{"type": "Update", "object": serializers.ArtistSerializer(artist).data}
|
||||
)
|
||||
|
||||
yield {
|
||||
"type": "Update",
|
||||
"actor": actors.get_service_actor(),
|
||||
"payload": with_recipients(
|
||||
serializer.data,
|
||||
to=[activity.PUBLIC_ADDRESS, {"type": "instances_with_followers"}],
|
||||
),
|
||||
}
|
||||
|
|
|
@ -1,30 +1,35 @@
|
|||
import logging
|
||||
import mimetypes
|
||||
import urllib.parse
|
||||
import uuid
|
||||
|
||||
from django.core.exceptions import ObjectDoesNotExist
|
||||
from django.core.paginator import Paginator
|
||||
from rest_framework import serializers
|
||||
|
||||
from funkwhale_api.common import utils as funkwhale_utils
|
||||
from funkwhale_api.music import licenses
|
||||
from funkwhale_api.music import models as music_models
|
||||
from funkwhale_api.music import tasks as music_tasks
|
||||
|
||||
from . import activity, models, utils
|
||||
from . import activity, actors, contexts, jsonld, models, tasks, utils
|
||||
|
||||
AP_CONTEXT = [
|
||||
"https://www.w3.org/ns/activitystreams",
|
||||
"https://w3id.org/security/v1",
|
||||
{},
|
||||
]
|
||||
AP_CONTEXT = jsonld.get_default_context()
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class LinkSerializer(serializers.Serializer):
|
||||
type = serializers.ChoiceField(choices=["Link"])
|
||||
class LinkSerializer(jsonld.JsonLdSerializer):
|
||||
type = serializers.ChoiceField(choices=[contexts.AS.Link])
|
||||
href = serializers.URLField(max_length=500)
|
||||
mediaType = serializers.CharField()
|
||||
|
||||
class Meta:
|
||||
jsonld_mapping = {
|
||||
"href": jsonld.first_id(contexts.AS.href),
|
||||
"mediaType": jsonld.first_val(contexts.AS.mediaType),
|
||||
}
|
||||
|
||||
def __init__(self, *args, **kwargs):
|
||||
self.allowed_mimetypes = kwargs.pop("allowed_mimetypes", [])
|
||||
super().__init__(*args, **kwargs)
|
||||
|
@ -45,18 +50,52 @@ class LinkSerializer(serializers.Serializer):
|
|||
)
|
||||
|
||||
|
||||
class ActorSerializer(serializers.Serializer):
|
||||
class EndpointsSerializer(jsonld.JsonLdSerializer):
|
||||
sharedInbox = serializers.URLField(max_length=500, required=False)
|
||||
|
||||
class Meta:
|
||||
jsonld_mapping = {"sharedInbox": jsonld.first_id(contexts.AS.sharedInbox)}
|
||||
|
||||
|
||||
class PublicKeySerializer(jsonld.JsonLdSerializer):
|
||||
publicKeyPem = serializers.CharField(trim_whitespace=False)
|
||||
|
||||
class Meta:
|
||||
jsonld_mapping = {"publicKeyPem": jsonld.first_val(contexts.SEC.publicKeyPem)}
|
||||
|
||||
|
||||
class ActorSerializer(jsonld.JsonLdSerializer):
|
||||
id = serializers.URLField(max_length=500)
|
||||
outbox = serializers.URLField(max_length=500)
|
||||
inbox = serializers.URLField(max_length=500)
|
||||
type = serializers.ChoiceField(choices=models.TYPE_CHOICES)
|
||||
type = serializers.ChoiceField(
|
||||
choices=[getattr(contexts.AS, c[0]) for c in models.TYPE_CHOICES]
|
||||
)
|
||||
preferredUsername = serializers.CharField()
|
||||
manuallyApprovesFollowers = serializers.NullBooleanField(required=False)
|
||||
name = serializers.CharField(required=False, max_length=200)
|
||||
summary = serializers.CharField(max_length=None, required=False)
|
||||
followers = serializers.URLField(max_length=500)
|
||||
following = serializers.URLField(max_length=500, required=False, allow_null=True)
|
||||
publicKey = serializers.JSONField(required=False)
|
||||
publicKey = PublicKeySerializer(required=False)
|
||||
endpoints = EndpointsSerializer(required=False)
|
||||
|
||||
class Meta:
|
||||
jsonld_mapping = {
|
||||
"outbox": jsonld.first_id(contexts.AS.outbox),
|
||||
"inbox": jsonld.first_id(contexts.LDP.inbox),
|
||||
"following": jsonld.first_id(contexts.AS.following),
|
||||
"followers": jsonld.first_id(contexts.AS.followers),
|
||||
"preferredUsername": jsonld.first_val(contexts.AS.preferredUsername),
|
||||
"summary": jsonld.first_val(contexts.AS.summary),
|
||||
"name": jsonld.first_val(contexts.AS.name),
|
||||
"publicKey": jsonld.first_obj(contexts.SEC.publicKey),
|
||||
"manuallyApprovesFollowers": jsonld.first_val(
|
||||
contexts.AS.manuallyApprovesFollowers
|
||||
),
|
||||
"mediaType": jsonld.first_val(contexts.AS.mediaType),
|
||||
"endpoints": jsonld.first_obj(contexts.AS.endpoints),
|
||||
}
|
||||
|
||||
def to_representation(self, instance):
|
||||
ret = {
|
||||
|
@ -91,7 +130,7 @@ class ActorSerializer(serializers.Serializer):
|
|||
if instance.user.avatar:
|
||||
ret["icon"] = {
|
||||
"type": "Image",
|
||||
"mediaType": mimetypes.guess_type(instance.user.avatar.path)[0],
|
||||
"mediaType": mimetypes.guess_type(instance.user.avatar_path)[0],
|
||||
"url": utils.full_url(instance.user.avatar.crop["400x400"].url),
|
||||
}
|
||||
except ObjectDoesNotExist:
|
||||
|
@ -114,17 +153,25 @@ class ActorSerializer(serializers.Serializer):
|
|||
if maf is not None:
|
||||
kwargs["manually_approves_followers"] = maf
|
||||
domain = urllib.parse.urlparse(kwargs["fid"]).netloc
|
||||
kwargs["domain"] = models.Domain.objects.get_or_create(pk=domain)[0]
|
||||
for endpoint, url in self.initial_data.get("endpoints", {}).items():
|
||||
domain, domain_created = models.Domain.objects.get_or_create(pk=domain)
|
||||
if domain_created and not domain.is_local:
|
||||
# first time we see the domain, we trigger nodeinfo fetching
|
||||
tasks.update_domain_nodeinfo(domain_name=domain.name)
|
||||
|
||||
kwargs["domain"] = domain
|
||||
for endpoint, url in self.validated_data.get("endpoints", {}).items():
|
||||
if endpoint == "sharedInbox":
|
||||
kwargs["shared_inbox_url"] = url
|
||||
break
|
||||
try:
|
||||
kwargs["public_key"] = self.initial_data["publicKey"]["publicKeyPem"]
|
||||
kwargs["public_key"] = self.validated_data["publicKey"]["publicKeyPem"]
|
||||
except KeyError:
|
||||
pass
|
||||
return kwargs
|
||||
|
||||
def validate_type(self, v):
|
||||
return v.split("#")[-1]
|
||||
|
||||
def build(self):
|
||||
d = self.prepare_missing_fields()
|
||||
return models.Actor(**d)
|
||||
|
@ -251,11 +298,29 @@ class FollowSerializer(serializers.Serializer):
|
|||
        follow_class = models.Follow
        defaults = kwargs
        defaults["fid"] = self.validated_data["id"]
        return follow_class.objects.update_or_create(
        approved = kwargs.pop("approved", None)
        follow, created = follow_class.objects.update_or_create(
            actor=self.validated_data["actor"],
            target=self.validated_data["object"],
            defaults=defaults,
        )[0]
        )
        if not created:
            # We likely received a new follow when we had an existing one in the database.
            # This can happen when two instances are out of sync, e.g. because some
            # messages are not delivered properly. In this case, we don't change
            # the follow's approved status and return the follow as is.
            # We set a new UUID to ensure the follow urls are updated properly
            # cf #830
            follow.uuid = uuid.uuid4()
            follow.save(update_fields=["uuid"])
            return follow

        # it's a brand new follow, we use the approved value stored earlier
        if approved != follow.approved:
            follow.approved = approved
            follow.save(update_fields=["approved"])

        return follow

    def to_representation(self, instance):
        return {
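# Illustrative sketch, not part of the changeset: re-delivering the same Follow
# activity keeps the original approval state but rotates the local UUID (cf #830).
# The payload variable and the `approved` kwarg are hypothetical; this assumes
# the surrounding save()/create() accepts `approved` as the hunk above suggests.
serializer = FollowSerializer(data=duplicate_follow_payload)
serializer.is_valid(raise_exception=True)
follow = serializer.save(approved=True)
# On a re-delivery: follow.approved is unchanged, follow.uuid is freshly generated.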
@ -507,14 +572,40 @@ def get_additional_fields(data):
|
|||
return additional_fields
|
||||
|
||||
|
||||
class PaginatedCollectionSerializer(serializers.Serializer):
|
||||
type = serializers.ChoiceField(choices=["Collection"])
|
||||
PAGINATED_COLLECTION_JSONLD_MAPPING = {
|
||||
"totalItems": jsonld.first_val(contexts.AS.totalItems),
|
||||
"actor": jsonld.first_id(contexts.AS.actor),
|
||||
"attributedTo": jsonld.first_id(contexts.AS.attributedTo),
|
||||
"first": jsonld.first_id(contexts.AS.first),
|
||||
"last": jsonld.first_id(contexts.AS.last),
|
||||
"partOf": jsonld.first_id(contexts.AS.partOf),
|
||||
}
|
||||
|
||||
|
||||
class PaginatedCollectionSerializer(jsonld.JsonLdSerializer):
|
||||
type = serializers.ChoiceField(choices=[contexts.AS.Collection])
|
||||
totalItems = serializers.IntegerField(min_value=0)
|
||||
actor = serializers.URLField(max_length=500)
|
||||
actor = serializers.URLField(max_length=500, required=False)
|
||||
attributedTo = serializers.URLField(max_length=500, required=False)
|
||||
id = serializers.URLField(max_length=500)
|
||||
first = serializers.URLField(max_length=500)
|
||||
last = serializers.URLField(max_length=500)
|
||||
|
||||
class Meta:
|
||||
jsonld_mapping = PAGINATED_COLLECTION_JSONLD_MAPPING
|
||||
|
||||
def validate(self, validated_data):
|
||||
d = super().validate(validated_data)
|
||||
actor = d.get("actor")
|
||||
attributed_to = d.get("attributedTo")
|
||||
if not actor and not attributed_to:
|
||||
raise serializers.ValidationError(
|
||||
"You need to provide at least actor or attributedTo"
|
||||
)
|
||||
|
||||
d["attributedTo"] = attributed_to or actor
|
||||
return d
|
||||
|
||||
def to_representation(self, conf):
|
||||
paginator = Paginator(conf["items"], conf.get("page_size", 20))
|
||||
first = funkwhale_utils.set_query_parameter(conf["id"], page=1)
|
||||
|
@ -522,7 +613,9 @@ class PaginatedCollectionSerializer(serializers.Serializer):
|
|||
last = funkwhale_utils.set_query_parameter(conf["id"], page=paginator.num_pages)
|
||||
d = {
|
||||
"id": conf["id"],
|
||||
# XXX Stable release: remove the obsolete actor field
|
||||
"actor": conf["actor"].fid,
|
||||
"attributedTo": conf["actor"].fid,
|
||||
"totalItems": paginator.count,
|
||||
"type": conf.get("type", "Collection"),
|
||||
"current": current,
|
||||
|
@ -536,64 +629,77 @@ class PaginatedCollectionSerializer(serializers.Serializer):
|
|||
|
||||
|
||||
class LibrarySerializer(PaginatedCollectionSerializer):
|
||||
type = serializers.ChoiceField(choices=["Library"])
|
||||
type = serializers.ChoiceField(
|
||||
choices=[contexts.AS.Collection, contexts.FW.Library]
|
||||
)
|
||||
name = serializers.CharField()
|
||||
summary = serializers.CharField(allow_blank=True, allow_null=True, required=False)
|
||||
followers = serializers.URLField(max_length=500)
|
||||
audience = serializers.ChoiceField(
|
||||
choices=["", None, "https://www.w3.org/ns/activitystreams#Public"],
|
||||
choices=["", "./", None, "https://www.w3.org/ns/activitystreams#Public"],
|
||||
required=False,
|
||||
allow_null=True,
|
||||
allow_blank=True,
|
||||
)
|
||||
|
||||
class Meta:
|
||||
jsonld_mapping = funkwhale_utils.concat_dicts(
|
||||
PAGINATED_COLLECTION_JSONLD_MAPPING,
|
||||
{
|
||||
"name": jsonld.first_val(contexts.AS.name),
|
||||
"summary": jsonld.first_val(contexts.AS.summary),
|
||||
"audience": jsonld.first_id(contexts.AS.audience),
|
||||
"followers": jsonld.first_id(contexts.AS.followers),
|
||||
},
|
||||
)
|
||||
|
||||
def to_representation(self, library):
|
||||
conf = {
|
||||
"id": library.fid,
|
||||
"name": library.name,
|
||||
"summary": library.description,
|
||||
"page_size": 100,
|
||||
# XXX Stable release: remove the obsolete actor field
|
||||
"actor": library.actor,
|
||||
"attributedTo": library.actor,
|
||||
"items": library.uploads.for_federation(),
|
||||
"type": "Library",
|
||||
}
|
||||
r = super().to_representation(conf)
|
||||
r["audience"] = (
|
||||
"https://www.w3.org/ns/activitystreams#Public"
|
||||
if library.privacy_level == "everyone"
|
||||
else ""
|
||||
contexts.AS.Public if library.privacy_level == "everyone" else ""
|
||||
)
|
||||
r["followers"] = library.followers_url
|
||||
return r
|
||||
|
||||
def create(self, validated_data):
|
||||
actor = utils.retrieve_ap_object(
|
||||
validated_data["actor"],
|
||||
validated_data["attributedTo"],
|
||||
actor=self.context.get("fetch_actor"),
|
||||
queryset=models.Actor,
|
||||
serializer_class=ActorSerializer,
|
||||
)
|
||||
privacy = {"": "me", "./": "me", None: "me", contexts.AS.Public: "everyone"}
|
||||
library, created = music_models.Library.objects.update_or_create(
|
||||
fid=validated_data["id"],
|
||||
actor=actor,
|
||||
defaults={
|
||||
"uploads_count": validated_data["totalItems"],
|
||||
"name": validated_data["name"],
|
||||
"description": validated_data["summary"],
|
||||
"description": validated_data.get("summary"),
|
||||
"followers_url": validated_data["followers"],
|
||||
"privacy_level": "everyone"
|
||||
if validated_data["audience"]
|
||||
== "https://www.w3.org/ns/activitystreams#Public"
|
||||
else "me",
|
||||
"privacy_level": privacy[validated_data["audience"]],
|
||||
},
|
||||
)
|
||||
return library
|
||||
|
||||
|
||||
class CollectionPageSerializer(serializers.Serializer):
|
||||
type = serializers.ChoiceField(choices=["CollectionPage"])
|
||||
class CollectionPageSerializer(jsonld.JsonLdSerializer):
|
||||
type = serializers.ChoiceField(choices=[contexts.AS.CollectionPage])
|
||||
totalItems = serializers.IntegerField(min_value=0)
|
||||
items = serializers.ListField()
|
||||
actor = serializers.URLField(max_length=500)
|
||||
actor = serializers.URLField(max_length=500, required=False)
|
||||
attributedTo = serializers.URLField(max_length=500, required=False)
|
||||
id = serializers.URLField(max_length=500)
|
||||
first = serializers.URLField(max_length=500)
|
||||
last = serializers.URLField(max_length=500)
|
||||
|
@ -601,6 +707,19 @@ class CollectionPageSerializer(serializers.Serializer):
|
|||
prev = serializers.URLField(max_length=500, required=False)
|
||||
partOf = serializers.URLField(max_length=500)
|
||||
|
||||
class Meta:
|
||||
jsonld_mapping = {
|
||||
"totalItems": jsonld.first_val(contexts.AS.totalItems),
|
||||
"items": jsonld.raw(contexts.AS.items),
|
||||
"actor": jsonld.first_id(contexts.AS.actor),
|
||||
"attributedTo": jsonld.first_id(contexts.AS.attributedTo),
|
||||
"first": jsonld.first_id(contexts.AS.first),
|
||||
"last": jsonld.first_id(contexts.AS.last),
|
||||
"next": jsonld.first_id(contexts.AS.next),
|
||||
"prev": jsonld.first_id(contexts.AS.prev),
|
||||
"partOf": jsonld.first_id(contexts.AS.partOf),
|
||||
}
|
||||
|
||||
def validate_items(self, v):
|
||||
item_serializer = self.context.get("item_serializer")
|
||||
if not item_serializer:
|
||||
|
@ -626,7 +745,9 @@ class CollectionPageSerializer(serializers.Serializer):
|
|||
d = {
|
||||
"id": id,
|
||||
"partOf": conf["id"],
|
||||
# XXX Stable release: remove the obsolete actor field
|
||||
"actor": conf["actor"].fid,
|
||||
"attributedTo": conf["actor"].fid,
|
||||
"totalItems": page.paginator.count,
|
||||
"type": "CollectionPage",
|
||||
"first": first,
|
||||
|
@ -654,14 +775,46 @@ class CollectionPageSerializer(serializers.Serializer):
|
|||
return d
|
||||
|
||||
|
||||
class MusicEntitySerializer(serializers.Serializer):
|
||||
MUSIC_ENTITY_JSONLD_MAPPING = {
|
||||
"name": jsonld.first_val(contexts.AS.name),
|
||||
"published": jsonld.first_val(contexts.AS.published),
|
||||
"musicbrainzId": jsonld.first_val(contexts.FW.musicbrainzId),
|
||||
"attributedTo": jsonld.first_id(contexts.AS.attributedTo),
|
||||
}
|
||||
|
||||
|
||||
class MusicEntitySerializer(jsonld.JsonLdSerializer):
|
||||
id = serializers.URLField(max_length=500)
|
||||
published = serializers.DateTimeField()
|
||||
musicbrainzId = serializers.UUIDField(allow_null=True, required=False)
|
||||
name = serializers.CharField(max_length=1000)
|
||||
attributedTo = serializers.URLField(max_length=500, allow_null=True, required=False)
|
||||
updateable_fields = []
|
||||
|
||||
def update(self, instance, validated_data):
|
||||
attributed_to_fid = validated_data.get("attributedTo")
|
||||
if attributed_to_fid:
|
||||
validated_data["attributedTo"] = actors.get_actor(attributed_to_fid)
|
||||
updated_fields = funkwhale_utils.get_updated_fields(
|
||||
self.updateable_fields, validated_data, instance
|
||||
)
|
||||
if updated_fields:
|
||||
return music_tasks.update_library_entity(instance, updated_fields)
|
||||
|
||||
return instance
|
||||
|
||||
|
||||
class ArtistSerializer(MusicEntitySerializer):
|
||||
updateable_fields = [
|
||||
("name", "name"),
|
||||
("musicbrainzId", "mbid"),
|
||||
("attributedTo", "attributed_to"),
|
||||
]
|
||||
|
||||
class Meta:
|
||||
model = music_models.Artist
|
||||
jsonld_mapping = MUSIC_ENTITY_JSONLD_MAPPING
|
||||
|
||||
def to_representation(self, instance):
|
||||
d = {
|
||||
"type": "Artist",
|
||||
|
@ -669,6 +822,9 @@ class ArtistSerializer(MusicEntitySerializer):
|
|||
"name": instance.name,
|
||||
"published": instance.creation_date.isoformat(),
|
||||
"musicbrainzId": str(instance.mbid) if instance.mbid else None,
|
||||
"attributedTo": instance.attributed_to.fid
|
||||
if instance.attributed_to
|
||||
else None,
|
||||
}
|
||||
|
||||
if self.context.get("include_ap_context", self.parent is None):
|
||||
|
@ -682,6 +838,23 @@ class AlbumSerializer(MusicEntitySerializer):
|
|||
cover = LinkSerializer(
|
||||
allowed_mimetypes=["image/*"], allow_null=True, required=False
|
||||
)
|
||||
updateable_fields = [
|
||||
("name", "title"),
|
||||
("musicbrainzId", "mbid"),
|
||||
("attributedTo", "attributed_to"),
|
||||
("released", "release_date"),
|
||||
]
|
||||
|
||||
class Meta:
|
||||
model = music_models.Album
|
||||
jsonld_mapping = funkwhale_utils.concat_dicts(
|
||||
MUSIC_ENTITY_JSONLD_MAPPING,
|
||||
{
|
||||
"released": jsonld.first_val(contexts.FW.released),
|
||||
"artists": jsonld.first_attr(contexts.FW.artists, "@list"),
|
||||
"cover": jsonld.first_obj(contexts.FW.cover),
|
||||
},
|
||||
)
|
||||
|
||||
def to_representation(self, instance):
|
||||
d = {
|
||||
|
@ -698,34 +871,21 @@ class AlbumSerializer(MusicEntitySerializer):
|
|||
instance.artist, context={"include_ap_context": False}
|
||||
).data
|
||||
],
|
||||
"attributedTo": instance.attributed_to.fid
|
||||
if instance.attributed_to
|
||||
else None,
|
||||
}
|
||||
if instance.cover:
|
||||
d["cover"] = {
|
||||
"type": "Link",
|
||||
"href": utils.full_url(instance.cover.url),
|
||||
"mediaType": mimetypes.guess_type(instance.cover.path)[0]
|
||||
"mediaType": mimetypes.guess_type(instance.cover_path)[0]
|
||||
or "image/jpeg",
|
||||
}
|
||||
if self.context.get("include_ap_context", self.parent is None):
|
||||
d["@context"] = AP_CONTEXT
|
||||
return d
|
||||
|
||||
def get_create_data(self, validated_data):
|
||||
artist_data = validated_data["artists"][0]
|
||||
artist = ArtistSerializer(
|
||||
context={"activity": self.context.get("activity")}
|
||||
).create(artist_data)
|
||||
|
||||
return {
|
||||
"mbid": validated_data.get("musicbrainzId"),
|
||||
"fid": validated_data["id"],
|
||||
"title": validated_data["name"],
|
||||
"creation_date": validated_data["published"],
|
||||
"artist": artist,
|
||||
"release_date": validated_data.get("released"),
|
||||
"from_activity": self.context.get("activity"),
|
||||
}
|
||||
|
||||
|
||||
class TrackSerializer(MusicEntitySerializer):
|
||||
position = serializers.IntegerField(min_value=0, allow_null=True, required=False)
|
||||
|
@ -735,6 +895,30 @@ class TrackSerializer(MusicEntitySerializer):
|
|||
license = serializers.URLField(allow_null=True, required=False)
|
||||
copyright = serializers.CharField(allow_null=True, required=False)
|
||||
|
||||
updateable_fields = [
|
||||
("name", "title"),
|
||||
("musicbrainzId", "mbid"),
|
||||
("attributedTo", "attributed_to"),
|
||||
("disc", "disc_number"),
|
||||
("position", "position"),
|
||||
("copyright", "copyright"),
|
||||
("license", "license"),
|
||||
]
|
||||
|
||||
class Meta:
|
||||
model = music_models.Track
|
||||
jsonld_mapping = funkwhale_utils.concat_dicts(
|
||||
MUSIC_ENTITY_JSONLD_MAPPING,
|
||||
{
|
||||
"album": jsonld.first_obj(contexts.FW.album),
|
||||
"artists": jsonld.first_attr(contexts.FW.artists, "@list"),
|
||||
"copyright": jsonld.first_val(contexts.FW.copyright),
|
||||
"disc": jsonld.first_val(contexts.FW.disc),
|
||||
"license": jsonld.first_id(contexts.FW.license),
|
||||
"position": jsonld.first_val(contexts.FW.position),
|
||||
},
|
||||
)
|
||||
|
||||
def to_representation(self, instance):
|
||||
d = {
|
||||
"type": "Track",
|
||||
|
@ -756,6 +940,9 @@ class TrackSerializer(MusicEntitySerializer):
|
|||
"album": AlbumSerializer(
|
||||
instance.album, context={"include_ap_context": False}
|
||||
).data,
|
||||
"attributedTo": instance.attributed_to.fid
|
||||
if instance.attributed_to
|
||||
else None,
|
||||
}
|
||||
|
||||
if self.context.get("include_ap_context", self.parent is None):
|
||||
|
@ -765,16 +952,56 @@ class TrackSerializer(MusicEntitySerializer):
|
|||
def create(self, validated_data):
|
||||
from funkwhale_api.music import tasks as music_tasks
|
||||
|
||||
metadata = music_tasks.federation_audio_track_to_metadata(validated_data)
|
||||
references = {}
|
||||
actors_to_fetch = set()
|
||||
actors_to_fetch.add(
|
||||
funkwhale_utils.recursive_getattr(
|
||||
validated_data, "attributedTo", permissive=True
|
||||
)
|
||||
)
|
||||
actors_to_fetch.add(
|
||||
funkwhale_utils.recursive_getattr(
|
||||
validated_data, "album.attributedTo", permissive=True
|
||||
)
|
||||
)
|
||||
artists = (
|
||||
funkwhale_utils.recursive_getattr(
|
||||
validated_data, "artists", permissive=True
|
||||
)
|
||||
or []
|
||||
)
|
||||
album_artists = (
|
||||
funkwhale_utils.recursive_getattr(
|
||||
validated_data, "album.artists", permissive=True
|
||||
)
|
||||
or []
|
||||
)
|
||||
for artist in artists + album_artists:
|
||||
actors_to_fetch.add(artist.get("attributedTo"))
|
||||
|
||||
for url in actors_to_fetch:
|
||||
if not url:
|
||||
continue
|
||||
references[url] = actors.get_actor(url)
|
||||
|
||||
metadata = music_tasks.federation_audio_track_to_metadata(
|
||||
validated_data, references
|
||||
)
|
||||
|
||||
from_activity = self.context.get("activity")
|
||||
if from_activity:
|
||||
metadata["from_activity_id"] = from_activity.pk
|
||||
track = music_tasks.get_track_from_import_metadata(metadata, update_cover=True)
|
||||
return track
|
||||
|
||||
def update(self, obj, validated_data):
|
||||
if validated_data.get("license"):
|
||||
validated_data["license"] = licenses.match(validated_data["license"])
|
||||
return super().update(obj, validated_data)
|
||||
|
||||
class UploadSerializer(serializers.Serializer):
|
||||
type = serializers.ChoiceField(choices=["Audio"])
|
||||
|
||||
class UploadSerializer(jsonld.JsonLdSerializer):
|
||||
type = serializers.ChoiceField(choices=[contexts.AS.Audio])
|
||||
id = serializers.URLField(max_length=500)
|
||||
library = serializers.URLField(max_length=500)
|
||||
url = LinkSerializer(allowed_mimetypes=["audio/*"])
|
||||
|
@ -786,6 +1013,19 @@ class UploadSerializer(serializers.Serializer):
|
|||
|
||||
track = TrackSerializer(required=True)
|
||||
|
||||
class Meta:
|
||||
model = music_models.Upload
|
||||
jsonld_mapping = {
|
||||
"track": jsonld.first_obj(contexts.FW.track),
|
||||
"library": jsonld.first_id(contexts.FW.library),
|
||||
"url": jsonld.first_obj(contexts.AS.url),
|
||||
"published": jsonld.first_val(contexts.AS.published),
|
||||
"updated": jsonld.first_val(contexts.AS.updated),
|
||||
"duration": jsonld.first_val(contexts.AS.duration),
|
||||
"bitrate": jsonld.first_val(contexts.FW.bitrate),
|
||||
"size": jsonld.first_val(contexts.FW.size),
|
||||
}
|
||||
|
||||
def validate_url(self, v):
|
||||
try:
|
||||
v["href"]
|
||||
|
@ -870,26 +1110,6 @@ class UploadSerializer(serializers.Serializer):
|
|||
return d
|
||||
|
||||
|
||||
class CollectionSerializer(serializers.Serializer):
|
||||
def to_representation(self, conf):
|
||||
d = {
|
||||
"id": conf["id"],
|
||||
"actor": conf["actor"].fid,
|
||||
"totalItems": len(conf["items"]),
|
||||
"type": "Collection",
|
||||
"items": [
|
||||
conf["item_serializer"](
|
||||
i, context={"actor": conf["actor"], "include_ap_context": False}
|
||||
).data
|
||||
for i in conf["items"]
|
||||
],
|
||||
}
|
||||
|
||||
if self.context.get("include_ap_context", True):
|
||||
d["@context"] = AP_CONTEXT
|
||||
return d
|
||||
|
||||
|
||||
class NodeInfoLinkSerializer(serializers.Serializer):
|
||||
href = serializers.URLField()
|
||||
rel = serializers.URLField()
|
||||
|
|
|
@ -1,9 +1,11 @@
|
|||
import datetime
|
||||
import json
|
||||
import logging
|
||||
import os
|
||||
import requests
|
||||
|
||||
from django.conf import settings
|
||||
from django.db import transaction
|
||||
from django.db.models import Q, F
|
||||
from django.utils import timezone
|
||||
from dynamic_preferences.registries import global_preferences_registry
|
||||
|
@ -11,13 +13,17 @@ from requests.exceptions import RequestException
|
|||
|
||||
from funkwhale_api.common import preferences
|
||||
from funkwhale_api.common import session
|
||||
from funkwhale_api.common import utils as common_utils
|
||||
from funkwhale_api.music import models as music_models
|
||||
from funkwhale_api.taskapp import celery
|
||||
|
||||
from . import actors
|
||||
from . import jsonld
|
||||
from . import keys
|
||||
from . import models, signing
|
||||
from . import serializers
|
||||
from . import routes
|
||||
from . import utils
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
@ -97,7 +103,8 @@ def dispatch_outbox(activity):
|
|||
inbox_items = activity.inbox_items.filter(is_read=False).select_related()
|
||||
|
||||
if inbox_items.exists():
|
||||
dispatch_inbox.delay(activity_id=activity.pk, call_handlers=False)
|
||||
call_handlers = activity.type in ["Follow"]
|
||||
dispatch_inbox.delay(activity_id=activity.pk, call_handlers=call_handlers)
|
||||
|
||||
if not preferences.get("federation__enabled"):
|
||||
# federation is disabled, we only deliver to local recipients
|
||||
|
@ -185,9 +192,44 @@ def update_domain_nodeinfo(domain):
|
|||
nodeinfo = {"status": "ok", "payload": fetch_nodeinfo(domain.name)}
|
||||
except (requests.RequestException, serializers.serializers.ValidationError) as e:
|
||||
nodeinfo = {"status": "error", "error": str(e)}
|
||||
|
||||
service_actor_id = common_utils.recursive_getattr(
|
||||
nodeinfo, "payload.metadata.actorId", permissive=True
|
||||
)
|
||||
try:
|
||||
domain.service_actor = (
|
||||
utils.retrieve_ap_object(
|
||||
service_actor_id,
|
||||
actor=actors.get_service_actor(),
|
||||
queryset=models.Actor,
|
||||
serializer_class=serializers.ActorSerializer,
|
||||
)
|
||||
if service_actor_id
|
||||
else None
|
||||
)
|
||||
except (serializers.serializers.ValidationError, RequestException) as e:
|
||||
logger.warning(
|
||||
"Cannot fetch system actor for domain %s: %s", domain.name, str(e)
|
||||
)
|
||||
domain.nodeinfo_fetch_date = now
|
||||
domain.nodeinfo = nodeinfo
|
||||
domain.save(update_fields=["nodeinfo", "nodeinfo_fetch_date"])
|
||||
domain.save(update_fields=["nodeinfo", "nodeinfo_fetch_date", "service_actor"])
|
||||
|
||||
|
||||
@celery.app.task(name="federation.refresh_nodeinfo_known_nodes")
|
||||
def refresh_nodeinfo_known_nodes():
|
||||
"""
|
||||
Trigger a node info refresh on all nodes that weren't refreshed since
|
||||
settings.NODEINFO_REFRESH_DELAY
|
||||
"""
|
||||
limit = timezone.now() - datetime.timedelta(seconds=settings.NODEINFO_REFRESH_DELAY)
|
||||
candidates = models.Domain.objects.external().exclude(
|
||||
nodeinfo_fetch_date__gte=limit
|
||||
)
|
||||
names = candidates.values_list("name", flat=True)
|
||||
logger.info("Launching periodic nodeinfo refresh on %s domains", len(names))
|
||||
for domain_name in names:
|
||||
update_domain_nodeinfo.delay(domain_name=domain_name)
|
||||
|
||||
|
||||
def delete_qs(qs):
|
||||
|
@ -240,3 +282,83 @@ def rotate_actor_key(actor):
|
|||
actor.private_key = pair[0].decode()
|
||||
actor.public_key = pair[1].decode()
|
||||
actor.save(update_fields=["private_key", "public_key"])
|
||||
|
||||
|
||||
@celery.app.task(name="federation.fetch")
|
||||
@transaction.atomic
|
||||
@celery.require_instance(
|
||||
models.Fetch.objects.filter(status="pending").select_related("actor"), "fetch"
|
||||
)
|
||||
def fetch(fetch):
|
||||
actor = fetch.actor
|
||||
auth = signing.get_auth(actor.private_key, actor.private_key_id)
|
||||
|
||||
def error(code, **kwargs):
|
||||
fetch.status = "errored"
|
||||
fetch.fetch_date = timezone.now()
|
||||
fetch.detail = {"error_code": code}
|
||||
fetch.detail.update(kwargs)
|
||||
fetch.save(update_fields=["fetch_date", "status", "detail"])
|
||||
|
||||
try:
|
||||
response = session.get_session().get(
|
||||
auth=auth,
|
||||
url=fetch.url,
|
||||
timeout=5,
|
||||
verify=settings.EXTERNAL_REQUESTS_VERIFY_SSL,
|
||||
headers={"Content-Type": "application/activity+json"},
|
||||
)
|
||||
logger.debug("Remote answered with %s", response.status_code)
|
||||
response.raise_for_status()
|
||||
except requests.exceptions.HTTPError as e:
|
||||
return error("http", status_code=e.response.status_code if e.response else None)
|
||||
except requests.exceptions.Timeout:
|
||||
return error("timeout")
|
||||
except requests.exceptions.ConnectionError as e:
|
||||
return error("connection", message=str(e))
|
||||
except requests.RequestException as e:
|
||||
return error("request", message=str(e))
|
||||
except Exception as e:
|
||||
return error("unhandled", message=str(e))
|
||||
|
||||
try:
|
||||
payload = response.json()
|
||||
except json.decoder.JSONDecodeError:
|
||||
        return error("invalid_json")

    try:
        doc = jsonld.expand(payload)
    except ValueError:
        return error("invalid_jsonld")

    try:
        type = doc.get("@type", [])[0]
    except IndexError:
        return error("missing_jsonld_type")
    try:
        serializer_class = fetch.serializers[type]
        model = serializer_class.Meta.model
    except (KeyError, AttributeError):
        fetch.status = "skipped"
        fetch.fetch_date = timezone.now()
        fetch.detail = {"reason": "unhandled_type", "type": type}
        return fetch.save(update_fields=["fetch_date", "status", "detail"])
    try:
        id = doc.get("@id")
    except IndexError:
        existing = None
    else:
        existing = model.objects.filter(fid=id).first()

    serializer = serializer_class(existing, data=payload)
    if not serializer.is_valid():
        return error("validation", validation_errors=serializer.errors)
    try:
        serializer.save()
    except Exception as e:
        error("save", message=str(e))
        raise

    fetch.status = "finished"
    fetch.fetch_date = timezone.now()
    return fetch.save(update_fields=["fetch_date", "status"])
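# Illustrative sketch, not part of the changeset: how a caller could inspect the
# outcome recorded by the fetch task above. `some_fetch_id` is hypothetical; the
# status/detail values are the ones written by error() and the branches above.
fetched = models.Fetch.objects.get(pk=some_fetch_id)
if fetched.status == "errored":
    print(fetched.detail["error_code"])   # e.g. "timeout", "invalid_json", "validation"
elif fetched.status == "skipped":
    print(fetched.detail["type"])         # the unhandled JSON-LD @type
elif fetched.status == "finished":
    print(fetched.fetch_date)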
@ -8,6 +8,7 @@ music_router = routers.SimpleRouter(trailing_slash=False)
|
|||
|
||||
router.register(r"federation/shared", views.SharedViewSet, "shared")
|
||||
router.register(r"federation/actors", views.ActorViewSet, "actors")
|
||||
router.register(r"federation/edits", views.EditViewSet, "edits")
|
||||
router.register(r".well-known", views.WellKnownViewSet, "well-known")
|
||||
|
||||
music_router.register(r"libraries", views.MusicLibraryViewSet, "libraries")
|
||||
|
|
|
@ -1,6 +1,7 @@
|
|||
import unicodedata
|
||||
import re
|
||||
from django.conf import settings
|
||||
from django.db.models import Q
|
||||
|
||||
from funkwhale_api.common import session
|
||||
from funkwhale_api.moderation import models as moderation_models
|
||||
|
@ -61,7 +62,7 @@ def slugify_username(username):
|
|||
|
||||
|
||||
def retrieve_ap_object(
    fid, actor=None, serializer_class=None, queryset=None, apply_instance_policies=True
    fid, actor, serializer_class=None, queryset=None, apply_instance_policies=True
):
    from . import activity

@ -100,10 +101,33 @@ def retrieve_ap_object(
|
|||
    except KeyError:
        pass
    else:
        if apply_instance_policies and activity.should_reject(id=id, payload=data):
        if apply_instance_policies and activity.should_reject(fid=id, payload=data):
            raise exceptions.BlockedActorOrDomain()
    if not serializer_class:
        return data
    serializer = serializer_class(data=data)
    serializer = serializer_class(data=data, context={"fetch_actor": actor})
    serializer.is_valid(raise_exception=True)
    return serializer.save()


def get_domain_query_from_url(domain, url_field="fid"):
    """
    Given a domain name and a field, will return a Q() object
    to match objects that have this domain in the given field.
    """

    query = Q(**{"{}__startswith".format(url_field): "http://{}/".format(domain)})
    query = query | Q(
        **{"{}__startswith".format(url_field): "https://{}/".format(domain)}
    )
    return query


def is_local(url):
    if not url:
        return True

    d = settings.FEDERATION_HOSTNAME
    return url.startswith("http://{}/".format(d)) or url.startswith(
        "https://{}/".format(d)
    )
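# Illustrative sketch, not part of the changeset: what the two helpers above
# produce. The domain names are hypothetical; FEDERATION_HOSTNAME is assumed
# to be "pod.example" for the is_local() calls.
q = get_domain_query_from_url("open.audio", url_field="fid")
# q <=> Q(fid__startswith="http://open.audio/") | Q(fid__startswith="https://open.audio/")
is_local("https://pod.example/federation/actors/demo")  # True
is_local("https://open.audio/federation/actors/demo")   # False
is_local(None)                                          # True (treated as local)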
@ -7,6 +7,7 @@ from rest_framework.decorators import action
|
|||
|
||||
from funkwhale_api.common import preferences
|
||||
from funkwhale_api.music import models as music_models
|
||||
from funkwhale_api.music import utils as music_utils
|
||||
|
||||
from . import activity, authentication, models, renderers, serializers, utils, webfinger
|
||||
|
||||
|
@ -21,7 +22,7 @@ class FederationMixin(object):
|
|||
class SharedViewSet(FederationMixin, viewsets.GenericViewSet):
|
||||
permission_classes = []
|
||||
authentication_classes = [authentication.SignatureAuthentication]
|
||||
renderer_classes = [renderers.ActivityPubRenderer]
|
||||
renderer_classes = renderers.get_ap_renderers()
|
||||
|
||||
@action(methods=["post"], detail=False)
|
||||
def inbox(self, request, *args, **kwargs):
|
||||
|
@ -38,7 +39,7 @@ class ActorViewSet(FederationMixin, mixins.RetrieveModelMixin, viewsets.GenericV
|
|||
lookup_field = "preferred_username"
|
||||
authentication_classes = [authentication.SignatureAuthentication]
|
||||
permission_classes = []
|
||||
renderer_classes = [renderers.ActivityPubRenderer]
|
||||
renderer_classes = renderers.get_ap_renderers()
|
||||
queryset = models.Actor.objects.local().select_related("user")
|
||||
serializer_class = serializers.ActorSerializer
|
||||
|
||||
|
@ -69,6 +70,15 @@ class ActorViewSet(FederationMixin, mixins.RetrieveModelMixin, viewsets.GenericV
|
|||
return response.Response({})
|
||||
|
||||
|
||||
class EditViewSet(FederationMixin, mixins.RetrieveModelMixin, viewsets.GenericViewSet):
|
||||
lookup_field = "uuid"
|
||||
authentication_classes = [authentication.SignatureAuthentication]
|
||||
permission_classes = []
|
||||
renderer_classes = renderers.get_ap_renderers()
|
||||
# queryset = common_models.Mutation.objects.local().select_related()
|
||||
# serializer_class = serializers.ActorSerializer
|
||||
|
||||
|
||||
class WellKnownViewSet(viewsets.GenericViewSet):
|
||||
authentication_classes = []
|
||||
permission_classes = []
|
||||
|
@ -137,7 +147,7 @@ class MusicLibraryViewSet(
|
|||
):
|
||||
authentication_classes = [authentication.SignatureAuthentication]
|
||||
permission_classes = []
|
||||
renderer_classes = [renderers.ActivityPubRenderer]
|
||||
renderer_classes = renderers.get_ap_renderers()
|
||||
serializer_class = serializers.LibrarySerializer
|
||||
queryset = music_models.Library.objects.all().select_related("actor")
|
||||
lookup_field = "uuid"
|
||||
|
@ -192,18 +202,27 @@ class MusicUploadViewSet(
|
|||
):
|
||||
authentication_classes = [authentication.SignatureAuthentication]
|
||||
permission_classes = []
|
||||
renderer_classes = [renderers.ActivityPubRenderer]
|
||||
queryset = music_models.Upload.objects.none()
|
||||
renderer_classes = renderers.get_ap_renderers()
|
||||
queryset = music_models.Upload.objects.local().select_related(
|
||||
"library__actor", "track__artist", "track__album__artist"
|
||||
)
|
||||
serializer_class = serializers.UploadSerializer
|
||||
lookup_field = "uuid"
|
||||
|
||||
def get_queryset(self):
|
||||
queryset = super().get_queryset()
|
||||
actor = music_utils.get_actor_from_request(self.request)
|
||||
return queryset.playable_by(actor)
|
||||
|
||||
|
||||
class MusicArtistViewSet(
|
||||
FederationMixin, mixins.RetrieveModelMixin, viewsets.GenericViewSet
|
||||
):
|
||||
authentication_classes = [authentication.SignatureAuthentication]
|
||||
permission_classes = []
|
||||
renderer_classes = [renderers.ActivityPubRenderer]
|
||||
queryset = music_models.Artist.objects.none()
|
||||
renderer_classes = renderers.get_ap_renderers()
|
||||
queryset = music_models.Artist.objects.local()
|
||||
serializer_class = serializers.ArtistSerializer
|
||||
lookup_field = "uuid"
|
||||
|
||||
|
||||
|
@ -212,8 +231,9 @@ class MusicAlbumViewSet(
|
|||
):
|
||||
authentication_classes = [authentication.SignatureAuthentication]
|
||||
permission_classes = []
|
||||
renderer_classes = [renderers.ActivityPubRenderer]
|
||||
queryset = music_models.Album.objects.none()
|
||||
renderer_classes = renderers.get_ap_renderers()
|
||||
queryset = music_models.Album.objects.local().select_related("artist")
|
||||
serializer_class = serializers.AlbumSerializer
|
||||
lookup_field = "uuid"
|
||||
|
||||
|
||||
|
@ -222,6 +242,9 @@ class MusicTrackViewSet(
|
|||
):
|
||||
authentication_classes = [authentication.SignatureAuthentication]
|
||||
permission_classes = []
|
||||
renderer_classes = [renderers.ActivityPubRenderer]
|
||||
queryset = music_models.Track.objects.none()
|
||||
renderer_classes = renderers.get_ap_renderers()
|
||||
queryset = music_models.Track.objects.local().select_related(
|
||||
"album__artist", "artist"
|
||||
)
|
||||
serializer_class = serializers.TrackSerializer
|
||||
lookup_field = "uuid"
|
||||
|
|
|
@ -0,0 +1,12 @@
|
|||
from funkwhale_api.moderation import filters as moderation_filters
|
||||
|
||||
from . import models
|
||||
|
||||
|
||||
class ListeningFilter(moderation_filters.HiddenContentFilterSet):
|
||||
class Meta:
|
||||
model = models.Listening
|
||||
hidden_content_fields_mapping = moderation_filters.USER_FILTER_CONFIG[
|
||||
"LISTENING"
|
||||
]
|
||||
fields = ["hidden"]
|
|
@ -1,5 +1,4 @@
|
|||
from rest_framework import mixins, viewsets
|
||||
from rest_framework.permissions import IsAuthenticatedOrReadOnly
|
||||
|
||||
from django.db.models import Prefetch
|
||||
|
||||
|
@ -7,7 +6,9 @@ from funkwhale_api.activity import record
|
|||
from funkwhale_api.common import fields, permissions
|
||||
from funkwhale_api.music.models import Track
|
||||
from funkwhale_api.music import utils as music_utils
|
||||
from . import models, serializers
|
||||
from . import filters, models, serializers
|
||||
|
||||
from funkwhale_api.users.oauth import permissions as oauth_permissions
|
||||
|
||||
|
||||
class ListeningViewSet(
|
||||
|
@ -19,12 +20,15 @@ class ListeningViewSet(
|
|||
|
||||
serializer_class = serializers.ListeningSerializer
|
||||
queryset = models.Listening.objects.all().select_related("user")
|
||||
|
||||
permission_classes = [
|
||||
permissions.ConditionalAuthentication,
|
||||
oauth_permissions.ScopePermission,
|
||||
permissions.OwnerPermission,
|
||||
IsAuthenticatedOrReadOnly,
|
||||
]
|
||||
required_scope = "listenings"
|
||||
anonymous_policy = "setting"
|
||||
owner_checks = ["write"]
|
||||
filterset_class = filters.ListeningFilter
|
||||
|
||||
def get_serializer_class(self):
|
||||
if self.request.method.lower() in ["head", "get", "options"]:
|
||||
|
|
|
@ -2,6 +2,8 @@ import memoize.djangocache
|
|||
|
||||
import funkwhale_api
|
||||
from funkwhale_api.common import preferences
|
||||
from funkwhale_api.federation import actors
|
||||
from funkwhale_api.music import utils as music_utils
|
||||
|
||||
from . import stats
|
||||
|
||||
|
@ -19,6 +21,7 @@ def get():
|
|||
"openRegistrations": preferences.get("users__registration_enabled"),
|
||||
"usage": {"users": {"total": 0, "activeHalfyear": 0, "activeMonth": 0}},
|
||||
"metadata": {
|
||||
"actorId": actors.get_service_actor().fid,
|
||||
"private": preferences.get("instance__nodeinfo_private"),
|
||||
"shortDescription": preferences.get("instance__short_description"),
|
||||
"longDescription": preferences.get("instance__long_description"),
|
||||
|
@ -32,6 +35,7 @@ def get():
|
|||
"common__api_authentication_required"
|
||||
),
|
||||
},
|
||||
"supportedUploadExtensions": music_utils.SUPPORTED_EXTENSIONS,
|
||||
},
|
||||
}
|
||||
if share_stats:
|
||||
|
|
|
@ -5,7 +5,7 @@ from rest_framework import views
|
|||
from rest_framework.response import Response
|
||||
|
||||
from funkwhale_api.common import preferences
|
||||
from funkwhale_api.users.permissions import HasUserPermission
|
||||
from funkwhale_api.users.oauth import permissions as oauth_permissions
|
||||
|
||||
from . import nodeinfo
|
||||
|
||||
|
@ -14,8 +14,8 @@ NODEINFO_2_CONTENT_TYPE = "application/json; profile=http://nodeinfo.diaspora.so
|
|||
|
||||
class AdminSettings(preferences_viewsets.GlobalPreferencesViewSet):
|
||||
pagination_class = None
|
||||
permission_classes = (HasUserPermission,)
|
||||
required_permissions = ["settings"]
|
||||
permission_classes = [oauth_permissions.ScopePermission]
|
||||
required_scope = "instance:settings"
|
||||
|
||||
|
||||
class InstanceSettings(views.APIView):
|
||||
|
|
|
@ -1,27 +1,238 @@
|
|||
from django import forms
|
||||
from django.db.models import Q
|
||||
from django.conf import settings
|
||||
|
||||
import django_filters
|
||||
from django_filters import rest_framework as filters
|
||||
|
||||
from funkwhale_api.common import fields
|
||||
from funkwhale_api.common import search
|
||||
|
||||
from funkwhale_api.federation import models as federation_models
|
||||
from funkwhale_api.federation import utils as federation_utils
|
||||
from funkwhale_api.moderation import models as moderation_models
|
||||
from funkwhale_api.music import models as music_models
|
||||
from funkwhale_api.users import models as users_models
|
||||
|
||||
|
||||
class ManageUploadFilterSet(filters.FilterSet):
|
||||
q = fields.SearchFilter(
|
||||
search_fields=[
|
||||
"track__title",
|
||||
"track__album__title",
|
||||
"track__artist__name",
|
||||
"source",
|
||||
]
|
||||
class ActorField(forms.CharField):
    def clean(self, value):
        value = super().clean(value)
        if not value:
            return value

        parts = value.split("@")

        return {
            "username": parts[0],
            "domain": parts[1] if len(parts) > 1 else settings.FEDERATION_HOSTNAME,
        }


def get_actor_filter(actor_field):
    def handler(v):
        if not v:
            return Q(**{actor_field: None})
        return Q(
            **{
                "{}__preferred_username__iexact".format(actor_field): v["username"],
                "{}__domain__name__iexact".format(actor_field): v["domain"],
            }
        )

    return {"field": ActorField(), "handler": handler}


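# Illustrative sketch, not part of the changeset: how the "account" filter built
# by get_actor_filter() resolves a value. The handle "alice@open.audio" and the
# field name are hypothetical examples.
account_filter = get_actor_filter("library__actor")
cleaned = account_filter["field"].clean("alice@open.audio")
# cleaned == {"username": "alice", "domain": "open.audio"}
q = account_filter["handler"](cleaned)
# q matches rows where library__actor__preferred_username__iexact="alice"
# and library__actor__domain__name__iexact="open.audio"
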
class ManageArtistFilterSet(filters.FilterSet):
|
||||
q = fields.SmartSearchFilter(
|
||||
config=search.SearchConfig(
|
||||
search_fields={
|
||||
"name": {"to": "name"},
|
||||
"fid": {"to": "fid"},
|
||||
"mbid": {"to": "mbid"},
|
||||
},
|
||||
filter_fields={
|
||||
"uuid": {"to": "uuid"},
|
||||
"domain": {
|
||||
"handler": lambda v: federation_utils.get_domain_query_from_url(v)
|
||||
},
|
||||
"library_id": {
|
||||
"to": "tracks__uploads__library_id",
|
||||
"field": forms.IntegerField(),
|
||||
"distinct": True,
|
||||
},
|
||||
},
|
||||
)
|
||||
)
|
||||
|
||||
class Meta:
|
||||
model = music_models.Artist
|
||||
fields = ["q", "name", "mbid", "fid"]
|
||||
|
||||
|
||||
class ManageAlbumFilterSet(filters.FilterSet):
|
||||
q = fields.SmartSearchFilter(
|
||||
config=search.SearchConfig(
|
||||
search_fields={
|
||||
"title": {"to": "title"},
|
||||
"fid": {"to": "fid"},
|
||||
"artist": {"to": "artist__name"},
|
||||
"mbid": {"to": "mbid"},
|
||||
},
|
||||
filter_fields={
|
||||
"uuid": {"to": "uuid"},
|
||||
"artist_id": {"to": "artist_id", "field": forms.IntegerField()},
|
||||
"domain": {
|
||||
"handler": lambda v: federation_utils.get_domain_query_from_url(v)
|
||||
},
|
||||
"library_id": {
|
||||
"to": "tracks__uploads__library_id",
|
||||
"field": forms.IntegerField(),
|
||||
"distinct": True,
|
||||
},
|
||||
},
|
||||
)
|
||||
)
|
||||
|
||||
class Meta:
|
||||
model = music_models.Album
|
||||
fields = ["q", "title", "mbid", "fid", "artist"]
|
||||
|
||||
|
||||
class ManageTrackFilterSet(filters.FilterSet):
|
||||
q = fields.SmartSearchFilter(
|
||||
config=search.SearchConfig(
|
||||
search_fields={
|
||||
"title": {"to": "title"},
|
||||
"fid": {"to": "fid"},
|
||||
"mbid": {"to": "mbid"},
|
||||
"artist": {"to": "artist__name"},
|
||||
"album": {"to": "album__title"},
|
||||
"album_artist": {"to": "album__artist__name"},
|
||||
"copyright": {"to": "copyright"},
|
||||
},
|
||||
filter_fields={
|
||||
"album_id": {"to": "album_id", "field": forms.IntegerField()},
|
||||
"album_artist_id": {
|
||||
"to": "album__artist_id",
|
||||
"field": forms.IntegerField(),
|
||||
},
|
||||
"artist_id": {"to": "artist_id", "field": forms.IntegerField()},
|
||||
"uuid": {"to": "uuid"},
|
||||
"license": {"to": "license"},
|
||||
"domain": {
|
||||
"handler": lambda v: federation_utils.get_domain_query_from_url(v)
|
||||
},
|
||||
"library_id": {
|
||||
"to": "uploads__library_id",
|
||||
"field": forms.IntegerField(),
|
||||
"distinct": True,
|
||||
},
|
||||
},
|
||||
)
|
||||
)
|
||||
|
||||
class Meta:
|
||||
model = music_models.Track
|
||||
fields = ["q", "title", "mbid", "fid", "artist", "album", "license"]
|
||||
|
||||
|
||||
class ManageLibraryFilterSet(filters.FilterSet):
|
||||
ordering = django_filters.OrderingFilter(
|
||||
# tuple-mapping retains order
|
||||
fields=(
|
||||
("creation_date", "creation_date"),
|
||||
("_uploads_count", "uploads_count"),
|
||||
("followers_count", "followers_count"),
|
||||
)
|
||||
)
|
||||
q = fields.SmartSearchFilter(
|
||||
config=search.SearchConfig(
|
||||
search_fields={
|
||||
"name": {"to": "name"},
|
||||
"description": {"to": "description"},
|
||||
"fid": {"to": "fid"},
|
||||
},
|
||||
filter_fields={
|
||||
"uuid": {"to": "uuid"},
|
||||
"artist_id": {
|
||||
"to": "uploads__track__artist_id",
|
||||
"field": forms.IntegerField(),
|
||||
"distinct": True,
|
||||
},
|
||||
"album_id": {
|
||||
"to": "uploads__track__album_id",
|
||||
"field": forms.IntegerField(),
|
||||
"distinct": True,
|
||||
},
|
||||
"track_id": {
|
||||
"to": "uploads__track__id",
|
||||
"field": forms.IntegerField(),
|
||||
"distinct": True,
|
||||
},
|
||||
"domain": {"to": "actor__domain_id"},
|
||||
"account": get_actor_filter("actor"),
|
||||
"privacy_level": {"to": "privacy_level"},
|
||||
},
|
||||
)
|
||||
)
|
||||
domain = filters.CharFilter("actor__domain_id")
|
||||
|
||||
class Meta:
|
||||
model = music_models.Library
|
||||
fields = ["q", "name", "fid", "privacy_level", "domain"]
|
||||
|
||||
|
||||
class ManageUploadFilterSet(filters.FilterSet):
|
||||
ordering = django_filters.OrderingFilter(
|
||||
# tuple-mapping retains order
|
||||
fields=(
|
||||
("creation_date", "creation_date"),
|
||||
("modification_date", "modification_date"),
|
||||
("accessed_date", "accessed_date"),
|
||||
("size", "size"),
|
||||
("bitrate", "bitrate"),
|
||||
("duration", "duration"),
|
||||
)
|
||||
)
|
||||
q = fields.SmartSearchFilter(
|
||||
config=search.SearchConfig(
|
||||
search_fields={
|
||||
"source": {"to": "source"},
|
||||
"fid": {"to": "fid"},
|
||||
"track": {"to": "track__title"},
|
||||
"album": {"to": "track__album__title"},
|
||||
"artist": {"to": "track__artist__name"},
|
||||
},
|
||||
filter_fields={
|
||||
"uuid": {"to": "uuid"},
|
||||
"library_id": {"to": "library_id", "field": forms.IntegerField()},
|
||||
"artist_id": {"to": "track__artist_id", "field": forms.IntegerField()},
|
||||
"album_id": {"to": "track__album_id", "field": forms.IntegerField()},
|
||||
"track_id": {"to": "track__id", "field": forms.IntegerField()},
|
||||
"domain": {"to": "library__actor__domain_id"},
|
||||
"import_reference": {"to": "import_reference"},
|
||||
"type": {"to": "mimetype"},
|
||||
"status": {"to": "import_status"},
|
||||
"account": get_actor_filter("library__actor"),
|
||||
"privacy_level": {"to": "library__privacy_level"},
|
||||
},
|
||||
)
|
||||
)
|
||||
domain = filters.CharFilter("library__actor__domain_id")
|
||||
privacy_level = filters.CharFilter("library__privacy_level")
|
||||
|
||||
class Meta:
|
||||
model = music_models.Upload
|
||||
fields = ["q", "track__album", "track__artist", "track"]
|
||||
fields = [
|
||||
"q",
|
||||
"fid",
|
||||
"privacy_level",
|
||||
"domain",
|
||||
"mimetype",
|
||||
"import_reference",
|
||||
"import_status",
|
||||
]
|
||||
|
||||
|
||||
class ManageDomainFilterSet(filters.FilterSet):
|
||||
|
@ -43,6 +254,7 @@ class ManageActorFilterSet(filters.FilterSet):
|
|||
"type": {"to": "type"},
|
||||
},
|
||||
filter_fields={
|
||||
"uuid": {"to": "uuid"},
|
||||
"domain": {"to": "domain__name__iexact"},
|
||||
"username": {"to": "preferred_username__iexact"},
|
||||
"email": {"to": "user__email__iexact"},
|
||||
|
@ -60,7 +272,15 @@ class ManageActorFilterSet(filters.FilterSet):
|
|||
|
||||
|
||||
class ManageUserFilterSet(filters.FilterSet):
|
||||
q = fields.SearchFilter(search_fields=["username", "email", "name"])
|
||||
q = fields.SmartSearchFilter(
|
||||
config=search.SearchConfig(
|
||||
search_fields={
|
||||
"name": {"to": "name"},
|
||||
"username": {"to": "username"},
|
||||
"email": {"to": "email"},
|
||||
}
|
||||
)
|
||||
)
|
||||
|
||||
class Meta:
|
||||
model = users_models.User
|
||||
|
|
|
@ -9,72 +9,12 @@ from funkwhale_api.federation import fields as federation_fields
|
|||
from funkwhale_api.federation import tasks as federation_tasks
|
||||
from funkwhale_api.moderation import models as moderation_models
|
||||
from funkwhale_api.music import models as music_models
|
||||
from funkwhale_api.music import serializers as music_serializers
|
||||
from funkwhale_api.users import models as users_models
|
||||
|
||||
from . import filters
|
||||
|
||||
|
||||
class ManageUploadArtistSerializer(serializers.ModelSerializer):
|
||||
class Meta:
|
||||
model = music_models.Artist
|
||||
fields = ["id", "mbid", "creation_date", "name"]
|
||||
|
||||
|
||||
class ManageUploadAlbumSerializer(serializers.ModelSerializer):
|
||||
artist = ManageUploadArtistSerializer()
|
||||
|
||||
class Meta:
|
||||
model = music_models.Album
|
||||
fields = (
|
||||
"id",
|
||||
"mbid",
|
||||
"title",
|
||||
"artist",
|
||||
"release_date",
|
||||
"cover",
|
||||
"creation_date",
|
||||
)
|
||||
|
||||
|
||||
class ManageUploadTrackSerializer(serializers.ModelSerializer):
|
||||
artist = ManageUploadArtistSerializer()
|
||||
album = ManageUploadAlbumSerializer()
|
||||
|
||||
class Meta:
|
||||
model = music_models.Track
|
||||
fields = ("id", "mbid", "title", "album", "artist", "creation_date", "position")
|
||||
|
||||
|
||||
class ManageUploadSerializer(serializers.ModelSerializer):
|
||||
track = ManageUploadTrackSerializer()
|
||||
|
||||
class Meta:
|
||||
model = music_models.Upload
|
||||
fields = (
|
||||
"id",
|
||||
"path",
|
||||
"source",
|
||||
"filename",
|
||||
"mimetype",
|
||||
"track",
|
||||
"duration",
|
||||
"mimetype",
|
||||
"creation_date",
|
||||
"bitrate",
|
||||
"size",
|
||||
"path",
|
||||
)
|
||||
|
||||
|
||||
class ManageUploadActionSerializer(common_serializers.ActionSerializer):
|
||||
actions = [common_serializers.Action("delete", allow_all=False)]
|
||||
filterset_class = filters.ManageUploadFilterSet
|
||||
|
||||
@transaction.atomic
|
||||
def handle_delete(self, objects):
|
||||
return objects.delete()
|
||||
|
||||
|
||||
class PermissionsSerializer(serializers.Serializer):
|
||||
def to_representation(self, o):
|
||||
return o.get_permissions(defaults=self.context.get("default_permissions"))
|
||||
|
@ -216,10 +156,7 @@ class ManageDomainActionSerializer(common_serializers.ActionSerializer):
|
|||
common_utils.on_commit(federation_tasks.purge_actors.delay, domains=list(ids))
|
||||
|
||||
|
||||
class ManageActorSerializer(serializers.ModelSerializer):
|
||||
uploads_count = serializers.SerializerMethodField()
|
||||
user = ManageUserSerializer()
|
||||
|
||||
class ManageBaseActorSerializer(serializers.ModelSerializer):
|
||||
class Meta:
|
||||
model = federation_models.Actor
|
||||
fields = [
|
||||
|
@ -238,6 +175,17 @@ class ManageActorSerializer(serializers.ModelSerializer):
|
|||
"outbox_url",
|
||||
"shared_inbox_url",
|
||||
"manually_approves_followers",
|
||||
]
|
||||
read_only_fields = ["creation_date", "instance_policy"]
|
||||
|
||||
|
||||
class ManageActorSerializer(ManageBaseActorSerializer):
|
||||
uploads_count = serializers.SerializerMethodField()
|
||||
user = ManageUserSerializer()
|
||||
|
||||
class Meta:
|
||||
model = federation_models.Actor
|
||||
fields = ManageBaseActorSerializer.Meta.fields + [
|
||||
"uploads_count",
|
||||
"user",
|
||||
"instance_policy",
|
||||
|
@ -339,3 +287,256 @@ class ManageInstancePolicySerializer(serializers.ModelSerializer):
|
|||
)
|
||||
|
||||
return instance
|
||||
|
||||
|
||||
class ManageBaseArtistSerializer(serializers.ModelSerializer):
|
||||
domain = serializers.CharField(source="domain_name")
|
||||
|
||||
class Meta:
|
||||
model = music_models.Artist
|
||||
fields = ["id", "fid", "mbid", "name", "creation_date", "domain", "is_local"]
|
||||
|
||||
|
||||
class ManageBaseAlbumSerializer(serializers.ModelSerializer):
|
||||
cover = music_serializers.cover_field
|
||||
domain = serializers.CharField(source="domain_name")
|
||||
|
||||
class Meta:
|
||||
model = music_models.Album
|
||||
fields = [
|
||||
"id",
|
||||
"fid",
|
||||
"mbid",
|
||||
"title",
|
||||
"creation_date",
|
||||
"release_date",
|
||||
"cover",
|
||||
"domain",
|
||||
"is_local",
|
||||
]
|
||||
|
||||
|
||||
class ManageNestedTrackSerializer(serializers.ModelSerializer):
|
||||
domain = serializers.CharField(source="domain_name")
|
||||
|
||||
class Meta:
|
||||
model = music_models.Track
|
||||
fields = [
|
||||
"id",
|
||||
"fid",
|
||||
"mbid",
|
||||
"title",
|
||||
"creation_date",
|
||||
"position",
|
||||
"disc_number",
|
||||
"domain",
|
||||
"is_local",
|
||||
"copyright",
|
||||
"license",
|
||||
]
|
||||
|
||||
|
||||
class ManageNestedAlbumSerializer(ManageBaseAlbumSerializer):
|
||||
|
||||
tracks_count = serializers.SerializerMethodField()
|
||||
|
||||
class Meta:
|
||||
model = music_models.Album
|
||||
fields = ManageBaseAlbumSerializer.Meta.fields + ["tracks_count"]
|
||||
|
||||
def get_tracks_count(self, obj):
|
||||
return getattr(obj, "tracks_count", None)
|
||||
|
||||
|
||||
class ManageArtistSerializer(ManageBaseArtistSerializer):
|
||||
albums = ManageNestedAlbumSerializer(many=True)
|
||||
tracks = ManageNestedTrackSerializer(many=True)
|
||||
attributed_to = ManageBaseActorSerializer()
|
||||
|
||||
class Meta:
|
||||
model = music_models.Artist
|
||||
fields = ManageBaseArtistSerializer.Meta.fields + [
|
||||
"albums",
|
||||
"tracks",
|
||||
"attributed_to",
|
||||
]
|
||||
|
||||
|
||||
class ManageNestedArtistSerializer(ManageBaseArtistSerializer):
|
||||
pass
|
||||
|
||||
|
||||
class ManageAlbumSerializer(ManageBaseAlbumSerializer):
|
||||
tracks = ManageNestedTrackSerializer(many=True)
|
||||
attributed_to = ManageBaseActorSerializer()
|
||||
artist = ManageNestedArtistSerializer()
|
||||
|
||||
class Meta:
|
||||
model = music_models.Album
|
||||
fields = ManageBaseAlbumSerializer.Meta.fields + [
|
||||
"artist",
|
||||
"tracks",
|
||||
"attributed_to",
|
||||
]
|
||||
|
||||
|
||||
class ManageTrackAlbumSerializer(ManageBaseAlbumSerializer):
|
||||
artist = ManageNestedArtistSerializer()
|
||||
|
||||
class Meta:
|
||||
model = music_models.Album
|
||||
fields = ManageBaseAlbumSerializer.Meta.fields + ["artist"]
|
||||
|
||||
|
||||
class ManageTrackSerializer(ManageNestedTrackSerializer):
|
||||
artist = ManageNestedArtistSerializer()
|
||||
album = ManageTrackAlbumSerializer()
|
||||
attributed_to = ManageBaseActorSerializer()
|
||||
uploads_count = serializers.SerializerMethodField()
|
||||
|
||||
class Meta:
|
||||
model = music_models.Track
|
||||
fields = ManageNestedTrackSerializer.Meta.fields + [
|
||||
"artist",
|
||||
"album",
|
||||
"attributed_to",
|
||||
"uploads_count",
|
||||
]
|
||||
|
||||
def get_uploads_count(self, obj):
|
||||
return getattr(obj, "uploads_count", None)
|
||||
|
||||
|
||||
class ManageTrackActionSerializer(common_serializers.ActionSerializer):
|
||||
actions = [common_serializers.Action("delete", allow_all=False)]
|
||||
filterset_class = filters.ManageTrackFilterSet
|
||||
|
||||
@transaction.atomic
|
||||
def handle_delete(self, objects):
|
||||
return objects.delete()
|
||||
|
||||
|
||||
class ManageAlbumActionSerializer(common_serializers.ActionSerializer):
|
||||
actions = [common_serializers.Action("delete", allow_all=False)]
|
||||
filterset_class = filters.ManageAlbumFilterSet
|
||||
|
||||
@transaction.atomic
|
||||
def handle_delete(self, objects):
|
||||
return objects.delete()
|
||||
|
||||
|
||||
class ManageArtistActionSerializer(common_serializers.ActionSerializer):
|
||||
actions = [common_serializers.Action("delete", allow_all=False)]
|
||||
filterset_class = filters.ManageArtistFilterSet
|
||||
|
||||
@transaction.atomic
|
||||
def handle_delete(self, objects):
|
||||
return objects.delete()
|
||||
|
||||
|
||||
class ManageLibraryActionSerializer(common_serializers.ActionSerializer):
|
||||
actions = [common_serializers.Action("delete", allow_all=False)]
|
||||
filterset_class = filters.ManageLibraryFilterSet
|
||||
|
||||
@transaction.atomic
|
||||
def handle_delete(self, objects):
|
||||
return objects.delete()
|
||||
|
||||
|
||||
class ManageUploadActionSerializer(common_serializers.ActionSerializer):
|
||||
actions = [common_serializers.Action("delete", allow_all=False)]
|
||||
filterset_class = filters.ManageUploadFilterSet
|
||||
|
||||
@transaction.atomic
|
||||
def handle_delete(self, objects):
|
||||
return objects.delete()
|
||||
|
||||
|
||||
class ManageLibrarySerializer(serializers.ModelSerializer):
|
||||
domain = serializers.CharField(source="domain_name")
|
||||
actor = ManageBaseActorSerializer()
|
||||
uploads_count = serializers.SerializerMethodField()
|
||||
followers_count = serializers.SerializerMethodField()
|
||||
|
||||
class Meta:
|
||||
model = music_models.Library
|
||||
fields = [
|
||||
"id",
|
||||
"uuid",
|
||||
"fid",
|
||||
"url",
|
||||
"name",
|
||||
"description",
|
||||
"domain",
|
||||
"is_local",
|
||||
"creation_date",
|
||||
"privacy_level",
|
||||
"uploads_count",
|
||||
"followers_count",
|
||||
"followers_url",
|
||||
"actor",
|
||||
]
|
||||
|
||||
def get_uploads_count(self, obj):
|
||||
return getattr(obj, "_uploads_count", obj.uploads_count)
|
||||
|
||||
def get_followers_count(self, obj):
|
||||
return getattr(obj, "followers_count", None)
|
||||
|
||||
|
||||
class ManageNestedLibrarySerializer(serializers.ModelSerializer):
|
||||
domain = serializers.CharField(source="domain_name")
|
||||
actor = ManageBaseActorSerializer()
|
||||
|
||||
class Meta:
|
||||
model = music_models.Library
|
||||
fields = [
|
||||
"id",
|
||||
"uuid",
|
||||
"fid",
|
||||
"url",
|
||||
"name",
|
||||
"description",
|
||||
"domain",
|
||||
"is_local",
|
||||
"creation_date",
|
||||
"privacy_level",
|
||||
"followers_url",
|
||||
"actor",
|
||||
]
|
||||
|
||||
|
||||
class ManageUploadSerializer(serializers.ModelSerializer):
|
||||
track = ManageNestedTrackSerializer()
|
||||
library = ManageNestedLibrarySerializer()
|
||||
domain = serializers.CharField(source="domain_name")
|
||||
|
||||
class Meta:
|
||||
model = music_models.Upload
|
||||
fields = (
|
||||
"id",
|
||||
"uuid",
|
||||
"fid",
|
||||
"domain",
|
||||
"is_local",
|
||||
"audio_file",
|
||||
"listen_url",
|
||||
"source",
|
||||
"filename",
|
||||
"mimetype",
|
||||
"duration",
|
||||
"mimetype",
|
||||
"bitrate",
|
||||
"size",
|
||||
"creation_date",
|
||||
"accessed_date",
|
||||
"modification_date",
|
||||
"metadata",
|
||||
"import_date",
|
||||
"import_details",
|
||||
"import_status",
|
||||
"import_metadata",
|
||||
"import_reference",
|
||||
"track",
|
||||
"library",
|
||||
)
|
||||
|
|
|
@ -7,6 +7,10 @@ federation_router = routers.SimpleRouter()
|
|||
federation_router.register(r"domains", views.ManageDomainViewSet, "domains")
|
||||
|
||||
library_router = routers.SimpleRouter()
|
||||
library_router.register(r"albums", views.ManageAlbumViewSet, "albums")
|
||||
library_router.register(r"artists", views.ManageArtistViewSet, "artists")
|
||||
library_router.register(r"libraries", views.ManageLibraryViewSet, "libraries")
|
||||
library_router.register(r"tracks", views.ManageTrackViewSet, "tracks")
|
||||
library_router.register(r"uploads", views.ManageUploadViewSet, "uploads")
|
||||
|
||||
moderation_router = routers.SimpleRouter()
|
||||
|
|
|
@ -1,39 +1,278 @@
|
|||
from rest_framework import mixins, response, viewsets
|
||||
from rest_framework import decorators as rest_decorators
|
||||
|
||||
from django.db.models import Count, Prefetch, Q, Sum, OuterRef, Subquery
|
||||
from django.db.models.functions import Coalesce
|
||||
from django.shortcuts import get_object_or_404
|
||||
|
||||
from funkwhale_api.common import models as common_models
|
||||
from funkwhale_api.common import preferences, decorators
|
||||
from funkwhale_api.favorites import models as favorites_models
|
||||
from funkwhale_api.federation import models as federation_models
|
||||
from funkwhale_api.federation import tasks as federation_tasks
|
||||
from funkwhale_api.history import models as history_models
|
||||
from funkwhale_api.music import models as music_models
|
||||
from funkwhale_api.moderation import models as moderation_models
|
||||
from funkwhale_api.playlists import models as playlists_models
|
||||
from funkwhale_api.users import models as users_models
|
||||
from funkwhale_api.users.permissions import HasUserPermission
|
||||
|
||||
|
||||
from . import filters, serializers
|
||||
|
||||
|
||||
class ManageUploadViewSet(
|
||||
mixins.ListModelMixin, mixins.RetrieveModelMixin, viewsets.GenericViewSet
|
||||
def get_stats(tracks, target):
|
||||
data = {}
|
||||
tracks = list(tracks.values_list("pk", flat=True))
|
||||
uploads = music_models.Upload.objects.filter(track__in=tracks)
|
||||
data["listenings"] = history_models.Listening.objects.filter(
|
||||
track__in=tracks
|
||||
).count()
|
||||
data["mutations"] = common_models.Mutation.objects.get_for_target(target).count()
|
||||
data["playlists"] = (
|
||||
playlists_models.PlaylistTrack.objects.filter(track__in=tracks)
|
||||
.values_list("playlist", flat=True)
|
||||
.distinct()
|
||||
.count()
|
||||
)
|
||||
data["track_favorites"] = favorites_models.TrackFavorite.objects.filter(
|
||||
track__in=tracks
|
||||
).count()
|
||||
data["libraries"] = uploads.values_list("library", flat=True).distinct().count()
|
||||
data["uploads"] = uploads.count()
|
||||
data.update(get_media_stats(uploads))
|
||||
return data
|
||||
|
||||
|
||||
def get_media_stats(uploads):
|
||||
data = {}
|
||||
data["media_total_size"] = uploads.aggregate(v=Sum("size"))["v"] or 0
|
||||
data["media_downloaded_size"] = (
|
||||
uploads.with_file().aggregate(v=Sum("size"))["v"] or 0
|
||||
)
|
||||
return data
|
||||
|
||||
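# Illustrative shape of the payload built by get_stats() above — the keys come from
# the counters in get_stats() plus get_media_stats(); the numbers are made up:
#
#   {
#       "listenings": 42,
#       "mutations": 3,
#       "playlists": 5,
#       "track_favorites": 12,
#       "libraries": 2,
#       "uploads": 78,
#       "media_total_size": 536870912,
#       "media_downloaded_size": 134217728,
#   }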
|
||||
class ManageArtistViewSet(
|
||||
mixins.ListModelMixin,
|
||||
mixins.RetrieveModelMixin,
|
||||
mixins.DestroyModelMixin,
|
||||
viewsets.GenericViewSet,
|
||||
):
|
||||
queryset = (
|
||||
music_models.Upload.objects.all()
|
||||
.select_related("track__artist", "track__album__artist")
|
||||
music_models.Artist.objects.all()
|
||||
.order_by("-id")
|
||||
.select_related("attributed_to")
|
||||
.prefetch_related(
|
||||
"tracks",
|
||||
Prefetch(
|
||||
"albums",
|
||||
queryset=music_models.Album.objects.annotate(
|
||||
tracks_count=Count("tracks")
|
||||
),
|
||||
),
|
||||
)
|
||||
)
|
||||
serializer_class = serializers.ManageArtistSerializer
|
||||
filterset_class = filters.ManageArtistFilterSet
|
||||
required_scope = "instance:libraries"
|
||||
ordering_fields = ["creation_date", "name"]
|
||||
|
||||
@rest_decorators.action(methods=["get"], detail=True)
|
||||
def stats(self, request, *args, **kwargs):
|
||||
artist = self.get_object()
|
||||
tracks = music_models.Track.objects.filter(
|
||||
Q(artist=artist) | Q(album__artist=artist)
|
||||
)
|
||||
data = get_stats(tracks, artist)
|
||||
return response.Response(data, status=200)
|
||||
|
||||
@rest_decorators.action(methods=["post"], detail=False)
|
||||
def action(self, request, *args, **kwargs):
|
||||
queryset = self.get_queryset()
|
||||
serializer = serializers.ManageArtistActionSerializer(
|
||||
request.data, queryset=queryset
|
||||
)
|
||||
serializer.is_valid(raise_exception=True)
|
||||
result = serializer.save()
|
||||
return response.Response(result, status=200)
|
||||
|
||||
|
||||
class ManageAlbumViewSet(
|
||||
mixins.ListModelMixin,
|
||||
mixins.RetrieveModelMixin,
|
||||
mixins.DestroyModelMixin,
|
||||
viewsets.GenericViewSet,
|
||||
):
|
||||
queryset = (
|
||||
music_models.Album.objects.all()
|
||||
.order_by("-id")
|
||||
.select_related("attributed_to", "artist")
|
||||
.prefetch_related("tracks")
|
||||
)
|
||||
serializer_class = serializers.ManageAlbumSerializer
|
||||
filterset_class = filters.ManageAlbumFilterSet
|
||||
required_scope = "instance:libraries"
|
||||
ordering_fields = ["creation_date", "title", "release_date"]
|
||||
|
||||
@rest_decorators.action(methods=["get"], detail=True)
|
||||
def stats(self, request, *args, **kwargs):
|
||||
album = self.get_object()
|
||||
data = get_stats(album.tracks.all(), album)
|
||||
return response.Response(data, status=200)
|
||||
|
||||
@rest_decorators.action(methods=["post"], detail=False)
|
||||
def action(self, request, *args, **kwargs):
|
||||
queryset = self.get_queryset()
|
||||
serializer = serializers.ManageAlbumActionSerializer(
|
||||
request.data, queryset=queryset
|
||||
)
|
||||
serializer.is_valid(raise_exception=True)
|
||||
result = serializer.save()
|
||||
return response.Response(result, status=200)
|
||||
|
||||
|
||||
uploads_subquery = (
|
||||
music_models.Upload.objects.filter(track_id=OuterRef("pk"))
|
||||
.order_by()
|
||||
.values("track_id")
|
||||
.annotate(track_count=Count("track_id"))
|
||||
.values("track_count")
|
||||
)
|
||||
|
||||
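# The subquery above is the usual "count related rows per outer row" pattern: it
# groups Upload rows by track_id and exposes a single track_count value, which the
# track queryset below turns into an uploads_count annotation via
# Coalesce(Subquery(uploads_subquery), 0), so tracks without uploads get 0 instead of NULL.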
|
||||
class ManageTrackViewSet(
|
||||
mixins.ListModelMixin,
|
||||
mixins.RetrieveModelMixin,
|
||||
mixins.DestroyModelMixin,
|
||||
viewsets.GenericViewSet,
|
||||
):
|
||||
queryset = (
|
||||
music_models.Track.objects.all()
|
||||
.order_by("-id")
|
||||
.select_related("attributed_to", "artist", "album__artist")
|
||||
.annotate(uploads_count=Coalesce(Subquery(uploads_subquery), 0))
|
||||
)
|
||||
serializer_class = serializers.ManageTrackSerializer
|
||||
filterset_class = filters.ManageTrackFilterSet
|
||||
required_scope = "instance:libraries"
|
||||
ordering_fields = [
|
||||
"creation_date",
|
||||
"title",
|
||||
"album__release_date",
|
||||
"position",
|
||||
"disc_number",
|
||||
]
|
||||
|
||||
@rest_decorators.action(methods=["get"], detail=True)
|
||||
def stats(self, request, *args, **kwargs):
|
||||
track = self.get_object()
|
||||
data = get_stats(track.__class__.objects.filter(pk=track.pk), track)
|
||||
return response.Response(data, status=200)
|
||||
|
||||
@rest_decorators.action(methods=["post"], detail=False)
|
||||
def action(self, request, *args, **kwargs):
|
||||
queryset = self.get_queryset()
|
||||
serializer = serializers.ManageTrackActionSerializer(
|
||||
request.data, queryset=queryset
|
||||
)
|
||||
serializer.is_valid(raise_exception=True)
|
||||
result = serializer.save()
|
||||
return response.Response(result, status=200)
|
||||
|
||||
|
||||
uploads_subquery = (
|
||||
music_models.Upload.objects.filter(library_id=OuterRef("pk"))
|
||||
.order_by()
|
||||
.values("library_id")
|
||||
.annotate(library_count=Count("library_id"))
|
||||
.values("library_count")
|
||||
)
|
||||
|
||||
follows_subquery = (
|
||||
federation_models.LibraryFollow.objects.filter(target_id=OuterRef("pk"))
|
||||
.order_by()
|
||||
.values("target_id")
|
||||
.annotate(library_count=Count("target_id"))
|
||||
.values("library_count")
|
||||
)
|
||||
|
||||
|
||||
class ManageLibraryViewSet(
|
||||
mixins.ListModelMixin,
|
||||
mixins.RetrieveModelMixin,
|
||||
mixins.DestroyModelMixin,
|
||||
viewsets.GenericViewSet,
|
||||
):
|
||||
lookup_field = "uuid"
|
||||
queryset = (
|
||||
music_models.Library.objects.all()
|
||||
.order_by("-id")
|
||||
.select_related("actor")
|
||||
.annotate(
|
||||
followers_count=Coalesce(Subquery(follows_subquery), 0),
|
||||
_uploads_count=Coalesce(Subquery(uploads_subquery), 0),
|
||||
)
|
||||
)
|
||||
serializer_class = serializers.ManageLibrarySerializer
|
||||
filterset_class = filters.ManageLibraryFilterSet
|
||||
required_scope = "instance:libraries"
|
||||
|
||||
@rest_decorators.action(methods=["get"], detail=True)
|
||||
def stats(self, request, *args, **kwargs):
|
||||
library = self.get_object()
|
||||
uploads = library.uploads.all()
|
||||
tracks = uploads.values_list("track", flat=True).distinct()
|
||||
albums = (
|
||||
music_models.Track.objects.filter(pk__in=tracks)
|
||||
.values_list("album", flat=True)
|
||||
.distinct()
|
||||
)
|
||||
artists = set(
|
||||
music_models.Album.objects.filter(pk__in=albums).values_list(
|
||||
"artist", flat=True
|
||||
)
|
||||
) | set(
|
||||
music_models.Track.objects.filter(pk__in=tracks).values_list(
|
||||
"artist", flat=True
|
||||
)
|
||||
)
|
||||
|
||||
data = {
|
||||
"uploads": uploads.count(),
|
||||
"followers": library.received_follows.count(),
|
||||
"tracks": tracks.count(),
|
||||
"albums": albums.count(),
|
||||
"artists": len(artists),
|
||||
}
|
||||
data.update(get_media_stats(uploads.all()))
|
||||
return response.Response(data, status=200)
|
||||
|
||||
@rest_decorators.action(methods=["post"], detail=False)
|
||||
def action(self, request, *args, **kwargs):
|
||||
queryset = self.get_queryset()
|
||||
serializer = serializers.ManageTrackActionSerializer(
|
||||
request.data, queryset=queryset
|
||||
)
|
||||
serializer.is_valid(raise_exception=True)
|
||||
result = serializer.save()
|
||||
return response.Response(result, status=200)
|
||||
|
||||
|
||||
class ManageUploadViewSet(
|
||||
mixins.ListModelMixin,
|
||||
mixins.RetrieveModelMixin,
|
||||
mixins.DestroyModelMixin,
|
||||
viewsets.GenericViewSet,
|
||||
):
|
||||
lookup_field = "uuid"
|
||||
queryset = (
|
||||
music_models.Upload.objects.all()
|
||||
.order_by("-id")
|
||||
.select_related("library__actor", "track__artist", "track__album__artist")
|
||||
)
|
||||
serializer_class = serializers.ManageUploadSerializer
|
||||
filterset_class = filters.ManageUploadFilterSet
|
||||
permission_classes = (HasUserPermission,)
|
||||
required_permissions = ["library"]
|
||||
ordering_fields = [
|
||||
"accessed_date",
|
||||
"modification_date",
|
||||
"creation_date",
|
||||
"track__artist__name",
|
||||
"bitrate",
|
||||
"size",
|
||||
"duration",
|
||||
]
|
||||
required_scope = "instance:libraries"
|
||||
|
||||
@rest_decorators.action(methods=["post"], detail=False)
|
||||
def action(self, request, *args, **kwargs):
|
||||
|
@ -55,8 +294,7 @@ class ManageUserViewSet(
|
|||
queryset = users_models.User.objects.all().order_by("-id")
|
||||
serializer_class = serializers.ManageUserSerializer
|
||||
filterset_class = filters.ManageUserFilterSet
|
||||
permission_classes = (HasUserPermission,)
|
||||
required_permissions = ["settings"]
|
||||
required_scope = "instance:users"
|
||||
ordering_fields = ["date_joined", "last_activity", "username"]
|
||||
|
||||
def get_serializer_context(self):
|
||||
|
@ -80,8 +318,7 @@ class ManageInvitationViewSet(
|
|||
)
|
||||
serializer_class = serializers.ManageInvitationSerializer
|
||||
filterset_class = filters.ManageInvitationFilterSet
|
||||
permission_classes = (HasUserPermission,)
|
||||
required_permissions = ["settings"]
|
||||
required_scope = "instance:invitations"
|
||||
ordering_fields = ["creation_date", "expiration_date"]
|
||||
|
||||
def perform_create(self, serializer):
|
||||
|
@ -114,8 +351,7 @@ class ManageDomainViewSet(
|
|||
)
|
||||
serializer_class = serializers.ManageDomainSerializer
|
||||
filterset_class = filters.ManageDomainFilterSet
|
||||
permission_classes = (HasUserPermission,)
|
||||
required_permissions = ["moderation"]
|
||||
required_scope = "instance:domains"
|
||||
ordering_fields = [
|
||||
"name",
|
||||
"creation_date",
|
||||
|
@ -125,6 +361,10 @@ class ManageDomainViewSet(
|
|||
"instance_policy",
|
||||
]
|
||||
|
||||
def perform_create(self, serializer):
|
||||
domain = serializer.save()
|
||||
federation_tasks.update_domain_nodeinfo(domain_name=domain.name)
|
||||
|
||||
@rest_decorators.action(methods=["get"], detail=True)
|
||||
def nodeinfo(self, request, *args, **kwargs):
|
||||
domain = self.get_object()
|
||||
|
@ -153,7 +393,7 @@ class ManageActorViewSet(
|
|||
)
|
||||
serializer_class = serializers.ManageActorSerializer
|
||||
filterset_class = filters.ManageActorFilterSet
|
||||
permission_classes = (HasUserPermission,)
|
||||
required_scope = "instance:accounts"
|
||||
required_permissions = ["moderation"]
|
||||
ordering_fields = [
|
||||
"name",
|
||||
|
@ -199,8 +439,7 @@ class ManageInstancePolicyViewSet(
|
|||
)
|
||||
serializer_class = serializers.ManageInstancePolicySerializer
|
||||
filterset_class = filters.ManageInstancePolicyFilterSet
|
||||
permission_classes = (HasUserPermission,)
|
||||
required_permissions = ["moderation"]
|
||||
required_scope = "instance:policies"
|
||||
ordering_fields = ["id", "creation_date"]
|
||||
|
||||
def perform_create(self, serializer):
|
||||
|
|
|
@ -28,3 +28,10 @@ class InstancePolicyAdmin(admin.ModelAdmin):
|
|||
"summary",
|
||||
]
|
||||
list_select_related = True
|
||||
|
||||
|
||||
@admin.register(models.UserFilter)
|
||||
class UserFilterAdmin(admin.ModelAdmin):
|
||||
list_display = ["uuid", "user", "target_artist", "creation_date"]
|
||||
search_fields = ["target_artist__name", "user__username", "user__email"]
|
||||
list_select_related = True
|
||||
|
|
|
@ -2,6 +2,8 @@ import factory
|
|||
|
||||
from funkwhale_api.factories import registry, NoUpdateOnCreate
|
||||
from funkwhale_api.federation import factories as federation_factories
|
||||
from funkwhale_api.music import factories as music_factories
|
||||
from funkwhale_api.users import factories as users_factories
|
||||
|
||||
|
||||
@registry.register
|
||||
|
@ -21,3 +23,17 @@ class InstancePolicyFactory(NoUpdateOnCreate, factory.DjangoModelFactory):
|
|||
for_actor = factory.Trait(
|
||||
target_actor=factory.SubFactory(federation_factories.ActorFactory)
|
||||
)
|
||||
|
||||
|
||||
@registry.register
|
||||
class UserFilterFactory(NoUpdateOnCreate, factory.DjangoModelFactory):
|
||||
user = factory.SubFactory(users_factories.UserFactory)
|
||||
target_artist = None
|
||||
|
||||
class Meta:
|
||||
model = "moderation.UserFilter"
|
||||
|
||||
class Params:
|
||||
for_artist = factory.Trait(
|
||||
target_artist=factory.SubFactory(music_factories.ArtistFactory)
|
||||
)
|
||||
|
|
|
@ -0,0 +1,69 @@
|
|||
from django.db.models import Q
|
||||
|
||||
from django_filters import rest_framework as filters
|
||||
|
||||
|
||||
USER_FILTER_CONFIG = {
|
||||
"ARTIST": {"target_artist": ["pk"]},
|
||||
"ALBUM": {"target_artist": ["artist__pk"]},
|
||||
"TRACK": {"target_artist": ["artist__pk", "album__artist__pk"]},
|
||||
"LISTENING": {"target_artist": ["track__album__artist__pk", "track__artist__pk"]},
|
||||
"TRACK_FAVORITE": {
|
||||
"target_artist": ["track__album__artist__pk", "track__artist__pk"]
|
||||
},
|
||||
}
|
||||
|
||||
|
||||
def get_filtered_content_query(config, user):
|
||||
final_query = None
|
||||
for filter_field, model_fields in config.items():
|
||||
query = None
|
||||
ids = user.content_filters.values_list(filter_field, flat=True)
|
||||
for model_field in model_fields:
|
||||
q = Q(**{"{}__in".format(model_field): ids})
|
||||
if query:
|
||||
query |= q
|
||||
else:
|
||||
query = q
|
||||
|
||||
final_query = query
|
||||
return final_query
|
||||
|
||||
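# A quick sketch of what the helper above builds, assuming a user who filtered
# artists with pk 1 and 2 and the "TRACK" entry of USER_FILTER_CONFIG:
#
#   get_filtered_content_query(USER_FILTER_CONFIG["TRACK"], user)
#   # ~> Q(artist__pk__in=[1, 2]) | Q(album__artist__pk__in=[1, 2])
#
# Callers then apply the result with queryset.filter(...) or queryset.exclude(...).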
|
||||
class HiddenContentFilterSet(filters.FilterSet):
|
||||
"""
|
||||
A filterset that include a "hidden" param:
|
||||
- hidden=true : list user hidden/filtered objects
|
||||
- hidden=false : list all objects user hidden/filtered objects
|
||||
- not specified: hidden=false
|
||||
|
||||
Usage:
|
||||
|
||||
class MyFilterSet(HiddenContentFilterSet):
|
||||
class Meta:
|
||||
hidden_content_fields_mapping = {'target_artist': ['pk']}
|
||||
|
||||
Will map UserContentFilter.artist values to the pk field of the filtered model.
|
||||
|
||||
"""
|
||||
|
||||
hidden = filters.BooleanFilter(field_name="_", method="filter_hidden_content")
|
||||
|
||||
def __init__(self, *args, **kwargs):
|
||||
super().__init__(*args, **kwargs)
|
||||
self.data = self.data.copy()
|
||||
self.data.setdefault("hidden", False)
|
||||
|
||||
def filter_hidden_content(self, queryset, name, value):
|
||||
user = self.request.user
|
||||
if not user.is_authenticated:
|
||||
# no filter to apply
|
||||
return queryset
|
||||
|
||||
config = self.__class__.Meta.hidden_content_fields_mapping
|
||||
final_query = get_filtered_content_query(config, user)
|
||||
|
||||
if value is True:
|
||||
return queryset.filter(final_query)
|
||||
else:
|
||||
return queryset.exclude(final_query)
|
|
@ -0,0 +1,57 @@
|
|||
# Generated by Django 2.1.5 on 2019-02-13 09:27
|
||||
|
||||
from django.conf import settings
|
||||
from django.db import migrations, models
|
||||
import django.db.models.deletion
|
||||
import django.utils.timezone
|
||||
import uuid
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
|
||||
("music", "0037_auto_20190103_1757"),
|
||||
("moderation", "0001_initial"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.CreateModel(
|
||||
name="UserFilter",
|
||||
fields=[
|
||||
(
|
||||
"id",
|
||||
models.AutoField(
|
||||
auto_created=True,
|
||||
primary_key=True,
|
||||
serialize=False,
|
||||
verbose_name="ID",
|
||||
),
|
||||
),
|
||||
("uuid", models.UUIDField(default=uuid.uuid4, unique=True)),
|
||||
(
|
||||
"creation_date",
|
||||
models.DateTimeField(default=django.utils.timezone.now),
|
||||
),
|
||||
(
|
||||
"target_artist",
|
||||
models.ForeignKey(
|
||||
on_delete=django.db.models.deletion.CASCADE,
|
||||
related_name="user_filters",
|
||||
to="music.Artist",
|
||||
),
|
||||
),
|
||||
(
|
||||
"user",
|
||||
models.ForeignKey(
|
||||
on_delete=django.db.models.deletion.CASCADE,
|
||||
related_name="content_filters",
|
||||
to=settings.AUTH_USER_MODEL,
|
||||
),
|
||||
),
|
||||
],
|
||||
),
|
||||
migrations.AlterUniqueTogether(
|
||||
name="userfilter", unique_together={("user", "target_artist")}
|
||||
),
|
||||
]
|
|
@ -73,3 +73,22 @@ class InstancePolicy(models.Model):
|
|||
return {"type": "actor", "obj": self.target_actor}
|
||||
if self.target_domain_id:
|
||||
return {"type": "domain", "obj": self.target_domain}
|
||||
|
||||
|
||||
class UserFilter(models.Model):
|
||||
uuid = models.UUIDField(default=uuid.uuid4, unique=True)
|
||||
creation_date = models.DateTimeField(default=timezone.now)
|
||||
target_artist = models.ForeignKey(
|
||||
"music.Artist", on_delete=models.CASCADE, related_name="user_filters"
|
||||
)
|
||||
user = models.ForeignKey(
|
||||
"users.User", on_delete=models.CASCADE, related_name="content_filters"
|
||||
)
|
||||
|
||||
class Meta:
|
||||
unique_together = ("user", "target_artist")
|
||||
|
||||
@property
|
||||
def target(self):
|
||||
if self.target_artist:
|
||||
return {"type": "artist", "obj": self.target_artist}
|
||||
|
|
|
@ -0,0 +1,45 @@
|
|||
from rest_framework import serializers
|
||||
|
||||
from funkwhale_api.music import models as music_models
|
||||
from . import models
|
||||
|
||||
|
||||
class FilteredArtistSerializer(serializers.ModelSerializer):
|
||||
class Meta:
|
||||
model = music_models.Artist
|
||||
fields = ["id", "name"]
|
||||
|
||||
|
||||
class TargetSerializer(serializers.Serializer):
|
||||
type = serializers.ChoiceField(choices=["artist"])
|
||||
id = serializers.CharField()
|
||||
|
||||
def to_representation(self, value):
|
||||
if value["type"] == "artist":
|
||||
data = FilteredArtistSerializer(value["obj"]).data
|
||||
data.update({"type": "artist"})
|
||||
return data
|
||||
|
||||
def to_internal_value(self, value):
|
||||
if value["type"] == "artist":
|
||||
field = serializers.PrimaryKeyRelatedField(
|
||||
queryset=music_models.Artist.objects.all()
|
||||
)
|
||||
value["obj"] = field.to_internal_value(value["id"])
|
||||
return value
|
||||
|
||||
|
||||
class UserFilterSerializer(serializers.ModelSerializer):
|
||||
target = TargetSerializer()
|
||||
|
||||
class Meta:
|
||||
model = models.UserFilter
|
||||
fields = ["uuid", "target", "creation_date"]
|
||||
read_only_fields = ["uuid", "creation_date"]
|
||||
|
||||
def validate(self, data):
|
||||
target = data.pop("target")
|
||||
if target["type"] == "artist":
|
||||
data["target_artist"] = target["obj"]
|
||||
|
||||
return data
|
|
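# Illustrative payload accepted by UserFilterSerializer (the pk is hypothetical):
#
#   {"target": {"type": "artist", "id": 42}}
#
# TargetSerializer.to_internal_value() resolves the id to an Artist instance, and
# validate() moves it to the target_artist field before the model is saved.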
@ -0,0 +1,8 @@
|
|||
from rest_framework import routers
|
||||
|
||||
from . import views
|
||||
|
||||
router = routers.SimpleRouter()
|
||||
router.register(r"content-filters", views.UserFilterViewSet, "content-filters")
|
||||
|
||||
urlpatterns = router.urls
|
|
@ -0,0 +1,41 @@
|
|||
from django.db import IntegrityError
|
||||
|
||||
from rest_framework import mixins
|
||||
from rest_framework import response
|
||||
from rest_framework import status
|
||||
from rest_framework import viewsets
|
||||
|
||||
from . import models
|
||||
from . import serializers
|
||||
|
||||
|
||||
class UserFilterViewSet(
|
||||
mixins.ListModelMixin,
|
||||
mixins.CreateModelMixin,
|
||||
mixins.RetrieveModelMixin,
|
||||
mixins.DestroyModelMixin,
|
||||
viewsets.GenericViewSet,
|
||||
):
|
||||
lookup_field = "uuid"
|
||||
queryset = (
|
||||
models.UserFilter.objects.all()
|
||||
.order_by("-creation_date")
|
||||
.select_related("target_artist")
|
||||
)
|
||||
serializer_class = serializers.UserFilterSerializer
|
||||
required_scope = "filters"
|
||||
ordering_fields = ("creation_date",)
|
||||
|
||||
def create(self, request, *args, **kwargs):
|
||||
try:
|
||||
return super().create(request, *args, **kwargs)
|
||||
except IntegrityError:
|
||||
content = {"detail": "A content filter already exists for this object"}
|
||||
return response.Response(content, status=status.HTTP_400_BAD_REQUEST)
|
||||
|
||||
def get_queryset(self):
|
||||
qs = super().get_queryset()
|
||||
return qs.filter(user=self.request.user)
|
||||
|
||||
def perform_create(self, serializer):
|
||||
serializer.save(user=self.request.user)
|
|
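# Assuming the moderation router above is mounted under /api/v1/moderation/
# (the include() is not part of this diff), the viewset exposes roughly:
#
#   GET    /api/v1/moderation/content-filters/          # list the current user's filters
#   POST   /api/v1/moderation/content-filters/          # create one, e.g. {"target": {"type": "artist", "id": 42}}
#   DELETE /api/v1/moderation/content-filters/<uuid>/   # remove a filter (lookup_field is "uuid")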
@ -39,22 +39,6 @@ class ImportJobAdmin(admin.ModelAdmin):
|
|||
list_filter = ["status"]
|
||||
|
||||
|
||||
@admin.register(models.Work)
|
||||
class WorkAdmin(admin.ModelAdmin):
|
||||
list_display = ["title", "mbid", "language", "nature"]
|
||||
list_select_related = True
|
||||
search_fields = ["title"]
|
||||
list_filter = ["language", "nature"]
|
||||
|
||||
|
||||
@admin.register(models.Lyrics)
|
||||
class LyricsAdmin(admin.ModelAdmin):
|
||||
list_display = ["url", "id", "url"]
|
||||
list_select_related = True
|
||||
search_fields = ["url", "work__title"]
|
||||
list_filter = ["work__language"]
|
||||
|
||||
|
||||
@admin.register(models.Upload)
|
||||
class UploadAdmin(admin.ModelAdmin):
|
||||
list_display = [
|
||||
|
|
|
@ -64,6 +64,12 @@ class ArtistFactory(NoUpdateOnCreate, factory.django.DjangoModelFactory):
|
|||
class Meta:
|
||||
model = "music.Artist"
|
||||
|
||||
class Params:
|
||||
attributed = factory.Trait(
|
||||
attributed_to=factory.SubFactory(federation_factories.ActorFactory)
|
||||
)
|
||||
local = factory.Trait(fid=factory.Faker("federation_url", local=True))
|
||||
|
||||
|
||||
@registry.register
|
||||
class AlbumFactory(NoUpdateOnCreate, factory.django.DjangoModelFactory):
|
||||
|
@ -79,6 +85,15 @@ class AlbumFactory(NoUpdateOnCreate, factory.django.DjangoModelFactory):
|
|||
class Meta:
|
||||
model = "music.Album"
|
||||
|
||||
class Params:
|
||||
attributed = factory.Trait(
|
||||
attributed_to=factory.SubFactory(federation_factories.ActorFactory)
|
||||
)
|
||||
|
||||
local = factory.Trait(
|
||||
fid=factory.Faker("federation_url", local=True), artist__local=True
|
||||
)
|
||||
|
||||
|
||||
@registry.register
|
||||
class TrackFactory(NoUpdateOnCreate, factory.django.DjangoModelFactory):
|
||||
|
@ -94,6 +109,15 @@ class TrackFactory(NoUpdateOnCreate, factory.django.DjangoModelFactory):
|
|||
class Meta:
|
||||
model = "music.Track"
|
||||
|
||||
class Params:
|
||||
attributed = factory.Trait(
|
||||
attributed_to=factory.SubFactory(federation_factories.ActorFactory)
|
||||
)
|
||||
|
||||
local = factory.Trait(
|
||||
fid=factory.Faker("federation_url", local=True), album__local=True
|
||||
)
|
||||
|
||||
@factory.post_generation
|
||||
def license(self, created, extracted, **kwargs):
|
||||
if not created:
|
||||
|
@ -140,27 +164,6 @@ class UploadVersionFactory(NoUpdateOnCreate, factory.django.DjangoModelFactory):
|
|||
model = "music.UploadVersion"
|
||||
|
||||
|
||||
@registry.register
|
||||
class WorkFactory(NoUpdateOnCreate, factory.django.DjangoModelFactory):
|
||||
mbid = factory.Faker("uuid4")
|
||||
language = "eng"
|
||||
nature = "song"
|
||||
title = factory.Faker("sentence", nb_words=3)
|
||||
|
||||
class Meta:
|
||||
model = "music.Work"
|
||||
|
||||
|
||||
@registry.register
|
||||
class LyricsFactory(NoUpdateOnCreate, factory.django.DjangoModelFactory):
|
||||
work = factory.SubFactory(WorkFactory)
|
||||
url = factory.Faker("url")
|
||||
content = factory.Faker("paragraphs", nb=4)
|
||||
|
||||
class Meta:
|
||||
model = "music.Lyrics"
|
||||
|
||||
|
||||
@registry.register
|
||||
class TagFactory(NoUpdateOnCreate, factory.django.DjangoModelFactory):
|
||||
name = factory.SelfAttribute("slug")
|
||||
|
|
|
@ -1,13 +1,15 @@
|
|||
from django_filters import rest_framework as filters
|
||||
|
||||
from funkwhale_api.common import fields
|
||||
from funkwhale_api.common import filters as common_filters
|
||||
from funkwhale_api.common import search
|
||||
from funkwhale_api.moderation import filters as moderation_filters
|
||||
|
||||
from . import models
|
||||
from . import utils
|
||||
|
||||
|
||||
class ArtistFilter(filters.FilterSet):
|
||||
class ArtistFilter(moderation_filters.HiddenContentFilterSet):
|
||||
q = fields.SearchFilter(search_fields=["name"])
|
||||
playable = filters.BooleanFilter(field_name="_", method="filter_playable")
|
||||
|
||||
|
@ -17,25 +19,29 @@ class ArtistFilter(filters.FilterSet):
|
|||
"name": ["exact", "iexact", "startswith", "icontains"],
|
||||
"playable": "exact",
|
||||
}
|
||||
hidden_content_fields_mapping = moderation_filters.USER_FILTER_CONFIG["ARTIST"]
|
||||
|
||||
def filter_playable(self, queryset, name, value):
|
||||
actor = utils.get_actor_from_request(self.request)
|
||||
return queryset.playable_by(actor, value)
|
||||
|
||||
|
||||
class TrackFilter(filters.FilterSet):
|
||||
class TrackFilter(moderation_filters.HiddenContentFilterSet):
|
||||
q = fields.SearchFilter(search_fields=["title", "album__title", "artist__name"])
|
||||
playable = filters.BooleanFilter(field_name="_", method="filter_playable")
|
||||
id = common_filters.MultipleQueryFilter(coerce=int)
|
||||
|
||||
class Meta:
|
||||
model = models.Track
|
||||
fields = {
|
||||
"title": ["exact", "iexact", "startswith", "icontains"],
|
||||
"playable": ["exact"],
|
||||
"id": ["exact"],
|
||||
"artist": ["exact"],
|
||||
"album": ["exact"],
|
||||
"license": ["exact"],
|
||||
}
|
||||
hidden_content_fields_mapping = moderation_filters.USER_FILTER_CONFIG["TRACK"]
|
||||
|
||||
def filter_playable(self, queryset, name, value):
|
||||
actor = utils.get_actor_from_request(self.request)
|
||||
|
@ -85,13 +91,14 @@ class UploadFilter(filters.FilterSet):
|
|||
return queryset.playable_by(actor, value)
|
||||
|
||||
|
||||
class AlbumFilter(filters.FilterSet):
|
||||
class AlbumFilter(moderation_filters.HiddenContentFilterSet):
|
||||
playable = filters.BooleanFilter(field_name="_", method="filter_playable")
|
||||
q = fields.SearchFilter(search_fields=["title", "artist__name" "source"])
|
||||
q = fields.SearchFilter(search_fields=["title", "artist__name"])
|
||||
|
||||
class Meta:
|
||||
model = models.Album
|
||||
fields = ["playable", "q", "artist"]
|
||||
hidden_content_fields_mapping = moderation_filters.USER_FILTER_CONFIG["ALBUM"]
|
||||
|
||||
def filter_playable(self, queryset, name, value):
|
||||
actor = utils.get_actor_from_request(self.request)
|
||||
|
|
|
@ -47,4 +47,4 @@ class Mapping(object):
|
|||
)
|
||||
|
||||
|
||||
registry = {"Artist": Importer, "Track": Importer, "Album": Importer, "Work": Importer}
|
||||
registry = {"Artist": Importer, "Track": Importer, "Album": Importer}
|
||||
|
|
|
@ -1,31 +0,0 @@
|
|||
import urllib.request
|
||||
|
||||
from bs4 import BeautifulSoup
|
||||
|
||||
|
||||
def _get_html(url):
|
||||
with urllib.request.urlopen(url) as response:
|
||||
html = response.read()
|
||||
return html.decode("utf-8")
|
||||
|
||||
|
||||
def extract_content(html):
|
||||
soup = BeautifulSoup(html, "html.parser")
|
||||
return soup.find_all("div", class_="lyricbox")[0].contents
|
||||
|
||||
|
||||
def clean_content(contents):
|
||||
final_content = ""
|
||||
for e in contents:
|
||||
if e == "\n":
|
||||
continue
|
||||
if e.name == "script":
|
||||
continue
|
||||
if e.name == "br":
|
||||
final_content += "\n"
|
||||
continue
|
||||
try:
|
||||
final_content += e.text
|
||||
except AttributeError:
|
||||
final_content += str(e)
|
||||
return final_content
|
|
@ -0,0 +1,76 @@
|
|||
import os
|
||||
from argparse import RawTextHelpFormatter
|
||||
|
||||
from django.core.management.base import BaseCommand
|
||||
|
||||
from django.db import transaction
|
||||
|
||||
from funkwhale_api.music import models
|
||||
|
||||
|
||||
def progress(buffer, count, total, status=""):
|
||||
bar_len = 60
|
||||
filled_len = int(round(bar_len * count / float(total)))
|
||||
|
||||
bar = "=" * filled_len + "-" * (bar_len - filled_len)
|
||||
|
||||
buffer.write("[%s] %s/%s ...%s\r" % (bar, count, total, status))
|
||||
buffer.flush()
|
||||
|
||||
|
||||
class Command(BaseCommand):
|
||||
help = """
|
||||
Loop through all in-place imported files in the database, and verify
|
||||
that the corresponding files are present on the filesystem. If some files are not
|
||||
found and --no-dry-run is specified, the corresponding database objects will be deleted.
|
||||
"""
|
||||
|
||||
def create_parser(self, *args, **kwargs):
|
||||
parser = super().create_parser(*args, **kwargs)
|
||||
parser.formatter_class = RawTextHelpFormatter
|
||||
return parser
|
||||
|
||||
def add_arguments(self, parser):
|
||||
parser.add_argument(
|
||||
"--no-dry-run",
|
||||
action="store_false",
|
||||
dest="dry_run",
|
||||
default=True,
|
||||
help="Disable dry run mode and apply pruning for real on the database",
|
||||
)
|
||||
|
||||
@transaction.atomic
|
||||
def handle(self, *args, **options):
|
||||
candidates = models.Upload.objects.filter(source__startswith="file://")
|
||||
candidates = candidates.filter(audio_file__in=["", None])
|
||||
total = candidates.count()
|
||||
self.stdout.write("Checking {} in-place imported files…".format(total))
|
||||
|
||||
missing = []
|
||||
for i, row in enumerate(candidates.values("id", "source")):
|
||||
path = row["source"].replace("file://", "")
|
||||
progress(self.stdout, i + 1, total)
|
||||
if not os.path.exists(path):
|
||||
missing.append((path, row["id"]))
|
||||
|
||||
if missing:
|
||||
for path, _ in missing:
|
||||
self.stdout.write(" {}".format(path))
|
||||
self.stdout.write(
|
||||
"The previous {} paths are referenced in database, but not found on disk!".format(
|
||||
len(missing)
|
||||
)
|
||||
)
|
||||
|
||||
else:
|
||||
self.stdout.write("All in-place imports have a matching on-disk file")
|
||||
return
|
||||
|
||||
to_delete = candidates.filter(pk__in=[id for _, id in missing])
|
||||
if options["dry_run"]:
|
||||
self.stdout.write(
|
||||
"Nothing was deleted, rerun this command with --no-dry-run to apply the changes"
|
||||
)
|
||||
else:
|
||||
self.stdout.write("Deleting {} uploads…".format(to_delete.count()))
|
||||
to_delete.delete()
|
|
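# Assuming this file lands under music/management/commands/check_inplace_files.py
# (the file path is not visible in this extract), the command would be invoked as:
#
#   python manage.py check_inplace_files               # dry run, only reports missing files
#   python manage.py check_inplace_files --no-dry-run  # actually deletes the orphaned uploads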
@ -0,0 +1,145 @@
|
|||
from argparse import RawTextHelpFormatter
|
||||
|
||||
from django.core.management.base import BaseCommand
|
||||
from django.core.management.base import CommandError
|
||||
|
||||
from django.db import transaction
|
||||
|
||||
from funkwhale_api.music import models, tasks
|
||||
|
||||
|
||||
class Command(BaseCommand):
|
||||
help = """
|
||||
Remove tracks, albums and artists that are not associated with any file from the instance library:
|
||||
|
||||
- Tracks without uploads are deleted, if the --tracks flag is passed
|
||||
- Albums without tracks are deleted, if the --albums flag is passed
|
||||
- Artists without albums are deleted, if the --artists flag is passed
|
||||
|
||||
Tracks with associated favorites, playlists or listening history won't be deleted
|
||||
by default, unless you pass the corresponding --ignore-* flags.
|
||||
|
||||
"""
|
||||
|
||||
def create_parser(self, *args, **kwargs):
|
||||
parser = super().create_parser(*args, **kwargs)
|
||||
parser.formatter_class = RawTextHelpFormatter
|
||||
return parser
|
||||
|
||||
def add_arguments(self, parser):
|
||||
parser.add_argument(
|
||||
"--no-dry-run",
|
||||
action="store_false",
|
||||
dest="dry_run",
|
||||
default=True,
|
||||
help="Disable dry run mode and apply pruning for real on the database",
|
||||
)
|
||||
parser.add_argument(
|
||||
"--artists",
|
||||
action="store_true",
|
||||
dest="prune_artists",
|
||||
default=False,
|
||||
help="Prune artists without albums/tracks",
|
||||
)
|
||||
parser.add_argument(
|
||||
"--albums",
|
||||
action="store_true",
|
||||
dest="prune_albums",
|
||||
default=False,
|
||||
help="Prune albums without tracks",
|
||||
)
|
||||
parser.add_argument(
|
||||
"--tracks",
|
||||
action="store_true",
|
||||
dest="prune_tracks",
|
||||
default=False,
|
||||
help="Prune tracks without uploads",
|
||||
)
|
||||
|
||||
parser.add_argument(
|
||||
"--ignore-favorites",
|
||||
action="store_false",
|
||||
dest="exclude_favorites",
|
||||
default=True,
|
||||
help="Allow favorited tracks to be pruned",
|
||||
)
|
||||
|
||||
parser.add_argument(
|
||||
"--ignore-playlists",
|
||||
action="store_false",
|
||||
dest="exclude_playlists",
|
||||
default=True,
|
||||
help="Allow tracks included in playlists to be pruned",
|
||||
)
|
||||
|
||||
parser.add_argument(
|
||||
"--ignore-listenings",
|
||||
action="store_false",
|
||||
dest="exclude_listenings",
|
||||
default=True,
|
||||
help="Allow tracks with listening history to be pruned",
|
||||
)
|
||||
|
||||
@transaction.atomic
|
||||
def handle(self, *args, **options):
|
||||
if not any(
|
||||
[options["prune_albums"], options["prune_tracks"], options["prune_artists"]]
|
||||
):
|
||||
raise CommandError(
|
||||
"You need to provide at least one of the --tracks, --albums or --artists flags"
|
||||
)
|
||||
|
||||
if options["dry_run"]:
|
||||
self.stdout.write("Dry-run on, will not commit anything")
|
||||
else:
|
||||
self.stdout.write("Dry-run off, *pruning for real*")
|
||||
self.stdout.write("")
|
||||
if options["prune_tracks"]:
|
||||
prunable = tasks.get_prunable_tracks(
|
||||
exclude_favorites=options["exclude_favorites"],
|
||||
exclude_playlists=options["exclude_playlists"],
|
||||
exclude_listenings=options["exclude_listenings"],
|
||||
)
|
||||
pruned_total = prunable.count()
|
||||
total = models.Track.objects.count()
|
||||
if options["dry_run"]:
|
||||
self.stdout.write(
|
||||
"Would prune {}/{} tracks".format(pruned_total, total)
|
||||
)
|
||||
else:
|
||||
self.stdout.write("Deleting {}/{} tracks…".format(pruned_total, total))
|
||||
prunable.delete()
|
||||
|
||||
if options["prune_albums"]:
|
||||
prunable = tasks.get_prunable_albums()
|
||||
pruned_total = prunable.count()
|
||||
total = models.Album.objects.count()
|
||||
if options["dry_run"]:
|
||||
self.stdout.write(
|
||||
"Would prune {}/{} albums".format(pruned_total, total)
|
||||
)
|
||||
else:
|
||||
self.stdout.write("Deleting {}/{} albums…".format(pruned_total, total))
|
||||
prunable.delete()
|
||||
|
||||
if options["prune_artists"]:
|
||||
prunable = tasks.get_prunable_artists()
|
||||
pruned_total = prunable.count()
|
||||
total = models.Artist.objects.count()
|
||||
if options["dry_run"]:
|
||||
self.stdout.write(
|
||||
"Would prune {}/{} artists".format(pruned_total, total)
|
||||
)
|
||||
else:
|
||||
self.stdout.write("Deleting {}/{} artists…".format(pruned_total, total))
|
||||
prunable.delete()
|
||||
|
||||
self.stdout.write("")
|
||||
if options["dry_run"]:
|
||||
self.stdout.write(
|
||||
"Nothing was pruned, rerun this command with --no-dry-run to apply the changes"
|
||||
)
|
||||
else:
|
||||
self.stdout.write("Pruning completed!")
|
||||
|
||||
self.stdout.write("")
|
|
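# Assuming this command is registered as music/management/commands/prune_library.py
# (the file path is not visible in this extract), typical invocations look like:
#
#   python manage.py prune_library --tracks                                   # dry run, tracks only
#   python manage.py prune_library --tracks --albums --artists --no-dry-run   # prune everything for real
#   python manage.py prune_library --tracks --ignore-favorites                # also prune favorited tracks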
@ -8,7 +8,8 @@ import mutagen.oggtheora
|
|||
import mutagen.oggvorbis
|
||||
import mutagen.flac
|
||||
|
||||
from django import forms
|
||||
from rest_framework import serializers
|
||||
from rest_framework.compat import Mapping
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
NODEFAULT = object()
|
||||
|
@ -122,85 +123,23 @@ def get_mp3_recording_id(f, k):
|
|||
raise TagNotFound(k)
|
||||
|
||||
|
||||
def convert_position(v):
|
||||
try:
|
||||
return int(v)
|
||||
except ValueError:
|
||||
# maybe the position is of the form "1/4"
|
||||
pass
|
||||
|
||||
try:
|
||||
return int(v.split("/")[0])
|
||||
except (ValueError, AttributeError, IndexError):
|
||||
pass
|
||||
|
||||
|
||||
class FirstUUIDField(forms.UUIDField):
|
||||
def to_python(self, value):
|
||||
try:
|
||||
# sometimes, Picard leaves two uuids in the field, separated
|
||||
# by a slash or a ;
|
||||
value = value.split(";")[0].split("/")[0].strip()
|
||||
except (AttributeError, IndexError, TypeError):
|
||||
pass
|
||||
|
||||
return super().to_python(value)
|
||||
|
||||
|
||||
def get_date(value):
|
||||
ADDITIONAL_FORMATS = ["%Y-%d-%m %H:%M"] # deezer date format
|
||||
try:
|
||||
parsed = pendulum.parse(str(value))
|
||||
return datetime.date(parsed.year, parsed.month, parsed.day)
|
||||
except pendulum.exceptions.ParserError:
|
||||
pass
|
||||
|
||||
for date_format in ADDITIONAL_FORMATS:
|
||||
try:
|
||||
parsed = datetime.datetime.strptime(value, date_format)
|
||||
except ValueError:
|
||||
continue
|
||||
else:
|
||||
return datetime.date(parsed.year, parsed.month, parsed.day)
|
||||
|
||||
raise ParseError("{} cannot be parsed as a date".format(value))
|
||||
|
||||
|
||||
def split_and_return_first(separator):
|
||||
def inner(v):
|
||||
return v.split(separator)[0].strip()
|
||||
|
||||
return inner
|
||||
|
||||
|
||||
VALIDATION = {
|
||||
"musicbrainz_artistid": FirstUUIDField(),
|
||||
"musicbrainz_albumid": FirstUUIDField(),
|
||||
"musicbrainz_recordingid": FirstUUIDField(),
|
||||
"musicbrainz_albumartistid": FirstUUIDField(),
|
||||
}
|
||||
VALIDATION = {}
|
||||
|
||||
CONF = {
|
||||
"OggOpus": {
|
||||
"getter": lambda f, k: f[k][0],
|
||||
"fields": {
|
||||
"track_number": {
|
||||
"field": "TRACKNUMBER",
|
||||
"to_application": convert_position,
|
||||
},
|
||||
"disc_number": {"field": "DISCNUMBER", "to_application": convert_position},
|
||||
"position": {"field": "TRACKNUMBER"},
|
||||
"disc_number": {"field": "DISCNUMBER"},
|
||||
"title": {},
|
||||
"artist": {},
|
||||
"album_artist": {
|
||||
"field": "albumartist",
|
||||
"to_application": split_and_return_first(";"),
|
||||
},
|
||||
"album_artist": {"field": "albumartist"},
|
||||
"album": {},
|
||||
"date": {"field": "date", "to_application": get_date},
|
||||
"date": {"field": "date"},
|
||||
"musicbrainz_albumid": {},
|
||||
"musicbrainz_artistid": {},
|
||||
"musicbrainz_albumartistid": {},
|
||||
"musicbrainz_recordingid": {"field": "musicbrainz_trackid"},
|
||||
"mbid": {"field": "musicbrainz_trackid"},
|
||||
"license": {},
|
||||
"copyright": {},
|
||||
},
|
||||
|
@ -208,23 +147,17 @@ CONF = {
|
|||
"OggVorbis": {
|
||||
"getter": lambda f, k: f[k][0],
|
||||
"fields": {
|
||||
"track_number": {
|
||||
"field": "TRACKNUMBER",
|
||||
"to_application": convert_position,
|
||||
},
|
||||
"disc_number": {"field": "DISCNUMBER", "to_application": convert_position},
|
||||
"position": {"field": "TRACKNUMBER"},
|
||||
"disc_number": {"field": "DISCNUMBER"},
|
||||
"title": {},
|
||||
"artist": {},
|
||||
"album_artist": {
|
||||
"field": "albumartist",
|
||||
"to_application": split_and_return_first(";"),
|
||||
},
|
||||
"album_artist": {"field": "albumartist"},
|
||||
"album": {},
|
||||
"date": {"field": "date", "to_application": get_date},
|
||||
"date": {"field": "date"},
|
||||
"musicbrainz_albumid": {},
|
||||
"musicbrainz_artistid": {},
|
||||
"musicbrainz_albumartistid": {},
|
||||
"musicbrainz_recordingid": {"field": "musicbrainz_trackid"},
|
||||
"mbid": {"field": "musicbrainz_trackid"},
|
||||
"license": {},
|
||||
"copyright": {},
|
||||
"pictures": {
|
||||
|
@ -236,20 +169,17 @@ CONF = {
|
|||
"OggTheora": {
|
||||
"getter": lambda f, k: f[k][0],
|
||||
"fields": {
|
||||
"track_number": {
|
||||
"field": "TRACKNUMBER",
|
||||
"to_application": convert_position,
|
||||
},
|
||||
"disc_number": {"field": "DISCNUMBER", "to_application": convert_position},
|
||||
"position": {"field": "TRACKNUMBER"},
|
||||
"disc_number": {"field": "DISCNUMBER"},
|
||||
"title": {},
|
||||
"artist": {},
|
||||
"album_artist": {"field": "albumartist"},
|
||||
"album": {},
|
||||
"date": {"field": "date", "to_application": get_date},
|
||||
"date": {"field": "date"},
|
||||
"musicbrainz_albumid": {"field": "MusicBrainz Album Id"},
|
||||
"musicbrainz_artistid": {"field": "MusicBrainz Artist Id"},
|
||||
"musicbrainz_albumartistid": {"field": "MusicBrainz Album Artist Id"},
|
||||
"musicbrainz_recordingid": {"field": "MusicBrainz Track Id"},
|
||||
"mbid": {"field": "MusicBrainz Track Id"},
|
||||
"license": {},
|
||||
"copyright": {},
|
||||
},
|
||||
|
@ -258,20 +188,17 @@ CONF = {
|
|||
"getter": get_id3_tag,
|
||||
"clean_pictures": clean_id3_pictures,
|
||||
"fields": {
|
||||
"track_number": {"field": "TRCK", "to_application": convert_position},
|
||||
"disc_number": {"field": "TPOS", "to_application": convert_position},
|
||||
"position": {"field": "TRCK"},
|
||||
"disc_number": {"field": "TPOS"},
|
||||
"title": {"field": "TIT2"},
|
||||
"artist": {"field": "TPE1"},
|
||||
"album_artist": {"field": "TPE2"},
|
||||
"album": {"field": "TALB"},
|
||||
"date": {"field": "TDRC", "to_application": get_date},
|
||||
"date": {"field": "TDRC"},
|
||||
"musicbrainz_albumid": {"field": "MusicBrainz Album Id"},
|
||||
"musicbrainz_artistid": {"field": "MusicBrainz Artist Id"},
|
||||
"musicbrainz_albumartistid": {"field": "MusicBrainz Album Artist Id"},
|
||||
"musicbrainz_recordingid": {
|
||||
"field": "UFID",
|
||||
"getter": get_mp3_recording_id,
|
||||
},
|
||||
"mbid": {"field": "UFID", "getter": get_mp3_recording_id},
|
||||
"pictures": {},
|
||||
"license": {"field": "WCOP"},
|
||||
"copyright": {"field": "TCOP"},
|
||||
|
@ -281,20 +208,17 @@ CONF = {
|
|||
"getter": get_flac_tag,
|
||||
"clean_pictures": clean_flac_pictures,
|
||||
"fields": {
|
||||
"track_number": {
|
||||
"field": "tracknumber",
|
||||
"to_application": convert_position,
|
||||
},
|
||||
"disc_number": {"field": "discnumber", "to_application": convert_position},
|
||||
"position": {"field": "tracknumber"},
|
||||
"disc_number": {"field": "discnumber"},
|
||||
"title": {},
|
||||
"artist": {},
|
||||
"album_artist": {"field": "albumartist"},
|
||||
"album": {},
|
||||
"date": {"field": "date", "to_application": get_date},
|
||||
"date": {"field": "date"},
|
||||
"musicbrainz_albumid": {},
|
||||
"musicbrainz_artistid": {},
|
||||
"musicbrainz_albumartistid": {},
|
||||
"musicbrainz_recordingid": {"field": "musicbrainz_trackid"},
|
||||
"mbid": {"field": "musicbrainz_trackid"},
|
||||
"test": {},
|
||||
"pictures": {},
|
||||
"license": {},
|
||||
|
@ -304,7 +228,7 @@ CONF = {
|
|||
}
|
||||
|
||||
ALL_FIELDS = [
|
||||
"track_number",
|
||||
"position",
|
||||
"disc_number",
|
||||
"title",
|
||||
"artist",
|
||||
|
@ -314,13 +238,13 @@ ALL_FIELDS = [
|
|||
"musicbrainz_albumid",
|
||||
"musicbrainz_artistid",
|
||||
"musicbrainz_albumartistid",
|
||||
"musicbrainz_recordingid",
|
||||
"mbid",
|
||||
"license",
|
||||
"copyright",
|
||||
]
|
||||
|
||||
|
||||
class Metadata(object):
|
||||
class Metadata(Mapping):
|
||||
def __init__(self, filething, kind=mutagen.File):
|
||||
self._file = kind(filething)
|
||||
if self._file is None:
|
||||
|
@ -368,6 +292,21 @@ class Metadata(object):
|
|||
else:
|
||||
return self.fallback.get(key, default=default)
|
||||
|
||||
def all(self):
|
||||
"""
|
||||
Return a dict with all supported metadata fields, if they are available
|
||||
"""
|
||||
final = {}
|
||||
for field in self._conf["fields"]:
|
||||
if field in ["pictures"]:
|
||||
continue
|
||||
value = self.get(field, None)
|
||||
if value is None:
|
||||
continue
|
||||
final[field] = str(value)
|
||||
|
||||
return final
|
||||
|
||||
def _get_from_self(self, key, default=NODEFAULT):
|
||||
try:
|
||||
field_conf = self._conf["fields"][key]
|
||||
|
@ -390,25 +329,6 @@ class Metadata(object):
|
|||
v = field.to_python(v)
|
||||
return v
|
||||
|
||||
def all(self, ignore_parse_errors=True):
|
||||
"""
|
||||
Return a dict containing all metadata of the file
|
||||
"""
|
||||
|
||||
data = {}
|
||||
for field in ALL_FIELDS:
|
||||
try:
|
||||
data[field] = self.get(field, None)
|
||||
except (TagNotFound, forms.ValidationError):
|
||||
data[field] = None
|
||||
except ParseError as e:
|
||||
if not ignore_parse_errors:
|
||||
raise
|
||||
logger.warning("Unparsable field {}: {}".format(field, str(e)))
|
||||
data[field] = None
|
||||
|
||||
return data
|
||||
|
||||
def get_picture(self, *picture_types):
|
||||
if not picture_types:
|
||||
raise ValueError("You need to request at least one picture type")
|
||||
|
@ -430,3 +350,192 @@ class Metadata(object):
|
|||
for p in pictures:
|
||||
if p["type"] == ptype:
|
||||
return p
|
||||
|
||||
def __getitem__(self, key):
|
||||
return self.get(key)
|
||||
|
||||
def __len__(self):
|
||||
return 1
|
||||
|
||||
def __iter__(self):
|
||||
for field in self._conf["fields"]:
|
||||
yield field
|
||||
|
||||
|
||||
class ArtistField(serializers.Field):
|
||||
def __init__(self, *args, **kwargs):
|
||||
self.for_album = kwargs.pop("for_album", False)
|
||||
super().__init__(*args, **kwargs)
|
||||
|
||||
def get_value(self, data):
|
||||
if self.for_album:
|
||||
keys = [("names", "album_artist"), ("mbids", "musicbrainz_albumartistid")]
|
||||
else:
|
||||
keys = [("names", "artist"), ("mbids", "musicbrainz_artistid")]
|
||||
|
||||
final = {}
|
||||
for field, key in keys:
|
||||
final[field] = data.get(key, None)
|
||||
|
||||
return final
|
||||
|
||||
def to_internal_value(self, data):
|
||||
# we have multiple values that can be separated by various separators
|
||||
separators = [";"]
|
||||
# we can get a list like this if the file was tagged via MusicBrainz:
|
||||
# ae29aae4-abfb-4609-8f54-417b1f4d64cc; 3237b5a8-ae44-400c-aa6d-cea51f0b9074;
|
||||
raw_mbids = data["mbids"]
|
||||
used_separator = None
|
||||
mbids = [raw_mbids]
|
||||
if raw_mbids:
|
||||
if "/" in raw_mbids:
|
||||
# it's a featuring, we can't handle this now
|
||||
mbids = []
|
||||
else:
|
||||
for separator in separators:
|
||||
if separator in raw_mbids:
|
||||
used_separator = separator
|
||||
mbids = [m.strip() for m in raw_mbids.split(separator)]
|
||||
break
|
||||
|
||||
# now, we split on artist names, using the same separator as the one used
|
||||
# by mbids, if any
|
||||
if used_separator and mbids:
|
||||
names = [n.strip() for n in data["names"].split(used_separator)]
|
||||
else:
|
||||
names = [data["names"]]
|
||||
|
||||
final = []
|
||||
for i, name in enumerate(names):
|
||||
try:
|
||||
mbid = mbids[i]
|
||||
except IndexError:
|
||||
mbid = None
|
||||
artist = {"name": name, "mbid": mbid}
|
||||
final.append(artist)
|
||||
|
||||
field = serializers.ListField(child=ArtistSerializer(), min_length=1)
|
||||
|
||||
return field.to_internal_value(final)
|
||||
|
||||
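# A sketch of the splitting logic above, using the mbids from the comment and
# made-up artist names:
#
#   data = {
#       "names": "Artist A; Artist B",
#       "mbids": "ae29aae4-abfb-4609-8f54-417b1f4d64cc; 3237b5a8-ae44-400c-aa6d-cea51f0b9074",
#   }
#   # -> roughly [{"name": "Artist A", "mbid": "ae29aae4-..."}, {"name": "Artist B", "mbid": "3237b5a8-..."}]
#
# A "/" in the mbids (treated as a featuring) drops the mbids entirely, and any name
# without a matching mbid falls back to mbid=None.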
|
||||
class AlbumField(serializers.Field):
|
||||
def get_value(self, data):
|
||||
return data
|
||||
|
||||
def to_internal_value(self, data):
|
||||
try:
|
||||
title = data.get("album")
|
||||
except TagNotFound:
|
||||
raise serializers.ValidationError("Missing album tag")
|
||||
final = {
|
||||
"title": title,
|
||||
"release_date": data.get("date", None),
|
||||
"mbid": data.get("musicbrainz_albumid", None),
|
||||
}
|
||||
artists_field = ArtistField(for_album=True)
|
||||
payload = artists_field.get_value(data)
|
||||
try:
|
||||
artists = artists_field.to_internal_value(payload)
|
||||
except serializers.ValidationError as e:
|
||||
artists = []
|
||||
logger.debug("Ignoring validation error on album artists: %s", e)
|
||||
album_serializer = AlbumSerializer(data=final)
|
||||
album_serializer.is_valid(raise_exception=True)
|
||||
album_serializer.validated_data["artists"] = artists
|
||||
return album_serializer.validated_data
|
||||
|
||||
|
||||
class CoverDataField(serializers.Field):
|
||||
def get_value(self, data):
|
||||
return data
|
||||
|
||||
def to_internal_value(self, data):
|
||||
return data.get_picture("cover_front", "other")
|
||||
|
||||
|
||||
class PermissiveDateField(serializers.CharField):
|
||||
def to_internal_value(self, value):
|
||||
if not value:
|
||||
return None
|
||||
value = super().to_internal_value(str(value))
|
||||
ADDITIONAL_FORMATS = [
|
||||
"%Y-%d-%m %H:%M", # deezer date format
|
||||
"%Y-%W", # weird date format based on week number, see #718
|
||||
]
|
||||
|
||||
for date_format in ADDITIONAL_FORMATS:
|
||||
try:
|
||||
parsed = datetime.datetime.strptime(value, date_format)
|
||||
except ValueError:
|
||||
continue
|
||||
else:
|
||||
return datetime.date(parsed.year, parsed.month, parsed.day)
|
||||
|
||||
try:
|
||||
parsed = pendulum.parse(str(value))
|
||||
return datetime.date(parsed.year, parsed.month, parsed.day)
|
||||
except pendulum.exceptions.ParserError:
|
||||
pass
|
||||
|
||||
return None
|
||||
|
||||
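# A few illustrative inputs for PermissiveDateField (the dates are made up):
#
#   "2019-31-01 00:00"  -> datetime.date(2019, 1, 31)   # deezer-style %Y-%d-%m %H:%M
#   "2019-06-12"        -> datetime.date(2019, 6, 12)   # parsed by pendulum
#   "not a date"        -> None                         # unparsable values are dropped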
|
||||
class ArtistSerializer(serializers.Serializer):
|
||||
name = serializers.CharField()
|
||||
mbid = serializers.UUIDField(required=False, allow_null=True)
|
||||
|
||||
|
||||
class AlbumSerializer(serializers.Serializer):
|
||||
title = serializers.CharField()
|
||||
mbid = serializers.UUIDField(required=False, allow_null=True)
|
||||
release_date = PermissiveDateField(required=False, allow_null=True)
|
||||
|
||||
|
||||
class PositionField(serializers.CharField):
|
||||
def to_internal_value(self, v):
|
||||
v = super().to_internal_value(v)
|
||||
if not v:
|
||||
return v
|
||||
|
||||
try:
|
||||
return int(v)
|
||||
except ValueError:
|
||||
# maybe the position is of the form "1/4"
|
||||
pass
|
||||
|
||||
try:
|
||||
return int(v.split("/")[0])
|
||||
except (ValueError, AttributeError, IndexError):
|
||||
pass
|
||||
|
||||
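# PositionField accepts both plain numbers and "position/total" strings:
#
#   "7"     -> 7
#   "1/12"  -> 1
#   "junk"  -> None   # unparsable positions fall through to None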
|
||||
class TrackMetadataSerializer(serializers.Serializer):
|
||||
title = serializers.CharField()
|
||||
position = PositionField(allow_null=True, required=False)
|
||||
disc_number = PositionField(allow_null=True, required=False)
|
||||
copyright = serializers.CharField(allow_null=True, required=False)
|
||||
license = serializers.CharField(allow_null=True, required=False)
|
||||
mbid = serializers.UUIDField(allow_null=True, required=False)
|
||||
|
||||
album = AlbumField()
|
||||
artists = ArtistField()
|
||||
cover_data = CoverDataField()
|
||||
|
||||
|
||||
class FakeMetadata(Mapping):
|
||||
def __init__(self, data, picture=None):
|
||||
self.data = data
|
||||
self.picture = picture
|
||||
|
||||
def __getitem__(self, key):
|
||||
return self.data[key]
|
||||
|
||||
def __len__(self):
|
||||
return len(self.data)
|
||||
|
||||
def __iter__(self):
|
||||
yield from self.data
|
||||
|
||||
def get_picture(self, *args):
|
||||
return self.picture
|
||||
|
|
|
@ -0,0 +1,48 @@
|
|||
# Generated by Django 2.1.7 on 2019-04-09 09:33
|
||||
|
||||
from django.db import migrations, models
|
||||
import django.db.models.deletion
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
("federation", "0017_auto_20190130_0926"),
|
||||
("music", "0037_auto_20190103_1757"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AddField(
|
||||
model_name="artist",
|
||||
name="attributed_to",
|
||||
field=models.ForeignKey(
|
||||
blank=True,
|
||||
null=True,
|
||||
on_delete=django.db.models.deletion.SET_NULL,
|
||||
related_name="attributed_artists",
|
||||
to="federation.Actor",
|
||||
),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name="album",
|
||||
name="attributed_to",
|
||||
field=models.ForeignKey(
|
||||
blank=True,
|
||||
null=True,
|
||||
on_delete=django.db.models.deletion.SET_NULL,
|
||||
related_name="attributed_albums",
|
||||
to="federation.Actor",
|
||||
),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name="track",
|
||||
name="attributed_to",
|
||||
field=models.ForeignKey(
|
||||
blank=True,
|
||||
null=True,
|
||||
on_delete=django.db.models.deletion.SET_NULL,
|
||||
related_name="attributed_tracks",
|
||||
to="federation.Actor",
|
||||
),
|
||||
),
|
||||
]
|
|
@ -0,0 +1,31 @@
|
|||
# Generated by Django 2.1.7 on 2019-04-23 08:20
|
||||
|
||||
from django.db import migrations
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('music', '0038_attributed_to'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.RemoveField(
|
||||
model_name='lyrics',
|
||||
name='work',
|
||||
),
|
||||
migrations.RemoveField(
|
||||
model_name='work',
|
||||
name='from_activity',
|
||||
),
|
||||
migrations.RemoveField(
|
||||
model_name='track',
|
||||
name='work',
|
||||
),
|
||||
migrations.DeleteModel(
|
||||
name='Lyrics',
|
||||
),
|
||||
migrations.DeleteModel(
|
||||
name='Work',
|
||||
),
|
||||
]
|
|
@ -3,9 +3,9 @@ import logging
|
|||
import mimetypes
|
||||
import os
|
||||
import tempfile
|
||||
import urllib.parse
|
||||
import uuid
|
||||
|
||||
import markdown
|
||||
import pendulum
|
||||
import pydub
|
||||
from django.conf import settings
|
||||
|
@ -24,6 +24,7 @@ from versatileimagefield.image_warmer import VersatileImageFieldWarmer
|
|||
|
||||
from funkwhale_api import musicbrainz
|
||||
from funkwhale_api.common import fields
|
||||
from funkwhale_api.common import models as common_models
|
||||
from funkwhale_api.common import session
|
||||
from funkwhale_api.common import utils as common_utils
|
||||
from funkwhale_api.federation import models as federation_models
|
||||
|
@ -113,6 +114,18 @@ class APIModelMixin(models.Model):
|
|||
|
||||
return super().save(**kwargs)
|
||||
|
||||
@property
|
||||
def is_local(self):
|
||||
return federation_utils.is_local(self.fid)
|
||||
|
||||
@property
|
||||
def domain_name(self):
|
||||
if not self.fid:
|
||||
return
|
||||
|
||||
parsed = urllib.parse.urlparse(self.fid)
|
||||
return parsed.hostname
|
||||
|
||||
|
||||
class License(models.Model):
|
||||
code = models.CharField(primary_key=True, max_length=100)
|
||||
|
@ -141,7 +154,7 @@ class License(models.Model):
|
|||
logger.warning("%s do not match any registered license", self.code)
|
||||
|
||||
|
||||
class ArtistQuerySet(models.QuerySet):
|
||||
class ArtistQuerySet(common_models.LocalFromFidQuerySet, models.QuerySet):
|
||||
def with_albums_count(self):
|
||||
return self.annotate(_albums_count=models.Count("albums"))
|
||||
|
||||
|
@ -177,6 +190,16 @@ class Artist(APIModelMixin):
|
|||
"mbid": {"musicbrainz_field_name": "id"},
|
||||
"name": {"musicbrainz_field_name": "name"},
|
||||
}
|
||||
# Music entities are attributed to actors, to validate that updates occur
|
||||
# from an authorized account. On top of that, we consider the instance actor
|
||||
# can update anything under its own domain
|
||||
attributed_to = models.ForeignKey(
|
||||
"federation.Actor",
|
||||
null=True,
|
||||
blank=True,
|
||||
on_delete=models.SET_NULL,
|
||||
related_name="attributed_artists",
|
||||
)
|
||||
api = musicbrainz.api.artists
|
||||
objects = ArtistQuerySet.as_manager()
|
||||
|
||||
|
@ -215,7 +238,7 @@ def import_tracks(instance, cleaned_data, raw_data):
|
|||
importers.load(Track, track_cleaned_data, track_data, Track.import_hooks)
|
||||
|
||||
|
||||
class AlbumQuerySet(models.QuerySet):
|
||||
class AlbumQuerySet(common_models.LocalFromFidQuerySet, models.QuerySet):
|
||||
def with_tracks_count(self):
|
||||
return self.annotate(_tracks_count=models.Count("tracks"))
|
||||
|
||||
|
@ -253,6 +276,16 @@ class Album(APIModelMixin):
|
|||
TYPE_CHOICES = (("album", "Album"),)
|
||||
type = models.CharField(choices=TYPE_CHOICES, max_length=30, default="album")
|
||||
|
||||
# Music entities are attributed to actors, to validate that updates occur
|
||||
# from an authorized account. On top of that, we consider the instance actor
|
||||
# can update anything under its own domain
|
||||
attributed_to = models.ForeignKey(
|
||||
"federation.Actor",
|
||||
null=True,
|
||||
blank=True,
|
||||
on_delete=models.SET_NULL,
|
||||
related_name="attributed_albums",
|
||||
)
|
||||
api_includes = ["artist-credits", "recordings", "media", "release-groups"]
|
||||
api = musicbrainz.api.releases
|
||||
federation_namespace = "albums"
|
||||
|
@ -313,6 +346,16 @@ class Album(APIModelMixin):
|
|||
def __str__(self):
|
||||
return self.title
|
||||
|
||||
@property
|
||||
def cover_path(self):
|
||||
if not self.cover:
|
||||
return None
|
||||
try:
|
||||
return self.cover.path
|
||||
except NotImplementedError:
|
||||
# external storage
|
||||
return self.cover.name
|
||||
|
||||
@property
|
||||
def tags(self):
|
||||
t = []
|
||||
|
@ -345,78 +388,7 @@ def import_album(v):
|
|||
return a
|
||||
|
||||
|
||||
def link_recordings(instance, cleaned_data, raw_data):
|
||||
tracks = [r["target"] for r in raw_data["recording-relation-list"]]
|
||||
Track.objects.filter(mbid__in=tracks).update(work=instance)
|
||||
|
||||
|
||||
def import_lyrics(instance, cleaned_data, raw_data):
|
||||
try:
|
||||
url = [
|
||||
url_data
|
||||
for url_data in raw_data["url-relation-list"]
|
||||
if url_data["type"] == "lyrics"
|
||||
][0]["target"]
|
||||
except (IndexError, KeyError):
|
||||
return
|
||||
l, _ = Lyrics.objects.get_or_create(work=instance, url=url)
|
||||
|
||||
return l
|
||||
|
||||
|
||||
class Work(APIModelMixin):
|
||||
language = models.CharField(max_length=20)
|
||||
nature = models.CharField(max_length=50)
|
||||
title = models.CharField(max_length=255)
|
||||
|
||||
api = musicbrainz.api.works
|
||||
api_includes = ["url-rels", "recording-rels"]
|
||||
musicbrainz_model = "work"
|
||||
federation_namespace = "works"
|
||||
|
||||
musicbrainz_mapping = {
|
||||
"mbid": {"musicbrainz_field_name": "id"},
|
||||
"title": {"musicbrainz_field_name": "title"},
|
||||
"language": {"musicbrainz_field_name": "language"},
|
||||
"nature": {"musicbrainz_field_name": "type", "converter": lambda v: v.lower()},
|
||||
}
|
||||
import_hooks = [import_lyrics, link_recordings]
|
||||
|
||||
def fetch_lyrics(self):
|
||||
lyric = self.lyrics.first()
|
||||
if lyric:
|
||||
return lyric
|
||||
data = self.api.get(self.mbid, includes=["url-rels"])["work"]
|
||||
lyric = import_lyrics(self, {}, data)
|
||||
|
||||
return lyric
|
||||
|
||||
def get_federation_id(self):
|
||||
if self.fid:
|
||||
return self.fid
|
||||
|
||||
return None
|
||||
|
||||
|
||||
class Lyrics(models.Model):
|
||||
uuid = models.UUIDField(unique=True, db_index=True, default=uuid.uuid4)
|
||||
work = models.ForeignKey(
|
||||
Work, related_name="lyrics", null=True, blank=True, on_delete=models.CASCADE
|
||||
)
|
||||
url = models.URLField(unique=True)
|
||||
content = models.TextField(null=True, blank=True)
|
||||
|
||||
@property
|
||||
def content_rendered(self):
|
||||
return markdown.markdown(
|
||||
self.content,
|
||||
safe_mode=True,
|
||||
enable_attributes=False,
|
||||
extensions=["markdown.extensions.nl2br"],
|
||||
)
|
||||
|
||||
|
||||
class TrackQuerySet(models.QuerySet):
|
||||
class TrackQuerySet(common_models.LocalFromFidQuerySet, models.QuerySet):
|
||||
def for_nested_serialization(self):
|
||||
return self.select_related().select_related("album__artist", "artist")
|
||||
|
||||
|
@ -465,9 +437,6 @@ class Track(APIModelMixin):
|
|||
album = models.ForeignKey(
|
||||
Album, related_name="tracks", null=True, blank=True, on_delete=models.CASCADE
|
||||
)
|
||||
work = models.ForeignKey(
|
||||
Work, related_name="tracks", null=True, blank=True, on_delete=models.CASCADE
|
||||
)
|
||||
license = models.ForeignKey(
|
||||
License,
|
||||
null=True,
|
||||
|
@ -475,11 +444,21 @@ class Track(APIModelMixin):
|
|||
on_delete=models.DO_NOTHING,
|
||||
related_name="tracks",
|
||||
)
|
||||
# Music entities are attributed to actors, to validate that updates occur
|
||||
# from an authorized account. On top of that, we consider the instance actor
|
||||
# can update anything under its own domain
|
||||
attributed_to = models.ForeignKey(
|
||||
"federation.Actor",
|
||||
null=True,
|
||||
blank=True,
|
||||
on_delete=models.SET_NULL,
|
||||
related_name="attributed_tracks",
|
||||
)
|
||||
copyright = models.CharField(max_length=500, null=True, blank=True)
|
||||
federation_namespace = "tracks"
|
||||
musicbrainz_model = "recording"
|
||||
api = musicbrainz.api.recordings
|
||||
api_includes = ["artist-credits", "releases", "media", "tags", "work-rels"]
|
||||
api_includes = ["artist-credits", "releases", "media", "tags"]
|
||||
musicbrainz_mapping = {
|
||||
"mbid": {"musicbrainz_field_name": "id"},
|
||||
"title": {"musicbrainz_field_name": "title"},
|
||||
|
@ -508,20 +487,6 @@ class Track(APIModelMixin):
|
|||
self.artist = self.album.artist
|
||||
super().save(**kwargs)
|
||||
|
||||
def get_work(self):
|
||||
if self.work:
|
||||
return self.work
|
||||
data = self.api.get(self.mbid, includes=["work-rels"])
|
||||
try:
|
||||
work_data = data["recording"]["work-relation-list"][0]["work"]
|
||||
except (IndexError, KeyError):
|
||||
return
|
||||
work, _ = Work.get_or_create_from_api(mbid=work_data["id"])
|
||||
return work
|
||||
|
||||
def get_lyrics_url(self):
|
||||
return reverse("api:v1:tracks-lyrics", kwargs={"pk": self.pk})
|
||||
|
||||
@property
|
||||
def full_name(self):
|
||||
try:
|
||||
|
@ -605,7 +570,7 @@ class Track(APIModelMixin):
|
|||
return licenses.LICENSES_BY_ID.get(self.license_id)
|
||||
|
||||
|
||||
class UploadQuerySet(models.QuerySet):
|
||||
class UploadQuerySet(common_models.NullsLastQuerySet):
|
||||
def playable_by(self, actor, include=True):
|
||||
libraries = Library.objects.viewable_by(actor)
|
||||
|
||||
|
@ -677,12 +642,12 @@ class Upload(models.Model):
|
|||
|
||||
# metadata from federation
|
||||
metadata = JSONField(
|
||||
default=empty_dict, max_length=50000, encoder=DjangoJSONEncoder
|
||||
default=empty_dict, max_length=50000, encoder=DjangoJSONEncoder, blank=True
|
||||
)
|
||||
import_date = models.DateTimeField(null=True, blank=True)
|
||||
# optional metadata provided during import
|
||||
import_metadata = JSONField(
|
||||
default=empty_dict, max_length=50000, encoder=DjangoJSONEncoder
|
||||
default=empty_dict, max_length=50000, encoder=DjangoJSONEncoder, blank=True
|
||||
)
|
||||
# status / error details for the import
|
||||
import_status = models.CharField(
|
||||
|
@ -694,20 +659,32 @@ class Upload(models.Model):
|
|||
|
||||
# optional metadata about import results (error messages, etc.)
|
||||
import_details = JSONField(
|
||||
default=empty_dict, max_length=50000, encoder=DjangoJSONEncoder
|
||||
default=empty_dict, max_length=50000, encoder=DjangoJSONEncoder, blank=True
|
||||
)
|
||||
from_activity = models.ForeignKey(
|
||||
"federation.Activity", null=True, on_delete=models.SET_NULL
|
||||
"federation.Activity", null=True, on_delete=models.SET_NULL, blank=True
|
||||
)
|
||||
|
||||
objects = UploadQuerySet.as_manager()
|
||||
|
||||
def download_audio_from_remote(self, user):
|
||||
@property
|
||||
def is_local(self):
|
||||
return federation_utils.is_local(self.fid)
|
||||
|
||||
@property
|
||||
def domain_name(self):
|
||||
if not self.fid:
|
||||
return
|
||||
|
||||
parsed = urllib.parse.urlparse(self.fid)
|
||||
return parsed.hostname
|
||||
|
||||
def download_audio_from_remote(self, actor):
|
||||
from funkwhale_api.common import session
|
||||
from funkwhale_api.federation import signing
|
||||
|
||||
if user.is_authenticated and user.actor:
|
||||
auth = signing.get_auth(user.actor.private_key, user.actor.private_key_id)
|
||||
if actor:
|
||||
auth = signing.get_auth(actor.private_key, actor.private_key_id)
|
||||
else:
|
||||
auth = None
|
||||
|
||||
|
@ -812,23 +789,35 @@ class Upload(models.Model):
|
|||
def listen_url(self):
|
||||
return self.track.listen_url + "?upload={}".format(self.uuid)
|
||||
|
||||
def get_transcoded_version(self, format):
|
||||
mimetype = utils.EXTENSION_TO_MIMETYPE[format]
|
||||
existing_versions = list(self.versions.filter(mimetype=mimetype))
|
||||
def get_transcoded_version(self, format, max_bitrate=None):
|
||||
if format:
|
||||
mimetype = utils.EXTENSION_TO_MIMETYPE[format]
|
||||
else:
|
||||
mimetype = self.mimetype or "audio/mpeg"
|
||||
format = utils.MIMETYPE_TO_EXTENSION[mimetype]
|
||||
|
||||
existing_versions = self.versions.filter(mimetype=mimetype)
|
||||
if max_bitrate is not None:
|
||||
# we don't want to transcode if a 320kbps version is available
|
||||
# and we're requesting 300kbps
|
||||
acceptable_max_bitrate = max_bitrate * 1.2
|
||||
acceptable_min_bitrate = max_bitrate * 0.8
|
||||
existing_versions = existing_versions.filter(
|
||||
bitrate__gte=acceptable_min_bitrate, bitrate__lte=acceptable_max_bitrate
|
||||
).order_by("-bitrate")
|
||||
if existing_versions:
|
||||
# we found an existing version, no need to transcode again
|
||||
return existing_versions[0]
|
||||
|
||||
return self.create_transcoded_version(mimetype, format)
|
||||
return self.create_transcoded_version(mimetype, format, bitrate=max_bitrate)
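A minimal sketch of the tolerance window used above, assuming a requested max_bitrate of 192kbps (all values in bits per second)::

    max_bitrate = 192000
    acceptable_min_bitrate = max_bitrate * 0.8  # 153600, so an existing 160kbps version is reused
    acceptable_max_bitrate = max_bitrate * 1.2  # 230400, so an existing 320kbps version is not
    # only versions whose bitrate falls inside [153600, 230400] skip a new transcode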
|
||||
|
||||
@transaction.atomic
|
||||
def create_transcoded_version(self, mimetype, format):
|
||||
def create_transcoded_version(self, mimetype, format, bitrate):
|
||||
# we create the version with an empty file, then
|
||||
# we'll write to it
|
||||
f = ContentFile(b"")
|
||||
version = self.versions.create(
|
||||
mimetype=mimetype, bitrate=self.bitrate or 128000, size=0
|
||||
)
|
||||
bitrate = min(bitrate or 320000, self.bitrate or 320000)
|
||||
version = self.versions.create(mimetype=mimetype, bitrate=bitrate, size=0)
|
||||
# we keep the same name, but we update the extension
|
||||
new_name = os.path.splitext(os.path.basename(self.audio_file.name))[
|
||||
0
|
||||
|
@ -838,6 +827,7 @@ class Upload(models.Model):
|
|||
audio=self.get_audio_segment(),
|
||||
output=version.audio_file,
|
||||
output_format=utils.MIMETYPE_TO_EXTENSION[mimetype],
|
||||
bitrate=str(bitrate),
|
||||
)
|
||||
version.size = version.audio_file.size
|
||||
version.save(update_fields=["size"])
|
||||
|
@ -850,6 +840,16 @@ class Upload(models.Model):
|
|||
return
|
||||
return self.source.lstrip("file://")
|
||||
|
||||
@property
|
||||
def audio_file_path(self):
|
||||
if not self.audio_file:
|
||||
return None
|
||||
try:
|
||||
return self.audio_file.path
|
||||
except NotImplementedError:
|
||||
# external storage
|
||||
return self.audio_file.name
|
||||
|
||||
|
||||
MIMETYPE_CHOICES = [(mt, ext) for ext, mt in utils.AUDIO_EXTENSIONS_AND_MIMETYPE]
|
||||
|
||||
|
@ -872,6 +872,16 @@ class UploadVersion(models.Model):
|
|||
def filename(self):
|
||||
return self.upload.filename
|
||||
|
||||
@property
|
||||
def audio_file_path(self):
|
||||
if not self.audio_file:
|
||||
return None
|
||||
try:
|
||||
return self.audio_file.path
|
||||
except NotImplementedError:
|
||||
# external storage
|
||||
return self.audio_file.name
|
||||
|
||||
|
||||
IMPORT_STATUS_CHOICES = (
|
||||
("pending", "Pending"),
|
||||
|
|
|
@ -0,0 +1,62 @@
|
|||
from funkwhale_api.common import mutations
|
||||
from funkwhale_api.federation import routes
|
||||
|
||||
from . import models
|
||||
|
||||
|
||||
def can_suggest(obj, actor):
|
||||
return obj.is_local
|
||||
|
||||
|
||||
def can_approve(obj, actor):
|
||||
return obj.is_local and actor.user and actor.user.get_permissions()["library"]
|
||||
|
||||
|
||||
@mutations.registry.connect(
|
||||
"update",
|
||||
models.Track,
|
||||
perm_checkers={"suggest": can_suggest, "approve": can_approve},
|
||||
)
|
||||
class TrackMutationSerializer(mutations.UpdateMutationSerializer):
|
||||
serialized_relations = {"license": "code"}
|
||||
|
||||
class Meta:
|
||||
model = models.Track
|
||||
fields = ["license", "title", "position", "copyright"]
|
||||
|
||||
def post_apply(self, obj, validated_data):
|
||||
routes.outbox.dispatch(
|
||||
{"type": "Update", "object": {"type": "Track"}}, context={"track": obj}
|
||||
)
|
||||
|
||||
|
||||
@mutations.registry.connect(
|
||||
"update",
|
||||
models.Artist,
|
||||
perm_checkers={"suggest": can_suggest, "approve": can_approve},
|
||||
)
|
||||
class ArtistMutationSerializer(mutations.UpdateMutationSerializer):
|
||||
class Meta:
|
||||
model = models.Artist
|
||||
fields = ["name"]
|
||||
|
||||
def post_apply(self, obj, validated_data):
|
||||
routes.outbox.dispatch(
|
||||
{"type": "Update", "object": {"type": "Artist"}}, context={"artist": obj}
|
||||
)
|
||||
|
||||
|
||||
@mutations.registry.connect(
|
||||
"update",
|
||||
models.Album,
|
||||
perm_checkers={"suggest": can_suggest, "approve": can_approve},
|
||||
)
|
||||
class AlbumMutationSerializer(mutations.UpdateMutationSerializer):
|
||||
class Meta:
|
||||
model = models.Album
|
||||
fields = ["title", "release_date"]
|
||||
|
||||
def post_apply(self, obj, validated_data):
|
||||
routes.outbox.dispatch(
|
||||
{"type": "Update", "object": {"type": "Album"}}, context={"album": obj}
|
||||
)
|
|
@ -43,6 +43,7 @@ class ArtistAlbumSerializer(serializers.ModelSerializer):
|
|||
model = models.Album
|
||||
fields = (
|
||||
"id",
|
||||
"fid",
|
||||
"mbid",
|
||||
"title",
|
||||
"artist",
|
||||
|
@ -51,6 +52,7 @@ class ArtistAlbumSerializer(serializers.ModelSerializer):
|
|||
"creation_date",
|
||||
"tracks_count",
|
||||
"is_playable",
|
||||
"is_local",
|
||||
)
|
||||
|
||||
def get_tracks_count(self, o):
|
||||
|
@ -68,13 +70,13 @@ class ArtistWithAlbumsSerializer(serializers.ModelSerializer):
|
|||
|
||||
class Meta:
|
||||
model = models.Artist
|
||||
fields = ("id", "mbid", "name", "creation_date", "albums")
|
||||
fields = ("id", "fid", "mbid", "name", "creation_date", "albums", "is_local")
|
||||
|
||||
|
||||
class ArtistSimpleSerializer(serializers.ModelSerializer):
|
||||
class Meta:
|
||||
model = models.Artist
|
||||
fields = ("id", "mbid", "name", "creation_date")
|
||||
fields = ("id", "fid", "mbid", "name", "creation_date", "is_local")
|
||||
|
||||
|
||||
class AlbumTrackSerializer(serializers.ModelSerializer):
|
||||
|
@ -87,6 +89,7 @@ class AlbumTrackSerializer(serializers.ModelSerializer):
|
|||
model = models.Track
|
||||
fields = (
|
||||
"id",
|
||||
"fid",
|
||||
"mbid",
|
||||
"title",
|
||||
"album",
|
||||
|
@ -99,6 +102,7 @@ class AlbumTrackSerializer(serializers.ModelSerializer):
|
|||
"duration",
|
||||
"copyright",
|
||||
"license",
|
||||
"is_local",
|
||||
)
|
||||
|
||||
def get_uploads(self, obj):
|
||||
|
@ -125,6 +129,7 @@ class AlbumSerializer(serializers.ModelSerializer):
|
|||
model = models.Album
|
||||
fields = (
|
||||
"id",
|
||||
"fid",
|
||||
"mbid",
|
||||
"title",
|
||||
"artist",
|
||||
|
@ -133,6 +138,7 @@ class AlbumSerializer(serializers.ModelSerializer):
|
|||
"cover",
|
||||
"creation_date",
|
||||
"is_playable",
|
||||
"is_local",
|
||||
)
|
||||
|
||||
def get_tracks(self, o):
|
||||
|
@ -156,12 +162,14 @@ class TrackAlbumSerializer(serializers.ModelSerializer):
|
|||
model = models.Album
|
||||
fields = (
|
||||
"id",
|
||||
"fid",
|
||||
"mbid",
|
||||
"title",
|
||||
"artist",
|
||||
"release_date",
|
||||
"cover",
|
||||
"creation_date",
|
||||
"is_local",
|
||||
)
|
||||
|
||||
|
||||
|
@ -182,7 +190,6 @@ class TrackUploadSerializer(serializers.ModelSerializer):
|
|||
class TrackSerializer(serializers.ModelSerializer):
|
||||
artist = ArtistSimpleSerializer(read_only=True)
|
||||
album = TrackAlbumSerializer(read_only=True)
|
||||
lyrics = serializers.SerializerMethodField()
|
||||
uploads = serializers.SerializerMethodField()
|
||||
listen_url = serializers.SerializerMethodField()
|
||||
|
||||
|
@ -190,6 +197,7 @@ class TrackSerializer(serializers.ModelSerializer):
|
|||
model = models.Track
|
||||
fields = (
|
||||
"id",
|
||||
"fid",
|
||||
"mbid",
|
||||
"title",
|
||||
"album",
|
||||
|
@ -197,16 +205,13 @@ class TrackSerializer(serializers.ModelSerializer):
|
|||
"creation_date",
|
||||
"position",
|
||||
"disc_number",
|
||||
"lyrics",
|
||||
"uploads",
|
||||
"listen_url",
|
||||
"copyright",
|
||||
"license",
|
||||
"is_local",
|
||||
)
|
||||
|
||||
def get_lyrics(self, obj):
|
||||
return obj.get_lyrics_url()
|
||||
|
||||
def get_listen_url(self, obj):
|
||||
return obj.listen_url
|
||||
|
||||
|
@ -367,12 +372,6 @@ class SimpleAlbumSerializer(serializers.ModelSerializer):
|
|||
fields = ("id", "mbid", "title", "release_date", "cover")
|
||||
|
||||
|
||||
class LyricsSerializer(serializers.ModelSerializer):
|
||||
class Meta:
|
||||
model = models.Lyrics
|
||||
fields = ("id", "work", "content", "content_rendered")
|
||||
|
||||
|
||||
class TrackActivitySerializer(activity_serializers.ModelSerializer):
|
||||
type = serializers.SerializerMethodField()
|
||||
name = serializers.CharField(source="title")
|
||||
|
@ -387,6 +386,10 @@ class TrackActivitySerializer(activity_serializers.ModelSerializer):
|
|||
return "Audio"
|
||||
|
||||
|
||||
def get_embed_url(type, id):
|
||||
return settings.FUNKWHALE_EMBED_URL + "?type={}&id={}".format(type, id)
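For illustration, with a hypothetical FUNKWHALE_EMBED_URL (the real value comes from the instance settings)::

    FUNKWHALE_EMBED_URL = "https://music.example.org/front/embed.html"  # assumed value
    FUNKWHALE_EMBED_URL + "?type={}&id={}".format("track", 42)
    # -> "https://music.example.org/front/embed.html?type=track&id=42"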
|
||||
|
||||
|
||||
class OembedSerializer(serializers.Serializer):
|
||||
format = serializers.ChoiceField(choices=["json"])
|
||||
url = serializers.URLField()
|
||||
|
@ -466,6 +469,36 @@ class OembedSerializer(serializers.Serializer):
|
|||
"library_artist", kwargs={"pk": album.artist.pk}
|
||||
)
|
||||
)
|
||||
elif match.url_name == "library_artist":
|
||||
qs = models.Artist.objects.filter(pk=int(match.kwargs["pk"]))
|
||||
try:
|
||||
artist = qs.get()
|
||||
except models.Artist.DoesNotExist:
|
||||
raise serializers.ValidationError(
|
||||
"No artist matching id {}".format(match.kwargs["pk"])
|
||||
)
|
||||
embed_type = "artist"
|
||||
embed_id = artist.pk
|
||||
album = (
|
||||
artist.albums.filter(cover__isnull=False)
|
||||
.exclude(cover="")
|
||||
.order_by("-id")
|
||||
.first()
|
||||
)
|
||||
|
||||
if album and album.cover:
|
||||
data["thumbnail_url"] = federation_utils.full_url(
|
||||
album.cover.crop["400x400"].url
|
||||
)
|
||||
data["thumbnail_width"] = 400
|
||||
data["thumbnail_height"] = 400
|
||||
data["title"] = artist.name
|
||||
data["description"] = artist.name
|
||||
data["author_name"] = artist.name
|
||||
data["height"] = 400
|
||||
data["author_url"] = federation_utils.full_url(
|
||||
common_utils.spa_reverse("library_artist", kwargs={"pk": artist.pk})
|
||||
)
|
||||
else:
|
||||
raise serializers.ValidationError(
|
||||
"Unsupported url: {}".format(validated_data["url"])
|
||||
|
@ -473,10 +506,7 @@ class OembedSerializer(serializers.Serializer):
|
|||
data[
|
||||
"html"
|
||||
] = '<iframe width="{}" height="{}" scrolling="no" frameborder="no" src="{}"></iframe>'.format(
|
||||
data["width"],
|
||||
data["height"],
|
||||
settings.FUNKWHALE_EMBED_URL
|
||||
+ "?type={}&id={}".format(embed_type, embed_id),
|
||||
data["width"], data["height"], get_embed_url(embed_type, embed_id)
|
||||
)
|
||||
return data
|
||||
|
||||
|
|
|
@ -2,10 +2,25 @@ import urllib.parse
|
|||
|
||||
from django.conf import settings
|
||||
from django.urls import reverse
|
||||
from django.db.models import Q
|
||||
|
||||
from funkwhale_api.common import utils
|
||||
|
||||
from . import models
|
||||
from . import serializers
|
||||
|
||||
|
||||
def get_twitter_card_metas(type, id):
|
||||
return [
|
||||
{"tag": "meta", "property": "twitter:card", "content": "player"},
|
||||
{
|
||||
"tag": "meta",
|
||||
"property": "twitter:player",
|
||||
"content": serializers.get_embed_url(type, id),
|
||||
},
|
||||
{"tag": "meta", "property": "twitter:player:width", "content": "600"},
|
||||
{"tag": "meta", "property": "twitter:player:height", "content": "400"},
|
||||
]
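A hedged sketch of how these descriptors could end up in the page head once rendered by the SPA meta handling (not shown in this diff), reusing the assumed embed URL above::

    metas = get_twitter_card_metas(type="track", id=42)
    # each dict maps to one tag, roughly:
    # <meta property="twitter:card" content="player">
    # <meta property="twitter:player" content="https://music.example.org/front/embed.html?type=track&id=42">
    # <meta property="twitter:player:width" content="600">
    # <meta property="twitter:player:height" content="400">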
|
||||
|
||||
|
||||
def library_track(request, pk):
|
||||
|
@ -72,6 +87,8 @@ def library_track(request, pk):
|
|||
),
|
||||
}
|
||||
)
|
||||
# twitter player is also supported in various software
|
||||
metas += get_twitter_card_metas(type="track", id=obj.pk)
|
||||
return metas
|
||||
|
||||
|
||||
|
@ -131,6 +148,8 @@ def library_album(request, pk):
|
|||
),
|
||||
}
|
||||
)
|
||||
# twitter player is also supported in various software
|
||||
metas += get_twitter_card_metas(type="album", id=obj.pk)
|
||||
return metas
|
||||
|
||||
|
||||
|
@ -165,4 +184,22 @@ def library_artist(request, pk):
|
|||
}
|
||||
)
|
||||
|
||||
if (
|
||||
models.Upload.objects.filter(Q(track__artist=obj) | Q(track__album__artist=obj))
|
||||
.playable_by(None)
|
||||
.exists()
|
||||
):
|
||||
metas.append(
|
||||
{
|
||||
"tag": "link",
|
||||
"rel": "alternate",
|
||||
"type": "application/json+oembed",
|
||||
"href": (
|
||||
utils.join_url(settings.FUNKWHALE_URL, reverse("api:v1:oembed"))
|
||||
+ "?format=json&url={}".format(urllib.parse.quote_plus(artist_url))
|
||||
),
|
||||
}
|
||||
)
|
||||
# twitter player is also supported in various software
|
||||
metas += get_twitter_card_metas(type="artist", id=obj.pk)
|
||||
return metas
|
||||
|
|
|
@ -17,7 +17,6 @@ from funkwhale_api.federation import library as lb
|
|||
from funkwhale_api.taskapp import celery
|
||||
|
||||
from . import licenses
|
||||
from . import lyrics as lyrics_utils
|
||||
from . import models
|
||||
from . import metadata
|
||||
from . import signals
|
||||
|
@ -70,16 +69,6 @@ def get_cover_from_fs(dir_path):
|
|||
return {"mimetype": m, "content": c.read()}
|
||||
|
||||
|
||||
@celery.app.task(name="Lyrics.fetch_content")
|
||||
@celery.require_instance(models.Lyrics, "lyrics")
|
||||
def fetch_content(lyrics):
|
||||
html = lyrics_utils._get_html(lyrics.url)
|
||||
content = lyrics_utils.extract_content(html)
|
||||
cleaned_content = lyrics_utils.clean_content(content)
|
||||
lyrics.content = cleaned_content
|
||||
lyrics.save(update_fields=["content"])
|
||||
|
||||
|
||||
@celery.app.task(name="music.start_library_scan")
|
||||
@celery.require_instance(
|
||||
models.LibraryScan.objects.select_related().filter(status="pending"), "library_scan"
|
||||
|
@ -151,10 +140,11 @@ class UploadImportError(ValueError):
|
|||
super().__init__(code)
|
||||
|
||||
|
||||
def fail_import(upload, error_code):
|
||||
def fail_import(upload, error_code, detail=None, **fields):
|
||||
old_status = upload.import_status
|
||||
upload.import_status = "errored"
|
||||
upload.import_details = {"error_code": error_code}
|
||||
upload.import_details = {"error_code": error_code, "detail": detail}
|
||||
upload.import_details.update(fields)
|
||||
upload.import_date = timezone.now()
|
||||
upload.save(update_fields=["import_details", "import_status", "import_date"])
|
||||
|
||||
|
@ -182,20 +172,32 @@ def process_upload(upload):
|
|||
old_status = upload.import_status
|
||||
audio_file = upload.get_audio_file()
|
||||
additional_data = {}
|
||||
|
||||
m = metadata.Metadata(audio_file)
|
||||
try:
|
||||
if not audio_file:
|
||||
# we can only rely on user provided data
|
||||
final_metadata = import_metadata
|
||||
else:
|
||||
# we use user provided data and data from the file itself
|
||||
m = metadata.Metadata(audio_file)
|
||||
file_metadata = m.all()
|
||||
final_metadata = collections.ChainMap(
|
||||
additional_data, import_metadata, file_metadata
|
||||
)
|
||||
additional_data["cover_data"] = m.get_picture("cover_front", "other")
|
||||
additional_data["upload_source"] = upload.source
|
||||
track = get_track_from_import_metadata(final_metadata)
|
||||
serializer = metadata.TrackMetadataSerializer(data=m)
|
||||
serializer.is_valid()
|
||||
except Exception:
|
||||
fail_import(upload, "unknown_error")
|
||||
raise
|
||||
if not serializer.is_valid():
|
||||
detail = serializer.errors
|
||||
try:
|
||||
metadata_dump = m.all()
|
||||
except Exception as e:
|
||||
logger.warn("Cannot dump metadata for file %s: %s", audio_file, str(e))
|
||||
return fail_import(
|
||||
upload, "invalid_metadata", detail=detail, file_metadata=metadata_dump
|
||||
)
|
||||
|
||||
final_metadata = collections.ChainMap(
|
||||
additional_data, serializer.validated_data, import_metadata
|
||||
)
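# ChainMap resolves lookups left to right: per-upload additional_data wins over
# metadata validated from the file, which wins over user-provided import_metadata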
|
||||
additional_data["upload_source"] = upload.source
|
||||
try:
|
||||
track = get_track_from_import_metadata(
|
||||
final_metadata, attributed_to=upload.library.actor
|
||||
)
|
||||
except UploadImportError as e:
|
||||
return fail_import(upload, e.code)
|
||||
except Exception:
|
||||
|
@ -271,48 +273,54 @@ def process_upload(upload):
|
|||
)
|
||||
|
||||
|
||||
def federation_audio_track_to_metadata(payload):
|
||||
def federation_audio_track_to_metadata(payload, references):
|
||||
"""
|
||||
Given a valid payload as returned by federation.serializers.TrackSerializer.validated_data,
|
||||
returns a correct metadata payload for use with get_track_from_import_metadata.
|
||||
"""
|
||||
musicbrainz_recordingid = payload.get("musicbrainzId")
|
||||
musicbrainz_artistid = payload["artists"][0].get("musicbrainzId")
|
||||
musicbrainz_albumartistid = payload["album"]["artists"][0].get("musicbrainzId")
|
||||
musicbrainz_albumid = payload["album"].get("musicbrainzId")
|
||||
|
||||
new_data = {
|
||||
"title": payload["name"],
|
||||
"album": payload["album"]["name"],
|
||||
"track_number": payload.get("position") or 1,
|
||||
"position": payload.get("position") or 1,
|
||||
"disc_number": payload.get("disc"),
|
||||
"artist": payload["artists"][0]["name"],
|
||||
"album_artist": payload["album"]["artists"][0]["name"],
|
||||
"date": payload["album"].get("released"),
|
||||
"license": payload.get("license"),
|
||||
"copyright": payload.get("copyright"),
|
||||
# musicbrainz
|
||||
"musicbrainz_recordingid": str(musicbrainz_recordingid)
|
||||
if musicbrainz_recordingid
|
||||
else None,
|
||||
"musicbrainz_artistid": str(musicbrainz_artistid)
|
||||
if musicbrainz_artistid
|
||||
else None,
|
||||
"musicbrainz_albumartistid": str(musicbrainz_albumartistid)
|
||||
if musicbrainz_albumartistid
|
||||
else None,
|
||||
"musicbrainz_albumid": str(musicbrainz_albumid)
|
||||
if musicbrainz_albumid
|
||||
"attributed_to": references.get(payload.get("attributedTo")),
|
||||
"mbid": str(payload.get("musicbrainzId"))
|
||||
if payload.get("musicbrainzId")
|
||||
else None,
|
||||
"album": {
|
||||
"title": payload["album"]["name"],
|
||||
"fdate": payload["album"]["published"],
|
||||
"fid": payload["album"]["id"],
|
||||
"attributed_to": references.get(payload["album"].get("attributedTo")),
|
||||
"mbid": str(payload["album"]["musicbrainzId"])
|
||||
if payload["album"].get("musicbrainzId")
|
||||
else None,
|
||||
"release_date": payload["album"].get("released"),
|
||||
"artists": [
|
||||
{
|
||||
"fid": a["id"],
|
||||
"name": a["name"],
|
||||
"fdate": a["published"],
|
||||
"attributed_to": references.get(a.get("attributedTo")),
|
||||
"mbid": str(a["musicbrainzId"]) if a.get("musicbrainzId") else None,
|
||||
}
|
||||
for a in payload["album"]["artists"]
|
||||
],
|
||||
},
|
||||
"artists": [
|
||||
{
|
||||
"fid": a["id"],
|
||||
"name": a["name"],
|
||||
"fdate": a["published"],
|
||||
"attributed_to": references.get(a.get("attributedTo")),
|
||||
"mbid": str(a["musicbrainzId"]) if a.get("musicbrainzId") else None,
|
||||
}
|
||||
for a in payload["artists"]
|
||||
],
|
||||
# federation
|
||||
"fid": payload["id"],
|
||||
"artist_fid": payload["artists"][0]["id"],
|
||||
"album_artist_fid": payload["album"]["artists"][0]["id"],
|
||||
"album_fid": payload["album"]["id"],
|
||||
"fdate": payload["published"],
|
||||
"album_fdate": payload["album"]["published"],
|
||||
"album_artist_fdate": payload["album"]["artists"][0]["published"],
|
||||
"artist_fdate": payload["artists"][0]["published"],
|
||||
}
|
||||
cover = payload["album"].get("cover")
|
||||
if cover:
|
||||
|
@ -380,8 +388,8 @@ def sort_candidates(candidates, important_fields):
|
|||
|
||||
|
||||
@transaction.atomic
|
||||
def get_track_from_import_metadata(data, update_cover=False):
|
||||
track = _get_track(data)
|
||||
def get_track_from_import_metadata(data, update_cover=False, attributed_to=None):
|
||||
track = _get_track(data, attributed_to=attributed_to)
|
||||
if update_cover and track and not track.album.cover:
|
||||
update_album_cover(
|
||||
track.album,
|
||||
|
@ -391,7 +399,7 @@ def get_track_from_import_metadata(data, update_cover=False):
|
|||
return track
|
||||
|
||||
|
||||
def _get_track(data):
|
||||
def _get_track(data, attributed_to=None):
|
||||
track_uuid = getter(data, "funkwhale", "track", "uuid")
|
||||
|
||||
if track_uuid:
|
||||
|
@ -405,8 +413,8 @@ def _get_track(data):
|
|||
return track
|
||||
|
||||
from_activity_id = data.get("from_activity_id", None)
|
||||
track_mbid = data.get("musicbrainz_recordingid", None)
|
||||
album_mbid = data.get("musicbrainz_albumid", None)
|
||||
track_mbid = data.get("mbid", None)
|
||||
album_mbid = getter(data, "album", "mbid")
|
||||
track_fid = getter(data, "fid")
|
||||
|
||||
query = None
|
||||
|
@ -428,12 +436,16 @@ def _get_track(data):
|
|||
pass
|
||||
|
||||
# get / create artist and album artist
|
||||
artist_mbid = data.get("musicbrainz_artistid", None)
|
||||
artist_fid = data.get("artist_fid", None)
|
||||
artist_name = data["artist"]
|
||||
query = Q(name__iexact=artist_name)
|
||||
artists = getter(data, "artists", default=[])
|
||||
artist = artists[0]
|
||||
artist_mbid = artist.get("mbid", None)
|
||||
artist_fid = artist.get("fid", None)
|
||||
artist_name = artist["name"]
|
||||
|
||||
if artist_mbid:
|
||||
query |= Q(mbid=artist_mbid)
|
||||
query = Q(mbid=artist_mbid)
|
||||
else:
|
||||
query = Q(name__iexact=artist_name)
|
||||
if artist_fid:
|
||||
query |= Q(fid=artist_fid)
|
||||
defaults = {
|
||||
|
@ -441,21 +453,24 @@ def _get_track(data):
|
|||
"mbid": artist_mbid,
|
||||
"fid": artist_fid,
|
||||
"from_activity_id": from_activity_id,
|
||||
"attributed_to": artist.get("attributed_to", attributed_to),
|
||||
}
|
||||
if data.get("artist_fdate"):
|
||||
defaults["creation_date"] = data.get("artist_fdate")
|
||||
if artist.get("fdate"):
|
||||
defaults["creation_date"] = artist.get("fdate")
|
||||
|
||||
artist = get_best_candidate_or_create(
|
||||
models.Artist, query, defaults=defaults, sort_fields=["mbid", "fid"]
|
||||
)[0]
|
||||
|
||||
album_artist_name = data.get("album_artist") or artist_name
|
||||
album_artists = getter(data, "album", "artists", default=artists) or artists
|
||||
album_artist = album_artists[0]
|
||||
album_artist_name = album_artist.get("name")
|
||||
if album_artist_name == artist_name:
|
||||
album_artist = artist
|
||||
else:
|
||||
query = Q(name__iexact=album_artist_name)
|
||||
album_artist_mbid = data.get("musicbrainz_albumartistid", None)
|
||||
album_artist_fid = data.get("album_artist_fid", None)
|
||||
album_artist_mbid = album_artist.get("mbid", None)
|
||||
album_artist_fid = album_artist.get("fid", None)
|
||||
if album_artist_mbid:
|
||||
query |= Q(mbid=album_artist_mbid)
|
||||
if album_artist_fid:
|
||||
|
@ -465,32 +480,38 @@ def _get_track(data):
|
|||
"mbid": album_artist_mbid,
|
||||
"fid": album_artist_fid,
|
||||
"from_activity_id": from_activity_id,
|
||||
"attributed_to": album_artist.get("attributed_to", attributed_to),
|
||||
}
|
||||
if data.get("album_artist_fdate"):
|
||||
defaults["creation_date"] = data.get("album_artist_fdate")
|
||||
if album_artist.get("fdate"):
|
||||
defaults["creation_date"] = album_artist.get("fdate")
|
||||
|
||||
album_artist = get_best_candidate_or_create(
|
||||
models.Artist, query, defaults=defaults, sort_fields=["mbid", "fid"]
|
||||
)[0]
|
||||
|
||||
# get / create album
|
||||
album_title = data["album"]
|
||||
album_fid = data.get("album_fid", None)
|
||||
query = Q(title__iexact=album_title, artist=album_artist)
|
||||
album = data["album"]
|
||||
album_title = album["title"]
|
||||
album_fid = album.get("fid", None)
|
||||
|
||||
if album_mbid:
|
||||
query |= Q(mbid=album_mbid)
|
||||
query = Q(mbid=album_mbid)
|
||||
else:
|
||||
query = Q(title__iexact=album_title, artist=album_artist)
|
||||
|
||||
if album_fid:
|
||||
query |= Q(fid=album_fid)
|
||||
defaults = {
|
||||
"title": album_title,
|
||||
"artist": album_artist,
|
||||
"mbid": album_mbid,
|
||||
"release_date": data.get("date"),
|
||||
"release_date": album.get("release_date"),
|
||||
"fid": album_fid,
|
||||
"from_activity_id": from_activity_id,
|
||||
"attributed_to": album.get("attributed_to", attributed_to),
|
||||
}
|
||||
if data.get("album_fdate"):
|
||||
defaults["creation_date"] = data.get("album_fdate")
|
||||
if album.get("fdate"):
|
||||
defaults["creation_date"] = album.get("fdate")
|
||||
|
||||
album = get_best_candidate_or_create(
|
||||
models.Album, query, defaults=defaults, sort_fields=["mbid", "fid"]
|
||||
|
@ -498,8 +519,8 @@ def _get_track(data):
|
|||
|
||||
# get / create track
|
||||
track_title = data["title"]
|
||||
track_number = data.get("track_number", 1)
|
||||
query = Q(title__iexact=track_title, artist=artist, album=album)
|
||||
position = data.get("position", 1)
|
||||
query = Q(title__iexact=track_title, artist=artist, album=album, position=position)
|
||||
if track_mbid:
|
||||
query |= Q(mbid=track_mbid)
|
||||
if track_fid:
|
||||
|
@ -509,10 +530,11 @@ def _get_track(data):
|
|||
"album": album,
|
||||
"mbid": track_mbid,
|
||||
"artist": artist,
|
||||
"position": track_number,
|
||||
"position": position,
|
||||
"disc_number": data.get("disc_number"),
|
||||
"fid": track_fid,
|
||||
"from_activity_id": from_activity_id,
|
||||
"attributed_to": data.get("attributed_to", attributed_to),
|
||||
"license": licenses.match(data.get("license"), data.get("copyright")),
|
||||
"copyright": data.get("copyright"),
|
||||
}
|
||||
|
@ -562,3 +584,46 @@ def clean_transcoding_cache():
|
|||
.order_by("id")
|
||||
)
|
||||
return candidates.delete()
|
||||
|
||||
|
||||
def get_prunable_tracks(
|
||||
exclude_favorites=True, exclude_playlists=True, exclude_listenings=True
|
||||
):
|
||||
"""
|
||||
Returns a list of tracks with no associated uploads,
|
||||
excluding the ones that were listened to, favorited, or included in playlists.
|
||||
"""
|
||||
|
||||
queryset = models.Track.objects.all()
|
||||
queryset = queryset.filter(uploads__isnull=True)
|
||||
if exclude_favorites:
|
||||
queryset = queryset.filter(track_favorites__isnull=True)
|
||||
if exclude_playlists:
|
||||
queryset = queryset.filter(playlist_tracks__isnull=True)
|
||||
if exclude_listenings:
|
||||
queryset = queryset.filter(listenings__isnull=True)
|
||||
|
||||
return queryset
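A hedged usage sketch, for instance from a shell or a pruning command (the call sites are hypothetical)::

    # tracks with no upload that were never listened to, favorited or playlisted
    get_prunable_tracks().count()

    # a more aggressive variant that also prunes favorited tracks
    get_prunable_tracks(exclude_favorites=False).delete()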
|
||||
|
||||
|
||||
def get_prunable_albums():
|
||||
return models.Album.objects.filter(tracks__isnull=True)
|
||||
|
||||
|
||||
def get_prunable_artists():
|
||||
return models.Artist.objects.filter(tracks__isnull=True, albums__isnull=True)
|
||||
|
||||
|
||||
def update_library_entity(obj, data):
|
||||
"""
|
||||
Given an obj and some updated fields, will persist the changes on the obj
|
||||
and also check if the entity needs to be aliased with existing objs (i.e.
|
||||
if an mbid was added on the obj and matches another entity with the same mbid)
|
||||
"""
|
||||
for key, value in data.items():
|
||||
setattr(obj, key, value)
|
||||
|
||||
# Todo: handle integrity error on unique fields (such as MBID)
|
||||
obj.save(update_fields=list(data.keys()))
|
||||
|
||||
return obj
|
||||
|
|
|
@ -39,6 +39,10 @@ AUDIO_EXTENSIONS_AND_MIMETYPE = [
|
|||
EXTENSION_TO_MIMETYPE = {ext: mt for ext, mt in AUDIO_EXTENSIONS_AND_MIMETYPE}
|
||||
MIMETYPE_TO_EXTENSION = {mt: ext for ext, mt in AUDIO_EXTENSIONS_AND_MIMETYPE}
|
||||
|
||||
SUPPORTED_EXTENSIONS = list(
|
||||
sorted(set([ext for ext, _ in AUDIO_EXTENSIONS_AND_MIMETYPE]))
|
||||
)
|
||||
|
||||
|
||||
def get_ext_from_type(mimetype):
|
||||
return MIMETYPE_TO_EXTENSION.get(mimetype)
|
||||
|
|
|
@ -8,19 +8,23 @@ from django.db.models.functions import Length
|
|||
from django.utils import timezone
|
||||
|
||||
from rest_framework import mixins
|
||||
from rest_framework import permissions
|
||||
from rest_framework import settings as rest_settings
|
||||
from rest_framework import views, viewsets
|
||||
from rest_framework.decorators import action
|
||||
from rest_framework.response import Response
|
||||
from taggit.models import Tag
|
||||
|
||||
from funkwhale_api.common import decorators as common_decorators
|
||||
from funkwhale_api.common import permissions as common_permissions
|
||||
from funkwhale_api.common import preferences
|
||||
from funkwhale_api.common import utils as common_utils
|
||||
from funkwhale_api.common import views as common_views
|
||||
from funkwhale_api.federation.authentication import SignatureAuthentication
|
||||
from funkwhale_api.federation import actors
|
||||
from funkwhale_api.federation import api_serializers as federation_api_serializers
|
||||
from funkwhale_api.federation import decorators as federation_decorators
|
||||
from funkwhale_api.federation import routes
|
||||
from funkwhale_api.users.oauth import permissions as oauth_permissions
|
||||
|
||||
from . import filters, licenses, models, serializers, tasks, utils
|
||||
|
||||
|
@ -58,13 +62,18 @@ class TagViewSetMixin(object):
|
|||
return queryset
|
||||
|
||||
|
||||
class ArtistViewSet(viewsets.ReadOnlyModelViewSet):
|
||||
class ArtistViewSet(common_views.SkipFilterForGetObject, viewsets.ReadOnlyModelViewSet):
|
||||
queryset = models.Artist.objects.all()
|
||||
serializer_class = serializers.ArtistWithAlbumsSerializer
|
||||
permission_classes = [common_permissions.ConditionalAuthentication]
|
||||
permission_classes = [oauth_permissions.ScopePermission]
|
||||
required_scope = "libraries"
|
||||
anonymous_policy = "setting"
|
||||
filterset_class = filters.ArtistFilter
|
||||
ordering_fields = ("id", "name", "creation_date")
|
||||
|
||||
fetches = federation_decorators.fetches_route()
|
||||
mutations = common_decorators.mutations_route(types=["update"])
|
||||
|
||||
def get_queryset(self):
|
||||
queryset = super().get_queryset()
|
||||
albums = models.Album.objects.with_tracks_count()
|
||||
|
@ -82,15 +91,20 @@ class ArtistViewSet(viewsets.ReadOnlyModelViewSet):
|
|||
)
|
||||
|
||||
|
||||
class AlbumViewSet(viewsets.ReadOnlyModelViewSet):
|
||||
class AlbumViewSet(common_views.SkipFilterForGetObject, viewsets.ReadOnlyModelViewSet):
|
||||
queryset = (
|
||||
models.Album.objects.all().order_by("artist", "release_date").select_related()
|
||||
)
|
||||
serializer_class = serializers.AlbumSerializer
|
||||
permission_classes = [common_permissions.ConditionalAuthentication]
|
||||
permission_classes = [oauth_permissions.ScopePermission]
|
||||
required_scope = "libraries"
|
||||
anonymous_policy = "setting"
|
||||
ordering_fields = ("creation_date", "release_date", "title")
|
||||
filterset_class = filters.AlbumFilter
|
||||
|
||||
fetches = federation_decorators.fetches_route()
|
||||
mutations = common_decorators.mutations_route(types=["update"])
|
||||
|
||||
def get_queryset(self):
|
||||
queryset = super().get_queryset()
|
||||
tracks = (
|
||||
|
@ -123,9 +137,11 @@ class LibraryViewSet(
|
|||
)
|
||||
serializer_class = serializers.LibraryForOwnerSerializer
|
||||
permission_classes = [
|
||||
permissions.IsAuthenticated,
|
||||
oauth_permissions.ScopePermission,
|
||||
common_permissions.OwnerPermission,
|
||||
]
|
||||
required_scope = "libraries"
|
||||
anonymous_policy = "setting"
|
||||
owner_field = "actor.user"
|
||||
owner_checks = ["read", "write"]
|
||||
|
||||
|
@ -166,22 +182,30 @@ class LibraryViewSet(
|
|||
return Response(serializer.data)
|
||||
|
||||
|
||||
class TrackViewSet(TagViewSetMixin, viewsets.ReadOnlyModelViewSet):
|
||||
class TrackViewSet(
|
||||
common_views.SkipFilterForGetObject, TagViewSetMixin, viewsets.ReadOnlyModelViewSet
|
||||
):
|
||||
"""
|
||||
A simple ViewSet for viewing tracks.
|
||||
"""
|
||||
|
||||
queryset = models.Track.objects.all().for_nested_serialization()
|
||||
serializer_class = serializers.TrackSerializer
|
||||
permission_classes = [common_permissions.ConditionalAuthentication]
|
||||
permission_classes = [oauth_permissions.ScopePermission]
|
||||
required_scope = "libraries"
|
||||
anonymous_policy = "setting"
|
||||
filterset_class = filters.TrackFilter
|
||||
ordering_fields = (
|
||||
"creation_date",
|
||||
"title",
|
||||
"album__release_date",
|
||||
"size",
|
||||
"position",
|
||||
"disc_number",
|
||||
"artist__name",
|
||||
)
|
||||
fetches = federation_decorators.fetches_route()
|
||||
mutations = common_decorators.mutations_route(types=["update"])
|
||||
|
||||
def get_queryset(self):
|
||||
queryset = super().get_queryset()
|
||||
|
@ -195,31 +219,6 @@ class TrackViewSet(TagViewSetMixin, viewsets.ReadOnlyModelViewSet):
|
|||
)
|
||||
return queryset
|
||||
|
||||
@action(methods=["get"], detail=True)
|
||||
@transaction.non_atomic_requests
|
||||
def lyrics(self, request, *args, **kwargs):
|
||||
try:
|
||||
track = models.Track.objects.get(pk=kwargs["pk"])
|
||||
except models.Track.DoesNotExist:
|
||||
return Response(status=404)
|
||||
|
||||
work = track.work
|
||||
if not work:
|
||||
work = track.get_work()
|
||||
|
||||
if not work:
|
||||
return Response({"error": "unavailable work "}, status=404)
|
||||
|
||||
lyrics = work.fetch_lyrics()
|
||||
try:
|
||||
if not lyrics.content:
|
||||
tasks.fetch_content(lyrics_id=lyrics.pk)
|
||||
lyrics.refresh_from_db()
|
||||
except AttributeError:
|
||||
return Response({"error": "unavailable lyrics"}, status=404)
|
||||
serializer = serializers.LyricsSerializer(lyrics)
|
||||
return Response(serializer.data)
|
||||
|
||||
libraries = action(methods=["get"], detail=True)(
|
||||
get_libraries(filter_uploads=lambda o, uploads: uploads.filter(track=o))
|
||||
)
|
||||
|
@ -241,6 +240,8 @@ def get_file_path(audio_file):
|
|||
"MUSIC_DIRECTORY_PATH to serve in-place imported files"
|
||||
)
|
||||
path = "/music" + audio_file.replace(prefix, "", 1)
|
||||
if path.startswith("http://") or path.startswith("https://"):
|
||||
return (settings.PROTECT_FILES_PATH + "/media/" + path).encode("utf-8")
|
||||
return (settings.PROTECT_FILES_PATH + path).encode("utf-8")
|
||||
if t == "apache2":
|
||||
try:
|
||||
|
@ -256,25 +257,35 @@ def get_file_path(audio_file):
|
|||
return path.encode("utf-8")
|
||||
|
||||
|
||||
def should_transcode(upload, format):
|
||||
def should_transcode(upload, format, max_bitrate=None):
|
||||
if not preferences.get("music__transcoding_enabled"):
|
||||
return False
|
||||
format_need_transcoding = True
|
||||
bitrate_need_transcoding = True
|
||||
if format is None:
|
||||
return False
|
||||
if format not in utils.EXTENSION_TO_MIMETYPE:
|
||||
format_need_transcoding = False
|
||||
elif format not in utils.EXTENSION_TO_MIMETYPE:
|
||||
# format should match supported formats
|
||||
return False
|
||||
if upload.mimetype is None:
|
||||
format_need_transcoding = False
|
||||
elif upload.mimetype is None:
|
||||
# upload should have a mimetype, otherwise we cannot transcode
|
||||
return False
|
||||
if upload.mimetype == utils.EXTENSION_TO_MIMETYPE[format]:
|
||||
format_need_transcoding = False
|
||||
elif upload.mimetype == utils.EXTENSION_TO_MIMETYPE[format]:
|
||||
# requested format should be different from the upload mimetype, otherwise
|
||||
# there is no need to transcode
|
||||
return False
|
||||
return True
|
||||
format_need_transcoding = False
|
||||
|
||||
if max_bitrate is None:
|
||||
bitrate_need_transcoding = False
|
||||
elif not upload.bitrate:
|
||||
bitrate_need_transcoding = False
|
||||
elif upload.bitrate <= max_bitrate:
|
||||
bitrate_need_transcoding = False
|
||||
|
||||
return format_need_transcoding or bitrate_need_transcoding
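A few hedged examples of the decision above, assuming transcoding is enabled in the instance preferences and a hypothetical mp3 upload (mimetype audio/mpeg, bitrate 320000)::

    should_transcode(upload, "mp3")                      # False: same format, no bitrate cap
    should_transcode(upload, "ogg")                      # True: requested format differs
    should_transcode(upload, "mp3", max_bitrate=128000)  # True: 320000 > 128000
    should_transcode(upload, "mp3", max_bitrate=320000)  # False: bitrate already within the cap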
|
||||
|
||||
|
||||
def handle_serve(upload, user, format=None):
|
||||
def handle_serve(upload, user, format=None, max_bitrate=None, proxy_media=True):
|
||||
f = upload
|
||||
# we update the accessed_date
|
||||
now = timezone.now()
|
||||
|
@ -295,7 +306,11 @@ def handle_serve(upload, user, format=None):
|
|||
# thus resulting in multiple downloads from the remote
|
||||
qs = f.__class__.objects.select_for_update()
|
||||
f = qs.get(pk=f.pk)
|
||||
f.download_audio_from_remote(user=user)
|
||||
if user.is_authenticated:
|
||||
actor = user.actor
|
||||
else:
|
||||
actor = actors.get_service_actor()
|
||||
f.download_audio_from_remote(actor=actor)
|
||||
data = f.get_audio_data()
|
||||
if data:
|
||||
f.duration = data["duration"]
|
||||
|
@ -307,13 +322,18 @@ def handle_serve(upload, user, format=None):
|
|||
file_path = get_file_path(f.source.replace("file://", "", 1))
|
||||
mt = f.mimetype
|
||||
|
||||
if should_transcode(f, format):
|
||||
transcoded_version = upload.get_transcoded_version(format)
|
||||
if should_transcode(f, format, max_bitrate=max_bitrate):
|
||||
transcoded_version = f.get_transcoded_version(format, max_bitrate=max_bitrate)
|
||||
transcoded_version.accessed_date = now
|
||||
transcoded_version.save(update_fields=["accessed_date"])
|
||||
f = transcoded_version
|
||||
file_path = get_file_path(f.audio_file)
|
||||
mt = f.mimetype
|
||||
if not proxy_media:
|
||||
# we simply issue a 302 redirect to the real URL
|
||||
response = Response(status=302)
|
||||
response["Location"] = f.audio_file.url
|
||||
return response
|
||||
if mt:
|
||||
response = Response(content_type=mt)
|
||||
else:
|
||||
|
@ -337,7 +357,9 @@ class ListenViewSet(mixins.RetrieveModelMixin, viewsets.GenericViewSet):
|
|||
rest_settings.api_settings.DEFAULT_AUTHENTICATION_CLASSES
|
||||
+ [SignatureAuthentication]
|
||||
)
|
||||
permission_classes = [common_permissions.ConditionalAuthentication]
|
||||
permission_classes = [oauth_permissions.ScopePermission]
|
||||
required_scope = "libraries"
|
||||
anonymous_policy = "setting"
|
||||
lookup_field = "uuid"
|
||||
|
||||
def retrieve(self, request, *args, **kwargs):
|
||||
|
@ -354,7 +376,21 @@ class ListenViewSet(mixins.RetrieveModelMixin, viewsets.GenericViewSet):
|
|||
return Response(status=404)
|
||||
|
||||
format = request.GET.get("to")
|
||||
return handle_serve(upload, user=request.user, format=format)
|
||||
max_bitrate = request.GET.get("max_bitrate")
|
||||
try:
|
||||
max_bitrate = min(max(int(max_bitrate), 0), 320) or None
|
||||
except (TypeError, ValueError):
|
||||
max_bitrate = None
|
||||
|
||||
if max_bitrate:
|
||||
max_bitrate = max_bitrate * 1000
|
||||
return handle_serve(
|
||||
upload,
|
||||
user=request.user,
|
||||
format=format,
|
||||
max_bitrate=max_bitrate,
|
||||
proxy_media=settings.PROXY_MEDIA,
|
||||
)
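The max_bitrate query parameter is read in kbps, clamped, then converted to bits per second; a few worked examples::

    # "?max_bitrate=192" -> min(max(192, 0), 320) = 192 -> 192000 bps
    # "?max_bitrate=999" -> clamped to 320              -> 320000 bps
    # "?max_bitrate=0"   -> falsy, treated as None (no cap)
    # "?max_bitrate=abc" -> ValueError, treated as None (no cap)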
|
||||
|
||||
|
||||
class UploadViewSet(
|
||||
|
@ -372,9 +408,11 @@ class UploadViewSet(
|
|||
)
|
||||
serializer_class = serializers.UploadForOwnerSerializer
|
||||
permission_classes = [
|
||||
permissions.IsAuthenticated,
|
||||
oauth_permissions.ScopePermission,
|
||||
common_permissions.OwnerPermission,
|
||||
]
|
||||
required_scope = "libraries"
|
||||
anonymous_policy = "setting"
|
||||
owner_field = "library.actor.user"
|
||||
owner_checks = ["read", "write"]
|
||||
filterset_class = filters.UploadFilter
|
||||
|
@ -419,12 +457,16 @@ class UploadViewSet(
|
|||
class TagViewSet(viewsets.ReadOnlyModelViewSet):
|
||||
queryset = Tag.objects.all().order_by("name")
|
||||
serializer_class = serializers.TagSerializer
|
||||
permission_classes = [common_permissions.ConditionalAuthentication]
|
||||
permission_classes = [oauth_permissions.ScopePermission]
|
||||
required_scope = "libraries"
|
||||
anonymous_policy = "setting"
|
||||
|
||||
|
||||
class Search(views.APIView):
|
||||
max_results = 3
|
||||
permission_classes = [common_permissions.ConditionalAuthentication]
|
||||
permission_classes = [oauth_permissions.ScopePermission]
|
||||
required_scope = "libraries"
|
||||
anonymous_policy = "setting"
|
||||
|
||||
def get(self, request, *args, **kwargs):
|
||||
query = request.GET["query"]
|
||||
|
@ -489,10 +531,13 @@ class Search(views.APIView):
|
|||
|
||||
|
||||
class LicenseViewSet(viewsets.ReadOnlyModelViewSet):
|
||||
permission_classes = [common_permissions.ConditionalAuthentication]
|
||||
permission_classes = [oauth_permissions.ScopePermission]
|
||||
required_scope = "libraries"
|
||||
anonymous_policy = "setting"
|
||||
serializer_class = serializers.LicenseSerializer
|
||||
queryset = models.License.objects.all().order_by("code")
|
||||
lookup_value_regex = ".*"
|
||||
max_page_size = 1000
|
||||
|
||||
def get_queryset(self):
|
||||
# ensure our licenses are up to date in DB
|
||||
|
@ -514,7 +559,9 @@ class LicenseViewSet(viewsets.ReadOnlyModelViewSet):
|
|||
|
||||
|
||||
class OembedView(views.APIView):
|
||||
permission_classes = [common_permissions.ConditionalAuthentication]
|
||||
permission_classes = [oauth_permissions.ScopePermission]
|
||||
required_scope = "libraries"
|
||||
anonymous_policy = "setting"
|
||||
|
||||
def get(self, request, *args, **kwargs):
|
||||
serializer = serializers.OembedSerializer(data=request.GET)
|
||||
|
|
|
@ -40,10 +40,6 @@ class API(object):
|
|||
_api.get_recording_by_id, max_age=settings.MUSICBRAINZ_CACHE_DURATION
|
||||
)
|
||||
|
||||
class works(object):
|
||||
search = memo(_api.search_works, max_age=settings.MUSICBRAINZ_CACHE_DURATION)
|
||||
get = memo(_api.get_work_by_id, max_age=settings.MUSICBRAINZ_CACHE_DURATION)
|
||||
|
||||
class releases(object):
|
||||
search = memo(_api.search_releases, max_age=settings.MUSICBRAINZ_CACHE_DURATION)
|
||||
get = memo(_api.get_release_by_id, max_age=settings.MUSICBRAINZ_CACHE_DURATION)
|
||||
|
|
|
@ -17,7 +17,7 @@ class PlaylistQuerySet(models.QuerySet):
|
|||
|
||||
def with_covers(self):
|
||||
album_prefetch = models.Prefetch(
|
||||
"album", queryset=music_models.Album.objects.only("cover")
|
||||
"album", queryset=music_models.Album.objects.only("cover", "artist_id")
|
||||
)
|
||||
track_prefetch = models.Prefetch(
|
||||
"track",
|
||||
|
@ -70,7 +70,7 @@ class Playlist(models.Model):
|
|||
return self.name
|
||||
|
||||
@transaction.atomic
|
||||
def insert(self, plt, index=None):
|
||||
def insert(self, plt, index=None, allow_duplicates=True):
|
||||
"""
|
||||
Given a PlaylistTrack, insert it at the correct index in the playlist,
|
||||
and update other tracks index if necessary.
|
||||
|
@ -96,6 +96,10 @@ class Playlist(models.Model):
|
|||
if index < 0:
|
||||
raise exceptions.ValidationError("Index must be zero or positive")
|
||||
|
||||
if not allow_duplicates:
|
||||
existing_without_current_plt = existing.exclude(pk=plt.pk)
|
||||
self._check_duplicate_add(existing_without_current_plt, [plt.track])
|
||||
|
||||
if move:
|
||||
# we remove the index temporarily, to avoid integrity errors
|
||||
plt.index = None
|
||||
|
@ -125,7 +129,7 @@ class Playlist(models.Model):
|
|||
return to_update.update(index=models.F("index") - 1)
|
||||
|
||||
@transaction.atomic
|
||||
def insert_many(self, tracks):
|
||||
def insert_many(self, tracks, allow_duplicates=True):
|
||||
existing = self.playlist_tracks.select_for_update()
|
||||
now = timezone.now()
|
||||
total = existing.filter(index__isnull=False).count()
|
||||
|
@ -134,6 +138,10 @@ class Playlist(models.Model):
|
|||
raise exceptions.ValidationError(
|
||||
"Playlist would reach the maximum of {} tracks".format(max_tracks)
|
||||
)
|
||||
|
||||
if not allow_duplicates:
|
||||
self._check_duplicate_add(existing, tracks)
|
||||
|
||||
self.save(update_fields=["modification_date"])
|
||||
start = total
|
||||
plts = [
|
||||
|
@ -144,6 +152,26 @@ class Playlist(models.Model):
|
|||
]
|
||||
return PlaylistTrack.objects.bulk_create(plts)
|
||||
|
||||
def _check_duplicate_add(self, existing_playlist_tracks, tracks_to_add):
|
||||
track_ids = [t.pk for t in tracks_to_add]
|
||||
|
||||
duplicates = existing_playlist_tracks.filter(
|
||||
track__pk__in=track_ids
|
||||
).values_list("track__pk", flat=True)
|
||||
if duplicates:
|
||||
duplicate_tracks = [t for t in tracks_to_add if t.pk in duplicates]
|
||||
raise exceptions.ValidationError(
|
||||
{
|
||||
"non_field_errors": [
|
||||
{
|
||||
"tracks": duplicate_tracks,
|
||||
"playlist_name": self.name,
|
||||
"code": "tracks_already_exist_in_playlist",
|
||||
}
|
||||
]
|
||||
}
|
||||
)
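A hedged usage sketch of the duplicate check (``tracks`` and the caller are hypothetical; ``exceptions`` is the same module this file already imports)::

    try:
        playlist.insert_many(tracks, allow_duplicates=False)
    except exceptions.ValidationError as error:
        # the payload built above names the duplicate tracks, the playlist
        # and the "tracks_already_exist_in_playlist" error code
        print(error)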
|
||||
|
||||
|
||||
class PlaylistTrackQuerySet(models.QuerySet):
|
||||
def for_nested_serialization(self, actor=None):
|
||||
|
|
|
@ -24,10 +24,11 @@ class PlaylistTrackSerializer(serializers.ModelSerializer):
|
|||
|
||||
class PlaylistTrackWriteSerializer(serializers.ModelSerializer):
|
||||
index = serializers.IntegerField(required=False, min_value=0, allow_null=True)
|
||||
allow_duplicates = serializers.BooleanField(required=False)
|
||||
|
||||
class Meta:
|
||||
model = models.PlaylistTrack
|
||||
fields = ("id", "track", "playlist", "index")
|
||||
fields = ("id", "track", "playlist", "index", "allow_duplicates")
|
||||
|
||||
def validate_playlist(self, value):
|
||||
if self.context.get("request"):
|
||||
|
@ -47,17 +48,21 @@ class PlaylistTrackWriteSerializer(serializers.ModelSerializer):
|
|||
@transaction.atomic
|
||||
def create(self, validated_data):
|
||||
index = validated_data.pop("index", None)
|
||||
allow_duplicates = validated_data.pop("allow_duplicates", True)
|
||||
instance = super().create(validated_data)
|
||||
instance.playlist.insert(instance, index)
|
||||
|
||||
instance.playlist.insert(instance, index, allow_duplicates)
|
||||
return instance
|
||||
|
||||
@transaction.atomic
|
||||
def update(self, instance, validated_data):
|
||||
update_index = "index" in validated_data
|
||||
index = validated_data.pop("index", None)
|
||||
allow_duplicates = validated_data.pop("allow_duplicates", True)
|
||||
super().update(instance, validated_data)
|
||||
if update_index:
|
||||
instance.playlist.insert(instance, index)
|
||||
instance.playlist.insert(instance, index, allow_duplicates)
|
||||
|
||||
return instance
|
||||
|
||||
def get_unique_together_validators(self):
|
||||
|
@ -117,9 +122,21 @@ class PlaylistSerializer(serializers.ModelSerializer):
|
|||
except AttributeError:
|
||||
return []
|
||||
|
||||
excluded_artists = []
|
||||
try:
|
||||
user = self.context["request"].user
|
||||
except (KeyError, AttributeError):
|
||||
user = None
|
||||
if user and user.is_authenticated:
|
||||
excluded_artists = list(
|
||||
user.content_filters.values_list("target_artist", flat=True)
|
||||
)
|
||||
|
||||
covers = []
|
||||
max_covers = 5
|
||||
for plt in plts:
|
||||
if plt.track.album.artist_id in excluded_artists:
|
||||
continue
|
||||
url = plt.track.album.cover.crop["200x200"].url
|
||||
if url in covers:
|
||||
continue
|
||||
|
@ -139,3 +156,7 @@ class PlaylistAddManySerializer(serializers.Serializer):
|
|||
tracks = serializers.PrimaryKeyRelatedField(
|
||||
many=True, queryset=Track.objects.for_nested_serialization()
|
||||
)
|
||||
allow_duplicates = serializers.BooleanField(required=False)
|
||||
|
||||
class Meta:
|
||||
fields = "allow_duplicates"
|
||||
|
|
|
@ -2,11 +2,12 @@ from django.db import transaction
|
|||
from django.db.models import Count
|
||||
from rest_framework import exceptions, mixins, viewsets
|
||||
from rest_framework.decorators import action
|
||||
from rest_framework.permissions import IsAuthenticatedOrReadOnly
|
||||
from rest_framework.response import Response
|
||||
|
||||
from funkwhale_api.common import fields, permissions
|
||||
from funkwhale_api.music import utils as music_utils
|
||||
from funkwhale_api.users.oauth import permissions as oauth_permissions
|
||||
|
||||
from . import filters, models, serializers
|
||||
|
||||
|
||||
|
@ -28,10 +29,11 @@ class PlaylistViewSet(
|
|||
.with_duration()
|
||||
)
|
||||
permission_classes = [
|
||||
permissions.ConditionalAuthentication,
|
||||
oauth_permissions.ScopePermission,
|
||||
permissions.OwnerPermission,
|
||||
IsAuthenticatedOrReadOnly,
|
||||
]
|
||||
required_scope = "playlists"
|
||||
anonymous_policy = "setting"
|
||||
owner_checks = ["write"]
|
||||
filterset_class = filters.PlaylistFilter
|
||||
ordering_fields = ("id", "name", "creation_date", "modification_date")
|
||||
|
@ -53,7 +55,10 @@ class PlaylistViewSet(
|
|||
serializer = serializers.PlaylistAddManySerializer(data=request.data)
|
||||
serializer.is_valid(raise_exception=True)
|
||||
try:
|
||||
plts = playlist.insert_many(serializer.validated_data["tracks"])
|
||||
plts = playlist.insert_many(
|
||||
serializer.validated_data["tracks"],
|
||||
serializer.validated_data["allow_duplicates"],
|
||||
)
|
||||
except exceptions.ValidationError as e:
|
||||
payload = {"playlist": e.detail}
|
||||
return Response(payload, status=400)
|
||||
|
@ -101,10 +106,11 @@ class PlaylistTrackViewSet(
|
|||
serializer_class = serializers.PlaylistTrackSerializer
|
||||
queryset = models.PlaylistTrack.objects.all()
|
||||
permission_classes = [
|
||||
permissions.ConditionalAuthentication,
|
||||
oauth_permissions.ScopePermission,
|
||||
permissions.OwnerPermission,
|
||||
IsAuthenticatedOrReadOnly,
|
||||
]
|
||||
required_scope = "playlists"
|
||||
anonymous_policy = "setting"
|
||||
owner_field = "playlist.user"
|
||||
owner_checks = ["write"]
|
||||
|
||||
|
|
|
@@ -1,10 +1,11 @@
import random

from django.core.exceptions import ValidationError
from django.db.models import Count
from django.db import connection
from rest_framework import serializers
from taggit.models import Tag

from funkwhale_api.moderation import filters as moderation_filters
from funkwhale_api.music.models import Artist, Track
from funkwhale_api.users.models import User
@@ -43,8 +44,16 @@ class SessionRadio(SimpleRadio):
        return self.session

    def get_queryset(self, **kwargs):
        qs = Track.objects.annotate(uploads_count=Count("uploads"))
        return qs.filter(uploads_count__gt=0)
        qs = Track.objects.all()
        if not self.session:
            return qs
        if not self.session.user:
            return qs
        query = moderation_filters.get_filtered_content_query(
            config=moderation_filters.USER_FILTER_CONFIG["TRACK"],
            user=self.session.user,
        )
        return qs.exclude(query)

    def get_queryset_kwargs(self):
        return {}
@@ -55,7 +64,13 @@ class SessionRadio(SimpleRadio):
        if self.session:
            queryset = self.filter_from_session(queryset)
            if kwargs.pop("filter_playable", True):
                queryset = queryset.playable_by(self.session.user.actor)
                queryset = queryset.playable_by(
                    self.session.user.actor if self.session.user else None
                )
        queryset = self.filter_queryset(queryset)
        return queryset

    def filter_queryset(self, queryset):
        return queryset

    def filter_from_session(self, queryset):
@@ -118,7 +133,7 @@ class CustomRadio(SessionRadio):
        try:
            user = data["user"]
        except KeyError:
            user = context["user"]
            user = context.get("user")
        try:
            assert data["custom_radio"].user == user or data["custom_radio"].is_public
        except KeyError:
@@ -153,6 +168,74 @@ class TagRadio(RelatedObjectRadio):
        return qs.filter(tags__in=[self.session.related_object])


def weighted_choice(choices):
    total = sum(w for c, w in choices)
    r = random.uniform(0, total)
    upto = 0
    for c, w in choices:
        if upto + w >= r:
            return c
        upto += w
    assert False, "Shouldn't get here"


class NextNotFound(Exception):
    pass


@registry.register(name="similar")
class SimilarRadio(RelatedObjectRadio):
    model = Track

    def filter_queryset(self, queryset):
        queryset = super().filter_queryset(queryset)
        seeds = list(
            self.session.session_tracks.all()
            .values_list("track_id", flat=True)
            .order_by("-id")[:3]
        ) + [self.session.related_object.pk]
        for seed in seeds:
            try:
                return queryset.filter(pk=self.find_next_id(queryset, seed))
            except NextNotFound:
                continue

        return queryset.none()

    def find_next_id(self, queryset, seed):
        with connection.cursor() as cursor:
            query = """
            SELECT next, count(next) AS c
            FROM (
                SELECT
                    track_id,
                    creation_date,
                    LEAD(track_id) OVER (
                        PARTITION by user_id order by creation_date asc
                    ) AS next
                FROM history_listening
                INNER JOIN users_user ON (users_user.id = user_id)
                WHERE users_user.privacy_level = 'instance' OR users_user.privacy_level = 'everyone' OR user_id = %s
                ORDER BY creation_date ASC
            ) t WHERE track_id = %s AND next != %s GROUP BY next ORDER BY c DESC;
            """
            cursor.execute(query, [self.session.user_id, seed, seed])
            next_candidates = list(cursor.fetchall())

        if not next_candidates:
            raise NextNotFound()

        matching_tracks = list(
            queryset.filter(pk__in=[c[0] for c in next_candidates]).values_list(
                "id", flat=True
            )
        )
        next_candidates = [n for n in next_candidates if n[0] in matching_tracks]
        if not next_candidates:
            raise NextNotFound()
        return random.choice([c[0] for c in next_candidates])


@registry.register(name="artist")
class ArtistRadio(RelatedObjectRadio):
    model = Artist

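The ``weighted_choice`` helper introduced above draws one entry at random, with a probability proportional to its weight. A minimal, self-contained sketch of how it behaves; the choices list below is made up purely for illustration and is not part of the codebase::

    import random

    def weighted_choice(choices):
        # Same logic as in the diff above: walk the cumulative weights
        # until the random threshold is reached.
        total = sum(w for c, w in choices)
        r = random.uniform(0, total)
        upto = 0
        for c, w in choices:
            if upto + w >= r:
                return c
            upto += w
        assert False, "Shouldn't get here"

    # "b" should come up roughly three times as often as "a"
    picks = [weighted_choice([("a", 1), ("b", 3)]) for _ in range(1000)]
    print(picks.count("a"), picks.count("b"))
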
@@ -70,7 +70,7 @@ class RadioSessionSerializer(serializers.ModelSerializer):
        return data

    def create(self, validated_data):
        validated_data["user"] = self.context["user"]
        validated_data["user"] = self.context.get("user")
        if validated_data.get("related_object_id"):
            radio = registry[validated_data["radio_type"]]()
            validated_data["related_object"] = radio.get_related_object(

@@ -1,10 +1,12 @@
from django.db.models import Q
from rest_framework import mixins, permissions, status, viewsets
from rest_framework import mixins, status, viewsets
from rest_framework.decorators import action
from rest_framework.response import Response

from funkwhale_api.common import permissions as common_permissions
from funkwhale_api.music.serializers import TrackSerializer
from funkwhale_api.music import utils as music_utils
from funkwhale_api.users.oauth import permissions as oauth_permissions

from . import filters, filtersets, models, serializers
@@ -20,12 +22,14 @@ class RadioViewSet(

    serializer_class = serializers.RadioSerializer
    permission_classes = [
        permissions.IsAuthenticated,
        oauth_permissions.ScopePermission,
        common_permissions.OwnerPermission,
    ]
    filterset_class = filtersets.RadioFilter
    required_scope = "radios"
    owner_field = "user"
    owner_checks = ["write"]
    anonymous_policy = "setting"

    def get_queryset(self):
        queryset = models.Radio.objects.all()
@@ -44,7 +48,9 @@ class RadioViewSet(
    def tracks(self, request, *args, **kwargs):
        radio = self.get_object()
        tracks = radio.get_candidates().for_nested_serialization()

        actor = music_utils.get_actor_from_request(self.request)
        tracks = tracks.with_playable_uploads(actor)
        tracks = tracks.playable_by(actor)
        page = self.paginate_queryset(tracks)
        if page is not None:
            serializer = TrackSerializer(page, many=True)
@@ -80,29 +86,55 @@ class RadioSessionViewSet(

    serializer_class = serializers.RadioSessionSerializer
    queryset = models.RadioSession.objects.all()
    permission_classes = [permissions.IsAuthenticated]
    permission_classes = []

    def get_queryset(self):
        queryset = super().get_queryset()
        return queryset.filter(user=self.request.user)
        if self.request.user.is_authenticated:
            return queryset.filter(
                Q(user=self.request.user)
                | Q(session_key=self.request.session.session_key)
            )

        return queryset.filter(session_key=self.request.session.session_key).exclude(
            session_key=None
        )

    def perform_create(self, serializer):
        if (
            not self.request.user.is_authenticated
            and not self.request.session.session_key
        ):
            self.request.session.create()
        return serializer.save(
            user=self.request.user if self.request.user.is_authenticated else None,
            session_key=self.request.session.session_key,
        )

    def get_serializer_context(self):
        context = super().get_serializer_context()
        context["user"] = self.request.user
        context["user"] = (
            self.request.user if self.request.user.is_authenticated else None
        )
        return context


class RadioSessionTrackViewSet(mixins.CreateModelMixin, viewsets.GenericViewSet):
    serializer_class = serializers.RadioSessionTrackSerializer
    queryset = models.RadioSessionTrack.objects.all()
    permission_classes = [permissions.IsAuthenticated]
    permission_classes = []

    def create(self, request, *args, **kwargs):
        serializer = self.get_serializer(data=request.data)
        serializer.is_valid(raise_exception=True)
        session = serializer.validated_data["session"]
        if not request.user.is_authenticated and not request.session.session_key:
            self.request.session.create()
        try:
            assert request.user == session.user
            assert (request.user == session.user) or (
                request.session.session_key == session.session_key
                and session.session_key
            )
        except AssertionError:
            return Response(status=status.HTTP_403_FORBIDDEN)
        session.radio.pick()

@@ -13,6 +13,7 @@ import funkwhale_api
from funkwhale_api.activity import record
from funkwhale_api.common import fields, preferences, utils as common_utils
from funkwhale_api.favorites.models import TrackFavorite
from funkwhale_api.moderation import filters as moderation_filters
from funkwhale_api.music import models as music_models
from funkwhale_api.music import utils
from funkwhale_api.music import views as music_views
@@ -91,7 +92,7 @@ def get_playlist_qs(request):
class SubsonicViewSet(viewsets.GenericViewSet):
    content_negotiation_class = negotiation.SubsonicContentNegociation
    authentication_classes = [authentication.SubsonicAuthentication]
    permissions_classes = [rest_permissions.IsAuthenticated]
    permission_classes = [rest_permissions.IsAuthenticated]

    def dispatch(self, request, *args, **kwargs):
        if not preferences.get("subsonic__enabled"):
@@ -127,7 +128,7 @@ class SubsonicViewSet(viewsets.GenericViewSet):
        detail=False,
        methods=["get", "post"],
        url_name="get_license",
        permissions_classes=[],
        permission_classes=[],
        url_path="getLicense",
    )
    def get_license(self, request, *args, **kwargs):
@@ -152,8 +153,14 @@ class SubsonicViewSet(viewsets.GenericViewSet):
        url_path="getArtists",
    )
    def get_artists(self, request, *args, **kwargs):
        artists = music_models.Artist.objects.all().playable_by(
            utils.get_actor_from_request(request)
        artists = (
            music_models.Artist.objects.all()
            .exclude(
                moderation_filters.get_filtered_content_query(
                    moderation_filters.USER_FILTER_CONFIG["ARTIST"], request.user
                )
            )
            .playable_by(utils.get_actor_from_request(request))
        )
        data = serializers.GetArtistsSerializer(artists).data
        payload = {"artists": data}
@@ -167,8 +174,14 @@ class SubsonicViewSet(viewsets.GenericViewSet):
        url_path="getIndexes",
    )
    def get_indexes(self, request, *args, **kwargs):
        artists = music_models.Artist.objects.all().playable_by(
            utils.get_actor_from_request(request)
        artists = (
            music_models.Artist.objects.all()
            .exclude(
                moderation_filters.get_filtered_content_query(
                    moderation_filters.USER_FILTER_CONFIG["ARTIST"], request.user
                )
            )
            .playable_by(utils.get_actor_from_request(request))
        )
        data = serializers.GetArtistsSerializer(artists).data
        payload = {"indexes": data}
@@ -237,7 +250,24 @@ class SubsonicViewSet(viewsets.GenericViewSet):
        format = data.get("format", "raw")
        if format == "raw":
            format = None
        return music_views.handle_serve(upload=upload, user=request.user, format=format)

        max_bitrate = data.get("maxBitRate")
        try:
            max_bitrate = min(max(int(max_bitrate), 0), 320) or None
        except (TypeError, ValueError):
            max_bitrate = None

        if max_bitrate:
            max_bitrate = max_bitrate * 1000
        return music_views.handle_serve(
            upload=upload,
            user=request.user,
            format=format,
            max_bitrate=max_bitrate,
            # Subsonic clients don't expect 302 redirection unfortunately,
            # So we have to proxy media files
            proxy_media=True,
        )

    @action(detail=False, methods=["get", "post"], url_name="star", url_path="star")
    @find_object(music_models.Track.objects.all())
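The new streaming code above clamps the Subsonic ``maxBitRate`` parameter to the 0-320 kbps range, treats 0 or unparsable values as "no limit", and converts the result to bits per second before handing it to ``handle_serve``. A standalone sketch of that clamping rule; the function name is ours, purely for illustration::

    def clamp_max_bitrate(raw):
        # Invalid or 0 -> None (no limit); otherwise cap at 320 kbps
        # and convert to bits per second, as in the diff above.
        try:
            value = min(max(int(raw), 0), 320) or None
        except (TypeError, ValueError):
            value = None
        return value * 1000 if value else None

    assert clamp_max_bitrate("192") == 192000
    assert clamp_max_bitrate("9999") == 320000
    assert clamp_max_bitrate("0") is None
    assert clamp_max_bitrate(None) is None
    assert clamp_max_bitrate("garbage") is None
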
@@ -273,7 +303,11 @@ class SubsonicViewSet(viewsets.GenericViewSet):
    def get_random_songs(self, request, *args, **kwargs):
        data = request.GET or request.POST
        actor = utils.get_actor_from_request(request)
        queryset = music_models.Track.objects.all()
        queryset = music_models.Track.objects.all().exclude(
            moderation_filters.get_filtered_content_query(
                moderation_filters.USER_FILTER_CONFIG["TRACK"], request.user
            )
        )
        queryset = queryset.playable_by(actor)
        try:
            size = int(data["size"])
@@ -308,8 +342,14 @@ class SubsonicViewSet(viewsets.GenericViewSet):
        url_path="getAlbumList2",
    )
    def get_album_list2(self, request, *args, **kwargs):
        queryset = music_models.Album.objects.with_tracks_count().order_by(
            "artist__name"
        queryset = (
            music_models.Album.objects.exclude(
                moderation_filters.get_filtered_content_query(
                    moderation_filters.USER_FILTER_CONFIG["ALBUM"], request.user
                )
            )
            .with_tracks_count()
            .order_by("artist__name")
        )
        data = request.GET or request.POST
        filterset = filters.AlbumList2FilterSet(data, queryset=queryset)

@@ -1,8 +1,8 @@
{% load account %}{% user_display user as user_display %}{% load i18n %}{% autoescape off %}{% blocktrans with site_name=current_site.name site_domain=current_site.domain %}Hello from {{ site_name }}!
{% load account %}{% user_display user as user_display %}{% load i18n %}{% autoescape off %}{% blocktrans with site_name=funkwhale_site_name site_domain=funkwhale_site_domain %}Hello from {{ site_name }}!

You're receiving this e-mail because user {{ user_display }} at {{ site_domain }} has given yours as an e-mail address to connect their account.

To confirm this is correct, go to {{ funkwhale_url }}/auth/email/confirm?key={{ key }}
{% endblocktrans %}{% endautoescape %}
{% blocktrans with site_name=current_site.name site_domain=current_site.domain %}Thank you from {{ site_name }}!
{% blocktrans with site_name=funkwhale_site_name site_domain=funkwhale_site_domain %}Thank you from {{ site_name }}!
{{ site_domain }}{% endblocktrans %}

@@ -1,5 +1,5 @@
{% load i18n %}{% autoescape off %}
{% blocktrans %}You're receiving this email because you requested a password reset for your user account at {{ site_name }}.{% endblocktrans %}
{% blocktrans with site_name=funkwhale_site_name %}You're receiving this email because you requested a password reset for your user account at {{ site_name }}.{% endblocktrans %}

{% trans "Please go to the following page and choose a new password:" %}
{{ funkwhale_url }}/auth/password/reset/confirm?uid={{ uid }}&token={{ token }}
@@ -7,6 +7,6 @@

{% trans "Thanks for using our site!" %}

{% blocktrans %}The {{ site_name }} team{% endblocktrans %}
{% blocktrans with site_name=funkwhale_site_name %}The {{ site_name }} team{% endblocktrans %}

{% endautoescape %}

@@ -3,11 +3,22 @@ from django.conf import settings
from dynamic_preferences.registries import global_preferences_registry


def get_email_context():
    context = {}
    context["funkwhale_url"] = settings.FUNKWHALE_URL
    manager = global_preferences_registry.manager()
    context["funkwhale_site_name"] = (
        manager["instance__name"] or settings.FUNKWHALE_HOSTNAME
    )
    context["funkwhale_site_domain"] = settings.FUNKWHALE_HOSTNAME
    return context


class FunkwhaleAccountAdapter(DefaultAccountAdapter):
    def is_open_for_signup(self, request):
        manager = global_preferences_registry.manager()
        return manager["users__registration_enabled"]

    def send_mail(self, template_prefix, email, context):
        context["funkwhale_url"] = settings.FUNKWHALE_URL
        context.update(get_email_context())
        return super().send_mail(template_prefix, email, context)

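With this change, account e-mails no longer rely on Django's Site framework: ``get_email_context`` exposes ``funkwhale_url``, ``funkwhale_site_name`` (the ``instance__name`` preference, falling back to the hostname) and ``funkwhale_site_domain``, and the adapter merges them into every outgoing mail context. A rough sketch of the same fallback rule, with the preference lookup replaced by plain arguments; names and values below are illustrative only::

    def build_email_context(instance_name, hostname, funkwhale_url):
        # Mirrors the shape of get_email_context() in the diff above,
        # without the dynamic_preferences lookup.
        return {
            "funkwhale_url": funkwhale_url,
            "funkwhale_site_name": instance_name or hostname,
            "funkwhale_site_domain": hostname,
        }

    # An instance without a configured name falls back to its hostname
    # in the "Hello from {{ site_name }}!" templates (example values).
    print(build_email_context("", "demo.funkwhale.example", "https://demo.funkwhale.example"))
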
@@ -33,6 +33,20 @@ class MyUserCreationForm(UserCreationForm):
        raise forms.ValidationError(self.error_messages["duplicate_username"])


def disable(modeladmin, request, queryset):
    queryset.exclude(pk=request.user.pk).update(is_active=False)


disable.short_description = "Disable login"


def enable(modeladmin, request, queryset):
    queryset.update(is_active=True)


enable.short_description = "Enable login"


@admin.register(models.User)
class UserAdmin(AuthUserAdmin):
    form = MyUserChangeForm
@@ -40,6 +54,7 @@ class UserAdmin(AuthUserAdmin):
    list_display = [
        "username",
        "email",
        "is_active",
        "date_joined",
        "last_login",
        "is_staff",
@@ -53,7 +68,7 @@ class UserAdmin(AuthUserAdmin):
        "permission_library",
        "permission_moderation",
    ]

    actions = [disable, enable]
    fieldsets = (
        (None, {"fields": ("username", "password", "privacy_level")}),
        (