pin setuptools to 60.10.0 to fix builds (1.4.0)

petitminion 2025-04-14 14:16:08 +00:00
parent 7f7f4a1fff
commit a9d56911ef
8 changed files with 2251 additions and 2110 deletions
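Not part of the commit itself, but a quick local check (a sketch, assuming the api/ Poetry environment from this repository is already installed) to confirm which setuptools version the environment actually resolves after this change:

cd api
poetry run pip show setuptools
poetry run python -c "import setuptools; print(setuptools.__version__)"

Both commands should report 60.10.0 once the pinned version is in place.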


@@ -248,6 +248,9 @@ test_api:
     CACHE_URL: "redis://redis:6379/0"
   before_script:
     - cd api
+    - poetry env info
+    - poetry run pip install "setuptools==60.10.0" wheel
+    - poetry run pip install --no-use-pep517 django-allauth==0.42.0
     - poetry install --all-extras
   script:
     - >
@@ -351,6 +354,8 @@ build_api_schema:
     API_TYPE: "v1"
   before_script:
     - cd api
+    - poetry run pip install "setuptools==60.10.0" wheel
+    - poetry run pip install --no-use-pep517 django-allauth==0.42.0
     - poetry install --all-extras
     - poetry run funkwhale-manage migrate
   script:
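The same sequence can be reproduced outside CI; a minimal sketch, assuming a local checkout with Poetry installed and commands run from the repository root:

cd api
poetry env info
poetry run pip install "setuptools==60.10.0" wheel
poetry run pip install --no-use-pep517 django-allauth==0.42.0
poetry install --all-extras

The setuptools/wheel install runs first, presumably so that the subsequent --no-use-pep517 (legacy setup.py) install of django-allauth 0.42.0 builds against the pinned setuptools rather than whatever the virtualenv ships with.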


@@ -1,148 +1,148 @@
(This commit comments out the entire file: every line now begins with a leading "#". The previous content was:)

import logging
import time

import troi
import troi.core
from django.core.cache import cache
from django.core.exceptions import ValidationError
from django.db.models import Q
from requests.exceptions import ConnectTimeout

from funkwhale_api.music import models as music_models
from funkwhale_api.typesense import utils

logger = logging.getLogger(__name__)

patches = troi.utils.discover_patches()

SUPPORTED_PATCHES = patches.keys()


def run(config, **kwargs):
    """Validate the received config and run the queryset generation"""
    candidates = kwargs.pop("candidates", music_models.Track.objects.all())
    validate(config)
    return TroiPatch().get_queryset(config, candidates)


def validate(config):
    patch = config.get("patch")
    if patch not in SUPPORTED_PATCHES:
        raise ValidationError(
            'Invalid patch "{}". Supported patches: {}'.format(
                config["patch"], SUPPORTED_PATCHES
            )
        )
    return True


def build_radio_queryset(patch, config, radio_qs):
    """Take a troi patch and its arg, match the missing mbid and then build a radio queryset"""
    logger.info("Config used for troi radio generation is " + str(config))

    start_time = time.time()
    try:
        recommendations = troi.core.generate_playlist(patch, config)
    except ConnectTimeout:
        raise ValueError(
            "Timed out while connecting to ListenBrainz. No candidates could be retrieved for the radio."
        )
    end_time_rec = time.time()
    logger.info("Troi fetch took :" + str(end_time_rec - start_time))

    if not recommendations:
        raise ValueError("No candidates found by troi")

    recommended_mbids = [
        recommended_recording.mbid
        for recommended_recording in recommendations.playlists[0].recordings
    ]

    logger.info("Searching for MusicBrainz ID in Funkwhale database")

    qs_recommended = (
        music_models.Track.objects.all()
        .filter(mbid__in=recommended_mbids)
        .order_by("mbid", "pk")
        .distinct("mbid")
    )
    qs_recommended_mbid = [str(i.mbid) for i in qs_recommended]

    recommended_mbids_not_qs = [
        mbid for mbid in recommended_mbids if mbid not in qs_recommended_mbid
    ]
    cached_match = cache.get_many(recommended_mbids_not_qs)
    cached_match_mbid = [str(i) for i in cached_match.keys()]

    if qs_recommended and cached_match_mbid:
        logger.info("MusicBrainz IDs found in Funkwhale database and redis")
        qs_recommended_mbid.extend(cached_match_mbid)
        mbids_found = qs_recommended_mbid
    elif qs_recommended and not cached_match_mbid:
        logger.info("MusicBrainz IDs found in Funkwhale database")
        mbids_found = qs_recommended_mbid
    elif not qs_recommended and cached_match_mbid:
        logger.info("MusicBrainz IDs found in redis cache")
        mbids_found = cached_match_mbid
    else:
        logger.info(
            "Couldn't find any matches in Funkwhale database. Trying to match all"
        )
        mbids_found = []

    recommended_recordings_not_found = [
        i for i in recommendations.playlists[0].recordings if i.mbid not in mbids_found
    ]

    logger.info("Matching missing MusicBrainz ID to Funkwhale track")

    start_time_resolv = time.time()
    utils.resolve_recordings_to_fw_track(recommended_recordings_not_found)
    end_time_resolv = time.time()

    logger.info(
        "Resolving "
        + str(len(recommended_recordings_not_found))
        + " tracks in "
        + str(end_time_resolv - start_time_resolv)
    )

    cached_match = cache.get_many(recommended_mbids)

    if not mbids_found and not cached_match:
        raise ValueError("No candidates found for troi radio")

    mbids_found_pks = list(
        music_models.Track.objects.all()
        .filter(mbid__in=mbids_found)
        .order_by("mbid", "pk")
        .distinct("mbid")
        .values_list("pk", flat=True)
    )

    mbids_found_pks_unique = [
        i for i in mbids_found_pks if i not in cached_match.keys()
    ]

    if mbids_found and cached_match:
        return radio_qs.filter(
            Q(pk__in=mbids_found_pks_unique) | Q(pk__in=cached_match.values())
        )

    if mbids_found and not cached_match:
        return radio_qs.filter(pk__in=mbids_found_pks_unique)

    if not mbids_found and cached_match:
        return radio_qs.filter(pk__in=cached_match.values())


class TroiPatch:
    code = "troi-patch"
    label = "Troi Patch"

    def get_queryset(self, config, qs):
        patch_string = config.pop("patch")
        patch = patches[patch_string]
        return build_radio_queryset(patch(), config, qs)


@@ -1,111 +1,111 @@
(This file is likewise commented out in full by this commit. Its previous content was:)

from troi import Artist, Element, Playlist, Recording
from troi.patch import Patch

recording_list = [
    Recording(
        name="I Want It That Way",
        mbid="87dfa566-21c3-45ed-bc42-1d345b8563fa",
        artist=Artist(name="artist_name"),
    ),
    Recording(name="Untouchable", artist=Artist(name="Another lol")),
    Recording(
        name="The Perfect Kiss",
        mbid="ec0da94e-fbfe-4eb0-968e-024d4c32d1d0",
        artist=Artist(name="artist_name2"),
    ),
    Recording(
        name="Love Your Voice",
        mbid="93726547-f8c0-4efd-8e16-d2dee76500f6",
        artist=Artist(name="artist_name"),
    ),
    Recording(
        name="Hall of Fame",
        mbid="395bd5a1-79cc-4e04-8869-ca9eabc78d09",
        artist=Artist(name="artist_name_3"),
    ),
]


class DummyElement(Element):
    """Dummy element that returns a fixed playlist for testing"""

    @staticmethod
    def outputs():
        return [Playlist]

    def read(self, sources):
        recordings = recording_list

        return [
            Playlist(
                name="Test Export Playlist",
                description="A playlist to test exporting playlists to spotify",
                recordings=recordings,
            )
        ]


class DummyPatch(Patch):
    """Dummy patch that always returns a fixed set of recordings for testing"""

    @staticmethod
    def slug():
        return "test-patch"

    def create(self, inputs):
        return DummyElement()

    @staticmethod
    def outputs():
        return [Recording]


recommended_recording_mbids = [
    "87dfa566-21c3-45ed-bc42-1d345b8563fa",
    "ec0da94e-fbfe-4eb0-968e-024d4c32d1d0",
    "93726547-f8c0-4efd-8e16-d2dee76500f6",
    "395bd5a1-79cc-4e04-8869-ca9eabc78d09",
]

typesense_search_result = {
    "facet_counts": [],
    "found": 1,
    "out_of": 1,
    "page": 1,
    "request_params": {
        "collection_name": "canonical_fw_data",
        "per_page": 10,
        "q": "artist_nameiwantitthatway",
    },
    "search_time_ms": 1,
    "hits": [
        {
            "highlights": [
                {
                    "field": "combined",
                    "snippet": "string",
                    "matched_tokens": ["string"],
                }
            ],
            "document": {
                "pk": "1",
                "combined": "artist_nameiwantitthatway",
            },
            "text_match": 130916,
        },
        {
            "highlights": [
                {
                    "field": "combined",
                    "snippet": "string",
                    "matched_tokens": ["string"],
                }
            ],
            "document": {
                "pk": "2",
                "combined": "artist_nameiwantitthatway",
            },
            "text_match": 130916,
        },
    ],
}


@@ -1,92 +1,92 @@
(This file is likewise commented out in full by this commit. Its previous content was:)

import logging
import re

import unidecode
from django.conf import settings
from django.core.cache import cache
from lb_matching_tools.cleaner import MetadataCleaner

from funkwhale_api.music import models as music_models

logger = logging.getLogger(__name__)

api_key = settings.TYPESENSE_API_KEY
host = settings.TYPESENSE_HOST
port = settings.TYPESENSE_PORT
protocol = settings.TYPESENSE_PROTOCOL
TYPESENSE_NUM_TYPO = settings.TYPESENSE_NUM_TYPO


class TypesenseNotActivate(Exception):
    pass


if not settings.TYPESENSE_API_KEY:
    logger.info(
        "Typesense is not activated. You can enable it by setting the TYPESENSE_API_KEY env variable."
    )
else:
    import typesense


def delete_non_alnum_characters(text):
    return unidecode.unidecode(re.sub(r"[^\w]+", "", text).lower())


def resolve_recordings_to_fw_track(recordings):
    """
    Tries to match a troi recording entity to a fw track using the typesense index.
    It will save the results in the match_mbid attribute of the Track table.
    For test purposes : if multiple fw tracks are returned, we log the information
    but only keep the best result in db to avoid duplicates.
    """

    if not settings.TYPESENSE_API_KEY:
        raise TypesenseNotActivate(
            "Typesense is not activated. You can enable it by setting the TYPESENSE_API_KEY env variable."
        )

    client = typesense.Client(
        {
            "api_key": api_key,
            "nodes": [{"host": host, "port": port, "protocol": protocol}],
            "connection_timeout_seconds": 2,
        }
    )

    mc = MetadataCleaner()

    for recording in recordings:
        rec = mc.clean_recording(recording.name)
        artist = mc.clean_artist(recording.artist.name)
        canonical_name_for_track = delete_non_alnum_characters(artist + rec)

        logger.debug(f"Trying to resolve : {canonical_name_for_track}")

        search_parameters = {
            "q": canonical_name_for_track,
            "query_by": "combined",
            "num_typos": TYPESENSE_NUM_TYPO,
            "drop_tokens_threshold": 0,
        }
        matches = client.collections["canonical_fw_data"].documents.search(
            search_parameters
        )

        if matches["hits"]:
            hit = matches["hits"][0]
            pk = hit["document"]["pk"]
            logger.debug(f"Saving match for track with primary key {pk}")
            cache.set(recording.mbid, pk)

            if settings.DEBUG and matches["hits"][1]:
                for hit in matches["hits"][1:]:
                    pk = hit["document"]["pk"]
                    fw_track = music_models.Track.objects.get(pk=pk)
                    logger.info(
                        f"Duplicate match found for {fw_track.artist.name} {fw_track.title} \
                        and primary key {pk}. Skipping because of better match."
                    )
        else:
            logger.debug("No match found in fw db")

    return cache.get_many([rec.mbid for rec in recordings])

api/poetry.lock (generated): 3521 lines changed; file diff suppressed because it is too large.


@@ -26,10 +26,11 @@ funkwhale-manage = 'funkwhale_api.main:main'

 [tool.poetry.dependencies]
 python = "^3.8,<3.12"
+setuptools = "==60.10.0"

 # Django
 dj-rest-auth = { extras = ["with_social"], version = "2.2.8" }
-django = "==3.2.25"
+django = "==3.2.24"
 django-allauth = "==0.42.0"
 django-cache-memoize = "0.1.10"
 django-cacheops = "==6.1"
@@ -84,9 +85,7 @@ requests = "==2.28.2"
 requests-http-message-signatures = "==0.3.1"
 sentry-sdk = "==1.19.1"
 watchdog = "==2.2.1"
-troi = { git = "https://github.com/metabrainz/troi-recommendation-playground.git", tag = "v-2023-10-30.0"}
-lb-matching-tools = { git = "https://github.com/metabrainz/listenbrainz-matching-tools.git", branch = "main"}
-unidecode = "==1.3.8"
+unidecode = "==1.3.7"
 pycountry = "22.3.5"

 # Typesense
@@ -97,7 +96,6 @@ ipython = "==7.34.0"
 pluralizer = "==1.2.0"
 service-identity = "==21.1.0"
 unicode-slugify = "==0.1.5"
-
 [tool.poetry.group.dev.dependencies]
 aioresponses = "==0.7.6"
 asynctest = "==0.13.0"
@@ -128,7 +126,8 @@ django-extensions = "==3.2.3"
 typesense = ["typesense"]

 [build-system]
-requires = ["poetry-core==1.8.1"]
+requires = ["poetry-core==1.8.1", "setuptools==60.10.0"
+]
 build-backend = "poetry.core.masonry.api"

 [tool.pylint.master]


@@ -1,116 +1,116 @@
(This file is likewise commented out in full by this commit. Its previous content was:)

import pytest
import troi.core
from django.core.cache import cache
from django.db.models import Q
from requests.exceptions import ConnectTimeout

from funkwhale_api.music.models import Track
from funkwhale_api.radios import lb_recommendations
from funkwhale_api.typesense import factories as custom_factories
from funkwhale_api.typesense import utils


def test_can_build_radio_queryset_with_fw_db(factories, mocker):
    factories["music.Track"](
        title="I Want It That Way", mbid="87dfa566-21c3-45ed-bc42-1d345b8563fa"
    )
    factories["music.Track"](
        title="The Perfect Kiss", mbid="ec0da94e-fbfe-4eb0-968e-024d4c32d1d0"
    )
    factories["music.Track"]()
    qs = Track.objects.all()
    mocker.patch("funkwhale_api.typesense.utils.resolve_recordings_to_fw_track")

    radio_qs = lb_recommendations.build_radio_queryset(
        custom_factories.DummyPatch(), {"min_recordings": 1}, qs
    )
    recommended_recording_mbids = [
        "87dfa566-21c3-45ed-bc42-1d345b8563fa",
        "ec0da94e-fbfe-4eb0-968e-024d4c32d1d0",
    ]

    assert list(
        Track.objects.all().filter(Q(mbid__in=recommended_recording_mbids))
    ) == list(radio_qs)


def test_build_radio_queryset_without_fw_db(mocker):
    resolve_recordings_to_fw_track = mocker.patch.object(
        utils, "resolve_recordings_to_fw_track", return_value=None
    )
    # mocker.patch.object(cache, "get_many", return_value=None)
    qs = Track.objects.all()

    with pytest.raises(ValueError):
        lb_recommendations.build_radio_queryset(
            custom_factories.DummyPatch(), {"min_recordings": 1}, qs
        )
    assert resolve_recordings_to_fw_track.called_once_with(
        custom_factories.recommended_recording_mbids
    )


def test_build_radio_queryset_with_redis_and_fw_db(factories, mocker):
    factories["music.Track"](
        pk="1", title="I Want It That Way", mbid="87dfa566-21c3-45ed-bc42-1d345b8563fa"
    )
    mocker.patch.object(utils, "resolve_recordings_to_fw_track", return_value=None)
    redis_cache = {}
    redis_cache["ec0da94e-fbfe-4eb0-968e-024d4c32d1d0"] = 2
    mocker.patch.object(cache, "get_many", return_value=redis_cache)
    qs = Track.objects.all()

    assert list(
        lb_recommendations.build_radio_queryset(
            custom_factories.DummyPatch(), {"min_recordings": 1}, qs
        )
    ) == list(Track.objects.all().filter(pk__in=[1, 2]))


def test_build_radio_queryset_with_redis_and_without_fw_db(factories, mocker):
    factories["music.Track"](
        pk="1", title="Super title", mbid="87dfaaaa-2aaa-45ed-bc42-1d34aaaaaaaa"
    )
    mocker.patch.object(utils, "resolve_recordings_to_fw_track", return_value=None)
    redis_cache = {}
    redis_cache["87dfa566-21c3-45ed-bc42-1d345b8563fa"] = 1
    mocker.patch.object(cache, "get_many", return_value=redis_cache)
    qs = Track.objects.all()

    assert list(
        lb_recommendations.build_radio_queryset(
            custom_factories.DummyPatch(), {"min_recordings": 1}, qs
        )
    ) == list(Track.objects.all().filter(pk=1))


def test_build_radio_queryset_catch_troi_ConnectTimeout(mocker):
    mocker.patch.object(
        troi.core,
        "generate_playlist",
        side_effect=ConnectTimeout,
    )
    qs = Track.objects.all()

    with pytest.raises(ValueError):
        lb_recommendations.build_radio_queryset(
            custom_factories.DummyPatch(), {"min_recordings": 1}, qs
        )


def test_build_radio_queryset_catch_troi_no_candidates(mocker):
    mocker.patch.object(
        troi.core,
        "generate_playlist",
    )
    qs = Track.objects.all()

    with pytest.raises(ValueError):
        lb_recommendations.build_radio_queryset(
            custom_factories.DummyPatch(), {"min_recordings": 1}, qs
        )


@@ -1,43 +1,43 @@
(This file is likewise commented out in full by this commit. Its previous content was:)

import requests_mock
import typesense
from django.core.cache import cache

from funkwhale_api.typesense import factories as custom_factories
from funkwhale_api.typesense import utils


def test_resolve_recordings_to_fw_track(mocker, factories):
    artist = factories["music.Artist"](name="artist_name")
    factories["music.Track"](
        pk=1,
        title="I Want It That Way",
        artist=artist,
        mbid="87dfa566-21c3-45ed-bc42-1d345b8563fa",
    )
    factories["music.Track"](
        pk=2,
        title="I Want It That Way",
        artist=artist,
    )

    client = typesense.Client(
        {
            "api_key": "api_key",
            "nodes": [{"host": "host", "port": "port", "protocol": "protocol"}],
            "connection_timeout_seconds": 2,
        }
    )

    with requests_mock.Mocker() as r_mocker:
        mocker.patch.object(typesense, "Client", return_value=client)
        mocker.patch.object(
            typesense.client.ApiCall,
            "post",
            return_value=custom_factories.typesense_search_result,
        )
        r_mocker.get(
            "protocol://host:port/collections/canonical_fw_data/documents/search",
            json=custom_factories.typesense_search_result,
        )

        utils.resolve_recordings_to_fw_track(custom_factories.recording_list)
        assert cache.get("87dfa566-21c3-45ed-bc42-1d345b8563fa") == "1"