Resolve "Database growing way too much (music_upload)"
This commit is contained in:
parent
b77e16e09b
commit
6328d00015
|
@ -0,0 +1,35 @@
|
||||||
|
from django.core.management.base import BaseCommand
|
||||||
|
|
||||||
|
from django.db import transaction
|
||||||
|
|
||||||
|
from funkwhale_api.music import models
|
||||||
|
|
||||||
|
|
||||||
|
class Command(BaseCommand):
    """Management command that prunes all skipped Uploads from the database."""

    help = """
    This command makes it easy to prune all skipped Uploads from the database.
    Due to a bug they might have caused the database to grow exponentially,
    especially when using in-place-imports on a regular basis. This command
    helps to clean up the database again.
    """

    def add_arguments(self, parser):
        # --force is a boolean flag: without action="store_true" the original
        # definition required an explicit value after --force, which broke the
        # documented "run with --force" usage.
        parser.add_argument(
            "--force",
            action="store_true",
            default=False,
            help="Disable dry run mode and apply pruning for real on the database",
        )

    @transaction.atomic
    def handle(self, *args, **options):
        """Delete skipped uploads, or report how many would be deleted (dry run)."""
        # NOTE(review): the original referenced models.Uploads, which does not
        # exist — the music app's model is named `Upload` (singular); confirm
        # against funkwhale_api.music.models.
        skipped = models.Upload.objects.filter(import_status="skipped")
        # .count() issues a single COUNT(*) query instead of fetching and
        # materializing every matching row as len() would.
        count = skipped.count()
        if options["force"]:
            skipped.delete()
            self.stdout.write(f"Deleted {count} entries from the database.")
            return

        # The original used a backslash continuation inside the f-string,
        # which embedded the next line's leading whitespace into the message.
        self.stdout.write(
            f"Would delete {count} entries from the database. "
            "Run with --force to actually apply changes to the database"
        )
|
|
@ -264,7 +264,9 @@ def process_upload(upload, update_denormalization=True):
|
||||||
upload.import_status = "skipped"
|
upload.import_status = "skipped"
|
||||||
upload.import_details = {
|
upload.import_details = {
|
||||||
"code": "already_imported_in_owned_libraries",
|
"code": "already_imported_in_owned_libraries",
|
||||||
"duplicates": list(owned_duplicates),
|
# In order to avoid exponential growth of the database, we only
|
||||||
|
# reference the first known upload which gets duplicated
|
||||||
|
"duplicates": owned_duplicates[0],
|
||||||
}
|
}
|
||||||
upload.import_date = timezone.now()
|
upload.import_date = timezone.now()
|
||||||
upload.save(
|
upload.save(
|
||||||
|
@ -415,6 +417,7 @@ def get_owned_duplicates(upload, track):
|
||||||
)
|
)
|
||||||
.exclude(pk=upload.pk)
|
.exclude(pk=upload.pk)
|
||||||
.values_list("uuid", flat=True)
|
.values_list("uuid", flat=True)
|
||||||
|
.order_by("creation_date")
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
|
|
|
@ -452,7 +452,7 @@ def test_upload_import_skip_existing_track_in_own_library(factories, temp_signal
|
||||||
assert duplicate.import_status == "skipped"
|
assert duplicate.import_status == "skipped"
|
||||||
assert duplicate.import_details == {
|
assert duplicate.import_details == {
|
||||||
"code": "already_imported_in_owned_libraries",
|
"code": "already_imported_in_owned_libraries",
|
||||||
"duplicates": [str(existing.uuid)],
|
"duplicates": str(existing.uuid),
|
||||||
}
|
}
|
||||||
|
|
||||||
handler.assert_called_once_with(
|
handler.assert_called_once_with(
|
||||||
|
|
|
@ -0,0 +1 @@
|
||||||
|
Fix exponentially growing database when using in-place-imports on a regular basis #1676
|
Loading…
Reference in New Issue