Merge branch '176-relaunch' into 'develop'
Resolve "Allow import job relaunch" Closes #176 See merge request funkwhale/funkwhale!172
This commit is contained in:
commit
e226d60c6a
@@ -1,4 +1,5 @@
 from django.db import transaction
+from django.db.models import Q
 from rest_framework import serializers
 from taggit.models import Tag

@@ -9,6 +10,7 @@ from funkwhale_api.federation.serializers import AP_CONTEXT
 from funkwhale_api.users.serializers import UserBasicSerializer

 from . import models
+from . import tasks


 class TagSerializer(serializers.ModelSerializer):

@@ -204,3 +206,33 @@ class SubmitFederationTracksSerializer(serializers.Serializer):
                 source=lt.url,
             )
         return batch
+
+
+class ImportJobRunSerializer(serializers.Serializer):
+    jobs = serializers.PrimaryKeyRelatedField(
+        many=True,
+        queryset=models.ImportJob.objects.filter(
+            status__in=['pending', 'errored']
+        )
+    )
+    batches = serializers.PrimaryKeyRelatedField(
+        many=True,
+        queryset=models.ImportBatch.objects.all()
+    )
+
+    def validate(self, validated_data):
+        jobs = validated_data['jobs']
+        batches_ids = [b.pk for b in validated_data['batches']]
+        query = Q(batch__pk__in=batches_ids)
+        query |= Q(pk__in=[j.id for j in jobs])
+        queryset = models.ImportJob.objects.filter(query).filter(
+            status__in=['pending', 'errored']).distinct()
+        validated_data['_jobs'] = queryset
+        return validated_data
+
+    def create(self, validated_data):
+        ids = validated_data['_jobs'].values_list('id', flat=True)
+        validated_data['_jobs'].update(status='pending')
+        for id in ids:
+            tasks.import_job_run.delay(import_job_id=id)
+        return {'jobs': list(ids)}

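Note (not part of the merge request): a minimal sketch of how the new ImportJobRunSerializer could be driven from a Django shell, assuming it lives next to the models in funkwhale_api.music.serializers (the module path is inferred from the surrounding imports and may differ):

    # Hypothetical usage sketch; module path and object lookups are assumptions.
    from funkwhale_api.music import models, serializers

    batch = models.ImportBatch.objects.first()  # assumes at least one batch exists
    serializer = serializers.ImportJobRunSerializer(data={
        'jobs': [],              # individual job ids can also be listed here
        'batches': [batch.pk],   # every pending/errored job of these batches is relaunched
    })
    serializer.is_valid(raise_exception=True)
    payload = serializer.save()  # sets matched jobs back to 'pending' and queues import_job_run tasks
    print(payload)               # {'jobs': [<ids of the relaunched jobs>]}

Both keys must be present, but either list may be empty; validate() merges the listed job ids and the jobs of the listed batches into one deduplicated queryset restricted to pending/errored jobs, which is what create() relaunches.
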
@@ -145,6 +145,14 @@ class ImportJobViewSet(
         data['count'] = sum([v for v in data.values()])
         return Response(data)

+    @list_route(methods=['post'])
+    def run(self, request, *args, **kwargs):
+        serializer = serializers.ImportJobRunSerializer(data=request.data)
+        serializer.is_valid(raise_exception=True)
+        payload = serializer.save()
+
+        return Response(payload)
+
     def perform_create(self, serializer):
         source = 'file://' + serializer.validated_data['audio_file'].name
         serializer.save(source=source)

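Note (not part of the merge request): the @list_route above exposes the relaunch as a POST action on the import-jobs viewset. A hedged sketch of calling it over HTTP with the requests library; the /api/v1/import-jobs/run/ path is inferred from the frontend axios call and the api:v1:import-jobs-run route name used in the tests, and the instance URL, ids and auth header are placeholders:

    # Hypothetical client call; URL prefix and auth scheme may differ per instance.
    import requests

    FUNKWHALE_URL = 'https://funkwhale.example'  # placeholder instance
    TOKEN = 'REPLACE_ME'                         # placeholder credential

    response = requests.post(
        FUNKWHALE_URL + '/api/v1/import-jobs/run/',
        json={'jobs': [42], 'batches': [7]},     # relaunch job 42 plus batch 7's pending/errored jobs
        headers={'Authorization': 'JWT ' + TOKEN},
    )
    response.raise_for_status()
    print(response.json())                       # e.g. {'jobs': [42, ...]}

Only pending and errored jobs are accepted: listing an already finished job id is rejected by the serializer's field queryset, while finished jobs inside a listed batch are simply skipped.
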
@@ -208,3 +208,64 @@ def test_import_job_stats_filter(factories, superuser_api_client):
     }
     assert response.status_code == 200
     assert response.data == expected
+
+
+def test_import_job_run_via_api(factories, superuser_api_client, mocker):
+    run = mocker.patch('funkwhale_api.music.tasks.import_job_run.delay')
+    job1 = factories['music.ImportJob'](status='errored')
+    job2 = factories['music.ImportJob'](status='pending')
+
+    url = reverse('api:v1:import-jobs-run')
+    response = superuser_api_client.post(url, {'jobs': [job2.pk, job1.pk]})
+
+    job1.refresh_from_db()
+    job2.refresh_from_db()
+    assert response.status_code == 200
+    assert response.data == {'jobs': [job1.pk, job2.pk]}
+    assert job1.status == 'pending'
+    assert job2.status == 'pending'
+
+    run.assert_any_call(import_job_id=job1.pk)
+    run.assert_any_call(import_job_id=job2.pk)
+
+
+def test_import_batch_run_via_api(factories, superuser_api_client, mocker):
+    run = mocker.patch('funkwhale_api.music.tasks.import_job_run.delay')
+
+    batch = factories['music.ImportBatch']()
+    job1 = factories['music.ImportJob'](batch=batch, status='errored')
+    job2 = factories['music.ImportJob'](batch=batch, status='pending')
+
+    url = reverse('api:v1:import-jobs-run')
+    response = superuser_api_client.post(url, {'batches': [batch.pk]})
+
+    job1.refresh_from_db()
+    job2.refresh_from_db()
+    assert response.status_code == 200
+    assert job1.status == 'pending'
+    assert job2.status == 'pending'
+
+    run.assert_any_call(import_job_id=job1.pk)
+    run.assert_any_call(import_job_id=job2.pk)
+
+
+def test_import_batch_and_job_run_via_api(
+        factories, superuser_api_client, mocker):
+    run = mocker.patch('funkwhale_api.music.tasks.import_job_run.delay')
+
+    batch = factories['music.ImportBatch']()
+    job1 = factories['music.ImportJob'](batch=batch, status='errored')
+    job2 = factories['music.ImportJob'](status='pending')
+
+    url = reverse('api:v1:import-jobs-run')
+    response = superuser_api_client.post(
+        url, {'batches': [batch.pk], 'jobs': [job2.pk]})
+
+    job1.refresh_from_db()
+    job2.refresh_from_db()
+    assert response.status_code == 200
+    assert job1.status == 'pending'
+    assert job2.status == 'pending'
+
+    run.assert_any_call(import_job_id=job1.pk)
+    run.assert_any_call(import_job_id=job2.pk)

@@ -0,0 +1 @@
+Can now relaunch errored jobs and batches (#176)

@@ -40,7 +40,16 @@
       </tr>
       <tr v-if="stats">
         <td><strong>{{ $t('Errored') }}</strong></td>
-        <td>{{ stats.errored }}</td>
+        <td>
+          {{ stats.errored }}
+          <button
+            @click="rerun({batches: [batch.id], jobs: []})"
+            v-if="stats.errored > 0"
+            class="ui tiny basic icon button">
+            <i class="redo icon" />
+            {{ $t('Rerun errored jobs')}}
+          </button>
+        </td>
       </tr>
       <tr v-if="stats">
         <td><strong>{{ $t('Finished') }}</strong></td>

@@ -83,11 +92,21 @@
             <a :href="'https://www.musicbrainz.org/recording/' + job.mbid" target="_blank">{{ job.mbid }}</a>
           </td>
           <td>
-            <a :href="job.source" target="_blank">{{ job.source }}</a>
+            <a :title="job.source" :href="job.source" target="_blank">
+              {{ job.source|truncate(50) }}
+            </a>
           </td>
           <td>
             <span
-              :class="['ui', {'yellow': job.status === 'pending'}, {'red': job.status === 'errored'}, {'green': job.status === 'finished'}, 'label']">{{ job.status }}</span>
+              :class="['ui', {'yellow': job.status === 'pending'}, {'red': job.status === 'errored'}, {'green': job.status === 'finished'}, 'label']">
+              {{ job.status }}</span>
+            <button
+              @click="rerun({batches: [], jobs: [job.id]})"
+              v-if="job.status === 'errored'"
+              :title="$t('Rerun job')"
+              class="ui tiny basic icon button">
+              <i class="redo icon" />
+            </button>
           </td>
           <td>
             <router-link v-if="job.track_file" :to="{name: 'library.tracks.detail', params: {id: job.track_file.track }}">{{ job.track_file.track }}</router-link>

@@ -167,12 +186,6 @@ export default {
       return axios.get(url).then((response) => {
         self.batch = response.data
         self.isLoading = false
-        if (self.batch.status === 'pending') {
-          self.timeout = setTimeout(
-            self.fetchData,
-            5000
-          )
-        }
       })
     },
     fetchStats () {

@@ -186,7 +199,7 @@ export default {
           self.fetchJobs()
           self.fetchData()
         }
-        if (self.batch.status === 'pending') {
+        if (self.stats.pending > 0) {
          self.timeout = setTimeout(
            self.fetchStats,
            5000

@@ -194,6 +207,15 @@ export default {
         }
       })
     },
+    rerun ({jobs, batches}) {
+      let payload = {
+        jobs, batches
+      }
+      let self = this
+      axios.post('import-jobs/run/', payload).then((response) => {
+        self.fetchStats()
+      })
+    },
     fetchJobs () {
       let params = {
         batch: this.id,