Fix #176: Can now relaunch errored jobs and batches
commit f4899c5387
parent 6dcde77b1e
@@ -1,4 +1,5 @@
 from django.db import transaction
+from django.db.models import Q
 from rest_framework import serializers
 from taggit.models import Tag
 
@@ -9,6 +10,7 @@ from funkwhale_api.federation.serializers import AP_CONTEXT
 from funkwhale_api.users.serializers import UserBasicSerializer
 
 from . import models
+from . import tasks
 
 
 class TagSerializer(serializers.ModelSerializer):
@@ -204,3 +206,33 @@ class SubmitFederationTracksSerializer(serializers.Serializer):
                 source=lt.url,
             )
         return batch
+
+
+class ImportJobRunSerializer(serializers.Serializer):
+    jobs = serializers.PrimaryKeyRelatedField(
+        many=True,
+        queryset=models.ImportJob.objects.filter(
+            status__in=['pending', 'errored']
+        )
+    )
+    batches = serializers.PrimaryKeyRelatedField(
+        many=True,
+        queryset=models.ImportBatch.objects.all()
+    )
+
+    def validate(self, validated_data):
+        jobs = validated_data['jobs']
+        batches_ids = [b.pk for b in validated_data['batches']]
+        query = Q(batch__pk__in=batches_ids)
+        query |= Q(pk__in=[j.id for j in jobs])
+        queryset = models.ImportJob.objects.filter(query).filter(
+            status__in=['pending', 'errored']).distinct()
+        validated_data['_jobs'] = queryset
+        return validated_data
+
+    def create(self, validated_data):
+        ids = validated_data['_jobs'].values_list('id', flat=True)
+        validated_data['_jobs'].update(status='pending')
+        for id in ids:
+            tasks.import_job_run.delay(import_job_id=id)
+        return {'jobs': list(ids)}
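
The serializer above drives the relaunch feature: validate() merges the explicit job ids with every job belonging to one of the selected batches, keeps only jobs in the 'pending' or 'errored' state, and stores the resulting queryset; create() resets those jobs to 'pending' and queues one import_job_run task per job. A minimal usage sketch, not part of the commit (the module path and the literal ids are assumptions):

    # Assumed import path; the ids below are made up for illustration.
    from funkwhale_api.music.serializers import ImportJobRunSerializer

    serializer = ImportJobRunSerializer(data={'jobs': [42], 'batches': [7]})
    serializer.is_valid(raise_exception=True)
    payload = serializer.save()
    # payload mirrors create(): {'jobs': [<ids of every job that was relaunched>]}
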
@@ -145,6 +145,14 @@ class ImportJobViewSet(
         data['count'] = sum([v for v in data.values()])
         return Response(data)
 
+    @list_route(methods=['post'])
+    def run(self, request, *args, **kwargs):
+        serializer = serializers.ImportJobRunSerializer(data=request.data)
+        serializer.is_valid(raise_exception=True)
+        payload = serializer.save()
+
+        return Response(payload)
+
     def perform_create(self, serializer):
         source = 'file://' + serializer.validated_data['audio_file'].name
         serializer.save(source=source)
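
The run action exposes that serializer as a POST endpoint on the import-job viewset. A hedged sketch of calling it with DRF's test client, assuming the route name api:v1:import-jobs-run used by the tests below and an already existing superuser for authentication:

    from django.urls import reverse
    from rest_framework.test import APIClient

    client = APIClient()
    # client.force_authenticate(user=superuser)  # 'superuser' is an assumed, pre-existing user object
    url = reverse('api:v1:import-jobs-run')
    response = client.post(url, {'jobs': [12], 'batches': [3]}, format='json')
    assert response.status_code == 200
    # response.data comes from ImportJobRunSerializer.create: {'jobs': [<relaunched job ids>]}
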
@@ -208,3 +208,64 @@ def test_import_job_stats_filter(factories, superuser_api_client):
     }
     assert response.status_code == 200
     assert response.data == expected
+
+
+def test_import_job_run_via_api(factories, superuser_api_client, mocker):
+    run = mocker.patch('funkwhale_api.music.tasks.import_job_run.delay')
+    job1 = factories['music.ImportJob'](status='errored')
+    job2 = factories['music.ImportJob'](status='pending')
+
+    url = reverse('api:v1:import-jobs-run')
+    response = superuser_api_client.post(url, {'jobs': [job2.pk, job1.pk]})
+
+    job1.refresh_from_db()
+    job2.refresh_from_db()
+    assert response.status_code == 200
+    assert response.data == {'jobs': [job1.pk, job2.pk]}
+    assert job1.status == 'pending'
+    assert job2.status == 'pending'
+
+    run.assert_any_call(import_job_id=job1.pk)
+    run.assert_any_call(import_job_id=job2.pk)
+
+
+def test_import_batch_run_via_api(factories, superuser_api_client, mocker):
+    run = mocker.patch('funkwhale_api.music.tasks.import_job_run.delay')
+
+    batch = factories['music.ImportBatch']()
+    job1 = factories['music.ImportJob'](batch=batch, status='errored')
+    job2 = factories['music.ImportJob'](batch=batch, status='pending')
+
+    url = reverse('api:v1:import-jobs-run')
+    response = superuser_api_client.post(url, {'batches': [batch.pk]})
+
+    job1.refresh_from_db()
+    job2.refresh_from_db()
+    assert response.status_code == 200
+    assert job1.status == 'pending'
+    assert job2.status == 'pending'
+
+    run.assert_any_call(import_job_id=job1.pk)
+    run.assert_any_call(import_job_id=job2.pk)
+
+
+def test_import_batch_and_job_run_via_api(
+        factories, superuser_api_client, mocker):
+    run = mocker.patch('funkwhale_api.music.tasks.import_job_run.delay')
+
+    batch = factories['music.ImportBatch']()
+    job1 = factories['music.ImportJob'](batch=batch, status='errored')
+    job2 = factories['music.ImportJob'](status='pending')
+
+    url = reverse('api:v1:import-jobs-run')
+    response = superuser_api_client.post(
+        url, {'batches': [batch.pk], 'jobs': [job2.pk]})
+
+    job1.refresh_from_db()
+    job2.refresh_from_db()
+    assert response.status_code == 200
+    assert job1.status == 'pending'
+    assert job2.status == 'pending'
+
+    run.assert_any_call(import_job_id=job1.pk)
+    run.assert_any_call(import_job_id=job2.pk)
@@ -0,0 +1 @@
+Can now relaunch errored jobs and batches (#176)
@@ -40,7 +40,16 @@
         </tr>
         <tr v-if="stats">
           <td><strong>{{ $t('Errored') }}</strong></td>
-          <td>{{ stats.errored }}</td>
+          <td>
+            {{ stats.errored }}
+            <button
+              @click="rerun({batches: [batch.id], jobs: []})"
+              v-if="stats.errored > 0"
+              class="ui tiny basic icon button">
+              <i class="redo icon" />
+              {{ $t('Rerun errored jobs')}}
+            </button>
+          </td>
         </tr>
         <tr v-if="stats">
           <td><strong>{{ $t('Finished') }}</strong></td>
@@ -83,11 +92,21 @@
             <a :href="'https://www.musicbrainz.org/recording/' + job.mbid" target="_blank">{{ job.mbid }}</a>
           </td>
           <td>
-            <a :href="job.source" target="_blank">{{ job.source }}</a>
+            <a :title="job.source" :href="job.source" target="_blank">
+              {{ job.source|truncate(50) }}
+            </a>
           </td>
           <td>
             <span
-              :class="['ui', {'yellow': job.status === 'pending'}, {'red': job.status === 'errored'}, {'green': job.status === 'finished'}, 'label']">{{ job.status }}</span>
+              :class="['ui', {'yellow': job.status === 'pending'}, {'red': job.status === 'errored'}, {'green': job.status === 'finished'}, 'label']">
+              {{ job.status }}</span>
+            <button
+              @click="rerun({batches: [], jobs: [job.id]})"
+              v-if="job.status === 'errored'"
+              :title="$t('Rerun job')"
+              class="ui tiny basic icon button">
+              <i class="redo icon" />
+            </button>
           </td>
           <td>
             <router-link v-if="job.track_file" :to="{name: 'library.tracks.detail', params: {id: job.track_file.track }}">{{ job.track_file.track }}</router-link>
@@ -167,12 +186,6 @@ export default {
       return axios.get(url).then((response) => {
         self.batch = response.data
         self.isLoading = false
-        if (self.batch.status === 'pending') {
-          self.timeout = setTimeout(
-            self.fetchData,
-            5000
-          )
-        }
       })
     },
     fetchStats () {
@@ -186,7 +199,7 @@ export default {
           self.fetchJobs()
           self.fetchData()
         }
-        if (self.batch.status === 'pending') {
+        if (self.stats.pending > 0) {
           self.timeout = setTimeout(
             self.fetchStats,
             5000
@@ -194,6 +207,15 @@ export default {
         }
       })
     },
+    rerun ({jobs, batches}) {
+      let payload = {
+        jobs, batches
+      }
+      let self = this
+      axios.post('import-jobs/run/', payload).then((response) => {
+        self.fetchStats()
+      })
+    },
     fetchJobs () {
       let params = {
         batch: this.id,