diff --git a/src/backend/home/tests.py b/src/backend/home/tests.py
index baff765d..dc1cd3f5 100644
--- a/src/backend/home/tests.py
+++ b/src/backend/home/tests.py
@@ -200,22 +200,28 @@ def test_fail_requests(self):
         self.assertIn('does not exist or is deleted', resp.json()['error'])
         dm.DatasetRawFile.objects.create(dataset=self.ds, rawfile=self.timsraw)
         resp = self.cl.post(self.url, content_type='application/json', data={'pwiz_id': self.pw.pk,
-            'dsid': self.ds.pk})
+            'dsid': self.ds.pk, 'dbid': self.sflib.pk})
         self.assertEqual(resp.status_code, 403)
         self.assertIn('contains data from multiple instrument types', resp.json()['error'])
 
-    def test_existing_mzmls(self):
+    def test_existing_mzmls_no_db(self):
         # no mzMLs exist yet
         resp = self.cl.post(self.url, content_type='application/json', data={'pwiz_id': self.pw.pk,
             'dsid': self.ds.pk})
         self.assertEqual(resp.status_code, 403)
         self.assertIn('Need to create normal mzMLs', resp.json()['error'])
+        # Not passed a db
+        am.MzmlFile.objects.create(sfile=self.qesf, pwiz=self.pw)
+        resp = self.cl.post(self.url, content_type='application/json', data={'pwiz_id': self.pw.pk,
+            'dsid': self.ds.pk})
+        self.assertEqual(resp.status_code, 400)
+        self.assertEqual('Must pass a database to refine with', resp.json()['error'])
+
         # refined exists already
         refinedsf = rm.StoredFile.objects.create(rawfile=self.qeraw, filename=f'{self.qeraw.name}_refined',
             servershare=self.ds.storageshare, path=self.storloc, md5='refined_md5',
             checked=True, filetype=self.ft)
         am.MzmlFile.objects.create(sfile=refinedsf, pwiz=self.pw, refined=True)
-        am.MzmlFile.objects.create(sfile=self.qesf, pwiz=self.pw)
         resp = self.cl.post(self.url, content_type='application/json', data={'dsid': self.ds.pk})
         self.assertEqual(resp.status_code, 403)
diff --git a/src/backend/home/views.py b/src/backend/home/views.py
index fcaf089d..5ce3ff16 100644
--- a/src/backend/home/views.py
+++ b/src/backend/home/views.py
@@ -917,9 +917,11 @@ def refine_mzmls(request):
     elif not nr_exist_mzml or nr_exist_mzml < nr_dsrs:
         return JsonResponse({'error': 'Need to create normal mzMLs before refining'}, status=403)
     # Check DB
-    if filemodels.StoredFile.objects.filter(pk=data['dbid'],
-            filetype__name=settings.DBFA_FT_NAME).count() != 1:
-        return JsonResponse({'error': 'Wrong database to refine with'}, status=403)
+    if dbid := data.get('dbid'):
+        if filemodels.StoredFile.objects.filter(pk=dbid, filetype__name=settings.DBFA_FT_NAME).count() != 1:
+            return JsonResponse({'error': 'Wrong database to refine with'}, status=403)
+    else:
+        return JsonResponse({'error': 'Must pass a database to refine with'}, status=400)
     # Check WF
     if anmodels.NextflowWfVersionParamset.objects.filter(pk=data['wfid'],
             userworkflow__name__icontains='refine',
@@ -937,7 +939,7 @@ def refine_mzmls(request):
     # FIXME get analysis if it does exist, in case someone reruns?
     analysis = anmodels.Analysis.objects.create(user=request.user, name=f'refine_dataset_{dset.pk}', editable=False)
     job = create_job('refine_mzmls', dset_id=dset.pk, analysis_id=analysis.id, wfv_id=data['wfid'],
-            dstshare_id=res_share.pk, dbfn_id=data['dbid'], qtype=dset.quantdataset.quanttype.shortname)
+            dstshare_id=res_share.pk, dbfn_id=dbid, qtype=dset.quantdataset.quanttype.shortname)
     uwf = anmodels.UserWorkflow.objects.get(nfwfversionparamsets=data['wfid'],
             wftype=anmodels.UserWorkflow.WFTypeChoices.SPEC)
     anmodels.NextflowSearch.objects.update_or_create(analysis=analysis, defaults={
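
The new guard in `refine_mzmls` uses an assignment expression, so a request without a `dbid` key (or with a falsy one) returns 400 before any `StoredFile` lookup runs, while a present-but-unknown `dbid` still returns 403. A minimal standalone sketch of that control flow, outside the view; `check_dbid` and `existing_db_ids` are hypothetical names for illustration only:

```python
def check_dbid(data, existing_db_ids):
    # Mirrors the branch order added in refine_mzmls: missing/falsy dbid -> 400,
    # unknown dbid -> 403, known dbid -> proceed.
    if dbid := data.get('dbid'):
        if dbid not in existing_db_ids:
            return {'error': 'Wrong database to refine with', 'status': 403}
        return {'dbid': dbid, 'status': 200}
    else:
        return {'error': 'Must pass a database to refine with', 'status': 400}

# Example payloads (assumed shapes, not real fixtures):
assert check_dbid({'dsid': 1}, {5})['status'] == 400   # no dbid passed
assert check_dbid({'dbid': 99}, {5})['status'] == 403  # dbid not a known database
assert check_dbid({'dbid': 5}, {5})['status'] == 200   # valid dbid
```

Because the check is truthiness-based, a `dbid` of 0 would also take the 400 branch; with auto-increment primary keys starting at 1, that does not exclude any real database record.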