From a1f45de56115119b752c9f51ec7e762bb39eb554 Mon Sep 17 00:00:00 2001 From: Douglas Anderson Date: Thu, 16 May 2019 18:17:00 -0600 Subject: [PATCH 01/36] WIP Merge acquisitions into results This works, but (multi-recording) archive generation needs work and unit tests are not up-to-date, so likely there are permission regressions that will be corrected when test failures are addressed. --- src/acquisitions/__init__.py | 0 src/acquisitions/apps.py | 5 - src/acquisitions/migrations/0001_initial.py | 30 --- .../migrations/0002_auto_20190515_2055.py | 26 --- src/acquisitions/migrations/__init__.py | 0 src/acquisitions/serializers.py | 87 --------- src/acquisitions/tests/__init__.py | 0 .../tests/test_admin_detail_view.py | 128 ------------- src/acquisitions/tests/test_archive_view.py | 42 ----- src/acquisitions/tests/test_detail_view.py | 70 ------- src/acquisitions/tests/test_list_view.py | 56 ------ src/acquisitions/tests/test_overview_view.py | 56 ------ .../tests/test_user_detail_view.py | 138 -------------- src/acquisitions/tests/utils.py | 113 ----------- src/acquisitions/urls.py | 34 ---- src/acquisitions/views.py | 177 ------------------ src/actions/acquire_single_freq_fft.py | 87 ++++----- .../acquire_stepped_freq_tdomain_iq.py | 69 +++---- src/authentication/migrations/0001_initial.py | 2 +- src/hardware/__init__.py | 4 + src/results/migrations/0001_initial.py | 30 ++- .../migrations/0002_auto_20190516_2353.py | 25 +++ src/results/models/__init__.py | 2 + .../models/acquisition.py} | 27 ++- .../{models.py => models/task_result.py} | 34 +++- src/{acquisitions => results}/permissions.py | 10 +- src/results/serializers/__init__.py | 2 + src/results/serializers/acquisition.py | 38 ++++ .../task_result.py} | 38 ++-- src/results/urls.py | 23 ++- src/results/views.py | 163 +++++++++++++++- src/schedule/migrations/0001_initial.py | 2 +- src/schedule/models/__init__.py | 2 - src/schedule/serializers.py | 12 +- src/scheduler/scheduler.py | 66 ++++--- src/sensor/exceptions.py | 6 +- src/sensor/settings.py | 5 - src/sensor/urls.py | 1 - src/sensor/views.py | 7 +- src/status/migrations/0001_initial.py | 2 +- 40 files changed, 449 insertions(+), 1170 deletions(-) delete mode 100644 src/acquisitions/__init__.py delete mode 100644 src/acquisitions/apps.py delete mode 100644 src/acquisitions/migrations/0001_initial.py delete mode 100644 src/acquisitions/migrations/0002_auto_20190515_2055.py delete mode 100644 src/acquisitions/migrations/__init__.py delete mode 100644 src/acquisitions/serializers.py delete mode 100644 src/acquisitions/tests/__init__.py delete mode 100644 src/acquisitions/tests/test_admin_detail_view.py delete mode 100644 src/acquisitions/tests/test_archive_view.py delete mode 100644 src/acquisitions/tests/test_detail_view.py delete mode 100644 src/acquisitions/tests/test_list_view.py delete mode 100644 src/acquisitions/tests/test_overview_view.py delete mode 100644 src/acquisitions/tests/test_user_detail_view.py delete mode 100644 src/acquisitions/tests/utils.py delete mode 100644 src/acquisitions/urls.py delete mode 100644 src/acquisitions/views.py create mode 100644 src/results/migrations/0002_auto_20190516_2353.py create mode 100644 src/results/models/__init__.py rename src/{acquisitions/models.py => results/models/acquisition.py} (52%) rename src/results/{models.py => models/task_result.py} (65%) rename src/{acquisitions => results}/permissions.py (74%) create mode 100644 src/results/serializers/__init__.py create mode 100644 src/results/serializers/acquisition.py rename 
src/results/{serializers.py => serializers/task_result.py} (93%) diff --git a/src/acquisitions/__init__.py b/src/acquisitions/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/src/acquisitions/apps.py b/src/acquisitions/apps.py deleted file mode 100644 index 5c7a70d8..00000000 --- a/src/acquisitions/apps.py +++ /dev/null @@ -1,5 +0,0 @@ -from django.apps import AppConfig - - -class AcquisitionsConfig(AppConfig): - name = 'acquisitions' diff --git a/src/acquisitions/migrations/0001_initial.py b/src/acquisitions/migrations/0001_initial.py deleted file mode 100644 index f8d33e41..00000000 --- a/src/acquisitions/migrations/0001_initial.py +++ /dev/null @@ -1,30 +0,0 @@ -# Generated by Django 2.2.1 on 2019-05-15 20:55 - -from django.db import migrations, models -import jsonfield.fields - - -class Migration(migrations.Migration): - - initial = True - - dependencies = [ - ] - - operations = [ - migrations.CreateModel( - name='Acquisition', - fields=[ - ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), - ('task_id', models.IntegerField(help_text='The id of the task relative to the acquisition')), - ('recording_id', models.IntegerField(default=0, help_text='The id of the recording relative to the task')), - ('sigmf_metadata', jsonfield.fields.JSONField(help_text='The sigmf meta data for the acquisition')), - ('data', models.BinaryField(null=True)), - ('created', models.DateTimeField(auto_now_add=True, help_text='The time the acquisition was created')), - ], - options={ - 'db_table': 'acquisitions', - 'ordering': ('created',), - }, - ), - ] diff --git a/src/acquisitions/migrations/0002_auto_20190515_2055.py b/src/acquisitions/migrations/0002_auto_20190515_2055.py deleted file mode 100644 index 81440f89..00000000 --- a/src/acquisitions/migrations/0002_auto_20190515_2055.py +++ /dev/null @@ -1,26 +0,0 @@ -# Generated by Django 2.2.1 on 2019-05-15 20:55 - -from django.db import migrations, models -import django.db.models.deletion - - -class Migration(migrations.Migration): - - initial = True - - dependencies = [ - ('acquisitions', '0001_initial'), - ('schedule', '0001_initial'), - ] - - operations = [ - migrations.AddField( - model_name='acquisition', - name='schedule_entry', - field=models.ForeignKey(help_text='The schedule entry relative to the acquisition', on_delete=django.db.models.deletion.PROTECT, related_name='acquisitions', to='schedule.ScheduleEntry'), - ), - migrations.AlterUniqueTogether( - name='acquisition', - unique_together={('schedule_entry', 'task_id', 'recording_id')}, - ), - ] diff --git a/src/acquisitions/migrations/__init__.py b/src/acquisitions/migrations/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/src/acquisitions/serializers.py b/src/acquisitions/serializers.py deleted file mode 100644 index 9fc95bbe..00000000 --- a/src/acquisitions/serializers.py +++ /dev/null @@ -1,87 +0,0 @@ -from rest_framework import serializers -from rest_framework.reverse import reverse - -from schedule.models import ScheduleEntry -from sensor import V1 -from .models import Acquisition - - -class AcquisitionsOverviewSerializer(serializers.HyperlinkedModelSerializer): - results = serializers.SerializerMethodField( - help_text="The link to the acquisitions") - schedule_entry = serializers.SerializerMethodField( - help_text="The related schedule entry for the acquisition") - acquisitions_available = serializers.SerializerMethodField( - help_text="The number of available acquisitions") - archive = 
serializers.SerializerMethodField( - help_text="The url to download a SigMF archive of all acquisitions" - ) - - class Meta: - model = ScheduleEntry - fields = ('results', 'acquisitions_available', 'archive', - 'schedule_entry') - - def get_results(self, obj): - request = self.context['request'] - route = 'acquisition-list' - kws = {'schedule_entry_name': obj.name} - kws.update(V1) - url = reverse(route, kwargs=kws, request=request) - return url - - def get_acquisitions_available(self, obj): - return obj.acquisitions.count() - - def get_schedule_entry(self, obj): - request = self.context['request'] - kwargs = {'pk': obj.name} - url = reverse('schedule-detail', kwargs=kwargs, request=request) - return url - - def get_archive(self, obj): - request = self.context['request'] - kwargs = {'schedule_entry_name': obj.name} - url = reverse('acquisition-list-archive', kwargs=kwargs, - request=request) - return url - - -class AcquisitionHyperlinkedRelatedField(serializers.HyperlinkedRelatedField): - # django-rest-framework.org/api-guide/relations/#custom-hyperlinked-fields - def get_url(self, obj, view_name, request, format): - kws = { - 'schedule_entry_name': obj.schedule_entry.name, - 'task_id': obj.task_id - } - kws.update(V1) - url = reverse(view_name, kwargs=kws, request=request, format=format) - return url - - -class AcquisitionSerializer(serializers.ModelSerializer): - # `self` here refers to the self url field - this seems to work - self = AcquisitionHyperlinkedRelatedField( - view_name='acquisition-detail', - read_only=True, - help_text="The url of the acquisition", - source='*' # pass whole object - ) - archive = AcquisitionHyperlinkedRelatedField( - view_name='acquisition-archive', - read_only=True, - help_text="The url to download a SigMF archive of this acquisition", - source='*' # pass whole object - ) - sigmf_metadata = serializers.DictField( - help_text="The sigmf meta data for the acquisition") - - class Meta: - model = Acquisition - fields = ('self', 'task_id', 'created', 'archive', 'sigmf_metadata') - extra_kwargs = { - 'schedule_entry': { - 'view_name': 'schedule-detail', - 'lookup_field': 'name' - } - } diff --git a/src/acquisitions/tests/__init__.py b/src/acquisitions/tests/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/src/acquisitions/tests/test_admin_detail_view.py b/src/acquisitions/tests/test_admin_detail_view.py deleted file mode 100644 index ff592e81..00000000 --- a/src/acquisitions/tests/test_admin_detail_view.py +++ /dev/null @@ -1,128 +0,0 @@ -from rest_framework import status - -from acquisitions.tests.utils import (reverse_acquisition_detail, - update_acquisition_detail, - simulate_acquisitions, HTTPS_KWARG) -from sensor.tests.utils import validate_response - - -def test_admin_can_create_private_acquisition(admin_client, user_client, - test_scheduler): - private_entry_name = simulate_acquisitions(admin_client, is_private=True) - private_acq_url = reverse_acquisition_detail(private_entry_name, 1) - - user_response = user_client.get(private_acq_url, **HTTPS_KWARG) - - validate_response(user_response, status.HTTP_403_FORBIDDEN) - - -def test_admin_can_view_all_acquisitions(admin_client, alt_admin_client, - user_client, test_scheduler): - # alt admin schedule entry - alt_admin_entry_name = simulate_acquisitions( - alt_admin_client, name='alt_admin_single_acq') - alt_admin_acq_url = reverse_acquisition_detail(alt_admin_entry_name, 1) - - admin_view_alt_admin_response = admin_client.get(alt_admin_acq_url, - **HTTPS_KWARG) - - # user schedule 
entry - user_acq_name = simulate_acquisitions(user_client, name='admin_single_acq') - user_acq_url = reverse_acquisition_detail(user_acq_name, 1) - - admin_view_user_response = admin_client.get(user_acq_url, **HTTPS_KWARG) - - validate_response(admin_view_alt_admin_response, status.HTTP_200_OK) - validate_response(admin_view_user_response, status.HTTP_200_OK) - - -def test_admin_can_view_private_acquisitions(admin_client, alt_admin_client, - test_scheduler): - private_entry_name = simulate_acquisitions( - alt_admin_client, is_private=True) - private_acq_url = reverse_acquisition_detail(private_entry_name, 1) - - response = admin_client.get(private_acq_url, **HTTPS_KWARG) - - validate_response(response, status.HTTP_200_OK) - - -def test_admin_can_delete_their_acquisition(admin_client, test_scheduler): - entry_name = simulate_acquisitions(admin_client) - acq_url = reverse_acquisition_detail(entry_name, 1) - - first_response = admin_client.delete(acq_url, **HTTPS_KWARG) - second_response = admin_client.delete(acq_url, **HTTPS_KWARG) - - validate_response(first_response, status.HTTP_204_NO_CONTENT) - validate_response(second_response, status.HTTP_404_NOT_FOUND) - - -def test_admin_can_delete_other_acquisitions(admin_client, alt_admin_client, - user_client, test_scheduler): - # alt admin private schedule entry - alt_admin_entry_name = simulate_acquisitions( - alt_admin_client, name='alt_admin_single_acq', is_private=True) - alt_admin_acq_url = reverse_acquisition_detail(alt_admin_entry_name, 1) - - admin_delete_alt_admin_response = admin_client.delete( - alt_admin_acq_url, **HTTPS_KWARG) - - # user schedule entry - user_acq_name = simulate_acquisitions(user_client, name='admin_single_acq') - user_acq_url = reverse_acquisition_detail(user_acq_name, 1) - - admin_delete_user_response = admin_client.delete(user_acq_url, - **HTTPS_KWARG) - - validate_response(admin_delete_user_response, status.HTTP_204_NO_CONTENT) - validate_response(admin_delete_alt_admin_response, - status.HTTP_204_NO_CONTENT) - - -def test_admin_cant_modify_their_acquisition(admin_client, test_scheduler): - entry_name = simulate_acquisitions(admin_client) - acq_url = reverse_acquisition_detail(entry_name, 1) - - new_acquisition_detail = admin_client.get(acq_url, **HTTPS_KWARG).data - - new_acquisition_detail['task_id'] = 2 - - response = update_acquisition_detail(admin_client, entry_name, 1, - new_acquisition_detail) - - validate_response(response, status.HTTP_405_METHOD_NOT_ALLOWED) - - -def test_user_cant_modify_other_acquisitions(admin_client, alt_admin_client, - user_client, test_scheduler): - # alt admin schedule entry - alt_admin_entry_name = simulate_acquisitions( - alt_admin_client, name='alt_admin_single_acq') - alt_admin_acq_url = reverse_acquisition_detail(alt_admin_entry_name, 1) - - new_acquisition_detail = user_client.get(alt_admin_acq_url, **HTTPS_KWARG) - - new_acquisition_detail = new_acquisition_detail.data - - new_acquisition_detail['task_id'] = 2 - - admin_modify_alt_admin_response = update_acquisition_detail( - admin_client, alt_admin_entry_name, 1, new_acquisition_detail) - - # user schedule entry - user_entry_name = simulate_acquisitions( - user_client, name='admin_single_acq') - user_acq_url = reverse_acquisition_detail(user_entry_name, 1) - - new_acquisition_detail = admin_client.get(user_acq_url, **HTTPS_KWARG).data - - new_acquisition_detail['task_id'] = 2 - - admin_modify_user_response = update_acquisition_detail( - admin_client, user_entry_name, 1, new_acquisition_detail) - - 
validate_response(admin_modify_alt_admin_response, - status.HTTP_405_METHOD_NOT_ALLOWED) - validate_response(admin_modify_user_response, - status.HTTP_405_METHOD_NOT_ALLOWED) diff --git a/src/acquisitions/tests/test_archive_view.py b/src/acquisitions/tests/test_archive_view.py deleted file mode 100644 index 8ab23b65..00000000 --- a/src/acquisitions/tests/test_archive_view.py +++ /dev/null @@ -1,42 +0,0 @@ -import os -import tempfile - -import numpy as np -from rest_framework import status - -import sigmf.sigmffile - -import sensor.settings -from acquisitions.tests.utils import (reverse_acquisition_archive, - simulate_acquisitions, HTTPS_KWARG) - - -def test_archive_download(user_client, test_scheduler): - entry_name = simulate_acquisitions(user_client, n=1) - task_id = 1 - url = reverse_acquisition_archive(entry_name, task_id) - disposition = 'attachment; filename="{}_test_acq_1.sigmf"' - disposition = disposition.format(sensor.settings.FQDN) - response = user_client.get(url, **HTTPS_KWARG) - - assert response.status_code == status.HTTP_200_OK - assert response['content-disposition'] == disposition - assert response['content-type'] == 'application/x-tar' - - with tempfile.NamedTemporaryFile() as tf: - for content in response.streaming_content: - tf.write(content) - - sigmf_archive_contents = sigmf.sigmffile.fromarchive(tf.name) - md = sigmf_archive_contents._metadata - datafile = sigmf_archive_contents.data_file - datafile_actual_size = os.stat(datafile).st_size - claimed_sha512 = md['global']['core:sha512'] - number_of_sample_arrays = len(md['annotations']) - samples_per_array = md['annotations'][0]['core:sample_count'] - sample_array_size = samples_per_array * np.float32(0.0).nbytes - datafile_expected_size = number_of_sample_arrays * sample_array_size - actual_sha512 = sigmf.sigmf_hash.calculate_sha512(datafile) - - assert datafile_actual_size == datafile_expected_size - assert claimed_sha512 == actual_sha512 diff --git a/src/acquisitions/tests/test_detail_view.py b/src/acquisitions/tests/test_detail_view.py deleted file mode 100644 index 726151c5..00000000 --- a/src/acquisitions/tests/test_detail_view.py +++ /dev/null @@ -1,70 +0,0 @@ -import pytest -from rest_framework import status -from rest_framework.reverse import reverse - -from acquisitions.tests.utils import (get_acquisition_detail, - reverse_acquisition_detail, - simulate_acquisitions, HTTPS_KWARG) -from sensor import V1 -from sensor.tests.utils import validate_response - - -def test_non_existent_entry(user_client): - with pytest.raises(AssertionError): - get_acquisition_detail(user_client, 'doesntexist', 1) - - -def test_non_existent_task_id(user_client, test_scheduler): - entry_name = simulate_acquisitions(user_client, n=1) - with pytest.raises(AssertionError): - non_existent_task_id = 2 - get_acquisition_detail(user_client, entry_name, non_existent_task_id) - - -def test_get_detail_from_single(user_client, test_scheduler): - entry_name = simulate_acquisitions(user_client, n=1) - task_id = 1 - acq = get_acquisition_detail(user_client, entry_name, task_id) - - assert acq['task_id'] == task_id - - -def test_get_detail_from_multiple(user_client, test_scheduler): - entry_name = simulate_acquisitions(user_client, n=3) - task_id = 3 - acq = get_acquisition_detail(user_client, entry_name, task_id) - - assert acq['task_id'] == task_id - - -def test_delete_single(user_client, test_scheduler): - entry_name = simulate_acquisitions(user_client, n=3) - task_id_to_delete = 2 - url = reverse_acquisition_detail(entry_name, 
task_id_to_delete) - - response = user_client.delete(url, **HTTPS_KWARG) - validate_response(response, status.HTTP_204_NO_CONTENT) - - response = user_client.delete(url, **HTTPS_KWARG) - validate_response(response, status.HTTP_404_NOT_FOUND) - - # other 2 acquisitions should be unaffected - get_acquisition_detail(user_client, entry_name, 1) - get_acquisition_detail(user_client, entry_name, 3) - - -def test_private_entries_have_private_acquisitons(admin_client, user_client, - test_scheduler): - entry_name = simulate_acquisitions(admin_client, is_private=True) - kws = {'pk': entry_name} - kws.update(V1) - entry_url = reverse('schedule-detail', kwargs=kws) - - admin_response = admin_client.get(entry_url, **HTTPS_KWARG) - admin_acquisition_url = admin_response.data['acquisitions'] - - user_respose = user_client.get(admin_acquisition_url, **HTTPS_KWARG) - admin_respose = admin_client.get(admin_acquisition_url, **HTTPS_KWARG) - - validate_response(user_respose, status.HTTP_403_FORBIDDEN) - validate_response(admin_respose, status.HTTP_200_OK) diff --git a/src/acquisitions/tests/test_list_view.py b/src/acquisitions/tests/test_list_view.py deleted file mode 100644 index 75958c79..00000000 --- a/src/acquisitions/tests/test_list_view.py +++ /dev/null @@ -1,56 +0,0 @@ -import pytest -from rest_framework import status - -from acquisitions.tests.utils import ( - get_acquisition_list, reverse_acquisition_detail, reverse_acquisition_list, - simulate_acquisitions) -from schedule.tests.utils import post_schedule, TEST_SCHEDULE_ENTRY -from sensor.tests.utils import validate_response, HTTPS_KWARG - - -def test_non_existent_entry(user_client, test_scheduler): - with pytest.raises(AssertionError): - get_acquisition_list(user_client, 'doesntexist') - - -@pytest.mark.django_db -def test_entry_with_no_acquisition_response(user_client, test_scheduler): - entry = post_schedule(user_client, TEST_SCHEDULE_ENTRY) - - with pytest.raises(AssertionError): - assert get_acquisition_list(user_client, entry['name']) - - -@pytest.mark.django_db -def test_single_acquisition_response(user_client, test_scheduler): - entry_name = simulate_acquisitions(user_client, n=1) - acquisition, = get_acquisition_list(user_client, entry_name) - task_id = 1 - expected_url = reverse_acquisition_detail(entry_name, task_id) - - assert acquisition['self'] == expected_url - assert acquisition['task_id'] == task_id - - -@pytest.mark.django_db -def test_multiple_acquisition_response(user_client, test_scheduler): - entry_name = simulate_acquisitions(user_client, n=3) - acquisitions = get_acquisition_list(user_client, entry_name) - assert len(acquisitions) == 3 - - for i, acq in enumerate(acquisitions, start=1): - expected_url = reverse_acquisition_detail(entry_name, i) - assert acq['self'] == expected_url - assert acq['task_id'] == i - - -@pytest.mark.django_db -def test_delete_list(user_client, test_scheduler): - entry_name = simulate_acquisitions(user_client, n=3) - url = reverse_acquisition_list(entry_name) - - response = user_client.delete(url, **HTTPS_KWARG) - validate_response(response, status.HTTP_204_NO_CONTENT) - - response = user_client.delete(url, **HTTPS_KWARG) - validate_response(response, status.HTTP_404_NOT_FOUND) diff --git a/src/acquisitions/tests/test_overview_view.py b/src/acquisitions/tests/test_overview_view.py deleted file mode 100644 index c9720f47..00000000 --- a/src/acquisitions/tests/test_overview_view.py +++ /dev/null @@ -1,56 +0,0 @@ -from rest_framework import status - -from acquisitions.tests.utils import ( - 
SINGLE_ACQUISITION, EMPTY_ACQUISITIONS_RESPONSE, - reverse_acquisitions_overview, reverse_acquisition_list, - simulate_acquisitions, get_acquisitions_overview) -from schedule.tests.utils import post_schedule -from sensor.tests.utils import validate_response, HTTPS_KWARG - - -def test_empty_overview_response(user_client): - response = get_acquisitions_overview(user_client) - assert response == EMPTY_ACQUISITIONS_RESPONSE - - -def test_overview_exists_when_entry_created(user_client, test_scheduler): - post_schedule(user_client, SINGLE_ACQUISITION) - overview, = get_acquisitions_overview(user_client) - assert overview['acquisitions_available'] == 0 - - -def test_get_overview(user_client, test_scheduler): - entry1_name = simulate_acquisitions(user_client) - overview, = get_acquisitions_overview(user_client) - - assert overview['results'] == reverse_acquisition_list(entry1_name) - assert overview['acquisitions_available'] == 1 - - entry2_name = simulate_acquisitions(user_client, n=3) - overview_list = get_acquisitions_overview(user_client) - - assert len(overview_list) == 2 - - (overview1, overview2) = overview_list - - assert overview1 == overview - assert overview2['results'] == reverse_acquisition_list(entry2_name) - assert overview2['acquisitions_available'] == 3 - - -def test_overview_for_private_entry_is_private(admin_client, user_client, - test_scheduler): - simulate_acquisitions(admin_client, is_private=True) - overview = get_acquisitions_overview(user_client) - assert overview == [] - - overview, = get_acquisitions_overview(admin_client) - assert overview['acquisitions_available'] == 1 - assert overview['results'] # is non-empty string - assert overview['schedule_entry'] # is non-empty string - - -def test_delete_overview_not_allowed(user_client, test_scheduler): - url = reverse_acquisitions_overview() - response = user_client.delete(url, **HTTPS_KWARG) - assert validate_response(response, status.HTTP_405_METHOD_NOT_ALLOWED) diff --git a/src/acquisitions/tests/test_user_detail_view.py b/src/acquisitions/tests/test_user_detail_view.py deleted file mode 100644 index 46c15a0a..00000000 --- a/src/acquisitions/tests/test_user_detail_view.py +++ /dev/null @@ -1,138 +0,0 @@ -from rest_framework import status - -from acquisitions.tests.utils import (reverse_acquisition_detail, - update_acquisition_detail, - simulate_acquisitions, HTTPS_KWARG) -from sensor.tests.utils import validate_response - - -def test_user_can_create_nonprivate_acquisition(user_client, test_scheduler): - entry_name = simulate_acquisitions(user_client) - acq_url = reverse_acquisition_detail(entry_name, 1) - response = user_client.get(acq_url, **HTTPS_KWARG) - - validate_response(response, status.HTTP_200_OK) - - -def test_user_cant_create_private_acquisition(user_client, alt_user_client, - test_scheduler): - # The alt user attempts to create a private acquisition. - entry_name = simulate_acquisitions(alt_user_client, is_private=True) - acq_url = reverse_acquisition_detail(entry_name, 1) - - # The user attempts to GET the acquisition that the alt user created. - response = user_client.get(acq_url, **HTTPS_KWARG) - - # The user successfully GETs the acquistion that the alt user - # created; meaning that the acquisition was not, in fact, private. 
- validate_response(response, status.HTTP_200_OK) - - -def test_user_can_view_other_nonprivate_acquisitions( - admin_client, user_client, alt_user_client, test_scheduler): - # alt user schedule entry - alt_user_entry_name = simulate_acquisitions( - alt_user_client, name='alt_user_single_acq') - alt_user_acq_url = reverse_acquisition_detail(alt_user_entry_name, 1) - - user_view_alt_user_response = user_client.get(alt_user_acq_url, - **HTTPS_KWARG) - - # admin user schedule entry - admin_acq_name = simulate_acquisitions( - admin_client, name='admin_single_acq') - admin_acq_url = reverse_acquisition_detail(admin_acq_name, 1) - - user_view_admin_response = user_client.get(admin_acq_url, **HTTPS_KWARG) - - validate_response(user_view_alt_user_response, status.HTTP_200_OK) - validate_response(user_view_admin_response, status.HTTP_200_OK) - - -def test_user_cant_view_private_acquisitions(admin_client, user_client, - test_scheduler): - private_entry_name = simulate_acquisitions(admin_client, is_private=True) - private_acq_url = reverse_acquisition_detail(private_entry_name, 1) - - response = user_client.get(private_acq_url, **HTTPS_KWARG) - - validate_response(response, status.HTTP_403_FORBIDDEN) - - -def test_user_can_delete_their_acquisition(user_client, test_scheduler): - entry_name = simulate_acquisitions(user_client) - acq_url = reverse_acquisition_detail(entry_name, 1) - - first_response = user_client.delete(acq_url, **HTTPS_KWARG) - second_response = user_client.delete(acq_url, **HTTPS_KWARG) - - validate_response(first_response, status.HTTP_204_NO_CONTENT) - validate_response(second_response, status.HTTP_404_NOT_FOUND) - - -def test_user_cant_delete_other_acquisitions(admin_client, user_client, - alt_user_client, test_scheduler): - # alt user schedule entry - alt_user_entry_name = simulate_acquisitions( - alt_user_client, name='alt_user_single_acq') - alt_user_acq_url = reverse_acquisition_detail(alt_user_entry_name, 1) - - user_delete_alt_user_response = user_client.delete(alt_user_acq_url, - **HTTPS_KWARG) - - # admin user schedule entry - admin_acq_name = simulate_acquisitions( - admin_client, name='admin_single_acq') - admin_acq_url = reverse_acquisition_detail(admin_acq_name, 1) - - user_delete_admin_response = user_client.delete(admin_acq_url, - **HTTPS_KWARG) - - validate_response(user_delete_admin_response, status.HTTP_403_FORBIDDEN) - validate_response(user_delete_alt_user_response, status.HTTP_403_FORBIDDEN) - - -def test_user_cant_modify_their_acquisition(user_client, test_scheduler): - entry_name = simulate_acquisitions(user_client) - acq_url = reverse_acquisition_detail(entry_name, 1) - - new_acquisition_detail = user_client.get(acq_url, **HTTPS_KWARG).data - - new_acquisition_detail['task_id'] = 2 - - response = update_acquisition_detail(user_client, entry_name, 1, - new_acquisition_detail) - - validate_response(response, status.HTTP_405_METHOD_NOT_ALLOWED) - - -def test_user_cant_modify_other_acquisitions(admin_client, user_client, - alt_user_client, test_scheduler): - # alt user schedule entry - alt_user_entry_name = simulate_acquisitions( - alt_user_client, name='alt_user_single_acq') - alt_user_acq_url = reverse_acquisition_detail(alt_user_entry_name, 1) - - new_acquisition_detail = user_client.get(alt_user_acq_url, **HTTPS_KWARG) - - new_acquisition_detail = new_acquisition_detail.data - - new_acquisition_detail['task_id'] = 2 - - user_modify_alt_user_response = update_acquisition_detail( - user_client, alt_user_entry_name, 1, new_acquisition_detail) - - # admin user 
schedule entry - admin_entry_name = simulate_acquisitions( - admin_client, name='admin_single_acq') - admin_acq_url = reverse_acquisition_detail(admin_entry_name, 1) - - new_acquisition_detail = user_client.get(admin_acq_url, **HTTPS_KWARG).data - - new_acquisition_detail['task_id'] = 2 - - user_modify_admin_response = update_acquisition_detail( - user_client, admin_entry_name, 1, new_acquisition_detail) - - validate_response(user_modify_alt_user_response, status.HTTP_403_FORBIDDEN) - validate_response(user_modify_admin_response, status.HTTP_403_FORBIDDEN) diff --git a/src/acquisitions/tests/utils.py b/src/acquisitions/tests/utils.py deleted file mode 100644 index 2abad9db..00000000 --- a/src/acquisitions/tests/utils.py +++ /dev/null @@ -1,113 +0,0 @@ -import json -from django.test import RequestFactory -from rest_framework.reverse import reverse -from rest_framework import status - -from schedule.tests.utils import post_schedule -from scheduler.tests.utils import simulate_scheduler_run -from sensor import V1 -from sensor.tests.utils import validate_response, HTTPS_KWARG - -EMPTY_ACQUISITIONS_RESPONSE = [] - -SINGLE_ACQUISITION = { - 'name': 'test_acq', - 'start': None, - 'stop': None, - 'interval': None, - 'action': 'mock_acquire' -} - -MULTIPLE_ACQUISITIONS = { - 'name': 'test_multiple_acq', - 'start': None, - 'relative_stop': 5, - 'interval': 1, - 'action': 'mock_acquire' -} - - -def simulate_acquisitions(client, n=1, is_private=False, name=None): - assert 0 < n <= 10 - - if n == 1: - schedule_entry = SINGLE_ACQUISITION.copy() - else: - schedule_entry = MULTIPLE_ACQUISITIONS.copy() - schedule_entry['relative_stop'] = n + 1 - - schedule_entry['is_private'] = is_private - - if name is not None: - schedule_entry['name'] = name - - entry = post_schedule(client, schedule_entry) - simulate_scheduler_run(n) - - return entry['name'] - - -def reverse_acquisitions_overview(): - rf = RequestFactory() - request = rf.get('/acquisitions/', **HTTPS_KWARG) - return reverse('acquisitions-overview', kwargs=V1, request=request) - - -def reverse_acquisition_list(schedule_entry_name): - rf = RequestFactory() - request = rf.get('/acquisitions/' + schedule_entry_name, **HTTPS_KWARG) - kws = {'schedule_entry_name': schedule_entry_name} - kws.update(V1) - return reverse('acquisition-list', kwargs=kws, request=request) - - -def reverse_acquisition_detail(schedule_entry_name, task_id): - rf = RequestFactory() - url = '/acquisitions/' + schedule_entry_name + '/' + str(task_id) - request = rf.get(url, **HTTPS_KWARG) - kws = {'schedule_entry_name': schedule_entry_name, 'task_id': task_id} - kws.update(V1) - return reverse('acquisition-detail', kwargs=kws, request=request) - - -def reverse_acquisition_archive(schedule_entry_name, task_id): - rf = RequestFactory() - entry_name = schedule_entry_name - url = '/'.join(['/acquisitions', entry_name, str(task_id), 'archive']) - request = rf.get(url, **HTTPS_KWARG) - kws = {'schedule_entry_name': entry_name, 'task_id': task_id} - kws.update(V1) - return reverse('acquisition-archive', kwargs=kws, request=request) - - -def get_acquisitions_overview(client): - url = reverse_acquisitions_overview() - response = client.get(url, **HTTPS_KWARG) - rjson = validate_response(response, status.HTTP_200_OK) - return rjson['results'] - - -def get_acquisition_list(client, schedule_entry_name): - url = reverse_acquisition_list(schedule_entry_name) - response = client.get(url, **HTTPS_KWARG) - rjson = validate_response(response, status.HTTP_200_OK) - return rjson['results'] - - -def 
get_acquisition_detail(client, schedule_entry_name, task_id): - url = reverse_acquisition_detail(schedule_entry_name, task_id) - response = client.get(url, **HTTPS_KWARG) - return validate_response(response, status.HTTP_200_OK) - - -def update_acquisition_detail(client, schedule_entry_name, task_id, - new_acquisition): - url = reverse_acquisition_detail(schedule_entry_name, task_id) - - kwargs = { - 'data': json.dumps(new_acquisition), - 'content_type': 'application/json', - 'wsgi.url_scheme': 'https' - } - - return client.put(url, **kwargs) diff --git a/src/acquisitions/urls.py b/src/acquisitions/urls.py deleted file mode 100644 index 5665efed..00000000 --- a/src/acquisitions/urls.py +++ /dev/null @@ -1,34 +0,0 @@ -from django.urls import path - -from .views import (AcquisitionsOverviewViewSet, AcquisitionListViewSet, - AcquisitionInstanceViewSet) - -urlpatterns = ( - path('', - view=AcquisitionsOverviewViewSet.as_view({ - 'get': 'list' - }), - name='acquisitions-overview'), - path('/', - view=AcquisitionListViewSet.as_view({ - 'get': 'list', - 'delete': 'destroy_all' - }), - name='acquisition-list'), - path('/archive/', - view=AcquisitionListViewSet.as_view({ - 'get': 'archive', - }), - name='acquisition-list-archive'), - path('//', - view=AcquisitionInstanceViewSet.as_view({ - 'get': 'retrieve', - 'delete': 'destroy' - }), - name='acquisition-detail'), - path('//archive', - view=AcquisitionInstanceViewSet.as_view({ - 'get': 'archive', - }), - name='acquisition-archive') -) diff --git a/src/acquisitions/views.py b/src/acquisitions/views.py deleted file mode 100644 index 40bf9b51..00000000 --- a/src/acquisitions/views.py +++ /dev/null @@ -1,177 +0,0 @@ -import logging -import tempfile - -from django.http import Http404, FileResponse -from rest_framework import status, filters -from rest_framework.decorators import action -from rest_framework.generics import get_object_or_404 -from rest_framework.mixins import (ListModelMixin, RetrieveModelMixin, - DestroyModelMixin) -from rest_framework.response import Response -from rest_framework.settings import api_settings -from rest_framework.viewsets import GenericViewSet - -import sigmf.archive -import sigmf.sigmffile - -import sensor.settings -from schedule.models import ScheduleEntry -from .models import Acquisition -from .permissions import IsAdminOrOwnerOrReadOnly -from .serializers import (AcquisitionsOverviewSerializer, - AcquisitionSerializer) - - -logger = logging.getLogger(__name__) - - -class AcquisitionsOverviewViewSet(ListModelMixin, GenericViewSet): - """ - list: - Returns an overview of how many acquisitions are available per schedule - entry. - """ - lookup_field = 'schedule_entry_name' - queryset = ScheduleEntry.objects.all() - serializer_class = AcquisitionsOverviewSerializer - - def get_queryset(self): - # .list() does not call .get_object(), which triggers permissions - # checks, so we need to filter our queryset based on `is_private` and - # request user. 
- base_queryset = self.filter_queryset(self.queryset) - if self.request.user.is_staff: - return base_queryset - else: - return base_queryset.filter(is_private=False) - - -class MultipleFieldLookupMixin(object): - """Get multiple field filtering based on a `lookup_fields` attribute.""" - - def get_queryset(self): - base_queryset = super(MultipleFieldLookupMixin, self).get_queryset() - base_queryset = self.filter_queryset(base_queryset) - - filter = {'schedule_entry__name': self.kwargs['schedule_entry_name']} - - queryset = base_queryset.filter(**filter) - - if not queryset.exists(): - raise Http404 - - return queryset - - def get_object(self): - queryset = self.get_queryset() - filter = {'task_id': self.kwargs['task_id']} - - return get_object_or_404(queryset, **filter) - - -class AcquisitionListViewSet(MultipleFieldLookupMixin, ListModelMixin, - GenericViewSet): - """ - list: - Returns a list of all acquisitions created by the given schedule entry. - - destroy_all: - Deletes all acquisitions created by the given schedule entry. - """ - queryset = Acquisition.objects.all() - serializer_class = AcquisitionSerializer - permission_classes = ( - api_settings.DEFAULT_PERMISSION_CLASSES + [IsAdminOrOwnerOrReadOnly]) - filter_backends = (filters.SearchFilter, filters.OrderingFilter) - lookup_fields = ('schedule_entry__name', 'task_id') - ordering_fields = ('task_id', 'created') - search_fields = ('sigmf_metadata', ) - - @action(detail=False, methods=('delete', )) - def destroy_all(self, request, version, schedule_entry_name): - queryset = self.get_queryset() - queryset = queryset.filter(schedule_entry__name=schedule_entry_name) - - if not queryset.exists(): - raise Http404 - - queryset.delete() - - return Response(status=status.HTTP_204_NO_CONTENT) - - @action(detail=False) - def archive(self, request, version, schedule_entry_name): - queryset = self.get_queryset() - queryset = queryset.filter(schedule_entry__name=schedule_entry_name) - fqdn = sensor.settings.FQDN - fname = fqdn + '_' + schedule_entry_name + '.sigmf' - - if not queryset.exists(): - raise Http404 - - # FileResponse handles closing the file - tmparchive = tempfile.TemporaryFile() - build_sigmf_archive(tmparchive, schedule_entry_name, queryset) - content_type = 'application/x-tar' - response = FileResponse(tmparchive, as_attachment=True, filename=fname, - content_type=content_type) - return response - - -class AcquisitionInstanceViewSet(MultipleFieldLookupMixin, RetrieveModelMixin, - DestroyModelMixin, GenericViewSet): - """ - destroy: - Deletes the specified acquisition. - - retrieve: - Returns all available metadata about an acquisition. - - archive: - Downloads the acquisition's SigMF archive. 
- """ - queryset = Acquisition.objects.all() - serializer_class = AcquisitionSerializer - permission_classes = ( - api_settings.DEFAULT_PERMISSION_CLASSES + [IsAdminOrOwnerOrReadOnly]) - lookup_fields = ('schedule_entry__name', 'task_id') - - @action(detail=True) - def archive(self, request, version, schedule_entry_name, task_id): - entry_name = schedule_entry_name - fqdn = sensor.settings.FQDN - fname = fqdn + '_' + entry_name + '_' + str(task_id) + '.sigmf' - acq = self.get_object() - - # FileResponse handles closing the file - tmparchive = tempfile.TemporaryFile() - build_sigmf_archive(tmparchive, schedule_entry_name, [acq]) - content_type = 'application/x-tar' - response = FileResponse(tmparchive, as_attachment=True, filename=fname, - content_type=content_type) - return response - - -def build_sigmf_archive(fileobj, schedule_entry_name, acquisitions): - """Build a SigMF archive containing `acquisitions` and save to fileobj. - - @param fileobj: a fileobj open for writing - @param schedule_entry_name: the name of the parent schedule entry - @param acquisitions: an iterable of Acquisition objects from the database - @return: None - - """ - logger.debug("building sigmf archive") - - for acq in acquisitions: - with tempfile.NamedTemporaryFile() as tmpdata: - tmpdata.write(acq.data) - tmpdata.seek(0) # move fd ptr to start of data for reading - name = schedule_entry_name + '_' + str(acq.task_id) - sigmf_file = sigmf.sigmffile.SigMFFile(metadata=acq.sigmf_metadata, - name=name) - sigmf_file.set_data_file(tmpdata.name) - - sigmf.archive.SigMFArchive(sigmf_file, path=name, fileobj=fileobj) - - logger.debug("sigmf archive built") diff --git a/src/actions/acquire_single_freq_fft.py b/src/actions/acquire_single_freq_fft.py index 470d556f..e5ee402f 100644 --- a/src/actions/acquire_single_freq_fft.py +++ b/src/actions/acquire_single_freq_fft.py @@ -57,7 +57,7 @@ ## Frequency-domain processing After windowing, the data matrix is converted into the frequency domain using -an FFT'd, doing the equivalent of the DFT defined as +an FFT, doing the equivalent of the DFT defined as $$A_k = \sum_{{m=0}}^{{n-1}} a_m \exp\left\\{{-2\pi i{{mk \over n}}\right\\}} \qquad k = 0,\ldots,n-1$$ @@ -80,16 +80,15 @@ from __future__ import absolute_import import logging +from enum import Enum import numpy as np -from enum import Enum -from rest_framework.reverse import reverse from sigmf.sigmffile import SigMFFile from capabilities import capabilities -from hardware import usrp_iface -from sensor import V1, settings, utils +from hardware import sdr +from sensor import settings, utils from .base import Action @@ -110,7 +109,7 @@ class M4sDetector(Enum): # The sigmf-ns-scos version targeted by this action -SCOS_TRANSFER_SPEC_VER = '0.1' +SCOS_TRANSFER_SPEC_VER = '0.2' def m4s_detector(array): @@ -119,8 +118,7 @@ def m4s_detector(array): Detector is applied along each column. 
:param array: an (m x n) array of real frequency-domain linear power values - :returns: a (5 x n) in the order min, max, mean, median, sample in the case - that `detector` is `m4s`, otherwise a (1 x n) array + :returns: a (5 x n) in the order min, max, mean, median, sample """ amin = np.min(array, axis=0) @@ -146,7 +144,7 @@ class SingleFrequencyFftAcquisition(Action): """ def __init__(self, name, frequency, gain, sample_rate, fft_size, nffts): - super(SingleFrequencyFftAcquisition, self).__init__() + super().__init__() self.name = name self.frequency = frequency @@ -154,67 +152,61 @@ def __init__(self, name, frequency, gain, sample_rate, fft_size, nffts): self.sample_rate = sample_rate self.fft_size = fft_size self.nffts = nffts - self.usrp = usrp_iface # make instance variable to allow mocking + self.sdr = sdr # make instance variable to allow mocking self.enbw = None def __call__(self, schedule_entry_name, task_id): """This is the entrypoint function called by the scheduler.""" - from schedule.models import ScheduleEntry + from results.models import TaskResult - # raises ScheduleEntry.DoesNotExist if no matching schedule entry - parent_entry = ScheduleEntry.objects.get(name=schedule_entry_name) + # Raises TaskResult.DoesNotExist if no matching task result + task_result = TaskResult.objects.get( + schedule_entry__name=schedule_entry_name, task_id=task_id) self.test_required_components() - self.configure_usrp() - data = self.acquire_data(parent_entry, task_id) + self.configure_sdr() + data = self.acquire_data() m4s_data = self.apply_detector(data) sigmf_md = self.build_sigmf_md() - self.archive(m4s_data, sigmf_md, parent_entry, task_id) - - kws = {'schedule_entry_name': schedule_entry_name, 'task_id': task_id} - kws.update(V1) - detail = reverse( - 'acquisition-detail', kwargs=kws, request=parent_entry.request) - - return detail + self.archive(task_result, m4s_data, sigmf_md) def test_required_components(self): """Fail acquisition if a required component is not available.""" - self.usrp.connect() - if not self.usrp.is_available: - msg = "acquisition failed: USRP required but not available" + self.sdr.connect() + if not self.sdr.is_available: + msg = "acquisition failed: SDR required but not available" raise RuntimeError(msg) - def configure_usrp(self): - self.set_usrp_clock_rate() - self.set_usrp_sample_rate() - self.set_usrp_frequency() - self.set_usrp_gain() + def configure_sdr(self): + self.set_sdr_clock_rate() + self.set_sdr_sample_rate() + self.set_sdr_frequency() + self.set_sdr_gain() - def set_usrp_gain(self): - self.usrp.radio.gain = self.gain + def set_sdr_gain(self): + self.sdr.radio.gain = self.gain - def set_usrp_sample_rate(self): - self.usrp.radio.sample_rate = self.sample_rate - self.sample_rate = self.usrp.radio.sample_rate + def set_sdr_sample_rate(self): + self.sdr.radio.sample_rate = self.sample_rate + self.sample_rate = self.sdr.radio.sample_rate - def set_usrp_clock_rate(self): + def set_sdr_clock_rate(self): clock_rate = self.sample_rate while clock_rate < 10e6: clock_rate *= 4 - self.usrp.radio.clock_rate = clock_rate + self.sdr.radio.clock_rate = clock_rate - def set_usrp_frequency(self): + def set_sdr_frequency(self): requested_frequency = self.frequency - self.usrp.radio.frequency = requested_frequency - self.frequency = self.usrp.radio.frequency + self.sdr.radio.frequency = requested_frequency + self.frequency = self.sdr.radio.frequency - def acquire_data(self, parent_entry, task_id): + def acquire_data(self): msg = "Acquiring {} FFTs at {} MHz" 
logger.debug(msg.format(self.nffts, self.frequency / 1e6)) - data = self.usrp.radio.acquire_samples(self.nffts * self.fft_size) + data = self.sdr.radio.acquire_samples(self.nffts * self.fft_size) data.resize((self.nffts, self.fft_size)) return data @@ -290,15 +282,14 @@ def apply_detector(self, data): return fdata_dbm_m4s - def archive(self, m4s_data, sigmf_md, parent_entry, task_id): - from acquisitions.models import Acquisition + def archive(self, task_result, m4s_data, sigmf_md): + from results.models import Acquisition logger.debug("Storing acquisition in database") Acquisition( - schedule_entry=parent_entry, - task_id=task_id, - sigmf_metadata=sigmf_md._metadata, + task_result=task_result, + metadata=sigmf_md._metadata, data=m4s_data).save() @property diff --git a/src/actions/acquire_stepped_freq_tdomain_iq.py b/src/actions/acquire_stepped_freq_tdomain_iq.py index b18bff81..e8ff41d5 100644 --- a/src/actions/acquire_stepped_freq_tdomain_iq.py +++ b/src/actions/acquire_stepped_freq_tdomain_iq.py @@ -47,12 +47,11 @@ import numpy as np -from rest_framework.reverse import reverse from sigmf.sigmffile import SigMFFile from capabilities import capabilities -from hardware import usrp_iface -from sensor import V1, settings, utils +from hardware import sdr +from sensor import settings, utils from .base import Action @@ -99,41 +98,35 @@ def __init__(self, name, fcs, gains, sample_rates, durations_ms): self.nfcs = nfcs self.fcs = fcs self.tuning_parameters = tuning_parameters - self.usrp = usrp_iface # make instance variable to allow mocking + self.sdr = sdr # make instance variable to allow mocking def __call__(self, schedule_entry_name, task_id): """This is the entrypoint function called by the scheduler.""" - from schedule.models import ScheduleEntry + from results.models import TaskResult - # raises ScheduleEntry.DoesNotExist if no matching schedule entry - parent_entry = ScheduleEntry.objects.get(name=schedule_entry_name) + # Raises TaskResult.DoesNotExist if no matching task result + task_result = TaskResult.objects.get( + schedule_entry__name=schedule_entry_name, task_id=task_id) self.test_required_components() for recording_id, fc in enumerate(self.fcs, start=1): - data, sigmf_md = self.acquire_data(fc, parent_entry, task_id) - self.archive(data, sigmf_md, parent_entry, task_id, recording_id) - - kws = {'schedule_entry_name': schedule_entry_name, 'task_id': task_id} - kws.update(V1) - detail = reverse( - 'acquisition-detail', kwargs=kws, request=parent_entry.request) - - return detail + data, sigmf_md = self.acquire_data(fc) + self.archive(task_result, data, sigmf_md, recording_id) def test_required_components(self): """Fail acquisition if a required component is not available.""" - self.usrp.connect() - if not self.usrp.is_available: - msg = "acquisition failed: USRP required but not available" + self.sdr.connect() + if not self.sdr.is_available: + msg = "acquisition failed: SDR required but not available" raise RuntimeError(msg) - def acquire_data(self, fc, parent_entry, task_id): + def acquire_data(self, fc): tuning_parameters = self.tuning_parameters[fc] - self.configure_usrp(fc, **tuning_parameters) + self.configure_sdr(fc, **tuning_parameters) # Use the radio's actual reported sample rate instead of requested rate - sample_rate = self.usrp.radio.sample_rate + sample_rate = self.sdr.radio.sample_rate # Build global metadata sigmf_md = SigMFFile() @@ -152,42 +145,40 @@ def acquire_data(self, fc, parent_entry, task_id): nsamps = int(sample_rate * tuning_parameters['duration_ms'] * 
1e-3)
 
         dt = utils.get_datetime_str_now()
-        acq = self.usrp.radio.acquire_samples(nsamps).astype(np.complex64)
+        acq = self.sdr.radio.acquire_samples(nsamps).astype(np.complex64)
         data = np.append(data, acq)
         capture_md = {"core:frequency": fc, "core:datetime": dt}
         sigmf_md.add_capture(start_index=0, metadata=capture_md)
-        annotation_md = {"applied_scale_factor": self.usrp.radio.scale_factor}
+        annotation_md = {"applied_scale_factor": self.sdr.radio.scale_factor}
         sigmf_md.add_annotation(
             start_index=0, length=nsamps, metadata=annotation_md)
 
         return data, sigmf_md
 
-    def configure_usrp(self, fc, gain, sample_rate, duration_ms):
-        self.set_usrp_clock_rate(sample_rate)
-        self.set_usrp_sample_rate(sample_rate)
-        self.usrp.radio.tune_frequency(fc)
-        self.usrp.radio.gain = gain
+    def configure_sdr(self, fc, gain, sample_rate, duration_ms):
+        self.set_sdr_clock_rate(sample_rate)
+        self.set_sdr_sample_rate(sample_rate)
+        self.sdr.radio.tune_frequency(fc)
+        self.sdr.radio.gain = gain
 
-    def set_usrp_clock_rate(self, sample_rate):
+    def set_sdr_clock_rate(self, sample_rate):
         clock_rate = sample_rate
 
         while clock_rate < 10e6:
             clock_rate *= 4
 
-        self.usrp.radio.clock_rate = clock_rate
+        self.sdr.radio.clock_rate = clock_rate
 
-    def set_usrp_sample_rate(self, sample_rate):
-        self.usrp.radio.sample_rate = sample_rate
+    def set_sdr_sample_rate(self, sample_rate):
+        self.sdr.radio.sample_rate = sample_rate
 
-    def archive(self, m4s_data, sigmf_md, parent_entry, task_id, recording_id):
-        from acquisitions.models import Acquisition
+    def archive(self, task_result, m4s_data, sigmf_md, recording_id):
+        from results.models import Acquisition
 
         logger.debug("Storing acquisition in database")
 
         Acquisition(
-            schedule_entry=parent_entry,
-            task_id=task_id,
-            recording_id=recording_id,
-            sigmf_metadata=sigmf_md._metadata,
+            task_result=task_result,
+            recording_id=recording_id,
+            metadata=sigmf_md._metadata,
             data=m4s_data).save()
 
     @property
diff --git a/src/authentication/migrations/0001_initial.py b/src/authentication/migrations/0001_initial.py
index 68ff35dc..8fce9acd 100644
--- a/src/authentication/migrations/0001_initial.py
+++ b/src/authentication/migrations/0001_initial.py
@@ -1,4 +1,4 @@
-# Generated by Django 2.2.1 on 2019-05-15 20:55
+# Generated by Django 2.2.1 on 2019-05-16 23:49
 
 import django.contrib.auth.models
 import django.contrib.auth.validators
diff --git a/src/hardware/__init__.py b/src/hardware/__init__.py
index e69de29b..a28db747 100644
--- a/src/hardware/__init__.py
+++ b/src/hardware/__init__.py
@@ -0,0 +1,4 @@
+from . import usrp_iface
+
+
+sdr = usrp_iface
diff --git a/src/results/migrations/0001_initial.py b/src/results/migrations/0001_initial.py
index f47f7de0..3c40ee3e 100644
--- a/src/results/migrations/0001_initial.py
+++ b/src/results/migrations/0001_initial.py
@@ -1,7 +1,10 @@
-# Generated by Django 2.2.1 on 2019-05-15 20:55
+# Generated by Django 2.2.1 on 2019-05-16 23:49
 
+import datetime
 from django.db import migrations, models
 import django.db.models.deletion
+from django.utils.timezone import utc
+import jsonfield.fields
 
 
 class Migration(migrations.Migration):
@@ -18,16 +21,31 @@ class Migration(migrations.Migration):
             fields=[
                 ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                 ('task_id', models.IntegerField(help_text='The id of the task relative to the result')),
-                ('started', models.DateTimeField(help_text='The time the task started')),
-                ('finished', models.DateTimeField(help_text='The time the task finished')),
-                ('duration', models.DurationField(help_text='Task duration in seconds')),
-                ('result', models.CharField(choices=[(1, 'success'), (2, 'failure')], help_text='"success" or "failure"', max_length=7)),
+                ('started', models.DateTimeField(default=datetime.datetime(2019, 5, 16, 23, 42, 27, 761070, tzinfo=utc), help_text='The time the task started')),
+                ('finished', models.DateTimeField(default=datetime.datetime(2019, 5, 16, 23, 42, 27, 761070, tzinfo=utc), help_text='The time the task finished')),
+                ('duration', models.DurationField(default=datetime.timedelta(0), help_text='Task duration in seconds')),
+                ('status', models.CharField(choices=[(1, 'success'), (2, 'failure'), (3, 'in-progress')], default='in-progress', help_text='"success", "failure", or "in-progress"', max_length=11)),
                 ('detail', models.CharField(blank=True, help_text='Arbitrary detail string', max_length=512)),
-                ('schedule_entry', models.ForeignKey(help_text='The schedule entry relative to the result', on_delete=django.db.models.deletion.CASCADE, related_name='results', to='schedule.ScheduleEntry')),
+                ('schedule_entry', models.ForeignKey(help_text='The schedule entry relative to the result', on_delete=django.db.models.deletion.PROTECT, related_name='results', to='schedule.ScheduleEntry')),
             ],
             options={
                 'ordering': ('task_id',),
                 'unique_together': {('schedule_entry', 'task_id')},
             },
         ),
+        migrations.CreateModel(
+            name='Acquisition',
+            fields=[
+                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
+                ('recording_id', models.IntegerField(default=0, help_text='The id of the recording relative to the task')),
+                ('metadata', jsonfield.fields.JSONField(help_text='The SigMF metadata for the acquisition')),
+                ('data', models.BinaryField(null=True)),
+                ('task_result', models.ForeignKey(help_text='The task_result relative to the acquisition', on_delete=django.db.models.deletion.CASCADE, related_name='data', to='results.TaskResult')),
+            ],
+            options={
+                'db_table': 'acquisitions',
+                'ordering': ('task_result', 'recording_id'),
+                'unique_together': {('task_result', 'recording_id')},
+            },
+        ),
     ]
diff --git a/src/results/migrations/0002_auto_20190516_2353.py b/src/results/migrations/0002_auto_20190516_2353.py
new file mode 100644
index 00000000..1dcf7cd1
--- /dev/null
+++ b/src/results/migrations/0002_auto_20190516_2353.py
@@ -0,0 +1,25 @@
+# Generated by Django 2.2.1 on 2019-05-16 23:53
+
+import datetime
+from django.db import migrations, models
+from django.utils.timezone import utc
+
+
+class Migration(migrations.Migration):
+
+    dependencies = [
+        ('results', '0001_initial'),
+    ]
+
+    operations = [
+        migrations.AlterField(
+            model_name='taskresult',
+            name='finished',
+            field=models.DateTimeField(default=datetime.datetime(2019, 5, 16, 23, 0, tzinfo=utc), help_text='The time the task finished'),
+        ),
+        migrations.AlterField(
+            model_name='taskresult',
+            name='started',
+            field=models.DateTimeField(default=datetime.datetime(2019, 5, 16, 23, 0, tzinfo=utc), help_text='The time the task started'),
+        ),
+    ]
diff --git a/src/results/models/__init__.py b/src/results/models/__init__.py
new file mode 100644
index 00000000..5cebb4e8
--- /dev/null
+++ b/src/results/models/__init__.py
@@ -0,0 +1,2 @@
+from .acquisition import Acquisition  # noqa
+from .task_result import TaskResult  # noqa
diff --git a/src/acquisitions/models.py b/src/results/models/acquisition.py
similarity index 52%
rename from src/acquisitions/models.py
rename to src/results/models/acquisition.py
index 64fa1083..ede79695 100644
--- a/src/acquisitions/models.py
+++ b/src/results/models/acquisition.py
@@ -1,7 +1,7 @@
 from django.db import models
 from jsonfield import JSONField
 
-from schedule.models import ScheduleEntry
+from .task_result import TaskResult
 
 
 class Acquisition(models.Model):
@@ -15,30 +15,25 @@ class Acquisition(models.Model):
     entry, task id, and recording id.
 
     """
-    schedule_entry = models.ForeignKey(
-        ScheduleEntry,
-        on_delete=models.PROTECT,
-        related_name='acquisitions',
-        help_text="The schedule entry relative to the acquisition")
-    task_id = models.IntegerField(
-        help_text="The id of the task relative to the acquisition")
+    task_result = models.ForeignKey(
+        TaskResult,
+        on_delete=models.CASCADE,
+        related_name='data',
+        help_text="The task_result relative to the acquisition")
     recording_id = models.IntegerField(
         default=0, help_text="The id of the recording relative to the task")
-    sigmf_metadata = JSONField(
-        help_text="The sigmf meta data for the acquisition")
+    metadata = JSONField(help_text="The SigMF metadata for the acquisition")
     data = models.BinaryField(help_text="", null=True)
-    created = models.DateTimeField(
-        help_text="The time the acquisition was created", auto_now_add=True)
 
     class Meta:
         db_table = 'acquisitions'
-        ordering = ('created', )
-        unique_together = (('schedule_entry', 'task_id', 'recording_id'), )
+        ordering = ('task_result', 'recording_id')
+        unique_together = (('task_result', 'recording_id'), )
 
     def __str__(self):
         return '{}/{}:{}'.format(
-            self.schedule_entry.name,
-            self.task_id,
+            self.task_result.schedule_entry.name,
+            self.task_result.task_id,
             self.recording_id
         )
diff --git a/src/results/models.py b/src/results/models/task_result.py
similarity index 65%
rename from src/results/models.py
rename to src/results/models/task_result.py
index b9739598..f927feba 100644
--- a/src/results/models.py
+++ b/src/results/models/task_result.py
@@ -1,28 +1,46 @@
+import datetime
+
 from django.db import models
+from django.utils import timezone
 
 from schedule.models import ScheduleEntry
 from sensor.settings import MAX_TASK_RESULTS
-from .consts import MAX_DETAIL_LEN
+from results.consts import MAX_DETAIL_LEN
+
+
+UTC = timezone.utc
 
 
 class TaskResult(models.Model):
     """Map between schedule entries and their task results."""
     SUCCESS = 1
     FAILURE = 2
-    RESULT_CHOICES = ((SUCCESS, 'success'), (FAILURE, 'failure'))
+    IN_PROGRESS = 3
+    RESULT_CHOICES = (
+        (SUCCESS, 'success'),
+        (FAILURE, 'failure'),
+        (IN_PROGRESS, 'in-progress')
+    )
 
     schedule_entry = models.ForeignKey(
         ScheduleEntry,
-        on_delete=models.CASCADE,
+        on_delete=models.PROTECT,
related_name='results', help_text="The schedule entry relative to the result") task_id = models.IntegerField( help_text="The id of the task relative to the result") - started = models.DateTimeField(help_text="The time the task started") - finished = models.DateTimeField(help_text="The time the task finished") - duration = models.DurationField(help_text="Task duration in seconds") - result = models.CharField( - max_length=7, + started = models.DateTimeField( + default=datetime.datetime(2019, 5, 16, 23, tzinfo=UTC), + help_text="The time the task started") + finished = models.DateTimeField( + default=datetime.datetime(2019, 5, 16, 23, tzinfo=UTC), + help_text="The time the task finished") + duration = models.DurationField( + default=timezone.ZERO, + help_text="Task duration in seconds") + status = models.CharField( + default='in-progress', + max_length=11, help_text='"success" or "failure"', choices=RESULT_CHOICES) detail = models.CharField( diff --git a/src/acquisitions/permissions.py b/src/results/permissions.py similarity index 74% rename from src/acquisitions/permissions.py rename to src/results/permissions.py index 73a5da0b..0854db05 100644 --- a/src/acquisitions/permissions.py +++ b/src/results/permissions.py @@ -2,16 +2,16 @@ class IsAdminOrOwnerOrReadOnly(permissions.BasePermission): - """Only allow an admin or a acquisition's owner to edit it.""" + """Only allow an admin or a result's owner to edit it.""" def has_permission(self, request, view): user = request.user - acquisition = view.queryset.first() + result = view.queryset.first() - if acquisition is None: + if result is None: return True - if acquisition.schedule_entry.is_private and not user.is_staff: + if result.schedule_entry.is_private and not user.is_staff: return False if request.method in permissions.SAFE_METHODS: @@ -19,7 +19,7 @@ def has_permission(self, request, view): # Write permissions are only allowed to the owner or an admin # or if the aquisition doesn't exists (leading to 404). 
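The viewsets later in this patch stack this class on top of the project defaults rather than replacing them. A condensed sketch of that wiring (the viewset itself is hypothetical):

    # Sketch: combining IsAdminOrOwnerOrReadOnly with the default classes,
    # mirroring the permission_classes lines added to the views below.
    from rest_framework.settings import api_settings
    from rest_framework.viewsets import GenericViewSet

    from results.permissions import IsAdminOrOwnerOrReadOnly

    class ExampleViewSet(GenericViewSet):  # hypothetical; queryset omitted
        permission_classes = (
            api_settings.DEFAULT_PERMISSION_CLASSES +
            [IsAdminOrOwnerOrReadOnly])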
- if acquisition.schedule_entry.owner == user: + if result.schedule_entry.owner == user: return True return user.is_staff diff --git a/src/results/serializers/__init__.py b/src/results/serializers/__init__.py new file mode 100644 index 00000000..ec16b900 --- /dev/null +++ b/src/results/serializers/__init__.py @@ -0,0 +1,2 @@ +from .acquisition import AcquisitionSerializer +from .task_result import TaskResultSerializer, TaskResultsOverviewSerializer diff --git a/src/results/serializers/acquisition.py b/src/results/serializers/acquisition.py new file mode 100644 index 00000000..f46ec621 --- /dev/null +++ b/src/results/serializers/acquisition.py @@ -0,0 +1,38 @@ +from rest_framework import serializers +from rest_framework.reverse import reverse + +from results.models import Acquisition +from sensor import V1 + + +class AcquisitionHyperlinkedRelatedField(serializers.HyperlinkedRelatedField): + # django-rest-framework.org/api-guide/relations/#custom-hyperlinked-fields + def get_url(self, obj, view_name, request, format): + kws = { + 'schedule_entry_name': obj.task_result.schedule_entry.name, + 'task_id': obj.task_result.task_id + } + kws.update(V1) + url = reverse(view_name, kwargs=kws, request=request, format=format) + return url + + +class AcquisitionSerializer(serializers.ModelSerializer): + archive = AcquisitionHyperlinkedRelatedField( + view_name='result-archive', + read_only=True, + help_text="The url to download a SigMF archive of this acquisition", + source='*' # pass whole object + ) + metadata = serializers.DictField( + help_text="The SigMF metadata for the acquisition") + + class Meta: + model = Acquisition + fields = ('recording_id', 'archive', 'metadata') + extra_kwargs = { + 'schedule_entry': { + 'view_name': 'schedule-detail', + 'lookup_field': 'name' + } + } diff --git a/src/results/serializers.py b/src/results/serializers/task_result.py similarity index 93% rename from src/results/serializers.py rename to src/results/serializers/task_result.py index 5436b248..1fad8372 100644 --- a/src/results/serializers.py +++ b/src/results/serializers/task_result.py @@ -3,7 +3,21 @@ from schedule.models import ScheduleEntry from sensor import V1 -from .models import TaskResult +from results.models import TaskResult + +from .acquisition import AcquisitionSerializer + + +class TaskResultHyperlinkedRelatedField(serializers.HyperlinkedRelatedField): + # django-rest-framework.org/api-guide/relations/#custom-hyperlinked-fields + def get_url(self, obj, view_name, request, format): + kws = { + 'schedule_entry_name': obj.schedule_entry.name, + 'task_id': obj.task_id + } + kws.update(V1) + url = reverse(view_name, kwargs=kws, request=request, format=format) + return url class TaskResultsOverviewSerializer(serializers.HyperlinkedModelSerializer): @@ -38,19 +52,6 @@ def get_schedule_entry(self, obj): return url -# FIXME: this is identical to AcquisitionHyperlinkedRelatedField -class TaskResultHyperlinkedRelatedField(serializers.HyperlinkedRelatedField): - # django-rest-framework.org/api-guide/relations/#custom-hyperlinked-fields - def get_url(self, obj, view_name, request, format): - kws = { - 'schedule_entry_name': obj.schedule_entry.name, - 'task_id': obj.task_id - } - kws.update(V1) - url = reverse(view_name, kwargs=kws, request=request, format=format) - return url - - class TaskResultSerializer(serializers.HyperlinkedModelSerializer): self = TaskResultHyperlinkedRelatedField( view_name='result-detail', @@ -60,18 +61,20 @@ class TaskResultSerializer(serializers.HyperlinkedModelSerializer): ) 
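Both custom hyperlinked fields reduce to the same reverse() call and differ only in how they reach the schedule entry name and task id. A sketch of the call they build, assuming V1 contributes the version kwarg:

    # Sketch of the reverse() these get_url() overrides perform; 'request'
    # comes from the serializer context, and the entry/task values are
    # illustrative.
    from rest_framework.reverse import reverse

    kws = {'schedule_entry_name': 'test_acq', 'task_id': 1}
    kws.update(V1)  # presumably adds {'version': 'v1'}
    url = reverse('result-archive', kwargs=kws, request=request)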
schedule_entry = serializers.SerializerMethodField( help_text="The url of the parent schedule entry") + data = AcquisitionSerializer(many=True) class Meta: model = TaskResult fields = ( 'self', + 'schedule_entry', 'task_id', + 'status', + 'detail', 'started', 'finished', 'duration', - 'result', - 'detail', - 'schedule_entry', + 'data' ) def get_schedule_entry(self, obj): @@ -80,4 +83,5 @@ def get_schedule_entry(self, obj): kws = {'pk': obj.schedule_entry.name} kws.update(V1) url = reverse(route, kwargs=kws, request=request) + return url diff --git a/src/results/urls.py b/src/results/urls.py index db12f641..df210c07 100644 --- a/src/results/urls.py +++ b/src/results/urls.py @@ -1,20 +1,35 @@ from django.urls import path -from .views import (ResultsOverviewViewSet, ResultListViewSet, - ResultInstanceViewSet) +from .views import ( + ResultsOverviewViewSet, ResultListViewSet, ResultInstanceViewSet) + urlpatterns = ( path('', - view=ResultsOverviewViewSet.as_view({'get': 'list'}), + view=ResultsOverviewViewSet.as_view({ + 'get': 'list' + }), name='results-overview'), path('/', view=ResultListViewSet.as_view({ 'get': 'list', + 'delete': 'destroy_all' }), name='result-list'), + path('/archive/', + view=ResultListViewSet.as_view({ + 'get': 'archive', + }), + name='result-list-archive'), path('//', view=ResultInstanceViewSet.as_view({ 'get': 'retrieve', + 'delete': 'destroy' + }), + name='result-detail'), + path('//archive', + view=ResultInstanceViewSet.as_view({ + 'get': 'archive', }), - name='result-detail') + name='result-archive') ) diff --git a/src/results/views.py b/src/results/views.py index 4082de74..17c32514 100644 --- a/src/results/views.py +++ b/src/results/views.py @@ -1,12 +1,28 @@ -from django.http import Http404 -from rest_framework import filters +import logging +import tempfile + +from django.http import Http404, FileResponse +from rest_framework import filters, status +from rest_framework.decorators import action from rest_framework.generics import get_object_or_404 -from rest_framework.mixins import ListModelMixin, RetrieveModelMixin +from rest_framework.mixins import ( + ListModelMixin, RetrieveModelMixin, DestroyModelMixin) +from rest_framework.response import Response +from rest_framework.settings import api_settings from rest_framework.viewsets import GenericViewSet +import sigmf.archive +import sigmf.sigmffile + +import sensor.settings from schedule.models import ScheduleEntry -from .models import TaskResult -from .serializers import TaskResultsOverviewSerializer, TaskResultSerializer +from .models.task_result import TaskResult +from .permissions import IsAdminOrOwnerOrReadOnly +from .serializers.task_result import ( + TaskResultsOverviewSerializer, TaskResultSerializer) + + +logger = logging.getLogger(__name__) class ResultsOverviewViewSet(ListModelMixin, GenericViewSet): @@ -55,15 +71,69 @@ def get_object(self): return get_object_or_404(queryset, **filter) +class TaskResultListViewSet(MultipleFieldLookupMixin, ListModelMixin, + GenericViewSet): + """ + list: + Returns a list of all acquisitions created by the given schedule entry. + + destroy_all: + Deletes all acquisitions created by the given schedule entry. 
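The angle-bracket path converters in the new routes do not survive this rendering of the patch; based on the kwargs the views expect, two of the routes presumably read as follows (parameter names inferred from the views, converter types assumed):

    # Presumed shape of the stripped routes; slug/int converters are an
    # assumption, the parameter names come from the view signatures.
    from django.urls import path

    from .views import ResultListViewSet, ResultInstanceViewSet

    urlpatterns = (
        path('<slug:schedule_entry_name>/',
             view=ResultListViewSet.as_view(
                 {'get': 'list', 'delete': 'destroy_all'}),
             name='result-list'),
        path('<slug:schedule_entry_name>/<int:task_id>/archive',
             view=ResultInstanceViewSet.as_view({'get': 'archive'}),
             name='result-archive'),
    )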
+ """ + queryset = TaskResult.objects.all() + serializer_class = TaskResultSerializer + permission_classes = ( + api_settings.DEFAULT_PERMISSION_CLASSES + [IsAdminOrOwnerOrReadOnly]) + filter_backends = (filters.SearchFilter, filters.OrderingFilter) + lookup_fields = ('schedule_entry__name', 'task_id') + ordering_fields = ('task_id', 'created') + search_fields = ('sigmf_metadata', ) + + @action(detail=False, methods=('delete', )) + def destroy_all(self, request, version, schedule_entry_name): + queryset = self.get_queryset() + queryset = queryset.filter(schedule_entry__name=schedule_entry_name) + + if not queryset.exists(): + raise Http404 + + queryset.delete() + + return Response(status=status.HTTP_204_NO_CONTENT) + + @action(detail=False) + def archive(self, request, version, schedule_entry_name): + queryset = self.get_queryset() + queryset = queryset.filter(schedule_entry__name=schedule_entry_name) + fqdn = sensor.settings.FQDN + fname = fqdn + '_' + schedule_entry_name + '.sigmf' + + if not queryset.exists(): + raise Http404 + + # FileResponse handles closing the file + tmparchive = tempfile.TemporaryFile() + build_sigmf_archive(tmparchive, schedule_entry_name, queryset) + content_type = 'application/x-tar' + response = FileResponse(tmparchive, as_attachment=True, filename=fname, + content_type=content_type) + return response + + class ResultListViewSet(ListModelMixin, GenericViewSet): """ list: Returns a list of all results created by the given schedule entry. + + destroy_all: + Deletes all results created by the given schedule entry. + """ queryset = TaskResult.objects.all() serializer_class = TaskResultSerializer + permission_classes = ( + api_settings.DEFAULT_PERMISSION_CLASSES + [IsAdminOrOwnerOrReadOnly]) filter_backends = (filters.SearchFilter, filters.OrderingFilter) - lookup_fields = ('schedule_entry__name', ) lookup_fields = ('schedule_entry__name', 'task_id') ordering_fields = ('task_id', 'started', 'finished', 'duration', 'result') search_fields = ('task_id', 'result', 'detail') @@ -85,13 +155,92 @@ def get_queryset(self): return queryset.all() + @action(detail=False, methods=('delete', )) + def destroy_all(self, request, version, schedule_entry_name): + queryset = self.get_queryset() + + if not queryset.exists(): + raise Http404 + + queryset.delete() + + return Response(status=status.HTTP_204_NO_CONTENT) + + @action(detail=False) + def archive(self, request, version, schedule_entry_name): + queryset = self.get_queryset() + + if not queryset.exists(): + raise Http404 + + fqdn = sensor.settings.FQDN + fname = fqdn + '_' + schedule_entry_name + '.sigmf' + + # FileResponse handles closing the file + tmparchive = tempfile.TemporaryFile() + build_sigmf_archive(tmparchive, schedule_entry_name, queryset) + content_type = 'application/x-tar' + response = FileResponse(tmparchive, as_attachment=True, filename=fname, + content_type=content_type) + + return response + class ResultInstanceViewSet(MultipleFieldLookupMixin, RetrieveModelMixin, - GenericViewSet): + DestroyModelMixin, GenericViewSet): """ retrieve: Returns a specific result. + + destroy: + Deletes the specified acquisition. + + archive: + Downloads the acquisition's SigMF archive. 
+ """ queryset = TaskResult.objects.all() serializer_class = TaskResultSerializer + permission_classes = ( + api_settings.DEFAULT_PERMISSION_CLASSES + [IsAdminOrOwnerOrReadOnly]) lookup_fields = ('schedule_entry__name', 'task_id') + + @action(detail=True) + def archive(self, request, version, schedule_entry_name, task_id): + entry_name = schedule_entry_name + fqdn = sensor.settings.FQDN + fname = fqdn + '_' + entry_name + '_' + str(task_id) + '.sigmf' + acq = self.get_object() + + # FileResponse handles closing the file + tmparchive = tempfile.TemporaryFile() + build_sigmf_archive(tmparchive, schedule_entry_name, [acq]) + content_type = 'application/x-tar' + response = FileResponse(tmparchive, as_attachment=True, filename=fname, + content_type=content_type) + return response + + +def build_sigmf_archive(fileobj, schedule_entry_name, acquisitions): + """Build a SigMF archive containing `acquisitions` and save to fileobj. + + @param fileobj: a fileobj open for writing + @param schedule_entry_name: the name of the parent schedule entry + @param acquisitions: an iterable of Acquisition objects from the database + @return: None + + """ + logger.debug("building sigmf archive") + + for acq in acquisitions: + with tempfile.NamedTemporaryFile() as tmpdata: + tmpdata.write(acq.data) + tmpdata.seek(0) # move fd ptr to start of data for reading + name = schedule_entry_name + '_' + str(acq.task_id) + sigmf_file = sigmf.sigmffile.SigMFFile(metadata=acq.sigmf_metadata, + name=name) + sigmf_file.set_data_file(tmpdata.name) + + sigmf.archive.SigMFArchive(sigmf_file, path=name, fileobj=fileobj) + + logger.debug("sigmf archive built") diff --git a/src/schedule/migrations/0001_initial.py b/src/schedule/migrations/0001_initial.py index 90353ffa..0c040f66 100644 --- a/src/schedule/migrations/0001_initial.py +++ b/src/schedule/migrations/0001_initial.py @@ -1,4 +1,4 @@ -# Generated by Django 2.2.1 on 2019-05-15 20:55 +# Generated by Django 2.2.1 on 2019-05-16 23:49 from django.conf import settings import django.core.validators diff --git a/src/schedule/models/__init__.py b/src/schedule/models/__init__.py index 7eb52c55..4b8d1db9 100644 --- a/src/schedule/models/__init__.py +++ b/src/schedule/models/__init__.py @@ -1,4 +1,2 @@ -from __future__ import absolute_import - from .schedule_entry import ScheduleEntry, DEFAULT_PRIORITY # noqa from .request import Request # noqa diff --git a/src/schedule/serializers.py b/src/schedule/serializers.py index 0ca2b072..4606cfc0 100644 --- a/src/schedule/serializers.py +++ b/src/schedule/serializers.py @@ -48,8 +48,6 @@ def to_internal_value(self, dt_str): class ScheduleEntrySerializer(serializers.HyperlinkedModelSerializer): """Covert ScheduleEntry to and from JSON.""" - acquisitions = serializers.SerializerMethodField( - help_text="The list of acquisitions related to the entry") results = serializers.SerializerMethodField( help_text="The list of results related to the entry") start = DateTimeFromTimestampField( @@ -100,8 +98,7 @@ class Meta: fields = ('self', 'name', 'action', 'priority', 'start', 'stop', 'relative_stop', 'interval', 'is_active', 'is_private', 'callback_url', 'next_task_time', 'next_task_id', 'created', - 'modified', 'owner', 'acquisitions', 'results', - 'validate_only') + 'modified', 'owner', 'results', 'validate_only') extra_kwargs = { 'self': { 'view_name': 'schedule-detail', @@ -161,13 +158,6 @@ def validate(self, data): return data - def get_acquisitions(self, obj): - request = self.context['request'] - kws = {'schedule_entry_name': obj.name} - 
kws.update(V1) - url = reverse('acquisition-list', kwargs=kws, request=request) - return url - def get_results(self, obj): request = self.context['request'] kws = {'schedule_entry_name': obj.name} diff --git a/src/scheduler/scheduler.py b/src/scheduler/scheduler.py index d49995dd..8aeb0858 100644 --- a/src/scheduler/scheduler.py +++ b/src/scheduler/scheduler.py @@ -38,6 +38,11 @@ def __init__(self): self.running = False self.interrupt_flag = threading.Event() + # Cache the currently running task state + self.entry = None # ScheduleEntry that created the current task + self.task = None # Task object describing current task + self.task_result = None # TaskResult object for current task + @property def schedule(self): """An updated view of the current schedule""" @@ -113,50 +118,53 @@ def _queue_tasks(self, schedule_snapshot): def _consume_task_queue(self, pending_task_queue): for task in pending_task_queue.to_list(): - result, started, finished, detail = self._call_task_action(task) - self._save_task_result(task, started, finished, result, detail) - - def _call_task_action(self, task): - entry_name = task.schedule_entry_name - task_id = task.task_id - started = timezone.now() + entry_name = task.schedule_entry_name + self.task = task + self.entry = ScheduleEntry.objects.get(name=entry_name) + self._initialize_task_result() + started = timezone.now() + status, detail = self._call_task_action() + finished = timezone.now() + self._finalize_task_result(started, finished, status, detail) + + def _initialize_task_result(self): + """Initalize an 'in-progress' result so it exists when action runs.""" + tid = self.task.task_id + self.task_result = TaskResult(schedule_entry=self.entry, task_id=tid) + self.task_result.save() + + def _call_task_action(self): + entry_name = self.task.schedule_entry_name + task_id = self.task.task_id try: logger.debug("running task {}/{}".format(entry_name, task_id)) - detail = task.action_fn(entry_name, task_id) + detail = self.task.action_fn(entry_name, task_id) self.delayfn(0) # let other threads run - result = 'success' + status = 'success' if not isinstance(detail, str): detail = "" except Exception as err: detail = str(err) logger.exception("action failed: {}".format(detail)) - result = 'failure' - - finished = timezone.now() - - return result, started, finished, detail[:MAX_DETAIL_LEN] + status = 'failure' - def _save_task_result(self, task, started, finished, result, detail): - entry_name = task.schedule_entry_name - entry = ScheduleEntry.objects.get(name=entry_name) - task_id = task.task_id + return status, detail[:MAX_DETAIL_LEN] - tr = TaskResult( - schedule_entry=entry, - task_id=task_id, - started=started, - finished=finished, - duration=(finished - started), - result=result, - detail=detail) + def _finalize_task_result(self, started, finished, status, detail): + tr = self.task_result + tr.started = started + tr.finished = finished + tr.duration = finished - started + tr.status = status + tr.detail = detail tr.save() - if entry.callback_url: - context = {'request': entry.request} + if self.entry.callback_url: + context = {'request': self.entry.request} result_json = TaskResultSerializer(tr, context=context).data requests_futures_session.post( - entry.callback_url, + self.entry.callback_url, json=result_json, background_callback=self._callback_response_handler, ) diff --git a/src/sensor/exceptions.py b/src/sensor/exceptions.py index c05699d1..51d0dc0b 100644 --- a/src/sensor/exceptions.py +++ b/src/sensor/exceptions.py @@ -47,14 +47,14 @@ def 
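Creating the 'in-progress' row before the action runs is what lets an action body fetch and update its own result. A sketch of the lookup the acquisition actions perform (the metadata and data variables are placeholders):

    # Sketch: inside an action's __call__(), the pre-created row is visible.
    from results.models import TaskResult

    task_result = TaskResult.objects.get(
        schedule_entry__name=schedule_entry_name, task_id=task_id)
    # Store captured data against it; field names match the Acquisition
    # model, values here are placeholders.
    task_result.data.create(recording_id=0, metadata=sigmf_md, data=raw_bytes)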
handle_protected_error(exc, context): task_id = protected_object.task_id url_kwargs = {'schedule_entry_name': entry_name, 'task_id': task_id} url_kwargs.update(V1) - view_name = 'acquisition-detail' + view_name = 'result-detail' url = reverse(view_name, kwargs=url_kwargs, request=request) protected_object_urls.append(url) response = Response({ 'detail': - ("Cannot delete schedule entry {!r} because acquisitions on disk " - "reference it. Delete the protected acquisitions first." + ("Cannot delete schedule entry {!r} because results on disk " + "reference it. Delete the protected results first." ).format(entry_name), 'protected_objects': protected_object_urls diff --git a/src/sensor/settings.py b/src/sensor/settings.py index 4a6f63a3..98d48587 100644 --- a/src/sensor/settings.py +++ b/src/sensor/settings.py @@ -155,7 +155,6 @@ 'raven.contrib.django.raven_compat', 'debug_toolbar', # project-local apps - 'acquisitions.apps.AcquisitionsConfig', 'authentication.apps.AuthenticationConfig', 'capabilities.apps.CapabilitiesConfig', 'hardware.apps.HardwareConfig', @@ -344,10 +343,6 @@ 'handlers': ['console'], 'level': LOGLEVEL }, - 'acquisitions': { - 'handlers': ['console'], - 'level': LOGLEVEL - }, 'capabilities': { 'handlers': ['console'], 'level': LOGLEVEL diff --git a/src/sensor/urls.py b/src/sensor/urls.py index f88eac61..d9a46baf 100644 --- a/src/sensor/urls.py +++ b/src/sensor/urls.py @@ -35,7 +35,6 @@ api_urlpatterns = format_suffix_patterns( ( path('', api_v1_root, name='api-root'), - path('acquisitions/', include('acquisitions.urls')), path('capabilities/', include('capabilities.urls')), path('schedule/', include('schedule.urls')), path('status', include('status.urls')), diff --git a/src/sensor/views.py b/src/sensor/views.py index 310610be..6dd52b7c 100644 --- a/src/sensor/views.py +++ b/src/sensor/views.py @@ -16,12 +16,11 @@ def api_v1_root(request, version, format=None): """SCOS sensor API root.""" reverse_ = partial(reverse, request=request, format=format) list_endpoints = { + 'capabilities': reverse_('capabilities'), 'schedule': reverse_('schedule-list'), - 'acquisitions': reverse_('acquisitions-overview'), 'status': reverse_('status'), - 'users': reverse_('user-list'), - 'capabilities': reverse_('capabilities'), - 'results': reverse_('results-overview') + 'results': reverse_('results-overview'), + 'users': reverse_('user-list') } # See note in settings:INTERNAL_IPS about why we do this here diff --git a/src/status/migrations/0001_initial.py b/src/status/migrations/0001_initial.py index 04e1957d..b15ee78d 100644 --- a/src/status/migrations/0001_initial.py +++ b/src/status/migrations/0001_initial.py @@ -1,4 +1,4 @@ -# Generated by Django 2.2.1 on 2019-05-15 20:55 +# Generated by Django 2.2.1 on 2019-05-16 23:49 from django.db import migrations, models From bf910e437dec984b0cf8a7670a4a937fa875238e Mon Sep 17 00:00:00 2001 From: Douglas Anderson Date: Thu, 16 May 2019 18:21:08 -0600 Subject: [PATCH 02/36] Fix (aka ignore) lint errors --- src/results/serializers/__init__.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/results/serializers/__init__.py b/src/results/serializers/__init__.py index ec16b900..85e69343 100644 --- a/src/results/serializers/__init__.py +++ b/src/results/serializers/__init__.py @@ -1,2 +1,2 @@ -from .acquisition import AcquisitionSerializer -from .task_result import TaskResultSerializer, TaskResultsOverviewSerializer +from .acquisition import AcquisitionSerializer # noqa +from .task_result import TaskResultSerializer, 
TaskResultsOverviewSerializer # noqa From 93103df4e25beb30f6f1866c5cffda1c478cc215 Mon Sep 17 00:00:00 2001 From: Douglas Anderson Date: Fri, 17 May 2019 15:42:33 -0600 Subject: [PATCH 03/36] Refactor "results" endpoint to "tasks" and merge task-related things from /status and /schedule --- src/actions/acquire_single_freq_fft.py | 4 +- .../acquire_stepped_freq_tdomain_iq.py | 4 +- src/authentication/migrations/0001_initial.py | 2 +- src/results/apps.py | 5 - .../migrations/0002_auto_20190516_2353.py | 25 ----- src/results/urls.py | 35 ------- src/schedule/migrations/0001_initial.py | 2 +- src/schedule/serializers.py | 8 +- src/scheduler/scheduler.py | 9 +- src/scheduler/tasks/__init__.py | 4 - src/sensor/settings.py | 6 +- src/sensor/urls.py | 2 +- src/sensor/views.py | 2 +- src/status/migrations/0001_initial.py | 2 +- src/status/views.py | 10 +- src/{results => tasks}/__init__.py | 0 src/tasks/apps.py | 5 + src/{results => tasks}/consts.py | 0 .../migrations/0001_initial.py | 10 +- src/{results => tasks}/migrations/__init__.py | 0 src/{results => tasks}/models/__init__.py | 1 + src/{results => tasks}/models/acquisition.py | 0 src/{scheduler/tasks => tasks/models}/task.py | 0 src/{results => tasks}/models/task_result.py | 4 +- src/{results => tasks}/permissions.py | 0 .../serializers/__init__.py | 0 .../serializers/acquisition.py | 4 +- .../serializers/task.py} | 0 .../serializers/task_result.py | 20 ++-- src/{scheduler => }/tasks/task_queue.py | 2 +- src/{results => tasks}/tests/__init__.py | 0 .../tests/test_detail_view.py | 0 .../tests/test_list_view.py | 0 src/{results => tasks}/tests/test_models.py | 0 .../tests/test_overview_view.py | 0 .../tests/test_serializers.py | 0 src/{results => tasks}/tests/utils.py | 0 src/tasks/urls.py | 38 +++++++ src/{results => tasks}/views.py | 98 ++++++++----------- 39 files changed, 127 insertions(+), 175 deletions(-) delete mode 100644 src/results/apps.py delete mode 100644 src/results/migrations/0002_auto_20190516_2353.py delete mode 100644 src/results/urls.py delete mode 100644 src/scheduler/tasks/__init__.py rename src/{results => tasks}/__init__.py (100%) create mode 100644 src/tasks/apps.py rename src/{results => tasks}/consts.py (100%) rename src/{results => tasks}/migrations/0001_initial.py (85%) rename src/{results => tasks}/migrations/__init__.py (100%) rename src/{results => tasks}/models/__init__.py (74%) rename src/{results => tasks}/models/acquisition.py (100%) rename src/{scheduler/tasks => tasks/models}/task.py (100%) rename src/{results => tasks}/models/task_result.py (96%) rename src/{results => tasks}/permissions.py (100%) rename src/{results => tasks}/serializers/__init__.py (100%) rename src/{results => tasks}/serializers/acquisition.py (93%) rename src/{scheduler/serializers.py => tasks/serializers/task.py} (100%) rename src/{results => tasks}/serializers/task_result.py (84%) rename src/{scheduler => }/tasks/task_queue.py (97%) rename src/{results => tasks}/tests/__init__.py (100%) rename src/{results => tasks}/tests/test_detail_view.py (100%) rename src/{results => tasks}/tests/test_list_view.py (100%) rename src/{results => tasks}/tests/test_models.py (100%) rename src/{results => tasks}/tests/test_overview_view.py (100%) rename src/{results => tasks}/tests/test_serializers.py (100%) rename src/{results => tasks}/tests/utils.py (100%) create mode 100644 src/tasks/urls.py rename src/{results => tasks}/views.py (73%) diff --git a/src/actions/acquire_single_freq_fft.py b/src/actions/acquire_single_freq_fft.py index 
e5ee402f..dacc2082 100644 --- a/src/actions/acquire_single_freq_fft.py +++ b/src/actions/acquire_single_freq_fft.py @@ -157,7 +157,7 @@ def __init__(self, name, frequency, gain, sample_rate, fft_size, nffts): def __call__(self, schedule_entry_name, task_id): """This is the entrypoint function called by the scheduler.""" - from results.models import TaskResult + from tasks.models import TaskResult # Raises TaskResult.DoesNotExist if no matching task result task_result = TaskResult.objects.get( @@ -283,7 +283,7 @@ def apply_detector(self, data): return fdata_dbm_m4s def archive(self, task_result, m4s_data, sigmf_md): - from results.models import Acquisition + from tasks.models import Acquisition logger.debug("Storing acquisition in database") diff --git a/src/actions/acquire_stepped_freq_tdomain_iq.py b/src/actions/acquire_stepped_freq_tdomain_iq.py index e8ff41d5..b77f93fb 100644 --- a/src/actions/acquire_stepped_freq_tdomain_iq.py +++ b/src/actions/acquire_stepped_freq_tdomain_iq.py @@ -102,7 +102,7 @@ def __init__(self, name, fcs, gains, sample_rates, durations_ms): def __call__(self, schedule_entry_name, task_id): """This is the entrypoint function called by the scheduler.""" - from results.models import TaskResult + from tasks.models import TaskResult # Raises TaskResult.DoesNotExist if no matching task result task_result = TaskResult.objects.get( @@ -172,7 +172,7 @@ def set_sdr_sample_rate(self, sample_rate): self.sdr.radio.sample_rate = sample_rate def archive(self, task_result, m4s_data, sigmf_md): - from results.models import Acquisition + from tasks.models import Acquisition logger.debug("Storing acquisition in database") diff --git a/src/authentication/migrations/0001_initial.py b/src/authentication/migrations/0001_initial.py index 8fce9acd..a32c1ae8 100644 --- a/src/authentication/migrations/0001_initial.py +++ b/src/authentication/migrations/0001_initial.py @@ -1,4 +1,4 @@ -# Generated by Django 2.2.1 on 2019-05-16 23:49 +# Generated by Django 2.2.1 on 2019-05-17 20:43 import django.contrib.auth.models import django.contrib.auth.validators diff --git a/src/results/apps.py b/src/results/apps.py deleted file mode 100644 index 60bd5c1a..00000000 --- a/src/results/apps.py +++ /dev/null @@ -1,5 +0,0 @@ -from django.apps import AppConfig - - -class ResultsConfig(AppConfig): - name = 'results' diff --git a/src/results/migrations/0002_auto_20190516_2353.py b/src/results/migrations/0002_auto_20190516_2353.py deleted file mode 100644 index 1dcf7cd1..00000000 --- a/src/results/migrations/0002_auto_20190516_2353.py +++ /dev/null @@ -1,25 +0,0 @@ -# Generated by Django 2.2.1 on 2019-05-16 23:53 - -import datetime -from django.db import migrations, models -from django.utils.timezone import utc - - -class Migration(migrations.Migration): - - dependencies = [ - ('results', '0001_initial'), - ] - - operations = [ - migrations.AlterField( - model_name='taskresult', - name='finished', - field=models.DateTimeField(default=datetime.datetime(2019, 5, 16, 23, 0, tzinfo=utc), help_text='The time the task finished'), - ), - migrations.AlterField( - model_name='taskresult', - name='started', - field=models.DateTimeField(default=datetime.datetime(2019, 5, 16, 23, 0, tzinfo=utc), help_text='The time the task started'), - ), - ] diff --git a/src/results/urls.py b/src/results/urls.py deleted file mode 100644 index df210c07..00000000 --- a/src/results/urls.py +++ /dev/null @@ -1,35 +0,0 @@ -from django.urls import path - -from .views import ( - ResultsOverviewViewSet, ResultListViewSet, ResultInstanceViewSet) 
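The in-function model imports kept by this rename follow a common Django pattern: deferring them lets the actions module load during startup. A sketch of the pattern (the action class is hypothetical):

    # Sketch: defer model imports to call time; a top-level import here
    # could raise AppRegistryNotReady while Django is still booting.
    class ExampleAction:  # hypothetical
        def __call__(self, schedule_entry_name, task_id):
            from tasks.models import TaskResult  # deferred on purpose
            return TaskResult.objects.get(
                schedule_entry__name=schedule_entry_name, task_id=task_id)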
- - -urlpatterns = ( - path('', - view=ResultsOverviewViewSet.as_view({ - 'get': 'list' - }), - name='results-overview'), - path('/', - view=ResultListViewSet.as_view({ - 'get': 'list', - 'delete': 'destroy_all' - }), - name='result-list'), - path('/archive/', - view=ResultListViewSet.as_view({ - 'get': 'archive', - }), - name='result-list-archive'), - path('//', - view=ResultInstanceViewSet.as_view({ - 'get': 'retrieve', - 'delete': 'destroy' - }), - name='result-detail'), - path('//archive', - view=ResultInstanceViewSet.as_view({ - 'get': 'archive', - }), - name='result-archive') -) diff --git a/src/schedule/migrations/0001_initial.py b/src/schedule/migrations/0001_initial.py index 0c040f66..52219865 100644 --- a/src/schedule/migrations/0001_initial.py +++ b/src/schedule/migrations/0001_initial.py @@ -1,4 +1,4 @@ -# Generated by Django 2.2.1 on 2019-05-16 23:49 +# Generated by Django 2.2.1 on 2019-05-17 20:43 from django.conf import settings import django.core.validators diff --git a/src/schedule/serializers.py b/src/schedule/serializers.py index 4606cfc0..34478398 100644 --- a/src/schedule/serializers.py +++ b/src/schedule/serializers.py @@ -48,7 +48,7 @@ def to_internal_value(self, dt_str): class ScheduleEntrySerializer(serializers.HyperlinkedModelSerializer): """Covert ScheduleEntry to and from JSON.""" - results = serializers.SerializerMethodField( + task_results = serializers.SerializerMethodField( help_text="The list of results related to the entry") start = DateTimeFromTimestampField( required=False, @@ -98,7 +98,7 @@ class Meta: fields = ('self', 'name', 'action', 'priority', 'start', 'stop', 'relative_stop', 'interval', 'is_active', 'is_private', 'callback_url', 'next_task_time', 'next_task_id', 'created', - 'modified', 'owner', 'results', 'validate_only') + 'modified', 'owner', 'task_results', 'validate_only') extra_kwargs = { 'self': { 'view_name': 'schedule-detail', @@ -158,11 +158,11 @@ def validate(self, data): return data - def get_results(self, obj): + def get_task_results(self, obj): request = self.context['request'] kws = {'schedule_entry_name': obj.name} kws.update(V1) - url = reverse('result-list', kwargs=kws, request=request) + url = reverse('task-result-list', kwargs=kws, request=request) return url def to_internal_value(self, data): diff --git a/src/scheduler/scheduler.py b/src/scheduler/scheduler.py index 8aeb0858..9af7e2ea 100644 --- a/src/scheduler/scheduler.py +++ b/src/scheduler/scheduler.py @@ -8,13 +8,14 @@ from django.utils import timezone from requests_futures.sessions import FuturesSession -from results.consts import MAX_DETAIL_LEN -from results.models import TaskResult -from results.serializers import TaskResultSerializer +from tasks.consts import MAX_DETAIL_LEN +from tasks.models import TaskResult +from tasks.serializers import TaskResultSerializer +from tasks.task_queue import TaskQueue from schedule.models import ScheduleEntry from sensor import settings + from . 
import utils -from .tasks import TaskQueue logger = logging.getLogger(__name__) requests_futures_session = FuturesSession() diff --git a/src/scheduler/tasks/__init__.py b/src/scheduler/tasks/__init__.py deleted file mode 100644 index d3b5e56d..00000000 --- a/src/scheduler/tasks/__init__.py +++ /dev/null @@ -1,4 +0,0 @@ -# flake8: noqa F401 - imported but unused - -from .task import Task -from .task_queue import TaskQueue diff --git a/src/sensor/settings.py b/src/sensor/settings.py index 98d48587..542dd42a 100644 --- a/src/sensor/settings.py +++ b/src/sensor/settings.py @@ -158,7 +158,7 @@ 'authentication.apps.AuthenticationConfig', 'capabilities.apps.CapabilitiesConfig', 'hardware.apps.HardwareConfig', - 'results.apps.ResultsConfig', + 'tasks.apps.TasksConfig', 'schedule.apps.ScheduleConfig', 'scheduler.apps.SchedulerConfig', 'status.apps.StatusConfig', @@ -281,8 +281,8 @@ # Ensure only the last MAX_TASK_RESULTS results are kept per schedule entry MAX_TASK_RESULTS = 100 -# Display at most MAX_TASK_QUEUE upcoming tasks in the status endpoint -MAX_TASK_QUEUE = 100 +# Display at most MAX_TASK_QUEUE upcoming tasks in /tasks/upcoming +MAX_TASK_QUEUE = 50 # Password validation # https://docs.djangoproject.com/en/1.11/ref/settings/#auth-password-validators diff --git a/src/sensor/urls.py b/src/sensor/urls.py index d9a46baf..f5945be7 100644 --- a/src/sensor/urls.py +++ b/src/sensor/urls.py @@ -39,7 +39,7 @@ path('schedule/', include('schedule.urls')), path('status', include('status.urls')), path('users/', include('authentication.urls')), - path('results/', include('results.urls')), + path('tasks/', include('tasks.urls')), path('schema/', schema_view.with_ui('redoc', cache_timeout=0), name='api_schema') ) diff --git a/src/sensor/views.py b/src/sensor/views.py index 6dd52b7c..e9d84dbe 100644 --- a/src/sensor/views.py +++ b/src/sensor/views.py @@ -19,7 +19,7 @@ def api_v1_root(request, version, format=None): 'capabilities': reverse_('capabilities'), 'schedule': reverse_('schedule-list'), 'status': reverse_('status'), - 'results': reverse_('results-overview'), + 'tasks': reverse_('task-root'), 'users': reverse_('user-list') } diff --git a/src/status/migrations/0001_initial.py b/src/status/migrations/0001_initial.py index b15ee78d..13bd1424 100644 --- a/src/status/migrations/0001_initial.py +++ b/src/status/migrations/0001_initial.py @@ -1,4 +1,4 @@ -# Generated by Django 2.2.1 on 2019-05-16 23:49 +# Generated by Django 2.2.1 on 2019-05-17 20:43 from django.db import migrations, models diff --git a/src/status/views.py b/src/status/views.py index 17a6aeb6..4ca7a1e7 100644 --- a/src/status/views.py +++ b/src/status/views.py @@ -4,8 +4,7 @@ from rest_framework.response import Response from scheduler import scheduler -from scheduler.serializers import TaskSerializer -from sensor import settings, utils +from sensor import utils from .models import Location from .serializers import LocationSerializer @@ -26,13 +25,8 @@ def get_location(): @api_view() def status(request, version, format=None): """The status overview of the sensor.""" - context = {'request': request} - taskq = scheduler.thread.task_queue.to_list()[:settings.MAX_TASK_QUEUE] - task_serializer = TaskSerializer(taskq, many=True, context=context) - return Response({ 'scheduler': scheduler.thread.status, 'location': get_location(), - 'system_time': utils.get_datetime_str_now(), - 'task_queue': task_serializer.data + 'system_time': utils.get_datetime_str_now() }) diff --git a/src/results/__init__.py b/src/tasks/__init__.py similarity index 100% 
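With the task queue moved out to /tasks/upcoming, the status endpoint now returns just three keys. An illustrative payload (values made up):

    # Illustrative /status response after this change; values are made up.
    status_response = {
        'scheduler': 'running',
        'location': None,  # populated from the Location model when set
        'system_time': '2019-05-17T20:43:00Z',
    }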
rename from src/results/__init__.py rename to src/tasks/__init__.py diff --git a/src/tasks/apps.py b/src/tasks/apps.py new file mode 100644 index 00000000..20547224 --- /dev/null +++ b/src/tasks/apps.py @@ -0,0 +1,5 @@ +from django.apps import AppConfig + + +class TasksConfig(AppConfig): + name = 'tasks' diff --git a/src/results/consts.py b/src/tasks/consts.py similarity index 100% rename from src/results/consts.py rename to src/tasks/consts.py diff --git a/src/results/migrations/0001_initial.py b/src/tasks/migrations/0001_initial.py similarity index 85% rename from src/results/migrations/0001_initial.py rename to src/tasks/migrations/0001_initial.py index 3c40ee3e..e9e4f0bc 100644 --- a/src/results/migrations/0001_initial.py +++ b/src/tasks/migrations/0001_initial.py @@ -1,4 +1,4 @@ -# Generated by Django 2.2.1 on 2019-05-16 23:49 +# Generated by Django 2.2.1 on 2019-05-17 20:43 import datetime from django.db import migrations, models @@ -21,12 +21,12 @@ class Migration(migrations.Migration): fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('task_id', models.IntegerField(help_text='The id of the task relative to the result')), - ('started', models.DateTimeField(default=datetime.datetime(2019, 5, 16, 23, 42, 27, 761070, tzinfo=utc), help_text='The time the task started')), - ('finished', models.DateTimeField(default=datetime.datetime(2019, 5, 16, 23, 42, 27, 761070, tzinfo=utc), help_text='The time the task finished')), + ('started', models.DateTimeField(default=datetime.datetime(2019, 5, 16, 23, 0, tzinfo=utc), help_text='The time the task started')), + ('finished', models.DateTimeField(default=datetime.datetime(2019, 5, 16, 23, 0, tzinfo=utc), help_text='The time the task finished')), ('duration', models.DurationField(default=datetime.timedelta(0), help_text='Task duration in seconds')), ('status', models.CharField(choices=[(1, 'success'), (2, 'failure'), (3, 'in-progress')], default='in-progress', help_text='"success" or "failure"', max_length=11)), ('detail', models.CharField(blank=True, help_text='Arbitrary detail string', max_length=512)), - ('schedule_entry', models.ForeignKey(help_text='The schedule entry relative to the result', on_delete=django.db.models.deletion.PROTECT, related_name='results', to='schedule.ScheduleEntry')), + ('schedule_entry', models.ForeignKey(help_text='The schedule entry relative to the result', on_delete=django.db.models.deletion.PROTECT, related_name='task_results', to='schedule.ScheduleEntry')), ], options={ 'ordering': ('task_id',), @@ -40,7 +40,7 @@ class Migration(migrations.Migration): ('recording_id', models.IntegerField(default=0, help_text='The id of the recording relative to the task')), ('metadata', jsonfield.fields.JSONField(help_text='The sigmf meta data for the acquisition')), ('data', models.BinaryField(null=True)), - ('task_result', models.ForeignKey(help_text='The task_result relative to the acquisition', on_delete=django.db.models.deletion.CASCADE, related_name='data', to='results.TaskResult')), + ('task_result', models.ForeignKey(help_text='The task_result relative to the acquisition', on_delete=django.db.models.deletion.CASCADE, related_name='data', to='tasks.TaskResult')), ], options={ 'db_table': 'acquisitions', diff --git a/src/results/migrations/__init__.py b/src/tasks/migrations/__init__.py similarity index 100% rename from src/results/migrations/__init__.py rename to src/tasks/migrations/__init__.py diff --git a/src/results/models/__init__.py 
b/src/tasks/models/__init__.py similarity index 74% rename from src/results/models/__init__.py rename to src/tasks/models/__init__.py index 5cebb4e8..9d96f9f6 100644 --- a/src/results/models/__init__.py +++ b/src/tasks/models/__init__.py @@ -1,2 +1,3 @@ from .acquisition import Acquisition # noqa +from .task import Task # noqa from .task_result import TaskResult # noqa diff --git a/src/results/models/acquisition.py b/src/tasks/models/acquisition.py similarity index 100% rename from src/results/models/acquisition.py rename to src/tasks/models/acquisition.py diff --git a/src/scheduler/tasks/task.py b/src/tasks/models/task.py similarity index 100% rename from src/scheduler/tasks/task.py rename to src/tasks/models/task.py diff --git a/src/results/models/task_result.py b/src/tasks/models/task_result.py similarity index 96% rename from src/results/models/task_result.py rename to src/tasks/models/task_result.py index f927feba..3b18dd1f 100644 --- a/src/results/models/task_result.py +++ b/src/tasks/models/task_result.py @@ -5,7 +5,7 @@ from schedule.models import ScheduleEntry from sensor.settings import MAX_TASK_RESULTS -from results.consts import MAX_DETAIL_LEN +from tasks.consts import MAX_DETAIL_LEN UTC = timezone.timezone.utc @@ -25,7 +25,7 @@ class TaskResult(models.Model): schedule_entry = models.ForeignKey( ScheduleEntry, on_delete=models.PROTECT, - related_name='results', + related_name='task_results', help_text="The schedule entry relative to the result") task_id = models.IntegerField( help_text="The id of the task relative to the result") diff --git a/src/results/permissions.py b/src/tasks/permissions.py similarity index 100% rename from src/results/permissions.py rename to src/tasks/permissions.py diff --git a/src/results/serializers/__init__.py b/src/tasks/serializers/__init__.py similarity index 100% rename from src/results/serializers/__init__.py rename to src/tasks/serializers/__init__.py diff --git a/src/results/serializers/acquisition.py b/src/tasks/serializers/acquisition.py similarity index 93% rename from src/results/serializers/acquisition.py rename to src/tasks/serializers/acquisition.py index f46ec621..6ad7d627 100644 --- a/src/results/serializers/acquisition.py +++ b/src/tasks/serializers/acquisition.py @@ -1,7 +1,7 @@ from rest_framework import serializers from rest_framework.reverse import reverse -from results.models import Acquisition +from tasks.models import Acquisition from sensor import V1 @@ -19,7 +19,7 @@ def get_url(self, obj, view_name, request, format): class AcquisitionSerializer(serializers.ModelSerializer): archive = AcquisitionHyperlinkedRelatedField( - view_name='result-archive', + view_name='task-result-archive', read_only=True, help_text="The url to download a SigMF archive of this acquisition", source='*' # pass whole object diff --git a/src/scheduler/serializers.py b/src/tasks/serializers/task.py similarity index 100% rename from src/scheduler/serializers.py rename to src/tasks/serializers/task.py diff --git a/src/results/serializers/task_result.py b/src/tasks/serializers/task_result.py similarity index 84% rename from src/results/serializers/task_result.py rename to src/tasks/serializers/task_result.py index 1fad8372..c004e06f 100644 --- a/src/results/serializers/task_result.py +++ b/src/tasks/serializers/task_result.py @@ -3,7 +3,7 @@ from schedule.models import ScheduleEntry from sensor import V1 -from results.models import TaskResult +from tasks.models import TaskResult from .acquisition import AcquisitionSerializer @@ -21,27 +21,27 @@ def 
get_url(self, obj, view_name, request, format): class TaskResultsOverviewSerializer(serializers.HyperlinkedModelSerializer): - results = serializers.SerializerMethodField( + task_results = serializers.SerializerMethodField( help_text="The link to the task results") + task_results_available = serializers.SerializerMethodField( + help_text="The number of available results") schedule_entry = serializers.SerializerMethodField( help_text="The related schedule entry for the result") - results_available = serializers.SerializerMethodField( - help_text="The number of available results") class Meta: model = ScheduleEntry - fields = ('results', 'results_available', 'schedule_entry') + fields = ('task_results', 'task_results_available', 'schedule_entry') - def get_results(self, obj): + def get_task_results(self, obj): request = self.context['request'] - route = 'result-list' + route = 'task-result-list' kws = {'schedule_entry_name': obj.name} kws.update(V1) url = reverse(route, kwargs=kws, request=request) return url - def get_results_available(self, obj): - return obj.results.count() + def get_task_results_available(self, obj): + return obj.task_results.count() def get_schedule_entry(self, obj): request = self.context['request'] @@ -54,7 +54,7 @@ def get_schedule_entry(self, obj): class TaskResultSerializer(serializers.HyperlinkedModelSerializer): self = TaskResultHyperlinkedRelatedField( - view_name='result-detail', + view_name='task-result-detail', read_only=True, help_text="The url of the result", source='*' # pass whole object diff --git a/src/scheduler/tasks/task_queue.py b/src/tasks/task_queue.py similarity index 97% rename from src/scheduler/tasks/task_queue.py rename to src/tasks/task_queue.py index d540a747..4dbf2360 100644 --- a/src/scheduler/tasks/task_queue.py +++ b/src/tasks/task_queue.py @@ -6,7 +6,7 @@ import heapq -from . 
import Task +from .models import Task class TaskQueue(list): diff --git a/src/results/tests/__init__.py b/src/tasks/tests/__init__.py similarity index 100% rename from src/results/tests/__init__.py rename to src/tasks/tests/__init__.py diff --git a/src/results/tests/test_detail_view.py b/src/tasks/tests/test_detail_view.py similarity index 100% rename from src/results/tests/test_detail_view.py rename to src/tasks/tests/test_detail_view.py diff --git a/src/results/tests/test_list_view.py b/src/tasks/tests/test_list_view.py similarity index 100% rename from src/results/tests/test_list_view.py rename to src/tasks/tests/test_list_view.py diff --git a/src/results/tests/test_models.py b/src/tasks/tests/test_models.py similarity index 100% rename from src/results/tests/test_models.py rename to src/tasks/tests/test_models.py diff --git a/src/results/tests/test_overview_view.py b/src/tasks/tests/test_overview_view.py similarity index 100% rename from src/results/tests/test_overview_view.py rename to src/tasks/tests/test_overview_view.py diff --git a/src/results/tests/test_serializers.py b/src/tasks/tests/test_serializers.py similarity index 100% rename from src/results/tests/test_serializers.py rename to src/tasks/tests/test_serializers.py diff --git a/src/results/tests/utils.py b/src/tasks/tests/utils.py similarity index 100% rename from src/results/tests/utils.py rename to src/tasks/tests/utils.py diff --git a/src/tasks/urls.py b/src/tasks/urls.py new file mode 100644 index 00000000..4c2aebbd --- /dev/null +++ b/src/tasks/urls.py @@ -0,0 +1,38 @@ +from django.urls import path + +from .views import ( + TaskResultsOverviewViewSet, TaskResultListViewSet, + TaskResultInstanceViewSet, task_root, upcoming_tasks) + + +urlpatterns = ( + path('', view=task_root, name='task-root'), + path('upcoming/', view=upcoming_tasks, name='upcoming-tasks'), + path('completed/', + view=TaskResultsOverviewViewSet.as_view({ + 'get': 'list' + }), + name='task-results-overview'), + path('completed//', + view=TaskResultListViewSet.as_view({ + 'get': 'list', + 'delete': 'destroy_all' + }), + name='task-result-list'), + path('completed//archive/', + view=TaskResultListViewSet.as_view({ + 'get': 'archive', + }), + name='task-result-list-archive'), + path('completed///', + view=TaskResultInstanceViewSet.as_view({ + 'get': 'retrieve', + 'delete': 'destroy' + }), + name='task-result-detail'), + path('completed///archive', + view=TaskResultInstanceViewSet.as_view({ + 'get': 'archive', + }), + name='task-result-archive') +) diff --git a/src/results/views.py b/src/tasks/views.py similarity index 73% rename from src/results/views.py rename to src/tasks/views.py index 17c32514..60125b10 100644 --- a/src/results/views.py +++ b/src/tasks/views.py @@ -1,5 +1,6 @@ import logging import tempfile +from functools import partial from django.http import Http404, FileResponse from rest_framework import filters, status @@ -7,17 +8,22 @@ from rest_framework.generics import get_object_or_404 from rest_framework.mixins import ( ListModelMixin, RetrieveModelMixin, DestroyModelMixin) +from rest_framework.decorators import api_view from rest_framework.response import Response +from rest_framework.reverse import reverse from rest_framework.settings import api_settings from rest_framework.viewsets import GenericViewSet import sigmf.archive import sigmf.sigmffile -import sensor.settings from schedule.models import ScheduleEntry +from scheduler import scheduler +from sensor import settings + from .models.task_result import TaskResult from .permissions 
import IsAdminOrOwnerOrReadOnly +from .serializers.task import TaskSerializer from .serializers.task_result import ( TaskResultsOverviewSerializer, TaskResultSerializer) @@ -25,7 +31,29 @@ logger = logging.getLogger(__name__) -class ResultsOverviewViewSet(ListModelMixin, GenericViewSet): +@api_view() +def task_root(request, version, format=None): + """Provides links to upcoming and completed tasks""" + reverse_ = partial(reverse, request=request, format=format) + task_endpoints = { + 'upcoming': reverse_('upcoming-tasks'), + 'completed': reverse_('task-results-overview') + } + + return Response(task_endpoints) + + +@api_view() +def upcoming_tasks(request, version, format=None): + """Returns a snapshot of upcoming tasks.""" + context = {'request': request} + taskq = scheduler.thread.task_queue.to_list()[:settings.MAX_TASK_QUEUE] + taskq_serializer = TaskSerializer(taskq, many=True, context=context) + + return Response(taskq_serializer.data) + + +class TaskResultsOverviewViewSet(ListModelMixin, GenericViewSet): """ list: Returns an overview of how many results are available per schedule @@ -71,56 +99,7 @@ def get_object(self): return get_object_or_404(queryset, **filter) -class TaskResultListViewSet(MultipleFieldLookupMixin, ListModelMixin, - GenericViewSet): - """ - list: - Returns a list of all acquisitions created by the given schedule entry. - - destroy_all: - Deletes all acquisitions created by the given schedule entry. - """ - queryset = TaskResult.objects.all() - serializer_class = TaskResultSerializer - permission_classes = ( - api_settings.DEFAULT_PERMISSION_CLASSES + [IsAdminOrOwnerOrReadOnly]) - filter_backends = (filters.SearchFilter, filters.OrderingFilter) - lookup_fields = ('schedule_entry__name', 'task_id') - ordering_fields = ('task_id', 'created') - search_fields = ('sigmf_metadata', ) - - @action(detail=False, methods=('delete', )) - def destroy_all(self, request, version, schedule_entry_name): - queryset = self.get_queryset() - queryset = queryset.filter(schedule_entry__name=schedule_entry_name) - - if not queryset.exists(): - raise Http404 - - queryset.delete() - - return Response(status=status.HTTP_204_NO_CONTENT) - - @action(detail=False) - def archive(self, request, version, schedule_entry_name): - queryset = self.get_queryset() - queryset = queryset.filter(schedule_entry__name=schedule_entry_name) - fqdn = sensor.settings.FQDN - fname = fqdn + '_' + schedule_entry_name + '.sigmf' - - if not queryset.exists(): - raise Http404 - - # FileResponse handles closing the file - tmparchive = tempfile.TemporaryFile() - build_sigmf_archive(tmparchive, schedule_entry_name, queryset) - content_type = 'application/x-tar' - response = FileResponse(tmparchive, as_attachment=True, filename=fname, - content_type=content_type) - return response - - -class ResultListViewSet(ListModelMixin, GenericViewSet): +class TaskResultListViewSet(ListModelMixin, GenericViewSet): """ list: Returns a list of all results created by the given schedule entry. @@ -128,6 +107,9 @@ class ResultListViewSet(ListModelMixin, GenericViewSet): destroy_all: Deletes all results created by the given schedule entry. + archive: + Downloads the acquisition's SigMF archive. 
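Taken together with the new tasks/urls.py, the endpoints nest under a single /tasks tree. An illustrative walk of it (host and entry/task values made up):

    # Illustrative layout of the new /tasks tree:
    #   GET /api/v1/tasks/           -> {'upcoming': ..., 'completed': ...}
    #   GET /api/v1/tasks/upcoming/  -> at most MAX_TASK_QUEUE (50) tasks
    #   GET /api/v1/tasks/completed/test_acq/1/archive -> SigMF archive
    import requests

    root = requests.get('https://sensor.example.org/api/v1/tasks/').json()
    print(root['upcoming'], root['completed'])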
+ """ queryset = TaskResult.objects.all() serializer_class = TaskResultSerializer @@ -135,8 +117,8 @@ class ResultListViewSet(ListModelMixin, GenericViewSet): api_settings.DEFAULT_PERMISSION_CLASSES + [IsAdminOrOwnerOrReadOnly]) filter_backends = (filters.SearchFilter, filters.OrderingFilter) lookup_fields = ('schedule_entry__name', 'task_id') - ordering_fields = ('task_id', 'started', 'finished', 'duration', 'result') - search_fields = ('task_id', 'result', 'detail') + ordering_fields = ('task_id', 'started', 'finished', 'duration', 'status') + search_fields = ('task_id', 'status', 'detail') def get_queryset(self): # .list() does not call .get_object(), which triggers permissions @@ -173,7 +155,7 @@ def archive(self, request, version, schedule_entry_name): if not queryset.exists(): raise Http404 - fqdn = sensor.settings.FQDN + fqdn = settings.FQDN fname = fqdn + '_' + schedule_entry_name + '.sigmf' # FileResponse handles closing the file @@ -186,8 +168,8 @@ def archive(self, request, version, schedule_entry_name): return response -class ResultInstanceViewSet(MultipleFieldLookupMixin, RetrieveModelMixin, - DestroyModelMixin, GenericViewSet): +class TaskResultInstanceViewSet(MultipleFieldLookupMixin, RetrieveModelMixin, + DestroyModelMixin, GenericViewSet): """ retrieve: Returns a specific result. @@ -208,7 +190,7 @@ class ResultInstanceViewSet(MultipleFieldLookupMixin, RetrieveModelMixin, @action(detail=True) def archive(self, request, version, schedule_entry_name, task_id): entry_name = schedule_entry_name - fqdn = sensor.settings.FQDN + fqdn = settings.FQDN fname = fqdn + '_' + entry_name + '_' + str(task_id) + '.sigmf' acq = self.get_object() From 79d02b7c65b7cad28fd66c144a68f318d393b6b3 Mon Sep 17 00:00:00 2001 From: Douglas Anderson Date: Thu, 23 May 2019 00:17:04 -0600 Subject: [PATCH 04/36] Fix imports for new app structure --- .../tests/test_acquire_single_freq_fft.py | 14 ++++--- src/schedule/tests/test_views.py | 2 +- src/tasks/tests/test_detail_view.py | 4 +- src/tasks/tests/test_list_view.py | 6 +-- src/tasks/tests/test_models.py | 4 +- src/tasks/tests/test_overview_view.py | 7 ++-- src/tasks/tests/test_serializers.py | 8 ++-- src/tasks/tests/utils.py | 41 ++++++++++++++++++- 8 files changed, 63 insertions(+), 23 deletions(-) diff --git a/src/actions/tests/test_acquire_single_freq_fft.py b/src/actions/tests/test_acquire_single_freq_fft.py index e61289b3..f39af14f 100644 --- a/src/actions/tests/test_acquire_single_freq_fft.py +++ b/src/actions/tests/test_acquire_single_freq_fft.py @@ -1,12 +1,14 @@ -from actions import by_name -from acquisitions.models import Acquisition +import json +from os import path + from django.conf import settings # from jsonschema import validate as schema_validate -from schedule.tests.utils import post_schedule, TEST_SCHEDULE_ENTRY from sigmf.validate import validate as sigmf_validate -import json -from os import path +import actions +from schedule.tests.utils import post_schedule, TEST_SCHEDULE_ENTRY +from tasks.models import Acquisition + SCHEMA_DIR = path.join(settings.REPO_ROOT, "schemas") SCHEMA_FNAME = "scos_transfer_spec_schema.json" @@ -23,7 +25,7 @@ def test_detector(user_client, rf): task_id = rjson['next_task_id'] # use mock_acquire set up in conftest.py - by_name['mock_acquire'](entry_name, task_id) + actions.by_name['mock_acquire'](entry_name, task_id) acquistion = Acquisition.objects.get(task_id=task_id) sigmf_metadata = acquistion.sigmf_metadata assert sigmf_validate(sigmf_metadata) diff --git a/src/schedule/tests/test_views.py 
b/src/schedule/tests/test_views.py index 8b6a8615..4267ec5c 100644 --- a/src/schedule/tests/test_views.py +++ b/src/schedule/tests/test_views.py @@ -1,12 +1,12 @@ from rest_framework import status from rest_framework.reverse import reverse -from acquisitions.tests.utils import simulate_acquisitions from schedule.tests.utils import (EMPTY_SCHEDULE_RESPONSE, TEST_SCHEDULE_ENTRY, TEST_PRIVATE_SCHEDULE_ENTRY, post_schedule, reverse_detail_url) from sensor import V1 from sensor.tests.utils import validate_response, HTTPS_KWARG +from tasks.tests.utils import simulate_acquisitions def test_entry_posted_to_schedule_is_immediately_available(user_client): diff --git a/src/tasks/tests/test_detail_view.py b/src/tasks/tests/test_detail_view.py index 143da522..50699c95 100644 --- a/src/tasks/tests/test_detail_view.py +++ b/src/tasks/tests/test_detail_view.py @@ -1,8 +1,8 @@ from rest_framework import status -from acquisitions.tests.utils import simulate_acquisitions -from results.tests.utils import create_task_results, reverse_result_detail from sensor.tests.utils import validate_response, HTTPS_KWARG +from tasks.tests.utils import ( + create_task_results, reverse_result_detail, simulate_acquisitions) def test_can_view_own_result_details(user_client): diff --git a/src/tasks/tests/test_list_view.py b/src/tasks/tests/test_list_view.py index 0156df8e..876adafd 100644 --- a/src/tasks/tests/test_list_view.py +++ b/src/tasks/tests/test_list_view.py @@ -1,14 +1,14 @@ import pytest from rest_framework import status -from acquisitions.tests.utils import simulate_acquisitions -from results.tests.utils import ( +from sensor.tests.utils import validate_response, HTTPS_KWARG +from tasks.tests.utils import ( create_task_results, get_result_list, reverse_result_detail, reverse_result_list, + simulate_acquisitions ) -from sensor.tests.utils import validate_response, HTTPS_KWARG def test_non_existent_entry(user_client): diff --git a/src/tasks/tests/test_models.py b/src/tasks/tests/test_models.py index d645168d..cd18c97a 100644 --- a/src/tasks/tests/test_models.py +++ b/src/tasks/tests/test_models.py @@ -1,7 +1,7 @@ import pytest -from results.models import TaskResult -from results.tests.utils import TEST_MAX_TASK_RESULTS, create_task_results +from tasks.models import TaskResult +from tasks.tests.utils import TEST_MAX_TASK_RESULTS, create_task_results @pytest.mark.django_db diff --git a/src/tasks/tests/test_overview_view.py b/src/tasks/tests/test_overview_view.py index 2ba86fae..8b62dc34 100644 --- a/src/tasks/tests/test_overview_view.py +++ b/src/tasks/tests/test_overview_view.py @@ -1,10 +1,9 @@ from rest_framework import status -from acquisitions.tests.utils import simulate_acquisitions -from results.tests.utils import (EMPTY_RESULTS_RESPONSE, create_task_results, - reverse_results_overview, - get_results_overview) from sensor.tests.utils import validate_response, HTTPS_KWARG +from tasks.tests.utils import ( + EMPTY_RESULTS_RESPONSE, create_task_results, reverse_results_overview, + get_results_overview, simulate_acquisitions) def test_user_empty_overview_response(user_client): diff --git a/src/tasks/tests/test_serializers.py b/src/tasks/tests/test_serializers.py index fe334187..29af1b9c 100644 --- a/src/tasks/tests/test_serializers.py +++ b/src/tasks/tests/test_serializers.py @@ -1,9 +1,9 @@ import pytest -from results.models import TaskResult -from results.serializers import (TaskResultSerializer, - TaskResultsOverviewSerializer) -from results.tests.utils import create_task_results +from tasks.models import 
TaskResult +from tasks.serializers import (TaskResultSerializer, + TaskResultsOverviewSerializer) +from tasks.tests.utils import create_task_results @pytest.mark.django_db diff --git a/src/tasks/tests/utils.py b/src/tasks/tests/utils.py index 6e8b4d8f..67bc327d 100644 --- a/src/tasks/tests/utils.py +++ b/src/tasks/tests/utils.py @@ -5,17 +5,56 @@ from rest_framework.reverse import reverse from rest_framework import status -from results.models import TaskResult from schedule.models import ScheduleEntry from schedule.tests.utils import post_schedule, TEST_SCHEDULE_ENTRY +from scheduler.tests.utils import simulate_scheduler_run from sensor import V1 from sensor.tests.utils import validate_response, HTTPS_KWARG +from tasks.models import TaskResult TEST_MAX_TASK_RESULTS = 100 # Reduce from default of settings.MAX_TASK_RESULTS ONE_MICROSECOND = datetime.timedelta(0, 0, 1) EMPTY_RESULTS_RESPONSE = [] +EMPTY_ACQUISITIONS_RESPONSE = [] + +SINGLE_ACQUISITION = { + 'name': 'test_acq', + 'start': None, + 'stop': None, + 'interval': None, + 'action': 'mock_acquire' +} + +MULTIPLE_ACQUISITIONS = { + 'name': 'test_multiple_acq', + 'start': None, + 'relative_stop': 5, + 'interval': 1, + 'action': 'mock_acquire' +} + + +def simulate_acquisitions(client, n=1, is_private=False, name=None): + assert 0 < n <= 10 + + if n == 1: + schedule_entry = SINGLE_ACQUISITION.copy() + else: + schedule_entry = MULTIPLE_ACQUISITIONS.copy() + schedule_entry['relative_stop'] = n + 1 + + schedule_entry['is_private'] = is_private + + if name is not None: + schedule_entry['name'] = name + + entry = post_schedule(client, schedule_entry) + simulate_scheduler_run(n) + + return entry['name'] + def create_task_results(n, user_client, entry_name=None): # We need an entry in the schedule to create TRs for From ff12702808268e17830238eb65e31f461480b624 Mon Sep 17 00:00:00 2001 From: Douglas Anderson Date: Tue, 28 May 2019 21:36:10 -0600 Subject: [PATCH 05/36] Prefer black over yapf --- src/requirements-dev.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/requirements-dev.txt b/src/requirements-dev.txt index 1dd69e7c..ea7544fc 100644 --- a/src/requirements-dev.txt +++ b/src/requirements-dev.txt @@ -1,5 +1,6 @@ -rrequirements.txt +black==18.9b0 flake8==3.7.7 jedi==0.13.3 jsonschema==3.0.1 @@ -8,4 +9,3 @@ pytest-cov==2.7.1 pytest-django==3.4.8 pytest-flake8==1.0.4 tox==3.10.0 -yapf==0.27.0 From 7069cdf0f14ac1a5cb6ce302d65ea40e168bb9c5 Mon Sep 17 00:00:00 2001 From: Douglas Anderson Date: Tue, 28 May 2019 21:37:43 -0600 Subject: [PATCH 06/36] blacken codebase Run `pip install black && black src` from root dir --- src/actions/__init__.py | 17 +- src/actions/acquire_single_freq_fft.py | 39 +- .../acquire_stepped_freq_tdomain_iq.py | 51 ++- src/actions/sync_gps.py | 3 +- .../tests/test_acquire_single_freq_fft.py | 7 +- src/actions/tests/test_init.py | 2 +- src/authentication/apps.py | 2 +- src/authentication/migrations/0001_initial.py | 127 +++++- src/authentication/models.py | 1 + src/authentication/serializers.py | 44 +- src/authentication/tests/test_list_view.py | 8 +- src/authentication/urls.py | 6 +- src/authentication/views.py | 21 +- src/capabilities/__init__.py | 2 +- src/capabilities/apps.py | 2 +- src/capabilities/urls.py | 4 +- src/capabilities/views.py | 17 +- src/conftest.py | 39 +- src/hardware/apps.py | 2 +- src/hardware/gps_iface.py | 51 ++- src/hardware/mocks/usrp_block.py | 4 +- src/hardware/scale_factors.py | 63 ++- src/hardware/tests/test_scale_factors.py | 14 +- src/hardware/tests/test_usrp.py | 2 
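The `simulate_acquisitions` helper added above posts a `mock_acquire` schedule entry and then drives the scheduler directly, so tests no longer depend on the deleted acquisitions app. A sketch of typical usage (assuming the `user_client` fixture from conftest.py, and that each simulated task archives exactly one acquisition reachable through its task result):

    import pytest

    from tasks.models import Acquisition
    from tasks.tests.utils import simulate_acquisitions


    @pytest.mark.django_db
    def test_simulated_acquisitions(user_client):
        # Posts the multiple-acquisition entry and runs the scheduler 3 times
        entry_name = simulate_acquisitions(user_client, n=3)

        acquisitions = Acquisition.objects.filter(
            task_result__schedule_entry__name=entry_name
        )
        assert acquisitions.count() == 3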
+- src/hardware/usrp_iface.py | 21 +- src/manage.py | 7 +- src/schedule/apps.py | 2 +- src/schedule/migrations/0001_initial.py | 179 ++++++-- src/schedule/models/request.py | 1 + src/schedule/models/schedule_entry.py | 82 ++-- src/schedule/serializers.py | 122 +++-- src/schedule/tests/test_admin_views.py | 92 ++-- src/schedule/tests/test_models.py | 47 +- src/schedule/tests/test_serializers.py | 425 ++++++------------ src/schedule/tests/test_user_views.py | 84 ++-- src/schedule/tests/test_views.py | 38 +- src/schedule/tests/utils.py | 38 +- src/schedule/urls.py | 2 +- src/schedule/views.py | 18 +- src/scheduler/apps.py | 2 +- src/scheduler/scheduler.py | 26 +- src/scheduler/tests/test_scheduler.py | 93 ++-- src/scheduler/tests/utils.py | 20 +- src/sensor/__init__.py | 2 +- src/sensor/apps.py | 2 +- src/sensor/exceptions.py | 36 +- src/sensor/settings.py | 340 ++++++-------- src/sensor/tests/test_api_docs.py | 4 +- src/sensor/tests/test_api_root_view.py | 9 +- src/sensor/tests/utils.py | 4 +- src/sensor/urls.py | 39 +- src/sensor/utils.py | 2 +- src/sensor/views.py | 18 +- src/status/admin.py | 2 +- src/status/apps.py | 2 +- src/status/migrations/0001_initial.py | 65 ++- src/status/models.py | 16 +- src/status/serializers.py | 2 +- src/status/urls.py | 4 +- src/status/views.py | 12 +- src/tasks/apps.py | 2 +- src/tasks/migrations/0001_initial.py | 123 ++++- src/tasks/models/acquisition.py | 20 +- src/tasks/models/task.py | 4 +- src/tasks/models/task_result.py | 41 +- src/tasks/serializers/acquisition.py | 18 +- src/tasks/serializers/task.py | 10 +- src/tasks/serializers/task_result.py | 59 +-- src/tasks/tests/test_detail_view.py | 8 +- src/tasks/tests/test_list_view.py | 19 +- src/tasks/tests/test_overview_view.py | 31 +- src/tasks/tests/test_serializers.py | 19 +- src/tasks/tests/utils.py | 57 +-- src/tasks/urls.py | 66 +-- src/tasks/views.py | 79 ++-- 75 files changed, 1595 insertions(+), 1347 deletions(-) diff --git a/src/actions/__init__.py b/src/actions/__init__.py index d30de9a9..49b3d498 100644 --- a/src/actions/__init__.py +++ b/src/actions/__init__.py @@ -18,10 +18,11 @@ # Actions initialized here are made available through the API registered_actions = { "logger": logger_action.Logger(), - "admin_logger": - logger_action.Logger(loglvl=logger_action.LOGLVL_ERROR, admin_only=True), + "admin_logger": logger_action.Logger( + loglvl=logger_action.LOGLVL_ERROR, admin_only=True + ), "monitor_usrp": monitor_usrp.UsrpMonitor(admin_only=True), - "sync_gps": sync_gps.SyncGps(admin_only=True) + "sync_gps": sync_gps.SyncGps(admin_only=True), } by_name = registered_actions @@ -33,10 +34,8 @@ "logger": logger_action.Logger, "usrp_monitor": monitor_usrp.UsrpMonitor, "sync_gps": sync_gps.SyncGps, - "single_frequency_fft": - acquire_single_freq_fft.SingleFrequencyFftAcquisition, - "stepped_frequency_time_domain_iq": - acquire_stepped_freq_tdomain_iq.SteppedFrequencyTimeDomainIqAcquisition + "single_frequency_fft": acquire_single_freq_fft.SingleFrequencyFftAcquisition, + "stepped_frequency_time_domain_iq": acquire_stepped_freq_tdomain_iq.SteppedFrequencyTimeDomainIqAcquisition, } @@ -63,9 +62,9 @@ def get_summary(action_fn): def load_from_yaml(yaml_dir=settings.ACTION_DEFINITIONS_DIR): """Load any YAML files in yaml_dir.""" - yaml = YAML(typ='safe') + yaml = YAML(typ="safe") yaml_path = Path(yaml_dir) - for yaml_file in yaml_path.glob('*.yml'): + for yaml_file in yaml_path.glob("*.yml"): defn = yaml.load(yaml_file) for class_name, parameters in defn.items(): try: diff --git 
a/src/actions/acquire_single_freq_fft.py b/src/actions/acquire_single_freq_fft.py index dacc2082..821ac061 100644 --- a/src/actions/acquire_single_freq_fft.py +++ b/src/actions/acquire_single_freq_fft.py @@ -96,7 +96,7 @@ GLOBAL_INFO = { "core:datatype": "f32_le", # 32-bit float, Little Endian - "core:version": "0.0.1" + "core:version": "0.0.1", } @@ -109,7 +109,7 @@ class M4sDetector(Enum): # The sigmf-ns-scos version targeted by this action -SCOS_TRANSFER_SPEC_VER = '0.2' +SCOS_TRANSFER_SPEC_VER = "0.2" def m4s_detector(array): @@ -161,7 +161,8 @@ def __call__(self, schedule_entry_name, task_id): # Raises TaskResult.DoesNotExist if no matching task result task_result = TaskResult.objects.get( - schedule_entry__name=schedule_entry_name, task_id=task_id) + schedule_entry__name=schedule_entry_name, task_id=task_id + ) self.test_required_components() self.configure_sdr() @@ -219,14 +220,14 @@ def build_sigmf_md(self): sigmf_md.set_global_field("core:sample_rate", self.sample_rate) sigmf_md.set_global_field("core:description", self.description) - sensor_def = capabilities['sensor_definition'] + sensor_def = capabilities["sensor_definition"] sigmf_md.set_global_field("ntia:sensor_definition", sensor_def) sigmf_md.set_global_field("ntia:sensor_id", settings.FQDN) sigmf_md.set_global_field("scos:version", SCOS_TRANSFER_SPEC_VER) capture_md = { "core:frequency": self.frequency, - "core:time": utils.get_datetime_str_now() + "core:time": utils.get_datetime_str_now(), } sigmf_md.add_capture(start_index=0, metadata=capture_md) @@ -239,19 +240,20 @@ def build_sigmf_md(self): "detector": detector.name + "_power", "number_of_ffts": self.nffts, "units": "dBm", - "reference": "not referenced" + "reference": "not referenced", } annotation_md = { "scos:measurement_type": { - "single_frequency_fft_detection": single_frequency_fft_md, + "single_frequency_fft_detection": single_frequency_fft_md } } sigmf_md.add_annotation( start_index=(i * self.fft_size), length=self.fft_size, - metadata=annotation_md) + metadata=annotation_md, + ) return sigmf_md @@ -260,10 +262,10 @@ def apply_detector(self, data): logger.debug("Applying detector") window = np.blackman(self.fft_size) - window_power = sum(window**2) + window_power = sum(window ** 2) impedance = 50.0 # ohms - self.enbw = self.fft_size * window_power / sum(window)**2 + self.enbw = self.fft_size * window_power / sum(window) ** 2 Vsq2W_dB = -10.0 * np.log10(self.fft_size * window_power * impedance) @@ -288,19 +290,18 @@ def archive(self, task_result, m4s_data, sigmf_md): logger.debug("Storing acquisition in database") Acquisition( - task_result=task_result, - metadata=sigmf_md._metadata, - data=m4s_data).save() + task_result=task_result, metadata=sigmf_md._metadata, data=m4s_data + ).save() @property def description(self): defs = { - 'name': self.name, - 'frequency': self.frequency / 1e6, - 'sample_rate': self.sample_rate / 1e6, - 'fft_size': self.fft_size, - 'nffts': self.nffts, - 'gain': self.gain + "name": self.name, + "frequency": self.frequency / 1e6, + "sample_rate": self.sample_rate / 1e6, + "fft_size": self.fft_size, + "nffts": self.nffts, + "gain": self.gain, } # __doc__ refers to the module docstring at the top of the file diff --git a/src/actions/acquire_stepped_freq_tdomain_iq.py b/src/actions/acquire_stepped_freq_tdomain_iq.py index b77f93fb..8035192f 100644 --- a/src/actions/acquire_stepped_freq_tdomain_iq.py +++ b/src/actions/acquire_stepped_freq_tdomain_iq.py @@ -59,12 +59,12 @@ GLOBAL_INFO = { "core:datatype": "cf32_le", # 2x 32-bit float, Little 
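The `apply_detector` changes above only re-wrap the window bookkeeping; the underlying math is unchanged. As a standalone illustration (assumed FFT size, and the same 50 ohm impedance the action hard-codes):

    import numpy as np

    fft_size = 1024
    impedance = 50.0  # ohms

    window = np.blackman(fft_size)
    window_power = np.sum(window ** 2)

    # Equivalent noise bandwidth of the window, in FFT bins (~1.73 for Blackman)
    enbw = fft_size * window_power / np.sum(window) ** 2

    # Correction from volts-squared in an FFT bin to power in dB, as the action computes it
    vsq2w_db = -10.0 * np.log10(fft_size * window_power * impedance)

    print("ENBW = {:.3f} bins, correction = {:.2f} dB".format(enbw, vsq2w_db))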
Endian - "core:version": "0.0.2" + "core:version": "0.0.2", } # The sigmf-ns-scos version targeted by this action -SCOS_TRANSFER_SPEC_VER = '0.2' +SCOS_TRANSFER_SPEC_VER = "0.2" class SteppedFrequencyTimeDomainIqAcquisition(Action): @@ -83,7 +83,7 @@ def __init__(self, name, fcs, gains, sample_rates, durations_ms): nfcs = len(fcs) - parameter_names = ('gain', 'sample_rate', 'duration_ms') + parameter_names = ("gain", "sample_rate", "duration_ms") tuning_parameters = {} for fc, *params in zip_longest(fcs, gains, sample_rates, durations_ms): @@ -106,7 +106,8 @@ def __call__(self, schedule_entry_name, task_id): # Raises TaskResult.DoesNotExist if no matching task result task_result = TaskResult.objects.get( - schedule_entry__name=schedule_entry_name, task_id=task_id) + schedule_entry__name=schedule_entry_name, task_id=task_id + ) self.test_required_components() @@ -134,7 +135,7 @@ def acquire_data(self, fc): sigmf_md.set_global_field("core:sample_rate", sample_rate) sigmf_md.set_global_field("core:description", self.description) - sensor_def = capabilities['sensor_definition'] + sensor_def = capabilities["sensor_definition"] sigmf_md.set_global_field("ntia:sensor_definition", sensor_def) sigmf_md.set_global_field("ntia:sensor_id", settings.FQDN) sigmf_md.set_global_field("scos:version", SCOS_TRANSFER_SPEC_VER) @@ -142,7 +143,7 @@ def acquire_data(self, fc): # Acquire data and build per-capture metadata data = np.array([], dtype=np.complex64) - nsamps = int(sample_rate * tuning_parameters['duration_ms'] * 1e-3) + nsamps = int(sample_rate * tuning_parameters["duration_ms"] * 1e-3) dt = utils.get_datetime_str_now() acq = self.sdr.radio.acquire_samples(nsamps).astype(np.complex64) @@ -150,8 +151,7 @@ def acquire_data(self, fc): capture_md = {"core:frequency": fc, "core:datetime": dt} sigmf_md.add_capture(start_index=0, metadata=capture_md) annotation_md = {"applied_scale_factor": self.sdr.radio.scale_factor} - sigmf_md.add_annotation(start_index=0, length=nsamps, - metadata=annotation_md) + sigmf_md.add_annotation(start_index=0, length=nsamps, metadata=annotation_md) return data, sigmf_md @@ -177,9 +177,8 @@ def archive(self, task_result, m4s_data, sigmf_md): logger.debug("Storing acquisition in database") Acquisition( - task_result=task_result, - metadata=sigmf_md._metadata, - data=m4s_data).save() + task_result=task_result, metadata=sigmf_md._metadata, data=m4s_data + ).save() @property def description(self): @@ -194,34 +193,34 @@ def description(self): total_samples = 0 for fc in self.fcs: tuning_params = self.tuning_parameters[fc].copy() - tuning_params['fc_MHz'] = fc / 1e6 - srate = tuning_params['sample_rate'] - tuning_params['sample_rate_Msps'] = srate / 1e6 + tuning_params["fc_MHz"] = fc / 1e6 + srate = tuning_params["sample_rate"] + tuning_params["sample_rate_Msps"] = srate / 1e6 acquisition_plan += acq_plan_template.format(**tuning_params) - total_samples += int(tuning_params['duration_ms'] / 1e6 * srate) + total_samples += int(tuning_params["duration_ms"] / 1e6 * srate) f_low = self.fcs[0] - f_low_srate = self.tuning_parameters[f_low]['sample_rate'] + f_low_srate = self.tuning_parameters[f_low]["sample_rate"] f_low_edge = (f_low - f_low_srate / 2.0) / 1e6 f_high = self.fcs[-1] - f_high_srate = self.tuning_parameters[f_high]['sample_rate'] + f_high_srate = self.tuning_parameters[f_high]["sample_rate"] f_high_edge = (f_high - f_high_srate / 2.0) / 1e6 - durations = [v['duration_ms'] for v in self.tuning_parameters.values()] + durations = [v["duration_ms"] for v in 
self.tuning_parameters.values()] min_duration_ms = np.sum(durations) filesize_mb = total_samples * 8 / 1e6 # 8 bytes per complex64 sample defs = { - 'name': self.name, - 'nfcs': self.nfcs, - 'f_low_edge': f_low_edge, - 'f_high_edge': f_high_edge, - 'acquisition_plan': acquisition_plan, - 'min_duration_ms': min_duration_ms, - 'total_samples': total_samples, - 'filesize_mb': filesize_mb + "name": self.name, + "nfcs": self.nfcs, + "f_low_edge": f_low_edge, + "f_high_edge": f_high_edge, + "acquisition_plan": acquisition_plan, + "min_duration_ms": min_duration_ms, + "total_samples": total_samples, + "filesize_mb": filesize_mb, } # __doc__ refers to the module docstring at the top of the file diff --git a/src/actions/sync_gps.py b/src/actions/sync_gps.py index 14bad32e..b1472076 100644 --- a/src/actions/sync_gps.py +++ b/src/actions/sync_gps.py @@ -39,4 +39,5 @@ def __call__(self, name, tid): gps=True, description=GPS_LOCATION_DESCRIPTION, latitude=latitude, - longitude=longitude) + longitude=longitude, + ) diff --git a/src/actions/tests/test_acquire_single_freq_fft.py b/src/actions/tests/test_acquire_single_freq_fft.py index f39af14f..f2db7107 100644 --- a/src/actions/tests/test_acquire_single_freq_fft.py +++ b/src/actions/tests/test_acquire_single_freq_fft.py @@ -2,6 +2,7 @@ from os import path from django.conf import settings + # from jsonschema import validate as schema_validate from sigmf.validate import validate as sigmf_validate @@ -21,11 +22,11 @@ def test_detector(user_client, rf): # Put an entry in the schedule that we can refer to rjson = post_schedule(user_client, TEST_SCHEDULE_ENTRY) - entry_name = rjson['name'] - task_id = rjson['next_task_id'] + entry_name = rjson["name"] + task_id = rjson["next_task_id"] # use mock_acquire set up in conftest.py - actions.by_name['mock_acquire'](entry_name, task_id) + actions.by_name["mock_acquire"](entry_name, task_id) acquistion = Acquisition.objects.get(task_id=task_id) sigmf_metadata = acquistion.sigmf_metadata assert sigmf_validate(sigmf_metadata) diff --git a/src/actions/tests/test_init.py b/src/actions/tests/test_init.py index acf03fbd..4c3218f2 100644 --- a/src/actions/tests/test_init.py +++ b/src/actions/tests/test_init.py @@ -43,7 +43,7 @@ def _test_load_from_yaml_check_error(yaml_to_write, expected_error): # load_from_yaml loads all `.yml` files in the passed directory, so do a # bit of setup to create an invalid yaml tempfile in a temporary directory with tempfile.TemporaryDirectory() as tmpdir: - with tempfile.NamedTemporaryFile(suffix='.yml', dir=tmpdir) as tmpfile: + with tempfile.NamedTemporaryFile(suffix=".yml", dir=tmpdir) as tmpfile: tmpfile.write(yaml_to_write) tmpfile.seek(0) # Now try to load the invalid yaml file, expecting an error diff --git a/src/authentication/apps.py b/src/authentication/apps.py index 9635c9df..372ba813 100644 --- a/src/authentication/apps.py +++ b/src/authentication/apps.py @@ -2,4 +2,4 @@ class AuthenticationConfig(AppConfig): - name = 'authentication' + name = "authentication" diff --git a/src/authentication/migrations/0001_initial.py b/src/authentication/migrations/0001_initial.py index a32c1ae8..89234407 100644 --- a/src/authentication/migrations/0001_initial.py +++ b/src/authentication/migrations/0001_initial.py @@ -10,35 +10,114 @@ class Migration(migrations.Migration): initial = True - dependencies = [ - ('auth', '0011_update_proxy_permissions'), - ] + dependencies = [("auth", "0011_update_proxy_permissions")] operations = [ migrations.CreateModel( - name='User', + name="User", fields=[ - ('id', 
models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), - ('password', models.CharField(max_length=128, verbose_name='password')), - ('last_login', models.DateTimeField(blank=True, null=True, verbose_name='last login')), - ('is_superuser', models.BooleanField(default=False, help_text='Designates that this user has all permissions without explicitly assigning them.', verbose_name='superuser status')), - ('username', models.CharField(error_messages={'unique': 'A user with that username already exists.'}, help_text='Required. 150 characters or fewer. Letters, digits and @/./+/-/_ only.', max_length=150, unique=True, validators=[django.contrib.auth.validators.UnicodeUsernameValidator()], verbose_name='username')), - ('first_name', models.CharField(blank=True, max_length=30, verbose_name='first name')), - ('last_name', models.CharField(blank=True, max_length=150, verbose_name='last name')), - ('is_staff', models.BooleanField(default=False, help_text='Designates whether the user can log into this admin site.', verbose_name='staff status')), - ('is_active', models.BooleanField(default=True, help_text='Designates whether this user should be treated as active. Unselect this instead of deleting accounts.', verbose_name='active')), - ('date_joined', models.DateTimeField(default=django.utils.timezone.now, verbose_name='date joined')), - ('email', models.EmailField(max_length=254, null=True)), - ('groups', models.ManyToManyField(blank=True, help_text='The groups this user belongs to. A user will get all permissions granted to each of their groups.', related_name='user_set', related_query_name='user', to='auth.Group', verbose_name='groups')), - ('user_permissions', models.ManyToManyField(blank=True, help_text='Specific permissions for this user.', related_name='user_set', related_query_name='user', to='auth.Permission', verbose_name='user permissions')), + ( + "id", + models.AutoField( + auto_created=True, + primary_key=True, + serialize=False, + verbose_name="ID", + ), + ), + ("password", models.CharField(max_length=128, verbose_name="password")), + ( + "last_login", + models.DateTimeField( + blank=True, null=True, verbose_name="last login" + ), + ), + ( + "is_superuser", + models.BooleanField( + default=False, + help_text="Designates that this user has all permissions without explicitly assigning them.", + verbose_name="superuser status", + ), + ), + ( + "username", + models.CharField( + error_messages={ + "unique": "A user with that username already exists." + }, + help_text="Required. 150 characters or fewer. Letters, digits and @/./+/-/_ only.", + max_length=150, + unique=True, + validators=[ + django.contrib.auth.validators.UnicodeUsernameValidator() + ], + verbose_name="username", + ), + ), + ( + "first_name", + models.CharField( + blank=True, max_length=30, verbose_name="first name" + ), + ), + ( + "last_name", + models.CharField( + blank=True, max_length=150, verbose_name="last name" + ), + ), + ( + "is_staff", + models.BooleanField( + default=False, + help_text="Designates whether the user can log into this admin site.", + verbose_name="staff status", + ), + ), + ( + "is_active", + models.BooleanField( + default=True, + help_text="Designates whether this user should be treated as active. 
Unselect this instead of deleting accounts.", + verbose_name="active", + ), + ), + ( + "date_joined", + models.DateTimeField( + default=django.utils.timezone.now, verbose_name="date joined" + ), + ), + ("email", models.EmailField(max_length=254, null=True)), + ( + "groups", + models.ManyToManyField( + blank=True, + help_text="The groups this user belongs to. A user will get all permissions granted to each of their groups.", + related_name="user_set", + related_query_name="user", + to="auth.Group", + verbose_name="groups", + ), + ), + ( + "user_permissions", + models.ManyToManyField( + blank=True, + help_text="Specific permissions for this user.", + related_name="user_set", + related_query_name="user", + to="auth.Permission", + verbose_name="user permissions", + ), + ), ], options={ - 'verbose_name': 'user', - 'verbose_name_plural': 'users', - 'abstract': False, + "verbose_name": "user", + "verbose_name_plural": "users", + "abstract": False, }, - managers=[ - ('objects', django.contrib.auth.models.UserManager()), - ], - ), + managers=[("objects", django.contrib.auth.models.UserManager())], + ) ] diff --git a/src/authentication/models.py b/src/authentication/models.py index 85b6e9f9..e0f7d559 100644 --- a/src/authentication/models.py +++ b/src/authentication/models.py @@ -7,6 +7,7 @@ class User(AbstractUser): """A user of the sensor.""" + email = models.EmailField(null=True) diff --git a/src/authentication/serializers.py b/src/authentication/serializers.py index 517a23c8..bec3df83 100644 --- a/src/authentication/serializers.py +++ b/src/authentication/serializers.py @@ -7,37 +7,39 @@ class UserProfileSerializer(serializers.HyperlinkedModelSerializer): """Public user account view.""" + schedule_entries = serializers.SerializerMethodField( - help_text="The list of schedule entries owned by the user") + help_text="The list of schedule entries owned by the user" + ) class Meta: model = User - fields = ('self', 'username', 'is_active', 'date_joined', 'last_login', - 'schedule_entries') + fields = ( + "self", + "username", + "is_active", + "date_joined", + "last_login", + "schedule_entries", + ) extra_kwargs = { - 'self': { - 'view_name': 'user-detail' - }, - 'is_active': { - 'initial': True - }, - 'schedule_entries': { - 'view_name': 'schedule-detail' - }, + "self": {"view_name": "user-detail"}, + "is_active": {"initial": True}, + "schedule_entries": {"view_name": "schedule-detail"}, } - read_only_fields = ('schedule_entries', 'date_joined', 'last_login') + read_only_fields = ("schedule_entries", "date_joined", "last_login") def get_schedule_entries(self, obj): """Filter private schedule entries if requester is not an admin.""" - request = self.context['request'] + request = self.context["request"] entries = obj.schedule_entries.get_queryset() if not request.user.is_staff: entries = entries.filter(is_private=False) urls = [] for entry in entries: - route = 'schedule-detail' - kws = {'pk': entry.name} + route = "schedule-detail" + kws = {"pk": entry.name} kws.update(V1) urls.append(reverse(route, kwargs=kws, request=request)) @@ -46,6 +48,7 @@ def get_schedule_entries(self, obj): class UserDetailsSerializer(UserProfileSerializer): """Private user account view.""" + auth_token = serializers.SerializerMethodField() has_usable_password = serializers.SerializerMethodField() is_admin = serializers.SerializerMethodField() @@ -55,9 +58,12 @@ def get_is_admin(self, obj): class Meta(UserProfileSerializer.Meta): fields = UserProfileSerializer.Meta.fields + ( - 'email', 'auth_token', 'has_usable_password', 
'is_admin') - read_only_fields = UserProfileSerializer.Meta.read_only_fields + ( - 'auth_token', ) + "email", + "auth_token", + "has_usable_password", + "is_admin", + ) + read_only_fields = UserProfileSerializer.Meta.read_only_fields + ("auth_token",) def get_auth_token(self, obj): return obj.auth_token.key diff --git a/src/authentication/tests/test_list_view.py b/src/authentication/tests/test_list_view.py index 08723823..381786c4 100644 --- a/src/authentication/tests/test_list_view.py +++ b/src/authentication/tests/test_list_view.py @@ -9,9 +9,9 @@ def test_user_cannot_view_private_entry_in_list(admin_client, user_client): """An unprivileged user should not be able to see private entries.""" post_schedule(admin_client, TEST_PRIVATE_SCHEDULE_ENTRY) - url = reverse('user-list', kwargs=V1) + url = reverse("user-list", kwargs=V1) response = user_client.get(url, **HTTPS_KWARG) rjson = validate_response(response, status.HTTP_200_OK) - results = rjson['results'] - admin_rjson = [user for user in results if user['username'] == 'admin'][0] - assert admin_rjson['schedule_entries'] == [] + results = rjson["results"] + admin_rjson = [user for user in results if user["username"] == "admin"][0] + assert admin_rjson["schedule_entries"] == [] diff --git a/src/authentication/urls.py b/src/authentication/urls.py index 89c95202..0c4957f9 100644 --- a/src/authentication/urls.py +++ b/src/authentication/urls.py @@ -3,7 +3,7 @@ from .views import UserListView, UserInstanceView urlpatterns = ( - path('', UserListView.as_view(), name='user-list'), - path('me/', UserInstanceView.as_view(), name='user-detail'), - path('/', UserInstanceView.as_view(), name='user-detail'), + path("", UserListView.as_view(), name="user-list"), + path("me/", UserInstanceView.as_view(), name="user-detail"), + path("/", UserInstanceView.as_view(), name="user-detail"), ) diff --git a/src/authentication/views.py b/src/authentication/views.py index ae7e685b..c7ef3202 100644 --- a/src/authentication/views.py +++ b/src/authentication/views.py @@ -2,8 +2,11 @@ from rest_framework.generics import get_object_or_404 from rest_framework.generics import ( - ListAPIView, ListCreateAPIView, RetrieveAPIView, - RetrieveUpdateDestroyAPIView) + ListAPIView, + ListCreateAPIView, + RetrieveAPIView, + RetrieveUpdateDestroyAPIView, +) from rest_framework.permissions import IsAdminUser from rest_framework.settings import api_settings from rest_framework.views import APIView @@ -31,24 +34,24 @@ def dispatch(self, request, *args, **kwargs): class UserDetailsListView(ListCreateAPIView): """View user details and create users.""" - queryset = User.objects.all().order_by('-date_joined') + + queryset = User.objects.all().order_by("-date_joined") serializer_class = UserDetailsSerializer - permission_classes = api_settings.DEFAULT_PERMISSION_CLASSES + [ - IsAdminUser, - ] + permission_classes = api_settings.DEFAULT_PERMISSION_CLASSES + [IsAdminUser] class UserProfilesListView(ListAPIView): """View public profiles of all registered users.""" - queryset = User.objects.all().order_by('-date_joined') + + queryset = User.objects.all().order_by("-date_joined") serializer_class = UserProfileSerializer class UserInstanceView(APIView): def dispatch(self, request, *args, **kwargs): - kwargs.pop('version', None) + kwargs.pop("version", None) if not kwargs: # /users/me - kwargs = {'pk': request.user.pk} + kwargs = {"pk": request.user.pk} requested_user = get_object_or_404(User.objects.all(), **kwargs) if request.user.is_staff or request.user == requested_user: diff --git 
a/src/capabilities/__init__.py b/src/capabilities/__init__.py index 7af489f6..4743876b 100644 --- a/src/capabilities/__init__.py +++ b/src/capabilities/__init__.py @@ -16,4 +16,4 @@ def load_from_json(fname): logger.exception("Unable to load JSON file {}".format(fname)) -capabilities['sensor_definition'] = load_from_json(SENSOR_DEFINITION_FILE) +capabilities["sensor_definition"] = load_from_json(SENSOR_DEFINITION_FILE) diff --git a/src/capabilities/apps.py b/src/capabilities/apps.py index 75492209..acd07f83 100644 --- a/src/capabilities/apps.py +++ b/src/capabilities/apps.py @@ -5,4 +5,4 @@ class CapabilitiesConfig(AppConfig): - name = 'capabilities' + name = "capabilities" diff --git a/src/capabilities/urls.py b/src/capabilities/urls.py index 2073a01a..8bfd737f 100644 --- a/src/capabilities/urls.py +++ b/src/capabilities/urls.py @@ -2,6 +2,4 @@ from .views import capabilities_view -urlpatterns = ( - path('', capabilities_view, name='capabilities'), -) +urlpatterns = (path("", capabilities_view, name="capabilities"),) diff --git a/src/capabilities/views.py b/src/capabilities/views.py index ea0aa104..c24deb5f 100644 --- a/src/capabilities/views.py +++ b/src/capabilities/views.py @@ -18,14 +18,13 @@ def get_actions(include_admin_actions=False): if actions.by_name[action].admin_only and not include_admin_actions: continue - serialized_actions.append({ - 'name': - action, - 'summary': - actions.get_summary(actions.by_name[action]), - 'description': - actions.by_name[action].description - }) + serialized_actions.append( + { + "name": action, + "summary": actions.get_summary(actions.by_name[action]), + "description": actions.by_name[action].description, + } + ) return serialized_actions @@ -34,5 +33,5 @@ def get_actions(include_admin_actions=False): def capabilities_view(request, version, format=None): """The capabilities of the sensor.""" filtered_actions = get_actions(include_admin_actions=request.user.is_staff) - capabilities['actions'] = filtered_actions + capabilities["actions"] = filtered_actions return Response(capabilities) diff --git a/src/conftest.py b/src/conftest.py index 4eea93b0..55739a2f 100644 --- a/src/conftest.py +++ b/src/conftest.py @@ -8,20 +8,21 @@ def pytest_addoption(parser): parser.addoption( - '--update-api-docs', - action='store_true', + "--update-api-docs", + action="store_true", default=False, - help="Ensure API docs match code") + help="Ensure API docs match code", + ) def pytest_collection_modifyitems(config, items): """Skips `test_api_docs_up_to_date` if CLI option not passed.""" - if config.getoption('--update-api-docs'): + if config.getoption("--update-api-docs"): # --update-api-docs given on cli: do not skip api doc generation return skip_api_gen = pytest.mark.skip(reason="didn't pass --update-api-docs") for item in items: - if 'update_api_docs' in item.keywords: + if "update_api_docs" in item.keywords: item.add_marker(skip_api_gen) @@ -43,15 +44,15 @@ def testclock(): """Instantiate test scheduler with fake request context and testclock.""" s = scheduler.scheduler.Scheduler() - s.request = rf.post('mock://cburl/schedule') + s.request = rf.post("mock://cburl/schedule") return s @pytest.fixture def user(db): """A normal user.""" - username = 'test' - password = 'password' + username = "test" + password = "password" user, created = User.objects.get_or_create(username=username) @@ -76,8 +77,8 @@ def user_client(db, user): @pytest.fixture def alt_user(db): """A normal user.""" - username = 'alt_test' - password = 'password' +
username = "alt_test" + password = "password" user, created = User.objects.get_or_create(username=username) @@ -111,15 +112,16 @@ def alt_admin_user(db, django_user_model, django_username_field): username_field = django_username_field try: - user = UserModel._default_manager.get(**{username_field: 'alt_admin'}) + user = UserModel._default_manager.get(**{username_field: "alt_admin"}) except UserModel.DoesNotExist: extra_fields = {} - if username_field != 'username': - extra_fields[username_field] = 'alt_admin' + if username_field != "username": + extra_fields[username_field] = "alt_admin" user = UserModel._default_manager.create_superuser( - 'alt_admin', 'alt_admin@example.com', 'password', **extra_fields) + "alt_admin", "alt_admin@example.com", "password", **extra_fields + ) return user @@ -130,18 +132,19 @@ def alt_admin_client(db, alt_admin_user): from django.test.client import Client client = Client() - client.login(username=alt_admin_user.username, password='password') + client.login(username=alt_admin_user.username, password="password") return client # Add mock acquisitions for tests mock_acquire = actions.acquire_single_freq_fft.SingleFrequencyFftAcquisition( - name='mock_acquire', + name="mock_acquire", frequency=1e9, # 1 GHz gain=40, sample_rate=1e6, # 1 MSa/s fft_size=16, - nffts=11) -actions.by_name['mock_acquire'] = mock_acquire + nffts=11, +) +actions.by_name["mock_acquire"] = mock_acquire actions.init() diff --git a/src/hardware/apps.py b/src/hardware/apps.py index 3413a19a..54c0c35d 100644 --- a/src/hardware/apps.py +++ b/src/hardware/apps.py @@ -5,4 +5,4 @@ class HardwareConfig(AppConfig): - name = 'hardware' + name = "hardware" diff --git a/src/hardware/gps_iface.py b/src/hardware/gps_iface.py index a1218f1d..63ec5b5a 100644 --- a/src/hardware/gps_iface.py +++ b/src/hardware/gps_iface.py @@ -22,8 +22,8 @@ def get_lat_long(timeout_s=1): logger.debug("Waiting for GPS lock... 
") start = time() gps_locked = False - while (time() - start < timeout_s and not gps_locked): - gps_locked = usrp.get_mboard_sensor('gps_locked').to_bool() + while time() - start < timeout_s and not gps_locked: + gps_locked = usrp.get_mboard_sensor("gps_locked").to_bool() sleep(0.1) if not gps_locked: @@ -32,20 +32,20 @@ def get_lat_long(timeout_s=1): logger.debug("GPS locked.") - if 'gpsdo' not in usrp.get_time_sources(0): + if "gpsdo" not in usrp.get_time_sources(0): logger.warning("No GPSDO time source detected") return None - usrp.set_time_source('gpsdo') + usrp.set_time_source("gpsdo") - if usrp.get_time_source(0) != 'gpsdo': + if usrp.get_time_source(0) != "gpsdo": logger.error("Failed to set GPSDO time source") return None # Poll get_time_last_pss() until change is seen last_t = int(usrp.get_time_last_pps().get_real_secs()) now_t = int(usrp.get_time_last_pps().get_real_secs()) - while (last_t != now_t): + while last_t != now_t: sleep(0.05) now_t = int(usrp.get_time_last_pps().get_real_secs()) @@ -53,27 +53,27 @@ def get_lat_long(timeout_s=1): sleep(0.1) # To use gr-uhd instead of UHD python driver, this line needs to change # gps_t = uhd.time_spec_t(usrp.get_mboard_sensor('gps_time').to_int() + 1) - gps_t = uhd.types.TimeSpec(usrp.get_mboard_sensor('gps_time').to_int() + 1) + gps_t = uhd.types.TimeSpec(usrp.get_mboard_sensor("gps_time").to_int() + 1) usrp.set_time_next_pps(gps_t) dt = datetime.fromtimestamp(gps_t.get_real_secs()) - date_cmd = ['date', '-s', '{:}'.format(dt.strftime('%Y/%m/%d %H:%M:%S'))] + date_cmd = ["date", "-s", "{:}".format(dt.strftime("%Y/%m/%d %H:%M:%S"))] subprocess.check_output(date_cmd, shell=True) logger.info("Set USRP and system time to GPS time {}".format(dt.ctime())) - if 'gpsdo' not in usrp.get_clock_sources(0): + if "gpsdo" not in usrp.get_clock_sources(0): logger.warning("No GPSDO clock source detected") return None - usrp.set_clock_source('gpsdo') + usrp.set_clock_source("gpsdo") - if usrp.get_clock_source(0) != 'gpsdo': + if usrp.get_clock_source(0) != "gpsdo": logger.error("Failed to set GPSDO clock source") return None start = time() ref_locked = False - while (time() - start < timeout_s and not ref_locked): - ref_locked = usrp.get_mboard_sensor('ref_locked').to_bool() + while time() - start < timeout_s and not ref_locked: + ref_locked = usrp.get_mboard_sensor("ref_locked").to_bool() if not ref_locked: msg = "Timed out waiting for clock to lock to GPSDO reference" @@ -83,12 +83,27 @@ def get_lat_long(timeout_s=1): logger.debug("Clock locked to GPSDO reference") try: - gpgga = usrp.get_mboard_sensor('gps_gpgga').value - (fmt, utc, lat, ns, lng, ew, qual, nsats, hdil, alt, altu, gdalsep, - gdalsepu, age, refid) = gpgga.split(',') + gpgga = usrp.get_mboard_sensor("gps_gpgga").value + ( + fmt, + utc, + lat, + ns, + lng, + ew, + qual, + nsats, + hdil, + alt, + altu, + gdalsep, + gdalsepu, + age, + refid, + ) = gpgga.split(",") latitude = float(lat) - if ns == 'S': + if ns == "S": latitude = -latitude latitude_degs = int(latitude / 100) @@ -96,7 +111,7 @@ def get_lat_long(timeout_s=1): latitude_dd = latitude_degs + (latitude_mins / 60) longitude = float(lng) - if ew == 'W': + if ew == "W": longitude = -longitude longitude_degs = int(longitude / 100) diff --git a/src/hardware/mocks/usrp_block.py b/src/hardware/mocks/usrp_block.py index 85826f14..b8a386a0 100644 --- a/src/hardware/mocks/usrp_block.py +++ b/src/hardware/mocks/usrp_block.py @@ -4,8 +4,8 @@ import numpy as np -tune_result_params = ['actual_dsp_freq', 'actual_rf_freq'] -MockTuneResult = 
namedtuple('MockTuneResult', tune_result_params) +tune_result_params = ["actual_dsp_freq", "actual_rf_freq"] +MockTuneResult = namedtuple("MockTuneResult", tune_result_params) class MockUsrp(object): diff --git a/src/hardware/scale_factors.py b/src/hardware/scale_factors.py index fefccff1..31ba1383 100644 --- a/src/hardware/scale_factors.py +++ b/src/hardware/scale_factors.py @@ -48,25 +48,25 @@ def get_power_scale_factor(self, lo_frequency, gain): f_div_min = self.frequencies[0] f_div_max = self.frequencies[-1] for div in self.divisions: - if f >= div['upper_bound']: - f_div_min = div['upper_bound'] + if f >= div["upper_bound"]: + f_div_min = div["upper_bound"] else: # Check if we are in the division - if f > div['lower_bound']: + if f > div["lower_bound"]: logger.warning("SDR tuned to within a division:") logger.warning(" LO frequency: {}".format(f)) msg = " Division: [{},{}]" - lb = div['lower_bound'] - ub = div['upper_bound'] + lb = div["lower_bound"] + ub = div["upper_bound"] msg = msg.format(lb, ub) logger.warning(msg) msg = "Assumed scale factor of lower boundary." logger.warning(msg) - f_div_min = div['lower_bound'] - f_div_max = div['lower_bound'] + f_div_min = div["lower_bound"] + f_div_max = div["lower_bound"] bypass_freq_interpolation = True else: - f_div_max = div['lower_bound'] + f_div_max = div["lower_bound"] break # Determine the index associated with the frequency/ies @@ -85,18 +85,33 @@ def get_power_scale_factor(self, lo_frequency, gain): scale_factor = self.factors[f_i][g_i] elif bypass_freq_interpolation: scale_factor = self.interpolate_1d( - g, self.gains[g_i], self.gains[g_i + 1], - self.factors[f_i][g_i], self.factors[f_i][g_i + 1]) + g, + self.gains[g_i], + self.gains[g_i + 1], + self.factors[f_i][g_i], + self.factors[f_i][g_i + 1], + ) elif bypass_gain_interpolation: scale_factor = self.interpolate_1d( - f, self.frequencies[f_i], self.frequencies[f_i + 1], - self.factors[f_i][g_i], self.factors[f_i + 1][g_i]) + f, + self.frequencies[f_i], + self.frequencies[f_i + 1], + self.factors[f_i][g_i], + self.factors[f_i + 1][g_i], + ) else: scale_factor = self.interpolate_2d( - f, g, self.frequencies[f_i], self.frequencies[f_i + 1], - self.gains[g_i], self.gains[g_i + 1], self.factors[f_i][g_i], - self.factors[f_i + 1][g_i], self.factors[f_i][g_i + 1], - self.factors[f_i + 1][g_i + 1]) + f, + g, + self.frequencies[f_i], + self.frequencies[f_i + 1], + self.gains[g_i], + self.gains[g_i + 1], + self.factors[f_i][g_i], + self.factors[f_i + 1][g_i], + self.factors[f_i][g_i + 1], + self.factors[f_i + 1][g_i + 1], + ) logger.debug("Using power scale factor: {}".format(scale_factor)) return scale_factor @@ -104,7 +119,7 @@ def get_power_scale_factor(self, lo_frequency, gain): def get_scale_factor(self, lo_frequency, gain): """Get the linear scale factor for the current setup.""" psf = self.get_power_scale_factor(lo_frequency, gain) - sf = 10**(psf / 20.0) + sf = 10 ** (psf / 20.0) logger.debug("Using linear scale factor: {}".format(sf)) return sf @@ -124,21 +139,21 @@ def load_from_json(fname): sf = json.load(f) # Dimensions of the factors array is not validated by the schema - factor_rows = len(sf['factors']) - nfrequencies = len(sf['frequencies']) - ngains = len(sf['gains']) + factor_rows = len(sf["factors"]) + nfrequencies = len(sf["frequencies"]) + ngains = len(sf["gains"]) msg = "Number of rows in factors 2D array ({}) ".format(factor_rows) msg += "not equal to number of frequencies ({})".format(nfrequencies) - assert len(sf['factors']) == len(sf['frequencies']), msg + assert 
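`get_power_scale_factor` above selects calibration grid points and delegates to `interpolate_1d`/`interpolate_2d`, whose bodies the hunk does not show. Under the standard linear/bilinear reading of those names, they reduce to the following sketch (argument order matching the call sites above):

    def interpolate_1d(x, x1, x2, y1, y2):
        # Linear interpolation between two calibration points
        return y1 + (y2 - y1) * (x - x1) / (x2 - x1)

    def interpolate_2d(x, y, x1, x2, y1, y2, z11, z21, z12, z22):
        # Bilinear: interpolate along x at each y bound, then along y
        z_y1 = interpolate_1d(x, x1, x2, z11, z21)
        z_y2 = interpolate_1d(x, x1, x2, z12, z22)
        return interpolate_1d(y, y1, y2, z_y1, z_y2)

    # The power-to-linear conversion used by get_scale_factor:
    linear_sf = 10 ** (-30.0 / 20.0)  # a -30 dB power scale factor -> ~0.0316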
len(sf["factors"]) == len(sf["frequencies"]), msg msg = "factors row {!r} isn't the same length as the `gains` array ({})" - for row in sf['factors']: + for row in sf["factors"]: assert len(row) == ngains, format(row, ngains) # Ensure frequencies and gains arrays are already sorted - assert sf['frequencies'] == sorted(sf['frequencies']), "freqs not sorted" - assert sf['gains'] == sorted(sf['gains']), "gains not sorted" + assert sf["frequencies"] == sorted(sf["frequencies"]), "freqs not sorted" + assert sf["gains"] == sorted(sf["gains"]), "gains not sorted" return ScaleFactors(**sf) diff --git a/src/hardware/tests/test_scale_factors.py b/src/hardware/tests/test_scale_factors.py index e28b1599..986f0437 100644 --- a/src/hardware/tests/test_scale_factors.py +++ b/src/hardware/tests/test_scale_factors.py @@ -7,29 +7,26 @@ from hardware import scale_factors from sensor.settings import REPO_ROOT -RESOURCES_DIR = path.join(REPO_ROOT, './src/hardware/tests/resources') -TEST_SCALE_FACTORS_FILE = path.join(RESOURCES_DIR, 'test_scale_factors.json') +RESOURCES_DIR = path.join(REPO_ROOT, "./src/hardware/tests/resources") +TEST_SCALE_FACTORS_FILE = path.join(RESOURCES_DIR, "test_scale_factors.json") sfs = scale_factors.load_from_json(TEST_SCALE_FACTORS_FILE) @pytest.mark.parametrize( - 'sf,f,g', + "sf,f,g", [ # (scale_factor, lo_frequency, gain) - # Outer boundary (-7.47813046479, 70e6, 0), (7.50256094609, 6e9, 0), (-76.2557869767, 70e6, 76), (-65.3006507223, 6e9, 76), - # Beyond limits (-7.47813046479, 50e6, 0), (7.50256094609, 7e9, 0), (-7.47813046479, 70e6, -10), (-76.2557869767, 70e6, 100), - # At division (-5.40071178476, 1299974999, 0), (-5.41274003389, 1300974999, 0), @@ -37,12 +34,10 @@ (-76.3832149678, 2200468999, 100), (5.81812380813, 3999124997, -10), (-69.7131434755, 4000123997, 100), - # In division (-22.8093940482, 1300000000, 20), (-38.0043597179, 2200000000, 40), (-47.2748864466, 4000000000, 60), - # Interpolated (-11.5030015054, 100e6, 5), (-30.0076949404, 600e6, 25), @@ -51,7 +46,8 @@ (-32.2959584348, 3000e6, 37), (-51.2041078009, 4100e6, 58), (-11.4556252931, 5000e6, 19), - ]) + ], +) def test_scale_factor_calculation(sf, f, g): """Test that the default scale factor is used if not file was loaded.""" diff --git a/src/hardware/tests/test_usrp.py b/src/hardware/tests/test_usrp.py index 547b8b04..3d446564 100644 --- a/src/hardware/tests/test_usrp.py +++ b/src/hardware/tests/test_usrp.py @@ -77,4 +77,4 @@ def test_scaled_data_acquisition(): msg = "Scale factor not correctly applied to acquisition.\n" msg += "Algorithm: {}\n".format(datum / 1e6) msg += "Expected: {}\n".format(true_val / 1e6) - assert (datum == true_val), msg + assert datum == true_val, msg diff --git a/src/hardware/usrp_iface.py b/src/hardware/usrp_iface.py index a5b1a678..3dc27bcc 100644 --- a/src/hardware/usrp_iface.py +++ b/src/hardware/usrp_iface.py @@ -42,8 +42,8 @@ def connect(sf_file=settings.SCALE_FACTORS_FILE): # -> bool: random = settings.MOCK_RADIO_RANDOM usrp = MockUsrp(randomize_values=random) is_available = True - RESOURCES_DIR = path.join(REPO_ROOT, './src/hardware/tests/resources') - sf_file = path.join(RESOURCES_DIR, 'test_scale_factors.json') + RESOURCES_DIR = path.join(REPO_ROOT, "./src/hardware/tests/resources") + sf_file = path.join(RESOURCES_DIR, "test_scale_factors.json") else: if is_available and radio is not None: return True @@ -54,7 +54,7 @@ def connect(sf_file=settings.SCALE_FACTORS_FILE): # -> bool: logger.warning("uhd not available - disabling radio") return False - usrp_args = 'type=b200' # 
find any b-series device + usrp_args = "type=b200" # find any b-series device try: usrp = uhd.usrp.MultiUSRP(usrp_args) @@ -158,24 +158,25 @@ def recompute_scale_factor(self): return self.scale_factor = self.scale_factors.get_scale_factor( - lo_frequency=self.frequency, gain=self.gain) + lo_frequency=self.frequency, gain=self.gain + ) def acquire_samples(self, n, nskip=200000, retries=5): # -> np.ndarray: """Acquire nskip+n samples and return the last n""" o_retries = retries while True: samples = self.usrp.recv_num_samps( - n + nskip, # number of samples - self.frequency, # center frequency in Hz - self.sample_rate, # sample rate in samples per second - [0], # channel list - self.gain # gain in dB + n + nskip, # number of samples + self.frequency, # center frequency in Hz + self.sample_rate, # sample rate in samples per second + [0], # channel list + self.gain, # gain in dB ) # usrp.recv_num_samps returns a numpy array of shape # (n_channels, n_samples) and dtype complex64 assert samples.dtype == np.complex64 assert len(samples.shape) == 2 and samples.shape[0] == 1 - data = samples[0] # isolate data for channel 0 + data = samples[0] # isolate data for channel 0 data_len = len(data) data = data[nskip:] data = data * self.scale_factor diff --git a/src/manage.py b/src/manage.py index 17b4bd0e..e25962a9 100755 --- a/src/manage.py +++ b/src/manage.py @@ -17,10 +17,11 @@ raise ImportError( "Couldn't import Django. Are you sure it's installed and " "available on your PYTHONPATH environment variable? Did you " - "forget to activate a virtual environment?") + "forget to activate a virtual environment?" + ) raise execute_from_command_line(sys.argv) - if os.getenv('INSIDE_EMACS'): + if os.getenv("INSIDE_EMACS"): # runserver turns on term echo, which echoes twice in emacs shell - os.system('stty -echo') + os.system("stty -echo") diff --git a/src/schedule/apps.py b/src/schedule/apps.py index 4bb70791..a887cfa2 100644 --- a/src/schedule/apps.py +++ b/src/schedule/apps.py @@ -2,4 +2,4 @@ class ScheduleConfig(AppConfig): - name = 'schedule' + name = "schedule" diff --git a/src/schedule/migrations/0001_initial.py b/src/schedule/migrations/0001_initial.py index 52219865..c88a9504 100644 --- a/src/schedule/migrations/0001_initial.py +++ b/src/schedule/migrations/0001_initial.py @@ -11,42 +11,165 @@ class Migration(migrations.Migration): initial = True - dependencies = [ - migrations.swappable_dependency(settings.AUTH_USER_MODEL), - ] + dependencies = [migrations.swappable_dependency(settings.AUTH_USER_MODEL)] operations = [ migrations.CreateModel( - name='Request', + name="Request", fields=[ - ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), - ('scheme', models.CharField(blank=True, max_length=16, null=True)), - ('version', models.CharField(blank=True, max_length=16, null=True)), - ('host', models.CharField(blank=True, max_length=255, null=True)), + ( + "id", + models.AutoField( + auto_created=True, + primary_key=True, + serialize=False, + verbose_name="ID", + ), + ), + ("scheme", models.CharField(blank=True, max_length=16, null=True)), + ("version", models.CharField(blank=True, max_length=16, null=True)), + ("host", models.CharField(blank=True, max_length=255, null=True)), ], ), migrations.CreateModel( - name='ScheduleEntry', + name="ScheduleEntry", fields=[ - ('name', models.SlugField(help_text='[Required] The unique identifier used in URLs and filenames', primary_key=True, serialize=False)), - ('action', models.CharField(choices=[('acquire_700c_dl',
'acquire_700c_dl - Apply m4s detector over 300 1024-pt FFTs at 751.00 MHz.'), ('logger', 'logger - Log the message "running test {name}/{tid}".'), ('survey_700_band_iq', 'survey_700_band_iq - Capture time-domain IQ samples at 10 frequencies between')], help_text='[Required] The name of the action to be scheduled', max_length=50)), - ('priority', models.SmallIntegerField(default=10, help_text='Lower number is higher priority (default=10)', validators=[django.core.validators.MinValueValidator(-20), django.core.validators.MaxValueValidator(19)])), - ('start', models.BigIntegerField(blank=True, default=schedule.models.schedule_entry.next_schedulable_timefn, help_text="Absolute time (epoch) to start, or leave blank for 'now'")), - ('stop', models.BigIntegerField(blank=True, help_text="Absolute time (epoch) to stop, or leave blank for 'never'", null=True)), - ('interval', models.PositiveIntegerField(blank=True, help_text='Seconds between tasks, or leave blank to run once', null=True, validators=[django.core.validators.MinValueValidator(1)])), - ('is_active', models.BooleanField(default=True, help_text='Indicates whether the entry should be removed from the scheduler without removing it from the system')), - ('is_private', models.BooleanField(default=False, help_text='Indicates whether the entry, and resulting data, are only visible to admins')), - ('callback_url', models.URLField(blank=True, help_text='If given, the scheduler will POST a `TaskResult` JSON object to this URL after each task completes', null=True)), - ('next_task_time', models.BigIntegerField(editable=False, help_text='The time the next task is scheduled to be executed', null=True)), - ('next_task_id', models.IntegerField(default=1, editable=False, help_text='The id of the next task to be executed')), - ('created', models.DateTimeField(auto_now_add=True, help_text='The date the entry was created')), - ('modified', models.DateTimeField(auto_now=True, help_text='The date the entry was modified')), - ('owner', models.ForeignKey(editable=False, help_text='The name of the user who owns the entry', on_delete=django.db.models.deletion.CASCADE, related_name='schedule_entries', to=settings.AUTH_USER_MODEL)), - ('request', models.ForeignKey(editable=False, help_text='The request that created the entry', null=True, on_delete=django.db.models.deletion.CASCADE, to='schedule.Request')), + ( + "name", + models.SlugField( + help_text="[Required] The unique identifier used in URLs and filenames", + primary_key=True, + serialize=False, + ), + ), + ( + "action", + models.CharField( + choices=[ + ( + "acquire_700c_dl", + "acquire_700c_dl - Apply m4s detector over 300 1024-pt FFTs at 751.00 MHz.", + ), + ( + "logger", + 'logger - Log the message "running test {name}/{tid}".', + ), + ( + "survey_700_band_iq", + "survey_700_band_iq - Capture time-domain IQ samples at 10 frequencies between", + ), + ], + help_text="[Required] The name of the action to be scheduled", + max_length=50, + ), + ), + ( + "priority", + models.SmallIntegerField( + default=10, + help_text="Lower number is higher priority (default=10)", + validators=[ + django.core.validators.MinValueValidator(-20), + django.core.validators.MaxValueValidator(19), + ], + ), + ), + ( + "start", + models.BigIntegerField( + blank=True, + default=schedule.models.schedule_entry.next_schedulable_timefn, + help_text="Absolute time (epoch) to start, or leave blank for 'now'", + ), + ), + ( + "stop", + models.BigIntegerField( + blank=True, + help_text="Absolute time (epoch) to stop, or leave blank for 
'never'", + null=True, + ), + ), + ( + "interval", + models.PositiveIntegerField( + blank=True, + help_text="Seconds between tasks, or leave blank to run once", + null=True, + validators=[django.core.validators.MinValueValidator(1)], + ), + ), + ( + "is_active", + models.BooleanField( + default=True, + help_text="Indicates whether the entry should be removed from the scheduler without removing it from the system", + ), + ), + ( + "is_private", + models.BooleanField( + default=False, + help_text="Indicates whether the entry, and resulting data, are only visible to admins", + ), + ), + ( + "callback_url", + models.URLField( + blank=True, + help_text="If given, the scheduler will POST a `TaskResult` JSON object to this URL after each task completes", + null=True, + ), + ), + ( + "next_task_time", + models.BigIntegerField( + editable=False, + help_text="The time the next task is scheduled to be executed", + null=True, + ), + ), + ( + "next_task_id", + models.IntegerField( + default=1, + editable=False, + help_text="The id of the next task to be executed", + ), + ), + ( + "created", + models.DateTimeField( + auto_now_add=True, help_text="The date the entry was created" + ), + ), + ( + "modified", + models.DateTimeField( + auto_now=True, help_text="The date the entry was modified" + ), + ), + ( + "owner", + models.ForeignKey( + editable=False, + help_text="The name of the user who owns the entry", + on_delete=django.db.models.deletion.CASCADE, + related_name="schedule_entries", + to=settings.AUTH_USER_MODEL, + ), + ), + ( + "request", + models.ForeignKey( + editable=False, + help_text="The request that created the entry", + null=True, + on_delete=django.db.models.deletion.CASCADE, + to="schedule.Request", + ), + ), ], - options={ - 'db_table': 'schedule', - 'ordering': ('created',), - }, + options={"db_table": "schedule", "ordering": ("created",)}, ), ] diff --git a/src/schedule/models/request.py b/src/schedule/models/request.py index 28fbd917..aac0f6e8 100644 --- a/src/schedule/models/request.py +++ b/src/schedule/models/request.py @@ -10,6 +10,7 @@ class Request(models.Model): """Save enough of a request to be passed to reverse().""" + scheme = models.CharField(max_length=16, blank=True, null=True) version = models.CharField(max_length=16, blank=True, null=True) host = models.CharField(max_length=255, blank=True, null=True) diff --git a/src/schedule/models/schedule_entry.py b/src/schedule/models/schedule_entry.py index 4439cfe9..6d688060 100644 --- a/src/schedule/models/schedule_entry.py +++ b/src/schedule/models/schedule_entry.py @@ -66,78 +66,97 @@ class ScheduleEntry(models.Model): name = models.SlugField( primary_key=True, - help_text="[Required] The unique identifier used in URLs and filenames" + help_text="[Required] The unique identifier used in URLs and filenames", ) action = models.CharField( choices=actions.CHOICES, max_length=actions.MAX_LENGTH, - help_text="[Required] The name of the action to be scheduled") + help_text="[Required] The name of the action to be scheduled", + ) priority = models.SmallIntegerField( default=DEFAULT_PRIORITY, validators=(MinValueValidator(-20), MaxValueValidator(19)), - help_text=("Lower number is higher priority (default={})" - ).format(DEFAULT_PRIORITY)) + help_text=("Lower number is higher priority (default={})").format( + DEFAULT_PRIORITY + ), + ) start = models.BigIntegerField( blank=True, default=next_schedulable_timefn, - help_text="Absolute time (epoch) to start, or leave blank for 'now'") + help_text="Absolute time (epoch) to start, or leave 
blank for 'now'", + ) stop = models.BigIntegerField( null=True, blank=True, - help_text="Absolute time (epoch) to stop, or leave blank for 'never'") + help_text="Absolute time (epoch) to stop, or leave blank for 'never'", + ) interval = models.PositiveIntegerField( null=True, blank=True, - validators=(MinValueValidator(1), ), - help_text="Seconds between tasks, or leave blank to run once") + validators=(MinValueValidator(1),), + help_text="Seconds between tasks, or leave blank to run once", + ) is_active = models.BooleanField( default=True, editable=True, - help_text=("Indicates whether the entry should be removed from the " - "scheduler without removing it from the system")) + help_text=( + "Indicates whether the entry should be removed from the " + "scheduler without removing it from the system" + ), + ) is_private = models.BooleanField( default=False, editable=True, - help_text=("Indicates whether the entry, and resulting data, are only " - "visible to admins")) + help_text=( + "Indicates whether the entry, and resulting data, are only " + "visible to admins" + ), + ) callback_url = models.URLField( null=True, blank=True, - help_text=("If given, the scheduler will POST a `TaskResult` JSON " - "object to this URL after each task completes")) + help_text=( + "If given, the scheduler will POST a `TaskResult` JSON " + "object to this URL after each task completes" + ), + ) # read-only fields next_task_time = models.BigIntegerField( null=True, editable=False, - help_text="The time the next task is scheduled to be executed") + help_text="The time the next task is scheduled to be executed", + ) next_task_id = models.IntegerField( - default=1, - editable=False, - help_text="The id of the next task to be executed") + default=1, editable=False, help_text="The id of the next task to be executed" + ) created = models.DateTimeField( - auto_now_add=True, help_text="The date the entry was created") + auto_now_add=True, help_text="The date the entry was created" + ) modified = models.DateTimeField( - auto_now=True, help_text="The date the entry was modified") + auto_now=True, help_text="The date the entry was modified" + ) owner = models.ForeignKey( - 'authentication.User', + "authentication.User", editable=False, - related_name='schedule_entries', + related_name="schedule_entries", on_delete=models.CASCADE, - help_text="The name of the user who owns the entry") + help_text="The name of the user who owns the entry", + ) request = models.ForeignKey( - 'schedule.Request', + "schedule.Request", null=True, # null allowable for unit testing only editable=False, on_delete=models.CASCADE, - help_text="The request that created the entry") + help_text="The request that created the entry", + ) class Meta: - db_table = 'schedule' - ordering = ('created', ) + db_table = "schedule" + ordering = ("created",) def __init__(self, *args, **kwargs): - relative_stop = kwargs.pop('relative_stop', None) + relative_stop = kwargs.pop("relative_stop", None) super(ScheduleEntry, self).__init__(*args, **kwargs) @@ -215,6 +234,7 @@ def get_next_task_id(self): return next_task_id def __str__(self): - fmtstr = 'name={}, pri={}, start={}, stop={}, ival={}, action={}' - return fmtstr.format(self.name, self.priority, self.start, self.stop, - self.interval, self.action) + fmtstr = "name={}, pri={}, start={}, stop={}, ival={}, action={}" + return fmtstr.format( + self.name, self.priority, self.start, self.stop, self.interval, self.action + ) diff --git a/src/schedule/serializers.py b/src/schedule/serializers.py index 34478398..6bb8fd20 
100644
--- a/src/schedule/serializers.py
+++ b/src/schedule/serializers.py
@@ -5,14 +5,12 @@
 import actions
 from sensor import V1
-from sensor.utils import (get_datetime_from_timestamp,
-                          get_timestamp_from_datetime)
+from sensor.utils import get_datetime_from_timestamp, get_timestamp_from_datetime
 from .models import DEFAULT_PRIORITY, ScheduleEntry
 
 action_help = "[Required] The name of the action to be scheduled"
-priority_help = "Lower number is higher priority (default={})".format(
-    DEFAULT_PRIORITY)
+priority_help = "Lower number is higher priority (default={})".format(DEFAULT_PRIORITY)
 
 
 def datetimes_to_timestamps(validated_data):
@@ -48,13 +46,16 @@ def to_internal_value(self, dt_str):
 
 class ScheduleEntrySerializer(serializers.HyperlinkedModelSerializer):
     """Convert ScheduleEntry to and from JSON."""
+
     task_results = serializers.SerializerMethodField(
-        help_text="The list of results related to the entry")
+        help_text="The list of results related to the entry"
+    )
     start = DateTimeFromTimestampField(
         required=False,
         allow_null=True,
         default=None,
-        help_text="UTC time (ISO 8601) to start, or leave blank for 'now'")
+        help_text="UTC time (ISO 8601) to start, or leave blank for 'now'",
+    )
     stop = DateTimeFromTimestampField(
         required=False,
         allow_null=True,
@@ -62,7 +63,9 @@ class ScheduleEntrySerializer(serializers.HyperlinkedModelSerializer):
         label="Absolute stop",
         help_text=(
             "UTC time (ISO 8601) to stop, "
-            "or leave blank for 'never' (not valid with relative stop)"))
+            "or leave blank for 'never' (not valid with relative stop)"
+        ),
+    )
     relative_stop = serializers.IntegerField(
         required=False,
         write_only=True,
@@ -71,50 +74,71 @@ class ScheduleEntrySerializer(serializers.HyperlinkedModelSerializer):
         min_value=1,
         help_text=(
             "Integer seconds after start to stop, "
-            "or leave blank for 'never' (not valid with absolute stop)"))
+            "or leave blank for 'never' (not valid with absolute stop)"
+        ),
+    )
     next_task_time = DateTimeFromTimestampField(
-        read_only=True,
-        help_text="UTC time (ISO 8601) the next task is scheduled for")
+        read_only=True, help_text="UTC time (ISO 8601) the next task is scheduled for"
+    )
     # action choices is modified in schedule/views.py based on user
     action = serializers.ChoiceField(
         choices=actions.CHOICES,
-        help_text="[Required] The name of the action to be scheduled")
+        help_text="[Required] The name of the action to be scheduled",
+    )
     # priority min_value is modified in schedule/views.py based on user
     priority = serializers.IntegerField(
         required=False,
         allow_null=True,
         min_value=0,
         max_value=19,
-        help_text=priority_help)
+        help_text=priority_help,
+    )
     # validate_only is a serializer-only field
     validate_only = serializers.BooleanField(
         required=False,
         default=False,
-        help_text="Only validate the input, do not modify the schedule")
+        help_text="Only validate the input, do not modify the schedule",
+    )
 
     class Meta:
         model = ScheduleEntry
-        fields = ('self', 'name', 'action', 'priority', 'start', 'stop',
-                  'relative_stop', 'interval', 'is_active', 'is_private',
-                  'callback_url', 'next_task_time', 'next_task_id', 'created',
-                  'modified', 'owner', 'task_results', 'validate_only')
+        fields = (
+            "self",
+            "name",
+            "action",
+            "priority",
+            "start",
+            "stop",
+            "relative_stop",
+            "interval",
+            "is_active",
+            "is_private",
+            "callback_url",
+            "next_task_time",
+            "next_task_id",
+            "created",
+            "modified",
+            "owner",
+            "task_results",
+            "validate_only",
+        )
         extra_kwargs = {
-            'self': {
-                'view_name': 'schedule-detail',
-                'help_text': "The url of the entry"
+            "self": {
+ "view_name": "schedule-detail", + "help_text": "The url of the entry", + }, + "owner": { + "view_name": "user-detail", + "help_text": "The name of the user who owns the entry", }, - 'owner': { - 'view_name': 'user-detail', - 'help_text': "The name of the user who owns the entry" - } } - read_only_fields = ('next_task_time', 'is_private') - write_only_fields = ('relative_stop', 'validate_only') + read_only_fields = ("next_task_time", "is_private") + write_only_fields = ("relative_stop", "validate_only") def save(self, *args, **kwargs): """Don't save if validate_only is True.""" - if self.validated_data.get('validate_only'): + if self.validated_data.get("validate_only"): return super(ScheduleEntrySerializer, self).save(*args, **kwargs) @@ -126,16 +150,16 @@ def validate(self, data): got_absolute_stop = False got_relative_stop = False - if 'start' in data: - if data['start'] is None: - data.pop('start') + if "start" in data: + if data["start"] is None: + data.pop("start") else: got_start = True - if 'stop' in data and data['stop'] is not None: + if "stop" in data and data["stop"] is not None: got_absolute_stop = True - if 'relative_stop' in data and data['relative_stop'] is not None: + if "relative_stop" in data and data["relative_stop"] is not None: got_relative_stop = True if got_absolute_stop and got_relative_stop: @@ -144,47 +168,49 @@ def validate(self, data): if got_start and got_absolute_stop: # We should have timestamps at this point - assert type(data['start']) is int - assert type(data['stop']) is int - if data['stop'] <= data['start']: + assert type(data["start"]) is int + assert type(data["stop"]) is int + if data["stop"] <= data["start"]: err = "stop time is not after start" raise serializers.ValidationError(err) - if 'priority' in data and data['priority'] is None: - data.pop('priority') + if "priority" in data and data["priority"] is None: + data.pop("priority") - if 'validate_only' in data and data['validate_only'] is not True: - data.pop('validate_only') + if "validate_only" in data and data["validate_only"] is not True: + data.pop("validate_only") return data def get_task_results(self, obj): - request = self.context['request'] - kws = {'schedule_entry_name': obj.name} + request = self.context["request"] + kws = {"schedule_entry_name": obj.name} kws.update(V1) - url = reverse('task-result-list', kwargs=kws, request=request) + url = reverse("task-result-list", kwargs=kws, request=request) return url def to_internal_value(self, data): """Clean up input before starting validation.""" # Allow 'absolute_stop' to be a synonym for 'stop' - if 'absolute_stop' in data: - data['stop'] = data.pop('absolute_stop') + if "absolute_stop" in data: + data["stop"] = data.pop("absolute_stop") return super().to_internal_value(data) class AdminScheduleEntrySerializer(ScheduleEntrySerializer): """ScheduleEntrySerializer class for superusers.""" + action = serializers.ChoiceField( - choices=actions.CHOICES + actions.ADMIN_CHOICES, - help_text=action_help) + choices=actions.CHOICES + actions.ADMIN_CHOICES, help_text=action_help + ) priority = serializers.IntegerField( required=False, allow_null=True, min_value=-20, max_value=19, - help_text=priority_help) + help_text=priority_help, + ) class Meta(ScheduleEntrySerializer.Meta): - read_only_fields = ('next_task_time', ) + read_only_fields = ("next_task_time",) diff --git a/src/schedule/tests/test_admin_views.py b/src/schedule/tests/test_admin_views.py index e8894d7e..331b1c8e 100644 --- a/src/schedule/tests/test_admin_views.py +++ 
b/src/schedule/tests/test_admin_views.py @@ -1,53 +1,55 @@ from rest_framework import status from rest_framework.reverse import reverse -from schedule.tests.utils import (EMPTY_SCHEDULE_RESPONSE, TEST_SCHEDULE_ENTRY, - TEST_PRIVATE_SCHEDULE_ENTRY, post_schedule, - update_schedule) +from schedule.tests.utils import ( + EMPTY_SCHEDULE_RESPONSE, + TEST_SCHEDULE_ENTRY, + TEST_PRIVATE_SCHEDULE_ENTRY, + post_schedule, + update_schedule, +) from sensor import V1 from sensor.tests.utils import validate_response, HTTPS_KWARG def test_post_admin_private_schedule(admin_client): rjson = post_schedule(admin_client, TEST_PRIVATE_SCHEDULE_ENTRY) - entry_name = rjson['name'] - kws = {'pk': entry_name} + entry_name = rjson["name"] + kws = {"pk": entry_name} kws.update(V1) - entry_url = reverse('schedule-detail', kwargs=kws) + entry_url = reverse("schedule-detail", kwargs=kws) admin_user_respose = admin_client.get(entry_url, **HTTPS_KWARG) for k, v in TEST_PRIVATE_SCHEDULE_ENTRY.items(): assert rjson[k] == v - assert rjson['is_private'] + assert rjson["is_private"] validate_response(admin_user_respose, status.HTTP_200_OK) - assert admin_user_respose.data['is_private'] + assert admin_user_respose.data["is_private"] def test_admin_can_view_private_entry_in_list(admin_client): post_schedule(admin_client, TEST_PRIVATE_SCHEDULE_ENTRY) - url = reverse('schedule-list', kwargs=V1) + url = reverse("schedule-list", kwargs=V1) response = admin_client.get(url, **HTTPS_KWARG) rjson = validate_response(response, status.HTTP_200_OK) assert rjson != EMPTY_SCHEDULE_RESPONSE -def test_admin_can_view_all_entries(admin_client, user_client, - alt_admin_client): +def test_admin_can_view_all_entries(admin_client, user_client, alt_admin_client): # user schedule entry user_rjson = post_schedule(user_client, TEST_SCHEDULE_ENTRY) - user_entry_name = user_rjson['name'] - kws = {'pk': user_entry_name} + user_entry_name = user_rjson["name"] + kws = {"pk": user_entry_name} kws.update(V1) - user_url = reverse('schedule-detail', kwargs=kws) + user_url = reverse("schedule-detail", kwargs=kws) # alt admin user schedule entry - alt_admin_rjson = post_schedule(alt_admin_client, - TEST_PRIVATE_SCHEDULE_ENTRY) - alt_admin_entry_name = alt_admin_rjson['name'] - kws = {'pk': alt_admin_entry_name} + alt_admin_rjson = post_schedule(alt_admin_client, TEST_PRIVATE_SCHEDULE_ENTRY) + alt_admin_entry_name = alt_admin_rjson["name"] + kws = {"pk": alt_admin_entry_name} kws.update(V1) - alt_admin_url = reverse('schedule-detail', kwargs=kws) + alt_admin_url = reverse("schedule-detail", kwargs=kws) response = admin_client.get(user_url, **HTTPS_KWARG) validate_response(response, status.HTTP_200_OK) @@ -56,22 +58,20 @@ def test_admin_can_view_all_entries(admin_client, user_client, validate_response(response, status.HTTP_200_OK) -def test_admin_can_delete_all_entries(admin_client, user_client, - alt_admin_client): +def test_admin_can_delete_all_entries(admin_client, user_client, alt_admin_client): # user schedule entry user_rjson = post_schedule(user_client, TEST_SCHEDULE_ENTRY) - user_entry_name = user_rjson['name'] - kws = {'pk': user_entry_name} + user_entry_name = user_rjson["name"] + kws = {"pk": user_entry_name} kws.update(V1) - user_url = reverse('schedule-detail', kwargs=kws) + user_url = reverse("schedule-detail", kwargs=kws) # admin user schedule entry - alt_admin_rjson = post_schedule(alt_admin_client, - TEST_PRIVATE_SCHEDULE_ENTRY) - alt_admin_entry_name = alt_admin_rjson['name'] - kws = {'pk': alt_admin_entry_name} + alt_admin_rjson = 
post_schedule(alt_admin_client, TEST_PRIVATE_SCHEDULE_ENTRY) + alt_admin_entry_name = alt_admin_rjson["name"] + kws = {"pk": alt_admin_entry_name} kws.update(V1) - alt_admin_url = reverse('schedule-detail', kwargs=kws) + alt_admin_url = reverse("schedule-detail", kwargs=kws) response = admin_client.delete(user_url, **HTTPS_KWARG) validate_response(response, status.HTTP_204_NO_CONTENT) @@ -84,39 +84,39 @@ def test_admin_can_delete_all_entries(admin_client, user_client, validate_response(response, status.HTTP_404_NOT_FOUND) -def test_admin_can_modify_all_entries(admin_client, user_client, - alt_admin_client): +def test_admin_can_modify_all_entries(admin_client, user_client, alt_admin_client): # user schedule entry user_rjson = post_schedule(user_client, TEST_SCHEDULE_ENTRY) - user_entry_name = user_rjson['name'] + user_entry_name = user_rjson["name"] - admin_adjust_user_response = update_schedule(admin_client, user_entry_name, - TEST_PRIVATE_SCHEDULE_ENTRY) + admin_adjust_user_response = update_schedule( + admin_client, user_entry_name, TEST_PRIVATE_SCHEDULE_ENTRY + ) # admin user schedule entry - alt_admin_rjson = post_schedule(alt_admin_client, - TEST_PRIVATE_SCHEDULE_ENTRY) - alt_admin_entry_name = alt_admin_rjson['name'] + alt_admin_rjson = post_schedule(alt_admin_client, TEST_PRIVATE_SCHEDULE_ENTRY) + alt_admin_entry_name = alt_admin_rjson["name"] admin_adjust_alt_admin_response = update_schedule( - admin_client, alt_admin_entry_name, TEST_SCHEDULE_ENTRY) + admin_client, alt_admin_entry_name, TEST_SCHEDULE_ENTRY + ) validate_response(admin_adjust_user_response, status.HTTP_200_OK) - assert admin_adjust_user_response.data['is_private'] + assert admin_adjust_user_response.data["is_private"] validate_response(admin_adjust_alt_admin_response, status.HTTP_200_OK) - assert not admin_adjust_alt_admin_response.data['is_private'] + assert not admin_adjust_alt_admin_response.data["is_private"] def test_admin_can_use_negative_priority(admin_client): hipri = TEST_PRIVATE_SCHEDULE_ENTRY.copy() - hipri['priority'] = -20 + hipri["priority"] = -20 rjson = post_schedule(admin_client, hipri) - entry_name = rjson['name'] - kws = {'pk': entry_name} + entry_name = rjson["name"] + kws = {"pk": entry_name} kws.update(V1) - entry_url = reverse('schedule-detail', kwargs=kws) + entry_url = reverse("schedule-detail", kwargs=kws) admin_user_respose = admin_client.get(entry_url, **HTTPS_KWARG) - assert rjson['priority'] == -20 + assert rjson["priority"] == -20 validate_response(admin_user_respose, status.HTTP_200_OK) - assert admin_user_respose.data['is_private'] + assert admin_user_respose.data["is_private"] diff --git a/src/schedule/tests/test_models.py b/src/schedule/tests/test_models.py index dc066f1b..c7652b80 100644 --- a/src/schedule/tests/test_models.py +++ b/src/schedule/tests/test_models.py @@ -9,13 +9,15 @@ from schedule.models import ScheduleEntry, DEFAULT_PRIORITY -@pytest.mark.parametrize('test_input,future_t,expected', - [((0, 5, 1), 2, [[0, 1], [2, 3], [4]]), - ((1, 5, 2), 8, [[1, 3]])]) +@pytest.mark.parametrize( + "test_input,future_t,expected", + [((0, 5, 1), 2, [[0, 1], [2, 3], [4]]), ((1, 5, 2), 8, [[1, 3]])], +) def test_take_until(test_input, future_t, expected): start, stop, interval = test_input entry = ScheduleEntry( - name='t', start=start, stop=stop, interval=interval, action='logger') + name="t", start=start, stop=stop, interval=interval, action="logger" + ) initial_times = list(entry.get_remaining_times()) r = [] for t in count(future_t, future_t): @@ -29,33 +31,32 @@ def 
test_take_until(test_input, future_t, expected): def test_undefined_start_is_now(): - entry = ScheduleEntry(name='t', action='logger') + entry = ScheduleEntry(name="t", action="logger") now = utils.timefn() assert entry.start in (now - 1, now, now + 1) def test_undefined_stop_is_never(): - entry = ScheduleEntry(name='t', action='logger', interval=1) + entry = ScheduleEntry(name="t", action="logger", interval=1) assert entry.stop is None assert type(entry.get_remaining_times()) is itertools.count def test_relative_stop_becomes_absolute(): - e = ScheduleEntry( - name='t', start=20, relative_stop=10, interval=1, action='logger') + e = ScheduleEntry(name="t", start=20, relative_stop=10, interval=1, action="logger") assert e.start == 20 assert e.stop == 30 assert list(e.get_remaining_times()) == list(range(20, 30, 1)) def test_stop_before_start(): - e = ScheduleEntry(name='t', start=20, stop=10, interval=1, action='logger') + e = ScheduleEntry(name="t", start=20, stop=10, interval=1, action="logger") assert list(e.get_remaining_times()) == list(range(0)) def test_no_interval_is_one_shot(): """Leaving `interval` blank should indicate "one-shot" entry.""" - e = ScheduleEntry(name='t', action='logger') + e = ScheduleEntry(name="t", action="logger") remaining_times = list(e.get_remaining_times()) assert len(remaining_times) == 1 @@ -70,7 +71,7 @@ def test_no_interval_is_one_shot(): def test_no_interval_with_start_is_one_shot(): """Specifying start should not affect number of times.""" - e = ScheduleEntry(name='t', action='logger', start=1) + e = ScheduleEntry(name="t", action="logger", start=1) remaining_times = list(e.get_remaining_times()) assert len(remaining_times) == 1 @@ -86,42 +87,42 @@ def test_no_interval_with_start_is_one_shot(): def test_no_interval_future_start(testclock): """One-shot entry should wait for start.""" # recall current t=0 so start=1 is 1 second in the future - e = ScheduleEntry(name='t', action='logger', start=1) + e = ScheduleEntry(name="t", action="logger", start=1) assert not e.take_pending() def test_bad_interval_raises(): with pytest.raises(ValidationError): - ScheduleEntry(name='t', interval=-1, action='logger').clean_fields() + ScheduleEntry(name="t", interval=-1, action="logger").clean_fields() with pytest.raises(ValidationError): - ScheduleEntry(name='t', interval=0, action='logger').clean_fields() + ScheduleEntry(name="t", interval=0, action="logger").clean_fields() with pytest.raises(ValidationError): - ScheduleEntry(name='t', interval=0.1, action='logger').clean_fields() + ScheduleEntry(name="t", interval=0.1, action="logger").clean_fields() def test_bad_action_raises(): with pytest.raises(ValidationError): - ScheduleEntry(name='t', action='this_doesnt_exist').clean_fields() + ScheduleEntry(name="t", action="this_doesnt_exist").clean_fields() def test_bad_name_raises(): with pytest.raises(ValidationError): # whitespace - ScheduleEntry(name='test 1', action='logger').clean_fields() + ScheduleEntry(name="test 1", action="logger").clean_fields() with pytest.raises(ValidationError): # punctuation other than "_-" - ScheduleEntry(name='test1!', action='logger').clean_fields() + ScheduleEntry(name="test1!", action="logger").clean_fields() # ok - ScheduleEntry(name='_test-Stuff123', action='logger').clean_fields() + ScheduleEntry(name="_test-Stuff123", action="logger").clean_fields() def test_non_unique_name_raises(user): - ScheduleEntry(name='t', action='logger', owner=user).save() + ScheduleEntry(name="t", action="logger", owner=user).save() with 
pytest.raises(ValidationError):
-        ScheduleEntry(name='t', action='logger', owner=user).full_clean()
+        ScheduleEntry(name="t", action="logger", owner=user).full_clean()
 
 
 def test_defaults():
-    entry = ScheduleEntry(name='t', action='logger')
+    entry = ScheduleEntry(name="t", action="logger")
     assert entry.priority == DEFAULT_PRIORITY
     assert entry.start is not None
     assert entry.stop is None
@@ -130,4 +131,4 @@
 
 
 def test_str():
-    str(ScheduleEntry(name='t', action='logger'))
+    str(ScheduleEntry(name="t", action="logger"))
diff --git a/src/schedule/tests/test_serializers.py b/src/schedule/tests/test_serializers.py
index 52c066cb..9ac5ab2b 100644
--- a/src/schedule/tests/test_serializers.py
+++ b/src/schedule/tests/test_serializers.py
@@ -1,7 +1,6 @@
 import pytest
 
-from schedule.serializers import (
-    AdminScheduleEntrySerializer, ScheduleEntrySerializer)
+from schedule.serializers import AdminScheduleEntrySerializer, ScheduleEntrySerializer
 from sensor.utils import parse_datetime_str
 
 from .utils import post_schedule
@@ -14,90 +13,56 @@
 # Test that valid user input is valid
 @pytest.mark.django_db
 @pytest.mark.parametrize(
-    'entry_json',
+    "entry_json",
     [
         # A name and action should be the minimum acceptable entry
         # i.e., (one-shot, ASAP)
-        {
-            'name': 'test',
-            'action': 'logger'
-        },
+        {"name": "test", "action": "logger"},
         # Stop 10 seconds after starting, start ASAP
-        {
-            'name': 'test',
-            'action': 'logger',
-            'relative_stop': 10
-        },
+        {"name": "test", "action": "logger", "relative_stop": 10},
         # Min integer interval ok
-        {
-            'name': 'test',
-            'action': 'logger',
-            'interval': 10
-        },
+        {"name": "test", "action": "logger", "interval": 10},
         # Max priority ok
-        {
-            'name': 'test',
-            'action': 'logger',
-            'priority': 19
-        },
+        {"name": "test", "action": "logger", "priority": 19},
         # Min user priority ok
-        {
-            'name': 'test',
-            'action': 'logger',
-            'priority': 0
-        },
+        {"name": "test", "action": "logger", "priority": 0},
         # Stop 10 seconds after starting; start at absolute time
         {
-            'name': 'test',
-            'action': 'logger',
-            'start': '2018-03-16T17:12:25Z',
-            'relative_stop': 10,
+            "name": "test",
+            "action": "logger",
+            "start": "2018-03-16T17:12:25Z",
+            "relative_stop": 10,
         },
         # Start and stop at absolute time; equivalent to above
         {
-            'name': 'test',
-            'action': 'logger',
-            'start': '2018-03-16T17:12:25Z',
-            'absolute_stop': '2018-03-16T17:12:35Z',
+            "name": "test",
+            "action": "logger",
+            "start": "2018-03-16T17:12:25Z",
+            "absolute_stop": "2018-03-16T17:12:35Z",
         },
         # 'stop' and 'absolute_stop' are synonyms
-        {
-            'name': 'test',
-            'action': 'logger',
-            'stop': '2018-03-16T17:12:35.0Z'
-        },
+        {"name": "test", "action": "logger", "stop": "2018-03-16T17:12:35.0Z"},
         # Subseconds are optional
-        {
-            'name': 'test',
-            'action': 'logger',
-            'start': '2018-03-16T17:12:35Z'
-        },
+        {"name": "test", "action": "logger", "start": "2018-03-16T17:12:35Z"},
         # Sensor is timezone-aware
-        {
-            'name': 'test',
-            'action': 'logger',
-            'start': '2018-03-22T13:53:25-06:00'
-        },
+        {"name": "test", "action": "logger", "start": "2018-03-22T13:53:25-06:00"},
         # All non-boolean, non-required fields accept null to mean not defined
         {
-            'name': 'test',
-            'action': 'logger',
-            'start': None,
-            'absolute_stop': None,
-            'relative_stop': None,
-            'priority': None,
-            'start': None,
-            'start': None,
-            'interval': None,
-            'callback_url': None,
+            "name": "test",
+            "action": "logger",
+            "start": None,
+            "absolute_stop": None,
+            "relative_stop": None,
+            "priority": None,
+            "interval": None,
+            "callback_url": None,
         },
         # Explicit validate_only is valid
-        {
-            'name': 'test',
-            'action': 'logger',
-            'validate_only': False
-        },
-    ])
+        {"name": "test", "action": "logger", "validate_only": False},
+    ],
+)
 def test_valid_user_entries(entry_json, user):
     serializer = ScheduleEntrySerializer(data=entry_json)
     assert serializer.is_valid()
@@ -107,96 +72,58 @@
 # Test that valid admin input is valid
 @pytest.mark.django_db
 @pytest.mark.parametrize(
-    'entry_json',
+    "entry_json",
     [
         # A name and action should be the minimum acceptable entry
         # i.e., (one-shot, ASAP)
-        {
-            'name': 'test',
-            'action': 'logger'
-        },
+        {"name": "test", "action": "logger"},
         # Stop 10 seconds after starting, start ASAP
-        {
-            'name': 'test',
-            'action': 'logger',
-            'relative_stop': 10
-        },
+        {"name": "test", "action": "logger", "relative_stop": 10},
         # Min integer interval ok
-        {
-            'name': 'test',
-            'action': 'logger',
-            'interval': 10
-        },
+        {"name": "test", "action": "logger", "interval": 10},
         # Max priority ok
-        {
-            'name': 'test',
-            'action': 'logger',
-            'priority': 19
-        },
+        {"name": "test", "action": "logger", "priority": 19},
        # Min admin priority ok
-        {
-            'name': 'test',
-            'action': 'logger',
-            'priority': -20
-        },
+        {"name": "test", "action": "logger", "priority": -20},
         # Stop 10 seconds after starting; start at absolute time
         {
-            'name': 'test',
-            'action': 'logger',
-            'start': '2018-03-16T17:12:25Z',
-            'relative_stop': 10,
+            "name": "test",
+            "action": "logger",
+            "start": "2018-03-16T17:12:25Z",
+            "relative_stop": 10,
         },
         # Start and stop at absolute time; equivalent to above
         {
-            'name': 'test',
-            'action': 'logger',
-            'start': '2018-03-16T17:12:25Z',
-            'absolute_stop': '2018-03-16T17:12:35Z',
+            "name": "test",
+            "action": "logger",
+            "start": "2018-03-16T17:12:25Z",
+            "absolute_stop": "2018-03-16T17:12:35Z",
         },
         # 'stop' and 'absolute_stop' are synonyms
-        {
-            'name': 'test',
-            'action': 'logger',
-            'stop': '2018-03-16T17:12:35.0Z'
-        },
+        {"name": "test", "action": "logger", "stop": "2018-03-16T17:12:35.0Z"},
         # Subseconds are optional
-        {
-            'name': 'test',
-            'action': 'logger',
-            'start': '2018-03-16T17:12:35Z'
-        },
+        {"name": "test", "action": "logger", "start": "2018-03-16T17:12:35Z"},
         # Sensor is timezone-aware
-        {
-            'name': 'test',
-            'action': 'logger',
-            'start': '2018-03-22T13:53:25-06:00'
-        },
+        {"name": "test", "action": "logger", "start": "2018-03-22T13:53:25-06:00"},
         # All non-boolean, non-required fields accept null to mean not defined
         {
-            'name': 'test',
-            'action': 'logger',
-            'start': None,
-            'absolute_stop': None,
-            'relative_stop': None,
-            'priority': None,
-            'start': None,
-            'start': None,
-            'interval': None,
-            'callback_url': None,
+            "name": "test",
+            "action": "logger",
+            "start": None,
+            "absolute_stop": None,
+            "relative_stop": None,
+            "priority": None,
+            "interval": None,
+            "callback_url": None,
         },
         # Explicit validate_only is valid
-        {
-            'name': 'test',
-            'action': 'logger',
-            'validate_only': False
-        },
+        {"name": "test", "action": "logger", "validate_only": False},
         # Admin can create private entries
-        {
-            'name': 'test',
-            'action': 'logger',
-            'is_private': True
-        }
-    ])
+        {"name": "test", "action": "logger", "is_private": True},
+    ],
+)
 def test_valid_admin_entries(entry_json, user):
     serializer = AdminScheduleEntrySerializer(data=entry_json)
     assert serializer.is_valid()
@@ -206,93 +133,54 @@
 # Test that invalid user input is invalid
@pytest.mark.django_db @pytest.mark.parametrize( - 'entry_json', + "entry_json", [ # name is a required field - { - 'action': 'logger' - }, + {"action": "logger"}, # action is a required field - { - 'name': 'test' - }, + {"name": "test"}, # non-integer priority - { - 'name': 'test', - 'action': 'logger', - 'priority': 3.14 - }, + {"name": "test", "action": "logger", "priority": 3.14}, # priority less than min (for normal user) - { - 'name': 'test', - 'action': 'logger', - 'priority': -1 - }, + {"name": "test", "action": "logger", "priority": -1}, # priority greater than max (19) - { - 'name': 'test', - 'action': 'logger', - 'priority': 20 - }, + {"name": "test", "action": "logger", "priority": 20}, # non-integer interval - { - 'name': 'test', - 'action': 'logger', - 'interval': 3.14 - }, + {"name": "test", "action": "logger", "interval": 3.14}, # zero interval - { - 'name': 'test', - 'action': 'logger', - 'interval': 0 - }, + {"name": "test", "action": "logger", "interval": 0}, # negative interval - { - 'name': 'test', - 'action': 'logger', - 'interval': -1 - }, + {"name": "test", "action": "logger", "interval": -1}, # can't interpret both absolute and relative stop { - 'name': 'test', - 'action': 'logger', - 'start': '2018-03-16T17:12:25.0Z', - 'absolute_stop': '2018-03-16T17:12:35.0Z', - 'relative_stop': 10, + "name": "test", + "action": "logger", + "start": "2018-03-16T17:12:25.0Z", + "absolute_stop": "2018-03-16T17:12:35.0Z", + "relative_stop": 10, }, # 0 relative_stop - { - 'name': 'test', - 'action': 'logger', - 'relative_stop': 0 - }, + {"name": "test", "action": "logger", "relative_stop": 0}, # negative relative_stop - { - 'name': 'test', - 'action': 'logger', - 'relative_stop': -10 - }, + {"name": "test", "action": "logger", "relative_stop": -10}, # non-integer relative_stop - { - 'name': 'test', - 'action': 'logger', - 'relative_stop': 3.14 - }, + {"name": "test", "action": "logger", "relative_stop": 3.14}, # stop is before start { - 'name': 'test', - 'action': 'logger', - 'start': '2018-03-16T17:12:35Z', - 'stop': '2018-03-16T17:12:30Z' + "name": "test", + "action": "logger", + "start": "2018-03-16T17:12:35Z", + "stop": "2018-03-16T17:12:30Z", }, # stop is same as start { - 'name': 'test', - 'action': 'logger', - 'start': '2018-03-16T17:12:35Z', - 'stop': '2018-03-16T17:12:35Z', - } - ]) + "name": "test", + "action": "logger", + "start": "2018-03-16T17:12:35Z", + "stop": "2018-03-16T17:12:35Z", + }, + ], +) def test_invalid_user_entries(entry_json): serializer = ScheduleEntrySerializer(data=entry_json) assert not serializer.is_valid() @@ -301,93 +189,54 @@ def test_invalid_user_entries(entry_json): # Test that invalid admin input is invalid @pytest.mark.django_db @pytest.mark.parametrize( - 'entry_json', + "entry_json", [ # name is a required field - { - 'action': 'logger' - }, + {"action": "logger"}, # action is a required field - { - 'name': 'test' - }, + {"name": "test"}, # non-integer priority - { - 'name': 'test', - 'action': 'logger', - 'priority': 3.14 - }, + {"name": "test", "action": "logger", "priority": 3.14}, # priority less than min (for admin) - { - 'name': 'test', - 'action': 'logger', - 'priority': -21 - }, + {"name": "test", "action": "logger", "priority": -21}, # priority greater than max (19) - { - 'name': 'test', - 'action': 'logger', - 'priority': 20 - }, + {"name": "test", "action": "logger", "priority": 20}, # non-integer interval - { - 'name': 'test', - 'action': 'logger', - 'interval': 3.14 - }, + {"name": "test", "action": "logger", "interval": 3.14}, # 
zero interval - { - 'name': 'test', - 'action': 'logger', - 'interval': 0 - }, + {"name": "test", "action": "logger", "interval": 0}, # negative interval - { - 'name': 'test', - 'action': 'logger', - 'interval': -1 - }, + {"name": "test", "action": "logger", "interval": -1}, # can't interpret both absolute and relative stop { - 'name': 'test', - 'action': 'logger', - 'start': '2018-03-16T17:12:25.0Z', - 'absolute_stop': '2018-03-16T17:12:35.0Z', - 'relative_stop': 10, + "name": "test", + "action": "logger", + "start": "2018-03-16T17:12:25.0Z", + "absolute_stop": "2018-03-16T17:12:35.0Z", + "relative_stop": 10, }, # 0 relative_stop - { - 'name': 'test', - 'action': 'logger', - 'relative_stop': 0 - }, + {"name": "test", "action": "logger", "relative_stop": 0}, # negative relative_stop - { - 'name': 'test', - 'action': 'logger', - 'relative_stop': -10 - }, + {"name": "test", "action": "logger", "relative_stop": -10}, # non-integer relative_stop - { - 'name': 'test', - 'action': 'logger', - 'relative_stop': 3.14 - }, + {"name": "test", "action": "logger", "relative_stop": 3.14}, # stop is before start { - 'name': 'test', - 'action': 'logger', - 'start': '2018-03-16T17:12:35Z', - 'stop': '2018-03-16T17:12:30Z' + "name": "test", + "action": "logger", + "start": "2018-03-16T17:12:35Z", + "stop": "2018-03-16T17:12:30Z", }, # stop is same as start { - 'name': 'test', - 'action': 'logger', - 'start': '2018-03-16T17:12:35Z', - 'stop': '2018-03-16T17:12:35Z', + "name": "test", + "action": "logger", + "start": "2018-03-16T17:12:35Z", + "stop": "2018-03-16T17:12:35Z", }, - ]) + ], +) def test_invalid_admin_entries(entry_json): serializer = AdminScheduleEntrySerializer(data=entry_json) assert not serializer.is_valid() @@ -400,35 +249,35 @@ def test_invalid_admin_entries(entry_json): def test_serialized_fields(user_client): """Certain fields on the schedule entry model should be serialized.""" - rjson = post_schedule(user_client, {'name': 'test', 'action': 'logger'}) + rjson = post_schedule(user_client, {"name": "test", "action": "logger"}) # nullable fields - assert 'interval' in rjson - assert 'callback_url' in rjson + assert "interval" in rjson + assert "callback_url" in rjson # non-nullable fields - assert rjson['name'] - assert rjson['action'] - assert rjson['priority'] is not None # potentially 0 - assert rjson['next_task_id'] + assert rjson["name"] + assert rjson["action"] + assert rjson["priority"] is not None # potentially 0 + assert rjson["next_task_id"] # nullable datetimes - assert rjson['start'] is None or parse_datetime_str(rjson['start']) - assert rjson['stop'] is None or parse_datetime_str(rjson['stop']) + assert rjson["start"] is None or parse_datetime_str(rjson["start"]) + assert rjson["stop"] is None or parse_datetime_str(rjson["stop"]) # datetimes - assert parse_datetime_str(rjson['created']) - assert parse_datetime_str(rjson['modified']) - assert parse_datetime_str(rjson['next_task_time']) + assert parse_datetime_str(rjson["created"]) + assert parse_datetime_str(rjson["modified"]) + assert parse_datetime_str(rjson["next_task_time"]) # booleans - assert rjson['is_active'] in {True, False} - assert rjson['is_private'] in {True, False} + assert rjson["is_active"] in {True, False} + assert rjson["is_private"] in {True, False} # links - assert rjson['self'] - assert rjson['owner'] - assert rjson['results'] - assert rjson['acquisitions'] + assert rjson["self"] + assert rjson["owner"] + assert rjson["results"] + assert rjson["acquisitions"] def test_non_serialized_fields(user_client): 
"""Certain fields on the schedule entry model should not be serialized.""" - rjson = post_schedule(user_client, {'name': 'test', 'action': 'logger'}) + rjson = post_schedule(user_client, {"name": "test", "action": "logger"}) - assert 'relative_stop' not in rjson + assert "relative_stop" not in rjson diff --git a/src/schedule/tests/test_user_views.py b/src/schedule/tests/test_user_views.py index f9a3cf90..68fc0c7c 100644 --- a/src/schedule/tests/test_user_views.py +++ b/src/schedule/tests/test_user_views.py @@ -3,10 +3,15 @@ from rest_framework import status from rest_framework.reverse import reverse -from schedule.tests.utils import (EMPTY_SCHEDULE_RESPONSE, TEST_SCHEDULE_ENTRY, - TEST_PRIVATE_SCHEDULE_ENTRY, - TEST_ALTERNATE_SCHEDULE_ENTRY, post_schedule, - update_schedule, reverse_detail_url) +from schedule.tests.utils import ( + EMPTY_SCHEDULE_RESPONSE, + TEST_SCHEDULE_ENTRY, + TEST_PRIVATE_SCHEDULE_ENTRY, + TEST_ALTERNATE_SCHEDULE_ENTRY, + post_schedule, + update_schedule, + reverse_detail_url, +) from sensor import V1 from sensor.tests.utils import validate_response, HTTPS_KWARG @@ -14,18 +19,18 @@ def test_user_cannot_post_private_schedule(user_client): """Unpriveleged users should not be able to create private entries.""" rjson = post_schedule(user_client, TEST_PRIVATE_SCHEDULE_ENTRY) - entry_name = rjson['name'] + entry_name = rjson["name"] url = reverse_detail_url(entry_name) response = user_client.get(url, **HTTPS_KWARG) - assert not rjson['is_private'] + assert not rjson["is_private"] validate_response(response, status.HTTP_200_OK) - assert not response.data['is_private'] + assert not response.data["is_private"] def test_user_can_view_non_private_user_entries(user_client, alt_user_client): # alt user schedule entry alt_user_rjson = post_schedule(alt_user_client, TEST_SCHEDULE_ENTRY) - alt_user_entry_name = alt_user_rjson['name'] + alt_user_entry_name = alt_user_rjson["name"] alt_user_entry_url = reverse_detail_url(alt_user_entry_name) response = user_client.get(alt_user_entry_url, **HTTPS_KWARG) validate_response(response, status.HTTP_200_OK) @@ -34,7 +39,7 @@ def test_user_can_view_non_private_user_entries(user_client, alt_user_client): def test_user_can_view_non_private_admin_entries(admin_client, user_client): # admin user schedule entry admin_rjson = post_schedule(admin_client, TEST_ALTERNATE_SCHEDULE_ENTRY) - admin_entry_name = admin_rjson['name'] + admin_entry_name = admin_rjson["name"] admin_entry_url = reverse_detail_url(admin_entry_name) response = user_client.get(admin_entry_url, **HTTPS_KWARG) validate_response(response, status.HTTP_200_OK) @@ -42,10 +47,10 @@ def test_user_can_view_non_private_admin_entries(admin_client, user_client): def test_user_cannot_view_private_entry_in_list(admin_client, user_client): post_schedule(admin_client, TEST_PRIVATE_SCHEDULE_ENTRY) - url = reverse('schedule-list', kwargs=V1) + url = reverse("schedule-list", kwargs=V1) response = user_client.get(url, **HTTPS_KWARG) rjson = validate_response(response, status.HTTP_200_OK) - assert rjson['results'] == EMPTY_SCHEDULE_RESPONSE + assert rjson["results"] == EMPTY_SCHEDULE_RESPONSE def test_user_cannot_view_private_entry_details(admin_client, user_client): @@ -53,7 +58,7 @@ def test_user_cannot_view_private_entry_details(admin_client, user_client): # Private indicates admin wants users to be unaware that the entry exists # on the system, hence 404 vs 403 (FORBIDDEN). 
rjson = post_schedule(admin_client, TEST_PRIVATE_SCHEDULE_ENTRY) - entry_name = rjson['name'] + entry_name = rjson["name"] url = reverse_detail_url(entry_name) response = user_client.get(url, **HTTPS_KWARG) validate_response(response, status.HTTP_404_NOT_FOUND) @@ -61,7 +66,7 @@ def test_user_cannot_view_private_entry_details(admin_client, user_client): def test_user_can_delete_their_entry(user_client): rjson = post_schedule(user_client, TEST_SCHEDULE_ENTRY) - entry_name = rjson['name'] + entry_name = rjson["name"] entry_url = reverse_detail_url(entry_name) # First attempt to delete should return 204 @@ -73,23 +78,22 @@ def test_user_can_delete_their_entry(user_client): validate_response(response, status.HTTP_404_NOT_FOUND) -def test_user_cannot_delete_any_other_entry(admin_client, user_client, - alt_user_client): +def test_user_cannot_delete_any_other_entry(admin_client, user_client, alt_user_client): # alt user schedule entry alt_user_rjson = post_schedule(alt_user_client, TEST_SCHEDULE_ENTRY) - alt_user_entry_name = alt_user_rjson['name'] + alt_user_entry_name = alt_user_rjson["name"] alt_user_entry_url = reverse_detail_url(alt_user_entry_name) - user_delete_alt_user_response = user_client.delete(alt_user_entry_url, - **HTTPS_KWARG) + user_delete_alt_user_response = user_client.delete( + alt_user_entry_url, **HTTPS_KWARG + ) # admin user schedule entry admin_rjson = post_schedule(admin_client, TEST_PRIVATE_SCHEDULE_ENTRY) - admin_entry_name = admin_rjson['name'] + admin_entry_name = admin_rjson["name"] admin_entry_url = reverse_detail_url(admin_entry_name) - user_delete_admin_response = user_client.delete(admin_entry_url, - **HTTPS_KWARG) + user_delete_admin_response = user_client.delete(admin_entry_url, **HTTPS_KWARG) validate_response(user_delete_alt_user_response, status.HTTP_403_FORBIDDEN) # Admin's entry is private, hence 404 instead of 403 @@ -98,31 +102,33 @@ def test_user_cannot_delete_any_other_entry(admin_client, user_client, def test_user_can_modify_their_entry(user_client): rjson = post_schedule(user_client, TEST_SCHEDULE_ENTRY) - entry_name = rjson['name'] + entry_name = rjson["name"] - user_adjust_response = update_schedule(user_client, entry_name, - TEST_ALTERNATE_SCHEDULE_ENTRY) + user_adjust_response = update_schedule( + user_client, entry_name, TEST_ALTERNATE_SCHEDULE_ENTRY + ) validate_response(user_adjust_response, status.HTTP_200_OK) - assert rjson['priority'] == 10 - assert user_adjust_response.data['priority'] == 5 + assert rjson["priority"] == 10 + assert user_adjust_response.data["priority"] == 5 -def test_user_cannot_modify_any_other_entry(admin_client, user_client, - alt_user_client): +def test_user_cannot_modify_any_other_entry(admin_client, user_client, alt_user_client): # alt user schedule entry alt_user_rjson = post_schedule(alt_user_client, TEST_SCHEDULE_ENTRY) - alt_user_entry_name = alt_user_rjson['name'] + alt_user_entry_name = alt_user_rjson["name"] user_adjust_alt_user_response = update_schedule( - user_client, alt_user_entry_name, TEST_PRIVATE_SCHEDULE_ENTRY) + user_client, alt_user_entry_name, TEST_PRIVATE_SCHEDULE_ENTRY + ) # admin user schedule entry admin_rjson = post_schedule(admin_client, TEST_PRIVATE_SCHEDULE_ENTRY) - admin_entry_name = admin_rjson['name'] + admin_entry_name = admin_rjson["name"] - user_adjust_admin_response = update_schedule(user_client, admin_entry_name, - TEST_SCHEDULE_ENTRY) + user_adjust_admin_response = update_schedule( + user_client, admin_entry_name, TEST_SCHEDULE_ENTRY + ) 
validate_response(user_adjust_alt_user_response, status.HTTP_403_FORBIDDEN)
 
     # Admin's entry is private, hence 404 instead of 403
@@ -132,7 +138,7 @@ def test_user_cannot_modify_any_other_entry(admin_client, user_client,
 
 def test_user_cannot_use_negative_priority(user_client):
     """Unprivileged users should not be able to use "high" priority."""
     hipri = TEST_SCHEDULE_ENTRY.copy()
-    hipri['priority'] = -20
+    hipri["priority"] = -20
     with pytest.raises(AssertionError):
         post_schedule(user_client, hipri)
 
@@ -141,12 +147,12 @@ def test_validate_only_does_not_modify_schedule_with_good_entry(user_client):
     """A good entry with validate_only should return 204 only."""
     # Ensure that a 204 "NO CONTENT" is returned from the validator
     entry = TEST_SCHEDULE_ENTRY.copy()
-    entry['validate_only'] = True
+    entry["validate_only"] = True
     expected_status = status.HTTP_204_NO_CONTENT
     post_schedule(user_client, entry, expected_status=expected_status)
 
     # Ensure that the entry didn't make it into the schedule
-    entry_name = entry['name']
+    entry_name = entry["name"]
     url = reverse_detail_url(entry_name)
     response = user_client.get(url, **HTTPS_KWARG)
     validate_response(response, status.HTTP_404_NOT_FOUND)
 
@@ -156,12 +162,12 @@ def test_validate_only_does_not_modify_schedule_with_bad_entry(user_client):
     """A bad entry with validate_only should return 400 only."""
     # Ensure that a 400 "BAD REQUEST" is returned from the validator
     entry = TEST_SCHEDULE_ENTRY.copy()
-    entry['interval'] = 1.5  # non-integer interval is invalid
-    entry['validate_only'] = True
+    entry["interval"] = 1.5  # non-integer interval is invalid
+    entry["validate_only"] = True
     expected_status = status.HTTP_400_BAD_REQUEST
     post_schedule(user_client, entry, expected_status=expected_status)
 
     # Ensure that the entry didn't make it into the schedule
-    url = reverse_detail_url(entry['name'])
+    url = reverse_detail_url(entry["name"])
     response = user_client.get(url, **HTTPS_KWARG)
     validate_response(response, status.HTTP_404_NOT_FOUND)
diff --git a/src/schedule/tests/test_views.py b/src/schedule/tests/test_views.py
index 4267ec5c..6d1add3a 100644
--- a/src/schedule/tests/test_views.py
+++ b/src/schedule/tests/test_views.py
@@ -1,9 +1,13 @@
 from rest_framework import status
 from rest_framework.reverse import reverse
 
-from schedule.tests.utils import (EMPTY_SCHEDULE_RESPONSE, TEST_SCHEDULE_ENTRY,
-                                  TEST_PRIVATE_SCHEDULE_ENTRY, post_schedule,
-                                  reverse_detail_url)
+from schedule.tests.utils import (
+    EMPTY_SCHEDULE_RESPONSE,
+    TEST_SCHEDULE_ENTRY,
+    TEST_PRIVATE_SCHEDULE_ENTRY,
+    post_schedule,
+    reverse_detail_url,
+)
 from sensor import V1
 from sensor.tests.utils import validate_response, HTTPS_KWARG
 from tasks.tests.utils import simulate_acquisitions
@@ -11,7 +15,7 @@
 
 def test_entry_posted_to_schedule_is_immediately_available(user_client):
     rjson = post_schedule(user_client, TEST_SCHEDULE_ENTRY)
-    entry_name = rjson['name']
+    entry_name = rjson["name"]
     entry_url = reverse_detail_url(entry_name)
     user_response = user_client.get(entry_url, **HTTPS_KWARG)
@@ -24,9 +28,9 @@ def test_entry_posted_to_schedule_is_immediately_available(user_client):
 
 def test_post_unknown_field_to_schedule(user_client):
     """Unknown fields in a schedule entry should be ignored."""
     entry_json = TEST_SCHEDULE_ENTRY.copy()
-    entry_json['nonsense'] = True
+    entry_json["nonsense"] = True
     rjson = post_schedule(user_client, entry_json)
-    entry_name = rjson['name']
+    entry_name = rjson["name"]
     entry_url = reverse_detail_url(entry_name)
     response = user_client.get(entry_url, **HTTPS_KWARG)
validate_response(response, status.HTTP_200_OK) @@ -34,13 +38,13 @@ def test_post_unknown_field_to_schedule(user_client): for k, v in TEST_SCHEDULE_ENTRY.items(): assert rjson[k] == v - assert 'nonsense' not in rjson - assert 'nonsense' not in response.data + assert "nonsense" not in rjson + assert "nonsense" not in response.data def test_private_schedule_entry_is_private(admin_client, user_client): rjson = post_schedule(admin_client, TEST_PRIVATE_SCHEDULE_ENTRY) - entry_name = rjson['name'] + entry_name = rjson["name"] entry_url = reverse_detail_url(entry_name) user_response = user_client.get(entry_url, **HTTPS_KWARG) admin_user_response = admin_client.get(entry_url, **HTTPS_KWARG) @@ -50,22 +54,22 @@ def test_private_schedule_entry_is_private(admin_client, user_client): def test_get_schedule(user_client): - url = reverse('schedule-list', kwargs=V1) + url = reverse("schedule-list", kwargs=V1) rjson = validate_response(user_client.get(url, **HTTPS_KWARG)) - assert rjson['results'] == EMPTY_SCHEDULE_RESPONSE + assert rjson["results"] == EMPTY_SCHEDULE_RESPONSE post_schedule(user_client, TEST_SCHEDULE_ENTRY) rjson = validate_response(user_client.get(url, **HTTPS_KWARG)) - assert rjson['count'] == 1 + assert rjson["count"] == 1 - expected_name = TEST_SCHEDULE_ENTRY['name'] - actual_name = rjson['results'][0]['name'] + expected_name = TEST_SCHEDULE_ENTRY["name"] + actual_name = rjson["results"][0]["name"] assert expected_name == actual_name def test_get_nonexistent_entry_details_returns_404(user_client): """Requesting details of non-existent entry should return 404.""" - url = reverse_detail_url('doesntexist') + url = reverse_detail_url("doesntexist") response = user_client.get(url, **HTTPS_KWARG) validate_response(response, status.HTTP_404_NOT_FOUND) @@ -73,7 +77,7 @@ def test_get_nonexistent_entry_details_returns_404(user_client): def test_get_existing_entry_details_returns_200(user_client): """Requesting details of existing entry should return 200.""" rjson = post_schedule(user_client, TEST_SCHEDULE_ENTRY) - entry_name = rjson['name'] + entry_name = rjson["name"] url = reverse_detail_url(entry_name) response = user_client.get(url, **HTTPS_KWARG) validate_response(response, status.HTTP_200_OK) @@ -87,7 +91,7 @@ def test_delete_entry_with_acquisitions_fails(user_client, test_scheduler): rjson = validate_response(response, status.HTTP_400_BAD_REQUEST) expected_status = status.HTTP_204_NO_CONTENT - for acq_url in rjson['protected_objects']: + for acq_url in rjson["protected_objects"]: response = user_client.delete(acq_url, **HTTPS_KWARG) validate_response(response, expected_status) diff --git a/src/schedule/tests/utils.py b/src/schedule/tests/utils.py index d033ba9e..fc6834e2 100644 --- a/src/schedule/tests/utils.py +++ b/src/schedule/tests/utils.py @@ -8,31 +8,31 @@ EMPTY_SCHEDULE_RESPONSE = [] -TEST_SCHEDULE_ENTRY = {'name': 'test', 'action': 'logger', 'is_private': False} +TEST_SCHEDULE_ENTRY = {"name": "test", "action": "logger", "is_private": False} TEST_ALTERNATE_SCHEDULE_ENTRY = { - 'name': 'test_alternate', - 'action': 'logger', - 'is_private': False, - 'priority': 5 + "name": "test_alternate", + "action": "logger", + "is_private": False, + "priority": 5, } TEST_PRIVATE_SCHEDULE_ENTRY = { - 'name': 'test_private', - 'action': 'logger', - 'is_private': True + "name": "test_private", + "action": "logger", + "is_private": True, } def post_schedule(client, entry, expected_status=status.HTTP_201_CREATED): kwargs = { - 'data': json.dumps(entry), - 'content_type': 'application/json', - 
'secure': True, - 'wsgi.url_scheme': 'https' + "data": json.dumps(entry), + "content_type": "application/json", + "secure": True, + "wsgi.url_scheme": "https", } - url = reverse('schedule-list', kwargs=V1) + url = reverse("schedule-list", kwargs=V1) r = client.post(url, **kwargs) err = "Got status {}, expected {}".format(r.status_code, expected_status) @@ -48,19 +48,19 @@ def update_schedule(client, entry_name, new_entry): url = reverse_detail_url(entry_name) kwargs = { - 'data': json.dumps(new_entry), - 'content_type': 'application/json', - 'secure': True, - 'wsgi.url_scheme': 'https' + "data": json.dumps(new_entry), + "content_type": "application/json", + "secure": True, + "wsgi.url_scheme": "https", } return client.put(url, **kwargs) def reverse_detail_url(entry_name): - kws = {'pk': entry_name} + kws = {"pk": entry_name} kws.update(V1) - url = reverse('schedule-detail', kwargs=kws) + url = reverse("schedule-detail", kwargs=kws) return url diff --git a/src/schedule/urls.py b/src/schedule/urls.py index 5467cad1..d4153d14 100644 --- a/src/schedule/urls.py +++ b/src/schedule/urls.py @@ -3,6 +3,6 @@ from .views import ScheduleEntryViewSet router = SimpleRouter() -router.register('', ScheduleEntryViewSet, basename='schedule') +router.register("", ScheduleEntryViewSet, basename="schedule") urlpatterns = router.urls diff --git a/src/schedule/views.py b/src/schedule/views.py index 45de8fa4..79e7beb7 100644 --- a/src/schedule/views.py +++ b/src/schedule/views.py @@ -30,15 +30,15 @@ class ScheduleEntryViewSet(ModelViewSet): Deletes the specified schedule entry. """ + queryset = ScheduleEntry.objects.all() permission_classes = api_settings.DEFAULT_PERMISSION_CLASSES + [ - IsAdminOrOwnerOrReadOnly, + IsAdminOrOwnerOrReadOnly ] filter_backends = (filters.SearchFilter, filters.OrderingFilter) - lookup_fields = ('schedule_entry__name', 'task_id') - ordering_fields = ('priority', 'start', 'next_task_time', 'created', - 'modified') - search_fields = ('name', 'action') + lookup_fields = ("schedule_entry__name", "task_id") + ordering_fields = ("priority", "start", "next_task_time", "created", "modified") + search_fields = ("name", "action") def create(self, request, *args, **kwargs): """Return NO CONTENT when input is valid but validate_only is True.""" @@ -47,7 +47,7 @@ def create(self, request, *args, **kwargs): serializer = self.get_serializer(data=request.data) serializer.is_valid(raise_exception=True) - if serializer.validated_data.get('validate_only'): + if serializer.validated_data.get("validate_only"): return Response(status=status.HTTP_204_NO_CONTENT) self.perform_create(serializer) @@ -73,7 +73,7 @@ def get_queryset(self): def get_serializer_class(self): """Modify the base serializer based on user and request.""" - updating = self.action in {'update', 'partial_update'} + updating = self.action in {"update", "partial_update"} if self.request.user.is_staff: SerializerBaseClass = AdminScheduleEntrySerializer @@ -83,9 +83,9 @@ def get_serializer_class(self): ro_fields = SerializerBaseClass.Meta.read_only_fields if updating: - ro_fields += ('name', 'action') + ro_fields += ("name", "action") else: - ro_fields += ('is_active', ) + ro_fields += ("is_active",) class SerializerClass(SerializerBaseClass): class Meta(SerializerBaseClass.Meta): diff --git a/src/scheduler/apps.py b/src/scheduler/apps.py index 60fb286a..d30957b6 100644 --- a/src/scheduler/apps.py +++ b/src/scheduler/apps.py @@ -2,4 +2,4 @@ class SchedulerConfig(AppConfig): - name = 'scheduler' + name = "scheduler" diff --git 
a/src/scheduler/scheduler.py b/src/scheduler/scheduler.py index 9af7e2ea..668eb240 100644 --- a/src/scheduler/scheduler.py +++ b/src/scheduler/scheduler.py @@ -35,13 +35,13 @@ def __init__(self): # scheduler looks ahead `interval_multiplier` times the shortest # interval in the schedule in order to keep memory-usage low self.interval_multiplier = 10 - self.name = 'Scheduler' + self.name = "Scheduler" self.running = False self.interrupt_flag = threading.Event() # Cache the currently running task state - self.entry = None # ScheduleEntry that created the current task - self.task = None # Task object describing current task + self.entry = None # ScheduleEntry that created the current task + self.task = None # Task object describing current task self.task_result = None # TaskResult object for current task @property @@ -58,7 +58,7 @@ def schedule_has_entries(self): def cancel(entry): """Remove an entry from the scheduler without deleting it.""" entry.is_active = False - entry.save(update_fields=('is_active', )) + entry.save(update_fields=("is_active",)) def stop(self): """Complete the current task, then return control.""" @@ -142,13 +142,13 @@ def _call_task_action(self): logger.debug("running task {}/{}".format(entry_name, task_id)) detail = self.task.action_fn(entry_name, task_id) self.delayfn(0) # let other threads run - status = 'success' + status = "success" if not isinstance(detail, str): detail = "" except Exception as err: detail = str(err) logger.exception("action failed: {}".format(detail)) - status = 'failure' + status = "failure" return status, detail[:MAX_DETAIL_LEN] @@ -162,7 +162,7 @@ def _finalize_task_result(self, started, finished, status, detail): tr.save() if self.entry.callback_url: - context = {'request': self.entry.request} + context = {"request": self.entry.request} result_json = TaskResultSerializer(tr, context=context).data requests_futures_session.post( self.entry.callback_url, @@ -187,7 +187,7 @@ def _queue_pending_tasks(self, schedule_snapshot): continue task_id = entry.get_next_task_id() - entry.save(update_fields=('next_task_id', )) + entry.save(update_fields=("next_task_id",)) pri = entry.priority action = entry.action pending_queue.enter(task_time, pri, action, entry.name, task_id) @@ -196,7 +196,7 @@ def _queue_pending_tasks(self, schedule_snapshot): def _take_pending_task_time(self, entry): task_times = entry.take_pending() - entry.save(update_fields=('next_task_time', 'is_active')) + entry.save(update_fields=("next_task_time", "is_active")) if not task_times: return None @@ -249,12 +249,12 @@ def _cancel_if_completed(self, entry): @property def status(self): if self.is_alive(): - return 'running' if self.running else 'idle' - return 'dead' + return "running" if self.running else "idle" + return "dead" def __repr__(self): - s = 'running' if self.running else 'stopped' - return '<{} status={}>'.format(self.__class__.__name__, s) + s = "running" if self.running else "stopped" + return "<{} status={}>".format(self.__class__.__name__, s) @contextmanager diff --git a/src/scheduler/tests/test_scheduler.py b/src/scheduler/tests/test_scheduler.py index cc7ed792..1200fe46 100644 --- a/src/scheduler/tests/test_scheduler.py +++ b/src/scheduler/tests/test_scheduler.py @@ -5,14 +5,19 @@ import requests_mock from scheduler.scheduler import Scheduler, minimum_duration -from .utils import (BAD_ACTION_STR, advance_testclock, create_action, - create_bad_action, create_entry) +from .utils import ( + BAD_ACTION_STR, + advance_testclock, + create_action, + create_bad_action, + 
create_entry, +) @pytest.mark.django_db def test_populate_queue(test_scheduler): """An entry in the schedule should be added to a read-only task queue.""" - create_entry('test', 1, 0, 5, 1, 'logger') + create_entry("test", 1, 0, 5, 1, "logger") s = test_scheduler s.run(blocking=False) # now=0, so task with time 0 is run assert [e.time for e in s.task_queue] == [1, 2, 3, 4] @@ -23,8 +28,8 @@ def test_priority(test_scheduler): """A task with lower priority number should sort higher in task queue.""" lopri = 20 hipri = 10 - create_entry('lopri', lopri, 0, 5, 1, 'logger') - create_entry('hipri', hipri, 0, 5, 1, 'logger') + create_entry("lopri", lopri, 0, 5, 1, "logger") + create_entry("hipri", hipri, 0, 5, 1, "logger") s = test_scheduler s.run(blocking=False) q = s.task_queue.to_list() @@ -36,7 +41,7 @@ def test_priority(test_scheduler): @pytest.mark.django_db def test_future_start(test_scheduler): """An entry with start time in future should remain in schedule.""" - create_entry('t', 1, 50, 100, 1, 'logger') + create_entry("t", 1, 50, 100, 1, "logger") test_scheduler.run(blocking=False) s = test_scheduler assert len(s.task_queue) == 0 @@ -51,7 +56,7 @@ def test_calls_actions(test_scheduler): test_actions = dict(create_action() for _ in range(3)) for i, cb in enumerate(test_actions): - create_entry('test' + str(i), 1, 0, 3, 1, cb.__name__) + create_entry("test" + str(i), 1, 0, 3, 1, cb.__name__) s = test_scheduler s.run(blocking=False) @@ -66,11 +71,11 @@ def test_calls_actions(test_scheduler): @pytest.mark.django_db def test_add_entry(test_scheduler): """Creating a new entry instance adds it to the current schedule.""" - create_entry('t1', 10, 1, 100, 5, 'logger') + create_entry("t1", 10, 1, 100, 5, "logger") s = test_scheduler s.run(blocking=False) advance_testclock(s.timefn, 49) - create_entry('t2', 20, 50, 300, 5, 'logger') + create_entry("t2", 20, 50, 300, 5, "logger") s.run(blocking=False) assert len(s.task_queue) == 20 assert s.task_queue[0].priority == 20 @@ -79,8 +84,8 @@ def test_add_entry(test_scheduler): @pytest.mark.django_db def test_remove_entry_by_delete(test_scheduler): """An entry is removed from schedule if it's deleted.""" - e1 = create_entry('t1', 10, 1, 300, 5, 'logger') - e2 = create_entry('t2', 20, 50, 300, 5, 'logger') + e1 = create_entry("t1", 10, 1, 300, 5, "logger") + e2 = create_entry("t2", 20, 50, 300, 5, "logger") s = test_scheduler s.run(blocking=False) advance_testclock(s.timefn, 10) @@ -93,8 +98,8 @@ def test_remove_entry_by_delete(test_scheduler): @pytest.mark.django_db def test_remove_entry_by_cancel(test_scheduler): """scheduler.cancel removes an entry from schedule without deleting it.""" - e1 = create_entry('t1', 10, 1, 300, 5, 'logger') - e2 = create_entry('t2', 20, 50, 300, 5, 'logger') + e1 = create_entry("t1", 10, 1, 300, 5, "logger") + e2 = create_entry("t2", 20, 50, 300, 5, "logger") s = test_scheduler s.run(blocking=False) advance_testclock(s.timefn, 10) @@ -107,7 +112,7 @@ def test_remove_entry_by_cancel(test_scheduler): @pytest.mark.django_db def test_start_stop(test_scheduler): """Calling stop on started scheduler thread should cause thread exit.""" - create_entry('t', 1, 1, 100, 5, 'logger') + create_entry("t", 1, 1, 100, 5, "logger") s = test_scheduler s.start() time.sleep(0.02) # hit minimum_duration @@ -121,7 +126,7 @@ def test_start_stop(test_scheduler): @pytest.mark.django_db def test_run_completes(test_scheduler): """The scheduler should return to idle state after schedule completes.""" - create_entry('t', 1, None, None, None, 'logger') + 
create_entry("t", 1, None, None, None, "logger") s = test_scheduler s.start() time.sleep(0.1) # hit minimum_duration @@ -136,10 +141,10 @@ def test_run_completes(test_scheduler): def test_survives_failed_action(test_scheduler): """An action throwing an exception should be survivable.""" cb1 = create_bad_action() - create_entry('t1', 10, None, None, None, cb1.__name__) + create_entry("t1", 10, None, None, None, cb1.__name__) cb2, flag = create_action() # less priority to force run after bad_entry fails - create_entry('t2', 20, None, None, None, cb2.__name__) + create_entry("t2", 20, None, None, None, cb2.__name__) s = test_scheduler advance_testclock(s.timefn, 1) assert not flag.is_set() @@ -150,7 +155,7 @@ def test_survives_failed_action(test_scheduler): @pytest.mark.django_db def test_compress_past_times(test_scheduler): """Multiple task times in the past should be compressed to one.""" - create_entry('t', 1, -10, 5, 1, 'logger') + create_entry("t", 1, -10, 5, 1, "logger") s = test_scheduler s.run(blocking=False) # past times -10 through 0 are compressed and a single task is run, @@ -161,7 +166,7 @@ def test_compress_past_times(test_scheduler): @pytest.mark.django_db def test_compress_past_times_offset(test_scheduler): """Multiple task times in the past should be compressed to one.""" - create_entry('t', 1, -2, 14, 4, 'logger') + create_entry("t", 1, -2, 14, 4, "logger") s = test_scheduler s.run(blocking=False) # past time -2 is run, then 2, 6, and 10 are queued @@ -173,7 +178,7 @@ def test_compress_past_times_offset(test_scheduler): @pytest.mark.django_db def test_next_task_time_value_when_start_changes(test_scheduler): """When an entry's start value changes, update `next_task_time`.""" - entry = create_entry('t', 1, 1, 10, 1, 'logger') + entry = create_entry("t", 1, 1, 10, 1, "logger") s = test_scheduler s.run(blocking=False) assert entry.next_task_time == 1 @@ -209,7 +214,7 @@ def test_next_task_time_value_when_start_changes(test_scheduler): @pytest.mark.django_db def test_next_task_time_value_when_interval_changes(test_scheduler): """When an entry's interval value changes, update `next_task_time`.""" - entry = create_entry('t', 1, 1, 100, 1, 'logger') + entry = create_entry("t", 1, 1, 100, 1, "logger") s = test_scheduler s.run(blocking=False) assert entry.next_task_time == 1 @@ -237,7 +242,7 @@ def test_next_task_time_value_when_interval_changes(test_scheduler): @pytest.mark.django_db def test_one_shot(test_scheduler): """If no start or interval given, entry should be run once and removed.""" - create_entry('t', 1, None, None, None, 'logger') + create_entry("t", 1, None, None, None, "logger") s = test_scheduler advance_testclock(s.timefn, 1) s.run(blocking=False) @@ -248,7 +253,7 @@ def test_one_shot(test_scheduler): @pytest.mark.django_db def test_task_queue(test_scheduler): """The scheduler should maintain a queue of upcoming tasks.""" - e = create_entry('t', 1, 1, 100, 5, 'logger') + e = create_entry("t", 1, 1, 100, 5, "logger") s = test_scheduler # upcoming tasks are queued @@ -273,7 +278,7 @@ def test_task_queue(test_scheduler): @pytest.mark.django_db def test_clearing_schedule_clears_task_queue(test_scheduler): """The scheduler should empty task_queue when schedule is deleted.""" - create_entry('t', 1, 1, 100, 5, 'logger') + create_entry("t", 1, 1, 100, 5, "logger") s = test_scheduler s.run(blocking=False) # queue first 10 tasks assert len(s.task_queue) == 10 @@ -314,7 +319,7 @@ def cb_request_handler(sess, resp): cb_flag.set() cb = create_bad_action() - create_entry('t', 10, 
None, None, None, cb.__name__, 'mock://cburl') + create_entry("t", 10, None, None, None, cb.__name__, "mock://cburl") s = test_scheduler advance_testclock(s.timefn, 1) s._callback_response_handler = cb_request_handler @@ -323,19 +328,19 @@ def cb_request_handler(sess, resp): request_json = None with requests_mock.Mocker() as m: - m.post('mock://cburl') # register url for posting + m.post("mock://cburl") # register url for posting s.run(blocking=False) time.sleep(0.1) # let requests thread run request_json = m.request_history[0].json() assert cb_flag.is_set() - assert request_json['result'] == 'failure' - assert request_json['task_id'] == 1 - assert request_json['self'] - assert request_json['detail'] == BAD_ACTION_STR - assert request_json['started'] - assert request_json['finished'] - assert request_json['duration'] + assert request_json["result"] == "failure" + assert request_json["task_id"] == 1 + assert request_json["self"] + assert request_json["detail"] == BAD_ACTION_STR + assert request_json["started"] + assert request_json["finished"] + assert request_json["duration"] @pytest.mark.django_db @@ -348,7 +353,7 @@ def cb_request_handler(sess, resp): cb, action_flag = create_action() # less priority to force run after bad_entry fails - create_entry('t', 20, None, None, None, cb.__name__, 'mock://cburl') + create_entry("t", 20, None, None, None, cb.__name__, "mock://cburl") s = test_scheduler advance_testclock(s.timefn, 1) s._callback_response_handler = cb_request_handler @@ -357,19 +362,19 @@ def cb_request_handler(sess, resp): request_json = None with requests_mock.Mocker() as m: - m.post('mock://cburl') # register mock url for posting + m.post("mock://cburl") # register mock url for posting s.run(blocking=False) time.sleep(0.1) # let requests thread run request_json = m.request_history[0].json() assert cb_flag.is_set() assert action_flag.is_set() - assert request_json['result'] == 'success' - assert request_json['task_id'] == 1 - assert request_json['self'] - assert request_json['started'] - assert request_json['finished'] - assert request_json['duration'] + assert request_json["result"] == "success" + assert request_json["task_id"] == 1 + assert request_json["self"] + assert request_json["started"] + assert request_json["finished"] + assert request_json["duration"] @pytest.mark.django_db @@ -377,10 +382,10 @@ def test_starvation(test_scheduler): """A recurring high-pri task should not 'starve' a low-pri task.""" # higher-pri recurring task that takes 3 ticks to complete enters at t=0 cb0, flag0 = create_action() - create_entry('t0', 10, None, None, 3, cb0.__name__) + create_entry("t0", 10, None, None, 3, cb0.__name__) # lower-pri task enters at t=2 cb1, flag1 = create_action() - create_entry('t1', 20, 2, None, None, cb1.__name__) + create_entry("t1", 20, 2, None, None, cb1.__name__) s = test_scheduler s.run(blocking=False) assert not flag1.is_set() @@ -399,7 +404,7 @@ def test_task_pushed_past_stop_still_runs(test_scheduler): """A task pushed past `stop` by a long running task should still run.""" # create an entry that runs at time 1 and 2 cb0, flag0 = create_action() - create_entry('t0', 10, 1, 3, 1, cb0.__name__) + create_entry("t0", 10, 1, 3, 1, cb0.__name__) s = test_scheduler s.run(blocking=False) diff --git a/src/scheduler/tests/utils.py b/src/scheduler/tests/utils.py index 94ed5dcf..c4fb9437 100644 --- a/src/scheduler/tests/utils.py +++ b/src/scheduler/tests/utils.py @@ -62,19 +62,19 @@ def simulate_scheduler_run(n=1): def create_entry(name, priority, start, stop, interval, 
action, cb_url=None): kwargs = { - 'name': name, - 'priority': priority, - 'stop': stop, - 'interval': interval, - 'action': action, - 'owner': User.objects.get_or_create(username='test')[0], + "name": name, + "priority": priority, + "stop": stop, + "interval": interval, + "action": action, + "owner": User.objects.get_or_create(username="test")[0], } if start is not None: - kwargs['start'] = start + kwargs["start"] = start if cb_url is not None: - kwargs['callback_url'] = cb_url + kwargs["callback_url"] = cb_url return ScheduleEntry.objects.create(**kwargs) @@ -94,7 +94,7 @@ def cb(entry, task_id): flag.set() return "set flag" - cb.__name__ = 'testcb' + str(create_action.counter) + cb.__name__ = "testcb" + str(create_action.counter) actions.by_name[cb.__name__] = cb create_action.counter += 1 return cb, flag @@ -107,7 +107,7 @@ def create_bad_action(): def bad_action(entry, task_id): raise Exception(BAD_ACTION_STR) - actions.by_name['bad_action'] = bad_action + actions.by_name["bad_action"] = bad_action return bad_action diff --git a/src/sensor/__init__.py b/src/sensor/__init__.py index de3f9da8..2e233ca1 100644 --- a/src/sensor/__init__.py +++ b/src/sensor/__init__.py @@ -1,3 +1,3 @@ # API versions -V1 = {'version': 'v1'} +V1 = {"version": "v1"} diff --git a/src/sensor/apps.py b/src/sensor/apps.py index c9eb2ec7..37eff209 100644 --- a/src/sensor/apps.py +++ b/src/sensor/apps.py @@ -2,4 +2,4 @@ class SensorConfig(AppConfig): - name = 'sensor' + name = "sensor" diff --git a/src/sensor/exceptions.py b/src/sensor/exceptions.py index 51d0dc0b..53cc55f5 100644 --- a/src/sensor/exceptions.py +++ b/src/sensor/exceptions.py @@ -24,10 +24,7 @@ def exception_handler(exc, context): if isinstance(exc, ProtectedError): response = handle_protected_error(exc, context) elif isinstance(exc, db.IntegrityError): - response = Response({ - 'detail': str(exc) - }, - status=status.HTTP_409_CONFLICT) + response = Response({"detail": str(exc)}, status=status.HTTP_409_CONFLICT) else: logger.exception("Caught unhandled exception", exc_info=exc) @@ -35,30 +32,31 @@ def exception_handler(exc, context): def handle_protected_error(exc, context): - if 'name' in context['kwargs']: - entry_name = context['kwargs']['name'] + if "name" in context["kwargs"]: + entry_name = context["kwargs"]["name"] else: - entry_name = context['kwargs']['pk'] + entry_name = context["kwargs"]["pk"] - request = context['request'] + request = context["request"] protected_object_urls = [] for protected_object in exc.protected_objects: task_id = protected_object.task_id - url_kwargs = {'schedule_entry_name': entry_name, 'task_id': task_id} + url_kwargs = {"schedule_entry_name": entry_name, "task_id": task_id} url_kwargs.update(V1) - view_name = 'result-detail' + view_name = "result-detail" url = reverse(view_name, kwargs=url_kwargs, request=request) protected_object_urls.append(url) - response = Response({ - 'detail': - ("Cannot delete schedule entry {!r} because results on disk " - "reference it. Delete the protected results first." - ).format(entry_name), - 'protected_objects': - protected_object_urls - }, - status=status.HTTP_400_BAD_REQUEST) + response = Response( + { + "detail": ( + "Cannot delete schedule entry {!r} because results on disk " + "reference it. Delete the protected results first." 
+ ).format(entry_name), + "protected_objects": protected_object_urls, + }, + status=status.HTTP_400_BAD_REQUEST, + ) return response diff --git a/src/sensor/settings.py b/src/sensor/settings.py index 542dd42a..5c732a73 100644 --- a/src/sensor/settings.py +++ b/src/sensor/settings.py @@ -20,47 +20,47 @@ BASE_DIR = path.dirname(path.dirname(path.abspath(__file__))) REPO_ROOT = path.dirname(BASE_DIR) -FQDN = environ.get('FQDN', 'fqdn.unset') +FQDN = environ.get("FQDN", "fqdn.unset") -DOCKER_TAG = environ.get('DOCKER_TAG') -GIT_BRANCH = environ.get('GIT_BRANCH') -if not DOCKER_TAG or DOCKER_TAG == 'latest': +DOCKER_TAG = environ.get("DOCKER_TAG") +GIT_BRANCH = environ.get("GIT_BRANCH") +if not DOCKER_TAG or DOCKER_TAG == "latest": VERSION_STRING = GIT_BRANCH else: VERSION_STRING = DOCKER_TAG - if VERSION_STRING.startswith('v'): + if VERSION_STRING.startswith("v"): VERSION_STRING = VERSION_STRING[1:] -STATIC_ROOT = path.join(BASE_DIR, 'static') -STATIC_URL = '/static/' +STATIC_ROOT = path.join(BASE_DIR, "static") +STATIC_URL = "/static/" STATICFILES_DIRS = ( - ('js', path.join(STATIC_ROOT, 'js')), - ('css', path.join(STATIC_ROOT, 'css')), - ('images', path.join(STATIC_ROOT, 'images')), - ('fonts', path.join(STATIC_ROOT, 'fonts')), + ("js", path.join(STATIC_ROOT, "js")), + ("css", path.join(STATIC_ROOT, "css")), + ("images", path.join(STATIC_ROOT, "images")), + ("fonts", path.join(STATIC_ROOT, "fonts")), ) __cmd = path.split(sys.argv[0])[-1] -IN_DOCKER = bool(environ.get('IN_DOCKER')) -RUNNING_TESTS = 'test' in __cmd -RUNNING_DEMO = bool(environ.get('DEMO')) -MOCK_RADIO = bool(environ.get('MOCK_RADIO')) -MOCK_RADIO_RANDOM = bool(environ.get('MOCK_RADIO_RANDOM')) +IN_DOCKER = bool(environ.get("IN_DOCKER")) +RUNNING_TESTS = "test" in __cmd +RUNNING_DEMO = bool(environ.get("DEMO")) +MOCK_RADIO = bool(environ.get("MOCK_RADIO")) +MOCK_RADIO_RANDOM = bool(environ.get("MOCK_RADIO_RANDOM")) # Healthchecks - the existence of any of these indicates an unhealthy state -SDR_HEALTHCHECK_FILE = path.join(REPO_ROOT, 'sdr_unhealthy') -SCHEDULER_HEALTHCHECK_FILE = path.join(REPO_ROOT, 'scheduler_dead') +SDR_HEALTHCHECK_FILE = path.join(REPO_ROOT, "sdr_unhealthy") +SCHEDULER_HEALTHCHECK_FILE = path.join(REPO_ROOT, "scheduler_dead") LICENSE_URL = "https://github.com/NTIA/scos-sensor/blob/master/LICENSE.md" -OPENAPI_FILE = path.join(REPO_ROOT, 'docs', 'openapi.json') +OPENAPI_FILE = path.join(REPO_ROOT, "docs", "openapi.json") -CONFIG_DIR = path.join(REPO_ROOT, 'configs') +CONFIG_DIR = path.join(REPO_ROOT, "configs") # JSON configs -SCALE_FACTORS_FILE = path.join(CONFIG_DIR, 'scale_factors.json') -SENSOR_DEFINITION_FILE = path.join(CONFIG_DIR, 'sensor_definition.json') -ACTION_DEFINITIONS_DIR = path.join(CONFIG_DIR, 'actions') +SCALE_FACTORS_FILE = path.join(CONFIG_DIR, "scale_factors.json") +SENSOR_DEFINITION_FILE = path.join(CONFIG_DIR, "sensor_definition.json") +ACTION_DEFINITIONS_DIR = path.join(CONFIG_DIR, "actions") # Cleanup any existing healthcheck files try: @@ -69,7 +69,7 @@ pass # As defined in SigMF -DATETIME_FORMAT = '%Y-%m-%dT%H:%M:%S.%fZ' +DATETIME_FORMAT = "%Y-%m-%dT%H:%M:%S.%fZ" # https://docs.djangoproject.com/en/2.2/ref/settings/#internal-ips If # IN_DOCKER, the IP address that needs to go here to enable the debugging # toolbar is the nginx container's address, which isn't known ahead of time. It's # possible to extract the correct address from an incoming request, so if # IN_DOCKER and DEBUG=true, then the `api_v1_root` view will insert the correct # IP when the first request comes in.
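# Illustrative sketch only, not part of this patch: the runtime pattern the
# comment above describes, condensed from the `api_v1_root` hunk in
# src/sensor/views.py later in this diff. `request` is assumed to be the
# first incoming Django HttpRequest.
#
#     nginx_container_ip = request.META["REMOTE_ADDR"]
#     if settings.IN_DOCKER and settings.DEBUG and nginx_container_ip not in settings.INTERNAL_IPS:
#         settings.INTERNAL_IPS.append(nginx_container_ip)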
-INTERNAL_IPS = ['127.0.0.1'] +INTERNAL_IPS = ["127.0.0.1"] # See /env.template if not IN_DOCKER or RUNNING_TESTS: - SECRET_KEY = '!j1&*$wnrkrtc-74cc7_^#n6r3om$6s#!fy=zkd_xp(gkikl+8' + SECRET_KEY = "!j1&*$wnrkrtc-74cc7_^#n6r3om$6s#!fy=zkd_xp(gkikl+8" DEBUG = True ALLOWED_HOSTS = [] else: - SECURE_PROXY_SSL_HEADER = ('HTTP_X_FORWARDED_PROTO', 'https') - SECRET_KEY = environ['SECRET_KEY'] - DEBUG = environ['DEBUG'] == "true" - ALLOWED_HOSTS = environ['DOMAINS'].split() + environ['IPS'].split() - POSTGRES_PASSWORD = environ['POSTGRES_PASSWORD'] + SECURE_PROXY_SSL_HEADER = ("HTTP_X_FORWARDED_PROTO", "https") + SECRET_KEY = environ["SECRET_KEY"] + DEBUG = environ["DEBUG"] == "true" + ALLOWED_HOSTS = environ["DOMAINS"].split() + environ["IPS"].split() + POSTGRES_PASSWORD = environ["POSTGRES_PASSWORD"] SESSION_COOKIE_SECURE = IN_DOCKER CSRF_COOKIE_SECURE = IN_DOCKER @@ -141,117 +141,113 @@ """ INSTALLED_APPS = [ - 'django.contrib.admin', - 'django.contrib.auth', - 'django.contrib.contenttypes', - 'django.contrib.sessions', - 'django.contrib.messages', - 'django.contrib.staticfiles', - 'django_extensions', - 'django_filters', - 'rest_framework', - 'rest_framework.authtoken', - 'drf_yasg', # OpenAPI generator - 'raven.contrib.django.raven_compat', - 'debug_toolbar', + "django.contrib.admin", + "django.contrib.auth", + "django.contrib.contenttypes", + "django.contrib.sessions", + "django.contrib.messages", + "django.contrib.staticfiles", + "django_extensions", + "django_filters", + "rest_framework", + "rest_framework.authtoken", + "drf_yasg", # OpenAPI generator + "raven.contrib.django.raven_compat", + "debug_toolbar", # project-local apps - 'authentication.apps.AuthenticationConfig', - 'capabilities.apps.CapabilitiesConfig', - 'hardware.apps.HardwareConfig', - 'tasks.apps.TasksConfig', - 'schedule.apps.ScheduleConfig', - 'scheduler.apps.SchedulerConfig', - 'status.apps.StatusConfig', - 'sensor.apps.SensorConfig', # global settings/utils, etc + "authentication.apps.AuthenticationConfig", + "capabilities.apps.CapabilitiesConfig", + "hardware.apps.HardwareConfig", + "tasks.apps.TasksConfig", + "schedule.apps.ScheduleConfig", + "scheduler.apps.SchedulerConfig", + "status.apps.StatusConfig", + "sensor.apps.SensorConfig", # global settings/utils, etc ] MIDDLEWARE = [ - 'debug_toolbar.middleware.DebugToolbarMiddleware', - 'django.middleware.security.SecurityMiddleware', - 'django.contrib.sessions.middleware.SessionMiddleware', - 'django.middleware.common.CommonMiddleware', - 'django.middleware.csrf.CsrfViewMiddleware', - 'django.contrib.auth.middleware.AuthenticationMiddleware', - 'django.contrib.messages.middleware.MessageMiddleware', - 'django.middleware.clickjacking.XFrameOptionsMiddleware', + "debug_toolbar.middleware.DebugToolbarMiddleware", + "django.middleware.security.SecurityMiddleware", + "django.contrib.sessions.middleware.SessionMiddleware", + "django.middleware.common.CommonMiddleware", + "django.middleware.csrf.CsrfViewMiddleware", + "django.contrib.auth.middleware.AuthenticationMiddleware", + "django.contrib.messages.middleware.MessageMiddleware", + "django.middleware.clickjacking.XFrameOptionsMiddleware", ] -ROOT_URLCONF = 'sensor.urls' +ROOT_URLCONF = "sensor.urls" TEMPLATES = [ { - 'BACKEND': 'django.template.backends.django.DjangoTemplates', - 'DIRS': [path.join(BASE_DIR, 'templates')], - 'APP_DIRS': True, - 'OPTIONS': { - 'context_processors': [ - 'django.template.context_processors.debug', - 'django.contrib.auth.context_processors.auth', - 
'django.contrib.messages.context_processors.messages', + "BACKEND": "django.template.backends.django.DjangoTemplates", + "DIRS": [path.join(BASE_DIR, "templates")], + "APP_DIRS": True, + "OPTIONS": { + "context_processors": [ + "django.template.context_processors.debug", + "django.contrib.auth.context_processors.auth", + "django.contrib.messages.context_processors.messages", ], - 'builtins': [ - 'sensor.templatetags.sensor_tags', - ] + "builtins": ["sensor.templatetags.sensor_tags"], }, - }, + } ] -WSGI_APPLICATION = 'sensor.wsgi.application' +WSGI_APPLICATION = "sensor.wsgi.application" # Django Rest Framework # http://www.django-rest-framework.org/ REST_FRAMEWORK = { - 'EXCEPTION_HANDLER': - 'sensor.exceptions.exception_handler', - 'DEFAULT_AUTHENTICATION_CLASSES': ( - 'rest_framework.authentication.TokenAuthentication', - 'rest_framework.authentication.SessionAuthentication', + "EXCEPTION_HANDLER": "sensor.exceptions.exception_handler", + "DEFAULT_AUTHENTICATION_CLASSES": ( + "rest_framework.authentication.TokenAuthentication", + "rest_framework.authentication.SessionAuthentication", ), - 'DEFAULT_PERMISSION_CLASSES': - ('rest_framework.permissions.IsAuthenticated', ), - 'DEFAULT_RENDERER_CLASSES': ( - 'rest_framework.renderers.JSONRenderer', - 'rest_framework.renderers.BrowsableAPIRenderer', + "DEFAULT_PERMISSION_CLASSES": ("rest_framework.permissions.IsAuthenticated",), + "DEFAULT_RENDERER_CLASSES": ( + "rest_framework.renderers.JSONRenderer", + "rest_framework.renderers.BrowsableAPIRenderer", ), - 'DEFAULT_VERSIONING_CLASS': - 'rest_framework.versioning.URLPathVersioning', - 'DEFAULT_VERSION': 'v1', # this should always point to latest stable api - 'ALLOWED_VERSIONS': ('v1', ), - 'DEFAULT_PAGINATION_CLASS': 'rest_framework.pagination.LimitOffsetPagination', - 'PAGE_SIZE': 10, - 'DATETIME_FORMAT': DATETIME_FORMAT, - 'DATETIME_INPUT_FORMATS': ('iso-8601', ), - 'COERCE_DECIMAL_TO_STRING': False, # DecimalField should return floats - 'URL_FIELD_NAME': 'self' # RFC 42867 + "DEFAULT_VERSIONING_CLASS": "rest_framework.versioning.URLPathVersioning", + "DEFAULT_VERSION": "v1", # this should always point to latest stable api + "ALLOWED_VERSIONS": ("v1",), + "DEFAULT_PAGINATION_CLASS": "rest_framework.pagination.LimitOffsetPagination", + "PAGE_SIZE": 10, + "DATETIME_FORMAT": DATETIME_FORMAT, + "DATETIME_INPUT_FORMATS": ("iso-8601",), + "COERCE_DECIMAL_TO_STRING": False, # DecimalField should return floats + "URL_FIELD_NAME": "self", # RFC 42867 } # https://drf-yasg.readthedocs.io/en/stable/settings.html SWAGGER_SETTINGS = { - 'SECURITY_DEFINITIONS': { - 'token': { - 'type': 'apiKey', - 'description': - ("Tokens are automatically generated for all users. You can " - "view yours by going to your User Details view in the " - "browsable API at `/api/v1/users/me` and looking for the " - "`auth_token` key. Non-admin user accounts do not initially " - "have a password and so can not log in to the browsable API. " - "To set a password for a user (for testing purposes), an " - "admin can do that in the Sensor Configuration Portal, but " - "only the account's token should be stored and used for " - "general purpose API access. " - "Example cURL call: `curl -kLsS -H \"Authorization: Token" - " 529c30e6e04b3b546f2e073e879b75fdfa147c15\" " - "https://greyhound5.sms.internal/api/v1`"), - 'name': 'Token', - 'in': 'header' + "SECURITY_DEFINITIONS": { + "token": { + "type": "apiKey", + "description": ( + "Tokens are automatically generated for all users. 
You can " + "view yours by going to your User Details view in the " + "browsable API at `/api/v1/users/me` and looking for the " + "`auth_token` key. Non-admin user accounts do not initially " + "have a password and so can not log in to the browsable API. " + "To set a password for a user (for testing purposes), an " + "admin can do that in the Sensor Configuration Portal, but " + "only the account's token should be stored and used for " + "general purpose API access. " + 'Example cURL call: `curl -kLsS -H "Authorization: Token' + ' 529c30e6e04b3b546f2e073e879b75fdfa147c15" ' + "https://greyhound5.sms.internal/api/v1`" + ), + "name": "Token", + "in": "header", } }, - 'APIS_SORTER': 'alpha', - 'OPERATIONS_SORTER': 'method', - 'VALIDATOR_URL': None + "APIS_SORTER": "alpha", + "OPERATIONS_SORTER": "method", + "VALIDATOR_URL": None, } # Database @@ -259,25 +255,22 @@ if RUNNING_TESTS or RUNNING_DEMO: DATABASES = { - 'default': { - 'ENGINE': 'django.db.backends.sqlite3', - 'NAME': ':memory:' - } + "default": {"ENGINE": "django.db.backends.sqlite3", "NAME": ":memory:"} } else: DATABASES = { - 'default': { - 'ENGINE': 'django.db.backends.postgresql', - 'NAME': 'postgres', - 'USER': 'postgres', - 'PASSWORD': environ['POSTGRES_PASSWORD'], - 'HOST': 'db', - 'PORT': '5432', + "default": { + "ENGINE": "django.db.backends.postgresql", + "NAME": "postgres", + "USER": "postgres", + "PASSWORD": environ["POSTGRES_PASSWORD"], + "HOST": "db", + "PORT": "5432", } } if not IN_DOCKER: - DATABASES['default']['HOST'] = 'localhost' + DATABASES["default"]["HOST"] = "localhost" # Ensure only the last MAX_TASK_RESULTS results are kept per schedule entry MAX_TASK_RESULTS = 100 @@ -289,92 +282,45 @@ AUTH_PASSWORD_VALIDATORS = [ { - 'NAME': - 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator', - }, - { - 'NAME': - 'django.contrib.auth.password_validation.MinimumLengthValidator', - }, - { - 'NAME': - 'django.contrib.auth.password_validation.CommonPasswordValidator', - }, - { - 'NAME': - 'django.contrib.auth.password_validation.NumericPasswordValidator', + "NAME": "django.contrib.auth.password_validation.UserAttributeSimilarityValidator" }, + {"NAME": "django.contrib.auth.password_validation.MinimumLengthValidator"}, + {"NAME": "django.contrib.auth.password_validation.CommonPasswordValidator"}, + {"NAME": "django.contrib.auth.password_validation.NumericPasswordValidator"}, ] -AUTH_USER_MODEL = 'authentication.User' +AUTH_USER_MODEL = "authentication.User" # Internationalization # https://docs.djangoproject.com/en/1.11/topics/i18n/ -LANGUAGE_CODE = 'en-us' -TIME_ZONE = 'UTC' +LANGUAGE_CODE = "en-us" +TIME_ZONE = "UTC" USE_I18N = True USE_L10N = True USE_TZ = True -LOGLEVEL = 'DEBUG' if DEBUG else 'INFO' +LOGLEVEL = "DEBUG" if DEBUG else "INFO" LOGGING = { - 'version': 1, - 'disable_existing_loggers': False, - 'formatters': { - 'simple': { - 'format': '[%(asctime)s] [%(levelname)s] %(message)s' - }, - }, - 'filters': { - 'require_debug_true': { - '()': 'django.utils.log.RequireDebugTrue', - }, + "version": 1, + "disable_existing_loggers": False, + "formatters": {"simple": {"format": "[%(asctime)s] [%(levelname)s] %(message)s"}}, + "filters": {"require_debug_true": {"()": "django.utils.log.RequireDebugTrue"}}, + "handlers": {"console": {"class": "logging.StreamHandler", "formatter": "simple"}}, + "loggers": { + "actions": {"handlers": ["console"], "level": LOGLEVEL}, + "capabilities": {"handlers": ["console"], "level": LOGLEVEL}, + "hardware": {"handlers": ["console"], "level": LOGLEVEL}, + 
"schedule": {"handlers": ["console"], "level": LOGLEVEL}, + "scheduler": {"handlers": ["console"], "level": LOGLEVEL}, + "sensor": {"handlers": ["console"], "level": LOGLEVEL}, + "status": {"handlers": ["console"], "level": LOGLEVEL}, }, - 'handlers': { - 'console': { - 'class': 'logging.StreamHandler', - 'formatter': 'simple' - }, - }, - 'loggers': { - 'actions': { - 'handlers': ['console'], - 'level': LOGLEVEL - }, - 'capabilities': { - 'handlers': ['console'], - 'level': LOGLEVEL - }, - 'hardware': { - 'handlers': ['console'], - 'level': LOGLEVEL - }, - 'schedule': { - 'handlers': ['console'], - 'level': LOGLEVEL - }, - 'scheduler': { - 'handlers': ['console'], - 'level': LOGLEVEL - }, - 'sensor': { - 'handlers': ['console'], - 'level': LOGLEVEL - }, - 'status': { - 'handlers': ['console'], - 'level': LOGLEVEL - } - } } -SENTRY_DSN = environ.get('SENTRY_DSN') +SENTRY_DSN = environ.get("SENTRY_DSN") if SENTRY_DSN: import raven - RAVEN_CONFIG = { - 'dsn': SENTRY_DSN, - 'release': raven.fetch_git_sha(REPO_ROOT), - } + RAVEN_CONFIG = {"dsn": SENTRY_DSN, "release": raven.fetch_git_sha(REPO_ROOT)} diff --git a/src/sensor/tests/test_api_docs.py b/src/sensor/tests/test_api_docs.py index 48ac12b4..399410a0 100644 --- a/src/sensor/tests/test_api_docs.py +++ b/src/sensor/tests/test_api_docs.py @@ -17,9 +17,9 @@ def test_api_docs_up_to_date(admin_client): print("{} doesn't exist, not in src tree.".format(docs_dir)) return True - schema_url = reverse('api_schema', kwargs=V1) + '?format=openapi' + schema_url = reverse("api_schema", kwargs=V1) + "?format=openapi" response = admin_client.get(schema_url) - with open(settings.OPENAPI_FILE, 'w+') as openapi_file: + with open(settings.OPENAPI_FILE, "w+") as openapi_file: openapi_json = json.loads(response.content) json.dump(openapi_json, openapi_file, indent=4) diff --git a/src/sensor/tests/test_api_root_view.py b/src/sensor/tests/test_api_root_view.py index 668f1834..8acadd80 100644 --- a/src/sensor/tests/test_api_root_view.py +++ b/src/sensor/tests/test_api_root_view.py @@ -4,12 +4,17 @@ from .utils import validate_response, HTTPS_KWARG API_ROOT_ENDPOINTS = { - 'acquisitions', 'users', 'schedule', 'status', 'capabilities', 'results' + "acquisitions", + "users", + "schedule", + "status", + "capabilities", + "results", } def test_index(user_client): - response = user_client.get(reverse('api-root', kwargs=V1), **HTTPS_KWARG) + response = user_client.get(reverse("api-root", kwargs=V1), **HTTPS_KWARG) rjson = validate_response(response) assert rjson.keys() == API_ROOT_ENDPOINTS diff --git a/src/sensor/tests/utils.py b/src/sensor/tests/utils.py index 764a9323..cf327955 100644 --- a/src/sensor/tests/utils.py +++ b/src/sensor/tests/utils.py @@ -1,6 +1,6 @@ from rest_framework import status -HTTPS_KWARG = {'wsgi.url_scheme': 'https'} +HTTPS_KWARG = {"wsgi.url_scheme": "https"} def validate_response(response, expected_code=None): @@ -11,6 +11,6 @@ def validate_response(response, expected_code=None): else: assert actual_code == expected_code, response.data - if actual_code not in (status.HTTP_204_NO_CONTENT, ): + if actual_code not in (status.HTTP_204_NO_CONTENT,): rjson = response.json() return rjson diff --git a/src/sensor/urls.py b/src/sensor/urls.py index f5945be7..f7439df9 100644 --- a/src/sensor/urls.py +++ b/src/sensor/urls.py @@ -29,41 +29,40 @@ # Matches api/v1, api/v2, etc... 
-API_PREFIX = r'^api/(?P<version>v[0-9]+)/' -DEFAULT_API_VERSION = settings.REST_FRAMEWORK['DEFAULT_VERSION'] +API_PREFIX = r"^api/(?P<version>v[0-9]+)/" +DEFAULT_API_VERSION = settings.REST_FRAMEWORK["DEFAULT_VERSION"] api_urlpatterns = format_suffix_patterns( ( - path('', api_v1_root, name='api-root'), - path('capabilities/', include('capabilities.urls')), - path('schedule/', include('schedule.urls')), - path('status', include('status.urls')), - path('users/', include('authentication.urls')), - path('tasks/', include('tasks.urls')), - path('schema/', schema_view.with_ui('redoc', cache_timeout=0), - name='api_schema') + path("", api_v1_root, name="api-root"), + path("capabilities/", include("capabilities.urls")), + path("schedule/", include("schedule.urls")), + path("status", include("status.urls")), + path("users/", include("authentication.urls")), + path("tasks/", include("tasks.urls")), + path( + "schema/", schema_view.with_ui("redoc", cache_timeout=0), name="api_schema" + ), ) ) # Modify admin portal before including url # Text to put in each page's
<title>
(and above login form). -admin.site.site_header = 'SCOS Sensor Configuration Portal' +admin.site.site_header = "SCOS Sensor Configuration Portal" # Text to put at the top of the admin index page. -admin.site.index_title = 'SCOS Sensor Configuration Portal' +admin.site.index_title = "SCOS Sensor Configuration Portal" urlpatterns = ( - path('', RedirectView.as_view(url='/api/')), - path('admin/', admin.site.urls), - path('api/', - RedirectView.as_view(url='/api/{}/'.format(DEFAULT_API_VERSION))), + path("", RedirectView.as_view(url="/api/")), + path("admin/", admin.site.urls), + path("api/", RedirectView.as_view(url="/api/{}/".format(DEFAULT_API_VERSION))), re_path(API_PREFIX, include(api_urlpatterns)), - path('api/auth/', include('rest_framework.urls')) + path("api/auth/", include("rest_framework.urls")), ) if settings.DEBUG: import debug_toolbar - urlpatterns = [ - path('__debug__/', include(debug_toolbar.urls)), - ] + list(urlpatterns) + + urlpatterns = [path("__debug__/", include(debug_toolbar.urls))] + list(urlpatterns) diff --git a/src/sensor/utils.py b/src/sensor/utils.py index c63ff809..4f663bdf 100644 --- a/src/sensor/utils.py +++ b/src/sensor/utils.py @@ -61,7 +61,7 @@ def get_timestamp_from_datetime(dt): def get_datetime_str_now(): - return datetime.isoformat(datetime.utcnow()) + 'Z' + return datetime.isoformat(datetime.utcnow()) + "Z" def parse_datetime_str(d): diff --git a/src/sensor/views.py b/src/sensor/views.py index e9d84dbe..5f1ce097 100644 --- a/src/sensor/views.py +++ b/src/sensor/views.py @@ -11,22 +11,22 @@ from . import settings -@api_view(('GET', )) +@api_view(("GET",)) def api_v1_root(request, version, format=None): """SCOS sensor API root.""" reverse_ = partial(reverse, request=request, format=format) list_endpoints = { - 'capabilities': reverse_('capabilities'), - 'schedule': reverse_('schedule-list'), - 'status': reverse_('status'), - 'tasks': reverse_('task-root'), - 'users': reverse_('user-list') + "capabilities": reverse_("capabilities"), + "schedule": reverse_("schedule-list"), + "status": reverse_("status"), + "tasks": reverse_("task-root"), + "users": reverse_("user-list"), } # See note in settings:INTERNAL_IPS about why we do this here - nginx_container_ip = request.META['REMOTE_ADDR'] + nginx_container_ip = request.META["REMOTE_ADDR"] nginx_ip_set = nginx_container_ip in settings.INTERNAL_IPS - if (settings.IN_DOCKER and settings.DEBUG and not nginx_ip_set): + if settings.IN_DOCKER and settings.DEBUG and not nginx_ip_set: settings.INTERNAL_IPS.append(nginx_container_ip) return Response(list_endpoints) @@ -41,5 +41,5 @@ def api_v1_root(request, version, format=None): license=openapi.License(name="NTIA/ITS", url=settings.LICENSE_URL), ), public=False, - permission_classes=(permissions.IsAuthenticated, ), + permission_classes=(permissions.IsAuthenticated,), ) diff --git a/src/status/admin.py b/src/status/admin.py index 7dc7b46c..bc258465 100644 --- a/src/status/admin.py +++ b/src/status/admin.py @@ -7,4 +7,4 @@ @admin.register(Location) class LocationAdmin(admin.ModelAdmin): - list_display = ('description', 'longitude', 'latitude', 'active') + list_display = ("description", "longitude", "latitude", "active") diff --git a/src/status/apps.py b/src/status/apps.py index 9b47e3f9..d207502e 100644 --- a/src/status/apps.py +++ b/src/status/apps.py @@ -2,4 +2,4 @@ class StatusConfig(AppConfig): - name = 'status' + name = "status" diff --git a/src/status/migrations/0001_initial.py b/src/status/migrations/0001_initial.py index 13bd1424..bfb7b819 100644 --- 
a/src/status/migrations/0001_initial.py +++ b/src/status/migrations/0001_initial.py @@ -7,20 +7,63 @@ class Migration(migrations.Migration): initial = True - dependencies = [ - ] + dependencies = [] operations = [ migrations.CreateModel( - name='Location', + name="Location", fields=[ - ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), - ('gps', models.BooleanField(help_text='Identifies this as autogenerated location information')), - ('modified', models.DateTimeField(auto_now=True, help_text='Time of last location update.')), - ('active', models.BooleanField(default=True, help_text='Display this location on /status.')), - ('description', models.CharField(blank=True, help_text='Freeform text description of this location.', max_length=1024)), - ('latitude', models.DecimalField(decimal_places=6, help_text='Latitude of the sensor in decimal degrees.', max_digits=9)), - ('longitude', models.DecimalField(decimal_places=6, help_text='Longitude of the sensor in decimal degrees.', max_digits=9)), + ( + "id", + models.AutoField( + auto_created=True, + primary_key=True, + serialize=False, + verbose_name="ID", + ), + ), + ( + "gps", + models.BooleanField( + help_text="Identifies this as autogenerated location information" + ), + ), + ( + "modified", + models.DateTimeField( + auto_now=True, help_text="Time of last location update." + ), + ), + ( + "active", + models.BooleanField( + default=True, help_text="Display this location on /status." + ), + ), + ( + "description", + models.CharField( + blank=True, + help_text="Freeform text description of this location.", + max_length=1024, + ), + ), + ( + "latitude", + models.DecimalField( + decimal_places=6, + help_text="Latitude of the sensor in decimal degrees.", + max_digits=9, + ), + ), + ( + "longitude", + models.DecimalField( + decimal_places=6, + help_text="Longitude of the sensor in decimal degrees.", + max_digits=9, + ), + ), ], - ), + ) ] diff --git a/src/status/models.py b/src/status/models.py index c4effb6f..cb6a7761 100644 --- a/src/status/models.py +++ b/src/status/models.py @@ -10,25 +10,31 @@ class Location(models.Model): Primarily used for mapping and geo-filtering. """ + gps = models.BooleanField( help_text="Identifies this as autogenerated location information" ) modified = models.DateTimeField( - auto_now=True, help_text="Time of last location update.") + auto_now=True, help_text="Time of last location update." + ) active = models.BooleanField( - default=True, help_text="Display this location on /status.") + default=True, help_text="Display this location on /status."
+ ) description = models.CharField( max_length=MAX_DESCRIPTION_LEN, blank=True, - help_text="Freeform text description of this location.") + help_text="Freeform text description of this location.", + ) latitude = models.DecimalField( max_digits=9, decimal_places=6, - help_text="Latitude of the sensor in decimal degrees.") + help_text="Latitude of the sensor in decimal degrees.", + ) longitude = models.DecimalField( max_digits=9, decimal_places=6, - help_text="Longitude of the sensor in decimal degrees.") + help_text="Longitude of the sensor in decimal degrees.", + ) def save(self, *args, **kwargs): """Deselect active on all other Locations if self is active.""" diff --git a/src/status/serializers.py b/src/status/serializers.py index 5b9ee0b6..b814e3be 100644 --- a/src/status/serializers.py +++ b/src/status/serializers.py @@ -6,4 +6,4 @@ class LocationSerializer(serializers.ModelSerializer): class Meta: model = Location - exclude = ('id', 'active') + exclude = ("id", "active") diff --git a/src/status/urls.py b/src/status/urls.py index e47b23e8..3c51f935 100644 --- a/src/status/urls.py +++ b/src/status/urls.py @@ -2,6 +2,4 @@ from .views import status -urlpatterns = ( - path('', status, name='status'), -) +urlpatterns = (path("", status, name="status"),) diff --git a/src/status/views.py b/src/status/views.py index 4ca7a1e7..354525b0 100644 --- a/src/status/views.py +++ b/src/status/views.py @@ -25,8 +25,10 @@ def get_location(): @api_view() def status(request, version, format=None): """The status overview of the sensor.""" - return Response({ - 'scheduler': scheduler.thread.status, - 'location': get_location(), - 'system_time': utils.get_datetime_str_now() - }) + return Response( + { + "scheduler": scheduler.thread.status, + "location": get_location(), + "system_time": utils.get_datetime_str_now(), + } + ) diff --git a/src/tasks/apps.py b/src/tasks/apps.py index 20547224..5aadae04 100644 --- a/src/tasks/apps.py +++ b/src/tasks/apps.py @@ -2,4 +2,4 @@ class TasksConfig(AppConfig): - name = 'tasks' + name = "tasks" diff --git a/src/tasks/migrations/0001_initial.py b/src/tasks/migrations/0001_initial.py index e9e4f0bc..215abb3d 100644 --- a/src/tasks/migrations/0001_initial.py +++ b/src/tasks/migrations/0001_initial.py @@ -11,41 +11,118 @@ class Migration(migrations.Migration): initial = True - dependencies = [ - ('schedule', '0001_initial'), - ] + dependencies = [("schedule", "0001_initial")] operations = [ migrations.CreateModel( - name='TaskResult', + name="TaskResult", fields=[ - ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), - ('task_id', models.IntegerField(help_text='The id of the task relative to the result')), - ('started', models.DateTimeField(default=datetime.datetime(2019, 5, 16, 23, 0, tzinfo=utc), help_text='The time the task started')), - ('finished', models.DateTimeField(default=datetime.datetime(2019, 5, 16, 23, 0, tzinfo=utc), help_text='The time the task finished')), - ('duration', models.DurationField(default=datetime.timedelta(0), help_text='Task duration in seconds')), - ('status', models.CharField(choices=[(1, 'success'), (2, 'failure'), (3, 'in-progress')], default='in-progress', help_text='"success" or "failure"', max_length=11)), - ('detail', models.CharField(blank=True, help_text='Arbitrary detail string', max_length=512)), - ('schedule_entry', models.ForeignKey(help_text='The schedule entry relative to the result', on_delete=django.db.models.deletion.PROTECT, related_name='task_results',
to='schedule.ScheduleEntry')), + ( + "id", + models.AutoField( + auto_created=True, + primary_key=True, + serialize=False, + verbose_name="ID", + ), + ), + ( + "task_id", + models.IntegerField( + help_text="The id of the task relative to the result" + ), + ), + ( + "started", + models.DateTimeField( + default=datetime.datetime(2019, 5, 16, 23, 0, tzinfo=utc), + help_text="The time the task started", + ), + ), + ( + "finished", + models.DateTimeField( + default=datetime.datetime(2019, 5, 16, 23, 0, tzinfo=utc), + help_text="The time the task finished", + ), + ), + ( + "duration", + models.DurationField( + default=datetime.timedelta(0), + help_text="Task duration in seconds", + ), + ), + ( + "status", + models.CharField( + choices=[(1, "success"), (2, "failure"), (3, "in-progress")], + default="in-progress", + help_text='"success" or "failure"', + max_length=11, + ), + ), + ( + "detail", + models.CharField( + blank=True, help_text="Arbitrary detail string", max_length=512 + ), + ), + ( + "schedule_entry", + models.ForeignKey( + help_text="The schedule entry relative to the result", + on_delete=django.db.models.deletion.PROTECT, + related_name="task_results", + to="schedule.ScheduleEntry", + ), + ), ], options={ - 'ordering': ('task_id',), - 'unique_together': {('schedule_entry', 'task_id')}, + "ordering": ("task_id",), + "unique_together": {("schedule_entry", "task_id")}, }, ), migrations.CreateModel( - name='Acquisition', + name="Acquisition", fields=[ - ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), - ('recording_id', models.IntegerField(default=0, help_text='The id of the recording relative to the task')), - ('metadata', jsonfield.fields.JSONField(help_text='The sigmf meta data for the acquisition')), - ('data', models.BinaryField(null=True)), - ('task_result', models.ForeignKey(help_text='The task_result relative to the acquisition', on_delete=django.db.models.deletion.CASCADE, related_name='data', to='tasks.TaskResult')), + ( + "id", + models.AutoField( + auto_created=True, + primary_key=True, + serialize=False, + verbose_name="ID", + ), + ), + ( + "recording_id", + models.IntegerField( + default=0, + help_text="The id of the recording relative to the task", + ), + ), + ( + "metadata", + jsonfield.fields.JSONField( + help_text="The sigmf meta data for the acquisition" + ), + ), + ("data", models.BinaryField(null=True)), + ( + "task_result", + models.ForeignKey( + help_text="The task_result relative to the acquisition", + on_delete=django.db.models.deletion.CASCADE, + related_name="data", + to="tasks.TaskResult", + ), + ), ], options={ - 'db_table': 'acquisitions', - 'ordering': ('task_result', 'recording_id'), - 'unique_together': {('task_result', 'recording_id')}, + "db_table": "acquisitions", + "ordering": ("task_result", "recording_id"), + "unique_together": {("task_result", "recording_id")}, }, ), ] diff --git a/src/tasks/models/acquisition.py b/src/tasks/models/acquisition.py index ede79695..6382ccd0 100644 --- a/src/tasks/models/acquisition.py +++ b/src/tasks/models/acquisition.py @@ -15,25 +15,27 @@ class Acquisition(models.Model): entry, task id, and recording id. 
""" + task_result = models.ForeignKey( TaskResult, on_delete=models.CASCADE, - related_name='data', - help_text="The task_result relative to the acquisition") + related_name="data", + help_text="The task_result relative to the acquisition", + ) recording_id = models.IntegerField( - default=0, - help_text="The id of the recording relative to the task") + default=0, help_text="The id of the recording relative to the task" + ) metadata = JSONField(help_text="The sigmf meta data for the acquisition") data = models.BinaryField(help_text="", null=True) class Meta: - db_table = 'acquisitions' - ordering = ('task_result', 'recording_id') - unique_together = (('task_result', 'recording_id'), ) + db_table = "acquisitions" + ordering = ("task_result", "recording_id") + unique_together = (("task_result", "recording_id"),) def __str__(self): - return '{}/{}:{}'.format( + return "{}/{}:{}".format( self.task_result.schedule_entry.name, self.task_result.task_id, - self.recording_id + self.recording_id, ) diff --git a/src/tasks/models/task.py b/src/tasks/models/task.py index 4231bd5f..b7bb5d36 100644 --- a/src/tasks/models/task.py +++ b/src/tasks/models/task.py @@ -8,8 +8,8 @@ import actions -attributes = ('time', 'priority', 'action', 'schedule_entry_name', 'task_id') -TaskTuple = namedtuple('Task', attributes) +attributes = ("time", "priority", "action", "schedule_entry_name", "task_id") +TaskTuple = namedtuple("Task", attributes) class Task(TaskTuple): diff --git a/src/tasks/models/task_result.py b/src/tasks/models/task_result.py index 3b18dd1f..50ca093b 100644 --- a/src/tasks/models/task_result.py +++ b/src/tasks/models/task_result.py @@ -13,44 +13,47 @@ class TaskResult(models.Model): """Map between schedule entries and their task results.""" + SUCCESS = 1 FAILURE = 2 IN_PROGRESS = 3 RESULT_CHOICES = ( - (SUCCESS, 'success'), - (FAILURE, 'failure'), - (IN_PROGRESS, 'in-progress') + (SUCCESS, "success"), + (FAILURE, "failure"), + (IN_PROGRESS, "in-progress"), ) schedule_entry = models.ForeignKey( ScheduleEntry, on_delete=models.PROTECT, - related_name='task_results', - help_text="The schedule entry relative to the result") - task_id = models.IntegerField( - help_text="The id of the task relative to the result") + related_name="task_results", + help_text="The schedule entry relative to the result", + ) + task_id = models.IntegerField(help_text="The id of the task relative to the result") started = models.DateTimeField( default=datetime.datetime(2019, 5, 16, 23, tzinfo=UTC), - help_text="The time the task started") + help_text="The time the task started", + ) finished = models.DateTimeField( default=datetime.datetime(2019, 5, 16, 23, tzinfo=UTC), - help_text="The time the task finished") + help_text="The time the task finished", + ) duration = models.DurationField( - default=timezone.ZERO, - help_text="Task duration in seconds") + default=timezone.ZERO, help_text="Task duration in seconds" + ) status = models.CharField( - default='in-progress', + default="in-progress", max_length=11, help_text='"success" or "failure"', - choices=RESULT_CHOICES) + choices=RESULT_CHOICES, + ) detail = models.CharField( - max_length=MAX_DETAIL_LEN, - blank=True, - help_text="Arbitrary detail string") + max_length=MAX_DETAIL_LEN, blank=True, help_text="Arbitrary detail string" + ) class Meta: - ordering = ('task_id', ) - unique_together = (('schedule_entry', 'task_id'), ) + ordering = ("task_id",) + unique_together = (("schedule_entry", "task_id"),) def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) @@ 
-61,7 +64,7 @@ def __init__(self, *args, **kwargs): def save(self): """Limit number of results to MAX_TASK_RESULTS by removing oldest.""" all_results = TaskResult.objects.all() - filter = {'schedule_entry__name': self.schedule_entry.name} + filter = {"schedule_entry__name": self.schedule_entry.name} same_entry_results = all_results.filter(**filter) if same_entry_results.count() >= self.max_results: same_entry_results[0].delete() diff --git a/src/tasks/serializers/acquisition.py b/src/tasks/serializers/acquisition.py index 6ad7d627..c4016e49 100644 --- a/src/tasks/serializers/acquisition.py +++ b/src/tasks/serializers/acquisition.py @@ -9,8 +9,8 @@ class AcquisitionHyperlinkedRelatedField(serializers.HyperlinkedRelatedField): # django-rest-framework.org/api-guide/relations/#custom-hyperlinked-fields def get_url(self, obj, view_name, request, format): kws = { - 'schedule_entry_name': obj.task_result.schedule_entry.name, - 'task_id': obj.task_result.task_id + "schedule_entry_name": obj.task_result.schedule_entry.name, + "task_id": obj.task_result.task_id, } kws.update(V1) url = reverse(view_name, kwargs=kws, request=request, format=format) @@ -19,20 +19,16 @@ class AcquisitionSerializer(serializers.ModelSerializer): archive = AcquisitionHyperlinkedRelatedField( - view_name='task-result-archive', + view_name="task-result-archive", read_only=True, help_text="The url to download a SigMF archive of this acquisition", - source='*' # pass whole object + source="*",  # pass whole object ) - metadata = serializers.DictField( - help_text="The SigMF metadata for the acquisition") + metadata = serializers.DictField(help_text="The SigMF metadata for the acquisition") class Meta: model = Acquisition - fields = ('recording_id', 'archive', 'metadata') + fields = ("recording_id", "archive", "metadata") extra_kwargs = { - 'schedule_entry': { - 'view_name': 'schedule-detail', - 'lookup_field': 'name' - } + "schedule_entry": {"view_name": "schedule-detail", "lookup_field": "name"} } diff --git a/src/tasks/serializers/task.py b/src/tasks/serializers/task.py index 53f8b671..10ff51d2 100644 --- a/src/tasks/serializers/task.py +++ b/src/tasks/serializers/task.py @@ -11,11 +11,11 @@ class TaskSerializer(serializers.Serializer): action = serializers.CharField(max_length=actions.MAX_LENGTH) priority = serializers.IntegerField() time = DateTimeFromTimestampField( - read_only=True, - help_text="UTC time (ISO 8601) this task is scheduled for") + read_only=True, help_text="UTC time (ISO 8601) this task is scheduled for" + ) def get_schedule_entry(self, obj): - request = self.context['request'] - kws = {'pk': obj.schedule_entry_name} + request = self.context["request"] + kws = {"pk": obj.schedule_entry_name} kws.update(V1) - return reverse('schedule-detail', kwargs=kws, request=request) + return reverse("schedule-detail", kwargs=kws, request=request) diff --git a/src/tasks/serializers/task_result.py b/src/tasks/serializers/task_result.py index c004e06f..c1e7781a 100644 --- a/src/tasks/serializers/task_result.py +++ b/src/tasks/serializers/task_result.py @@ -11,10 +11,7 @@ class TaskResultHyperlinkedRelatedField(serializers.HyperlinkedRelatedField): # django-rest-framework.org/api-guide/relations/#custom-hyperlinked-fields def get_url(self, obj, view_name, request, format): - kws = { - 'schedule_entry_name': obj.schedule_entry.name, - 'task_id': obj.task_id - } + kws = {"schedule_entry_name": obj.schedule_entry.name, "task_id": obj.task_id} kws.update(V1) url =
reverse(view_name, kwargs=kws, request=request, format=format) return url @@ -22,20 +19,23 @@ def get_url(self, obj, view_name, request, format): class TaskResultsOverviewSerializer(serializers.HyperlinkedModelSerializer): task_results = serializers.SerializerMethodField( - help_text="The link to the task results") + help_text="The link to the task results" + ) task_results_available = serializers.SerializerMethodField( - help_text="The number of available results") + help_text="The number of available results" + ) schedule_entry = serializers.SerializerMethodField( - help_text="The related schedule entry for the result") + help_text="The related schedule entry for the result" + ) class Meta: model = ScheduleEntry - fields = ('task_results', 'task_results_available', 'schedule_entry') + fields = ("task_results", "task_results_available", "schedule_entry") def get_task_results(self, obj): - request = self.context['request'] - route = 'task-result-list' - kws = {'schedule_entry_name': obj.name} + request = self.context["request"] + route = "task-result-list" + kws = {"schedule_entry_name": obj.name} kws.update(V1) url = reverse(route, kwargs=kws, request=request) return url @@ -44,9 +44,9 @@ def get_task_results_available(self, obj): return obj.task_results.count() def get_schedule_entry(self, obj): - request = self.context['request'] - route = 'schedule-detail' - kws = {'pk': obj.name} + request = self.context["request"] + route = "schedule-detail" + kws = {"pk": obj.name} kws.update(V1) url = reverse(route, kwargs=kws, request=request) return url @@ -54,33 +54,34 @@ def get_schedule_entry(self, obj): class TaskResultSerializer(serializers.HyperlinkedModelSerializer): self = TaskResultHyperlinkedRelatedField( - view_name='task-result-detail', + view_name="task-result-detail", read_only=True, help_text="The url of the result", - source='*' # pass whole object + source="*", # pass whole object ) schedule_entry = serializers.SerializerMethodField( - help_text="The url of the parent schedule entry") + help_text="The url of the parent schedule entry" + ) data = AcquisitionSerializer(many=True) class Meta: model = TaskResult fields = ( - 'self', - 'schedule_entry', - 'task_id', - 'status', - 'detail', - 'started', - 'finished', - 'duration', - 'data' + "self", + "schedule_entry", + "task_id", + "status", + "detail", + "started", + "finished", + "duration", + "data", ) def get_schedule_entry(self, obj): - request = self.context['request'] - route = 'schedule-detail' - kws = {'pk': obj.schedule_entry.name} + request = self.context["request"] + route = "schedule-detail" + kws = {"pk": obj.schedule_entry.name} kws.update(V1) url = reverse(route, kwargs=kws, request=request) diff --git a/src/tasks/tests/test_detail_view.py b/src/tasks/tests/test_detail_view.py index 50699c95..00686650 100644 --- a/src/tasks/tests/test_detail_view.py +++ b/src/tasks/tests/test_detail_view.py @@ -2,7 +2,10 @@ from sensor.tests.utils import validate_response, HTTPS_KWARG from tasks.tests.utils import ( - create_task_results, reverse_result_detail, simulate_acquisitions) + create_task_results, + reverse_result_detail, + simulate_acquisitions, +) def test_can_view_own_result_details(user_client): @@ -21,8 +24,7 @@ def test_can_view_others_result_details(user_client, alt_user_client): validate_response(response, status.HTTP_200_OK) -def test_cannot_view_private_result_details(user_client, admin_client, - test_scheduler): +def test_cannot_view_private_result_details(user_client, admin_client, test_scheduler): """A user 
should not be able to view the result of a private task.""" entry_name = simulate_acquisitions(admin_client, is_private=True) url = reverse_result_detail(entry_name, 1) diff --git a/src/tasks/tests/test_list_view.py b/src/tasks/tests/test_list_view.py index 876adafd..79747802 100644 --- a/src/tasks/tests/test_list_view.py +++ b/src/tasks/tests/test_list_view.py @@ -7,13 +7,13 @@ get_result_list, reverse_result_detail, reverse_result_list, - simulate_acquisitions + simulate_acquisitions, ) def test_non_existent_entry(user_client): with pytest.raises(AssertionError): - get_result_list(user_client, 'doesntexist') + get_result_list(user_client, "doesntexist") @pytest.mark.django_db @@ -22,8 +22,8 @@ def test_single_result_response(user_client): result, = get_result_list(user_client, entry_name) task_id = 1 expected_url = reverse_result_detail(entry_name, task_id) - assert result['self'] == expected_url - assert result['task_id'] == task_id + assert result["self"] == expected_url + assert result["task_id"] == task_id @pytest.mark.django_db @@ -34,12 +34,13 @@ def test_multiple_result_response(user_client, test_scheduler): for i, acq in enumerate(results, start=1): expected_url = reverse_result_detail(entry_name, i) - assert acq['self'] == expected_url - assert acq['task_id'] == i + assert acq["self"] == expected_url + assert acq["task_id"] == i -def test_private_entry_results_list_is_private(admin_client, user_client, - test_scheduler): +def test_private_entry_results_list_is_private( + admin_client, user_client, test_scheduler +): entry_name = simulate_acquisitions(admin_client, is_private=True) url = reverse_result_list(entry_name) response = user_client.get(url, **HTTPS_KWARG) @@ -49,7 +50,7 @@ def test_private_entry_results_list_is_private(admin_client, user_client, @pytest.mark.django_db def test_delete_list(user_client): # If result doesn't exist, expect 405 - url = reverse_result_list('doesntexist') + url = reverse_result_list("doesntexist") response = user_client.delete(url, **HTTPS_KWARG) validate_response(response, status.HTTP_405_METHOD_NOT_ALLOWED) diff --git a/src/tasks/tests/test_overview_view.py b/src/tasks/tests/test_overview_view.py index 8b62dc34..d8ef6721 100644 --- a/src/tasks/tests/test_overview_view.py +++ b/src/tasks/tests/test_overview_view.py @@ -2,8 +2,12 @@ from sensor.tests.utils import validate_response, HTTPS_KWARG from tasks.tests.utils import ( - EMPTY_RESULTS_RESPONSE, create_task_results, reverse_results_overview, - get_results_overview, simulate_acquisitions) + EMPTY_RESULTS_RESPONSE, + create_task_results, + reverse_results_overview, + get_results_overview, + simulate_acquisitions, +) def test_user_empty_overview_response(user_client): @@ -19,29 +23,30 @@ def test_admin_empty_overview_response(admin_client): def test_user_get_overview(user_client): create_task_results(2, user_client) overview, = get_results_overview(user_client) - assert overview['results_available'] == 2 - assert overview['results'] # is non-empty string - assert overview['schedule_entry'] # is non-empty string + assert overview["results_available"] == 2 + assert overview["results"] # is non-empty string + assert overview["schedule_entry"] # is non-empty string def test_admin_get_overview(admin_client): create_task_results(2, admin_client) overview, = get_results_overview(admin_client) - assert overview['results_available'] == 2 - assert overview['results'] # is non-empty string - assert overview['schedule_entry'] # is non-empty string + assert overview["results_available"] == 2 + assert 
overview["results"] # is non-empty string + assert overview["schedule_entry"] # is non-empty string -def test_overview_for_private_entry_is_private(admin_client, user_client, - test_scheduler): +def test_overview_for_private_entry_is_private( + admin_client, user_client, test_scheduler +): simulate_acquisitions(admin_client, is_private=True) overview = get_results_overview(user_client) assert overview == [] overview, = get_results_overview(admin_client) - assert overview['results_available'] == 1 - assert overview['results'] # is non-empty string - assert overview['schedule_entry'] # is non-empty string + assert overview["results_available"] == 1 + assert overview["results"] # is non-empty string + assert overview["schedule_entry"] # is non-empty string def test_user_delete_overview_not_allowed(user_client): diff --git a/src/tasks/tests/test_serializers.py b/src/tasks/tests/test_serializers.py index 29af1b9c..50e73120 100644 --- a/src/tasks/tests/test_serializers.py +++ b/src/tasks/tests/test_serializers.py @@ -1,8 +1,7 @@ import pytest from tasks.models import TaskResult -from tasks.serializers import (TaskResultSerializer, - TaskResultsOverviewSerializer) +from tasks.serializers import TaskResultSerializer, TaskResultsOverviewSerializer from tasks.tests.utils import create_task_results @@ -10,14 +9,14 @@ def test_task_result_serializer(user_client): create_task_results(1, user_client) tr = TaskResult.objects.get() - context = {'request': None} + context = {"request": None} r = TaskResultSerializer(tr, context=context) - assert r.data['task_id'] == 1 - assert r.data['self'] == '/api/v1/results/test/1/' - assert r.data['schedule_entry'] == '/api/v1/schedule/test/' - assert r.data['detail'] == '' - assert r.data['result'] == 'success' - assert r.data['duration'] == '00:00:00.000001' + assert r.data["task_id"] == 1 + assert r.data["self"] == "/api/v1/results/test/1/" + assert r.data["schedule_entry"] == "/api/v1/schedule/test/" + assert r.data["detail"] == "" + assert r.data["result"] == "success" + assert r.data["duration"] == "00:00:00.000001" # FIXME: having problems reversing return-detail url, probably sth to do with @@ -29,7 +28,7 @@ def test_task_result_overview_serializer(user_client, rf): create_task_results(1, user_client) entries = ScheduleEntry.objects.all() - context = {'request': None} + context = {"request": None} r = TaskResultsOverviewSerializer(entries, many=True, context=context) assert r.data # TODO: complete assertions diff --git a/src/tasks/tests/utils.py b/src/tasks/tests/utils.py index 67bc327d..195f10da 100644 --- a/src/tasks/tests/utils.py +++ b/src/tasks/tests/utils.py @@ -20,19 +20,19 @@ EMPTY_ACQUISITIONS_RESPONSE = [] SINGLE_ACQUISITION = { - 'name': 'test_acq', - 'start': None, - 'stop': None, - 'interval': None, - 'action': 'mock_acquire' + "name": "test_acq", + "start": None, + "stop": None, + "interval": None, + "action": "mock_acquire", } MULTIPLE_ACQUISITIONS = { - 'name': 'test_multiple_acq', - 'start': None, - 'relative_stop': 5, - 'interval': 1, - 'action': 'mock_acquire' + "name": "test_multiple_acq", + "start": None, + "relative_stop": 5, + "interval": 1, + "action": "mock_acquire", } @@ -43,17 +43,17 @@ def simulate_acquisitions(client, n=1, is_private=False, name=None): schedule_entry = SINGLE_ACQUISITION.copy() else: schedule_entry = MULTIPLE_ACQUISITIONS.copy() - schedule_entry['relative_stop'] = n + 1 + schedule_entry["relative_stop"] = n + 1 - schedule_entry['is_private'] = is_private + schedule_entry["is_private"] = is_private if name is not 
None: - schedule_entry['name'] = name + schedule_entry["name"] = name entry = post_schedule(client, schedule_entry) simulate_scheduler_run(n) - return entry['name'] + return entry["name"] def create_task_results(n, user_client, entry_name=None): @@ -63,10 +63,10 @@ def create_task_results(n, user_client, entry_name=None): except Exception: test_entry = TEST_SCHEDULE_ENTRY if entry_name is not None: - test_entry['name'] = entry_name + test_entry["name"] = entry_name rjson = post_schedule(user_client, test_entry) - entry_name = rjson['name'] + entry_name = rjson["name"] entry = ScheduleEntry.objects.get(name=entry_name) for i in range(n): @@ -77,8 +77,9 @@ def create_task_results(n, user_client, entry_name=None): started=started, finished=started + ONE_MICROSECOND, duration=ONE_MICROSECOND, - result='success', - detail='') + result="success", + detail="", + ) tr.max_results = TEST_MAX_TASK_RESULTS tr.save() @@ -87,39 +88,39 @@ def create_task_results(n, user_client, entry_name=None): def reverse_results_overview(): rf = RequestFactory() - request = rf.get('/results/', **HTTPS_KWARG) - return reverse('results-overview', kwargs=V1, request=request) + request = rf.get("/results/", **HTTPS_KWARG) + return reverse("results-overview", kwargs=V1, request=request) def reverse_result_list(schedule_entry_name): rf = RequestFactory() - request = rf.get('/results/' + schedule_entry_name, **HTTPS_KWARG) - kws = {'schedule_entry_name': schedule_entry_name} + request = rf.get("/results/" + schedule_entry_name, **HTTPS_KWARG) + kws = {"schedule_entry_name": schedule_entry_name} kws.update(V1) - return reverse('result-list', kwargs=kws, request=request) + return reverse("result-list", kwargs=kws, request=request) def reverse_result_detail(schedule_entry_name, task_id): rf = RequestFactory() - url = '/results/' + schedule_entry_name + '/' + str(task_id) + url = "/results/" + schedule_entry_name + "/" + str(task_id) request = rf.get(url, **HTTPS_KWARG) - kws = {'schedule_entry_name': schedule_entry_name, 'task_id': task_id} + kws = {"schedule_entry_name": schedule_entry_name, "task_id": task_id} kws.update(V1) - return reverse('result-detail', kwargs=kws, request=request) + return reverse("result-detail", kwargs=kws, request=request) def get_results_overview(client): url = reverse_results_overview() response = client.get(url, **HTTPS_KWARG) rjson = validate_response(response, status.HTTP_200_OK) - return rjson['results'] + return rjson["results"] def get_result_list(client, schedule_entry_name): url = reverse_result_list(schedule_entry_name) response = client.get(url, **HTTPS_KWARG) rjson = validate_response(response, status.HTTP_200_OK) - return rjson['results'] + return rjson["results"] def get_result_detail(client, schedule_entry_name, task_id): diff --git a/src/tasks/urls.py b/src/tasks/urls.py index 4c2aebbd..ca4668dc 100644 --- a/src/tasks/urls.py +++ b/src/tasks/urls.py @@ -1,38 +1,42 @@ from django.urls import path from .views import ( - TaskResultsOverviewViewSet, TaskResultListViewSet, - TaskResultInstanceViewSet, task_root, upcoming_tasks) + TaskResultsOverviewViewSet, + TaskResultListViewSet, + TaskResultInstanceViewSet, + task_root, + upcoming_tasks, +) urlpatterns = ( - path('', view=task_root, name='task-root'), - path('upcoming/', view=upcoming_tasks, name='upcoming-tasks'), - path('completed/', - view=TaskResultsOverviewViewSet.as_view({ - 'get': 'list' - }), - name='task-results-overview'), - path('completed/<slug:schedule_entry_name>/', - view=TaskResultListViewSet.as_view({ - 'get': 'list', - 'delete': 'destroy_all'
- }), - name='task-result-list'), - path('completed//archive/', - view=TaskResultListViewSet.as_view({ - 'get': 'archive', - }), - name='task-result-list-archive'), - path('completed///', - view=TaskResultInstanceViewSet.as_view({ - 'get': 'retrieve', - 'delete': 'destroy' - }), - name='task-result-detail'), - path('completed///archive', - view=TaskResultInstanceViewSet.as_view({ - 'get': 'archive', - }), - name='task-result-archive') + path("", view=task_root, name="task-root"), + path("upcoming/", view=upcoming_tasks, name="upcoming-tasks"), + path( + "completed/", + view=TaskResultsOverviewViewSet.as_view({"get": "list"}), + name="task-results-overview", + ), + path( + "completed//", + view=TaskResultListViewSet.as_view({"get": "list", "delete": "destroy_all"}), + name="task-result-list", + ), + path( + "completed//archive/", + view=TaskResultListViewSet.as_view({"get": "archive"}), + name="task-result-list-archive", + ), + path( + "completed///", + view=TaskResultInstanceViewSet.as_view( + {"get": "retrieve", "delete": "destroy"} + ), + name="task-result-detail", + ), + path( + "completed///archive", + view=TaskResultInstanceViewSet.as_view({"get": "archive"}), + name="task-result-archive", + ), ) diff --git a/src/tasks/views.py b/src/tasks/views.py index 60125b10..b49fa500 100644 --- a/src/tasks/views.py +++ b/src/tasks/views.py @@ -6,8 +6,7 @@ from rest_framework import filters, status from rest_framework.decorators import action from rest_framework.generics import get_object_or_404 -from rest_framework.mixins import ( - ListModelMixin, RetrieveModelMixin, DestroyModelMixin) +from rest_framework.mixins import ListModelMixin, RetrieveModelMixin, DestroyModelMixin from rest_framework.decorators import api_view from rest_framework.response import Response from rest_framework.reverse import reverse @@ -24,8 +23,7 @@ from .models.task_result import TaskResult from .permissions import IsAdminOrOwnerOrReadOnly from .serializers.task import TaskSerializer -from .serializers.task_result import ( - TaskResultsOverviewSerializer, TaskResultSerializer) +from .serializers.task_result import TaskResultsOverviewSerializer, TaskResultSerializer logger = logging.getLogger(__name__) @@ -36,8 +34,8 @@ def task_root(request, version, format=None): """Provides links to upcoming and completed tasks""" reverse_ = partial(reverse, request=request, format=format) task_endpoints = { - 'upcoming': reverse_('upcoming-tasks'), - 'completed': reverse_('task-results-overview') + "upcoming": reverse_("upcoming-tasks"), + "completed": reverse_("task-results-overview"), } return Response(task_endpoints) @@ -46,8 +44,8 @@ def task_root(request, version, format=None): @api_view() def upcoming_tasks(request, version, format=None): """Returns a snapshot of upcoming tasks.""" - context = {'request': request} - taskq = scheduler.thread.task_queue.to_list()[:settings.MAX_TASK_QUEUE] + context = {"request": request} + taskq = scheduler.thread.task_queue.to_list()[: settings.MAX_TASK_QUEUE] taskq_serializer = TaskSerializer(taskq, many=True, context=context) return Response(taskq_serializer.data) @@ -59,7 +57,8 @@ class TaskResultsOverviewViewSet(ListModelMixin, GenericViewSet): Returns an overview of how many results are available per schedule entry. 
""" - lookup_field = 'schedule_entry_name' + + lookup_field = "schedule_entry_name" queryset = ScheduleEntry.objects.all() serializer_class = TaskResultsOverviewSerializer @@ -81,9 +80,9 @@ def get_queryset(self): base_queryset = super(MultipleFieldLookupMixin, self).get_queryset() base_queryset = self.filter_queryset(base_queryset) - filter = {'schedule_entry__name': self.kwargs['schedule_entry_name']} + filter = {"schedule_entry__name": self.kwargs["schedule_entry_name"]} if not self.request.user.is_staff: - filter.update({'schedule_entry__is_private': False}) + filter.update({"schedule_entry__is_private": False}) queryset = base_queryset.filter(**filter) @@ -94,7 +93,7 @@ def get_queryset(self): def get_object(self): queryset = self.get_queryset() - filter = {'task_id': self.kwargs['task_id']} + filter = {"task_id": self.kwargs["task_id"]} return get_object_or_404(queryset, **filter) @@ -111,14 +110,16 @@ class TaskResultListViewSet(ListModelMixin, GenericViewSet): Downloads the acquisition's SigMF archive. """ + queryset = TaskResult.objects.all() serializer_class = TaskResultSerializer - permission_classes = ( - api_settings.DEFAULT_PERMISSION_CLASSES + [IsAdminOrOwnerOrReadOnly]) + permission_classes = api_settings.DEFAULT_PERMISSION_CLASSES + [ + IsAdminOrOwnerOrReadOnly + ] filter_backends = (filters.SearchFilter, filters.OrderingFilter) - lookup_fields = ('schedule_entry__name', 'task_id') - ordering_fields = ('task_id', 'started', 'finished', 'duration', 'status') - search_fields = ('task_id', 'status', 'detail') + lookup_fields = ("schedule_entry__name", "task_id") + ordering_fields = ("task_id", "started", "finished", "duration", "status") + search_fields = ("task_id", "status", "detail") def get_queryset(self): # .list() does not call .get_object(), which triggers permissions @@ -126,9 +127,9 @@ def get_queryset(self): # request user. base_queryset = self.filter_queryset(self.queryset) - filter = {'schedule_entry__name': self.kwargs['schedule_entry_name']} + filter = {"schedule_entry__name": self.kwargs["schedule_entry_name"]} if not self.request.user.is_staff: - filter.update({'schedule_entry__is_private': False}) + filter.update({"schedule_entry__is_private": False}) queryset = base_queryset.filter(**filter) @@ -137,7 +138,7 @@ def get_queryset(self): return queryset.all() - @action(detail=False, methods=('delete', )) + @action(detail=False, methods=("delete",)) def destroy_all(self, request, version, schedule_entry_name): queryset = self.get_queryset() @@ -156,20 +157,22 @@ def archive(self, request, version, schedule_entry_name): raise Http404 fqdn = settings.FQDN - fname = fqdn + '_' + schedule_entry_name + '.sigmf' + fname = fqdn + "_" + schedule_entry_name + ".sigmf" # FileResponse handles closing the file tmparchive = tempfile.TemporaryFile() build_sigmf_archive(tmparchive, schedule_entry_name, queryset) - content_type = 'application/x-tar' - response = FileResponse(tmparchive, as_attachment=True, filename=fname, - content_type=content_type) + content_type = "application/x-tar" + response = FileResponse( + tmparchive, as_attachment=True, filename=fname, content_type=content_type + ) return response -class TaskResultInstanceViewSet(MultipleFieldLookupMixin, RetrieveModelMixin, - DestroyModelMixin, GenericViewSet): +class TaskResultInstanceViewSet( + MultipleFieldLookupMixin, RetrieveModelMixin, DestroyModelMixin, GenericViewSet +): """ retrieve: Returns a specific result. 
@@ -181,25 +184,28 @@ class TaskResultInstanceViewSet(MultipleFieldLookupMixin, RetrieveModelMixin, Downloads the acquisition's SigMF archive. """ + queryset = TaskResult.objects.all() serializer_class = TaskResultSerializer - permission_classes = ( - api_settings.DEFAULT_PERMISSION_CLASSES + [IsAdminOrOwnerOrReadOnly]) - lookup_fields = ('schedule_entry__name', 'task_id') + permission_classes = api_settings.DEFAULT_PERMISSION_CLASSES + [ + IsAdminOrOwnerOrReadOnly + ] + lookup_fields = ("schedule_entry__name", "task_id") @action(detail=True) def archive(self, request, version, schedule_entry_name, task_id): entry_name = schedule_entry_name fqdn = settings.FQDN - fname = fqdn + '_' + entry_name + '_' + str(task_id) + '.sigmf' + fname = fqdn + "_" + entry_name + "_" + str(task_id) + ".sigmf" acq = self.get_object() # FileResponse handles closing the file tmparchive = tempfile.TemporaryFile() build_sigmf_archive(tmparchive, schedule_entry_name, [acq]) - content_type = 'application/x-tar' - response = FileResponse(tmparchive, as_attachment=True, filename=fname, - content_type=content_type) + content_type = "application/x-tar" + response = FileResponse( + tmparchive, as_attachment=True, filename=fname, content_type=content_type + ) return response @@ -218,9 +224,10 @@ def build_sigmf_archive(fileobj, schedule_entry_name, acquisitions): with tempfile.NamedTemporaryFile() as tmpdata: tmpdata.write(acq.data) tmpdata.seek(0) # move fd ptr to start of data for reading - name = schedule_entry_name + '_' + str(acq.task_id) - sigmf_file = sigmf.sigmffile.SigMFFile(metadata=acq.sigmf_metadata, - name=name) + name = schedule_entry_name + "_" + str(acq.task_id) + sigmf_file = sigmf.sigmffile.SigMFFile( + metadata=acq.sigmf_metadata, name=name + ) sigmf_file.set_data_file(tmpdata.name) sigmf.archive.SigMFArchive(sigmf_file, path=name, fileobj=fileobj) From e4b6ecbb6eb873453f1654efaffe99e4dbd0b4a5 Mon Sep 17 00:00:00 2001 From: Douglas Anderson Date: Tue, 28 May 2019 21:57:27 -0600 Subject: [PATCH 07/36] Add auto-formatting pre-commit hook, dev instructions, and travis-ci test --- .pre-commit-config.yaml | 6 ++++++ .travis.yml | 2 +- DEVELOPING.md | 19 ++++++++++++++++--- src/requirements-dev.txt | 1 + 4 files changed, 24 insertions(+), 4 deletions(-) create mode 100644 .pre-commit-config.yaml diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml new file mode 100644 index 00000000..b9a75aff --- /dev/null +++ b/.pre-commit-config.yaml @@ -0,0 +1,6 @@ +repos: +- repo: https://github.com/ambv/black + rev: stable + hooks: + - id: black + language_version: python3.6 diff --git a/.travis.yml b/.travis.yml index 76147067..92fe3fc7 100644 --- a/.travis.yml +++ b/.travis.yml @@ -26,6 +26,6 @@ before_script: - pip install -r ./src/requirements-dev.txt script: + - black --check ./src - tox -c ./src/tox.ini -e coverage - - tox -c ./src/tox.ini -e lint - docker ps | grep api | grep -q healthy diff --git a/DEVELOPING.md b/DEVELOPING.md index c9b5c07e..e008f237 100644 --- a/DEVELOPING.md +++ b/DEVELOPING.md @@ -38,14 +38,27 @@ recommend you initialize a virtual development environment using a tool such a ```bash $ cd src -$ python2 -m pip install -r requirements-dev.txt +$ python3 -m pip install -r requirements-dev.txt $ pytest # faster, but less thorough $ tox # tests code in clean virtualenv $ tox --recreate # if you change `requirements.txt` -$ tox -e lint # check that code meets widely accepted coding standards $ tox -e coverage # check where test coverage lacks ``` +Code Formatting +--------------- + +This 
project uses a Python auto-formatter called Black. You probably won't like +every decision it makes, but our continuous integration test-runner will reject +your commit if it's not properly formatted. If you've already pip-installed the +dev requirements from the section above, you already have a utility called +`pre-commit` installed that will automate setting up this project's git +pre-commit hooks. Simply type the following _once_, and each time you make a +commit, it will be "blackened" automatically. + +```bash +$ pre-commit install +``` Running Production Server with Local Changes -------------------------------------------- @@ -100,7 +113,7 @@ $ ./manage.py runserver outside of it, you may need to allow access to system sitepackages. For example, if you're using a virtualenv called `scos-sensor`, you can remove the following text file: `rm -f - ~/.virtualenvs/scos-sensor/lib/python2.7/no-global-site-packages.txt`, and + ~/.virtualenvs/scos-sensor/lib/python3.6/no-global-site-packages.txt`, and thereafter use the `ignore-installed` flag to pip: `pip install -I -r requirements.txt`. This should let the devserver fall back to system sitepackages for the SDR driver only. diff --git a/src/requirements-dev.txt b/src/requirements-dev.txt index ea7544fc..5e4e1c25 100644 --- a/src/requirements-dev.txt +++ b/src/requirements-dev.txt @@ -5,6 +5,7 @@ flake8==3.7.7 jedi==0.13.3 jsonschema==3.0.1 mkdocs==1.0.4 +pre-commit==1.16.1 pytest-cov==2.7.1 pytest-django==3.4.8 pytest-flake8==1.0.4 From b914b87afe8464c2045dbedda1ba6dd387e8dba1 Mon Sep 17 00:00:00 2001 From: Douglas Anderson Date: Tue, 28 May 2019 22:01:00 -0600 Subject: [PATCH 08/36] Remove flake8-based linter --- src/requirements-dev.txt | 1 - src/tox.ini | 13 ------------- 2 files changed, 14 deletions(-) diff --git a/src/requirements-dev.txt b/src/requirements-dev.txt index 5e4e1c25..6e999c43 100644 --- a/src/requirements-dev.txt +++ b/src/requirements-dev.txt @@ -1,7 +1,6 @@ -rrequirements.txt black==18.9b0 -flake8==3.7.7 jedi==0.13.3 jsonschema==3.0.1 mkdocs==1.0.4 diff --git a/src/tox.ini b/src/tox.ini index 571d8bfd..6867913a 100644 --- a/src/tox.ini +++ b/src/tox.ini @@ -10,13 +10,6 @@ commands = py.test {posargs} install_command = pip install {opts} {packages} list_dependencies_command = pip freeze -[testenv:lint] -basepython = python3 -deps = - -r{toxinidir}/requirements-dev.txt -commands = - python -m flake8 . - [testenv:coverage] basepython = python3 deps = @@ -29,12 +22,6 @@ deps = -r{toxinidir}/requirements-dev.txt commands = py.test -m update_api_docs --update-api-docs -[flake8] -exclude = - .tox - sensor/settings.py - */migrations/* - [pytest] DJANGO_SETTINGS_MODULE = sensor.settings From 2fef51b3470138f25123c6e26eee496e39314e4a Mon Sep 17 00:00:00 2001 From: Douglas Anderson Date: Wed, 29 May 2019 23:50:28 -0600 Subject: [PATCH 09/36] Fix some test failures --- src/actions/__init__.py | 20 +++++++++--------- .../tests/test_acquire_single_freq_fft.py | 21 +++++++------------ src/requirements-dev.txt | 2 ++ src/scheduler/tests/utils.py | 1 + src/sensor/exceptions.py | 2 +- src/tasks/models/task_result.py | 2 +- src/tasks/tests/test_detail_view.py | 4 ++-- src/tasks/tests/utils.py | 10 ++++----- src/tox.ini | 6 ++++++ 9 files changed, 35 insertions(+), 33 deletions(-) diff --git a/src/actions/__init__.py b/src/actions/__init__.py index 49b3d498..c0fbd105 100644 --- a/src/actions/__init__.py +++ b/src/actions/__init__.py @@ -5,11 +5,11 @@ from sensor import settings -from . import acquire_single_freq_fft -from . 
import acquire_stepped_freq_tdomain_iq
 from . import logger as logger_action
-from . import monitor_usrp
-from . import sync_gps
+from .acquire_single_freq_fft import SingleFrequencyFftAcquisition
+from .acquire_stepped_freq_tdomain_iq import SteppedFrequencyTimeDomainIqAcquisition
+from .monitor_usrp import UsrpMonitor
+from .sync_gps import SyncGps
 
 logger = logging.getLogger(__name__)
 
@@ -21,8 +21,8 @@
     "admin_logger": logger_action.Logger(
         loglvl=logger_action.LOGLVL_ERROR, admin_only=True
     ),
-    "monitor_usrp": monitor_usrp.UsrpMonitor(admin_only=True),
-    "sync_gps": sync_gps.SyncGps(admin_only=True),
+    "monitor_usrp": UsrpMonitor(admin_only=True),
+    "sync_gps": SyncGps(admin_only=True),
 }
 
 by_name = registered_actions
@@ -32,10 +32,10 @@
 # The YAML loader can key an object with parameters on these class names
 action_classes = {
     "logger": logger_action.Logger,
-    "usrp_monitor": monitor_usrp.UsrpMonitor,
-    "sync_gps": sync_gps.SyncGps,
-    "single_frequency_fft": acquire_single_freq_fft.SingleFrequencyFftAcquisition,
-    "stepped_frequency_time_domain_iq": acquire_stepped_freq_tdomain_iq.SteppedFrequencyTimeDomainIqAcquisition,
+    "usrp_monitor": UsrpMonitor,
+    "sync_gps": SyncGps,
+    "single_frequency_fft": SingleFrequencyFftAcquisition,
+    "stepped_frequency_time_domain_iq": SteppedFrequencyTimeDomainIqAcquisition,
 }
 
 
diff --git a/src/actions/tests/test_acquire_single_freq_fft.py b/src/actions/tests/test_acquire_single_freq_fft.py
index f2db7107..5dc91e6a 100644
--- a/src/actions/tests/test_acquire_single_freq_fft.py
+++ b/src/actions/tests/test_acquire_single_freq_fft.py
@@ -6,9 +6,8 @@
 # from jsonschema import validate as schema_validate
 from sigmf.validate import validate as sigmf_validate
 
-import actions
-from schedule.tests.utils import post_schedule, TEST_SCHEDULE_ENTRY
-from tasks.models import Acquisition
+from tasks.models import Acquisition, TaskResult
+from tasks.tests.utils import simulate_acquisitions
 
 SCHEMA_DIR = path.join(settings.REPO_ROOT, "schemas")
 
@@ -19,16 +18,10 @@
     schema = json.load(f)
 
 
-def test_detector(user_client, rf):
-    # Put an entry in the schedule that we can refer to
-    rjson = post_schedule(user_client, TEST_SCHEDULE_ENTRY)
-    entry_name = rjson["name"]
-    task_id = rjson["next_task_id"]
-
-    # use mock_acquire set up in conftest.py
-    actions.by_name["mock_acquire"](entry_name, task_id)
-    acquistion = Acquisition.objects.get(task_id=task_id)
-    sigmf_metadata = acquistion.sigmf_metadata
-    assert sigmf_validate(sigmf_metadata)
+def test_detector(user_client, test_scheduler):
+    entry_name = simulate_acquisitions(user_client)
+    tr = TaskResult.objects.get(schedule_entry__name=entry_name, task_id=1)
+    acquisition = Acquisition.objects.get(task_result=tr)
+    assert sigmf_validate(acquisition.metadata)
     # FIXME: update schema so that this passes
     # schema_validate(sigmf_metadata, schema)
diff --git a/src/requirements-dev.txt b/src/requirements-dev.txt
index 6e999c43..1fad8479 100644
--- a/src/requirements-dev.txt
+++ b/src/requirements-dev.txt
@@ -1,6 +1,8 @@
 -rrequirements.txt
 
 black==18.9b0
+flake8==3.6.0
+flake8-bugbear==19.3.0
 jedi==0.13.3
 jsonschema==3.0.1
 mkdocs==1.0.4
diff --git a/src/scheduler/tests/utils.py b/src/scheduler/tests/utils.py
index c4fb9437..55045031 100644
--- a/src/scheduler/tests/utils.py
+++ b/src/scheduler/tests/utils.py
@@ -97,6 +97,7 @@ def cb(entry, task_id):
     cb.__name__ = "testcb" + str(create_action.counter)
     actions.by_name[cb.__name__] = cb
     create_action.counter += 1
+
     return cb, flag
 
 
diff --git a/src/sensor/exceptions.py b/src/sensor/exceptions.py
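The hunk below hoists `view_name` out of the loop in `handle_protected_error` and points it at the renamed `task-result-detail` route. For context, a handler like this only runs if it is dispatched from the function named by DRF's `EXCEPTION_HANDLER` setting; a minimal sketch of such a dispatcher, assuming wiring along these lines (the actual registration is not shown in this patch):

```python
from django.db.models import ProtectedError
from rest_framework.views import exception_handler as drf_exception_handler

from sensor.exceptions import handle_protected_error  # modified in the hunk below


def exception_handler(exc, context):
    # Deleting a schedule entry whose task results are PROTECTed raises
    # ProtectedError; route that to the custom handler and defer
    # everything else to DRF's default handler.
    if isinstance(exc, ProtectedError):
        return handle_protected_error(exc, context)
    return drf_exception_handler(exc, context)
```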
index 53cc55f5..f2753a20 100644 --- a/src/sensor/exceptions.py +++ b/src/sensor/exceptions.py @@ -38,13 +38,13 @@ def handle_protected_error(exc, context): entry_name = context["kwargs"]["pk"] request = context["request"] + view_name = "task-result-detail" protected_object_urls = [] for protected_object in exc.protected_objects: task_id = protected_object.task_id url_kwargs = {"schedule_entry_name": entry_name, "task_id": task_id} url_kwargs.update(V1) - view_name = "result-detail" url = reverse(view_name, kwargs=url_kwargs, request=request) protected_object_urls.append(url) diff --git a/src/tasks/models/task_result.py b/src/tasks/models/task_result.py index 50ca093b..34c9d8e3 100644 --- a/src/tasks/models/task_result.py +++ b/src/tasks/models/task_result.py @@ -69,7 +69,7 @@ def save(self): if same_entry_results.count() >= self.max_results: same_entry_results[0].delete() - super(TaskResult, self).save() + super().save() def __str__(self): s = "{}/{}" diff --git a/src/tasks/tests/test_detail_view.py b/src/tasks/tests/test_detail_view.py index 00686650..6dedb44f 100644 --- a/src/tasks/tests/test_detail_view.py +++ b/src/tasks/tests/test_detail_view.py @@ -27,7 +27,8 @@ def test_can_view_others_result_details(user_client, alt_user_client): def test_cannot_view_private_result_details(user_client, admin_client, test_scheduler): """A user should not be able to view the result of a private task.""" entry_name = simulate_acquisitions(admin_client, is_private=True) - url = reverse_result_detail(entry_name, 1) + task_id = 1 + url = reverse_result_detail(entry_name, task_id) response = user_client.get(url, **HTTPS_KWARG) validate_response(response, status.HTTP_404_NOT_FOUND) @@ -37,5 +38,4 @@ def test_cannot_delete_result_details(user_client): entry_name = create_task_results(1, user_client) url = reverse_result_detail(entry_name, 1) response = user_client.delete(url, **HTTPS_KWARG) - validate_response(response, status.HTTP_405_METHOD_NOT_ALLOWED) diff --git a/src/tasks/tests/utils.py b/src/tasks/tests/utils.py index 195f10da..591f9c07 100644 --- a/src/tasks/tests/utils.py +++ b/src/tasks/tests/utils.py @@ -77,7 +77,7 @@ def create_task_results(n, user_client, entry_name=None): started=started, finished=started + ONE_MICROSECOND, duration=ONE_MICROSECOND, - result="success", + status="success", detail="", ) tr.max_results = TEST_MAX_TASK_RESULTS @@ -88,13 +88,13 @@ def create_task_results(n, user_client, entry_name=None): def reverse_results_overview(): rf = RequestFactory() - request = rf.get("/results/", **HTTPS_KWARG) + request = rf.get("/tasks/completed/", **HTTPS_KWARG) return reverse("results-overview", kwargs=V1, request=request) def reverse_result_list(schedule_entry_name): rf = RequestFactory() - request = rf.get("/results/" + schedule_entry_name, **HTTPS_KWARG) + request = rf.get("/tasks/completed/" + schedule_entry_name, **HTTPS_KWARG) kws = {"schedule_entry_name": schedule_entry_name} kws.update(V1) return reverse("result-list", kwargs=kws, request=request) @@ -102,11 +102,11 @@ def reverse_result_list(schedule_entry_name): def reverse_result_detail(schedule_entry_name, task_id): rf = RequestFactory() - url = "/results/" + schedule_entry_name + "/" + str(task_id) + url = "/tasks/completed/" + schedule_entry_name + "/" + str(task_id) request = rf.get(url, **HTTPS_KWARG) kws = {"schedule_entry_name": schedule_entry_name, "task_id": task_id} kws.update(V1) - return reverse("result-detail", kwargs=kws, request=request) + return reverse("task-result-detail", kwargs=kws, request=request) def 
get_results_overview(client): diff --git a/src/tox.ini b/src/tox.ini index 6867913a..f2d03496 100644 --- a/src/tox.ini +++ b/src/tox.ini @@ -25,6 +25,12 @@ commands = py.test -m update_api_docs --update-api-docs [pytest] DJANGO_SETTINGS_MODULE = sensor.settings +[flake8] +max-line-length = 80 +max-complexity = 12 +select = C,E,F,W,B,B950 +ignore = E501 + [coverage:run] omit = .tox/* From 64903058698935b00c1ea0047373cbcce62eb45d Mon Sep 17 00:00:00 2001 From: Douglas Anderson Date: Thu, 30 May 2019 00:02:39 -0600 Subject: [PATCH 10/36] Fix failing tests - only permission-related tests are still failing --- src/schedule/tests/test_serializers.py | 3 +-- src/sensor/tests/test_api_root_view.py | 9 +-------- src/tasks/tests/test_overview_view.py | 12 ++++++------ src/tasks/tests/test_serializers.py | 4 ++-- src/tasks/tests/utils.py | 4 ++-- 5 files changed, 12 insertions(+), 20 deletions(-) diff --git a/src/schedule/tests/test_serializers.py b/src/schedule/tests/test_serializers.py index 9ac5ab2b..85b564d0 100644 --- a/src/schedule/tests/test_serializers.py +++ b/src/schedule/tests/test_serializers.py @@ -272,8 +272,7 @@ def test_serialized_fields(user_client): # links assert rjson["self"] assert rjson["owner"] - assert rjson["results"] - assert rjson["acquisitions"] + assert rjson["task_results"] def test_non_serialized_fields(user_client): diff --git a/src/sensor/tests/test_api_root_view.py b/src/sensor/tests/test_api_root_view.py index 8acadd80..a9d655c2 100644 --- a/src/sensor/tests/test_api_root_view.py +++ b/src/sensor/tests/test_api_root_view.py @@ -3,14 +3,7 @@ from sensor import V1 from .utils import validate_response, HTTPS_KWARG -API_ROOT_ENDPOINTS = { - "acquisitions", - "users", - "schedule", - "status", - "capabilities", - "results", -} +API_ROOT_ENDPOINTS = {"users", "schedule", "status", "capabilities", "tasks"} def test_index(user_client): diff --git a/src/tasks/tests/test_overview_view.py b/src/tasks/tests/test_overview_view.py index d8ef6721..52221c8e 100644 --- a/src/tasks/tests/test_overview_view.py +++ b/src/tasks/tests/test_overview_view.py @@ -23,16 +23,16 @@ def test_admin_empty_overview_response(admin_client): def test_user_get_overview(user_client): create_task_results(2, user_client) overview, = get_results_overview(user_client) - assert overview["results_available"] == 2 - assert overview["results"] # is non-empty string + assert overview["task_results_available"] == 2 + assert overview["task_results"] # is non-empty string assert overview["schedule_entry"] # is non-empty string def test_admin_get_overview(admin_client): create_task_results(2, admin_client) overview, = get_results_overview(admin_client) - assert overview["results_available"] == 2 - assert overview["results"] # is non-empty string + assert overview["task_results_available"] == 2 + assert overview["task_results"] # is non-empty string assert overview["schedule_entry"] # is non-empty string @@ -44,8 +44,8 @@ def test_overview_for_private_entry_is_private( assert overview == [] overview, = get_results_overview(admin_client) - assert overview["results_available"] == 1 - assert overview["results"] # is non-empty string + assert overview["task_results_available"] == 1 + assert overview["task_results"] # is non-empty string assert overview["schedule_entry"] # is non-empty string diff --git a/src/tasks/tests/test_serializers.py b/src/tasks/tests/test_serializers.py index 50e73120..7366b7f4 100644 --- a/src/tasks/tests/test_serializers.py +++ b/src/tasks/tests/test_serializers.py @@ -12,10 +12,10 @@ def 
test_task_result_serializer(user_client): context = {"request": None} r = TaskResultSerializer(tr, context=context) assert r.data["task_id"] == 1 - assert r.data["self"] == "/api/v1/results/test/1/" + assert r.data["self"] == "/api/v1/tasks/completed/test/1/" assert r.data["schedule_entry"] == "/api/v1/schedule/test/" assert r.data["detail"] == "" - assert r.data["result"] == "success" + assert r.data["status"] == "success" assert r.data["duration"] == "00:00:00.000001" diff --git a/src/tasks/tests/utils.py b/src/tasks/tests/utils.py index 591f9c07..78e4cbd5 100644 --- a/src/tasks/tests/utils.py +++ b/src/tasks/tests/utils.py @@ -89,7 +89,7 @@ def create_task_results(n, user_client, entry_name=None): def reverse_results_overview(): rf = RequestFactory() request = rf.get("/tasks/completed/", **HTTPS_KWARG) - return reverse("results-overview", kwargs=V1, request=request) + return reverse("task-results-overview", kwargs=V1, request=request) def reverse_result_list(schedule_entry_name): @@ -97,7 +97,7 @@ def reverse_result_list(schedule_entry_name): request = rf.get("/tasks/completed/" + schedule_entry_name, **HTTPS_KWARG) kws = {"schedule_entry_name": schedule_entry_name} kws.update(V1) - return reverse("result-list", kwargs=kws, request=request) + return reverse("task-result-list", kwargs=kws, request=request) def reverse_result_detail(schedule_entry_name, task_id): From 36863ac6a4010578cbeeb6366992b7d29ba90e80 Mon Sep 17 00:00:00 2001 From: Douglas Anderson Date: Thu, 30 May 2019 22:14:32 -0600 Subject: [PATCH 11/36] Add isort import formatter and run on codebase --- scripts/autoformat_python.sh | 11 ++ src/actions/__init__.py | 1 - src/actions/acquire_single_freq_fft.py | 1 - .../acquire_stepped_freq_tdomain_iq.py | 1 - src/actions/monitor_usrp.py | 1 + src/actions/sync_gps.py | 2 +- .../tests/test_acquire_single_freq_fft.py | 3 - src/actions/tests/test_init.py | 1 - src/authentication/admin.py | 1 - src/authentication/migrations/0001_initial.py | 2 +- src/authentication/serializers.py | 1 + src/authentication/tests/test_list_view.py | 2 +- src/authentication/urls.py | 2 +- src/authentication/views.py | 4 +- src/capabilities/views.py | 1 - src/hardware/__init__.py | 1 - src/hardware/gps_iface.py | 2 +- src/hardware/scale_factors.py | 4 +- src/hardware/tests/test_usrp.py | 1 + src/requirements-dev.txt | 1 + src/schedule/migrations/0001_initial.py | 5 +- src/schedule/models/__init__.py | 2 +- src/schedule/models/schedule_entry.py | 2 +- src/schedule/serializers.py | 2 +- src/schedule/tests/test_admin_views.py | 4 +- src/schedule/tests/test_models.py | 5 +- src/schedule/tests/test_user_views.py | 9 +- src/schedule/tests/test_views.py | 4 +- src/schedule/views.py | 6 +- src/scheduler/scheduler.py | 4 +- src/scheduler/tests/test_scheduler.py | 3 +- src/sensor/tests/test_api_root_view.py | 3 +- src/sensor/urls.py | 3 +- src/sensor/views.py | 2 +- src/sensor/wsgi.py | 5 +- src/status/views.py | 2 +- src/tasks/migrations/0001_initial.py | 5 +- src/tasks/models/acquisition.py | 12 +- src/tasks/models/task_result.py | 1 - src/tasks/serializers/acquisition.py | 2 +- src/tasks/tests/test_detail_view.py | 120 ++++++++++++++++-- src/tasks/tests/test_list_view.py | 2 +- src/tasks/tests/test_overview_view.py | 4 +- src/tasks/tests/utils.py | 19 ++- src/tasks/urls.py | 5 +- src/tasks/views.py | 15 +-- src/tox.ini | 7 + 47 files changed, 210 insertions(+), 86 deletions(-) create mode 100755 scripts/autoformat_python.sh diff --git a/scripts/autoformat_python.sh b/scripts/autoformat_python.sh new file mode 
100755 index 00000000..77fb7d91 --- /dev/null +++ b/scripts/autoformat_python.sh @@ -0,0 +1,11 @@ +#!/bin/bash + +# Autoformat python - sort imports and then "blacken" code + +REPO_ROOT=${REPO_ROOT:=$(git rev-parse --show-toplevel)} + +echo "Sorting imports with isort... " +isort -rc ${REPO_ROOT}/src/ +echo +echo "Formatting code with black... " +black ${REPO_ROOT}/src/ diff --git a/src/actions/__init__.py b/src/actions/__init__.py index c0fbd105..0a9fe902 100644 --- a/src/actions/__init__.py +++ b/src/actions/__init__.py @@ -11,7 +11,6 @@ from .monitor_usrp import UsrpMonitor from .sync_gps import SyncGps - logger = logging.getLogger(__name__) diff --git a/src/actions/acquire_single_freq_fft.py b/src/actions/acquire_single_freq_fft.py index 821ac061..64acc4c7 100644 --- a/src/actions/acquire_single_freq_fft.py +++ b/src/actions/acquire_single_freq_fft.py @@ -83,7 +83,6 @@ from enum import Enum import numpy as np - from sigmf.sigmffile import SigMFFile from capabilities import capabilities diff --git a/src/actions/acquire_stepped_freq_tdomain_iq.py b/src/actions/acquire_stepped_freq_tdomain_iq.py index 8035192f..6b77286b 100644 --- a/src/actions/acquire_stepped_freq_tdomain_iq.py +++ b/src/actions/acquire_stepped_freq_tdomain_iq.py @@ -46,7 +46,6 @@ from itertools import zip_longest import numpy as np - from sigmf.sigmffile import SigMFFile from capabilities import capabilities diff --git a/src/actions/monitor_usrp.py b/src/actions/monitor_usrp.py index c4bf45ab..7a3d2282 100644 --- a/src/actions/monitor_usrp.py +++ b/src/actions/monitor_usrp.py @@ -7,6 +7,7 @@ from hardware import usrp_iface from sensor import settings + from .base import Action logger = logging.getLogger(__name__) diff --git a/src/actions/sync_gps.py b/src/actions/sync_gps.py index b1472076..71dab742 100644 --- a/src/actions/sync_gps.py +++ b/src/actions/sync_gps.py @@ -5,7 +5,7 @@ import logging from hardware import gps_iface -from status.models import Location, GPS_LOCATION_DESCRIPTION +from status.models import GPS_LOCATION_DESCRIPTION, Location from .base import Action diff --git a/src/actions/tests/test_acquire_single_freq_fft.py b/src/actions/tests/test_acquire_single_freq_fft.py index 5dc91e6a..06dbbe09 100644 --- a/src/actions/tests/test_acquire_single_freq_fft.py +++ b/src/actions/tests/test_acquire_single_freq_fft.py @@ -2,14 +2,11 @@ from os import path from django.conf import settings - -# from jsonschema import validate as schema_validate from sigmf.validate import validate as sigmf_validate from tasks.models import Acquisition, TaskResult from tasks.tests.utils import simulate_acquisitions - SCHEMA_DIR = path.join(settings.REPO_ROOT, "schemas") SCHEMA_FNAME = "scos_transfer_spec_schema.json" SCHEMA_PATH = path.join(SCHEMA_DIR, SCHEMA_FNAME) diff --git a/src/actions/tests/test_init.py b/src/actions/tests/test_init.py index 4c3218f2..ef1193d6 100644 --- a/src/actions/tests/test_init.py +++ b/src/actions/tests/test_init.py @@ -5,7 +5,6 @@ import actions - # Indentation makes this invalid INVALID_YAML = b"""\ single_frequency_fft: diff --git a/src/authentication/admin.py b/src/authentication/admin.py index 949a5a35..98fd0d22 100644 --- a/src/authentication/admin.py +++ b/src/authentication/admin.py @@ -5,5 +5,4 @@ from .models import User - admin.site.register(User, UserAdmin) diff --git a/src/authentication/migrations/0001_initial.py b/src/authentication/migrations/0001_initial.py index 89234407..c3a3ff4e 100644 --- a/src/authentication/migrations/0001_initial.py +++ 
b/src/authentication/migrations/0001_initial.py @@ -2,8 +2,8 @@ import django.contrib.auth.models import django.contrib.auth.validators -from django.db import migrations, models import django.utils.timezone +from django.db import migrations, models class Migration(migrations.Migration): diff --git a/src/authentication/serializers.py b/src/authentication/serializers.py index bec3df83..1bc7d7a8 100644 --- a/src/authentication/serializers.py +++ b/src/authentication/serializers.py @@ -2,6 +2,7 @@ from rest_framework.reverse import reverse from sensor import V1 + from .models import User diff --git a/src/authentication/tests/test_list_view.py b/src/authentication/tests/test_list_view.py index 381786c4..ed1d7f59 100644 --- a/src/authentication/tests/test_list_view.py +++ b/src/authentication/tests/test_list_view.py @@ -3,7 +3,7 @@ from schedule.tests.utils import TEST_PRIVATE_SCHEDULE_ENTRY, post_schedule from sensor import V1 -from sensor.tests.utils import validate_response, HTTPS_KWARG +from sensor.tests.utils import HTTPS_KWARG, validate_response def test_user_cannot_view_private_entry_in_list(admin_client, user_client): diff --git a/src/authentication/urls.py b/src/authentication/urls.py index 0c4957f9..780e9c41 100644 --- a/src/authentication/urls.py +++ b/src/authentication/urls.py @@ -1,6 +1,6 @@ from django.urls import path -from .views import UserListView, UserInstanceView +from .views import UserInstanceView, UserListView urlpatterns = ( path("", UserListView.as_view(), name="user-list"), diff --git a/src/authentication/views.py b/src/authentication/views.py index c7ef3202..da365028 100644 --- a/src/authentication/views.py +++ b/src/authentication/views.py @@ -1,18 +1,18 @@ from __future__ import absolute_import -from rest_framework.generics import get_object_or_404 from rest_framework.generics import ( ListAPIView, ListCreateAPIView, RetrieveAPIView, RetrieveUpdateDestroyAPIView, + get_object_or_404, ) from rest_framework.permissions import IsAdminUser from rest_framework.settings import api_settings from rest_framework.views import APIView from .models import User -from .serializers import UserProfileSerializer, UserDetailsSerializer +from .serializers import UserDetailsSerializer, UserProfileSerializer class UserListView(APIView): diff --git a/src/capabilities/views.py b/src/capabilities/views.py index c24deb5f..4127b598 100644 --- a/src/capabilities/views.py +++ b/src/capabilities/views.py @@ -8,7 +8,6 @@ import actions from capabilities import capabilities - logger = logging.getLogger(__name__) diff --git a/src/hardware/__init__.py b/src/hardware/__init__.py index a28db747..8e1bd8cf 100644 --- a/src/hardware/__init__.py +++ b/src/hardware/__init__.py @@ -1,4 +1,3 @@ from . 
import usrp_iface - sdr = usrp_iface diff --git a/src/hardware/gps_iface.py b/src/hardware/gps_iface.py index 63ec5b5a..aca34963 100644 --- a/src/hardware/gps_iface.py +++ b/src/hardware/gps_iface.py @@ -3,7 +3,7 @@ import logging import subprocess from datetime import datetime -from time import time, sleep +from time import sleep, time from hardware import usrp_iface diff --git a/src/hardware/scale_factors.py b/src/hardware/scale_factors.py index 31ba1383..ee08f30a 100644 --- a/src/hardware/scale_factors.py +++ b/src/hardware/scale_factors.py @@ -1,7 +1,5 @@ -import logging - import json - +import logging logger = logging.getLogger(__name__) diff --git a/src/hardware/tests/test_usrp.py b/src/hardware/tests/test_usrp.py index 3d446564..1b3fbb06 100644 --- a/src/hardware/tests/test_usrp.py +++ b/src/hardware/tests/test_usrp.py @@ -1,6 +1,7 @@ """Test aspects of RadioInterface with mocked USRP.""" import pytest + from hardware import usrp_iface # Create the RadioInterface with the mock usrp_block diff --git a/src/requirements-dev.txt b/src/requirements-dev.txt index 1fad8479..bc24fcee 100644 --- a/src/requirements-dev.txt +++ b/src/requirements-dev.txt @@ -3,6 +3,7 @@ black==18.9b0 flake8==3.6.0 flake8-bugbear==19.3.0 +isort==4.3.20 jedi==0.13.3 jsonschema==3.0.1 mkdocs==1.0.4 diff --git a/src/schedule/migrations/0001_initial.py b/src/schedule/migrations/0001_initial.py index c88a9504..24a788c7 100644 --- a/src/schedule/migrations/0001_initial.py +++ b/src/schedule/migrations/0001_initial.py @@ -1,9 +1,10 @@ # Generated by Django 2.2.1 on 2019-05-17 20:43 -from django.conf import settings import django.core.validators -from django.db import migrations, models import django.db.models.deletion +from django.conf import settings +from django.db import migrations, models + import schedule.models.schedule_entry diff --git a/src/schedule/models/__init__.py b/src/schedule/models/__init__.py index 4b8d1db9..c5ac34a6 100644 --- a/src/schedule/models/__init__.py +++ b/src/schedule/models/__init__.py @@ -1,2 +1,2 @@ -from .schedule_entry import ScheduleEntry, DEFAULT_PRIORITY # noqa from .request import Request # noqa +from .schedule_entry import DEFAULT_PRIORITY, ScheduleEntry # noqa diff --git a/src/schedule/models/schedule_entry.py b/src/schedule/models/schedule_entry.py index 6d688060..b7c127ac 100644 --- a/src/schedule/models/schedule_entry.py +++ b/src/schedule/models/schedule_entry.py @@ -1,7 +1,7 @@ import sys from itertools import count -from django.core.validators import MinValueValidator, MaxValueValidator +from django.core.validators import MaxValueValidator, MinValueValidator from django.db import models import actions diff --git a/src/schedule/serializers.py b/src/schedule/serializers.py index 6bb8fd20..8917fcad 100644 --- a/src/schedule/serializers.py +++ b/src/schedule/serializers.py @@ -6,8 +6,8 @@ import actions from sensor import V1 from sensor.utils import get_datetime_from_timestamp, get_timestamp_from_datetime -from .models import DEFAULT_PRIORITY, ScheduleEntry +from .models import DEFAULT_PRIORITY, ScheduleEntry action_help = "[Required] The name of the action to be scheduled" priority_help = "Lower number is higher priority (default={})".format(DEFAULT_PRIORITY) diff --git a/src/schedule/tests/test_admin_views.py b/src/schedule/tests/test_admin_views.py index 331b1c8e..5d7549bb 100644 --- a/src/schedule/tests/test_admin_views.py +++ b/src/schedule/tests/test_admin_views.py @@ -3,13 +3,13 @@ from schedule.tests.utils import ( EMPTY_SCHEDULE_RESPONSE, - TEST_SCHEDULE_ENTRY, 
TEST_PRIVATE_SCHEDULE_ENTRY, + TEST_SCHEDULE_ENTRY, post_schedule, update_schedule, ) from sensor import V1 -from sensor.tests.utils import validate_response, HTTPS_KWARG +from sensor.tests.utils import HTTPS_KWARG, validate_response def test_post_admin_private_schedule(admin_client): diff --git a/src/schedule/tests/test_models.py b/src/schedule/tests/test_models.py index c7652b80..4a8ed861 100644 --- a/src/schedule/tests/test_models.py +++ b/src/schedule/tests/test_models.py @@ -4,9 +4,10 @@ import pytest from django.core.exceptions import ValidationError -from .utils import flatten +from schedule.models import DEFAULT_PRIORITY, ScheduleEntry from scheduler import utils -from schedule.models import ScheduleEntry, DEFAULT_PRIORITY + +from .utils import flatten @pytest.mark.parametrize( diff --git a/src/schedule/tests/test_user_views.py b/src/schedule/tests/test_user_views.py index 68fc0c7c..81bdf815 100644 --- a/src/schedule/tests/test_user_views.py +++ b/src/schedule/tests/test_user_views.py @@ -1,19 +1,18 @@ import pytest - from rest_framework import status from rest_framework.reverse import reverse from schedule.tests.utils import ( EMPTY_SCHEDULE_RESPONSE, - TEST_SCHEDULE_ENTRY, - TEST_PRIVATE_SCHEDULE_ENTRY, TEST_ALTERNATE_SCHEDULE_ENTRY, + TEST_PRIVATE_SCHEDULE_ENTRY, + TEST_SCHEDULE_ENTRY, post_schedule, - update_schedule, reverse_detail_url, + update_schedule, ) from sensor import V1 -from sensor.tests.utils import validate_response, HTTPS_KWARG +from sensor.tests.utils import HTTPS_KWARG, validate_response def test_user_cannot_post_private_schedule(user_client): diff --git a/src/schedule/tests/test_views.py b/src/schedule/tests/test_views.py index 6d1add3a..e1bd9598 100644 --- a/src/schedule/tests/test_views.py +++ b/src/schedule/tests/test_views.py @@ -3,13 +3,13 @@ from schedule.tests.utils import ( EMPTY_SCHEDULE_RESPONSE, - TEST_SCHEDULE_ENTRY, TEST_PRIVATE_SCHEDULE_ENTRY, + TEST_SCHEDULE_ENTRY, post_schedule, reverse_detail_url, ) from sensor import V1 -from sensor.tests.utils import validate_response, HTTPS_KWARG +from sensor.tests.utils import HTTPS_KWARG, validate_response from tasks.tests.utils import simulate_acquisitions diff --git a/src/schedule/views.py b/src/schedule/views.py index 79e7beb7..447376f4 100644 --- a/src/schedule/views.py +++ b/src/schedule/views.py @@ -1,11 +1,11 @@ -from rest_framework import status, filters +from rest_framework import filters, status from rest_framework.response import Response from rest_framework.settings import api_settings from rest_framework.viewsets import ModelViewSet -from .models import ScheduleEntry, Request +from .models import Request, ScheduleEntry from .permissions import IsAdminOrOwnerOrReadOnly -from .serializers import ScheduleEntrySerializer, AdminScheduleEntrySerializer +from .serializers import AdminScheduleEntrySerializer, ScheduleEntrySerializer class ScheduleEntryViewSet(ModelViewSet): diff --git a/src/scheduler/scheduler.py b/src/scheduler/scheduler.py index 668eb240..f5dc49ba 100644 --- a/src/scheduler/scheduler.py +++ b/src/scheduler/scheduler.py @@ -8,12 +8,12 @@ from django.utils import timezone from requests_futures.sessions import FuturesSession +from schedule.models import ScheduleEntry +from sensor import settings from tasks.consts import MAX_DETAIL_LEN from tasks.models import TaskResult from tasks.serializers import TaskResultSerializer from tasks.task_queue import TaskQueue -from schedule.models import ScheduleEntry -from sensor import settings from . 
import utils
diff --git a/src/scheduler/tests/test_scheduler.py b/src/scheduler/tests/test_scheduler.py
index 1200fe46..b425f569 100644
--- a/src/scheduler/tests/test_scheduler.py
+++ b/src/scheduler/tests/test_scheduler.py
@@ -1,10 +1,11 @@
-import time
 import threading
+import time
 
 import pytest
 import requests_mock
 
 from scheduler.scheduler import Scheduler, minimum_duration
+
 from .utils import (
     BAD_ACTION_STR,
     advance_testclock,
diff --git a/src/sensor/tests/test_api_root_view.py b/src/sensor/tests/test_api_root_view.py
index a9d655c2..6de541a0 100644
--- a/src/sensor/tests/test_api_root_view.py
+++ b/src/sensor/tests/test_api_root_view.py
@@ -1,7 +1,8 @@
 from rest_framework.reverse import reverse
 
 from sensor import V1
-from .utils import validate_response, HTTPS_KWARG
+
+from .utils import HTTPS_KWARG, validate_response
 
 API_ROOT_ENDPOINTS = {"users", "schedule", "status", "capabilities", "tasks"}
diff --git a/src/sensor/urls.py b/src/sensor/urls.py
index f7439df9..945c017e 100644
--- a/src/sensor/urls.py
+++ b/src/sensor/urls.py
@@ -25,8 +25,7 @@
 from rest_framework.urlpatterns import format_suffix_patterns
 
 from . import settings
-from .views import schema_view, api_v1_root
-
+from .views import api_v1_root, schema_view
 
 # Matches api/v1, api/v2, etc...
 API_PREFIX = r"^api/(?P<version>v[0-9]+)/"
diff --git a/src/sensor/views.py b/src/sensor/views.py
index 5f1ce097..e2e6c98e 100644
--- a/src/sensor/views.py
+++ b/src/sensor/views.py
@@ -1,7 +1,7 @@
 from functools import partial
 
-from drf_yasg.views import get_schema_view
 from drf_yasg import openapi
+from drf_yasg.views import get_schema_view
 from rest_framework import permissions
 from rest_framework.decorators import api_view
 from rest_framework.response import Response
diff --git a/src/sensor/wsgi.py b/src/sensor/wsgi.py
index 7dd94a07..1d522923 100644
--- a/src/sensor/wsgi.py
+++ b/src/sensor/wsgi.py
@@ -13,11 +13,12 @@
 import django
 from django.core.wsgi import get_wsgi_application
 
+from scheduler import scheduler  # noqa
+from sensor import settings  # noqa
+
 os.environ.setdefault("DJANGO_SETTINGS_MODULE", "sensor.settings")
 django.setup()  # this is necessary because we need to handle our own thread
 
-from sensor import settings  # noqa
-from scheduler import scheduler  # noqa
 
 application = get_wsgi_application()
diff --git a/src/status/views.py b/src/status/views.py
index 354525b0..1dd8a78d 100644
--- a/src/status/views.py
+++ b/src/status/views.py
@@ -5,10 +5,10 @@
 
 from scheduler import scheduler
 from sensor import utils
+
 from .models import Location
 from .serializers import LocationSerializer
 
-
 logger = logging.getLogger(__name__)
diff --git a/src/tasks/migrations/0001_initial.py b/src/tasks/migrations/0001_initial.py
index 215abb3d..8e1ef4bb 100644
--- a/src/tasks/migrations/0001_initial.py
+++ b/src/tasks/migrations/0001_initial.py
@@ -1,10 +1,11 @@
 # Generated by Django 2.2.1 on 2019-05-17 20:43
 
 import datetime
-from django.db import migrations, models
+
 import django.db.models.deletion
-from django.utils.timezone import utc
 import jsonfield.fields
+from django.db import migrations, models
+from django.utils.timezone import utc
 
 
 class Migration(migrations.Migration):
diff --git a/src/tasks/models/acquisition.py b/src/tasks/models/acquisition.py
index 6382ccd0..be34dbcf 100644
--- a/src/tasks/models/acquisition.py
+++ b/src/tasks/models/acquisition.py
@@ -5,14 +5,14 @@
 class Acquisition(models.Model):
-    """Map between schedule entries and their task data and metadata.
+    """The data and metadata associated with a task.
-    Schedule Entry and Task ID map the acquisition to a specific task on the
-    sensor, while recording ID allows for a single task to create more than one
-    SigMF recording.
+    Task Result maps the acquisition to a specific task on the sensor, while
+    recording ID allows for a single task to create more than one SigMF
+    recording.
 
-    It is an error to create more than one Acquisition with the same schedule
-    entry, task id, and recording id.
+    It is an error to create more than one Acquisition associated with the same
+    task result and the same recording id.
 
     """
diff --git a/src/tasks/models/task_result.py b/src/tasks/models/task_result.py
index 34c9d8e3..601ee4d4 100644
--- a/src/tasks/models/task_result.py
+++ b/src/tasks/models/task_result.py
@@ -7,7 +7,6 @@
 from sensor.settings import MAX_TASK_RESULTS
 from tasks.consts import MAX_DETAIL_LEN
 
-
 UTC = timezone.timezone.utc
diff --git a/src/tasks/serializers/acquisition.py b/src/tasks/serializers/acquisition.py
index c4016e49..3bdad2bb 100644
--- a/src/tasks/serializers/acquisition.py
+++ b/src/tasks/serializers/acquisition.py
@@ -1,8 +1,8 @@
 from rest_framework import serializers
 from rest_framework.reverse import reverse
 
-from tasks.models import Acquisition
 from sensor import V1
+from tasks.models import Acquisition
 
 
 class AcquisitionHyperlinkedRelatedField(serializers.HyperlinkedRelatedField):
diff --git a/src/tasks/tests/test_detail_view.py b/src/tasks/tests/test_detail_view.py
index 6dedb44f..54528083 100644
--- a/src/tasks/tests/test_detail_view.py
+++ b/src/tasks/tests/test_detail_view.py
@@ -1,14 +1,38 @@
 from rest_framework import status
 
-from sensor.tests.utils import validate_response, HTTPS_KWARG
+from sensor.tests.utils import HTTPS_KWARG, validate_response
 from tasks.tests.utils import (
     create_task_results,
     reverse_result_detail,
     simulate_acquisitions,
+    update_result_detail,
 )
 
 
-def test_can_view_own_result_details(user_client):
+def test_user_can_create_nonprivate_acquisition(user_client, test_scheduler):
+    entry_name = simulate_acquisitions(user_client)
+    result_url = reverse_result_detail(entry_name, 1)
+    response = user_client.get(result_url, **HTTPS_KWARG)
+
+    validate_response(response, status.HTTP_200_OK)
+
+
+def test_user_cant_create_private_acquisition(
+    user_client, alt_user_client, test_scheduler
+):
+    # The alt user attempts to create a private acquisition.
+    entry_name = simulate_acquisitions(alt_user_client, is_private=True)
+    result_url = reverse_result_detail(entry_name, 1)
+
+    # The user attempts to GET the acquisition that the alt user created.
+    response = user_client.get(result_url, **HTTPS_KWARG)
+
+    # The user successfully GETs the acquisition that the alt user
+    # created; meaning that the acquisition was not, in fact, private.
+ validate_response(response, status.HTTP_200_OK) + + +def test_user_can_view_own_result_details(user_client): """A user should be able to view results they create.""" entry_name = create_task_results(1, user_client) url = reverse_result_detail(entry_name, 1) @@ -16,7 +40,7 @@ def test_can_view_own_result_details(user_client): validate_response(response, status.HTTP_200_OK) -def test_can_view_others_result_details(user_client, alt_user_client): +def test_user_can_view_others_result_details(user_client, alt_user_client): """A user should be able to view results created by others.""" entry_name = create_task_results(1, user_client) url = reverse_result_detail(entry_name, 1) @@ -24,7 +48,9 @@ def test_can_view_others_result_details(user_client, alt_user_client): validate_response(response, status.HTTP_200_OK) -def test_cannot_view_private_result_details(user_client, admin_client, test_scheduler): +def test_user_cannot_view_private_result_details( + user_client, admin_client, test_scheduler +): """A user should not be able to view the result of a private task.""" entry_name = simulate_acquisitions(admin_client, is_private=True) task_id = 1 @@ -33,9 +59,85 @@ def test_cannot_view_private_result_details(user_client, admin_client, test_sche validate_response(response, status.HTTP_404_NOT_FOUND) -def test_cannot_delete_result_details(user_client): - """Results are read-only.""" - entry_name = create_task_results(1, user_client) - url = reverse_result_detail(entry_name, 1) - response = user_client.delete(url, **HTTPS_KWARG) +def test_user_can_delete_own_results(user_client): + """A user should be able to delete results they own.""" + entry_name = simulate_acquisitions(user_client) + result_url = reverse_result_detail(entry_name, 1) + + first_response = user_client.delete(result_url, **HTTPS_KWARG) + second_response = user_client.delete(result_url, **HTTPS_KWARG) + + validate_response(first_response, status.HTTP_204_NO_CONTENT) + validate_response(second_response, status.HTTP_404_NOT_FOUND) + + +def test_user_cant_delete_others_results( + admin_client, user_client, alt_user_client, test_scheduler +): + # alt user schedule entry + alt_user_entry_name = simulate_acquisitions( + alt_user_client, name="alt_user_single_acq" + ) + alt_user_result_url = reverse_result_detail(alt_user_entry_name, 1) + + user_delete_alt_user_response = user_client.delete( + alt_user_result_url, **HTTPS_KWARG + ) + + # admin user schedule entry + admin_result_name = simulate_acquisitions(admin_client, name="admin_single_acq") + admin_result_url = reverse_result_detail(admin_result_name, 1) + + user_delete_admin_response = user_client.delete(admin_result_url, **HTTPS_KWARG) + + validate_response(user_delete_admin_response, status.HTTP_403_FORBIDDEN) + validate_response(user_delete_alt_user_response, status.HTTP_403_FORBIDDEN) + + +def test_user_cant_modify_their_result(user_client, test_scheduler): + """Task results are not modifiable.""" + entry_name = simulate_acquisitions(user_client) + acq_url = reverse_result_detail(entry_name, 1) + + new_result_detail = user_client.get(acq_url, **HTTPS_KWARG).data + + new_result_detail["task_id"] = 2 + + response = update_result_detail(user_client, entry_name, 1, new_result_detail) + validate_response(response, status.HTTP_405_METHOD_NOT_ALLOWED) + + +def test_user_cant_modify_other_results( + admin_client, user_client, alt_user_client, test_scheduler +): + # alt user schedule entry + alt_user_entry_name = simulate_acquisitions( + alt_user_client, name="alt_user_single_acq" + ) + 
alt_user_acq_url = reverse_result_detail(alt_user_entry_name, 1) + + new_result_detail = user_client.get(alt_user_acq_url, **HTTPS_KWARG) + + new_result_detail = new_result_detail.data + + new_result_detail["task_id"] = 2 + + user_modify_alt_user_response = update_result_detail( + user_client, alt_user_entry_name, 1, new_result_detail + ) + + # admin user schedule entry + admin_entry_name = simulate_acquisitions(admin_client, name="admin_single_acq") + admin_acq_url = reverse_result_detail(admin_entry_name, 1) + + new_result_detail = user_client.get(admin_acq_url, **HTTPS_KWARG).data + + new_result_detail["task_id"] = 2 + + user_modify_admin_response = update_result_detail( + user_client, admin_entry_name, 1, new_result_detail + ) + + validate_response(user_modify_alt_user_response, status.HTTP_403_FORBIDDEN) + validate_response(user_modify_admin_response, status.HTTP_403_FORBIDDEN) diff --git a/src/tasks/tests/test_list_view.py b/src/tasks/tests/test_list_view.py index 79747802..7811d3cb 100644 --- a/src/tasks/tests/test_list_view.py +++ b/src/tasks/tests/test_list_view.py @@ -1,7 +1,7 @@ import pytest from rest_framework import status -from sensor.tests.utils import validate_response, HTTPS_KWARG +from sensor.tests.utils import HTTPS_KWARG, validate_response from tasks.tests.utils import ( create_task_results, get_result_list, diff --git a/src/tasks/tests/test_overview_view.py b/src/tasks/tests/test_overview_view.py index 52221c8e..4b866599 100644 --- a/src/tasks/tests/test_overview_view.py +++ b/src/tasks/tests/test_overview_view.py @@ -1,11 +1,11 @@ from rest_framework import status -from sensor.tests.utils import validate_response, HTTPS_KWARG +from sensor.tests.utils import HTTPS_KWARG, validate_response from tasks.tests.utils import ( EMPTY_RESULTS_RESPONSE, create_task_results, - reverse_results_overview, get_results_overview, + reverse_results_overview, simulate_acquisitions, ) diff --git a/src/tasks/tests/utils.py b/src/tasks/tests/utils.py index 78e4cbd5..bff99140 100644 --- a/src/tasks/tests/utils.py +++ b/src/tasks/tests/utils.py @@ -1,15 +1,16 @@ import datetime +import json from django.test import RequestFactory from django.utils import timezone -from rest_framework.reverse import reverse from rest_framework import status +from rest_framework.reverse import reverse from schedule.models import ScheduleEntry -from schedule.tests.utils import post_schedule, TEST_SCHEDULE_ENTRY +from schedule.tests.utils import TEST_SCHEDULE_ENTRY, post_schedule from scheduler.tests.utils import simulate_scheduler_run from sensor import V1 -from sensor.tests.utils import validate_response, HTTPS_KWARG +from sensor.tests.utils import HTTPS_KWARG, validate_response from tasks.models import TaskResult TEST_MAX_TASK_RESULTS = 100 # Reduce from default of settings.MAX_TASK_RESULTS @@ -127,3 +128,15 @@ def get_result_detail(client, schedule_entry_name, task_id): url = reverse_result_detail(schedule_entry_name, task_id) response = client.get(url, **HTTPS_KWARG) return validate_response(response, status.HTTP_200_OK) + + +def update_acquisition_detail(client, schedule_entry_name, task_id, new_acquisition): + url = reverse_result_detail(schedule_entry_name, task_id) + + kwargs = { + "data": json.dumps(new_acquisition), + "content_type": "application/json", + "wsgi.url_scheme": "https", + } + + return client.put(url, **kwargs) diff --git a/src/tasks/urls.py b/src/tasks/urls.py index ca4668dc..85b622d9 100644 --- a/src/tasks/urls.py +++ b/src/tasks/urls.py @@ -1,14 +1,13 @@ from django.urls import path from 
.views import ( - TaskResultsOverviewViewSet, - TaskResultListViewSet, TaskResultInstanceViewSet, + TaskResultListViewSet, + TaskResultsOverviewViewSet, task_root, upcoming_tasks, ) - urlpatterns = ( path("", view=task_root, name="task-root"), path("upcoming/", view=upcoming_tasks, name="upcoming-tasks"), diff --git a/src/tasks/views.py b/src/tasks/views.py index b49fa500..adbbc383 100644 --- a/src/tasks/views.py +++ b/src/tasks/views.py @@ -2,20 +2,18 @@ import tempfile from functools import partial -from django.http import Http404, FileResponse +import sigmf.archive +import sigmf.sigmffile +from django.http import FileResponse, Http404 from rest_framework import filters, status -from rest_framework.decorators import action +from rest_framework.decorators import action, api_view from rest_framework.generics import get_object_or_404 -from rest_framework.mixins import ListModelMixin, RetrieveModelMixin, DestroyModelMixin -from rest_framework.decorators import api_view +from rest_framework.mixins import DestroyModelMixin, ListModelMixin, RetrieveModelMixin from rest_framework.response import Response from rest_framework.reverse import reverse from rest_framework.settings import api_settings from rest_framework.viewsets import GenericViewSet -import sigmf.archive -import sigmf.sigmffile - from schedule.models import ScheduleEntry from scheduler import scheduler from sensor import settings @@ -23,8 +21,7 @@ from .models.task_result import TaskResult from .permissions import IsAdminOrOwnerOrReadOnly from .serializers.task import TaskSerializer -from .serializers.task_result import TaskResultsOverviewSerializer, TaskResultSerializer - +from .serializers.task_result import TaskResultSerializer, TaskResultsOverviewSerializer logger = logging.getLogger(__name__) diff --git a/src/tox.ini b/src/tox.ini index f2d03496..7e2ccfc8 100644 --- a/src/tox.ini +++ b/src/tox.ini @@ -31,6 +31,13 @@ max-complexity = 12 select = C,E,F,W,B,B950 ignore = E501 +[isort] +multi_line_output=3 +include_trailing_comma=True +force_grid_wrap=0 +use_parentheses=True +line_length=88 + [coverage:run] omit = .tox/* From 64972fc20bd5270aed017b7f0b381fca97ea995a Mon Sep 17 00:00:00 2001 From: Douglas Anderson Date: Thu, 30 May 2019 23:46:05 -0600 Subject: [PATCH 12/36] Add import sorter (isort) autoformatter and integrate with pre-commit --- .pre-commit-config.yaml | 5 +++++ .travis.yml | 1 + DEVELOPING.md | 33 ++++++++++++++++++++++++++------- 3 files changed, 32 insertions(+), 7 deletions(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index b9a75aff..c2db22b8 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -1,4 +1,9 @@ repos: +- repo: https://github.com/pre-commit/mirrors-isort + rev: 'v4.3.20' + hooks: + - id: isort + language_version: python3.6 - repo: https://github.com/ambv/black rev: stable hooks: diff --git a/.travis.yml b/.travis.yml index 92fe3fc7..bfed14c5 100644 --- a/.travis.yml +++ b/.travis.yml @@ -26,6 +26,7 @@ before_script: - pip install -r ./src/requirements-dev.txt script: + - isort --check-only ./src - black --check ./src - tox -c ./src/tox.ini -e coverage - docker ps | grep api | grep -q healthy diff --git a/DEVELOPING.md b/DEVELOPING.md index e008f237..bc7bb7cd 100644 --- a/DEVELOPING.md +++ b/DEVELOPING.md @@ -45,16 +45,35 @@ $ tox --recreate # if you change `requirements.txt` $ tox -e coverage # check where test coverage lacks ``` -Code Formatting ---------------- +Committing +---------- + +Besides running the test suite and ensuring that all tests are passing, we 
also
+expect all Python code that's checked in to have been run through an
+auto-formatter.
+
 This project uses a Python auto-formatter called Black. You probably won't
 like every decision it makes, but our continuous integration test-runner will reject
-your commit if it's not properly formatted. If you've already pip-installed the
-dev requirements from the section above, you already have a utility called
-`pre-commit` installed that will automate setting up this project's git
-pre-commit hooks. Simply type the following _once_, and each time you make a
-commit, it will be "blackened" automatically.
+your commit if it's not properly formatted.
+
+Additionally, import statement sorting is handled by `isort`.
+
+The continuous integration test-runner verifies the code is auto-formatted by
+checking that neither `isort` nor `black` would recommend any changes to the
+code. Occasionally, this can fail if these two autoformatters disagree. The
+only time I've seen this happen is with a commented-out import statement, which
+`isort` parses and `black` treats as a comment. Solution: don't leave
+commented-out import statements in the code.
+
+There are several ways to autoformat your code before committing. First, IDE
+integration with on-save hooks is very useful. Second, there is a script,
+`scripts/autoformat_python.sh`, that will run both `isort` and `black` over the
+codebase. Lastly, if you've already pip-installed the dev requirements from the
+section above, you already have a utility called `pre-commit` installed that
+will automate setting up this project's git pre-commit hooks. Simply type the
+following _once_, and each time you make a commit, it will be appropriately
+autoformatted.
+
 ```bash
 $ pre-commit install
 ```
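For illustration, the import style the two formatters converge on under the `[isort]` settings added to `tox.ini` above (`multi_line_output=3`, `include_trailing_comma=True`, `use_parentheses=True`) is isort's "vertical hanging indent". A minimal sketch; the module and helper names are real ones from the test files changed in the next patch:

```python
# One imported name per line, a trailing comma, and the closing parenthesis
# on its own line: the multi-line import form that isort emits with these
# settings and that black leaves unchanged.
from tasks.tests.utils import (
    create_task_results,
    reverse_result_detail,
    simulate_acquisitions,
)
```

Imports written any other way are harmless; `scripts/autoformat_python.sh` or the pre-commit hook will rewrite them into this form.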
From 498ed621dba2f077fe3ac0b948b730bddf57f787 Mon Sep 17 00:00:00 2001
From: Douglas Anderson
Date: Thu, 30 May 2019 23:48:12 -0600
Subject: [PATCH 13/36] Fix all remaining test failures

---
 src/scheduler/tests/test_scheduler.py |  5 +-
 src/tasks/tests/test_detail_view.py   | 93 +++++++++++++++++++++++++--
 src/tasks/tests/test_list_view.py     | 11 ++--
 src/tasks/tests/utils.py              |  3 +-
 4 files changed, 96 insertions(+), 16 deletions(-)

diff --git a/src/scheduler/tests/test_scheduler.py b/src/scheduler/tests/test_scheduler.py
index b425f569..d4fb45c4 100644
--- a/src/scheduler/tests/test_scheduler.py
+++ b/src/scheduler/tests/test_scheduler.py
@@ -3,7 +3,6 @@

 import pytest
 import requests_mock
-
 from scheduler.scheduler import Scheduler, minimum_duration

 from .utils import (
@@ -335,7 +334,7 @@ def cb_request_handler(sess, resp):
     request_json = m.request_history[0].json()

     assert cb_flag.is_set()
-    assert request_json["result"] == "failure"
+    assert request_json["status"] == "failure"
     assert request_json["task_id"] == 1
     assert request_json["self"]
     assert request_json["detail"] == BAD_ACTION_STR
@@ -370,7 +369,7 @@ def cb_request_handler(sess, resp):

     assert cb_flag.is_set()
     assert action_flag.is_set()
-    assert request_json["result"] == "success"
+    assert request_json["status"] == "success"
     assert request_json["task_id"] == 1
     assert request_json["self"]
     assert request_json["started"]
diff --git a/src/tasks/tests/test_detail_view.py b/src/tasks/tests/test_detail_view.py
index 54528083..947ae7d7 100644
--- a/src/tasks/tests/test_detail_view.py
+++ b/src/tasks/tests/test_detail_view.py
@@ -1,5 +1,4 @@
 from rest_framework import status
-
 from sensor.tests.utils import HTTPS_KWARG, validate_response
 from tasks.tests.utils import (
     create_task_results,
@@ -56,10 +55,10 @@ def test_user_cannot_view_private_result_details(
     task_id = 1
     url = reverse_result_detail(entry_name, task_id)
     response = user_client.get(url, **HTTPS_KWARG)
-    validate_response(response, status.HTTP_404_NOT_FOUND)
+    validate_response(response, status.HTTP_403_FORBIDDEN)


-def test_user_can_delete_own_results(user_client):
+def test_user_can_delete_own_results(user_client, test_scheduler):
     """A user should be able to delete results they own."""
     entry_name = simulate_acquisitions(user_client)
     result_url = reverse_result_detail(entry_name, 1)
@@ -94,7 +93,7 @@ def test_user_cant_delete_others_results(

     validate_response(user_delete_alt_user_response, status.HTTP_403_FORBIDDEN)


-def test_user_cant_modify_their_result(user_client, test_scheduler):
+def test_user_cant_modify_own_result(user_client, test_scheduler):
     """Task results are not modifiable."""
     entry_name = simulate_acquisitions(user_client)
     acq_url = reverse_result_detail(entry_name, 1)
@@ -108,7 +107,7 @@ def test_user_cant_modify_their_result(user_client, test_scheduler):

     validate_response(response, status.HTTP_405_METHOD_NOT_ALLOWED)


-def test_user_cant_modify_other_results(
+def test_user_cant_modify_others_results(
     admin_client, user_client, alt_user_client, test_scheduler
 ):
     # alt user schedule entry
@@ -141,3 +140,87 @@ def test_user_cant_modify_other_results(

     validate_response(user_modify_alt_user_response, status.HTTP_403_FORBIDDEN)
     validate_response(user_modify_admin_response, status.HTTP_403_FORBIDDEN)
+
+
+def test_admin_can_create_private_results(admin_client, user_client, test_scheduler):
+    private_entry_name = simulate_acquisitions(admin_client, is_private=True)
+    private_result_url = reverse_result_detail(private_entry_name, 1)
+    user_response = user_client.get(private_result_url, **HTTPS_KWARG)
+    validate_response(user_response, status.HTTP_403_FORBIDDEN)
+
+
+def test_admin_can_view_all_results(
+    admin_client, alt_admin_client, user_client, test_scheduler
+):
+    # alt admin schedule entry
+    alt_admin_entry_name = simulate_acquisitions(
+        alt_admin_client, name="alt_admin_single_acq"
+    )
+    alt_admin_result_url = reverse_result_detail(alt_admin_entry_name, 1)
+
+    admin_view_alt_admin_response = admin_client.get(
+        alt_admin_result_url, **HTTPS_KWARG
+    )
+
+    # user schedule entry
+    user_result_name = simulate_acquisitions(user_client, name="admin_single_acq")
+    user_result_url = reverse_result_detail(user_result_name, 1)
+
+    admin_view_user_response = admin_client.get(user_result_url, **HTTPS_KWARG)
+
+    validate_response(admin_view_alt_admin_response, status.HTTP_200_OK)
+    validate_response(admin_view_user_response, status.HTTP_200_OK)
+
+
+def test_admin_can_view_private_results(admin_client, alt_admin_client, test_scheduler):
+    private_entry_name = simulate_acquisitions(alt_admin_client, is_private=True)
+    private_result_url = reverse_result_detail(private_entry_name, 1)
+    response = admin_client.get(private_result_url, **HTTPS_KWARG)
+    validate_response(response, status.HTTP_200_OK)
+
+
+def test_admin_can_delete_own_results(admin_client, test_scheduler):
+    entry_name = simulate_acquisitions(admin_client)
+    result_url = reverse_result_detail(entry_name, 1)
+
+    first_response = admin_client.delete(result_url, **HTTPS_KWARG)
+    second_response = admin_client.delete(result_url, **HTTPS_KWARG)
+
+    validate_response(first_response, status.HTTP_204_NO_CONTENT)
+    validate_response(second_response, status.HTTP_404_NOT_FOUND)
+
+
+def test_admin_can_delete_others_results(
+    admin_client, alt_admin_client, user_client, test_scheduler
+): + # alt admin private schedule entry + alt_admin_entry_name = simulate_acquisitions( + alt_admin_client, name="alt_admin_single_acq", is_private=True + ) + alt_admin_result_url = reverse_result_detail(alt_admin_entry_name, 1) + + admin_delete_alt_admin_response = admin_client.delete( + alt_admin_result_url, **HTTPS_KWARG + ) + + # user schedule entry + user_result_name = simulate_acquisitions(user_client, name="admin_single_acq") + user_result_url = reverse_result_detail(user_result_name, 1) + + admin_delete_user_response = admin_client.delete(user_result_url, **HTTPS_KWARG) + + validate_response(admin_delete_user_response, status.HTTP_204_NO_CONTENT) + validate_response(admin_delete_alt_admin_response, status.HTTP_204_NO_CONTENT) + + +def test_admin_cant_modify_own_results(admin_client, test_scheduler): + entry_name = simulate_acquisitions(admin_client) + result_url = reverse_result_detail(entry_name, 1) + + new_result_detail = admin_client.get(result_url, **HTTPS_KWARG).data + + new_result_detail["task_id"] = 2 + + response = update_result_detail(admin_client, entry_name, 1, new_result_detail) + + validate_response(response, status.HTTP_405_METHOD_NOT_ALLOWED) diff --git a/src/tasks/tests/test_list_view.py b/src/tasks/tests/test_list_view.py index 7811d3cb..1830ca05 100644 --- a/src/tasks/tests/test_list_view.py +++ b/src/tasks/tests/test_list_view.py @@ -1,6 +1,5 @@ import pytest from rest_framework import status - from sensor.tests.utils import HTTPS_KWARG, validate_response from tasks.tests.utils import ( create_task_results, @@ -44,18 +43,18 @@ def test_private_entry_results_list_is_private( entry_name = simulate_acquisitions(admin_client, is_private=True) url = reverse_result_list(entry_name) response = user_client.get(url, **HTTPS_KWARG) - validate_response(response, status.HTTP_404_NOT_FOUND) + validate_response(response, status.HTTP_403_FORBIDDEN) @pytest.mark.django_db def test_delete_list(user_client): - # If result doesn't exist, expect 405 + # If result doesn't exist, expect 404 url = reverse_result_list("doesntexist") response = user_client.delete(url, **HTTPS_KWARG) - validate_response(response, status.HTTP_405_METHOD_NOT_ALLOWED) + validate_response(response, status.HTTP_404_NOT_FOUND) - # If result does exist, expect 405 + # If result does exist, expect 204 entry_name = create_task_results(1, user_client) url = reverse_result_list(entry_name) response = user_client.delete(url, **HTTPS_KWARG) - validate_response(response, status.HTTP_405_METHOD_NOT_ALLOWED) + validate_response(response, status.HTTP_204_NO_CONTENT) diff --git a/src/tasks/tests/utils.py b/src/tasks/tests/utils.py index bff99140..f9acaa8e 100644 --- a/src/tasks/tests/utils.py +++ b/src/tasks/tests/utils.py @@ -5,7 +5,6 @@ from django.utils import timezone from rest_framework import status from rest_framework.reverse import reverse - from schedule.models import ScheduleEntry from schedule.tests.utils import TEST_SCHEDULE_ENTRY, post_schedule from scheduler.tests.utils import simulate_scheduler_run @@ -130,7 +129,7 @@ def get_result_detail(client, schedule_entry_name, task_id): return validate_response(response, status.HTTP_200_OK) -def update_acquisition_detail(client, schedule_entry_name, task_id, new_acquisition): +def update_result_detail(client, schedule_entry_name, task_id, new_acquisition): url = reverse_result_detail(schedule_entry_name, task_id) kwargs = { From 7247440d35aa3293dcdf6705d2b6cb9e64ce3fc1 Mon Sep 17 00:00:00 2001 From: Douglas Anderson Date: Thu, 30 May 2019 23:53:27 -0600 Subject: 
[PATCH 14/36] isort recurse the src directory --- .travis.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.travis.yml b/.travis.yml index bfed14c5..98680866 100644 --- a/.travis.yml +++ b/.travis.yml @@ -26,7 +26,7 @@ before_script: - pip install -r ./src/requirements-dev.txt script: - - isort --check-only ./src + - isort --check-only --rc ./src - black --check ./src - tox -c ./src/tox.ini -e coverage - docker ps | grep api | grep -q healthy From 3854472bb0b79d80aafc0940e49ad0939686abdf Mon Sep 17 00:00:00 2001 From: Douglas Anderson Date: Fri, 31 May 2019 00:45:58 -0600 Subject: [PATCH 15/36] Make isort smarter --- .pre-commit-config.yaml | 20 +++++++++++++------- scripts/autoformat_python.sh | 6 ++++-- src/.isort.cfg | 7 +++++++ src/requirements-dev.txt | 1 + src/tox.ini | 7 ------- 5 files changed, 25 insertions(+), 16 deletions(-) create mode 100644 src/.isort.cfg diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index c2db22b8..105813aa 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -1,11 +1,17 @@ repos: -- repo: https://github.com/pre-commit/mirrors-isort - rev: 'v4.3.20' + - repo: https://github.com/asottile/seed-isort-config + rev: v1.9.1 hooks: - - id: isort - language_version: python3.6 -- repo: https://github.com/ambv/black + - id: seed-isort-config + language_version: python3.6 + args: [--application-directories=./src, --settings-path=./src] + - repo: https://github.com/pre-commit/mirrors-isort + rev: v4.3.20 + hooks: + - id: isort + language_version: python3.6 + - repo: https://github.com/ambv/black rev: stable hooks: - - id: black - language_version: python3.6 + - id: black + language_version: python3.6 diff --git a/scripts/autoformat_python.sh b/scripts/autoformat_python.sh index 77fb7d91..74a362b2 100755 --- a/scripts/autoformat_python.sh +++ b/scripts/autoformat_python.sh @@ -3,9 +3,11 @@ # Autoformat python - sort imports and then "blacken" code REPO_ROOT=${REPO_ROOT:=$(git rev-parse --show-toplevel)} +SRC_ROOT=${REPO_ROOT}/src echo "Sorting imports with isort... " -isort -rc ${REPO_ROOT}/src/ +seed-isort-config --application-directories=${SRC_ROOT} --settings-path=${SRC_ROOT} +isort -rc ${SRC_ROOT} echo echo "Formatting code with black... 
" -black ${REPO_ROOT}/src/ +black ${SRC_ROOT} diff --git a/src/.isort.cfg b/src/.isort.cfg new file mode 100644 index 00000000..5dc108fc --- /dev/null +++ b/src/.isort.cfg @@ -0,0 +1,7 @@ +[settings] +multi_line_output=3 +include_trailing_comma=True +force_grid_wrap=0 +use_parentheses=True +line_length=88 +known_third_party=django,drf_yasg,jsonfield,numpy,pytest,requests_futures,requests_mock,rest_framework,ruamel,sigmf diff --git a/src/requirements-dev.txt b/src/requirements-dev.txt index bc24fcee..0336a318 100644 --- a/src/requirements-dev.txt +++ b/src/requirements-dev.txt @@ -11,4 +11,5 @@ pre-commit==1.16.1 pytest-cov==2.7.1 pytest-django==3.4.8 pytest-flake8==1.0.4 +seed-isort-config==1.9.1 tox==3.10.0 diff --git a/src/tox.ini b/src/tox.ini index 7e2ccfc8..f2d03496 100644 --- a/src/tox.ini +++ b/src/tox.ini @@ -31,13 +31,6 @@ max-complexity = 12 select = C,E,F,W,B,B950 ignore = E501 -[isort] -multi_line_output=3 -include_trailing_comma=True -force_grid_wrap=0 -use_parentheses=True -line_length=88 - [coverage:run] omit = .tox/* From 3a1e5ccf716bb5828d40970422733b2d86ad7888 Mon Sep 17 00:00:00 2001 From: Douglas Anderson Date: Fri, 31 May 2019 00:46:26 -0600 Subject: [PATCH 16/36] Smarter isort sorting --- src/scheduler/tests/test_scheduler.py | 1 + src/tasks/tests/test_detail_view.py | 1 + src/tasks/tests/test_list_view.py | 1 + src/tasks/tests/utils.py | 1 + 4 files changed, 4 insertions(+) diff --git a/src/scheduler/tests/test_scheduler.py b/src/scheduler/tests/test_scheduler.py index d4fb45c4..98fcd183 100644 --- a/src/scheduler/tests/test_scheduler.py +++ b/src/scheduler/tests/test_scheduler.py @@ -3,6 +3,7 @@ import pytest import requests_mock + from scheduler.scheduler import Scheduler, minimum_duration from .utils import ( diff --git a/src/tasks/tests/test_detail_view.py b/src/tasks/tests/test_detail_view.py index 947ae7d7..e9dbf0cc 100644 --- a/src/tasks/tests/test_detail_view.py +++ b/src/tasks/tests/test_detail_view.py @@ -1,4 +1,5 @@ from rest_framework import status + from sensor.tests.utils import HTTPS_KWARG, validate_response from tasks.tests.utils import ( create_task_results, diff --git a/src/tasks/tests/test_list_view.py b/src/tasks/tests/test_list_view.py index 1830ca05..483b8787 100644 --- a/src/tasks/tests/test_list_view.py +++ b/src/tasks/tests/test_list_view.py @@ -1,5 +1,6 @@ import pytest from rest_framework import status + from sensor.tests.utils import HTTPS_KWARG, validate_response from tasks.tests.utils import ( create_task_results, diff --git a/src/tasks/tests/utils.py b/src/tasks/tests/utils.py index f9acaa8e..f23eac94 100644 --- a/src/tasks/tests/utils.py +++ b/src/tasks/tests/utils.py @@ -5,6 +5,7 @@ from django.utils import timezone from rest_framework import status from rest_framework.reverse import reverse + from schedule.models import ScheduleEntry from schedule.tests.utils import TEST_SCHEDULE_ENTRY, post_schedule from scheduler.tests.utils import simulate_scheduler_run From 14f94701a2441bb30603f1d5a984e225050c908b Mon Sep 17 00:00:00 2001 From: Douglas Anderson Date: Fri, 31 May 2019 00:47:00 -0600 Subject: [PATCH 17/36] Remove py2 future imports --- src/actions/acquire_single_freq_fft.py | 2 -- src/actions/logger.py | 2 -- src/actions/monitor_usrp.py | 2 -- src/actions/sync_gps.py | 2 -- src/authentication/views.py | 2 -- src/capabilities/apps.py | 1 - src/hardware/apps.py | 1 - src/schedule/models/request.py | 2 -- src/sensor/exceptions.py | 2 -- src/sensor/urls.py | 2 -- src/sensor/wsgi.py | 2 -- 11 files changed, 20 deletions(-) diff --git 
a/src/actions/acquire_single_freq_fft.py b/src/actions/acquire_single_freq_fft.py index 64acc4c7..5695d3f8 100644 --- a/src/actions/acquire_single_freq_fft.py +++ b/src/actions/acquire_single_freq_fft.py @@ -77,8 +77,6 @@ """ -from __future__ import absolute_import - import logging from enum import Enum diff --git a/src/actions/logger.py b/src/actions/logger.py index c0abfe09..78008eb5 100644 --- a/src/actions/logger.py +++ b/src/actions/logger.py @@ -1,7 +1,5 @@ """A simple example action that logs a message.""" -from __future__ import absolute_import - import logging from .base import Action diff --git a/src/actions/monitor_usrp.py b/src/actions/monitor_usrp.py index 7a3d2282..b901fa17 100644 --- a/src/actions/monitor_usrp.py +++ b/src/actions/monitor_usrp.py @@ -1,7 +1,5 @@ """Monitor the on-board USRP and touch or remove an indicator file.""" -from __future__ import absolute_import - import logging from pathlib import Path diff --git a/src/actions/sync_gps.py b/src/actions/sync_gps.py index 71dab742..60de0266 100644 --- a/src/actions/sync_gps.py +++ b/src/actions/sync_gps.py @@ -1,7 +1,5 @@ """Monitor the on-board USRP and touch or remove an indicator file.""" -from __future__ import absolute_import - import logging from hardware import gps_iface diff --git a/src/authentication/views.py b/src/authentication/views.py index da365028..a16b30bc 100644 --- a/src/authentication/views.py +++ b/src/authentication/views.py @@ -1,5 +1,3 @@ -from __future__ import absolute_import - from rest_framework.generics import ( ListAPIView, ListCreateAPIView, diff --git a/src/capabilities/apps.py b/src/capabilities/apps.py index acd07f83..44c5bf57 100644 --- a/src/capabilities/apps.py +++ b/src/capabilities/apps.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- -from __future__ import unicode_literals from django.apps import AppConfig diff --git a/src/hardware/apps.py b/src/hardware/apps.py index 54c0c35d..f183653a 100644 --- a/src/hardware/apps.py +++ b/src/hardware/apps.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- -from __future__ import unicode_literals from django.apps import AppConfig diff --git a/src/schedule/models/request.py b/src/schedule/models/request.py index aac0f6e8..96cfdca8 100644 --- a/src/schedule/models/request.py +++ b/src/schedule/models/request.py @@ -1,7 +1,5 @@ """Request model to save enough of a request to be passed to reverse().""" -from __future__ import absolute_import - from django.db import models from django.utils.encoding import iri_to_uri from django.utils.functional import cached_property diff --git a/src/sensor/exceptions.py b/src/sensor/exceptions.py index f2753a20..9e35e7c9 100644 --- a/src/sensor/exceptions.py +++ b/src/sensor/exceptions.py @@ -1,7 +1,5 @@ """Provides custom exception handing.""" -from __future__ import absolute_import - import logging from django import db diff --git a/src/sensor/urls.py b/src/sensor/urls.py index 945c017e..23925ff8 100644 --- a/src/sensor/urls.py +++ b/src/sensor/urls.py @@ -17,8 +17,6 @@ """ -from __future__ import absolute_import - from django.contrib import admin from django.urls import include, path, re_path from django.views.generic import RedirectView diff --git a/src/sensor/wsgi.py b/src/sensor/wsgi.py index 1d522923..d9ee8615 100644 --- a/src/sensor/wsgi.py +++ b/src/sensor/wsgi.py @@ -6,8 +6,6 @@ https://docs.djangoproject.com/en/1.11/howto/deployment/wsgi/ """ -from __future__ import absolute_import - import os import django From d594c91b142eedb7cee741b06d29276689ef91cc Mon Sep 17 00:00:00 2001 From: Douglas Anderson Date: Fri, 31 
May 2019 00:55:05 -0600 Subject: [PATCH 18/36] Add more pre-commit hooks --- .pre-commit-config.yaml | 9 +++++++++ 1 file changed, 9 insertions(+) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 105813aa..8dee19b6 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -1,4 +1,13 @@ repos: + - repo: https://github.com/pre-commit/pre-commit-hooks + rev: v2.2.3 + hooks: + - id: trailing-whitespace + - id: end-of-file-fixer + - id: check-docstring-first + - id: check-json + - id: check-yaml + - id: debug-statements - repo: https://github.com/asottile/seed-isort-config rev: v1.9.1 hooks: From 7be9a001302b194f4406f290d2f94cf057959e34 Mon Sep 17 00:00:00 2001 From: Douglas Anderson Date: Fri, 31 May 2019 01:08:57 -0600 Subject: [PATCH 19/36] Disable isort in wsgi.py to enable bootstrapping django app before import --- src/sensor/wsgi.py | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/src/sensor/wsgi.py b/src/sensor/wsgi.py index d9ee8615..a6e9bb7c 100644 --- a/src/sensor/wsgi.py +++ b/src/sensor/wsgi.py @@ -4,6 +4,9 @@ For more information on this file, see https://docs.djangoproject.com/en/1.11/howto/deployment/wsgi/ + +isort:skip_file + """ import os @@ -11,12 +14,11 @@ import django from django.core.wsgi import get_wsgi_application -from scheduler import scheduler # noqa -from sensor import settings # noqa - os.environ.setdefault("DJANGO_SETTINGS_MODULE", "sensor.settings") django.setup() # this is necessary because we need to handle our own thread +from scheduler import scheduler # noqa +from sensor import settings # noqa application = get_wsgi_application() From 8302c092ddc41fb800baac2a12f2e4aacd09f09d Mon Sep 17 00:00:00 2001 From: Douglas Anderson Date: Fri, 31 May 2019 01:15:17 -0600 Subject: [PATCH 20/36] Fix isort call --- .travis.yml | 2 +- scripts/autoformat_python.sh | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/.travis.yml b/.travis.yml index 98680866..3f74ba06 100644 --- a/.travis.yml +++ b/.travis.yml @@ -26,7 +26,7 @@ before_script: - pip install -r ./src/requirements-dev.txt script: - - isort --check-only --rc ./src + - isort --check-only --recursive ./src - black --check ./src - tox -c ./src/tox.ini -e coverage - docker ps | grep api | grep -q healthy diff --git a/scripts/autoformat_python.sh b/scripts/autoformat_python.sh index 74a362b2..7b4557dc 100755 --- a/scripts/autoformat_python.sh +++ b/scripts/autoformat_python.sh @@ -7,7 +7,7 @@ SRC_ROOT=${REPO_ROOT}/src echo "Sorting imports with isort... " seed-isort-config --application-directories=${SRC_ROOT} --settings-path=${SRC_ROOT} -isort -rc ${SRC_ROOT} +isort --recursive ${SRC_ROOT} echo echo "Formatting code with black... 
" black ${SRC_ROOT} From 6fef777f2eadacef1fbbf0bcfc361f33c5252436 Mon Sep 17 00:00:00 2001 From: Douglas Anderson Date: Mon, 3 Jun 2019 20:16:26 -0600 Subject: [PATCH 21/36] Don't fail CI check on import order --- .travis.yml | 1 - 1 file changed, 1 deletion(-) diff --git a/.travis.yml b/.travis.yml index 3f74ba06..92fe3fc7 100644 --- a/.travis.yml +++ b/.travis.yml @@ -26,7 +26,6 @@ before_script: - pip install -r ./src/requirements-dev.txt script: - - isort --check-only --recursive ./src - black --check ./src - tox -c ./src/tox.ini -e coverage - docker ps | grep api | grep -q healthy From 69d31287bf11999ec383ff1366d53676a9343f50 Mon Sep 17 00:00:00 2001 From: Douglas Anderson Date: Mon, 3 Jun 2019 22:12:04 -0600 Subject: [PATCH 22/36] Add archive download tests --- src/hardware/usrp_iface.py | 3 +- src/tasks/tests/test_archive_download.py | 64 ++++++++++++++++++++++++ src/tasks/tests/utils.py | 20 ++++++++ src/tasks/views.py | 30 ++++++----- 4 files changed, 104 insertions(+), 13 deletions(-) create mode 100644 src/tasks/tests/test_archive_download.py diff --git a/src/hardware/usrp_iface.py b/src/hardware/usrp_iface.py index 3dc27bcc..93acd25e 100644 --- a/src/hardware/usrp_iface.py +++ b/src/hardware/usrp_iface.py @@ -28,7 +28,8 @@ radio = None is_available = False -# Testing determined these gain values provide +# Testing determined these gain values provide a good mix of sensitivity and +# dynamic range performance VALID_GAINS = (0, 20, 40, 60) diff --git a/src/tasks/tests/test_archive_download.py b/src/tasks/tests/test_archive_download.py new file mode 100644 index 00000000..347b1051 --- /dev/null +++ b/src/tasks/tests/test_archive_download.py @@ -0,0 +1,64 @@ +import os +import tempfile + +import numpy as np +import sigmf.sigmffile +from rest_framework import status + +import sensor.settings +from tasks.tests.utils import ( + HTTPS_KWARG, + reverse_archive, + reverse_archive_all, + simulate_acquisitions, +) + + +def test_single_acquisition_archive_download(user_client, test_scheduler): + entry_name = simulate_acquisitions(user_client, n=1) + task_id = 1 + url = reverse_archive(entry_name, task_id) + disposition = 'attachment; filename="{}_test_acq_1.sigmf"' + disposition = disposition.format(sensor.settings.FQDN) + response = user_client.get(url, **HTTPS_KWARG) + + assert response.status_code == status.HTTP_200_OK + assert response["content-disposition"] == disposition + assert response["content-type"] == "application/x-tar" + + with tempfile.NamedTemporaryFile() as tf: + for content in response.streaming_content: + tf.write(content) + + sigmf_archive_contents = sigmf.sigmffile.fromarchive(tf.name) + md = sigmf_archive_contents._metadata + datafile = sigmf_archive_contents.data_file + datafile_actual_size = os.stat(datafile).st_size + claimed_sha512 = md["global"]["core:sha512"] + number_of_sample_arrays = len(md["annotations"]) + samples_per_array = md["annotations"][0]["core:sample_count"] + sample_array_size = samples_per_array * np.float32(0.0).nbytes + datafile_expected_size = number_of_sample_arrays * sample_array_size + actual_sha512 = sigmf.sigmf_hash.calculate_sha512(datafile) + + assert datafile_actual_size == datafile_expected_size + assert claimed_sha512 == actual_sha512 + + +def test_all_acquisitions_archive_download(user_client, test_scheduler): + entry_name = simulate_acquisitions(user_client, n=3) + url = reverse_archive_all(entry_name) + disposition = 'attachment; filename="{}_test_multiple_acq.sigmf"' + disposition = disposition.format(sensor.settings.FQDN) + 
response = user_client.get(url, **HTTPS_KWARG) + + assert response.status_code == status.HTTP_200_OK + assert response["content-disposition"] == disposition + assert response["content-type"] == "application/x-tar" + + with tempfile.NamedTemporaryFile() as tf: + for content in response.streaming_content: + tf.write(content) + + sigmf_archive_contents = sigmf.archive.extract(tf.name) + assert len(sigmf_archive_contents) == 3 diff --git a/src/tasks/tests/utils.py b/src/tasks/tests/utils.py index f23eac94..94f5eae9 100644 --- a/src/tasks/tests/utils.py +++ b/src/tasks/tests/utils.py @@ -110,6 +110,26 @@ def reverse_result_detail(schedule_entry_name, task_id): return reverse("task-result-detail", kwargs=kws, request=request) +def reverse_archive(schedule_entry_name, task_id): + rf = RequestFactory() + entry_name = schedule_entry_name + url = "/tasks/completed/{}/{!s}/archive".format(entry_name, task_id) + request = rf.get(url, **HTTPS_KWARG) + kws = {"schedule_entry_name": entry_name, "task_id": task_id} + kws.update(V1) + return reverse("task-result-archive", kwargs=kws, request=request) + + +def reverse_archive_all(schedule_entry_name): + rf = RequestFactory() + entry_name = schedule_entry_name + url = "/tasks/completed/{}/archive".format(entry_name) + request = rf.get(url, **HTTPS_KWARG) + kws = {"schedule_entry_name": entry_name} + kws.update(V1) + return reverse("task-result-list-archive", kwargs=kws, request=request) + + def get_results_overview(client): url = reverse_results_overview() response = client.get(url, **HTTPS_KWARG) diff --git a/src/tasks/views.py b/src/tasks/views.py index adbbc383..3693f9ac 100644 --- a/src/tasks/views.py +++ b/src/tasks/views.py @@ -18,6 +18,7 @@ from scheduler import scheduler from sensor import settings +from .models.acquisition import Acquisition from .models.task_result import TaskResult from .permissions import IsAdminOrOwnerOrReadOnly from .serializers.task import TaskSerializer @@ -150,15 +151,16 @@ def destroy_all(self, request, version, schedule_entry_name): def archive(self, request, version, schedule_entry_name): queryset = self.get_queryset() - if not queryset.exists(): + acquisitions = Acquisition.objects.filter(task_result__in=queryset) + + if not acquisitions.exists(): raise Http404 - fqdn = settings.FQDN - fname = fqdn + "_" + schedule_entry_name + ".sigmf" + fname = settings.FQDN + "_" + schedule_entry_name + ".sigmf" # FileResponse handles closing the file tmparchive = tempfile.TemporaryFile() - build_sigmf_archive(tmparchive, schedule_entry_name, queryset) + build_sigmf_archive(tmparchive, schedule_entry_name, acquisitions) content_type = "application/x-tar" response = FileResponse( tmparchive, as_attachment=True, filename=fname, content_type=content_type @@ -192,13 +194,15 @@ class TaskResultInstanceViewSet( @action(detail=True) def archive(self, request, version, schedule_entry_name, task_id): entry_name = schedule_entry_name - fqdn = settings.FQDN - fname = fqdn + "_" + entry_name + "_" + str(task_id) + ".sigmf" - acq = self.get_object() + fname = settings.FQDN + "_" + entry_name + "_" + str(task_id) + ".sigmf" + tr = self.get_object() + acquisitions = Acquisition.objects.filter(task_result=tr) + if not acquisitions: + raise Http404 # FileResponse handles closing the file tmparchive = tempfile.TemporaryFile() - build_sigmf_archive(tmparchive, schedule_entry_name, [acq]) + build_sigmf_archive(tmparchive, schedule_entry_name, acquisitions) content_type = "application/x-tar" response = FileResponse( tmparchive, as_attachment=True, 
filename=fname, content_type=content_type @@ -217,14 +221,16 @@ def build_sigmf_archive(fileobj, schedule_entry_name, acquisitions): """ logger.debug("building sigmf archive") + multirecording = len(acquisitions) > 1 + for acq in acquisitions: with tempfile.NamedTemporaryFile() as tmpdata: tmpdata.write(acq.data) tmpdata.seek(0) # move fd ptr to start of data for reading - name = schedule_entry_name + "_" + str(acq.task_id) - sigmf_file = sigmf.sigmffile.SigMFFile( - metadata=acq.sigmf_metadata, name=name - ) + name = schedule_entry_name + "_" + str(acq.task_result.task_id) + if multirecording: + name += "-" + str(acq.recording_id) + sigmf_file = sigmf.sigmffile.SigMFFile(metadata=acq.metadata, name=name) sigmf_file.set_data_file(tmpdata.name) sigmf.archive.SigMFArchive(sigmf_file, path=name, fileobj=fileobj) From 7cfc00e36b9f73b16c02c13fc4f8deef1067ba02 Mon Sep 17 00:00:00 2001 From: Douglas Anderson Date: Tue, 4 Jun 2019 22:17:34 -0600 Subject: [PATCH 23/36] Update static files --- src/static/drf-yasg/redoc/redoc.min.js | 67 +++++++++--------- .../swagger-ui-dist/favicon-32x32.png | Bin 1643 -> 628 bytes .../swagger-ui-dist/swagger-ui-bundle.js | 22 +++--- .../swagger-ui-standalone-preset.js | 4 +- .../drf-yasg/swagger-ui-dist/swagger-ui.css | 4 +- 5 files changed, 49 insertions(+), 48 deletions(-) diff --git a/src/static/drf-yasg/redoc/redoc.min.js b/src/static/drf-yasg/redoc/redoc.min.js index 39a7fd79..70f395e1 100644 --- a/src/static/drf-yasg/redoc/redoc.min.js +++ b/src/static/drf-yasg/redoc/redoc.min.js @@ -1,10 +1,10 @@ /*! * ReDoc - OpenAPI/Swagger-generated API Reference Documentation * ------------------------------------------------------------- - * Version: "2.0.0-rc.2" + * Version: "2.0.0-rc.4" * Repo: https://github.com/Rebilly/ReDoc */ -!function(e,t){"object"==typeof exports&&"object"==typeof module?module.exports=t(require("null"),function(){try{return require("esprima")}catch(e){}}()):"function"==typeof define&&define.amd?define(["null","esprima"],t):"object"==typeof exports?exports.Redoc=t(require("null"),function(){try{return require("esprima")}catch(e){}}()):e.Redoc=t(e.null,e.esprima)}(this,function(e,t){return function(e){var t={};function n(r){if(t[r])return t[r].exports;var o=t[r]={i:r,l:!1,exports:{}};return e[r].call(o.exports,o,o.exports,n),o.l=!0,o.exports}return n.m=e,n.c=t,n.d=function(e,t,r){n.o(e,t)||Object.defineProperty(e,t,{enumerable:!0,get:r})},n.r=function(e){"undefined"!=typeof Symbol&&Symbol.toStringTag&&Object.defineProperty(e,Symbol.toStringTag,{value:"Module"}),Object.defineProperty(e,"__esModule",{value:!0})},n.t=function(e,t){if(1&t&&(e=n(e)),8&t)return e;if(4&t&&"object"==typeof e&&e&&e.__esModule)return e;var r=Object.create(null);if(n.r(r),Object.defineProperty(r,"default",{enumerable:!0,value:e}),2&t&&"string"!=typeof e)for(var o in e)n.d(r,o,function(t){return e[t]}.bind(null,o));return r},n.n=function(e){var t=e&&e.__esModule?function(){return e.default}:function(){return e};return n.d(t,"a",t),t},n.o=function(e,t){return Object.prototype.hasOwnProperty.call(e,t)},n.p="",n(n.s=134)}([function(e,t,n){"use strict";e.exports=n(178)},function(e,t,n){"use strict";(function(e,r){n.d(t,"a",function(){return Jt}),n.d(t,"b",function(){return Wt}),n.d(t,"o",function(){return Ft}),n.d(t,"g",function(){return De}),n.d(t,"n",function(){return v}),n.d(t,"k",function(){return ne}),n.d(t,"i",function(){return Je}),n.d(t,"j",function(){return Pe}),n.d(t,"l",function(){return be}),n.d(t,"e",function(){return le}),n.d(t,"m",function(){return 
Xt}),n.d(t,"d",function(){return O}),n.d(t,"f",function(){return Kt}),n.d(t,"h",function(){return Le}),n.d(t,"c",function(){return x}); +!function(e,t){"object"==typeof exports&&"object"==typeof module?module.exports=t(function(){try{return require("esprima")}catch(e){}}()):"function"==typeof define&&define.amd?define(["esprima"],t):"object"==typeof exports?exports.Redoc=t(function(){try{return require("esprima")}catch(e){}}()):e.Redoc=t(e.esprima)}(this,function(e){return function(e){var t={};function n(r){if(t[r])return t[r].exports;var o=t[r]={i:r,l:!1,exports:{}};return e[r].call(o.exports,o,o.exports,n),o.l=!0,o.exports}return n.m=e,n.c=t,n.d=function(e,t,r){n.o(e,t)||Object.defineProperty(e,t,{enumerable:!0,get:r})},n.r=function(e){"undefined"!=typeof Symbol&&Symbol.toStringTag&&Object.defineProperty(e,Symbol.toStringTag,{value:"Module"}),Object.defineProperty(e,"__esModule",{value:!0})},n.t=function(e,t){if(1&t&&(e=n(e)),8&t)return e;if(4&t&&"object"==typeof e&&e&&e.__esModule)return e;var r=Object.create(null);if(n.r(r),Object.defineProperty(r,"default",{enumerable:!0,value:e}),2&t&&"string"!=typeof e)for(var o in e)n.d(r,o,function(t){return e[t]}.bind(null,o));return r},n.n=function(e){var t=e&&e.__esModule?function(){return e.default}:function(){return e};return n.d(t,"a",t),t},n.o=function(e,t){return Object.prototype.hasOwnProperty.call(e,t)},n.p="",n(n.s=177)}([function(e,t,n){"use strict";e.exports=n(222)},function(e,t,n){"use strict";n.r(t),n.d(t,"__extends",function(){return o}),n.d(t,"__assign",function(){return i}),n.d(t,"__rest",function(){return a}),n.d(t,"__decorate",function(){return s}),n.d(t,"__param",function(){return l}),n.d(t,"__metadata",function(){return u}),n.d(t,"__awaiter",function(){return c}),n.d(t,"__generator",function(){return f}),n.d(t,"__exportStar",function(){return p}),n.d(t,"__values",function(){return d}),n.d(t,"__read",function(){return h}),n.d(t,"__spread",function(){return m}),n.d(t,"__await",function(){return v}),n.d(t,"__asyncGenerator",function(){return g}),n.d(t,"__asyncDelegator",function(){return y}),n.d(t,"__asyncValues",function(){return b}),n.d(t,"__makeTemplateObject",function(){return w}),n.d(t,"__importStar",function(){return x}),n.d(t,"__importDefault",function(){return k}); /*! ***************************************************************************** Copyright (c) Microsoft Corporation. All rights reserved. Licensed under the Apache License, Version 2.0 (the "License"); you may not use @@ -19,14 +19,29 @@ MERCHANTABLITY OR NON-INFRINGEMENT. See the Apache Version 2.0 License for specific language governing permissions and limitations under the License. 
***************************************************************************** */ -var o=Object.setPrototypeOf||{__proto__:[]}instanceof Array&&function(e,t){e.__proto__=t}||function(e,t){for(var n in t)t.hasOwnProperty(n)&&(e[n]=t[n])};function i(e,t){function n(){this.constructor=e}o(e,t),e.prototype=null===t?Object.create(t):(n.prototype=t.prototype,new n)}var a=Object.assign||function(e){for(var t,n=1,r=arguments.length;n0)&&!(r=i.next()).done;)a.push(r.value)}catch(e){o={error:e}}finally{try{r&&!r.done&&(n=i.return)&&n.call(i)}finally{if(o)throw o.error}}return a}function l(){for(var e=[],t=0;t0)for(var l=0;l",e):2===arguments.length&&"function"==typeof t?b(e,t):1===arguments.length&&"string"==typeof e?_(e):!0!==r?_(t).apply(null,arguments):void(e[t]=b(e.name||t,n.value))};function C(e,t,n){pt(e,t,b(t,n.bind(e)))}O.bound=function(e,t,n,r){return!0===r?(C(e,t,n.value),null):n?{configurable:!0,enumerable:!1,get:function(){return C(this,t,n.value||n.initializer.call(this)),this[t]},set:S}:{enumerable:!1,configurable:!0,set:function(e){C(this,t,e)},get:function(){}}};var T=Object.prototype.toString;function j(e,t){return A(e,t)}function A(e,t,n,r){if(e===t)return 0!==e||1/e==1/t;if(null==e||null==t)return!1;if(e!=e)return t!=t;var o=typeof e;return("function"===o||"object"===o||"object"==typeof t)&&function(e,t,n,r){e=I(e),t=I(t);var o=T.call(e);if(o!==T.call(t))return!1;switch(o){case"[object RegExp]":case"[object String]":return""+e==""+t;case"[object Number]":return+e!=+e?+t!=+t:0==+e?1/+e==1/t:+e==+t;case"[object Date]":case"[object Boolean]":return+e==+t;case"[object Symbol]":return"undefined"!=typeof Symbol&&Symbol.valueOf.call(e)===Symbol.valueOf.call(t)}var i="[object Array]"===o;if(!i){if("object"!=typeof e||"object"!=typeof t)return!1;var a=e.constructor,s=t.constructor;if(a!==s&&!("function"==typeof a&&a instanceof a&&"function"==typeof s&&s instanceof s)&&"constructor"in e&&"constructor"in t)return!1}r=r||[];var l=(n=n||[]).length;for(;l--;)if(n[l]===e)return r[l]===t;if(n.push(e),r.push(t),i){if((l=e.length)!==t.length)return!1;for(;l--;)if(!A(e[l],t[l],n,r))return!1}else{var c,u=Object.keys(e);if(l=u.length,Object.keys(t).length!==l)return!1;for(;l--;)if(c=u[l],!P(t,c)||!A(e[c],t[c],n,r))return!1}return n.pop(),r.pop(),!0}(e,t,n,r)}function I(e){return Je(e)?e.peek():ht(e)||Pe(e)?mt(e.entries()):e}function P(e,t){return Object.prototype.hasOwnProperty.call(e,t)}function R(e,t){return e===t}var N={identity:R,structural:function(e,t){return j(e,t)},default:function(e,t){return function(e,t){return"number"==typeof e&&"number"==typeof t&&isNaN(e)&&isNaN(t)}(e,t)||R(e,t)}};function L(e,t){void 0===t&&(t=tt);var n,r=t&&t.name||e.name||"Autorun@"+rt();if(!t.scheduler&&!t.delay)n=new Wt(r,function(){this.track(a)},t.onError);else{var o=D(t),i=!1;n=new Wt(r,function(){i||(i=!0,o(function(){i=!1,n.isDisposed||n.track(a)}))},t.onError)}function a(){e(n)}return n.schedule(),n.getDisposer()}var M=function(e){return e()};function D(e){return e.scheduler?e.scheduler:e.delay?function(t){return setTimeout(t,e.delay)}:M}var F=function(){function e(e){this.dependenciesState=vt.NOT_TRACKING,this.observing=[],this.newObserving=null,this.isBeingObserved=!1,this.isPendingUnobservation=!1,this.observers=[],this.observersIndexes={},this.diffValue=0,this.runId=0,this.lastAccessedBy=0,this.lowestObserverState=vt.UP_TO_DATE,this.unboundDepsCount=0,this.__mapid="#"+rt(),this.value=new 
Pt(null),this.isComputing=!1,this.isRunningSetter=!1,this.isTracing=bt.NONE,this.derivation=e.get,this.name=e.name||"ComputedValue@"+rt(),e.set&&(this.setter=b(this.name+"-setter",e.set)),this.equals=e.equals||(e.compareStructural||e.struct?N.structural:N.default),this.scope=e.context,this.requiresReaction=!!e.requiresReaction,this.keepAlive=!!e.keepAlive}return e.prototype.onBecomeStale=function(){!function(e){if(e.lowestObserverState!==vt.UP_TO_DATE)return;e.lowestObserverState=vt.POSSIBLY_STALE;var t=e.observers,n=t.length;for(;n--;){var r=t[n];r.dependenciesState===vt.UP_TO_DATE&&(r.dependenciesState=vt.POSSIBLY_STALE,r.isTracing!==bt.NONE&&It(r,e),r.onBecomeStale())}}(this)},e.prototype.onBecomeUnobserved=function(){},e.prototype.onBecomeObserved=function(){},e.prototype.get=function(){this.isComputing&&ot("Cycle detected in computation "+this.name+": "+this.derivation),0!==Et.inBatch||0!==this.observers.length||this.keepAlive?(At(this),Nt(this)&&this.trackAndCompute()&&function(e){if(e.lowestObserverState===vt.STALE)return;e.lowestObserverState=vt.STALE;var t=e.observers,n=t.length;for(;n--;){var r=t[n];r.dependenciesState===vt.POSSIBLY_STALE?r.dependenciesState=vt.STALE:r.dependenciesState===vt.UP_TO_DATE&&(e.lowestObserverState=vt.UP_TO_DATE)}}(this)):Nt(this)&&(this.warnAboutUntrackedRead(),Tt(),this.value=this.computeValue(!1),jt());var e=this.value;if(Rt(e))throw e.cause;return e},e.prototype.peek=function(){var e=this.computeValue(!1);if(Rt(e))throw e.cause;return e},e.prototype.set=function(e){if(this.setter){it(!this.isRunningSetter,"The setter of computed value '"+this.name+"' is trying to update itself. Did you intend to update an _observable_ value, instead of the computed property?"),this.isRunningSetter=!0;try{this.setter.call(this.scope,e)}finally{this.isRunningSetter=!1}}else it(!1,!1)},e.prototype.trackAndCompute=function(){d()&&h({object:this.scope,type:"compute",name:this.name});var e=this.value,t=this.dependenciesState===vt.NOT_TRACKING,n=this.computeValue(!0),r=t||Rt(e)||Rt(n)||!this.equals(e,n);return r&&(this.value=n),r},e.prototype.computeValue=function(e){var t;if(this.isComputing=!0,Et.computationDepth++,e)t=Mt(this,this.derivation,this.scope);else if(!0===Et.disableErrorBoundaries)t=this.derivation.call(this.scope);else try{t=this.derivation.call(this.scope)}catch(e){t=new Pt(e)}return Et.computationDepth--,this.isComputing=!1,t},e.prototype.suspend=function(){this.keepAlive||(Dt(this),this.value=void 0)},e.prototype.observe=function(e,t){var n=this,r=!0,o=void 0;return L(function(){var i=n.get();if(!r||t){var a=Ut();e({type:"update",object:n,newValue:i,oldValue:o}),Bt(a)}r=!1,o=i})},e.prototype.warnAboutUntrackedRead=function(){},e.prototype.toJSON=function(){return this.get()},e.prototype.toString=function(){return this.name+"["+this.derivation.toString()+"]"},e.prototype.valueOf=function(){return yt(this.get())},e}();F.prototype[gt()]=F.prototype.valueOf;var U=dt("ComputedValue",F);function B(e){return void 0!==e.interceptors&&e.interceptors.length>0}function z(e,t){var n=e.interceptors||(e.interceptors=[]);return n.push(t),st(function(){var e=n.indexOf(t);-1!==e&&n.splice(e,1)})}function $(e,t){var n=Ut();try{var r=e.interceptors;if(r)for(var o=0,i=r.length;o0}function q(e,t){var n=e.changeListeners||(e.changeListeners=[]);return n.push(t),st(function(){var e=n.indexOf(t);-1!==e&&n.splice(e,1)})}function H(e,t){var n=Ut(),r=e.changeListeners;if(r){for(var o=0,i=(r=r.slice()).length;o2&&we("box");var n=fe(t);return new 
V(e,de(n),n.name)},shallowBox:function(e,t){return arguments.length>2&&we("shallowBox"),at("observable.shallowBox","observable.box(value, { deep: false })"),be.box(e,{name:t,deep:!1})},array:function(e,t){arguments.length>2&&we("array");var n=fe(t);return new Ye(e,de(n),n.name)},shallowArray:function(e,t){return arguments.length>2&&we("shallowArray"),at("observable.shallowArray","observable.array(values, { deep: false })"),be.array(e,{name:t,deep:!1})},map:function(e,t){arguments.length>2&&we("map");var n=fe(t);return new je(e,de(n),n.name)},shallowMap:function(e,t){return arguments.length>2&&we("shallowMap"),at("observable.shallowMap","observable.map(values, { deep: false })"),be.map(e,{name:t,deep:!1})},object:function(e,t,n){return"string"==typeof arguments[1]&&we("object"),ce({},e,t,fe(n))},shallowObject:function(e,t){return"string"==typeof arguments[1]&&we("shallowObject"),at("observable.shallowObject","observable.object(values, {}, { deep: false })"),be.object(e,{},{name:t,deep:!1})},ref:ge,shallow:me,deep:he,struct:ye},be=function(e,t,n){if("string"==typeof arguments[1])return he.apply(null,arguments);if(ie(e))return e;var r=ut(e)?be.object(e,t,n):Array.isArray(e)?be.array(e,t):ht(e)?be.map(e,t):e;if(r!==e)return r;ot(!1)};function we(e){ot("Expected one or two arguments to observable."+e+". Did you accidentally try to use observable."+e+" as decorator?")}function xe(e,t,n){return ie(e)?e:Array.isArray(e)?be.array(e,{name:n}):ut(e)?be.object(e,void 0,{name:n}):ht(e)?be.map(e,{name:n}):e}function ke(e){return e}function Ee(){return"function"==typeof Symbol&&Symbol.iterator||"@@iterator"}function Se(e,t){ft(e,Ee(),t)}function _e(e){return e[Ee()]=Oe,e}function Oe(){return this}function Ce(e,t){void 0===t&&(t=void 0),Tt();try{return e.apply(t)}finally{jt()}}Object.keys(ve).forEach(function(e){return be[e]=ve[e]});var Te={},je=function(){function e(e,t,n){if(void 0===t&&(t=xe),void 0===n&&(n="ObservableMap@"+rt()),this.enhancer=t,this.name=n,this.$mobx=Te,this._keys=new Ye(void 0,ke,this.name+".keys()",!0),"function"!=typeof Map)throw new Error("mobx.map requires Map polyfill for the current browser. 
Check babel-polyfill or core-js/es6/map.js");this._data=new Map,this._hasMap=new Map,this.merge(e)}return e.prototype._has=function(e){return this._data.has(e)},e.prototype.has=function(e){return this._hasMap.has(e)?this._hasMap.get(e).get():this._updateHasMapEntry(e,!1).get()},e.prototype.set=function(e,t){var n=this._has(e);if(B(this)){var r=$(this,{type:n?"update":"add",object:this,newValue:t,name:e});if(!r)return this;t=r.newValue}return n?this._updateValue(e,t):this._addValue(e,t),this},e.prototype.delete=function(e){var t=this;if(B(this)&&!(o=$(this,{type:"delete",object:this,name:e})))return!1;if(this._has(e)){var n=d(),r=W(this),o=r||n?{type:"delete",object:this,oldValue:this._data.get(e).value,name:e}:null;return n&&m(a({},o,{name:this.name,key:e})),Ce(function(){t._keys.remove(e),t._updateHasMapEntry(e,!1),t._data.get(e).setNewValue(void 0),t._data.delete(e)}),r&&H(this,o),n&&y(),!0}return!1},e.prototype._updateHasMapEntry=function(e,t){var n=this._hasMap.get(e);return n?n.setNewValue(t):(n=new V(t,ke,this.name+"."+e+"?",!1),this._hasMap.set(e,n)),n},e.prototype._updateValue=function(e,t){var n=this._data.get(e);if((t=n.prepareNewValue(t))!==Y){var r=d(),o=W(this),i=o||r?{type:"update",object:this,oldValue:n.value,name:e,newValue:t}:null;r&&m(a({},i,{name:this.name,key:e})),n.setNewValue(t),o&&H(this,i),r&&y()}},e.prototype._addValue=function(e,t){var n=this;Ce(function(){var r=new V(t,n.enhancer,n.name+"."+e,!1);n._data.set(e,r),t=r.value,n._updateHasMapEntry(e,!0),n._keys.push(e)});var r=d(),o=W(this),i=o||r?{type:"add",object:this,name:e,newValue:t}:null;r&&m(a({},i,{name:this.name,key:e})),o&&H(this,i),r&&y()},e.prototype.get=function(e){return this.has(e)?this.dehanceValue(this._data.get(e).get()):this.dehanceValue(void 0)},e.prototype.dehanceValue=function(e){return void 0!==this.dehancer?this.dehancer(e):e},e.prototype.keys=function(){return this._keys[Ee()]()},e.prototype.values=function(){var e=this,t=0;return _e({next:function(){return t0?e.map(this.dehancer):e},e.prototype.intercept=function(e){return z(this,e)},e.prototype.observe=function(e,t){return void 0===t&&(t=!1),t&&e({object:this.array,type:"splice",index:0,added:this.values.slice(),addedCount:this.values.length,removed:[],removedCount:0}),q(this,e)},e.prototype.getArrayLength=function(){return this.atom.reportObserved(),this.values.length},e.prototype.setArrayLength=function(e){if("number"!=typeof e||e<0)throw new Error("[mobx.array] Out of range: "+e);var t=this.values.length;if(e!==t)if(e>t){for(var n=new Array(e-t),r=0;r0&&e+t+1>We&&Xe(e+t+1)},e.prototype.spliceWithArray=function(e,t,n){var r=this;Lt(this.atom);var o=this.values.length;if(void 0===e?e=0:e>o?e=o:e<0&&(e=Math.max(0,o+e)),t=1===arguments.length?o-e:null==t?0:Math.max(0,Math.min(t,o-e)),void 0===n&&(n=et),B(this)){var i=$(this,{object:this.array,type:"splice",index:e,removedCount:t,added:n});if(!i)return et;t=i.removedCount,n=i.added}var a=(n=0===n.length?n:n.map(function(e){return r.enhancer(e,void 0)})).length-t;this.updateArrayLength(o,a);var s=this.spliceItemsIntoValues(e,t,n);return 0===t&&0===n.length||this.notifyArraySplice(e,n,s),this.dehanceValues(s)},e.prototype.spliceItemsIntoValues=function(e,t,n){if(n.length<1e4)return(r=this.values).splice.apply(r,l([e,t],n));var r,o=this.values.slice(e,e+t);return this.values=this.values.slice(0,e).concat(n,this.values.slice(e+t)),o},e.prototype.notifyArrayChildUpdate=function(e,t,n){var 
r=!this.owned&&d(),o=W(this),i=o||r?{object:this.array,type:"update",index:e,newValue:t,oldValue:n}:null;r&&m(a({},i,{name:this.atom.name})),this.atom.reportChanged(),o&&H(this,i),r&&y()},e.prototype.notifyArraySplice=function(e,t,n){var r=!this.owned&&d(),o=W(this),i=o||r?{object:this.array,type:"splice",index:e,removed:n,added:t,removedCount:n.length,addedCount:t.length}:null;r&&m(a({},i,{name:this.atom.name})),this.atom.reportChanged(),o&&H(this,i),r&&y()},e}(),Ye=function(e){function t(t,n,r,o){void 0===r&&(r="ObservableArray@"+rt()),void 0===o&&(o=!1);var i=e.call(this)||this,a=new He(r,n,i,o);if(ft(i,"$mobx",a),t&&t.length){var s=k(!0);i.spliceWithArray(0,0,t),E(s)}return $e&&Object.defineProperty(a.array,"0",Ve),i}return i(t,e),t.prototype.intercept=function(e){return this.$mobx.intercept(e)},t.prototype.observe=function(e,t){return void 0===t&&(t=!1),this.$mobx.observe(e,t)},t.prototype.clear=function(){return this.splice(0)},t.prototype.concat=function(){for(var e=[],t=0;t-1&&(this.splice(t,1),!0)},t.prototype.move=function(e,t){function n(e){if(e<0)throw new Error("[mobx.array] Index out of bounds: "+e+" is negative");var t=this.$mobx.values.length;if(e>=t)throw new Error("[mobx.array] Index out of bounds: "+e+" is not smaller than "+t)}if(at("observableArray.move is deprecated, use .slice() & .replace() instead"),n.call(this,e),n.call(this,t),e!==t){var r,o=this.$mobx.values;r=e0&&!e.__mobxGlobals&&(xt=!1),e.__mobxGlobals&&e.__mobxGlobals.version!==(new wt).version&&(xt=!1),xt?e.__mobxGlobals?(e.__mobxInstanceCount+=1,e.__mobxGlobals):(e.__mobxInstanceCount=1,e.__mobxGlobals=new wt):(setTimeout(function(){kt||ot("There are multiple, different versions of MobX active. Make sure MobX is loaded only once or use `configure({ isolateGlobalState: true })`")},1),new wt)}();function St(e){var t,n,r={name:e.name};return e.observing&&e.observing.length>0&&(r.dependencies=(t=e.observing,n=[],t.forEach(function(e){-1===n.indexOf(e)&&n.push(e)}),n).map(St)),r}function _t(e,t){var n=e.observers.length;n&&(e.observersIndexes[t.__mapid]=n),e.observers[n]=t,e.lowestObserverState>t.dependenciesState&&(e.lowestObserverState=t.dependenciesState)}function Ot(e,t){if(1===e.observers.length)e.observers.length=0,Ct(e);else{var n=e.observers,r=e.observersIndexes,o=n.pop();if(o!==t){var i=r[t.__mapid]||0;i?r[o.__mapid]=i:delete r[o.__mapid],n[i]=o}delete r[t.__mapid]}}function Ct(e){!1===e.isPendingUnobservation&&(e.isPendingUnobservation=!0,Et.pendingUnobservations.push(e))}function Tt(){Et.inBatch++}function jt(){if(0==--Et.inBatch){Yt();for(var e=Et.pendingUnobservations,t=0;t0&&Ct(e),!1)}function It(e,t){if(console.log("[mobx.trace] '"+e.name+"' is invalidated due to a change in: '"+t.name+"'"),e.isTracing===bt.BREAK){var n=[];!function e(t,n,r){if(n.length>=1e3)return void n.push("(and many more)");n.push(""+new Array(r).join("\t")+t.name);t.dependencies&&t.dependencies.forEach(function(t){return e(t,n,r+1)})}((r=e,St(Re(r,o))),n,1),new Function("debugger;\n/*\nTracing '"+e.name+"'\n\nYou are entering this break point because derivation '"+e.name+"' is being traced and '"+t.name+"' is now forcing it to update.\nJust follow the stacktrace you should now see in the devtools to see precisely what piece of your code is causing this update\nThe stackframe you are looking for is at least ~6-8 stack-frames up.\n\n"+(e instanceof F?e.derivation.toString():"")+"\n\nThe dependencies for this derivation are:\n\n"+n.join("\n")+"\n*/\n ")()}var 
[Minified JavaScript bundle diff omitted. These hunks rewrite a rebuilt webpack bundle of vendored front-end dependencies — MobX 5, tslib helpers, Node Buffer and url polyfills, Prism.js, classnames, marked, and a YAML parser — and, as far as the diff shows, change only the webpack module IDs (e.g. n(191) → n(235)) and the minifier-assigned identifier names; no functional source change is visible in the generated output.]
*(?:\\n{2,}|\\s*$)|\\s]*)*?/?> *(?:\\n{2,}|\\s*$))").replace("comment",n._comment).replace(/tag/g,"(?!(?:a|em|strong|small|s|cite|q|dfn|abbr|data|time|code|var|samp|kbd|sub|sup|i|b|u|mark|ruby|rt|rp|bdi|bdo|span|br|wbr|ins|del|img)\\b)\\w+(?!:|[^\\w\\s@]*@)\\b").getRegex(),def:/^ *\[([^\]]+)\]: *]+)>?(?: +(["(][^\n]+[")]))? *(?:\n+|$)/}),r.rules=n,r.lex=function(e,t){return new r(t).lex(e)},r.prototype.lex=function(e){return e=e.replace(/\r\n|\r/g,"\n").replace(/\t/g," ").replace(/\u00a0/g," ").replace(/\u2424/g,"\n"),this.token(e,!0)},r.prototype.token=function(e,t){var r,o,i,a,s,l,c,u,p,f,d,h,m,g,y,w;for(e=e.replace(/^ +$/gm,"");e;)if((i=this.rules.newline.exec(e))&&(e=e.substring(i[0].length),i[0].length>1&&this.tokens.push({type:"space"})),i=this.rules.code.exec(e))e=e.substring(i[0].length),i=i[0].replace(/^ {4}/gm,""),this.tokens.push({type:"code",text:this.options.pedantic?i:b(i,"\n")});else if(i=this.rules.fences.exec(e))e=e.substring(i[0].length),this.tokens.push({type:"code",lang:i[2]?i[2].trim():i[2],text:i[3]||""});else if(i=this.rules.heading.exec(e))e=e.substring(i[0].length),this.tokens.push({type:"heading",depth:i[1].length,text:i[2]});else if(t&&(i=this.rules.nptable.exec(e))&&(l={type:"table",header:v(i[1].replace(/^ *| *\| *$/g,"")),align:i[2].replace(/^ *|\| *$/g,"").split(/ *\| */),cells:i[3]?i[3].replace(/\n$/,"").split("\n"):[]}).header.length===l.align.length){for(e=e.substring(i[0].length),d=0;d ?/gm,""),this.token(i,t),this.tokens.push({type:"blockquote_end"});else if(i=this.rules.list.exec(e)){for(e=e.substring(i[0].length),c={type:"list_start",ordered:g=(a=i[2]).length>1,start:g?+a:"",loose:!1},this.tokens.push(c),u=[],r=!1,m=(i=i[0].match(this.rules.item)).length,d=0;d1?1===s.length:s.length>1||this.options.smartLists&&s!==a)&&(e=i.slice(d+1).join("\n")+e,d=m-1)),o=r||/\n\n(?!\s*$)/.test(l),d!==m-1&&(r="\n"===l.charAt(l.length-1),o||(o=r)),o&&(c.loose=!0),w=void 0,(y=/^\[[ xX]\] /.test(l))&&(w=" "!==l[1],l=l.replace(/^\[[ xX]\] +/,"")),p={type:"list_item_start",task:y,checked:w,loose:o},u.push(p),this.tokens.push(p),this.token(l,!1),this.tokens.push({type:"list_item_end"});if(c.loose)for(m=u.length,d=0;d?@\[\]\\^_`{|}~])/,autolink:/^<(scheme:[^\s\x00-\x1f<>]*|email)>/,url:g,tag:"^comment|^|^<[a-zA-Z][\\w-]*(?:attribute)*?\\s*/?>|^<\\?[\\s\\S]*?\\?>|^|^",link:/^!?\[(label)\]\(href(?:\s+(title))?\s*\)/,reflink:/^!?\[(label)\]\[(?!\s*\])((?:\\[\[\]]?|[^\[\]\\])+)\]/,nolink:/^!?\[(?!\s*\])((?:\[[^\[\]]*\]|\\[\[\]]|[^\[\]])*)\](?:\[\])?/,strong:/^__([^\s_])__(?!_)|^\*\*([^\s*])\*\*(?!\*)|^__([^\s][\s\S]*?[^\s])__(?!_)|^\*\*([^\s][\s\S]*?[^\s])\*\*(?!\*)/,em:/^_([^\s_])_(?!_)|^\*([^\s*"<\[])\*(?!\*)|^_([^\s][\s\S]*?[^\s_])_(?!_|[^\spunctuation])|^_([^\s_][\s\S]*?[^\s])_(?!_|[^\spunctuation])|^\*([^\s"<\[][\s\S]*?[^\s*])\*(?!\*)|^\*([^\s*"<\[][\s\S]*?[^\s])\*(?!\*)/,code:/^(`+)([^`]|[^`][\s\S]*?[^`])\1(?!`)/,br:/^( 
{2,}|\\)\n(?!\s*$)/,del:g,text:/^(`+|[^`])[\s\S]*?(?=[\\?@\\[^_{|}~",o.em=f(o.em).replace(/punctuation/g,o._punctuation).getRegex(),o._escapes=/\\([!"#$%&'()*+,\-.\/:;<=>?@\[\]\\^_`{|}~])/g,o._scheme=/[a-zA-Z][a-zA-Z0-9+.-]{1,31}/,o._email=/[a-zA-Z0-9.!#$%&'*+\/=?^_`{|}~-]+(@)[a-zA-Z0-9](?:[a-zA-Z0-9-]{0,61}[a-zA-Z0-9])?(?:\.[a-zA-Z0-9](?:[a-zA-Z0-9-]{0,61}[a-zA-Z0-9])?)+(?![-_])/,o.autolink=f(o.autolink).replace("scheme",o._scheme).replace("email",o._email).getRegex(),o._attribute=/\s+[a-zA-Z:_][\w.:-]*(?:\s*=\s*"[^"]*"|\s*=\s*'[^']*'|\s*=\s*[^\s"'=<>`]+)?/,o.tag=f(o.tag).replace("comment",n._comment).replace("attribute",o._attribute).getRegex(),o._label=/(?:\[[^\[\]]*\]|\\[\[\]]?|`[^`]*`|[^\[\]\\])*?/,o._href=/\s*(<(?:\\[<>]?|[^\s<>\\])*>|(?:\\[()]?|\([^\s\x00-\x1f\\]*\)|[^\s\x00-\x1f()\\])*?)/,o._title=/"(?:\\"?|[^"\\])*"|'(?:\\'?|[^'\\])*'|\((?:\\\)?|[^)\\])*\)/,o.link=f(o.link).replace("label",o._label).replace("href",o._href).replace("title",o._title).getRegex(),o.reflink=f(o.reflink).replace("label",o._label).getRegex(),o.normal=y({},o),o.pedantic=y({},o.normal,{strong:/^__(?=\S)([\s\S]*?\S)__(?!_)|^\*\*(?=\S)([\s\S]*?\S)\*\*(?!\*)/,em:/^_(?=\S)([\s\S]*?\S)_(?!_)|^\*(?=\S)([\s\S]*?\S)\*(?!\*)/,link:f(/^!?\[(label)\]\((.*?)\)/).replace("label",o._label).getRegex(),reflink:f(/^!?\[(label)\]\s*\[([^\]]*)\]/).replace("label",o._label).getRegex()}),o.gfm=y({},o.normal,{escape:f(o.escape).replace("])","~|])").getRegex(),_extended_email:/[A-Za-z0-9._+-]+(@)[a-zA-Z0-9-_]+(?:\.[a-zA-Z0-9-_]*[a-zA-Z0-9])+(?![-_])/,url:/^((?:ftp|https?):\/\/|www\.)(?:[a-zA-Z0-9\-]+\.?)+[^\s<]*|^email/,_backpedal:/(?:[^?!.,:;*_~()&]+|\([^)]*\)|&(?![a-zA-Z0-9]+;$)|[?!.,:;*_~)]+(?!$))+/,del:/^~+(?=\S)([\s\S]*?\S)~+/,text:f(o.text).replace("]|","~]|").replace("|$","|https?://|ftp://|www\\.|[a-zA-Z0-9.!#$%&'*+/=?^_`{\\|}~-]+@|$").getRegex()}),o.gfm.url=f(o.gfm.url,"i").replace("email",o.gfm._extended_email).getRegex(),o.breaks=y({},o.gfm,{br:f(o.br).replace("{2,}","*").getRegex(),text:f(o.gfm.text).replace("{2,}","*").getRegex()}),i.rules=o,i.output=function(e,t,n){return new i(t,n).output(e)},i.prototype.output=function(e){for(var t,n,r,o,a,s,l="";e;)if(a=this.rules.escape.exec(e))e=e.substring(a[0].length),l+=u(a[1]);else if(a=this.rules.tag.exec(e))!this.inLink&&/^/i.test(a[0])&&(this.inLink=!1),!this.inRawBlock&&/^<(pre|code|kbd|script)(\s|>)/i.test(a[0])?this.inRawBlock=!0:this.inRawBlock&&/^<\/(pre|code|kbd|script)(\s|>)/i.test(a[0])&&(this.inRawBlock=!1),e=e.substring(a[0].length),l+=this.options.sanitize?this.options.sanitizer?this.options.sanitizer(a[0]):u(a[0]):a[0];else if(a=this.rules.link.exec(e))e=e.substring(a[0].length),this.inLink=!0,r=a[2],this.options.pedantic?(t=/^([^'"]*[^\s])\s+(['"])(.*)\2/.exec(r))?(r=t[1],o=t[3]):o="":o=a[3]?a[3].slice(1,-1):"",r=r.trim().replace(/^<([\s\S]*)>$/,"$1"),l+=this.outputLink(a,{href:i.escapes(r),title:i.escapes(o)}),this.inLink=!1;else if((a=this.rules.reflink.exec(e))||(a=this.rules.nolink.exec(e))){if(e=e.substring(a[0].length),t=(a[2]||a[1]).replace(/\s+/g," "),!(t=this.links[t.toLowerCase()])||!t.href){l+=a[0].charAt(0),e=a[0].substring(1)+e;continue}this.inLink=!0,l+=this.outputLink(a,t),this.inLink=!1}else if(a=this.rules.strong.exec(e))e=e.substring(a[0].length),l+=this.renderer.strong(this.output(a[4]||a[3]||a[2]||a[1]));else if(a=this.rules.em.exec(e))e=e.substring(a[0].length),l+=this.renderer.em(this.output(a[6]||a[5]||a[4]||a[3]||a[2]||a[1]));else 
if(a=this.rules.code.exec(e))e=e.substring(a[0].length),l+=this.renderer.codespan(u(a[2].trim(),!0));else if(a=this.rules.br.exec(e))e=e.substring(a[0].length),l+=this.renderer.br();else if(a=this.rules.del.exec(e))e=e.substring(a[0].length),l+=this.renderer.del(this.output(a[1]));else if(a=this.rules.autolink.exec(e))e=e.substring(a[0].length),r="@"===a[2]?"mailto:"+(n=u(this.mangle(a[1]))):n=u(a[1]),l+=this.renderer.link(r,null,n);else if(this.inLink||!(a=this.rules.url.exec(e))){if(a=this.rules.text.exec(e))e=e.substring(a[0].length),this.inRawBlock?l+=this.renderer.text(a[0]):l+=this.renderer.text(u(this.smartypants(a[0])));else if(e)throw new Error("Infinite loop on byte: "+e.charCodeAt(0))}else{if("@"===a[2])r="mailto:"+(n=u(a[0]));else{do{s=a[0],a[0]=this.rules._backpedal.exec(a[0])[0]}while(s!==a[0]);n=u(a[0]),r="www."===a[1]?"http://"+n:n}e=e.substring(a[0].length),l+=this.renderer.link(r,null,n)}return l},i.escapes=function(e){return e?e.replace(i.rules._escapes,"$1"):e},i.prototype.outputLink=function(e,t){var n=t.href,r=t.title?u(t.title):null;return"!"!==e[0].charAt(0)?this.renderer.link(n,r,this.output(e[1])):this.renderer.image(n,r,u(e[1]))},i.prototype.smartypants=function(e){return this.options.smartypants?e.replace(/---/g,"—").replace(/--/g,"–").replace(/(^|[-\u2014\/(\[{"\s])'/g,"$1‘").replace(/'/g,"’").replace(/(^|[-\u2014\/(\[{\u2018\s])"/g,"$1“").replace(/"/g,"”").replace(/\.{3}/g,"…"):e},i.prototype.mangle=function(e){if(!this.options.mangle)return e;for(var t,n="",r=e.length,o=0;o.5&&(t="x"+t.toString(16)),n+="&#"+t+";";return n},a.prototype.code=function(e,t,n){var r=(t||"").match(/\S*/)[0];if(this.options.highlight){var o=this.options.highlight(e,r);null!=o&&o!==e&&(n=!0,e=o)}return r?'
'+(n?e:u(e,!0))+"
\n":"
"+(n?e:u(e,!0))+"
"},a.prototype.blockquote=function(e){return"
\n"+e+"
\n"},a.prototype.html=function(e){return e},a.prototype.heading=function(e,t,n,r){return this.options.headerIds?"'+e+"\n":""+e+"\n"},a.prototype.hr=function(){return this.options.xhtml?"
\n":"
\n"},a.prototype.list=function(e,t,n){var r=t?"ol":"ul";return"<"+r+(t&&1!==n?' start="'+n+'"':"")+">\n"+e+"\n"},a.prototype.listitem=function(e){return"
  • "+e+"
  • \n"},a.prototype.checkbox=function(e){return" "},a.prototype.paragraph=function(e){return"

    "+e+"

    \n"},a.prototype.table=function(e,t){return t&&(t=""+t+""),"\n\n"+e+"\n"+t+"
    \n"},a.prototype.tablerow=function(e){return"\n"+e+"\n"},a.prototype.tablecell=function(e,t){var n=t.header?"th":"td";return(t.align?"<"+n+' align="'+t.align+'">':"<"+n+">")+e+"\n"},a.prototype.strong=function(e){return""+e+""},a.prototype.em=function(e){return""+e+""},a.prototype.codespan=function(e){return""+e+""},a.prototype.br=function(){return this.options.xhtml?"
    ":"
    "},a.prototype.del=function(e){return""+e+""},a.prototype.link=function(e,t,n){if(null===(e=d(this.options.sanitize,this.options.baseUrl,e)))return n;var r='
    "},a.prototype.image=function(e,t,n){if(null===(e=d(this.options.sanitize,this.options.baseUrl,e)))return n;var r=''+n+'":">"},a.prototype.text=function(e){return e},s.prototype.strong=s.prototype.em=s.prototype.codespan=s.prototype.del=s.prototype.text=function(e){return e},s.prototype.link=s.prototype.image=function(e,t,n){return""+n},s.prototype.br=function(){return""},l.parse=function(e,t){return new l(t).parse(e)},l.prototype.parse=function(e){this.inline=new i(e.links,this.options),this.inlineText=new i(e.links,y({},this.options,{renderer:new s})),this.tokens=e.reverse();for(var t="";this.next();)t+=this.tok();return t},l.prototype.next=function(){return this.token=this.tokens.pop()},l.prototype.peek=function(){return this.tokens[this.tokens.length-1]||0},l.prototype.parseText=function(){for(var e=this.token.text;"text"===this.peek().type;)e+="\n"+this.next().text;return this.inline.output(e)},l.prototype.tok=function(){switch(this.token.type){case"space":return"";case"hr":return this.renderer.hr();case"heading":return this.renderer.heading(this.inline.output(this.token.text),this.token.depth,p(this.inlineText.output(this.token.text)),this.slugger);case"code":return this.renderer.code(this.token.text,this.token.lang,this.token.escaped);case"table":var e,t,n,r,o="",i="";for(n="",e=0;e?@[\]^`{|}~]/g,"").replace(/\s/g,"-");if(this.seen.hasOwnProperty(t)){var n=t;do{this.seen[n]++,t=n+"-"+this.seen[n]}while(this.seen.hasOwnProperty(t))}return this.seen[t]=0,t},u.escapeTest=/[&<>"']/,u.escapeReplace=/[&<>"']/g,u.replacements={"&":"&","<":"<",">":">",'"':""","'":"'"},u.escapeTestNoEncode=/[<>"']|&(?!#?\w+;)/,u.escapeReplaceNoEncode=/[<>"']|&(?!#?\w+;)/g;var h={},m=/^$|^[a-z][a-z0-9+.-]*:|^[?#]/i;function g(){}function y(e){for(var t,n,r=1;r=0&&"\\"===n[o];)r=!r;return r?"|":" |"}).split(/ \|/),r=0;if(n.length>t)n.splice(t);else for(;n.lengthAn error occurred:

    "+u(e.message+"",!0)+"
    ";throw e}}g.exec=g,w.options=w.setOptions=function(e){return y(w.defaults,e),w},w.getDefaults=function(){return{baseUrl:null,breaks:!1,gfm:!0,headerIds:!0,headerPrefix:"",highlight:null,langPrefix:"language-",mangle:!0,pedantic:!1,renderer:new a,sanitize:!1,sanitizer:null,silent:!1,smartLists:!1,smartypants:!1,tables:!0,xhtml:!1}},w.defaults=w.getDefaults(),w.Parser=l,w.parser=l.parse,w.Renderer=a,w.TextRenderer=s,w.Lexer=r,w.lexer=r.lex,w.InlineLexer=i,w.inlineLexer=i.output,w.Slugger=c,w.parse=w,e.exports=w}(this||"undefined"!=typeof window&&window)}).call(this,n(4))},function(e,t){e.exports=function(e,t){return{enumerable:!(1&e),configurable:!(2&e),writable:!(4&e),value:t}}},function(e,t){e.exports=function(e){if(null==e)throw TypeError("Can't call method on "+e);return e}},function(e,t){e.exports=function(e){if("function"!=typeof e)throw TypeError(e+" is not a function!");return e}},function(e,t){t.f={}.propertyIsEnumerable},function(e,t,n){"use strict";var r=n(195);e.exports=r},function(e,t,n){"use strict";function r(e,t){Error.call(this),this.name="YAMLException",this.reason=e,this.mark=t,this.message=(this.reason||"(unknown reason)")+(this.mark?" "+this.mark.toString():""),Error.captureStackTrace?Error.captureStackTrace(this,this.constructor):this.stack=(new Error).stack||""}r.prototype=Object.create(Error.prototype),r.prototype.constructor=r,r.prototype.toString=function(e){var t=this.name+": ";return t+=this.reason||"(unknown reason)",!e&&this.mark&&(t+=" "+this.mark.toString()),t},e.exports=r},function(e,t,n){"use strict";var r=n(29);e.exports=new r({include:[n(104)],implicit:[n(205),n(206)],explicit:[n(207),n(208),n(209),n(210)]})},function(e,t,n){"use strict";(function(t){!t.version||0===t.version.indexOf("v0.")||0===t.version.indexOf("v1.")&&0!==t.version.indexOf("v1.8.")?e.exports={nextTick:function(e,n,r,o){if("function"!=typeof e)throw new TypeError('"callback" argument must be a function');var i,a,s=arguments.length;switch(s){case 0:case 1:return t.nextTick(e);case 2:return t.nextTick(function(){e.call(null,n)});case 3:return t.nextTick(function(){e.call(null,n,r)});case 4:return t.nextTick(function(){e.call(null,n,r,o)});default:for(i=new Array(s-1),a=0;a0},o.isExternal$Ref=function(e){return o.is$Ref(e)&&"#"!==e.$ref[0]},o.isAllowed$Ref=function(e,t){if(o.is$Ref(e)){if("#/"===e.$ref.substr(0,2)||"#"===e.$ref)return!0;if("#"!==e.$ref[0]&&(!t||t.resolve.external))return!0}},o.isExtended$Ref=function(e){return o.is$Ref(e)&&Object.keys(e).length>1},o.dereference=function(e,t){if(t&&"object"==typeof t&&o.isExtended$Ref(e)){var n={};return Object.keys(e).forEach(function(t){"$ref"!==t&&(n[t]=e[t])}),Object.keys(t).forEach(function(e){e in n||(n[e]=t[e])}),n}return t}},function(e,t,n){(function(e){function n(e,t){for(var n=0,r=e.length-1;r>=0;r--){var o=e[r];"."===o?e.splice(r,1):".."===o?(e.splice(r,1),n++):n&&(e.splice(r,1),n--)}if(t)for(;n--;n)e.unshift("..");return e}var r=/^(\/?|)([\s\S]*?)((?:\.{1,2}|[^\/]+?|)(\.[^.\/]*|))(?:[\/]*)$/,o=function(e){return r.exec(e).slice(1)};function i(e,t){if(e.filter)return e.filter(t);for(var n=[],r=0;r=-1&&!r;o--){var a=o>=0?arguments[o]:e.cwd();if("string"!=typeof a)throw new TypeError("Arguments to path.resolve must be strings");a&&(t=a+"/"+t,r="/"===a.charAt(0))}return(r?"/":"")+(t=n(i(t.split("/"),function(e){return!!e}),!r).join("/"))||"."},t.normalize=function(e){var 
r=t.isAbsolute(e),o="/"===a(e,-1);return(e=n(i(e.split("/"),function(e){return!!e}),!r).join("/"))||r||(e="."),e&&o&&(e+="/"),(r?"/":"")+e},t.isAbsolute=function(e){return"/"===e.charAt(0)},t.join=function(){var e=Array.prototype.slice.call(arguments,0);return t.normalize(i(e,function(e,t){if("string"!=typeof e)throw new TypeError("Arguments to path.join must be strings");return e}).join("/"))},t.relative=function(e,n){function r(e){for(var t=0;t=0&&""===e[n];n--);return t>n?[]:e.slice(t,n-t+1)}e=t.resolve(e).substr(1),n=t.resolve(n).substr(1);for(var o=r(e.split("/")),i=r(n.split("/")),a=Math.min(o.length,i.length),s=a,l=0;l0?r:n)(e)}},function(e,t,n){"use strict";var r=n(32),o=n(18),i=n(21),a=n(22),s=n(34),l=n(138),c=n(37),u=n(142),p=n(2)("iterator"),f=!([].keys&&"next"in[].keys()),d=function(){return this};e.exports=function(e,t,n,h,m,g,y){l(n,t,h);var v,b,w,x=function(e){if(!f&&e in _)return _[e];switch(e){case"keys":case"values":return function(){return new n(this,e)}}return function(){return new n(this,e)}},k=t+" Iterator",E="values"==m,S=!1,_=e.prototype,O=_[p]||_["@@iterator"]||m&&_[m],C=O||x(m),T=m?E?x("entries"):C:void 0,j="Array"==t&&_.entries||O;if(j&&(w=u(j.call(new e)))!==Object.prototype&&w.next&&(c(w,k,!0),r||"function"==typeof w[p]||a(w,p,d)),E&&O&&"values"!==O.name&&(S=!0,C=function(){return O.call(this)}),r&&!y||!f&&!S&&_[p]||a(_,p,C),s[t]=C,s[k]=d,m)if(v={values:E?C:x("values"),keys:g?C:x("keys"),entries:T},y)for(b in v)b in _||i(_,b,v[b]);else o(o.P+o.F*(f||S),t,v);return v}},function(e,t,n){var r=n(14),o=n(139),i=n(66),a=n(65)("IE_PROTO"),s=function(){},l=function(){var e,t=n(59)("iframe"),r=i.length;for(t.style.display="none",n(85).appendChild(t),t.src="javascript:",(e=t.contentWindow.document).open(),e.write("