From e3b42e11128f5ee5d11399e6736ff728e222630f Mon Sep 17 00:00:00 2001 From: Olivier Leger Date: Thu, 29 Aug 2024 18:40:57 -0400 Subject: [PATCH 01/25] Remove QPath from back-end code --- .../actions/automatic_transcription.py | 9 +- kobo/apps/subsequences/actions/base.py | 4 +- .../subsequences/actions/keyword_search.py | 19 ++- kobo/apps/subsequences/actions/qual.py | 47 +++--- kobo/apps/subsequences/actions/translation.py | 9 +- .../advanced_features_params_schema.py | 21 ++- kobo/apps/subsequences/api_view.py | 36 ++++- .../subsequences/integrations/google/base.py | 12 +- .../integrations/google/google_transcribe.py | 5 +- .../integrations/google/google_translate.py | 5 +- kobo/apps/subsequences/models.py | 6 +- ...add_qual_to_last_question_of_last_asset.py | 139 +++++++++++------- .../tests/test_known_cols_utils.py | 91 ++++++------ .../tests/test_submission_stream.py | 6 +- kobo/apps/subsequences/utils/__init__.py | 7 +- kobo/apps/subsequences/utils/deprecation.py | 19 +++ .../determine_export_cols_with_values.py | 21 +-- .../subsequences/utils/parse_known_cols.py | 79 +++++----- kpi/mixins/formpack_xlsform_utils.py | 2 +- kpi/models/asset.py | 63 +++++--- kpi/tests/test_asset_content.py | 9 +- kpi/tests/test_asset_versions.py | 6 +- kpi/utils/absolute_paths.py | 7 - kpi/views/environment.py | 4 +- 24 files changed, 353 insertions(+), 273 deletions(-) create mode 100644 kobo/apps/subsequences/utils/deprecation.py diff --git a/kobo/apps/subsequences/actions/automatic_transcription.py b/kobo/apps/subsequences/actions/automatic_transcription.py index 3e16ed91ca..960288f31b 100644 --- a/kobo/apps/subsequences/actions/automatic_transcription.py +++ b/kobo/apps/subsequences/actions/automatic_transcription.py @@ -9,25 +9,26 @@ DT_MOD = BaseAction.DATE_MODIFIED_FIELD DT_CREATED = BaseAction.DATE_CREATED_FIELD + class AutomaticTranscriptionAction(BaseAction): ID = 'transcript' MANUAL = 'user_transcribed' @classmethod - def build_params(kls, params, content): + def build_params(cls, params, content): possible_transcribed_fields = [] for row in content.get('survey', []): if row['type'] in ['audio', 'video']: - possible_transcribed_fields.append(kls.get_qpath(kls, row)) + possible_transcribed_fields.append(cls.get_xpath(cls, row)) params = {'values': possible_transcribed_fields, 'services': []} return params @classmethod - def get_values_for_content(kls, content): + def get_values_for_content(cls, content): possible_transcribed_fields = [] for row in content.get('survey', []): if row['type'] in ['audio', 'video']: - possible_transcribed_fields.append(kls.get_qpath(kls, row)) + possible_transcribed_fields.append(cls.get_xpath(cls, row)) return possible_transcribed_fields def load_params(self, params): diff --git a/kobo/apps/subsequences/actions/base.py b/kobo/apps/subsequences/actions/base.py index fdae53cc79..c7993c4756 100644 --- a/kobo/apps/subsequences/actions/base.py +++ b/kobo/apps/subsequences/actions/base.py @@ -106,9 +106,9 @@ def has_change(self, original, edit): def build_params(kls, *args, **kwargs): raise NotImplementedError(f'{kls.__name__} has not implemented a build_params method') - def get_qpath(self, row): + def get_xpath(self, row): # return the full path... 
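+        # Look for an explicit 'xpath' key first, then fall back to 'name'
+        # or '$autoname'.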
- for name_field in ['qpath', 'name', '$autoname']: + for name_field in ['xpath', 'name', '$autoname']: if name_field in row: return row[name_field] return None diff --git a/kobo/apps/subsequences/actions/keyword_search.py b/kobo/apps/subsequences/actions/keyword_search.py index 2f3afdd869..312162eb4e 100644 --- a/kobo/apps/subsequences/actions/keyword_search.py +++ b/kobo/apps/subsequences/actions/keyword_search.py @@ -1,26 +1,27 @@ import copy from ..actions.base import BaseAction, ACTION_NEEDED, PASSES + class KeywordSearchAction(BaseAction): ID = 'keyword_search' ''' @classmethod - def build_params(kls, params, content): + def build_params(cls, params, content): possible_transcribed_fields = [] for row in content.get('survey', []): if row['type'] in ['audio', 'video']: - possible_transcribed_fields.append(kls.get_qpath(kls, row)) + possible_transcribed_fields.append(cls.get_xpath(cls, row)) params = {'values': possible_transcribed_fields} return params ''' @classmethod - def get_values_for_content(kls, content): + def get_values_for_content(cls, content): possible_transcribed_fields = [] for row in content.get('survey', []): if row['type'] in ['audio', 'video']: - possible_transcribed_fields.append(kls.get_qpath(kls, row)) + possible_transcribed_fields.append(cls.get_xpath(cls, row)) return possible_transcribed_fields def load_params(self, params): @@ -68,8 +69,9 @@ def check_submission_status(self, submission): response = self._traverse_object(submission, source) except KeyError: continue - qpath = source.split('/')[0] - all_output = submission[qpath].setdefault(self.ID, []) + # FIXME QPATH + xpath = source.split('/')[0] + all_output = submission[xpath].setdefault(self.ID, []) this_output = self._get_matching_element(all_output, **query) if not this_output: return ACTION_NEEDED @@ -90,8 +92,9 @@ def run_change(self, submission): matches = 0 for keyword in query['keywords']: matches += response['value'].count(keyword) - qpath = source.split('/')[0] - all_output = submission[qpath].setdefault(self.ID, []) + # FIXME QPATH + xpath = source.split('/')[0] + all_output = submission[xpath].setdefault(self.ID, []) this_output = self._get_matching_element(all_output, **query) if not this_output: this_output = copy.deepcopy(query) diff --git a/kobo/apps/subsequences/actions/qual.py b/kobo/apps/subsequences/actions/qual.py index 1092e94901..e1a6b53902 100644 --- a/kobo/apps/subsequences/actions/qual.py +++ b/kobo/apps/subsequences/actions/qual.py @@ -6,7 +6,7 @@ class QualAction(BaseAction): ID = 'qual' @classmethod - def build_params(kls, survey_content): + def build_params(cls, survey_content): _fields = [] for row in survey_content.get('survey', []): if row['type'] in ['audio', 'video']: @@ -16,22 +16,22 @@ def build_params(kls, survey_content): def load_params(self, params): ''' Action.load_params is called when the instance is initialized - for each Asset. It will + for each Asset. It will ''' self.fields = params.get('values', []) self.qual_survey = params.get('qual_survey', []) self.everything_else = params @classmethod - def get_values_for_content(kls, content): - ''' + def get_values_for_content(cls, content): + """ If no "values" are defined for a given asset, then this method will generate a set of defaults. 
- ''' + """ values = [] for row in content.get('survey', []): if row['type'] in ['audio', 'video']: - values.append(kls.get_qpath(kls, row)) + values.append(cls.get_xpath(cls, row)) return values def modify_jsonschema(self, schema): @@ -40,29 +40,32 @@ def modify_jsonschema(self, schema): for qual_item in self.qual_survey: if qual_item.get('scope') != 'by_question#survey': - raise NotImplementedError('by_question#survey is ' - 'the only implementation') - item_qpath = qual_item.get('qpath') - field_def = schema['properties'].setdefault( - item_qpath, - {'type': 'object', - 'additionalProperties': False, - 'properties': { - self.ID: { - 'type': 'array', - 'items': { - '$ref': '#/definitions/qual_item', + raise NotImplementedError( + 'by_question#survey is the only implementation' + ) + item_xpath = qual_item.get('xpath') + schema['properties'].setdefault( + item_xpath, + { + 'type': 'object', + 'additionalProperties': False, + 'properties': { + self.ID: { + 'type': 'array', + 'items': { + '$ref': '#/definitions/qual_item', + }, } - } - }}, + }, + }, ) return schema def compile_revised_record(self, content, edits): - ''' + """ a method that applies changes to a json structure but stores NO revision history - ''' + """ for field_name, vals in edits.items(): if field_name == 'submission': continue diff --git a/kobo/apps/subsequences/actions/translation.py b/kobo/apps/subsequences/actions/translation.py index 5c5801bd8d..644a6dd985 100644 --- a/kobo/apps/subsequences/actions/translation.py +++ b/kobo/apps/subsequences/actions/translation.py @@ -11,21 +11,20 @@ class TranslationAction(BaseAction): MANUAL = 'user_translated' @classmethod - def build_params(kls, survey_content): - audio_questions = [] + def build_params(cls, survey_content): translatable_fields = [] for row in survey_content.get('survey', []): if row['type'] in ['audio', 'video', 'text']: - translatable_fields.append(kls.get_qpath(kls, row)) + translatable_fields.append(cls.get_xpath(cls, row)) params = {'values': translatable_fields} return params @classmethod - def get_values_for_content(kls, content): + def get_values_for_content(cls, content): translatable_fields = [] for row in content.get('survey', []): if row['type'] in ['audio', 'video', 'text']: - name = kls.get_qpath(kls, row) + name = cls.get_xpath(cls, row) if name: translatable_fields.append(name) return translatable_fields diff --git a/kobo/apps/subsequences/advanced_features_params_schema.py b/kobo/apps/subsequences/advanced_features_params_schema.py index 90bb507110..846ce6f2ca 100644 --- a/kobo/apps/subsequences/advanced_features_params_schema.py +++ b/kobo/apps/subsequences/advanced_features_params_schema.py @@ -1,10 +1,10 @@ -''' +""" When setting "asset.advanced_features", the value is compared against this jsonschema. As "advanced_features" are added to the code, this schema will grow to describe what is needed. 
-''' +""" ADVANCED_FEATURES_PARAMS_SCHEMA = { 'type': 'object', @@ -25,23 +25,20 @@ 'type': 'array', 'items': {'type': 'string'}, }, - } + }, }, 'translation': { 'type': 'object', 'properties': { - 'languages': { - 'type': 'array', - 'items': {'type': 'string'} - }, + 'languages': {'type': 'array', 'items': {'type': 'string'}}, 'values': { 'type': 'array', 'items': {'type': 'string'}, }, }, - 'required': ['languages'] - } - } + 'required': ['languages'], + }, + }, } # User-defined qualitative analysis forms @@ -70,10 +67,10 @@ 'items': {'$ref': '#/$defs/qualChoice'}, }, 'scope': {'type': 'string'}, - 'qpath': {'type': 'string'}, + 'xpath': {'type': 'string'}, 'options': {'type': 'object'}, }, - 'required': ['uuid', 'type', 'labels', 'scope', 'qpath'], + 'required': ['uuid', 'type', 'labels', 'scope', 'xpath'], }, 'qualLabels': { 'type': 'object', diff --git a/kobo/apps/subsequences/api_view.py b/kobo/apps/subsequences/api_view.py index d53ebbfabe..77385b5681 100644 --- a/kobo/apps/subsequences/api_view.py +++ b/kobo/apps/subsequences/api_view.py @@ -1,23 +1,24 @@ -import json from copy import deepcopy from jsonschema import validate from jsonschema.exceptions import ValidationError as SchemaValidationError -from kobo.apps.subsequences.models import SubmissionExtras -from kpi.models import Asset -from kpi.permissions import SubmissionPermission -from kpi.views.environment import _check_asr_mt_access_for_user from rest_framework.exceptions import PermissionDenied from rest_framework.exceptions import ValidationError as APIValidationError from rest_framework.response import Response from rest_framework.views import APIView +from kobo.apps.subsequences.models import SubmissionExtras +from kobo.apps.subsequences.utils.deprecation import qpath_to_xpath +from kpi.models import Asset +from kpi.permissions import SubmissionPermission +from kpi.views.environment import check_asr_mt_access_for_user + def _check_asr_mt_access_if_applicable(user, posted_data): # This is for proof-of-concept testing and will be replaced with proper # quotas and accounting MAGIC_STATUS_VALUE = 'requested' - user_has_access = _check_asr_mt_access_for_user(user) + user_has_access = check_asr_mt_access_for_user(user) if user_has_access: return True # Oops, no access. But did they request ASR/MT in the first place? @@ -97,8 +98,27 @@ def post(self, request, asset_uid, format=None): def get_submission_processing(asset, s_uuid): try: - submission = asset.submission_extras.get(submission_uuid=s_uuid) - return Response(submission.content) + submission_extra = asset.submission_extras.get(submission_uuid=s_uuid) + + # TODO delete the loop when every asset is repopulated with `xpath` + # instead of `qpath`. + content = deepcopy(submission_extra.content) + changed = False + for old_xpath, values in submission_extra.content.items(): + if '-' in old_xpath and '/' not in old_xpath: + xpath = qpath_to_xpath(old_xpath, asset) + if xpath == old_xpath: + continue + + del content[old_xpath] + content[xpath] = values + changed = True + + if changed: + submission_extra.content = content + # TODO save submission_extra? 
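+            # Without a save() call here, the xpath-keyed content above only
+            # lives in memory for this response; the stored record still holds
+            # the old qpath keys.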
+ + return Response(submission_extra.content) except SubmissionExtras.DoesNotExist: # submission might exist but no SubmissionExtras object has been created return Response({'info': f'nothing found for submission: {s_uuid}'}) diff --git a/kobo/apps/subsequences/integrations/google/base.py b/kobo/apps/subsequences/integrations/google/base.py index 4c8b0e8662..123e6e6a25 100644 --- a/kobo/apps/subsequences/integrations/google/base.py +++ b/kobo/apps/subsequences/integrations/google/base.py @@ -108,19 +108,9 @@ def handle_google_operation( return response @abstractmethod - def process_data(self, qpath: str, options: dict) -> dict: + def process_data(self, xpath: str, options: dict) -> dict: pass - def qpath_to_xpath(self, qpath: str) -> str: - xpath = None - for row in self.asset.content['survey']: - if '$qpath' in row and '$xpath' in row and row['$qpath'] == qpath: - xpath = row['$xpath'] - break - if xpath is None: - raise KeyError(f'xpath for {qpath=} not found') - return xpath - def update_counters(self, amount) -> None: update_nlp_counter( self.counter_name, diff --git a/kobo/apps/subsequences/integrations/google/google_transcribe.py b/kobo/apps/subsequences/integrations/google/google_transcribe.py index 392a9c5a1a..53b3f804cd 100644 --- a/kobo/apps/subsequences/integrations/google/google_transcribe.py +++ b/kobo/apps/subsequences/integrations/google/google_transcribe.py @@ -121,7 +121,7 @@ def get_converted_audio( ) return attachment.get_transcoded_audio('flac', include_duration=True) - def process_data(self, qpath: str, vals: dict) -> dict: + def process_data(self, xpath: str, vals: dict) -> dict: autoparams = vals[GOOGLETS] language_code = autoparams.get('languageCode') region_code = autoparams.get('regionCode') @@ -130,10 +130,7 @@ def process_data(self, qpath: str, vals: dict) -> dict: 'languageCode': language_code, 'regionCode': region_code, } - xpath = self.qpath_to_xpath(qpath) region_or_language_code = region_code or language_code - result_string = '' - results = [] try: flac_content, duration = self.get_converted_audio( xpath, diff --git a/kobo/apps/subsequences/integrations/google/google_translate.py b/kobo/apps/subsequences/integrations/google/google_translate.py index 6604418660..abbff2a2d1 100644 --- a/kobo/apps/subsequences/integrations/google/google_translate.py +++ b/kobo/apps/subsequences/integrations/google/google_translate.py @@ -173,12 +173,11 @@ def get_unique_paths( ) return source_path, output_path - def process_data(self, qpath: str, vals: dict) -> dict: + def process_data(self, xpath: str, vals: dict) -> dict: """ - Translates the value for a given qpath and it's json values. + Translates the value for a given xpath and its json values. 
""" autoparams = vals[GOOGLETX] - xpath = self.qpath_to_xpath(qpath) try: content = vals['transcript']['value'] source_lang = vals['transcript']['languageCode'] diff --git a/kobo/apps/subsequences/models.py b/kobo/apps/subsequences/models.py index 769f774aae..e5a85a27b1 100644 --- a/kobo/apps/subsequences/models.py +++ b/kobo/apps/subsequences/models.py @@ -33,17 +33,17 @@ def save(self, *args, **kwargs): from .integrations.google.google_translate import GoogleTranslationService features = self.asset.advanced_features - for qpath, vals in self.content.items(): + for xpath, vals in self.content.items(): if 'transcript' in features: options = vals.get(GOOGLETS, {}) if options.get('status') == 'requested': service = GoogleTranscriptionService(self) - vals[GOOGLETS] = service.process_data(qpath, vals) + vals[GOOGLETS] = service.process_data(xpath, vals) if 'translation' in features: options = vals.get(GOOGLETX, {}) if options.get('status') == 'requested': service = GoogleTranslationService(self) - vals[GOOGLETX] = service.process_data(qpath, vals) + vals[GOOGLETX] = service.process_data(xpath, vals) asset_changes = False asset_known_cols = self.asset.known_cols diff --git a/kobo/apps/subsequences/scripts/add_qual_to_last_question_of_last_asset.py b/kobo/apps/subsequences/scripts/add_qual_to_last_question_of_last_asset.py index fefcf7dc78..3cd6128590 100644 --- a/kobo/apps/subsequences/scripts/add_qual_to_last_question_of_last_asset.py +++ b/kobo/apps/subsequences/scripts/add_qual_to_last_question_of_last_asset.py @@ -1,77 +1,112 @@ import json from kpi.models import Asset from jsonschema import validate -from pprint import pprint -EXAMPLES = [{'labels': {'_default': 'Any descriptors?'}, - 'qpath': '', - 'scope': 'by_question#survey', - 'type': 'qual_tags', - 'uuid': '00000000-0000-0000-0000-000000000000'}, - {'labels': {'_default': 'Short summary (one sentence)'}, - 'qpath': '', - 'scope': 'by_question#survey', - 'type': 'qual_text', - 'uuid': '11111111-1111-1111-1111-111111111111'}, - {'labels': {'_default': 'How many people are heard speaking in this ' - 'response?'}, - 'qpath': '', - 'scope': 'by_question#survey', - 'type': 'qual_integer', - 'uuid': '22222222-2222-2222-2222-222222222222'}, - {'choices': [{'labels': {'_default': 'Yes'}, - 'uuid': '44444444-4444-4444-4444-444444444444'}, - {'labels': {'_default': 'No'}, - 'uuid': '55555555-5555-5555-5555-555555555555'}], - 'labels': {'_default': 'Do they describe the facility as being well ' - 'maintained?'}, - 'qpath': '', - 'scope': 'by_question#survey', - 'type': 'qual_select_one', - 'uuid': '33333333-3333-3333-3333-333333333333'}, - {'choices': [{'labels': {'_default': 'Lighting'}, - 'uuid': '77777777-7777-7777-7777-777777777777'}, - {'labels': {'_default': 'Ventilation'}, - 'uuid': '88888888-8888-8888-8888-888888888888'}, - {'labels': {'_default': 'Security'}, - 'uuid': '99999999-9999-9999-9999-999999999999'}], - 'labels': {'_default': 'Select any mentioned areas of concern'}, - 'qpath': '', - 'scope': 'by_question#survey', - 'type': 'qual_select_multiple', - 'uuid': '66666666-6666-6666-6666-666666666666'}, - {'labels': {'_default': 'Please respect the confidentiality of our ' - 'respondents.'}, - 'qpath': '', - 'scope': 'by_question#survey', - 'type': 'qual_note', - 'uuid': 'aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa'}] +EXAMPLES = [ + { + 'labels': {'_default': 'Any descriptors?'}, + 'xpath': '', + 'scope': 'by_question#survey', + 'type': 'qual_tags', + 'uuid': '00000000-0000-0000-0000-000000000000', + }, + { + 'labels': {'_default': 'Short 
summary (one sentence)'}, + 'xpath': '', + 'scope': 'by_question#survey', + 'type': 'qual_text', + 'uuid': '11111111-1111-1111-1111-111111111111', + }, + { + 'labels': { + '_default': 'How many people are heard speaking in this ' + 'response?' + }, + 'xpath': '', + 'scope': 'by_question#survey', + 'type': 'qual_integer', + 'uuid': '22222222-2222-2222-2222-222222222222', + }, + { + 'choices': [ + { + 'labels': {'_default': 'Yes'}, + 'uuid': '44444444-4444-4444-4444-444444444444', + }, + { + 'labels': {'_default': 'No'}, + 'uuid': '55555555-5555-5555-5555-555555555555', + }, + ], + 'labels': { + '_default': 'Do they describe the facility as being well ' + 'maintained?' + }, + 'xpath': '', + 'scope': 'by_question#survey', + 'type': 'qual_select_one', + 'uuid': '33333333-3333-3333-3333-333333333333', + }, + { + 'choices': [ + { + 'labels': {'_default': 'Lighting'}, + 'uuid': '77777777-7777-7777-7777-777777777777', + }, + { + 'labels': {'_default': 'Ventilation'}, + 'uuid': '88888888-8888-8888-8888-888888888888', + }, + { + 'labels': {'_default': 'Security'}, + 'uuid': '99999999-9999-9999-9999-999999999999', + }, + ], + 'labels': {'_default': 'Select any mentioned areas of concern'}, + 'xpath': '', + 'scope': 'by_question#survey', + 'type': 'qual_select_multiple', + 'uuid': '66666666-6666-6666-6666-666666666666', + }, + { + 'labels': { + '_default': 'Please respect the confidentiality of our ' + 'respondents.' + }, + 'xpath': '', + 'scope': 'by_question#survey', + 'type': 'qual_note', + 'uuid': 'aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa', + }, +] EXAMPLE_QUAL_SURVEY_JSON = json.dumps({'qual_survey': EXAMPLES}) + def run(): asset = Asset.objects.order_by('-date_created')[0] - final_question_qpath = None + final_question_xpath = None for row in reversed(asset.content['survey']): if row['type'] in ('audio', 'video'): - final_question_qpath = row['$qpath'] - if not final_question_qpath: + final_question_xpath = row['$xpath'] + if not final_question_xpath: raise RuntimeError( 'Survey does not contain any audio or video question' ) asset.advanced_features['qual'] = json.loads( - EXAMPLE_QUAL_SURVEY_JSON.replace('', final_question_qpath) + EXAMPLE_QUAL_SURVEY_JSON.replace('', final_question_xpath) ) asset.save() if asset.submission_extras.count() == 0: - print('If a submission_extras model exists, this script will populate ' - 'it with sample data') - # asset.submission_extras.create(submission_uuid='...') + print( + 'If a submission_extras model exists, this script will populate ' + 'it with sample data' + ) else: subex = asset.submission_extras.last() subex_content_schema = asset.get_advanced_submission_schema() - subex.content[final_question_qpath] = { + subex.content[final_question_xpath] = { 'qual': [ { 'uuid': '00000000-0000-0000-0000-000000000000', diff --git a/kobo/apps/subsequences/tests/test_known_cols_utils.py b/kobo/apps/subsequences/tests/test_known_cols_utils.py index da550dbe59..291ac4283c 100644 --- a/kobo/apps/subsequences/tests/test_known_cols_utils.py +++ b/kobo/apps/subsequences/tests/test_known_cols_utils.py @@ -1,15 +1,10 @@ -import pytest - - -from kobo.apps.subsequences.utils.parse_known_cols import ( - parse_known_cols, -) +from kobo.apps.subsequences.utils.parse_known_cols import parse_known_cols def test_known_cols_transc_duplicates(): results = parse_known_cols([ - 'col-qpath:transc_a:en', - 'col-qpath:transc_a:en', + 'col/xpath:transc_a:en', + 'col/xpath:transc_a:en', ]) assert len(results) == 1 assert results[0]['language'] == 'en' @@ -17,63 +12,61 @@ def 
test_known_cols_transc_duplicates(): def test_known_cols_transl_duplicates(): results = parse_known_cols([ - 'col-qpath:transl_a:fr', - 'col-qpath:transl_a:fr', + 'col/xpath:transl_a:fr', + 'col/xpath:transl_a:fr', ]) assert len(results) == 1 def test_known_cols_transc_uniqs(): results = parse_known_cols([ - 'col-qpath1:transc_a:en', - 'col-qpath1:transc_b:fr', - 'col-qpath2:transc_a:en', - 'col-qpath2:transc_b:fr', + 'col/xpath1:transc_a:en', + 'col/xpath1:transc_b:fr', + 'col/xpath2:transc_a:en', + 'col/xpath2:transc_b:fr', ]) assert len(results) == 4 rs = {} - for prop in ['language', 'label', 'qpath']: + for prop in ['language', 'label', 'xpath']: rs[prop] = [rr[prop] for rr in results] assert rs['language'] == ['en', 'fr', 'en', 'fr'] assert rs['label'] == [ - 'qpath1 - transcript', - 'qpath1 - transcript', - 'qpath2 - transcript', - 'qpath2 - transcript', + 'xpath1 - transcript', + 'xpath1 - transcript', + 'xpath2 - transcript', + 'xpath2 - transcript', ] - assert rs['qpath'] == [ - 'col-qpath1-transcript-en', - 'col-qpath1-transcript-fr', - 'col-qpath2-transcript-en', - 'col-qpath2-transcript-fr', + assert rs['xpath'] == [ + 'col/xpath1/transcript/en', + 'col/xpath1/transcript/fr', + 'col/xpath2/transcript/en', + 'col/xpath2/transcript/fr', ] def test_known_cols_transl_uniqs(): results = parse_known_cols([ - 'col-qpath1:transl_a:en', - 'col-qpath1:transl_b:fr', - 'col-qpath2:transl_a:en', - 'col-qpath2:transl_b:fr', + 'col/xpath1:transl_a:en', + 'col/xpath1:transl_b:fr', + 'col/xpath2:transl_a:en', + 'col/xpath2:transl_b:fr', ]) assert len(results) == 4 - langs = [r['language'] for r in results] - labls = [r['label'] for r in results] - qpths = [r['qpath'] for r in results] - assert qpths == [ - 'col-qpath1-translation-en', - 'col-qpath1-translation-fr', - 'col-qpath2-translation-en', - 'col-qpath2-translation-fr', + xpaths = [r['xpath'] for r in results] + assert xpaths == [ + 'col/xpath1/translation/en', + 'col/xpath1/translation/fr', + 'col/xpath2/translation/en', + 'col/xpath2/translation/fr', ] def test_known_cols_combos(): results = parse_known_cols([ - 'col-qpath1:transl_a:en', - 'col-qpath1:transl_b:fr', - 'col-qpath2:transl_a:en', - 'col-qpath2:transl_b:fr', + 'col/xpath1:transl_a:en', + 'col/xpath1:transl_b:fr', + 'col/xpath2:transl_a:en', + 'col/xpath2:transl_b:fr', ]) langs = [r['language'] for r in results] assert langs == ['en', 'fr', 'en', 'fr'] @@ -86,20 +79,20 @@ def test_known_cols_grouped_source(): results = parse_known_cols([ # `group` is the group name # `question` is the (source) question name - 'group-question:transcript:en', - 'group-question:translation:es', + 'group/question:transcript:en', + 'group/question:translation:es', ]) sources = [r['source'] for r in results] - qpaths = [r['qpath'] for r in results] + xpaths = [r['xpath'] for r in results] names = [r['name'] for r in results] - assert set(sources) == set(('group-question',)) - assert qpaths == [ - 'group-question-transcript-en', - 'group-question-translation-es', + assert set(sources) == set(('group/question',)) + assert xpaths == [ + 'group/question/transcript/en', + 'group/question/translation/es', ] assert names == [ # This can't be right (why a mixture of dash and slash delimiters?) 
but # it is at least what the front end expects - 'group-question/transcript_en', - 'group-question/translation_es', + 'group/question/transcript_en', + 'group/question/translation_es', ] diff --git a/kobo/apps/subsequences/tests/test_submission_stream.py b/kobo/apps/subsequences/tests/test_submission_stream.py index a8e19b19a2..eaafb11984 100644 --- a/kobo/apps/subsequences/tests/test_submission_stream.py +++ b/kobo/apps/subsequences/tests/test_submission_stream.py @@ -26,7 +26,6 @@ def _create_asset(self): 'type': 'text', '$kuid': 'rc9ak31', 'label': ["What's your name?"], - '$qpath': 'What_s_your_name', '$xpath': 'What_s_your_name', 'required': False, '$autoname': 'What_s_your_name', @@ -35,7 +34,6 @@ def _create_asset(self): 'type': 'audio', '$kuid': 'ff6ek09', 'label': ['Tell me a story!'], - '$qpath': 'Tell_me_a_story', '$xpath': 'Tell_me_a_story', 'required': False, '$autoname': 'Tell_me_a_story', @@ -49,14 +47,14 @@ def _create_asset(self): { 'type': 'qual_integer', 'uuid': '1a2c8eb0-e2ec-4b3c-942a-c1a5410c081a', - 'qpath': 'Tell_me_a_story', + 'xpath': 'Tell_me_a_story', 'scope': 'by_question#survey', 'labels': {'_default': 'When was this recorded?'}, }, { 'type': 'qual_select_one', 'uuid': '1a8b748b-f470-4c40-bc09-ce2b1197f503', - 'qpath': 'Tell_me_a_story', + 'xpath': 'Tell_me_a_story', 'scope': 'by_question#survey', 'labels': { '_default': "What's the source of this story?" diff --git a/kobo/apps/subsequences/utils/__init__.py b/kobo/apps/subsequences/utils/__init__.py index d9ce7bceac..2f633da542 100644 --- a/kobo/apps/subsequences/utils/__init__.py +++ b/kobo/apps/subsequences/utils/__init__.py @@ -43,7 +43,6 @@ def advanced_feature_instances(content, actions): - action_instances = [] for action_id, action_params in actions.items(): action_kls = ACTIONS_BY_ID[action_id] if action_params is True: @@ -74,7 +73,7 @@ def populate_paths(_content): logging.error('missing row name', extra={'path': group_stack}) # /HOTFIX 2022-12-06 - row['qpath'] = '-'.join([*group_stack, rowname]) + row['xpath'] = '/'.join([*group_stack, rowname]) return content @@ -160,9 +159,9 @@ def stream_with_extras(submission_stream, asset): else: uuid = submission['_uuid'] - all_supplemental_details = deepcopy(extras.get(uuid, {})) - for qpath, supplemental_details in all_supplemental_details.items(): + # FIXME QPATH + for supplemental_details in all_supplemental_details.values(): try: all_qual_responses = supplemental_details['qual'] except KeyError: diff --git a/kobo/apps/subsequences/utils/deprecation.py b/kobo/apps/subsequences/utils/deprecation.py new file mode 100644 index 0000000000..cd94a2d5cd --- /dev/null +++ b/kobo/apps/subsequences/utils/deprecation.py @@ -0,0 +1,19 @@ + +def qpath_to_xpath(qpath: str, asset: 'Asset') -> str: + """ + We have abandoned `qpath` attribute in favor of `xpath`. + Existing projects may still use it though. 
+ We need to find the equivalent `xpath` + """ + for row in asset.content['survey']: + if '$qpath' in row and '$xpath' in row and row['$qpath'] == qpath: + return row['$xpath'] + + # Could not find it from the survey, let's try to detect it automatically + xpaths = asset.get_attachment_xpaths(deployed=True) + for xpath in xpaths: + dashed_xpath = xpath.replace('/', '-') + if dashed_xpath == qpath: + return xpath + + raise KeyError(f'xpath for {qpath} not found') diff --git a/kobo/apps/subsequences/utils/determine_export_cols_with_values.py b/kobo/apps/subsequences/utils/determine_export_cols_with_values.py index e945e007b3..220e4f12ed 100644 --- a/kobo/apps/subsequences/utils/determine_export_cols_with_values.py +++ b/kobo/apps/subsequences/utils/determine_export_cols_with_values.py @@ -34,32 +34,33 @@ def get_lang_code(key, tvals): def determine_export_cols_indiv(sub_ex_content): - ''' + """ used primarily when a SubmissionExtras object is saved. iterates through content to see which questions have transcripts/translations that need to end up in the export yields strings in this format- - ":transcript:" - ":translation:" - ''' - for qpath in sub_ex_content.keys(): - for key in sub_ex_content[qpath].keys(): - tvals = sub_ex_content[qpath][key] + ":transcript:" + ":translation:" + """ + + for xpath in sub_ex_content.keys(): + for key in sub_ex_content[xpath].keys(): + tvals = sub_ex_content[xpath][key] # if not is_non_null_submext_data(key, tvals): # continue dtype = KEY_TYPE_DICTS.get(key, key) - col_string = f'{qpath}:{dtype}' + col_string = f'{xpath}:{dtype}' for lang_code in get_lang_code(key, tvals): yield f'{col_string}:{lang_code}' def determine_export_cols_with_values(asset_submission_extras_all): - ''' + """ used in management command to rebuild asset.known_cols - ''' + """ col_strings = tuple() for sub_ex in asset_submission_extras_all: for col_string in determine_export_cols_indiv(sub_ex.content): diff --git a/kobo/apps/subsequences/utils/parse_known_cols.py b/kobo/apps/subsequences/utils/parse_known_cols.py index a0afdf19b7..4c4bb5c32b 100644 --- a/kobo/apps/subsequences/utils/parse_known_cols.py +++ b/kobo/apps/subsequences/utils/parse_known_cols.py @@ -1,5 +1,4 @@ -# coding: utf-8 -''' +""" this util parses the string of known_cols saved in the db and builds the structure that formpack expects to see in the asset.analysis_form_json() @@ -10,28 +9,31 @@ - q1:translt:de output is a more descriptive structure. 
(See test_parse_known_cols) -''' +""" from collections import defaultdict -def extend_col_deets(lang, coltype, label, q_path): +def extend_col_deets(lang: str, coltype: str, label: str, xpath: str) -> dict: # NB: refer to commit d013bfe0f5 when trying to figure out the original # intent here - name = q_path.split('-')[-1] - out = {'label': name} - out['dtpath'] = f'{q_path}/{coltype}_{lang}' - out['type'] = coltype - out['language'] = lang - out['label'] = f'{label} - {coltype}' - out['name'] = f'{q_path}/{coltype}_{lang}' - out['source'] = q_path - out['qpath'] = f'{q_path}-{coltype}-{lang}' - out['settings'] = {'mode': 'manual', 'engine': f'engines/{coltype}_manual'} - out['path'] = [q_path, coltype] + name = xpath.split('/')[-1] + out = { + 'label': name, + 'dtpath': f'{xpath}/{coltype}_{lang}', + 'type': coltype, + 'language': lang, + 'label': f'{label} - {coltype}', + 'name': f'{xpath}/{coltype}_{lang}', + 'source': xpath, + # FIXME QPATH + 'xpath': f'{xpath}/{coltype}/{lang}', + 'settings': {'mode': 'manual', 'engine': f'engines/{coltype}_manual'}, + 'path': [xpath, coltype], + } return out -def parse_field_cols(qpath, fieldcols): +def parse_field_cols(xpath, fieldcols): fcx = {'tsc': [], 'tsl': []} for fc in fieldcols: if 'tx' not in fc: @@ -52,30 +54,39 @@ def parse_field_cols(qpath, fieldcols): if len(langs['tsc']) > 0: for lang in langs['tsc']: - out.append(extend_col_deets(lang=lang, label=qpath.split('-')[-1], q_path=qpath, - coltype='transcript', - )) + out.append( + extend_col_deets( + lang=lang, + label=xpath.split('/')[-1], + xpath=xpath, + coltype='transcript', + ) + ) if len(langs['tsl']) > 0: for lang in langs['tsl']: - out.append(extend_col_deets(lang=lang, label=qpath.split('-')[-1], q_path=qpath, - coltype='translation', - )) + out.append( + extend_col_deets( + lang=lang, + label=xpath.split('/')[-1], + xpath=xpath, + coltype='translation', + ) + ) return out -def parse_known_cols(knownc): - by_qpath = defaultdict(list) - out = [] - if isinstance(knownc, dict): - knownc = knownc.get('known') - for fieldstr in knownc: +def parse_known_cols(known_columns): + by_xpath = defaultdict(list) + if isinstance(known_columns, dict): + known_columns = known_columns.get('known') + for fieldstr in known_columns: sects = fieldstr.split(':') - [qpath, field, *rest] = sects + [xpath, field, *_] = sects item = {'field': field} if len(sects) == 3: item['tx'] = sects[2] - by_qpath[qpath].append(item) - by_qpath_list = [] - for qpath, cols in by_qpath.items(): - by_qpath_list = [*by_qpath_list, *parse_field_cols(qpath, cols)] - return by_qpath_list + by_xpath[xpath].append(item) + by_xpath_list = [] + for xpath, cols in by_xpath.items(): + by_xpath_list = [*by_xpath_list, *parse_field_cols(xpath, cols)] + return by_xpath_list diff --git a/kpi/mixins/formpack_xlsform_utils.py b/kpi/mixins/formpack_xlsform_utils.py index c84ef7f4b6..3a9d3e5b60 100644 --- a/kpi/mixins/formpack_xlsform_utils.py +++ b/kpi/mixins/formpack_xlsform_utils.py @@ -68,7 +68,7 @@ def _autoname(self, content): autoname_fields_in_place(content, '$autoname') autovalue_choices_in_place(content, '$autovalue') - def _insert_qpath(self, content): + def _insert_xpath(self, content): insert_full_paths_in_place(content) def _populate_fields_with_autofields(self, content): diff --git a/kpi/models/asset.py b/kpi/models/asset.py index 5cdee0d240..8f542e884e 100644 --- a/kpi/models/asset.py +++ b/kpi/models/asset.py @@ -32,6 +32,7 @@ advanced_feature_instances, advanced_submission_jsonschema, ) +from 
kobo.apps.subsequences.utils.deprecation import qpath_to_xpath from kobo.apps.subsequences.utils.parse_known_cols import parse_known_cols from kpi.constants import ( ASSET_TYPES, @@ -83,7 +84,6 @@ from kpi.utils.object_permission import get_cached_code_names from kpi.utils.sluggify import sluggify_label - class AssetDeploymentStatus(models.TextChoices): ARCHIVED = 'archived', 'Archived' @@ -448,7 +448,7 @@ def adjust_content_on_save(self): self._strip_empty_rows(self.content) self._assign_kuids(self.content) self._autoname(self.content) - self._insert_qpath(self.content) + self._insert_xpath(self.content) self._unlink_list_items(self.content) self._remove_empty_expressions(self.content) self._remove_version(self.content) @@ -492,21 +492,25 @@ def analysis_form_json(self, omit_question_types=None): # # See also injectSupplementalRowsIntoListOfRows() in # assetUtils.ts - qpath = qual_question['qpath'] + try: + xpath = qual_question['xpath'] + except KeyError: + xpath = qpath_to_xpath(qual_question['qpath'], self) + field = dict( label=qual_question['labels']['_default'], - name=f"{qpath}/{qual_question['uuid']}", - dtpath=f"{qpath}/{qual_question['uuid']}", + name=f"{xpath}/{qual_question['uuid']}", + dtpath=f"{xpath}/{qual_question['uuid']}", type=qual_question['type'], # could say '_default' or the language of the transcript, # but really that would be meaningless and misleading language='??', - source=qpath, - qpath=f"{qpath}-{qual_question['uuid']}", + source=xpath, + xpath=f"{xpath}/{qual_question['uuid']}", # seems not applicable given the transx questions describe # manual vs. auto here and which engine was used settings='??', - path=[qpath, qual_question['uuid']], + path=[xpath, qual_question['uuid']], ) if field['type'] in omit_question_types: continue @@ -515,6 +519,7 @@ def analysis_form_json(self, omit_question_types=None): except KeyError: pass additional_fields.append(field) + return output def clone(self, version_uid=None): @@ -618,7 +623,7 @@ def _get_xpaths(survey_: dict) -> Optional[list]: if xpaths := _get_xpaths(survey): return xpaths - self._insert_qpath(content) + self._insert_xpath(content) return _get_xpaths(survey) def get_filters_for_partial_perm( @@ -1119,16 +1124,19 @@ def update_languages(self, children=None): if children: languages = set(obj_languages) - children_languages = [child.summary.get('languages') - for child in children - if child.summary.get('languages')] + children_languages = [ + child.summary.get('languages') + for child in children + if child.summary.get('languages') + ] else: - children_languages = list(self.children - .values_list('summary__languages', - flat=True) - .exclude(Q(summary__languages=[]) | - Q(summary__languages=[None])) - .order_by()) + children_languages = list( + self.children.values_list('summary__languages', flat=True) + .exclude( + Q(summary__languages=[]) | Q(summary__languages=[None]) + ) + .order_by() + ) if children_languages: # Flatten `children_languages` to 1-dimension list. @@ -1148,6 +1156,7 @@ def update_languages(self, children=None): def validate_advanced_features(self): if self.advanced_features is None: self.advanced_features = {} + jsonschema_validate( instance=self.advanced_features, schema=ADVANCED_FEATURES_PARAMS_SCHEMA, @@ -1182,12 +1191,26 @@ def version_number_and_date(self) -> str: return f'{count} {self.date_modified:(%Y-%m-%d %H:%M:%S)}' def _get_additional_fields(self): + + # TODO delete the loop when every asset is repopulated with `xpath` + # instead of `qpath`. 
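+        # Rewrite legacy dash-delimited qpath prefixes in known_cols to their
+        # slash-delimited xpath equivalents before parsing them below.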
+ for idx, known_column in enumerate(self.known_cols): + xpath, *rest = known_column.split(':') + # Old `qpath` should not contain "/", but could contain "-". + # If the question does not belong to a group but does contain "-", + # it will enter this condition - which is not a problem except extra + # CPU usage for nothing. + if '-' in xpath and '/' not in xpath: + xpath = qpath_to_xpath(xpath, self) + rest.insert(0, xpath) + self.known_cols[idx] = ':'.join(rest) + return parse_known_cols(self.known_cols) def _get_engines(self): - ''' + """ engines are individual NLP services that can be used - ''' + """ for instance in self.get_advanced_feature_instances(): if hasattr(instance, 'engines'): for key, val in instance.engines(): diff --git a/kpi/tests/test_asset_content.py b/kpi/tests/test_asset_content.py index d2e9f59ef9..5f8bedb7cc 100644 --- a/kpi/tests/test_asset_content.py +++ b/kpi/tests/test_asset_content.py @@ -845,7 +845,7 @@ def test_kuid_persists(): assert content['survey'][1].get('$kuid') == initial_kuid_2 -def test_populates_qpath_xpath_correctly(): +def test_populates_xpath_correctly(): asset = Asset(content={ 'survey': [ {'type': 'begin_group', 'name': 'g1'}, @@ -858,12 +858,11 @@ def test_populates_qpath_xpath_correctly(): }) asset.adjust_content_on_save() rs = asset.content['survey'][0:4] - assert [rr['$qpath'] for rr in rs] == ['g1', 'g1-r1', 'g1-g2', 'g1-g2-r2'] assert [rr['$xpath'] for rr in rs] == ['g1', 'g1/r1', 'g1/g2', 'g1/g2/r2'] @pytest.mark.django_db() -def test_return_xpaths_and_qpath_even_if_missing(): +def test_return_xpaths_even_if_missing(): user = baker.make( settings.AUTH_USER_MODEL, username='johndoe' ) @@ -879,8 +878,8 @@ def test_return_xpaths_and_qpath_even_if_missing(): }) expected = ['g1/r1', 'g1/g2/r2'] - # 'qpath' and 'xpath' are not injected until an Asset object is saved with `adjust_content=True` - # or `adjust_content_on_save()` is called directly. + # 'xpath' is not injected until an Asset object is saved with + # `adjust_content=True` or `adjust_content_on_save()` is called directly. # No matter what, `get_attachment_xpaths()` should be able to return # attachment xpaths. 
assert asset.get_attachment_xpaths(deployed=False) == expected diff --git a/kpi/tests/test_asset_versions.py b/kpi/tests/test_asset_versions.py index a1ddb8d26a..1e0a524c01 100644 --- a/kpi/tests/test_asset_versions.py +++ b/kpi/tests/test_asset_versions.py @@ -30,12 +30,12 @@ def test_init_asset_version(self): } new_asset = Asset.objects.create(asset_type='survey', content=_content) _vc = deepcopy(new_asset.latest_version.version_content) - pop_atts = ['$kuid', + pop_atts = [ + '$kuid', '$autoname', '$prev', - '$qpath', '$xpath', - ] + ] for row in _vc['survey']: for att in pop_atts: row.pop(att, None) diff --git a/kpi/utils/absolute_paths.py b/kpi/utils/absolute_paths.py index 62d37fc6d9..79053e8da6 100644 --- a/kpi/utils/absolute_paths.py +++ b/kpi/utils/absolute_paths.py @@ -7,12 +7,6 @@ ENDERS = ENDERS + (f'end_{hierarchy_keyword}',) -def concat_paths(name, parent_names): - return DELIMITER.join( - [*parent_names, name or ''] - ) - - def concat_xpath(name, parent_names): return '/'.join( [*parent_names, name or ''] @@ -39,7 +33,6 @@ def insert_full_paths_in_place(content): else: rowname = get_name(row) if rowname is not None: - row['$qpath'] = concat_paths(rowname, hierarchy) row['$xpath'] = concat_xpath(rowname, hierarchy) if row.get('type') in BEGINNERS: hierarchy.append(rowname) diff --git a/kpi/views/environment.py b/kpi/views/environment.py index 310349c143..0ca636ab1a 100644 --- a/kpi/views/environment.py +++ b/kpi/views/environment.py @@ -23,7 +23,7 @@ from kpi.utils.object_permission import get_database_user -def _check_asr_mt_access_for_user(user): +def check_asr_mt_access_for_user(user): # This is for proof-of-concept testing and will be replaced with proper # quotas and accounting if user.is_anonymous: @@ -165,7 +165,7 @@ def process_other_configs(request): ) ) - data['asr_mt_features_enabled'] = _check_asr_mt_access_for_user( + data['asr_mt_features_enabled'] = check_asr_mt_access_for_user( request.user ) data['submission_placeholder'] = SUBMISSION_PLACEHOLDER From ed6401cc9d2e562f5de0fc906926081fb81cdbd7 Mon Sep 17 00:00:00 2001 From: Olivier Leger Date: Thu, 29 Aug 2024 18:42:05 -0400 Subject: [PATCH 02/25] Remove QPath from front-end code --- jsapp/js/assetUtils.ts | 17 ++-- .../analysis/analysisContent.component.tsx | 2 +- .../analysis/analysisHeader.component.tsx | 2 +- .../analysis/analysisQuestions.actions.ts | 4 +- .../analysis/analysisQuestions.reducer.ts | 4 +- .../analysis/analysisTab.component.tsx | 2 +- .../processing/analysis/constants.ts | 6 +- .../list/analysisQuestionsList.component.tsx | 2 +- .../integerResponseForm.component.tsx | 2 +- .../selectMultipleResponseForm.component.tsx | 2 +- .../selectOneResponseForm.component.tsx | 2 +- .../tagsResponseForm.component.tsx | 2 +- .../textResponseForm.component.tsx | 2 +- .../components/processing/analysis/utils.ts | 18 ++--- .../processing/processingActions.ts | 70 ++++++++-------- .../processing/routes.utils.tests.ts | 14 ++-- .../js/components/processing/routes.utils.ts | 39 ++++++--- .../processing/singleProcessingContent.tsx | 1 - .../processing/singleProcessingHeader.tsx | 26 +++--- .../processing/singleProcessingRoute.tsx | 6 +- .../processing/singleProcessingStore.ts | 81 ++++++++++--------- .../transcript/transcriptTab.component.tsx | 8 +- jsapp/js/components/submissions/audioCell.tsx | 4 +- jsapp/js/components/submissions/mediaCell.tsx | 2 +- .../submissions/submissionDataTable.tsx | 2 +- .../submissions/submissionUtils.mocks.es6 | 12 +-- .../components/submissions/submissionUtils.ts | 4 +- 
jsapp/js/components/submissions/table.tsx | 4 +- jsapp/js/dataInterface.ts | 3 +- jsapp/js/router/permProtectedRoute.tsx | 25 +++++- jsapp/js/router/routerConstants.ts | 4 +- 31 files changed, 207 insertions(+), 165 deletions(-) diff --git a/jsapp/js/assetUtils.ts b/jsapp/js/assetUtils.ts index 18da8ebf7c..6f025576cd 100644 --- a/jsapp/js/assetUtils.ts +++ b/jsapp/js/assetUtils.ts @@ -451,8 +451,8 @@ export function findRow(assetContent: AssetContent, rowName: string) { return assetContent?.survey?.find((row) => getRowName(row) === rowName); } -export function findRowByQpath(assetContent: AssetContent, qpath: string) { - return assetContent?.survey?.find((row) => row.$qpath === qpath); +export function findRowByXpath(assetContent: AssetContent, xpath: string) { + return assetContent?.survey?.find((row) => row.$xpath === xpath); } export function getRowType(assetContent: AssetContent, rowName: string) { @@ -460,8 +460,8 @@ export function getRowType(assetContent: AssetContent, rowName: string) { return foundRow?.type; } -export function getRowNameByQpath(assetContent: AssetContent, qpath: string) { - const foundRow = findRowByQpath(assetContent, qpath); +export function getRowNameByXpath(assetContent: AssetContent, xpath: string) { + const foundRow = findRowByXpath(assetContent, xpath); if (foundRow) { return getRowName(foundRow); } @@ -552,9 +552,8 @@ export function injectSupplementalRowsIntoListOfRows( // Step 4: Inject all the extra columns immediately after source question const outputWithCols: string[] = []; output.forEach((col: string) => { - const qpath = col.replace(/\//g, '-'); outputWithCols.push(col); - (extraColsBySource[qpath] || []).forEach((extraCol) => { + (extraColsBySource[col] || []).forEach((extraCol) => { outputWithCols.push(`_supplementalDetails/${extraCol.dtpath}`); }); }); @@ -702,7 +701,7 @@ export function getAssetSubmissionProcessingUrl( return undefined; } -/** Returns a list of all rows (their `qpath`s) activated for advanced features. */ +/** Returns a list of all rows (their `xpath`s) activated for advanced features. 
*/ export function getAssetProcessingRows(assetUid: string) { const foundAsset = assetStore.getAsset(assetUid); if (foundAsset?.advanced_submission_schema?.properties) { @@ -723,9 +722,9 @@ export function getAssetProcessingRows(assetUid: string) { return undefined; } -export function isRowProcessingEnabled(assetUid: string, qpath: string) { +export function isRowProcessingEnabled(assetUid: string, xpath: string) { const processingRows = getAssetProcessingRows(assetUid); - return Array.isArray(processingRows) && processingRows.includes(qpath); + return Array.isArray(processingRows) && processingRows.includes(xpath); } export function isAssetProcessingActivated(assetUid: string) { diff --git a/jsapp/js/components/processing/analysis/analysisContent.component.tsx b/jsapp/js/components/processing/analysis/analysisContent.component.tsx index 581f80cbcb..255f38cd16 100644 --- a/jsapp/js/components/processing/analysis/analysisContent.component.tsx +++ b/jsapp/js/components/processing/analysis/analysisContent.component.tsx @@ -14,7 +14,7 @@ export default function AnalysisContent() { // We only want to display analysis questions for this survey question const filteredQuestions = analysisQuestions.state.questions.filter( - (question) => question.qpath === singleProcessingStore.currentQuestionQpath + (question) => question.xpath === singleProcessingStore.currentQuestionXpath ); return ( diff --git a/jsapp/js/components/processing/analysis/analysisHeader.component.tsx b/jsapp/js/components/processing/analysis/analysisHeader.component.tsx index d5848ad68c..274f3b30c4 100644 --- a/jsapp/js/components/processing/analysis/analysisHeader.component.tsx +++ b/jsapp/js/components/processing/analysis/analysisHeader.component.tsx @@ -46,7 +46,7 @@ export default function AnalysisHeader() { analysisQuestions?.dispatch({ type: 'addQuestion', payload: { - qpath: singleProcessingStore.currentQuestionQpath, + xpath: singleProcessingStore.currentQuestionXpath, type: definition.type, }, }); diff --git a/jsapp/js/components/processing/analysis/analysisQuestions.actions.ts b/jsapp/js/components/processing/analysis/analysisQuestions.actions.ts index e2b4e8bf7d..108b07fe01 100644 --- a/jsapp/js/components/processing/analysis/analysisQuestions.actions.ts +++ b/jsapp/js/components/processing/analysis/analysisQuestions.actions.ts @@ -8,7 +8,7 @@ export type AnalysisQuestionsAction = // Sets all the quetsion with new ones (useful for initialising) | {type: 'setQuestions'; payload: {questions: AnalysisQuestionInternal[]}} // Creates a draft question of given type with new uid assigned - | {type: 'addQuestion'; payload: {qpath: string; type: AnalysisQuestionType}} + | {type: 'addQuestion'; payload: {xpath: string; type: AnalysisQuestionType}} // Opens question for editing, i.e. 
causes the editor to be opened for given // question | {type: 'startEditingQuestion'; payload: {uuid: string}} @@ -43,7 +43,7 @@ export type AnalysisQuestionsAction = | { type: 'updateResponseCompleted'; payload: { - qpath: string; + xpath: string; apiResponse: SubmissionProcessingDataResponse; }; } diff --git a/jsapp/js/components/processing/analysis/analysisQuestions.reducer.ts b/jsapp/js/components/processing/analysis/analysisQuestions.reducer.ts index f7571cc3eb..f6703ef408 100644 --- a/jsapp/js/components/processing/analysis/analysisQuestions.reducer.ts +++ b/jsapp/js/components/processing/analysis/analysisQuestions.reducer.ts @@ -73,7 +73,7 @@ export const analysisQuestionsReducer: AnalysisQuestionReducerType = ( } const newQuestion: AnalysisQuestionInternal = { - qpath: action.payload.qpath, + xpath: action.payload.xpath, type: action.payload.type, labels: {_default: ''}, uuid: newUuid, @@ -186,7 +186,7 @@ export const analysisQuestionsReducer: AnalysisQuestionReducerType = ( } case 'updateResponseCompleted': { const newQuestions = applyUpdateResponseToInternalQuestions( - action.payload.qpath, + action.payload.xpath, action.payload.apiResponse, state.questions ); diff --git a/jsapp/js/components/processing/analysis/analysisTab.component.tsx b/jsapp/js/components/processing/analysis/analysisTab.component.tsx index da1f3e0f79..2db65e0c82 100644 --- a/jsapp/js/components/processing/analysis/analysisTab.component.tsx +++ b/jsapp/js/components/processing/analysis/analysisTab.component.tsx @@ -70,7 +70,7 @@ export default function AnalysisTab() { await fetchGetUrl(processingUrl); questions = applyUpdateResponseToInternalQuestions( - singleProcessingStore.currentQuestionQpath, + singleProcessingStore.currentQuestionXpath, apiResponse, questions ); diff --git a/jsapp/js/components/processing/analysis/constants.ts b/jsapp/js/components/processing/analysis/constants.ts index 86876350d4..5208951fc9 100644 --- a/jsapp/js/components/processing/analysis/constants.ts +++ b/jsapp/js/components/processing/analysis/constants.ts @@ -72,7 +72,7 @@ export interface AnalysisQuestionBase { uuid: string; options?: AnalysisQuestionOptions; /** The survey question that this analysis questions is for. */ - qpath: string; + xpath: string; } /** Analysis question definition from the asset's schema (i.e. from Back end) */ @@ -160,7 +160,7 @@ export interface SubmissionAnalysisResponse extends AnalysisQuestionBase { * This is the payload of a request made to update a question response. */ export interface AnalysisResponseUpdateRequest { - [qpath: string]: + [xpath: string]: | { qual: AnalysisRequest[]; } @@ -173,7 +173,7 @@ export interface AnalysisResponseUpdateRequest { * response. */ export interface SubmissionProcessingDataResponse { - [qpath: string]: { + [xpath: string]: { qual: AnalysisResponse[]; }; } diff --git a/jsapp/js/components/processing/analysis/list/analysisQuestionsList.component.tsx b/jsapp/js/components/processing/analysis/list/analysisQuestionsList.component.tsx index 88a117e83d..9236695474 100644 --- a/jsapp/js/components/processing/analysis/list/analysisQuestionsList.component.tsx +++ b/jsapp/js/components/processing/analysis/list/analysisQuestionsList.component.tsx @@ -35,7 +35,7 @@ export default function AnalysisQuestionsList() { // hide them at this point (not filtering the whole list beforehand), // because we need the indexes to match the whole list. 
And FYI all // analysis questions live on a single list :) - if (question.qpath !== singleProcessingStore.currentQuestionQpath) { + if (question.xpath !== singleProcessingStore.currentQuestionXpath) { return null; } diff --git a/jsapp/js/components/processing/analysis/responseForms/integerResponseForm.component.tsx b/jsapp/js/components/processing/analysis/responseForms/integerResponseForm.component.tsx index 195ecd2215..91e6f23111 100644 --- a/jsapp/js/components/processing/analysis/responseForms/integerResponseForm.component.tsx +++ b/jsapp/js/components/processing/analysis/responseForms/integerResponseForm.component.tsx @@ -52,7 +52,7 @@ export default function IntegerResponseForm(props: IntegerResponseFormProps) { updateResponseAndReducer( analysisQuestions.dispatch, - question.qpath, + question.xpath, props.uuid, question.type, response diff --git a/jsapp/js/components/processing/analysis/responseForms/selectMultipleResponseForm.component.tsx b/jsapp/js/components/processing/analysis/responseForms/selectMultipleResponseForm.component.tsx index 874d6a4ed0..a6b39b2274 100644 --- a/jsapp/js/components/processing/analysis/responseForms/selectMultipleResponseForm.component.tsx +++ b/jsapp/js/components/processing/analysis/responseForms/selectMultipleResponseForm.component.tsx @@ -63,7 +63,7 @@ export default function SelectMultipleResponseForm( // Update endpoint and reducer updateResponseAndReducer( analysisQuestions.dispatch, - question.qpath, + question.xpath, props.uuid, question.type, newResponse diff --git a/jsapp/js/components/processing/analysis/responseForms/selectOneResponseForm.component.tsx b/jsapp/js/components/processing/analysis/responseForms/selectOneResponseForm.component.tsx index f7e200ef4e..b814f26ad0 100644 --- a/jsapp/js/components/processing/analysis/responseForms/selectOneResponseForm.component.tsx +++ b/jsapp/js/components/processing/analysis/responseForms/selectOneResponseForm.component.tsx @@ -57,7 +57,7 @@ export default function SelectOneResponseForm( // Update endpoint and reducer updateResponseAndReducer( analysisQuestions.dispatch, - question.qpath, + question.xpath, props.uuid, question.type, newResponse diff --git a/jsapp/js/components/processing/analysis/responseForms/tagsResponseForm.component.tsx b/jsapp/js/components/processing/analysis/responseForms/tagsResponseForm.component.tsx index 444d08a67b..e25fe1ded4 100644 --- a/jsapp/js/components/processing/analysis/responseForms/tagsResponseForm.component.tsx +++ b/jsapp/js/components/processing/analysis/responseForms/tagsResponseForm.component.tsx @@ -55,7 +55,7 @@ export default function TagsResponseForm(props: TagsResponseFormProps) { // Update endpoint and reducer updateResponseAndReducer( analysisQuestions.dispatch, - question.qpath, + question.xpath, props.uuid, question.type, newTags diff --git a/jsapp/js/components/processing/analysis/responseForms/textResponseForm.component.tsx b/jsapp/js/components/processing/analysis/responseForms/textResponseForm.component.tsx index 55336726fb..bd8427d0c5 100644 --- a/jsapp/js/components/processing/analysis/responseForms/textResponseForm.component.tsx +++ b/jsapp/js/components/processing/analysis/responseForms/textResponseForm.component.tsx @@ -52,7 +52,7 @@ export default function TextResponseForm(props: TextResponseFormProps) { updateResponseAndReducer( analysisQuestions.dispatch, - question.qpath, + question.xpath, props.uuid, question.type, response diff --git a/jsapp/js/components/processing/analysis/utils.ts 
b/jsapp/js/components/processing/analysis/utils.ts index 41e7b537c2..f19812e495 100644 --- a/jsapp/js/components/processing/analysis/utils.ts +++ b/jsapp/js/components/processing/analysis/utils.ts @@ -49,7 +49,7 @@ export function convertQuestionsFromInternalToSchema( options: question.options, choices: question.additionalFields?.choices, scope: 'by_question#survey', - qpath: question.qpath, + xpath: question.xpath, }; }); } @@ -64,7 +64,7 @@ export function convertQuestionsFromSchemaToInternal( ): AnalysisQuestionInternal[] { return questions.map((question) => { const output: AnalysisQuestionInternal = { - qpath: question.qpath, + xpath: question.xpath, uuid: question.uuid, type: question.type, labels: question.labels, @@ -85,12 +85,12 @@ export function convertQuestionsFromSchemaToInternal( * internal questions list using the API endpoint response. */ export function applyUpdateResponseToInternalQuestions( - qpath: string, + xpath: string, updateResp: SubmissionProcessingDataResponse, questions: AnalysisQuestionInternal[] ): AnalysisQuestionInternal[] { const newQuestions = clonedeep(questions); - const analysisResponses = updateResp[qpath]?.qual || []; + const analysisResponses = updateResp[xpath]?.qual || []; newQuestions.forEach((question) => { const foundResponse = analysisResponses.find( (analResp) => question.uuid === analResp.uuid @@ -191,7 +191,7 @@ export async function updateSurveyQuestions( async function updateResponse( processingUrl: string, submissionUid: string, - qpath: string, + xpath: string, analysisQuestionUuid: string, analysisQuestionType: AnalysisQuestionType, newResponse: string | string[] | number | null @@ -199,7 +199,7 @@ async function updateResponse( try { const payload: AnalysisResponseUpdateRequest = { submission: submissionUid, - [qpath]: { + [xpath]: { qual: [ { uuid: analysisQuestionUuid, @@ -219,7 +219,7 @@ async function updateResponse( return { apiResponse: apiResponse, - qpath: qpath, + xpath: xpath, }; } catch (err) { return Promise.reject(err); @@ -243,7 +243,7 @@ async function updateResponse( */ export async function updateResponseAndReducer( dispatch: React.Dispatch, - surveyQuestionQpath: string, + surveyQuestionXpath: string, analysisQuestionUuid: string, analysisQuestionType: AnalysisQuestionType, response: string | string[] @@ -284,7 +284,7 @@ export async function updateResponseAndReducer( const result = await updateResponse( processingUrl, singleProcessingStore.currentSubmissionEditId, - surveyQuestionQpath, + surveyQuestionXpath, analysisQuestionUuid, analysisQuestionType, actualResponse diff --git a/jsapp/js/components/processing/processingActions.ts b/jsapp/js/components/processing/processingActions.ts index b473a3f002..3460c4952e 100644 --- a/jsapp/js/components/processing/processingActions.ts +++ b/jsapp/js/components/processing/processingActions.ts @@ -80,13 +80,13 @@ export interface TransxObject extends TransxRequestObject { /** Object we send to Back end when updating transcript text manually. */ interface TranscriptRequest { - [qpath: string]: string | undefined | {transcript: TransxRequestObject}; + [xpath: string]: string | undefined | {transcript: TransxRequestObject}; submission?: string; } /** Object we send to Back end when requesting an automatic transcription. 
*/ interface AutoTranscriptRequest { - [qpath: string]: + [xpath: string]: | string | undefined | {googlets: AutoTranscriptRequestEngineParams}; @@ -100,7 +100,7 @@ interface AutoTranscriptRequestEngineParams { /** Object we send to Back end when updating translation text manually. */ interface TranslationRequest { - [qpath: string]: + [xpath: string]: | string | undefined | {translation: TranslationsRequestObject}; @@ -112,7 +112,7 @@ interface TranslationsRequestObject { /** Object we send to Back end when requesting an automatic translation. */ interface AutoTranslationRequest { - [qpath: string]: + [xpath: string]: | string | undefined | {googletx: AutoTranslationRequestEngineParams}; @@ -261,7 +261,7 @@ processingActions.getProcessingData.failed.listen(() => { */ function setTranscriptInnerMethod( assetUid: string, - qpath: string, + xpath: string, submissionEditId: string, languageCode: LanguageCode, value: string @@ -273,7 +273,7 @@ function setTranscriptInnerMethod( const data: TranscriptRequest = { submission: submissionEditId, }; - data[qpath] = { + data[xpath] = { transcript: { value: value, languageCode: languageCode, @@ -304,7 +304,7 @@ function setTranscriptInnerMethod( interface SetTranscriptFn { ( assetUid: string, - qpath: string, + xpath: string, submissionEditId: string, languageCode: LanguageCode, value: string @@ -316,7 +316,7 @@ interface SetTranscriptDefinition extends SetTranscriptFn { failed: ListenableCallback; } processingActions.setTranscript.listen( - (assetUid, qpath, submissionEditId, languageCode, value) => { + (assetUid, xpath, submissionEditId, languageCode, value) => { // This first block of code is about getting currently enabled languages. const currentFeatures = getAssetAdvancedFeatures(assetUid); if (currentFeatures?.transcript === undefined) { @@ -332,7 +332,7 @@ processingActions.setTranscript.listen( ) { setTranscriptInnerMethod( assetUid, - qpath, + xpath, submissionEditId, languageCode, value @@ -362,7 +362,7 @@ processingActions.setTranscript.listen( onComplete: setTranscriptInnerMethod.bind( this, assetUid, - qpath, + xpath, submissionEditId, languageCode, value @@ -386,7 +386,7 @@ processingActions.setTranscript.failed.listen(() => { * `advanced_feature` (i.e. makes it "not enabled"). 
*/ interface DeleteTranscriptFn { - (assetUid: string, qpath: string, submissionEditId: string): void; + (assetUid: string, xpath: string, submissionEditId: string): void; } interface DeleteTranscriptDefinition extends DeleteTranscriptFn { listen: (fn: DeleteTranscriptFn) => void; @@ -394,7 +394,7 @@ interface DeleteTranscriptDefinition extends DeleteTranscriptFn { failed: ListenableCallback; } processingActions.deleteTranscript.listen( - (assetUid, qpath, submissionEditId) => { + (assetUid, xpath, submissionEditId) => { const processingUrl = getAssetProcessingUrl(assetUid); if (processingUrl === undefined) { processingActions.deleteTranscript.failed(NO_FEATURE_ERROR); @@ -402,7 +402,7 @@ processingActions.deleteTranscript.listen( const data: TranscriptRequest = { submission: submissionEditId, }; - data[qpath] = { + data[xpath] = { transcript: { value: DELETE_CHAR, languageCode: '', @@ -438,7 +438,7 @@ processingActions.deleteTranscript.failed.listen(() => { interface RequestAutoTranscriptionFn { ( assetUid: string, - qpath: string, + xpath: string, submissionEditId: string, languageCode?: string, regionCode?: string | null @@ -458,7 +458,7 @@ interface RequestAutoTranscriptionDefinition failed: ListenableCallback; } processingActions.requestAutoTranscription.listen( - (assetUid, qpath, submissionEditId, languageCode, regionCode) => { + (assetUid, xpath, submissionEditId, languageCode, regionCode) => { const processingUrl = getAssetProcessingUrl(assetUid); if (processingUrl === undefined) { processingActions.requestAutoTranscription.failed(NO_FEATURE_ERROR); @@ -475,7 +475,7 @@ processingActions.requestAutoTranscription.listen( if (regionCode) { autoparams.regionCode = regionCode; } - data[qpath] = { + data[xpath] = { googlets: autoparams, }; @@ -487,7 +487,7 @@ processingActions.requestAutoTranscription.listen( data: JSON.stringify(data), }) .done((response: ProcessingDataResponse) => { - const responseStatus = response[qpath]?.googlets?.status; + const responseStatus = response[xpath]?.googlets?.status; if (responseStatus === 'requested' || responseStatus === 'in_progress') { processingActions.requestAutoTranscription.in_progress({ @@ -511,10 +511,10 @@ processingActions.requestAutoTranscription.listen( /** A small utility function for getting easier to use data. */ function pickTranslationsFromProcessingDataResponse( response: ProcessingDataResponse, - qpath: string + xpath: string ): TransxObject[] { const translations: TransxObject[] = []; - Object.values(response[qpath]?.translation).forEach((translation) => { + Object.values(response[xpath]?.translation).forEach((translation) => { translations.push(translation); }); return translations; @@ -522,7 +522,7 @@ function pickTranslationsFromProcessingDataResponse( /** A function that builds translation data object for processing endpoint. 
*/ function getTranslationDataObject( - qpath: string, + xpath: string, submissionEditId: string, languageCode: LanguageCode, value: string @@ -537,7 +537,7 @@ function getTranslationDataObject( const data: TranslationRequest = { submission: submissionEditId, }; - data[qpath] = { + data[xpath] = { translation: translationsObj, }; return data; @@ -549,7 +549,7 @@ function getTranslationDataObject( */ function setTranslationInnerMethod( assetUid: string, - qpath: string, + xpath: string, submissionEditId: string, languageCode: LanguageCode, value: string @@ -559,7 +559,7 @@ function setTranslationInnerMethod( processingActions.setTranslation.failed(NO_FEATURE_ERROR); } else { const data = getTranslationDataObject( - qpath, + xpath, submissionEditId, languageCode, value @@ -573,7 +573,7 @@ function setTranslationInnerMethod( }) .done((response: ProcessingDataResponse) => { processingActions.setTranslation.completed( - pickTranslationsFromProcessingDataResponse(response, qpath) + pickTranslationsFromProcessingDataResponse(response, xpath) ); }) .fail(processingActions.setTranslation.failed); @@ -590,7 +590,7 @@ function setTranslationInnerMethod( interface SetTranslationFn { ( assetUid: string, - qpath: string, + xpath: string, submissionEditId: string, languageCode: LanguageCode, value: string @@ -602,7 +602,7 @@ interface SetTranslationDefinition extends SetTranslationFn { failed: ListenableCallback; } processingActions.setTranslation.listen( - (assetUid, qpath, submissionEditId, languageCode, value) => { + (assetUid, xpath, submissionEditId, languageCode, value) => { // This first block of code is about getting currently enabled languages. const currentFeatures = getAssetAdvancedFeatures(assetUid); if (currentFeatures?.translation === undefined) { @@ -618,7 +618,7 @@ processingActions.setTranslation.listen( ) { setTranslationInnerMethod( assetUid, - qpath, + xpath, submissionEditId, languageCode, value @@ -648,7 +648,7 @@ processingActions.setTranslation.listen( onComplete: setTranslationInnerMethod.bind( this, assetUid, - qpath, + xpath, submissionEditId, languageCode, value @@ -674,7 +674,7 @@ processingActions.setTranslation.failed.listen(() => { interface DeleteTranslationFn { ( assetUid: string, - qpath: string, + xpath: string, submissionEditId: string, languageCode: LanguageCode ): void; @@ -685,13 +685,13 @@ interface DeleteTranslationDefinition extends DeleteTranslationFn { failed: ListenableCallback; } processingActions.deleteTranslation.listen( - (assetUid, qpath, submissionEditId, languageCode) => { + (assetUid, xpath, submissionEditId, languageCode) => { const processingUrl = getAssetProcessingUrl(assetUid); if (processingUrl === undefined) { processingActions.deleteTranslation.failed(NO_FEATURE_ERROR); } else { const data = getTranslationDataObject( - qpath, + xpath, submissionEditId, languageCode, DELETE_CHAR @@ -722,7 +722,7 @@ processingActions.deleteTranslation.failed.listen(() => { interface RequestAutoTranslationFn { ( assetUid: string, - qpath: string, + xpath: string, submissionEditId: string, languageCode: string ): void; @@ -740,7 +740,7 @@ interface RequestAutoTranslationDefinition extends RequestAutoTranslationFn { failed: ListenableCallback; } processingActions.requestAutoTranslation.listen( - (assetUid, qpath, submissionEditId, languageCode) => { + (assetUid, xpath, submissionEditId, languageCode) => { const processingUrl = getAssetProcessingUrl(assetUid); if (processingUrl === undefined) { processingActions.requestAutoTranslation.failed(NO_FEATURE_ERROR); @@ 
-748,7 +748,7 @@ processingActions.requestAutoTranslation.listen( const data: AutoTranslationRequest = { submission: submissionEditId, }; - data[qpath] = { + data[xpath] = { googletx: { status: 'requested', languageCode: languageCode, @@ -763,7 +763,7 @@ processingActions.requestAutoTranslation.listen( data: JSON.stringify(data), }) .done((response: ProcessingDataResponse) => { - const responseStatus = response[qpath]?.googletx?.status; + const responseStatus = response[xpath]?.googletx?.status; if (responseStatus === 'requested' || responseStatus === 'in_progress') { processingActions.requestAutoTranslation.in_progress({ diff --git a/jsapp/js/components/processing/routes.utils.tests.ts b/jsapp/js/components/processing/routes.utils.tests.ts index 7af424fcf6..4a3468c94d 100644 --- a/jsapp/js/components/processing/routes.utils.tests.ts +++ b/jsapp/js/components/processing/routes.utils.tests.ts @@ -12,7 +12,7 @@ describe('processing routes.utils tests', () => { const test = getProcessingRouteParts(path); chai.expect(test).to.deep.equal({ assetUid: 'abc123', - qpath: 'My_que', + xpath: 'My_que', submissionEditId: 'def-45gh-jklm', tabName: ProcessingTab.Analysis, }); @@ -23,7 +23,7 @@ describe('processing routes.utils tests', () => { const test = getProcessingRouteParts(path); chai.expect(test).to.deep.equal({ assetUid: 'abc123', - qpath: 'My_que', + xpath: 'My_que', submissionEditId: 'def-45gh-jklm', tabName: ProcessingTab.Transcript, }); @@ -34,7 +34,7 @@ describe('processing routes.utils tests', () => { const test = getProcessingRouteParts(path); chai.expect(test).to.deep.equal({ assetUid: 'abc123', - qpath: 'My_que', + xpath: 'My_que', submissionEditId: 'def-45gh-jklm', }); }); @@ -44,7 +44,7 @@ describe('processing routes.utils tests', () => { const test = getProcessingRouteParts(path); chai.expect(test).to.deep.equal({ assetUid: 'abc123', - qpath: 'My_que', + xpath: 'My_que', submissionEditId: 'def-45gh-jklm', }); }); @@ -54,7 +54,7 @@ describe('processing routes.utils tests', () => { const test = getProcessingRouteParts(path); chai.expect(test).to.deep.equal({ assetUid: '', - qpath: '', + xpath: '', submissionEditId: '', }); }); @@ -64,7 +64,7 @@ describe('processing routes.utils tests', () => { const test = getProcessingRouteParts(path); chai.expect(test).to.deep.equal({ assetUid: '', - qpath: '', + xpath: '', submissionEditId: '', }); }); @@ -74,7 +74,7 @@ describe('processing routes.utils tests', () => { const test = getProcessingRouteParts(path); chai.expect(test).to.deep.equal({ assetUid: '', - qpath: '', + xpath: '', submissionEditId: '', }); }); diff --git a/jsapp/js/components/processing/routes.utils.ts b/jsapp/js/components/processing/routes.utils.ts index 7f9c7918ff..45ba23cd80 100644 --- a/jsapp/js/components/processing/routes.utils.ts +++ b/jsapp/js/components/processing/routes.utils.ts @@ -1,7 +1,7 @@ // This is a collection of various utility functions related to processing // routes and navigation. 
-import {generatePath, matchPath} from 'react-router-dom'; +import {generatePath, matchPath, useNavigate} from 'react-router-dom'; import {router} from 'js/router/legacy'; import {ROUTES, PROCESSING_ROUTES, PROCESSING_ROUTE_GENERIC} from 'js/router/routerConstants'; import {getCurrentPath} from 'js/router/routerUtils'; @@ -24,7 +24,7 @@ const TabToRouteMap: Map = new Map([ interface ProcessingRouteParts { assetUid: string; - qpath: string; + xpath: string; submissionEditId: string; tabName?: ProcessingTab; } @@ -35,7 +35,7 @@ interface ProcessingRouteParts { export function getProcessingRouteParts(path: string): ProcessingRouteParts { const output: ProcessingRouteParts = { assetUid: '', - qpath: '', + xpath: '', submissionEditId: '', }; @@ -57,7 +57,7 @@ export function getProcessingRouteParts(path: string): ProcessingRouteParts { // Step 4. Assign all the found values to output output.assetUid = matchProfile.params.uid as string; - output.qpath = matchProfile.params.qpath as string; + output.xpath = decodeURLParamWithSlash(matchProfile.params.xpath || '') as string; output.submissionEditId = matchProfile.params.submissionEditId as string; if ( 'tabName' in matchProfile.params && @@ -65,10 +65,31 @@ export function getProcessingRouteParts(path: string): ProcessingRouteParts { ) { output.tabName = matchProfile.params.tabName as ProcessingTab; } - return output; }; +/** + * Restore a value previously encoded with encodeURLParamWithSlash to its + * original value + * + * @param value + */ +export function decodeURLParamWithSlash(value: string) { + return value.replace(/\|/g, '/'); +} + +/** + * Replace slashes ("/") with pipes ("|") + * + * React router seems to decode `%2F` automatically, thus we cannot use + * `encodeURIComponent()` to pass params with encoded slashes (i.e.: %2F) + * to `router.navigate()` without navigating to a URL with decoded slashes ("/") + * @param value + */ +export function encodeURLParamWithSlash(value: string) { + return encodeURIComponent(value.replace(/\//g, '|')); +} + export function getCurrentProcessingRouteParts(): ProcessingRouteParts { return getProcessingRouteParts(getCurrentPath()); } @@ -82,7 +103,7 @@ function applyCurrentRouteParams(targetRoute: string) { return generatePath(targetRoute, { uid: routeParams.assetUid, - qpath: routeParams.qpath, + xpath: encodeURLParamWithSlash(routeParams.xpath), submissionEditId: routeParams.submissionEditId, }); } @@ -100,7 +121,7 @@ export function isAnyProcessingRoute(path?: string): boolean { return Boolean( processingRouteParts.assetUid && processingRouteParts.submissionEditId && - processingRouteParts.qpath + processingRouteParts.xpath ); } @@ -155,7 +176,7 @@ export function goToTabRoute(targetTabRoute: string) { */ export function goToProcessing( assetUid: string, - qpath: string, + xpath: string, submissionEditId: string, remainOnSameTab?: boolean ) { @@ -173,7 +194,7 @@ export function goToProcessing( const path = generatePath(targetRoute, { uid: assetUid, - qpath, + xpath: encodeURLParamWithSlash(xpath), submissionEditId, }); router!.navigate(path); diff --git a/jsapp/js/components/processing/singleProcessingContent.tsx b/jsapp/js/components/processing/singleProcessingContent.tsx index 855e0135c5..4788e0c973 100644 --- a/jsapp/js/components/processing/singleProcessingContent.tsx +++ b/jsapp/js/components/processing/singleProcessingContent.tsx @@ -58,7 +58,6 @@ export default class SingleProcessingContent extends React.Component<{}> { if (isProcessingRouteActive(PROCESSING_ROUTES.ANALYSIS)) { return ; } - return
null; } diff --git a/jsapp/js/components/processing/singleProcessingHeader.tsx b/jsapp/js/components/processing/singleProcessingHeader.tsx index 80dbbc0962..09b18c0776 100644 --- a/jsapp/js/components/processing/singleProcessingHeader.tsx +++ b/jsapp/js/components/processing/singleProcessingHeader.tsx @@ -2,7 +2,7 @@ import React from 'react'; import {QUESTION_TYPES, META_QUESTION_TYPES} from 'js/constants'; import type {AssetContent, AssetResponse} from 'js/dataInterface'; import { - findRowByQpath, + findRowByXpath, getRowTypeIcon, getTranslatedRowLabel, getRowName, @@ -62,9 +62,9 @@ class SingleProcessingHeader extends React.Component< this.forceUpdate(); } - onQuestionSelectChange(newQpath: string | null) { - if (newQpath !== null) { - this.goToSubmission(newQpath, this.props.submissionEditId); + onQuestionSelectChange(newXpath: string | null) { + if (newXpath !== null) { + this.goToSubmission(newXpath, this.props.submissionEditId); } } @@ -98,13 +98,13 @@ class SingleProcessingHeader extends React.Component< } if (editIds) { - Object.keys(editIds).forEach((qpath) => { - const questionData = findRowByQpath(assetContent, qpath); + Object.keys(editIds).forEach((xpath) => { + const questionData = findRowByXpath(assetContent, xpath); // At this point we want to find out whether the question has at least // one editId (i.e. there is at least one transcriptable response to // the question). Otherwise there's no point in having the question as // selectable option. - const questionEditIds = editIds[qpath]; + const questionEditIds = editIds[xpath]; const hasAtLeastOneEditId = Boolean( questionEditIds.find((editIdOrNull) => editIdOrNull !== null) ); @@ -122,7 +122,7 @@ class SingleProcessingHeader extends React.Component< languageIndex ); options.push({ - value: qpath, + value: xpath, label: translatedLabel !== null ? translatedLabel : rowName, icon: getRowTypeIcon(questionData.type), }); @@ -177,15 +177,15 @@ class SingleProcessingHeader extends React.Component< } /** Goes to another submission. 
*/ - goToSubmission(qpath: string, targetSubmissionEditId: string) { - goToProcessing(this.props.assetUid, qpath, targetSubmissionEditId, true); + goToSubmission(xpath: string, targetSubmissionEditId: string) { + goToProcessing(this.props.assetUid, xpath, targetSubmissionEditId, true); } goPrev() { const prevEditId = this.getPrevSubmissionEditId(); if (prevEditId !== null) { this.goToSubmission( - singleProcessingStore.currentQuestionQpath, + singleProcessingStore.currentQuestionXpath, prevEditId ); } @@ -195,7 +195,7 @@ class SingleProcessingHeader extends React.Component< const nextEditId = this.getNextSubmissionEditId(); if (nextEditId !== null) { this.goToSubmission( - singleProcessingStore.currentQuestionQpath, + singleProcessingStore.currentQuestionXpath, nextEditId ); } @@ -303,7 +303,7 @@ class SingleProcessingHeader extends React.Component< type='gray' size='l' options={this.getQuestionSelectorOptions()} - selectedOption={singleProcessingStore.currentQuestionQpath} + selectedOption={singleProcessingStore.currentQuestionXpath} onChange={this.onQuestionSelectChange.bind(this)} /> diff --git a/jsapp/js/components/processing/singleProcessingRoute.tsx b/jsapp/js/components/processing/singleProcessingRoute.tsx index e421b2466b..53c8a56230 100644 --- a/jsapp/js/components/processing/singleProcessingRoute.tsx +++ b/jsapp/js/components/processing/singleProcessingRoute.tsx @@ -18,7 +18,7 @@ const NO_DATA_MESSAGE = t('There is no data for this question for the current su interface SingleProcessingRouteProps extends WithRouterProps { uid: string; - qpath: string; + xpath: string; submissionEditId: string; } @@ -76,10 +76,10 @@ export default class SingleProcessingRoute extends React.Component< /** Is processing enabled for current question. */ isProcessingEnabled() { - if (this.props.params.uid && this.props.params.qpath) { + if (this.props.params.uid && this.props.params.xpath) { return isRowProcessingEnabled( this.props.params.uid, - this.props.params.qpath + this.props.params.xpath ); } return false; diff --git a/jsapp/js/components/processing/singleProcessingStore.ts b/jsapp/js/components/processing/singleProcessingStore.ts index ed8b8de509..5780d01590 100644 --- a/jsapp/js/components/processing/singleProcessingStore.ts +++ b/jsapp/js/components/processing/singleProcessingStore.ts @@ -8,9 +8,9 @@ import { getAssetProcessingRows, isAssetProcessingActivated, getAssetAdvancedFeatures, - findRowByQpath, + findRowByXpath, getRowName, - getRowNameByQpath, + getRowNameByXpath, getFlatQuestionsList, getLanguageIndex, } from 'js/assetUtils'; @@ -99,7 +99,7 @@ interface TransxDraft { * ``` */ interface SubmissionsEditIds { - [qpath: string]: Array<{ + [xpath: string]: Array<{ editId: string; hasResponse: boolean; }>; @@ -186,8 +186,8 @@ class SingleProcessingStore extends Reflux.Store { return getCurrentProcessingRouteParts().assetUid; } - public get currentQuestionQpath() { - return getCurrentProcessingRouteParts().qpath; + public get currentQuestionXpath() { + return getCurrentProcessingRouteParts().xpath; } public get currentSubmissionEditId() { @@ -197,7 +197,7 @@ class SingleProcessingStore extends Reflux.Store { public get currentQuestionName() { const asset = assetStore.getAsset(this.currentAssetUid); if (asset?.content) { - const foundRow = findRowByQpath(asset.content, this.currentQuestionQpath); + const foundRow = findRowByXpath(asset.content, this.currentQuestionXpath); if (foundRow) { return getRowName(foundRow); } @@ -209,9 +209,9 @@ class SingleProcessingStore extends Reflux.Store { 
public get currentQuestionType(): AnyRowTypeName | undefined { const asset = assetStore.getAsset(this.currentAssetUid); if (asset?.content) { - const foundRow = findRowByQpath( + const foundRow = findRowByXpath( asset?.content, - this.currentQuestionQpath + this.currentQuestionXpath ); return foundRow?.type; } @@ -407,7 +407,7 @@ class SingleProcessingStore extends Reflux.Store { isAnyProcessingRoute(newPath) && previousPathParts && previousPathParts.assetUid === newPathParts.assetUid && - previousPathParts.qpath === newPathParts.qpath && + previousPathParts.xpath === newPathParts.xpath && previousPathParts.submissionEditId === newPathParts.submissionEditId && // This check is needed to avoid going into this in case when route // redirects from no tab (e.g. `/`) into default tab (e.g. `/transcript`). @@ -427,7 +427,7 @@ class SingleProcessingStore extends Reflux.Store { isAnyProcessingRoute(newPath) && previousPathParts && previousPathParts.assetUid === newPathParts.assetUid && - (previousPathParts.qpath !== newPathParts.qpath || + (previousPathParts.xpath !== newPathParts.xpath || previousPathParts.submissionEditId !== newPathParts.submissionEditId) ) { this.fetchProcessingData(); @@ -479,8 +479,8 @@ class SingleProcessingStore extends Reflux.Store { const asset = assetStore.getAsset(this.currentAssetUid); let flatPaths: SurveyFlatPaths = {}; - // We need to get a regular path (not qpath!) for each of the processing - // rows. In theory we could just convert the qpath strings, but it's safer + // We need to get a regular path (not xpath!) for each of the processing + // rows. In theory we could just convert the xpath strings, but it's safer // to use the asset data that we already have. const processingRowsPaths: string[] = []; @@ -488,11 +488,11 @@ class SingleProcessingStore extends Reflux.Store { flatPaths = getSurveyFlatPaths(asset.content.survey); if (processingRows) { - processingRows.forEach((qpath) => { + processingRows.forEach((xpath) => { if (asset?.content) { - // Here we need to "convert" qpath into name, as flatPaths work with - // names only. We search the row by qpath and use its name. - const rowName = getRowNameByQpath(asset.content, qpath); + // Here we need to "convert" xpath into name, as flatPaths work with + // names only. We search the row by xpath and use its name. + const rowName = getRowNameByXpath(asset.content, xpath); if (rowName && flatPaths[rowName]) { processingRowsPaths.push(flatPaths[rowName]); @@ -521,16 +521,16 @@ class SingleProcessingStore extends Reflux.Store { flatPaths = getSurveyFlatPaths(asset.content.survey); if (processingRows !== undefined) { - processingRows.forEach((qpath) => { - submissionsEditIds[qpath] = []; + processingRows.forEach((xpath) => { + submissionsEditIds[xpath] = []; }); response.results.forEach((result) => { - processingRows.forEach((qpath) => { + processingRows.forEach((xpath) => { if (asset?.content) { - // Here we need to "convert" qpath into name, as flatPaths work with - // names only. We search the row by qpath and use its name. - const rowName = getRowNameByQpath(asset.content, qpath); + // Here we need to "convert" xpath into name, as flatPaths work with + // names only. We search the row by xpath and use its name. 
+ const rowName = getRowNameByXpath(asset.content, xpath); if (rowName) { // `meta/rootUuid` is persistent across edits while `_uuid` is not; @@ -539,7 +539,7 @@ class SingleProcessingStore extends Reflux.Store { if (uuid === undefined) { uuid = result['_uuid']; } - submissionsEditIds[qpath].push({ + submissionsEditIds[xpath].push({ editId: uuid, hasResponse: Object.keys(result).includes(flatPaths[rowName]), }); @@ -580,7 +580,7 @@ class SingleProcessingStore extends Reflux.Store { } private onFetchProcessingDataCompleted(response: ProcessingDataResponse) { - const transcriptResponse = response[this.currentQuestionQpath]?.transcript; + const transcriptResponse = response[this.currentQuestionXpath]?.transcript; // NOTE: we treat empty transcript object same as nonexistent one this.data.transcript = undefined; if (transcriptResponse?.value && transcriptResponse?.languageCode) { @@ -588,8 +588,9 @@ class SingleProcessingStore extends Reflux.Store { } const translationsResponse = - response[this.currentQuestionQpath]?.translation; + response[this.currentQuestionXpath]?.translation; const translationsArray: Transx[] = []; + if (translationsResponse) { Object.keys(translationsResponse).forEach( (languageCode: LanguageCode) => { @@ -640,7 +641,7 @@ class SingleProcessingStore extends Reflux.Store { } private onSetTranscriptCompleted(response: ProcessingDataResponse) { - const transcriptResponse = response[this.currentQuestionQpath]?.transcript; + const transcriptResponse = response[this.currentQuestionXpath]?.transcript; this.data.isFetchingData = false; @@ -664,7 +665,7 @@ class SingleProcessingStore extends Reflux.Store { // Note: previously initiated automatic transcriptions may no longer be // applicable to the current route const googleTsResponse = - event.response[this.currentQuestionQpath]?.googlets; + event.response[this.currentQuestionXpath]?.googlets; return ( event.submissionEditId === this.currentSubmissionEditId && googleTsResponse && @@ -677,14 +678,14 @@ class SingleProcessingStore extends Reflux.Store { private onRequestAutoTranscriptionCompleted(event: AutoTransxEvent) { if ( - !this.currentQuestionQpath || + !this.currentQuestionXpath || !this.data.isPollingForTranscript || !this.data.transcriptDraft ) { return; } - const googleTsResponse = event.response[this.currentQuestionQpath]?.googlets; + const googleTsResponse = event.response[this.currentQuestionXpath]?.googlets; if (googleTsResponse && this.isAutoTranscriptionEventApplicable(event)) { this.data.isPollingForTranscript = false; this.data.transcriptDraft.value = googleTsResponse.value; @@ -725,7 +726,7 @@ class SingleProcessingStore extends Reflux.Store { private isAutoTranslationEventApplicable(event: AutoTransxEvent) { const googleTxResponse = - event.response[this.currentQuestionQpath]?.googletx; + event.response[this.currentQuestionXpath]?.googletx; return ( event.submissionEditId === this.currentSubmissionEditId && googleTxResponse && @@ -738,14 +739,14 @@ class SingleProcessingStore extends Reflux.Store { private onRequestAutoTranslationCompleted(event: AutoTransxEvent) { if ( - !this.currentQuestionQpath || + !this.currentQuestionXpath || !this.data.isPollingForTranslation || !this.data.translationDraft ) { return; } - const googleTxResponse = event.response[this.currentQuestionQpath]?.googletx; + const googleTxResponse = event.response[this.currentQuestionXpath]?.googletx; if ( googleTxResponse && this.isAutoTranslationEventApplicable(event) @@ -767,7 +768,7 @@ class SingleProcessingStore extends Reflux.Store { 
this.data.isPollingForTranslation = true; console.log('trying to poll!'); // TEMP DELETEME this.requestAutoTranslation( - event.response[this.currentQuestionQpath]!.googlets!.languageCode + event.response[this.currentQuestionXpath]!.googlets!.languageCode ); } else { console.log('no more polling!'); // TEMP DELETEME @@ -816,7 +817,7 @@ class SingleProcessingStore extends Reflux.Store { this.data.isFetchingData = true; processingActions.setTranscript( this.currentAssetUid, - this.currentQuestionQpath, + this.currentQuestionXpath, this.currentSubmissionEditId, languageCode, value @@ -828,7 +829,7 @@ class SingleProcessingStore extends Reflux.Store { this.data.isFetchingData = true; processingActions.deleteTranscript( this.currentAssetUid, - this.currentQuestionQpath, + this.currentQuestionXpath, this.currentSubmissionEditId ); this.trigger(this.data); @@ -838,7 +839,7 @@ class SingleProcessingStore extends Reflux.Store { this.data.isPollingForTranscript = true; processingActions.requestAutoTranscription( this.currentAssetUid, - this.currentQuestionQpath, + this.currentQuestionXpath, this.currentSubmissionEditId, this.data.transcriptDraft?.languageCode, this.data.transcriptDraft?.regionCode @@ -902,7 +903,7 @@ class SingleProcessingStore extends Reflux.Store { this.data.isFetchingData = true; processingActions.setTranslation( this.currentAssetUid, - this.currentQuestionQpath, + this.currentQuestionXpath, this.currentSubmissionEditId, languageCode, value @@ -914,7 +915,7 @@ class SingleProcessingStore extends Reflux.Store { this.data.isFetchingData = true; processingActions.deleteTranslation( this.currentAssetUid, - this.currentQuestionQpath, + this.currentQuestionXpath, this.currentSubmissionEditId, languageCode ); @@ -925,7 +926,7 @@ class SingleProcessingStore extends Reflux.Store { this.data.isFetchingData = true; processingActions.requestAutoTranslation( this.currentAssetUid, - this.currentQuestionQpath, + this.currentQuestionXpath, this.currentSubmissionEditId, languageCode ); @@ -979,7 +980,7 @@ class SingleProcessingStore extends Reflux.Store { /** NOTE: Returns editIds for current question name, not for all of them. */ getCurrentQuestionSubmissionsEditIds() { if (this.data.submissionsEditIds !== undefined) { - return this.data.submissionsEditIds[this.currentQuestionQpath]; + return this.data.submissionsEditIds[this.currentQuestionXpath]; } return undefined; } diff --git a/jsapp/js/components/processing/transcript/transcriptTab.component.tsx b/jsapp/js/components/processing/transcript/transcriptTab.component.tsx index 0112dcd866..1b4dadb533 100644 --- a/jsapp/js/components/processing/transcript/transcriptTab.component.tsx +++ b/jsapp/js/components/processing/transcript/transcriptTab.component.tsx @@ -43,7 +43,7 @@ export default class TranscriptTab extends React.Component<{}> { } // Step 2: Config - for selecting the transcript language and mode. - // We display it when there is ongoing draft, but it doesn't have a language + // We display it when there is ongoing draft, but it doesn't have a language // or a value, and the region code is not selected. if ( draft !== undefined && @@ -53,9 +53,9 @@ export default class TranscriptTab extends React.Component<{}> { return ; } - // Step 2.1: Config Automatic - for selecting region and other automatic + // Step 2.1: Config Automatic - for selecting region and other automatic // options. 
- // We display it when there is ongoing draft, but it doesn't have a language + // We display it when there is ongoing draft, but it doesn't have a language // or a value, and the region code is selected. if ( draft !== undefined && @@ -71,7 +71,7 @@ export default class TranscriptTab extends React.Component<{}> { } // Step 4: Viewer - display existing (on backend) transcript. - // We display it when there is transcript in the store, and there is no + // We display it when there is transcript in the store, and there is no // ongoing draft (we only support single transcript ATM). if ( singleProcessingStore.getTranscript() !== undefined && diff --git a/jsapp/js/components/submissions/audioCell.tsx b/jsapp/js/components/submissions/audioCell.tsx index dea417e925..5cc1394129 100644 --- a/jsapp/js/components/submissions/audioCell.tsx +++ b/jsapp/js/components/submissions/audioCell.tsx @@ -11,7 +11,7 @@ bem.AudioCell = makeBem(null, 'audio-cell'); interface AudioCellProps { assetUid: string; - qpath: string; + xpath: string; /* submissionEditId is meta/rootUuid || _uuid */ submissionEditId: string; /** Required by the mini player. String passed is an error message */ @@ -44,7 +44,7 @@ export default function AudioCell(props: AudioCellProps) { label={t('Open')} isDisabled={typeof props.mediaAttachment === 'string'} onClick={() => { - goToProcessing(props.assetUid, props.qpath, props.submissionEditId); + goToProcessing(props.assetUid, props.xpath, props.submissionEditId); }} /> diff --git a/jsapp/js/components/submissions/mediaCell.tsx b/jsapp/js/components/submissions/mediaCell.tsx index 58e89eb9aa..0029b889f5 100644 --- a/jsapp/js/components/submissions/mediaCell.tsx +++ b/jsapp/js/components/submissions/mediaCell.tsx @@ -39,7 +39,7 @@ interface MediaCellProps { /** Total submissions for text questions. 
*/ submissionTotal: number; assetUid: string; - qpath: string; + xpath: string; submissionUuid: string; } diff --git a/jsapp/js/components/submissions/submissionDataTable.tsx b/jsapp/js/components/submissions/submissionDataTable.tsx index 1cc22d4b78..46712a4210 100644 --- a/jsapp/js/components/submissions/submissionDataTable.tsx +++ b/jsapp/js/components/submissions/submissionDataTable.tsx @@ -58,7 +58,7 @@ class SubmissionDataTable extends React.Component { if (foundRow) { goToProcessing( this.props.asset.uid, - foundRow.$qpath, + foundRow.$xpath, this.props.submissionData._uuid ); } diff --git a/jsapp/js/components/submissions/submissionUtils.mocks.es6 b/jsapp/js/components/submissions/submissionUtils.mocks.es6 index a5d7f140a2..e73af844a6 100644 --- a/jsapp/js/components/submissions/submissionUtils.mocks.es6 +++ b/jsapp/js/components/submissions/submissionUtils.mocks.es6 @@ -1914,7 +1914,7 @@ export const assetWithSupplementalDetails = { { 'type': 'qual_text', 'uuid': 'ab0e40e1-fbcc-43e9-9d00-b9b3314089cb', - 'qpath': 'Use_the_camera_s_mic_ne_to_record_a_sound', + 'xpath': 'Use_the_camera_s_mic_ne_to_record_a_sound', 'scope': 'by_question#survey', 'labels': { '_default': 'What is?', @@ -1923,7 +1923,7 @@ export const assetWithSupplementalDetails = { { 'type': 'qual_integer', 'uuid': '97fd5387-ac2b-4108-b5b4-37fa91ae0e22', - 'qpath': 'Use_the_camera_s_mic_ne_to_record_a_sound', + 'xpath': 'Use_the_camera_s_mic_ne_to_record_a_sound', 'scope': 'by_question#survey', 'labels': { '_default': 'How much is the fish?', @@ -1932,7 +1932,7 @@ export const assetWithSupplementalDetails = { { 'type': 'qual_tags', 'uuid': 'b05f29f7-8b58-4dd7-8695-c29cb04f3f7a', - 'qpath': 'Use_the_camera_s_mic_ne_to_record_a_sound', + 'xpath': 'Use_the_camera_s_mic_ne_to_record_a_sound', 'scope': 'by_question#survey', 'labels': { '_default': 'Another tag question here?', @@ -1941,7 +1941,7 @@ export const assetWithSupplementalDetails = { { 'type': 'qual_select_multiple', 'uuid': '1a89e0da-3344-4b5d-b919-ab8b072e0918', - 'qpath': 'Use_the_camera_s_mic_ne_to_record_a_sound', + 'xpath': 'Use_the_camera_s_mic_ne_to_record_a_sound', 'scope': 'by_question#survey', 'labels': { '_default': 'Choose multiple', @@ -1970,7 +1970,7 @@ export const assetWithSupplementalDetails = { { 'type': 'qual_auto_keyword_count', 'uuid': 'd4813284-d928-43b7-bde5-133eabe76024', - 'qpath': 'Use_the_camera_s_mic_ne_to_record_a_sound', + 'xpath': 'Use_the_camera_s_mic_ne_to_record_a_sound', 'scope': 'by_question#survey', 'labels': { '_default': 'How many swear words were used?', @@ -1979,7 +1979,7 @@ export const assetWithSupplementalDetails = { { 'type': 'qual_tags', 'uuid': '056c8f57-0733-4669-a84e-aa9726ffbf6b', - 'qpath': 'Use_the_camera_s_mic_ne_to_record_a_sound', + 'xpath': 'Use_the_camera_s_mic_ne_to_record_a_sound', 'scope': 'by_question#survey', 'labels': { '_default': 'Do tags work?', diff --git a/jsapp/js/components/submissions/submissionUtils.ts b/jsapp/js/components/submissions/submissionUtils.ts index 4a15b99fd3..3cbb2ca5bd 100644 --- a/jsapp/js/components/submissions/submissionUtils.ts +++ b/jsapp/js/components/submissions/submissionUtils.ts @@ -372,8 +372,8 @@ export function getSubmissionDisplayData( ); parentGroup.addChild(rowObj); - const rowqpath = flatPaths[rowName].replace(/\//g, '-'); - supplementalDetailKeys[rowqpath]?.forEach((sdKey: string) => { + const rowxpath = flatPaths[rowName]; + supplementalDetailKeys[rowxpath]?.forEach((sdKey: string) => { parentGroup.addChild( new DisplayResponse( null, diff --git 
a/jsapp/js/components/submissions/table.tsx b/jsapp/js/components/submissions/table.tsx index 13b1da15ef..542f89c72c 100644 --- a/jsapp/js/components/submissions/table.tsx +++ b/jsapp/js/components/submissions/table.tsx @@ -969,7 +969,7 @@ export class DataTable extends React.Component { return ( @@ -986,7 +986,7 @@ export class DataTable extends React.Component { submissionIndex={row.index + 1} submissionTotal={this.state.submissions.length} assetUid={this.props.asset.uid} - qpath={q.$qpath} + xpath={q.$xpath} submissionUuid={row.original._uuid} /> ); diff --git a/jsapp/js/dataInterface.ts b/jsapp/js/dataInterface.ts index 9b1d8ed9b5..fd8681be6c 100644 --- a/jsapp/js/dataInterface.ts +++ b/jsapp/js/dataInterface.ts @@ -351,7 +351,6 @@ interface ExportSettingSettings { */ export interface SurveyRow { /** This is a unique identifier that includes both name and path (names of parents). */ - $qpath: string; $xpath: string; $autoname: string; $kuid: string; @@ -579,7 +578,7 @@ export interface AnalysisFormJsonField { type: string; language: string; source: string; - qpath: string; + xpath: string; settings: { mode: string; engine: string; diff --git a/jsapp/js/router/permProtectedRoute.tsx b/jsapp/js/router/permProtectedRoute.tsx index aa7b936585..ada4c47702 100644 --- a/jsapp/js/router/permProtectedRoute.tsx +++ b/jsapp/js/router/permProtectedRoute.tsx @@ -8,6 +8,7 @@ import assetStore from 'js/assetStore'; import type {PermissionCodename} from 'js/components/permissions/permConstants'; import type {WithRouterProps} from 'jsapp/js/router/legacy'; import type {AssetResponse, FailResponse} from 'js/dataInterface'; +import {decodeURLParamWithSlash} from "js/components/processing/routes.utils"; interface PermProtectedRouteProps extends WithRouterProps { /** One of PATHS */ @@ -137,6 +138,27 @@ class PermProtectedRoute extends React.Component< } } + filterProps(props: any) { + const {params, ...rest} = props; + if (!params?.xpath) { + return props; + } + + const {xpath, ...restParams} = params; + const decodedXPath = decodeURLParamWithSlash(xpath); + if (xpath !== decodedXPath) { + return { + ...rest, + params: { + xpath: decodedXPath, + ...restParams, + }, + }; + } else { + return props; + } + } + getUserHasRequiredPermission( asset: AssetResponse, requiredPermission: PermissionCodename @@ -168,10 +190,11 @@ class PermProtectedRoute extends React.Component< if (!this.state.isLoadAssetFinished) { return ; } else if (this.state.userHasRequiredPermissions) { + const filteredProps = this.filterProps(this.props); return ( }> diff --git a/jsapp/js/router/routerConstants.ts b/jsapp/js/router/routerConstants.ts index 0fa0a564b8..0ef491d8aa 100644 --- a/jsapp/js/router/routerConstants.ts +++ b/jsapp/js/router/routerConstants.ts @@ -34,8 +34,8 @@ export const ROUTES = Object.freeze({ FORM_GALLERY: '/forms/:uid/data/gallery', FORM_MAP: '/forms/:uid/data/map', FORM_MAP_BY: '/forms/:uid/data/map/:viewby', - /** Has: :uid, :qpath, :submissionEditId */ - FORM_PROCESSING_ROOT: '/forms/:uid/data/processing/:qpath/:submissionEditId', + /** Has: :uid, :xpath, :submissionEditId */ + FORM_PROCESSING_ROOT: '/forms/:uid/data/processing/:xpath/:submissionEditId', FORM_SETTINGS: '/forms/:uid/settings', FORM_MEDIA: '/forms/:uid/settings/media', FORM_SHARING: '/forms/:uid/settings/sharing', From 97e8e57de105dd3c264b5392645bd65269ca30e0 Mon Sep 17 00:00:00 2001 From: Olivier Leger Date: Fri, 30 Aug 2024 11:13:57 -0400 Subject: [PATCH 03/25] Support advanced features JSON validation with qpath --- 
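Note: the kpi/models/asset.py hunk below keeps previously saved advanced features loadable by converting the legacy dash-joined `qpath` identifiers to their slash-joined `xpath` counterparts before re-running the JSON schema validation. A minimal sketch of the idea, assuming a hypothetical qual question stored under the legacy key (the uuid, labels and the plain dash-to-slash replacement are illustrative only; the real `qpath_to_xpath()` helper looks the row up in the survey content instead of assuming a one-to-one mapping):

    # Hypothetical stored entry still using the legacy `qpath` key
    legacy_qual_question = {
        'type': 'qual_text',
        'uuid': '00000000-0000-0000-0000-000000000000',  # placeholder
        'scope': 'by_question#survey',
        'labels': {'_default': 'Example question?'},
        'qpath': 'group_name-question_name',  # dashes instead of slashes
    }

    # Rough equivalent of the fallback in validate_advanced_features():
    # swap the key, then validate again against ADVANCED_FEATURES_PARAMS_SCHEMA.
    legacy_qual_question['xpath'] = legacy_qual_question.pop('qpath').replace('-', '/')
    assert legacy_qual_question['xpath'] == 'group_name/question_name'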
.../advanced_features_params_schema.py | 1 + kpi/models/asset.py | 30 +++++++++++++++---- 2 files changed, 26 insertions(+), 5 deletions(-) diff --git a/kobo/apps/subsequences/advanced_features_params_schema.py b/kobo/apps/subsequences/advanced_features_params_schema.py index 846ce6f2ca..3efe7eb9a1 100644 --- a/kobo/apps/subsequences/advanced_features_params_schema.py +++ b/kobo/apps/subsequences/advanced_features_params_schema.py @@ -89,6 +89,7 @@ 'required': ['labels', 'uuid'], }, } + ADVANCED_FEATURES_PARAMS_SCHEMA['properties']['qual'] = { 'type': 'object', 'additionalProperties': False, diff --git a/kpi/models/asset.py b/kpi/models/asset.py index 8f542e884e..2d212f1c48 100644 --- a/kpi/models/asset.py +++ b/kpi/models/asset.py @@ -19,7 +19,8 @@ from formpack.utils.flatten_content import flatten_content from formpack.utils.json_hash import json_hash from formpack.utils.kobo_locking import strip_kobo_locking_profile -from jsonschema import validate as jsonschema_validate +from jsonschema import exceptions, validate as jsonschema_validate + from kobo.apps.reports.constants import ( SPECIFIC_REPORTS_KEY, @@ -1157,10 +1158,29 @@ def validate_advanced_features(self): if self.advanced_features is None: self.advanced_features = {} - jsonschema_validate( - instance=self.advanced_features, - schema=ADVANCED_FEATURES_PARAMS_SCHEMA, - ) + try: + jsonschema_validate( + instance=self.advanced_features, + schema=ADVANCED_FEATURES_PARAMS_SCHEMA, + ) + except exceptions.ValidationError as e: + if "'qpath' was unexpected" not in str(e): + raise + + # TODO delete the try/except when every asset is repopulated with + # `xpath` instead of `qpath`. + qual_survey_orig = self.advanced_features['qual']['qual_survey'] + qual_survey_iter = copy.deepcopy(qual_survey_orig) + for idx, qual_q in enumerate(qual_survey_iter): + qpath = qual_survey_orig[idx]['qpath'] + xpath = qpath_to_xpath(qpath, self) + del qual_survey_orig[idx]['qpath'] + qual_survey_orig[idx]['xpath'] = xpath + + jsonschema_validate( + instance=self.advanced_features, + schema=ADVANCED_FEATURES_PARAMS_SCHEMA, + ) @property def version__content_hash(self): From 5583793fe591ad1bf4d00b98ebc57e1554990556 Mon Sep 17 00:00:00 2001 From: Olivier Leger Date: Sat, 31 Aug 2024 15:10:34 -0400 Subject: [PATCH 04/25] Refactoring --- kobo/apps/subsequences/api_view.py | 22 ++--- kobo/apps/subsequences/utils/deprecation.py | 95 +++++++++++++++++++++ kpi/models/asset.py | 58 ++++--------- kpi/models/import_export_task.py | 2 +- kpi/serializers/v2/asset.py | 5 +- 5 files changed, 123 insertions(+), 59 deletions(-) diff --git a/kobo/apps/subsequences/api_view.py b/kobo/apps/subsequences/api_view.py index 77385b5681..7cf40fc96c 100644 --- a/kobo/apps/subsequences/api_view.py +++ b/kobo/apps/subsequences/api_view.py @@ -8,7 +8,9 @@ from rest_framework.views import APIView from kobo.apps.subsequences.models import SubmissionExtras -from kobo.apps.subsequences.utils.deprecation import qpath_to_xpath +from kobo.apps.subsequences.utils.deprecation import ( + sanitize_submission_extra_content, +) from kpi.models import Asset from kpi.permissions import SubmissionPermission from kpi.views.environment import check_asr_mt_access_for_user @@ -100,23 +102,9 @@ def get_submission_processing(asset, s_uuid): try: submission_extra = asset.submission_extras.get(submission_uuid=s_uuid) - # TODO delete the loop when every asset is repopulated with `xpath` + # TODO delete line below when every asset is repopulated with `xpath` # instead of `qpath`. 
- content = deepcopy(submission_extra.content) - changed = False - for old_xpath, values in submission_extra.content.items(): - if '-' in old_xpath and '/' not in old_xpath: - xpath = qpath_to_xpath(old_xpath, asset) - if xpath == old_xpath: - continue - - del content[old_xpath] - content[xpath] = values - changed = True - - if changed: - submission_extra.content = content - # TODO save submission_extra? + sanitize_submission_extra_content(submission_extra, asset) return Response(submission_extra.content) except SubmissionExtras.DoesNotExist: diff --git a/kobo/apps/subsequences/utils/deprecation.py b/kobo/apps/subsequences/utils/deprecation.py index cd94a2d5cd..1c35b73f97 100644 --- a/kobo/apps/subsequences/utils/deprecation.py +++ b/kobo/apps/subsequences/utils/deprecation.py @@ -1,3 +1,39 @@ +from __future__ import annotations + +from copy import deepcopy +from typing import Optional + +import jsonschema + +from kpi.fields import WritableJSONField +from ..advanced_features_params_schema import ( + ADVANCED_FEATURES_PARAMS_SCHEMA, +) + + +def jsonschema_validate(asset: 'Asset'): + try: + jsonschema.validate( + instance=asset.advanced_features, + schema=ADVANCED_FEATURES_PARAMS_SCHEMA, + ) + except jsonschema.exceptions.ValidationError as e: + if "'qpath' was unexpected" not in str(e): + raise + + qual_survey_orig = asset.advanced_features['qual']['qual_survey'] + qual_survey_iter = deepcopy(qual_survey_orig) + for idx, qual_q in enumerate(qual_survey_iter): + qpath = qual_survey_orig[idx]['qpath'] + xpath = qpath_to_xpath(qpath, asset) + del qual_survey_orig[idx]['qpath'] + qual_survey_orig[idx]['xpath'] = xpath + + jsonschema.validate( + instance=asset.advanced_features, + schema=ADVANCED_FEATURES_PARAMS_SCHEMA, + ) + def qpath_to_xpath(qpath: str, asset: 'Asset') -> str: """ @@ -17,3 +53,62 @@ def qpath_to_xpath(qpath: str, asset: 'Asset') -> str: return xpath raise KeyError(f'xpath for {qpath} not found') + + +def sanitize_known_columns(asset: 'Asset'): + for idx, known_column in enumerate(asset.known_cols): + xpath, *rest = known_column.split(':') + # Old `qpath` should not contain "/", but could contain "-". + # If the question does not belong to a group but does contain "-", + # it will enter this condition - which is not a problem except extra + # CPU usage for nothing. + if '-' in xpath and '/' not in xpath: + xpath = qpath_to_xpath(xpath, asset) + rest.insert(0, xpath) + asset.known_cols[idx] = ':'.join(rest) + + # TODO Should we save asset.known_cols if it has changed? + + +def sanitize_submission_extra_content( + submission_extra: 'SubmissionExtras', asset: 'Asset' +) -> Optional[dict]: + """ + Replace the `qpath` attribute (if it exists) with its `xpath` counterpart + """ + content = deepcopy(submission_extra.content) + changed = False + for old_xpath, values in submission_extra.content.items(): + if '-' in old_xpath and '/' not in old_xpath: + xpath = qpath_to_xpath(old_xpath, asset) + if xpath == old_xpath: + continue + + del content[old_xpath] + content[xpath] = values + changed = True + + if changed: + submission_extra.content = content + # TODO Should we save submission_extra? + + +class WritableAdvancedFeaturesField(WritableJSONField): + """ + This class brings support to old projects which are still using + `qpath` as the identifier for questions for advanced features. + + It should be deleted and replaced with WritableJSONField when all + assets are repopulated.
+ """ + def __init__(self, **kwargs): + super().__init__(**kwargs) + self._model_instance = None + + def to_representation(self, value): + self._model_instance.validate_advanced_features() + return value + + def get_attribute(self, instance): + self._model_instance = instance + return super().get_attribute(instance) diff --git a/kpi/models/asset.py b/kpi/models/asset.py index 2d212f1c48..f9cdc2a18f 100644 --- a/kpi/models/asset.py +++ b/kpi/models/asset.py @@ -19,21 +19,22 @@ from formpack.utils.flatten_content import flatten_content from formpack.utils.json_hash import json_hash from formpack.utils.kobo_locking import strip_kobo_locking_profile -from jsonschema import exceptions, validate as jsonschema_validate from kobo.apps.reports.constants import ( SPECIFIC_REPORTS_KEY, DEFAULT_REPORTS_KEY, ) -from kobo.apps.subsequences.advanced_features_params_schema import ( - ADVANCED_FEATURES_PARAMS_SCHEMA, -) from kobo.apps.subsequences.utils import ( advanced_feature_instances, advanced_submission_jsonschema, ) -from kobo.apps.subsequences.utils.deprecation import qpath_to_xpath +from kobo.apps.subsequences.utils.deprecation import ( + jsonschema_validate, + qpath_to_xpath, + sanitize_known_columns, + sanitize_submission_extra_content, +) from kobo.apps.subsequences.utils.parse_known_cols import parse_known_cols from kpi.constants import ( ASSET_TYPES, @@ -1095,6 +1096,7 @@ def update_submission_extra(self, content, user=None): .first() ) instances = self.get_advanced_feature_instances() + sanitize_submission_extra_content(sub, self) compiled_content = {**sub.content} for instance in instances: compiled_content = instance.compile_revised_record( @@ -1158,29 +1160,15 @@ def validate_advanced_features(self): if self.advanced_features is None: self.advanced_features = {} - try: - jsonschema_validate( - instance=self.advanced_features, - schema=ADVANCED_FEATURES_PARAMS_SCHEMA, - ) - except exceptions.ValidationError as e: - if "'qpath' was unexpected" not in str(e): - raise - - # TODO delete the try/except when every asset is repopulated with - # `xpath` instead of `qpath`. - qual_survey_orig = self.advanced_features['qual']['qual_survey'] - qual_survey_iter = copy.deepcopy(qual_survey_orig) - for idx, qual_q in enumerate(qual_survey_iter): - qpath = qual_survey_orig[idx]['qpath'] - xpath = qpath_to_xpath(qpath, self) - del qual_survey_orig[idx]['qpath'] - qual_survey_orig[idx]['xpath'] = xpath - - jsonschema_validate( - instance=self.advanced_features, - schema=ADVANCED_FEATURES_PARAMS_SCHEMA, - ) + # TODO uncomment the 4 lines below… + # jsonschema_validate( + # instance=self.advanced_features, + # schema=ADVANCED_FEATURES_PARAMS_SCHEMA, + # ) + + # TODO … and delete this one when every asset is repopulated with + # `xpath` instead of `qpath`. + jsonschema_validate(self) @property def version__content_hash(self): @@ -1212,18 +1200,8 @@ def version_number_and_date(self) -> str: def _get_additional_fields(self): - # TODO delete the loop when every asset is repopulated with `xpath` - # instead of `qpath`. - for idx, known_column in enumerate(self.known_cols): - xpath, *rest = known_column.split(':') - # Old `qpath` should not contain "/", but could contain "-". - # If the question does not belong to a group but does contain "-", - # it will enter this condition - which is not a problem except extra - # CPU usage for nothing. 
- if '-' in xpath and '/' not in xpath: - xpath = qpath_to_xpath(xpath, self) - rest.insert(0, xpath) - self.known_cols[idx] = ':'.join(rest) + # TODO delete the line below when every asset is repopulated with `xpath` + sanitize_known_columns(self) return parse_known_cols(self.known_cols) diff --git a/kpi/models/import_export_task.py b/kpi/models/import_export_task.py index 1c7ad6c3e4..436eae476e 100644 --- a/kpi/models/import_export_task.py +++ b/kpi/models/import_export_task.py @@ -16,6 +16,7 @@ from backports.zoneinfo import ZoneInfo import constance +import formpack import requests from django.conf import settings from django.contrib.postgres.indexes import BTreeIndex, HashIndex @@ -24,7 +25,6 @@ from django.db.models import F from django.urls import reverse from django.utils.translation import gettext as t -import formpack from formpack.constants import ( KOBO_LOCK_SHEET, ) diff --git a/kpi/serializers/v2/asset.py b/kpi/serializers/v2/asset.py index b8fecd4973..afc3d699a7 100644 --- a/kpi/serializers/v2/asset.py +++ b/kpi/serializers/v2/asset.py @@ -18,6 +18,9 @@ from kobo.apps.reports.constants import FUZZY_VERSION_PATTERN from kobo.apps.reports.report_data import build_formpack +from kobo.apps.subsequences.utils.deprecation import ( + WritableAdvancedFeaturesField, +) from kobo.apps.trash_bin.exceptions import ( TrashIntegrityError, TrashTaskInProgressError, @@ -297,7 +300,7 @@ class AssetSerializer(serializers.HyperlinkedModelSerializer): report_custom = WritableJSONField(required=False) map_styles = WritableJSONField(required=False) map_custom = WritableJSONField(required=False) - advanced_features = WritableJSONField(required=False) + advanced_features = WritableAdvancedFeaturesField(required=False) advanced_submission_schema = serializers.SerializerMethodField() files = serializers.SerializerMethodField() analysis_form_json = serializers.SerializerMethodField() From 9c1061a63c4c825db3ac16c5ab83261baf27f1d7 Mon Sep 17 00:00:00 2001 From: Olivier Leger Date: Tue, 3 Sep 2024 12:53:13 -0400 Subject: [PATCH 05/25] Update formpack dependencies --- dependencies/pip/dev_requirements.txt | 2 +- dependencies/pip/requirements.in | 2 +- dependencies/pip/requirements.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/dependencies/pip/dev_requirements.txt b/dependencies/pip/dev_requirements.txt index a0d327ba18..a5b8744c83 100644 --- a/dependencies/pip/dev_requirements.txt +++ b/dependencies/pip/dev_requirements.txt @@ -8,7 +8,7 @@ # via -r dependencies/pip/requirements.in -e git+https://github.com/trevoriancox/django-dont-vary-on.git@01a804122b7ddcdc22f50b40993f91c27b03bef6#egg=django-dont-vary-on # via -r dependencies/pip/requirements.in --e git+https://github.com/kobotoolbox/formpack.git@451df4cd2a0d614be69a3b3309259c67369f7efb#egg=formpack +-e git+https://github.com/kobotoolbox/formpack.git@eea375baccce809b9bfe393ec1abe1245e63ab04#egg=formpack # via -r dependencies/pip/requirements.in -e git+https://github.com/dimagi/python-digest@5c94bb74516b977b60180ee832765c0695ff2b56#egg=python_digest # via -r dependencies/pip/requirements.in diff --git a/dependencies/pip/requirements.in b/dependencies/pip/requirements.in index da85d168d3..3a5e1a81d8 100644 --- a/dependencies/pip/requirements.in +++ b/dependencies/pip/requirements.in @@ -2,7 +2,7 @@ # https://github.com/bndr/pipreqs is a handy utility, too.
# formpack --e git+https://github.com/kobotoolbox/formpack.git@451df4cd2a0d614be69a3b3309259c67369f7efb#egg=formpack +-e git+https://github.com/kobotoolbox/formpack.git@eea375baccce809b9bfe393ec1abe1245e63ab04#egg=formpack # More up-to-date version of django-digest than PyPI seems to have. # Also, python-digest is an unlisted dependency thereof. diff --git a/dependencies/pip/requirements.txt b/dependencies/pip/requirements.txt index 3299d51d17..e7c527dca5 100644 --- a/dependencies/pip/requirements.txt +++ b/dependencies/pip/requirements.txt @@ -8,7 +8,7 @@ # via -r dependencies/pip/requirements.in -e git+https://github.com/trevoriancox/django-dont-vary-on.git@01a804122b7ddcdc22f50b40993f91c27b03bef6#egg=django-dont-vary-on # via -r dependencies/pip/requirements.in --e git+https://github.com/kobotoolbox/formpack.git@451df4cd2a0d614be69a3b3309259c67369f7efb#egg=formpack +-e git+https://github.com/kobotoolbox/formpack.git@eea375baccce809b9bfe393ec1abe1245e63ab04#egg=formpack # via -r dependencies/pip/requirements.in -e git+https://github.com/dimagi/python-digest@5c94bb74516b977b60180ee832765c0695ff2b56#egg=python_digest # via -r dependencies/pip/requirements.in From 2cf67ef8456f9c2eb4fb0700f64cb842636aa705 Mon Sep 17 00:00:00 2001 From: Olivier Leger Date: Tue, 3 Sep 2024 14:45:11 -0400 Subject: [PATCH 06/25] Update FormPack --- dependencies/pip/dev_requirements.txt | 2 +- dependencies/pip/requirements.in | 2 +- dependencies/pip/requirements.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/dependencies/pip/dev_requirements.txt b/dependencies/pip/dev_requirements.txt index a5b8744c83..ea5267af66 100644 --- a/dependencies/pip/dev_requirements.txt +++ b/dependencies/pip/dev_requirements.txt @@ -8,7 +8,7 @@ # via -r dependencies/pip/requirements.in -e git+https://github.com/trevoriancox/django-dont-vary-on.git@01a804122b7ddcdc22f50b40993f91c27b03bef6#egg=django-dont-vary-on # via -r dependencies/pip/requirements.in --e git+https://github.com/kobotoolbox/formpack.git@eea375baccce809b9bfe393ec1abe1245e63ab04#egg=formpack +-e git+https://github.com/kobotoolbox/formpack.git@5a8cda8cc37a56a2313c98b88fcacc18049ef477#egg=formpack # via -r dependencies/pip/requirements.in -e git+https://github.com/dimagi/python-digest@5c94bb74516b977b60180ee832765c0695ff2b56#egg=python_digest # via -r dependencies/pip/requirements.in diff --git a/dependencies/pip/requirements.in b/dependencies/pip/requirements.in index 3a5e1a81d8..3ab614bb6d 100644 --- a/dependencies/pip/requirements.in +++ b/dependencies/pip/requirements.in @@ -2,7 +2,7 @@ # https://github.com/bndr/pipreqs is a handy utility, too. # formpack --e git+https://github.com/kobotoolbox/formpack.git@eea375baccce809b9bfe393ec1abe1245e63ab04#egg=formpack +-e git+https://github.com/kobotoolbox/formpack.git@5a8cda8cc37a56a2313c98b88fcacc18049ef477#egg=formpack # More up-to-date version of django-digest than PyPI seems to have. # Also, python-digest is an unlisted dependency thereof. 
diff --git a/dependencies/pip/requirements.txt b/dependencies/pip/requirements.txt index e7c527dca5..5d3f83fe9f 100644 --- a/dependencies/pip/requirements.txt +++ b/dependencies/pip/requirements.txt @@ -8,7 +8,7 @@ # via -r dependencies/pip/requirements.in -e git+https://github.com/trevoriancox/django-dont-vary-on.git@01a804122b7ddcdc22f50b40993f91c27b03bef6#egg=django-dont-vary-on # via -r dependencies/pip/requirements.in --e git+https://github.com/kobotoolbox/formpack.git@eea375baccce809b9bfe393ec1abe1245e63ab04#egg=formpack +-e git+https://github.com/kobotoolbox/formpack.git@5a8cda8cc37a56a2313c98b88fcacc18049ef477#egg=formpack # via -r dependencies/pip/requirements.in -e git+https://github.com/dimagi/python-digest@5c94bb74516b977b60180ee832765c0695ff2b56#egg=python_digest # via -r dependencies/pip/requirements.in From 6723dd6b140833597f3cf28181935ae298505e82 Mon Sep 17 00:00:00 2001 From: Leszek Date: Wed, 4 Sep 2024 14:13:12 +0200 Subject: [PATCH 07/25] Fix isProcessingRouteActive having problems with "|" character --- jsapp/js/components/processing/routes.utils.ts | 9 ++++++++- 1 file changed, 8 insertions(+), 1 deletion(-) diff --git a/jsapp/js/components/processing/routes.utils.ts b/jsapp/js/components/processing/routes.utils.ts index 45ba23cd80..4e669aa708 100644 --- a/jsapp/js/components/processing/routes.utils.ts +++ b/jsapp/js/components/processing/routes.utils.ts @@ -137,7 +137,14 @@ export function isAnyProcessingRouteActive(): boolean { * is active) */ export function isProcessingRouteActive(targetRoute: string) { - return getCurrentPath().startsWith(applyCurrentRouteParams(targetRoute)); + // We need to apply actual values for the route definition `:param`s here. + // After that we use `decodeURI` to ensure that `|` in the test route is the + // same as `|` in the `getCurrentPath`. Without this we would be comparing + // string that could be "exactly" the same, just one containing `|` and + // the other `%7C` (ASCII for `|`) - resulting in incorrect `false`. 
+  const routeToTest = decodeURI(applyCurrentRouteParams(targetRoute));
+
+  return getCurrentPath().startsWith(routeToTest);
 }
 
 /**

From 48b4a8164f4f9d67c16c8bb184a4ace1203aefe0 Mon Sep 17 00:00:00 2001
From: Leszek
Date: Wed, 4 Sep 2024 23:08:03 +0200
Subject: [PATCH 08/25] further improve isProcessingRouteActive in some edge cases

---
 .../permissions/userAssetPermsEditor.component.tsx | 6 +++---
 jsapp/js/components/processing/routes.utils.ts     | 6 ++++--
 2 files changed, 7 insertions(+), 5 deletions(-)

diff --git a/jsapp/js/components/permissions/userAssetPermsEditor.component.tsx b/jsapp/js/components/permissions/userAssetPermsEditor.component.tsx
index 4e3cb20062..67e03765fe 100644
--- a/jsapp/js/components/permissions/userAssetPermsEditor.component.tsx
+++ b/jsapp/js/components/permissions/userAssetPermsEditor.component.tsx
@@ -478,10 +478,10 @@ export default class UserAssetPermsEditor extends React.Component<
       false,
       true
     );
-    for (const [, qPath] of Object.entries(flatPaths)) {
+    for (const [, path] of Object.entries(flatPaths)) {
       output.push({
-        value: qPath,
-        label: qPath,
+        value: path,
+        label: path,
       });
     }
   }
diff --git a/jsapp/js/components/processing/routes.utils.ts b/jsapp/js/components/processing/routes.utils.ts
index 4e669aa708..ec460342b9 100644
--- a/jsapp/js/components/processing/routes.utils.ts
+++ b/jsapp/js/components/processing/routes.utils.ts
@@ -143,8 +143,10 @@ export function isProcessingRouteActive(targetRoute: string) {
   // string that could be "exactly" the same, just one containing `|` and
   // the other `%7C` (ASCII for `|`) - resulting in incorrect `false`.
   const routeToTest = decodeURI(applyCurrentRouteParams(targetRoute));
-
-  return getCurrentPath().startsWith(routeToTest);
+  // Sometimes the current path contains `|` and sometimes `%7C`, so we need to
+  // be extra safe here.
+ const currentPath = decodeURI(getCurrentPath()); + return currentPath.startsWith(routeToTest); } /** From 1098de0faf2d98bc79e5fee69a39e5b16cea75b0 Mon Sep 17 00:00:00 2001 From: Olivier Leger Date: Wed, 18 Sep 2024 16:20:11 -0400 Subject: [PATCH 09/25] Fix linter errors --- kobo/apps/subsequences/actions/keyword_search.py | 4 ++-- kobo/apps/subsequences/utils/__init__.py | 1 - kobo/apps/subsequences/utils/parse_known_cols.py | 3 --- kpi/models/asset.py | 1 + kpi/serializers/v2/asset.py | 3 --- 5 files changed, 3 insertions(+), 9 deletions(-) diff --git a/kobo/apps/subsequences/actions/keyword_search.py b/kobo/apps/subsequences/actions/keyword_search.py index 312162eb4e..3e83748e77 100644 --- a/kobo/apps/subsequences/actions/keyword_search.py +++ b/kobo/apps/subsequences/actions/keyword_search.py @@ -69,7 +69,7 @@ def check_submission_status(self, submission): response = self._traverse_object(submission, source) except KeyError: continue - # FIXME QPATH + xpath = source.split('/')[0] all_output = submission[xpath].setdefault(self.ID, []) this_output = self._get_matching_element(all_output, **query) @@ -92,7 +92,7 @@ def run_change(self, submission): matches = 0 for keyword in query['keywords']: matches += response['value'].count(keyword) - # FIXME QPATH + xpath = source.split('/')[0] all_output = submission[xpath].setdefault(self.ID, []) this_output = self._get_matching_element(all_output, **query) diff --git a/kobo/apps/subsequences/utils/__init__.py b/kobo/apps/subsequences/utils/__init__.py index 2f633da542..75a28cec6e 100644 --- a/kobo/apps/subsequences/utils/__init__.py +++ b/kobo/apps/subsequences/utils/__init__.py @@ -160,7 +160,6 @@ def stream_with_extras(submission_stream, asset): uuid = submission['_uuid'] all_supplemental_details = deepcopy(extras.get(uuid, {})) - # FIXME QPATH for supplemental_details in all_supplemental_details.values(): try: all_qual_responses = supplemental_details['qual'] diff --git a/kobo/apps/subsequences/utils/parse_known_cols.py b/kobo/apps/subsequences/utils/parse_known_cols.py index 4c4bb5c32b..53701ef622 100644 --- a/kobo/apps/subsequences/utils/parse_known_cols.py +++ b/kobo/apps/subsequences/utils/parse_known_cols.py @@ -16,16 +16,13 @@ def extend_col_deets(lang: str, coltype: str, label: str, xpath: str) -> dict: # NB: refer to commit d013bfe0f5 when trying to figure out the original # intent here - name = xpath.split('/')[-1] out = { - 'label': name, 'dtpath': f'{xpath}/{coltype}_{lang}', 'type': coltype, 'language': lang, 'label': f'{label} - {coltype}', 'name': f'{xpath}/{coltype}_{lang}', 'source': xpath, - # FIXME QPATH 'xpath': f'{xpath}/{coltype}/{lang}', 'settings': {'mode': 'manual', 'engine': f'engines/{coltype}_manual'}, 'path': [xpath, coltype], diff --git a/kpi/models/asset.py b/kpi/models/asset.py index 3a9e71a73a..64301925bd 100644 --- a/kpi/models/asset.py +++ b/kpi/models/asset.py @@ -86,6 +86,7 @@ from kpi.utils.object_permission import get_cached_code_names from kpi.utils.sluggify import sluggify_label + class AssetDeploymentStatus(models.TextChoices): ARCHIVED = 'archived', 'Archived' diff --git a/kpi/serializers/v2/asset.py b/kpi/serializers/v2/asset.py index afc3d699a7..af8a2d6808 100644 --- a/kpi/serializers/v2/asset.py +++ b/kpi/serializers/v2/asset.py @@ -689,9 +689,6 @@ def get_permissions(self, obj): queryset.all(), many=True, read_only=True, context=context ).data - def get_project_ownership(self, asset) -> Optional[dict]: - pass - def get_project_ownership(self, asset) -> Optional[dict]: if not (transfer := 
asset.transfers.order_by('-date_created').first()): return From 5422e925991b897182f8c9573afbe7376ee7d282 Mon Sep 17 00:00:00 2001 From: Olivier Leger Date: Thu, 19 Sep 2024 17:12:59 -0400 Subject: [PATCH 10/25] Apply requested changes --- kobo/apps/subsequences/api_view.py | 11 +- kobo/apps/subsequences/utils/__init__.py | 19 ++- kobo/apps/subsequences/utils/deprecation.py | 128 ++++++++++---------- kpi/models/asset.py | 49 ++++---- 4 files changed, 113 insertions(+), 94 deletions(-) diff --git a/kobo/apps/subsequences/api_view.py b/kobo/apps/subsequences/api_view.py index 7cf40fc96c..d5e806c66b 100644 --- a/kobo/apps/subsequences/api_view.py +++ b/kobo/apps/subsequences/api_view.py @@ -9,7 +9,7 @@ from kobo.apps.subsequences.models import SubmissionExtras from kobo.apps.subsequences.utils.deprecation import ( - sanitize_submission_extra_content, + get_sanitized_dict_keys, ) from kpi.models import Asset from kpi.permissions import SubmissionPermission @@ -102,9 +102,12 @@ def get_submission_processing(asset, s_uuid): try: submission_extra = asset.submission_extras.get(submission_uuid=s_uuid) - # TODO delete line below when every asset is repopulated with `xpath` - # instead of `qpath`. - sanitize_submission_extra_content(submission_extra, asset) + # TODO delete two lines below when every asset is repopulated with + # `xpath` instead of `qpath`. + if content := get_sanitized_dict_keys( + submission_extra.content, asset + ): + submission_extra.content = content return Response(submission_extra.content) except SubmissionExtras.DoesNotExist: diff --git a/kobo/apps/subsequences/utils/__init__.py b/kobo/apps/subsequences/utils/__init__.py index 75a28cec6e..ce5f223909 100644 --- a/kobo/apps/subsequences/utils/__init__.py +++ b/kobo/apps/subsequences/utils/__init__.py @@ -1,6 +1,10 @@ from collections import defaultdict from copy import deepcopy +from .deprecation import ( + get_sanitized_advanced_features, + get_sanitized_dict_keys, +) from ..actions.automatic_transcription import AutomaticTranscriptionAction from ..actions.translation import TranslationAction from ..actions.qual import QualAction @@ -83,10 +87,10 @@ def advanced_submission_jsonschema(content, actions, url=None): content = populate_paths(content) # devhack: this keeps serializer from breaking when old params # are still in the database - if 'translated' in actions: # migration + if 'translated' in actions: # migration actions['translation'] = actions['translated'] # migration assert 'languages' in actions['translation'] - del actions['translated'] # migration + del actions['translated'] # migration # /devhack for action_id, action_params in actions.items(): @@ -131,12 +135,19 @@ def stream_with_extras(submission_stream, asset): extras = dict( asset.submission_extras.values_list('submission_uuid', 'content') ) + + if asset.advanced_features and ( + advanced_features := get_sanitized_advanced_features(asset) + ): + asset.advanced_features = advanced_features + try: qual_survey = asset.advanced_features['qual']['qual_survey'] except KeyError: qual_survey = [] else: qual_survey = deepcopy(qual_survey) + # keys are question UUIDs, values are question definitions qual_questions_by_uuid = {} # outer keys are question UUIDs, inner keys are choice UUIDs, values are @@ -201,5 +212,7 @@ def stream_with_extras(submission_stream, asset): val_expanded = val_expanded[0] qual_response['val'] = val_expanded qual_response.update(qual_q) - submission[SUPPLEMENTAL_DETAILS_KEY] = all_supplemental_details + submission[SUPPLEMENTAL_DETAILS_KEY] = 
get_sanitized_dict_keys( + all_supplemental_details, asset + ) yield submission diff --git a/kobo/apps/subsequences/utils/deprecation.py b/kobo/apps/subsequences/utils/deprecation.py index 1c35b73f97..e63abc9ccc 100644 --- a/kobo/apps/subsequences/utils/deprecation.py +++ b/kobo/apps/subsequences/utils/deprecation.py @@ -1,62 +1,61 @@ from __future__ import annotations +import json from copy import deepcopy -from typing import Optional - -import jsonschema from kpi.fields import WritableJSONField -from ..advanced_features_params_schema import ( - ADVANCED_FEATURES_PARAMS_SCHEMA, -) - - -def jsonschema_validate(asset: 'Asset'): - try: - jsonschema.validate( - instance=asset.advanced_features, - schema=ADVANCED_FEATURES_PARAMS_SCHEMA, - ) - except jsonschema.exceptions.ValidationError as e: - if "'qpath' was unexpected" not in str(e): - raise - - qual_survey_orig = asset.advanced_features['qual']['qual_survey'] - qual_survey_iter = deepcopy(qual_survey_orig) - for idx, qual_q in enumerate(qual_survey_iter): - qpath = qual_survey_orig[idx]['qpath'] - xpath = qpath_to_xpath(qpath, asset) - del qual_survey_orig[idx]['qpath'] - qual_survey_orig[idx]['xpath'] = xpath - - jsonschema.validate( - instance=asset.advanced_features, - schema=ADVANCED_FEATURES_PARAMS_SCHEMA, - ) -def qpath_to_xpath(qpath: str, asset: 'Asset') -> str: +def get_sanitized_advanced_features(asset: 'Asset') -> dict | None: """ - We have abandoned `qpath` attribute in favor of `xpath`. - Existing projects may still use it though. - We need to find the equivalent `xpath` + Replace `qpath` attributes (if present) with their `xpath` + counterparts in asset.advanced_features """ - for row in asset.content['survey']: - if '$qpath' in row and '$xpath' in row and row['$qpath'] == qpath: - return row['$xpath'] - # Could not find it from the survey, let's try to detect it automatically - xpaths = asset.get_attachment_xpaths(deployed=True) - for xpath in xpaths: - dashed_xpath = xpath.replace('/', '-') - if dashed_xpath == qpath: - return xpath + if not asset.advanced_features: + return + + if 'qpath' not in json.dumps(asset.advanced_features): + return + + advanced_features = deepcopy(asset.advanced_features) + qual_survey_orig = advanced_features['qual']['qual_survey'] + qual_survey_iter = deepcopy(qual_survey_orig) + for idx, qual_q in enumerate(qual_survey_iter): + qpath = qual_survey_orig[idx]['qpath'] + xpath = qpath_to_xpath(qpath, asset) + del qual_survey_orig[idx]['qpath'] + qual_survey_orig[idx]['xpath'] = xpath + + return advanced_features - raise KeyError(f'xpath for {qpath} not found') +def get_sanitized_dict_keys(dict_to_update: dict, asset: 'Asset') -> dict | None: + """ + Update `dict_to_update` keys created with `qpath`(if they are present) with + their `xpath` counterpart. + """ + updated_dict = deepcopy(dict_to_update) + changed = False + for old_xpath, values in dict_to_update.items(): + if '-' in old_xpath and '/' not in old_xpath: + xpath = qpath_to_xpath(old_xpath, asset) + if xpath == old_xpath: + continue + + del updated_dict[old_xpath] + updated_dict[xpath] = values + changed = True -def sanitize_known_columns(asset: 'Asset'): - for idx, known_column in enumerate(asset.known_cols): + if changed: + return updated_dict + + +def get_sanitized_known_columns(asset: 'Asset') -> list: + + known_cols = list(asset.known_cols) + + for idx, known_column in enumerate(known_cols): xpath, *rest = known_column.split(':') # Old `qpath` should not contain "/", but could contain "-". 
# If the question does not belong to a group but does contain "-", @@ -65,32 +64,29 @@ def sanitize_known_columns(asset: 'Asset'): if '-' in xpath and '/' not in xpath: xpath = qpath_to_xpath(xpath, asset) rest.insert(0, xpath) - asset.known_cols[idx] = ':'.join(rest) + known_cols[idx] = ':'.join(rest) - # TODO Should we save asset.known_cols if it has changed? + return known_cols -def sanitize_submission_extra_content( - submission_extra: 'SubmissionExtras', asset: 'Asset' -) -> Optional[dict]: +def qpath_to_xpath(qpath: str, asset: 'Asset') -> str: """ - Replace with `qpath` attribute (if it exists) with `xpath` counterpart + We have abandoned `qpath` attribute in favor of `xpath`. + Existing projects may still use it though. + We need to find the equivalent `xpath`. """ - content = deepcopy(submission_extra.content) - changed = False - for old_xpath, values in submission_extra.content.items(): - if '-' in old_xpath and '/' not in old_xpath: - xpath = qpath_to_xpath(old_xpath, asset) - if xpath == old_xpath: - continue + for row in asset.content['survey']: + if '$qpath' in row and '$xpath' in row and row['$qpath'] == qpath: + return row['$xpath'] - del content[old_xpath] - content[xpath] = values - changed = True + # Could not find it from the survey, let's try to detect it automatically + xpaths = asset.get_attachment_xpaths(deployed=True) + for xpath in xpaths: + dashed_xpath = xpath.replace('/', '-') + if dashed_xpath == qpath: + return xpath - if changed: - submission_extra.content = content - # TODO Should we save submission_extra? + raise KeyError(f'xpath for {qpath} not found') class WritableAdvancedFeaturesField(WritableJSONField): @@ -107,7 +103,7 @@ def __init__(self, **kwargs): def to_representation(self, value): self._model_instance.validate_advanced_features() - return value + return self._model_instance.advanced_features def get_attribute(self, instance): self._model_instance = instance diff --git a/kpi/models/asset.py b/kpi/models/asset.py index 64301925bd..7165e8d48c 100644 --- a/kpi/models/asset.py +++ b/kpi/models/asset.py @@ -1,11 +1,10 @@ -# coding: utf-8 -# 😬 import copy import re from functools import reduce from operator import add from typing import Optional, Union +import jsonschema from django.conf import settings from django.contrib.auth.models import Permission from django.contrib.postgres.indexes import BTreeIndex, GinIndex @@ -20,7 +19,6 @@ from formpack.utils.json_hash import json_hash from formpack.utils.kobo_locking import strip_kobo_locking_profile - from kobo.apps.reports.constants import ( SPECIFIC_REPORTS_KEY, DEFAULT_REPORTS_KEY, @@ -29,11 +27,14 @@ advanced_feature_instances, advanced_submission_jsonschema, ) +from kobo.apps.subsequences.advanced_features_params_schema import ( + ADVANCED_FEATURES_PARAMS_SCHEMA, +) from kobo.apps.subsequences.utils.deprecation import ( - jsonschema_validate, + get_sanitized_known_columns, + get_sanitized_dict_keys, + get_sanitized_advanced_features, qpath_to_xpath, - sanitize_known_columns, - sanitize_submission_extra_content, ) from kobo.apps.subsequences.utils.parse_known_cols import parse_known_cols from kpi.constants import ( @@ -144,15 +145,15 @@ def add(self, *tags, **kwargs): strips leading and trailng whitespace. Behavior should match the cleanupTags function in jsapp/js/utils.ts. """ tags_out = [] - for t in tags: + for tag in tags: # Modify strings only; the superclass' add() method will then # create Tags or use existing ones as appropriate. 
We do not fix # existing Tag objects, which could also be passed into this # method, because a fixed name could collide with the name of # another Tag object already in the database. - if isinstance(t, str): - t = t.strip().replace(' ', '-') - tags_out.append(t) + if isinstance(tag, str): + tag = tag.strip().replace(' ', '-') + tags_out.append(tag) super().add(*tags_out, **kwargs) @@ -580,9 +581,14 @@ def get_advanced_feature_instances(self): return advanced_feature_instances(self.content, self.advanced_features) def get_advanced_submission_schema(self, url=None, content=False): + if len(self.advanced_features) == 0: NO_FEATURES_MSG = 'no advanced features activated for this form' return {'type': 'object', '$description': NO_FEATURES_MSG} + + if advanced_features := get_sanitized_advanced_features(self): + self.advanced_features = advanced_features + last_deployed_version = self.deployed_versions.first() if content: return advanced_submission_jsonschema( @@ -1091,8 +1097,11 @@ def update_submission_extra(self, content, user=None): .first() ) instances = self.get_advanced_feature_instances() - sanitize_submission_extra_content(sub, self) + if sub_extra_content := get_sanitized_dict_keys(sub.content, self): + sub.content = sub_extra_content + compiled_content = {**sub.content} + for instance in instances: compiled_content = instance.compile_revised_record( compiled_content, edits=content @@ -1155,15 +1164,13 @@ def validate_advanced_features(self): if self.advanced_features is None: self.advanced_features = {} - # TODO uncomment the 4 lines below… - # jsonschema_validate( - # instance=self.advanced_features, - # schema=ADVANCED_FEATURES_PARAMS_SCHEMA, - # ) + if advanced_features := get_sanitized_advanced_features(self): + self.advanced_features = advanced_features - # TODO … and delete this one when every asset is repopulated with - # `xpath` instead of `qpath`. 
- jsonschema_validate(self) + jsonschema.validate( + instance=self.advanced_features, + schema=ADVANCED_FEATURES_PARAMS_SCHEMA, + ) @property def version__content_hash(self): @@ -1195,8 +1202,8 @@ def version_number_and_date(self) -> str: def _get_additional_fields(self): - # TODO line below when when every asset is repopulated with `xpath` - sanitize_known_columns(self) + # TODO Remove line below when when every asset is repopulated with `xpath` + self.known_cols = get_sanitized_known_columns(self) return parse_known_cols(self.known_cols) From fd6e61a1ddc089b7c30a05d9be38152b7d1c0a7c Mon Sep 17 00:00:00 2001 From: Olivier Leger Date: Thu, 19 Sep 2024 17:13:53 -0400 Subject: [PATCH 11/25] Improve repop_known_cols script efficiency to update data --- .../subsequences/scripts/repop_known_cols.py | 117 +++++++++++++----- 1 file changed, 87 insertions(+), 30 deletions(-) diff --git a/kobo/apps/subsequences/scripts/repop_known_cols.py b/kobo/apps/subsequences/scripts/repop_known_cols.py index f01cc71da4..8f1b57a043 100644 --- a/kobo/apps/subsequences/scripts/repop_known_cols.py +++ b/kobo/apps/subsequences/scripts/repop_known_cols.py @@ -1,69 +1,126 @@ -# coding: utf-8 - -''' +""" Usage: python manage.py runscript repop_known_cols --script-args= -''' -import re +""" + import json -# from pprint import pprint -from kpi.models.asset import Asset -from kobo.apps.subsequences.models import SubmissionExtras +from django.core.paginator import Paginator -from kobo.apps.subsequences.utils.parse_known_cols import parse_known_cols +from kobo.apps.subsequences.models import SubmissionExtras from kobo.apps.subsequences.utils.determine_export_cols_with_values import ( determine_export_cols_with_values, ) +from kobo.apps.subsequences.utils.deprecation import ( + get_sanitized_known_columns, + get_sanitized_dict_keys, +) +from kpi.models.asset import Asset -def migrate_subex_content(sub_ex): +def migrate_subex_content( + sub_ex: SubmissionExtras, asset: Asset, save=True +) -> SubmissionExtras: content_string = json.dumps(sub_ex.content) - if '"translated"' in content_string: # migration - content_string = content_string.replace('"translated"', '"translation"') # migration + if '"translated"' in content_string: # migration + content_string = content_string.replace( + '"translated"', '"translation"' + ) # migration sub_ex.content = json.loads(content_string) + if content := get_sanitized_dict_keys(sub_ex.content, asset): + sub_ex.content = content print('submission_extra has old content') - sub_ex.save() + if save: + sub_ex.save() + return sub_ex -def migrate_subex_content_for_asset(asset): +def migrate_subex_content_for_asset(asset, save=True): + submission_extras = [] for sub_ex in asset.submission_extras.all(): - migrate_subex_content(sub_ex) + if updated_sub_ex := migrate_subex_content( + sub_ex, asset=asset, save=save + ): + submission_extras.append(updated_sub_ex) + return submission_extras -def repop_asset_known_cols(asset): + +def repop_asset_known_cols(asset, save=True): print(f'for_asset: {asset.uid}') print(' before:') print(' - ' + '\n - '.join(sorted(asset.known_cols))) known_cols = determine_export_cols_with_values(asset.submission_extras.all()) - asset.known_cols = known_cols - if 'translated' in asset.advanced_features: # migration - asset.advanced_features['translation'] = asset.advanced_features['translated'] # migration - del asset.advanced_features['translated'] # migration - asset.save(create_version=False) + asset.known_cols = get_sanitized_known_columns(asset) + if 'translated' in 
asset.advanced_features: # migration + asset.advanced_features['translation'] = asset.advanced_features['translated'] + del asset.advanced_features['translated'] + if save: + asset.save(create_version=False) print(' after:') print(' - ' + '\n - '.join(sorted(known_cols))) -def migrate_advanced_features(asset): - if 'translated' in asset.advanced_features: # migration - asset.advanced_features['translation'] = asset.advanced_features['translated'] # migration - asset.save(create_version=False) +def migrate_advanced_features(asset, save=True): + if 'translated' in asset.advanced_features: # migration + asset.advanced_features['translation'] = asset.advanced_features['translated'] + if save: + asset.save(create_version=False) def run(asset_uid=None): + if asset_uid == "!": SubmissionExtras.objects.all().delete() - for asset in Asset.objects.exclude(advanced_features__exact={}).all(): + for asset in Asset.objects.exclude(advanced_features__exact={}).iterator(): asset.advanced_features = {} asset.save(create_version=False) repop_asset_known_cols(asset) print('Note:\nRemoved all transcript+translation related data from all assets') elif asset_uid is None: - for asset in Asset.objects.exclude(advanced_features__exact={}).all(): - migrate_advanced_features(asset) - migrate_subex_content_for_asset(asset) - repop_asset_known_cols(asset) + + page_size = 2000 + paginator = Paginator( + Asset.objects.only( + 'id', + 'uid', + 'content', + 'advanced_features', + 'known_cols', + 'summary', + 'asset_type', + ) + .prefetch_related('submission_extras') + .exclude(advanced_features__exact={}) + .order_by('pk'), + page_size, + ) + + for page in paginator.page_range: + assets = paginator.page(page).object_list + updated_assets = [] + updated_submission_extras = [] + for asset in assets: + print(f'Processing asset {asset.uid}') + migrate_advanced_features(asset, save=False) + updated_submission_extras.extend( + migrate_subex_content_for_asset(asset, save=False) + ) + repop_asset_known_cols(asset, save=False) + asset.adjust_content_on_save() + asset.validate_advanced_features() + updated_assets.append(asset) + + if updated_assets: + Asset.objects.bulk_update( + updated_assets, + ['content', 'advanced_features', 'known_cols'], + ) + + if updated_submission_extras: + SubmissionExtras.objects.bulk_update( + updated_submission_extras, ['content'] + ) else: asset = Asset.objects.get(uid=asset_uid) migrate_subex_content_for_asset(asset) From c178d7af845820d46d5ba38b4b1b7ebce82adf60 Mon Sep 17 00:00:00 2001 From: Olivier Leger Date: Thu, 19 Sep 2024 18:09:16 -0400 Subject: [PATCH 12/25] fix bug when supplementalDetails is empty if qpath is not present --- kobo/apps/subsequences/api_view.py | 2 +- kobo/apps/subsequences/utils/__init__.py | 10 ++++++++-- 2 files changed, 9 insertions(+), 3 deletions(-) diff --git a/kobo/apps/subsequences/api_view.py b/kobo/apps/subsequences/api_view.py index d5e806c66b..e744cd7102 100644 --- a/kobo/apps/subsequences/api_view.py +++ b/kobo/apps/subsequences/api_view.py @@ -102,7 +102,7 @@ def get_submission_processing(asset, s_uuid): try: submission_extra = asset.submission_extras.get(submission_uuid=s_uuid) - # TODO delete two lines below when every asset is repopulated with + # TODO delete "if" statement below when every asset is repopulated with # `xpath` instead of `qpath`. 
if content := get_sanitized_dict_keys( submission_extra.content, asset diff --git a/kobo/apps/subsequences/utils/__init__.py b/kobo/apps/subsequences/utils/__init__.py index ce5f223909..3be21a587d 100644 --- a/kobo/apps/subsequences/utils/__init__.py +++ b/kobo/apps/subsequences/utils/__init__.py @@ -212,7 +212,13 @@ def stream_with_extras(submission_stream, asset): val_expanded = val_expanded[0] qual_response['val'] = val_expanded qual_response.update(qual_q) - submission[SUPPLEMENTAL_DETAILS_KEY] = get_sanitized_dict_keys( + + # Remove `qpath` if present + if sanitized_suppl_details := get_sanitized_dict_keys( all_supplemental_details, asset - ) + ): + all_supplemental_details = sanitized_suppl_details + + submission[SUPPLEMENTAL_DETAILS_KEY] = all_supplemental_details + yield submission From bf2069066c823b24d191f84366e539591830beb2 Mon Sep 17 00:00:00 2001 From: Rebecca Graber Date: Fri, 20 Sep 2024 12:55:14 -0400 Subject: [PATCH 13/25] Log out user from all devices (#5106) --- kobo/settings/base.py | 2 ++ kpi/tests/api/v2/test_api_logout_all.py | 47 +++++++++++++++++++++++++ kpi/urls/__init__.py | 2 ++ kpi/views/v2/logout.py | 32 +++++++++++++++++ 4 files changed, 83 insertions(+) create mode 100644 kpi/tests/api/v2/test_api_logout_all.py create mode 100644 kpi/views/v2/logout.py diff --git a/kobo/settings/base.py b/kobo/settings/base.py index e207737429..2b942448af 100644 --- a/kobo/settings/base.py +++ b/kobo/settings/base.py @@ -105,6 +105,7 @@ 'allauth.socialaccount', 'allauth.socialaccount.providers.microsoft', 'allauth.socialaccount.providers.openid_connect', + 'allauth.usersessions', 'hub.HubAppConfig', 'loginas', 'webpack_loader', @@ -154,6 +155,7 @@ 'django.contrib.sessions.middleware.SessionMiddleware', 'hub.middleware.LocaleMiddleware', 'allauth.account.middleware.AccountMiddleware', + 'allauth.usersessions.middleware.UserSessionsMiddleware', 'django.middleware.common.CommonMiddleware', # Still needed really? 
'kobo.apps.openrosa.libs.utils.middleware.LocaleMiddlewareWithTweaks', diff --git a/kpi/tests/api/v2/test_api_logout_all.py b/kpi/tests/api/v2/test_api_logout_all.py new file mode 100644 index 0000000000..c4c04ee2ea --- /dev/null +++ b/kpi/tests/api/v2/test_api_logout_all.py @@ -0,0 +1,47 @@ +from allauth.usersessions.models import UserSession +from django.urls import reverse + +from kobo.apps.kobo_auth.shortcuts import User +from kpi.tests.base_test_case import BaseTestCase + + +class TestLogoutAll(BaseTestCase): + + fixtures = ['test_data'] + + def test_logout_all_sessions(self): + # create 2 user sessions + user = User.objects.get(username='someuser') + UserSession.objects.create(user=user, session_key='12345', ip='1.2.3.4') + UserSession.objects.create(user=user, session_key='56789', ip='5.6.7.8') + count = UserSession.objects.filter(user=user).count() + self.assertEqual(count, 2) + self.client.force_login(user) + url = self._get_endpoint('logout_all') + self.client.post(reverse(url)) + + # ensure both sessions have been deleted + count = UserSession.objects.filter(user=user).count() + self.assertEqual(count, 0) + + def test_logout_all_sessions_does_not_affect_other_users(self): + user1 = User.objects.get(username='someuser') + user2 = User.objects.get(username='anotheruser') + # create sessions for user1 + UserSession.objects.create( + user=user1, session_key='12345', ip='1.2.3.4' + ) + UserSession.objects.create( + user=user1, session_key='56789', ip='5.6.7.8' + ) + count = UserSession.objects.count() + self.assertEqual(count, 2) + + # login user2 + self.client.force_login(user2) + url = self._get_endpoint('logout_all') + self.client.post(reverse(url)) + + # ensure no sessions have been deleted + count = UserSession.objects.filter().count() + self.assertEqual(count, 2) diff --git a/kpi/urls/__init__.py b/kpi/urls/__init__.py index 786df65e38..9cf2ce732b 100644 --- a/kpi/urls/__init__.py +++ b/kpi/urls/__init__.py @@ -13,6 +13,7 @@ from .router_api_v1 import router_api_v1 from .router_api_v2 import router_api_v2, URL_NAMESPACE +from ..views.v2.logout import logout_from_all_devices # TODO: Give other apps their own `urls.py` files instead of importing their # views directly! See @@ -50,6 +51,7 @@ re_path(r'^private-media/', include(private_storage.urls)), # Statistics for superusers re_path(r'^superuser_stats/', include(('kobo.apps.superuser_stats.urls', 'superuser_stats'))), + path('logout-all/', logout_from_all_devices, name='logout_all') ] diff --git a/kpi/views/v2/logout.py b/kpi/views/v2/logout.py new file mode 100644 index 0000000000..959f6a8330 --- /dev/null +++ b/kpi/views/v2/logout.py @@ -0,0 +1,32 @@ +from allauth.usersessions.adapter import get_adapter +from allauth.usersessions.models import UserSession +from rest_framework.decorators import api_view, permission_classes +from rest_framework.response import Response + +from kpi.permissions import IsAuthenticated + + +@api_view(['POST']) +@permission_classes((IsAuthenticated,)) +def logout_from_all_devices(request): + """ + Log calling user out from all devices + +
+    POST /logout-all/
+    
+ + > Example + > + > curl -H 'Authorization Token 12345' -X POST https://[kpi-url]/logout-all + + > Response 200 + + > { "Logged out of all sessions" } + + """ + user = request.user + all_user_sessions = UserSession.objects.purge_and_list(user) + adapter = get_adapter() + adapter.end_sessions(all_user_sessions) + return Response('Logged out of all sessions') From bb54380a2469a24da51fa03d86a30ee099974f49 Mon Sep 17 00:00:00 2001 From: James Kiger Date: Tue, 24 Sep 2024 08:26:04 -0400 Subject: [PATCH 14/25] Add freezegun to dev requirements for testing --- dependencies/pip/dev_requirements.in | 1 + dependencies/pip/dev_requirements.txt | 4 +++- 2 files changed, 4 insertions(+), 1 deletion(-) diff --git a/dependencies/pip/dev_requirements.in b/dependencies/pip/dev_requirements.in index cb497a7e1d..61020e2124 100644 --- a/dependencies/pip/dev_requirements.in +++ b/dependencies/pip/dev_requirements.in @@ -13,6 +13,7 @@ pytest-cov pytest-django pytest-env pytest-xdist +freezegun # KoboCAT httmock diff --git a/dependencies/pip/dev_requirements.txt b/dependencies/pip/dev_requirements.txt index b77f0951c9..d18d3a447d 100644 --- a/dependencies/pip/dev_requirements.txt +++ b/dependencies/pip/dev_requirements.txt @@ -267,6 +267,8 @@ flake8-quotes==3.4.0 # via -r dependencies/pip/dev_requirements.in flower==2.0.1 # via -r dependencies/pip/requirements.in +freezegun==1.5.1 + # via -r dependencies/pip/dev_requirements.in frozenlist==1.4.1 # via # aiohttp @@ -521,6 +523,7 @@ python-dateutil==2.9.0.post0 # -r dependencies/pip/requirements.in # botocore # celery + # freezegun # pandas # python-crontab python3-openid==3.2.0 @@ -685,4 +688,3 @@ yubico-client==1.13.0 # The following packages are considered to be unsafe in a requirements file: # setuptools -backports-zoneinfo==0.2.1; python_version < '3.9' From 561d5ab1036f2b77b362f42e50c533e6b4388cb2 Mon Sep 17 00:00:00 2001 From: James Kiger Date: Tue, 24 Sep 2024 08:27:04 -0400 Subject: [PATCH 15/25] Ensure billing cycle start date matches cancelation date in 31-day months --- kobo/apps/organizations/utils.py | 6 +++-- .../stripe/tests/test_organization_usage.py | 25 +++++++++++++++++++ 2 files changed, 29 insertions(+), 2 deletions(-) diff --git a/kobo/apps/organizations/utils.py b/kobo/apps/organizations/utils.py index 99e4147413..8f354f00a6 100644 --- a/kobo/apps/organizations/utils.py +++ b/kobo/apps/organizations/utils.py @@ -30,10 +30,12 @@ def get_monthly_billing_dates(organization: Union[Organization, None]): ): return first_of_this_month, first_of_next_month - period_end = canceled_subscription_anchor.replace(tzinfo=pytz.UTC) + canceled_subscription_anchor = canceled_subscription_anchor.replace(tzinfo=pytz.UTC) + period_end = canceled_subscription_anchor while period_end < now: period_end += relativedelta(months=1) - period_start = period_end - relativedelta(months=1) + # Avoid pushing billing cycle back to before cancelation date + period_start = max(period_end - relativedelta(months=1), canceled_subscription_anchor) return period_start, period_end if not billing_details.get('billing_cycle_anchor'): diff --git a/kobo/apps/stripe/tests/test_organization_usage.py b/kobo/apps/stripe/tests/test_organization_usage.py index 9de13ff2a9..cec207ed04 100644 --- a/kobo/apps/stripe/tests/test_organization_usage.py +++ b/kobo/apps/stripe/tests/test_organization_usage.py @@ -10,6 +10,7 @@ from django.urls import reverse from django.utils import timezone from djstripe.models import Customer +from freezegun import freeze_time from model_bakery import baker from 
kobo.apps.kobo_auth.shortcuts import User @@ -305,6 +306,30 @@ def test_plan_canceled_last_month(self): == current_billing_period_end.isoformat() ) + def test_plan_canceled_edge_date(self): + """ + If a plan is canceled on the last day of a 31-day month, we want the subsequent + billing cycle to end on the last day of the next month, but we also need to make + sure the cycle starts on the cancelation date + """ + cancel_date = datetime(year=2024, month=8, day=31, tzinfo=pytz.UTC) + with freeze_time(cancel_date.replace(day=1)): + subscription = generate_plan_subscription(self.organization, age_days=1095) + + subscription.status = 'canceled' + subscription.ended_at = cancel_date + subscription.save() + + with freeze_time(cancel_date.replace(month=9, day=1)): + response = self.client.get(self.detail_url) + current_month_start = datetime.fromisoformat(response.data['current_month_start']) + current_month_end = datetime.fromisoformat(response.data['current_month_end']) + + assert current_month_start.month == cancel_date.month + assert current_month_start.day == cancel_date.day + assert current_month_end.month == 9 + assert current_month_end.day == 30 + def test_multiple_canceled_plans(self): """ If a user has multiple canceled plans, their default billing cycle From f710e52b57edd328719a3d8bd522a1d77c80b30c Mon Sep 17 00:00:00 2001 From: James Kiger Date: Tue, 24 Sep 2024 08:38:25 -0400 Subject: [PATCH 16/25] Test cleanup --- kobo/apps/stripe/tests/test_organization_usage.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/kobo/apps/stripe/tests/test_organization_usage.py b/kobo/apps/stripe/tests/test_organization_usage.py index cec207ed04..bd32292620 100644 --- a/kobo/apps/stripe/tests/test_organization_usage.py +++ b/kobo/apps/stripe/tests/test_organization_usage.py @@ -314,7 +314,7 @@ def test_plan_canceled_edge_date(self): """ cancel_date = datetime(year=2024, month=8, day=31, tzinfo=pytz.UTC) with freeze_time(cancel_date.replace(day=1)): - subscription = generate_plan_subscription(self.organization, age_days=1095) + subscription = generate_plan_subscription(self.organization) subscription.status = 'canceled' subscription.ended_at = cancel_date From ec01d3f2dc42f07ad51ec6901833a2c2d5fe56ef Mon Sep 17 00:00:00 2001 From: James Kiger Date: Tue, 24 Sep 2024 09:01:40 -0400 Subject: [PATCH 17/25] Linting --- kobo/apps/organizations/utils.py | 9 +++++++-- kobo/apps/stripe/tests/test_organization_usage.py | 10 +++++++--- 2 files changed, 14 insertions(+), 5 deletions(-) diff --git a/kobo/apps/organizations/utils.py b/kobo/apps/organizations/utils.py index 8f354f00a6..5328343c24 100644 --- a/kobo/apps/organizations/utils.py +++ b/kobo/apps/organizations/utils.py @@ -30,12 +30,17 @@ def get_monthly_billing_dates(organization: Union[Organization, None]): ): return first_of_this_month, first_of_next_month - canceled_subscription_anchor = canceled_subscription_anchor.replace(tzinfo=pytz.UTC) + canceled_subscription_anchor = canceled_subscription_anchor.replace( + tzinfo=pytz.UTC + ) period_end = canceled_subscription_anchor while period_end < now: period_end += relativedelta(months=1) # Avoid pushing billing cycle back to before cancelation date - period_start = max(period_end - relativedelta(months=1), canceled_subscription_anchor) + period_start = max( + period_end - relativedelta(months=1), + canceled_subscription_anchor, + ) return period_start, period_end if not billing_details.get('billing_cycle_anchor'): diff --git a/kobo/apps/stripe/tests/test_organization_usage.py 
b/kobo/apps/stripe/tests/test_organization_usage.py index bd32292620..58f9f1cfb3 100644 --- a/kobo/apps/stripe/tests/test_organization_usage.py +++ b/kobo/apps/stripe/tests/test_organization_usage.py @@ -309,7 +309,7 @@ def test_plan_canceled_last_month(self): def test_plan_canceled_edge_date(self): """ If a plan is canceled on the last day of a 31-day month, we want the subsequent - billing cycle to end on the last day of the next month, but we also need to make + billing cycle to end on the last day of the next month, but we also need to make sure the cycle starts on the cancelation date """ cancel_date = datetime(year=2024, month=8, day=31, tzinfo=pytz.UTC) @@ -322,8 +322,12 @@ def test_plan_canceled_edge_date(self): with freeze_time(cancel_date.replace(month=9, day=1)): response = self.client.get(self.detail_url) - current_month_start = datetime.fromisoformat(response.data['current_month_start']) - current_month_end = datetime.fromisoformat(response.data['current_month_end']) + current_month_start = datetime.fromisoformat( + response.data['current_month_start'] + ) + current_month_end = datetime.fromisoformat( + response.data['current_month_end'] + ) assert current_month_start.month == cancel_date.month assert current_month_start.day == cancel_date.day From 4d96d9c4604b2146188fca557a12039953fa8124 Mon Sep 17 00:00:00 2001 From: James Kiger Date: Tue, 24 Sep 2024 10:15:01 -0400 Subject: [PATCH 18/25] Add back zoneinfo backports to dev dependencies.txt and remove remaining uses of pytz --- dependencies/pip/dev_requirements.txt | 1 + kobo/apps/organizations/utils.py | 30 +++++++++++-------- .../stripe/tests/test_organization_usage.py | 10 +++++-- kobo/apps/subsequences/actions/base.py | 8 +++-- 4 files changed, 32 insertions(+), 17 deletions(-) diff --git a/dependencies/pip/dev_requirements.txt b/dependencies/pip/dev_requirements.txt index d18d3a447d..e94950f4f5 100644 --- a/dependencies/pip/dev_requirements.txt +++ b/dependencies/pip/dev_requirements.txt @@ -688,3 +688,4 @@ yubico-client==1.13.0 # The following packages are considered to be unsafe in a requirements file: # setuptools +backports-zoneinfo==0.2.1; python_version < '3.9' \ No newline at end of file diff --git a/kobo/apps/organizations/utils.py b/kobo/apps/organizations/utils.py index 5328343c24..e28042ffa6 100644 --- a/kobo/apps/organizations/utils.py +++ b/kobo/apps/organizations/utils.py @@ -1,8 +1,12 @@ from typing import Union -import pytz from datetime import datetime from dateutil.relativedelta import relativedelta +try: + from zoneinfo import ZoneInfo +except ImportError: + from backports.zoneinfo import ZoneInfo + from django.utils import timezone from kobo.apps.organizations.models import Organization @@ -11,8 +15,8 @@ def get_monthly_billing_dates(organization: Union[Organization, None]): """Returns start and end dates of an organization's monthly billing cycle""" - now = timezone.now().replace(tzinfo=pytz.UTC) - first_of_this_month = datetime(now.year, now.month, 1, tzinfo=pytz.UTC) + now = timezone.now().replace(tzinfo=ZoneInfo('UTC')) + first_of_this_month = datetime(now.year, now.month, 1, tzinfo=ZoneInfo('UTC')) first_of_next_month = ( first_of_this_month + relativedelta(months=1) @@ -31,7 +35,7 @@ def get_monthly_billing_dates(organization: Union[Organization, None]): return first_of_this_month, first_of_next_month canceled_subscription_anchor = canceled_subscription_anchor.replace( - tzinfo=pytz.UTC + tzinfo=ZoneInfo('UTC') ) period_end = canceled_subscription_anchor while period_end < now: @@ -49,15 +53,15 
@@ def get_monthly_billing_dates(organization: Union[Organization, None]): # Subscription is billed monthly, use the current billing period dates if billing_details.get('recurring_interval') == 'month': period_start = billing_details.get('current_period_start').replace( - tzinfo=pytz.UTC + tzinfo=ZoneInfo('UTC') ) period_end = billing_details.get('current_period_end').replace( - tzinfo=pytz.UTC + tzinfo=ZoneInfo('UTC') ) return period_start, period_end # Subscription is billed yearly - count backwards from the end of the current billing year - period_start = billing_details.get('current_period_end').replace(tzinfo=pytz.UTC) + period_start = billing_details.get('current_period_end').replace(tzinfo=ZoneInfo('UTC')) while period_start > now: period_start -= relativedelta(months=1) period_end = period_start + relativedelta(months=1) @@ -66,8 +70,8 @@ def get_monthly_billing_dates(organization: Union[Organization, None]): def get_yearly_billing_dates(organization: Union[Organization, None]): """Returns start and end dates of an organization's annual billing cycle""" - now = timezone.now().replace(tzinfo=pytz.UTC) - first_of_this_year = datetime(now.year, 1, 1, tzinfo=pytz.UTC) + now = timezone.now().replace(tzinfo=ZoneInfo('UTC')) + first_of_this_year = datetime(now.year, 1, 1, tzinfo=ZoneInfo('UTC')) first_of_next_year = first_of_this_year + relativedelta(years=1) if not organization: @@ -80,15 +84,17 @@ def get_yearly_billing_dates(organization: Union[Organization, None]): # Subscription is billed yearly, use the dates from the subscription if billing_details.get('recurring_interval') == 'year': period_start = billing_details.get('current_period_start').replace( - tzinfo=pytz.UTC + tzinfo=ZoneInfo('UTC') ) period_end = billing_details.get('current_period_end').replace( - tzinfo=pytz.UTC + tzinfo=ZoneInfo('UTC') ) return period_start, period_end # Subscription is monthly, calculate this year's start based on anchor date - period_start = anchor_date.replace(tzinfo=pytz.UTC) + relativedelta(years=1) + period_start = anchor_date.replace( + tzinfo=ZoneInfo('UTC')) + relativedelta(years=1 + ) while period_start < now: anchor_date += relativedelta(years=1) period_end = period_start + relativedelta(years=1) diff --git a/kobo/apps/stripe/tests/test_organization_usage.py b/kobo/apps/stripe/tests/test_organization_usage.py index 58f9f1cfb3..0d152b7186 100644 --- a/kobo/apps/stripe/tests/test_organization_usage.py +++ b/kobo/apps/stripe/tests/test_organization_usage.py @@ -2,9 +2,13 @@ import itertools import pytest -import pytz from datetime import datetime from dateutil.relativedelta import relativedelta +try: + from zoneinfo import ZoneInfo +except ImportError: + from backports.zoneinfo import ZoneInfo + from django.core.cache import cache from django.test import override_settings from django.urls import reverse @@ -196,7 +200,7 @@ def test_default_plan_period(self): response = self.client.get(self.detail_url) now = timezone.now() - first_of_month = datetime(now.year, now.month, 1, tzinfo=pytz.UTC) + first_of_month = datetime(now.year, now.month, 1, tzinfo=ZoneInfo('UTC')) first_of_next_month = first_of_month + relativedelta(months=1) assert response.data['total_submission_count']['current_month'] == num_submissions @@ -312,7 +316,7 @@ def test_plan_canceled_edge_date(self): billing cycle to end on the last day of the next month, but we also need to make sure the cycle starts on the cancelation date """ - cancel_date = datetime(year=2024, month=8, day=31, tzinfo=pytz.UTC) + cancel_date = 
datetime(year=2024, month=8, day=31, tzinfo=ZoneInfo('UTC')) with freeze_time(cancel_date.replace(day=1)): subscription = generate_plan_subscription(self.organization) diff --git a/kobo/apps/subsequences/actions/base.py b/kobo/apps/subsequences/actions/base.py index c7993c4756..69e798f61a 100644 --- a/kobo/apps/subsequences/actions/base.py +++ b/kobo/apps/subsequences/actions/base.py @@ -1,5 +1,9 @@ import datetime -import pytz +try: + from zoneinfo import ZoneInfo +except ImportError: + from backports.zoneinfo import ZoneInfo + from django.utils import timezone from kobo.apps.subsequences.constants import (GOOGLETS, GOOGLETX) @@ -19,7 +23,7 @@ def __init__(self, params): self.load_params(params) def cur_time(self): - return datetime.datetime.now(tz=pytz.UTC).strftime('%Y-%m-%dT%H:%M:%SZ') + return datetime.datetime.now(tz=ZoneInfo('UTC')).strftime('%Y-%m-%dT%H:%M:%SZ') def load_params(self, params): raise NotImplementedError('subclass must define a load_params method') From da73987ca7da58da73bbe42cda47f4f7a54a53f3 Mon Sep 17 00:00:00 2001 From: Olivier Leger Date: Tue, 24 Sep 2024 10:15:48 -0400 Subject: [PATCH 19/25] Replace pytz with ZoneInfo --- kobo/apps/organizations/utils.py | 40 +++++++++++-------- .../stripe/tests/test_organization_usage.py | 13 ++++-- kobo/apps/subsequences/actions/base.py | 9 ++++- 3 files changed, 39 insertions(+), 23 deletions(-) diff --git a/kobo/apps/organizations/utils.py b/kobo/apps/organizations/utils.py index 99e4147413..5cb3e5eb99 100644 --- a/kobo/apps/organizations/utils.py +++ b/kobo/apps/organizations/utils.py @@ -1,6 +1,9 @@ from typing import Union +try: + from zoneinfo import ZoneInfo +except ImportError: + from backports.zoneinfo import ZoneInfo -import pytz from datetime import datetime from dateutil.relativedelta import relativedelta from django.utils import timezone @@ -11,8 +14,8 @@ def get_monthly_billing_dates(organization: Union[Organization, None]): """Returns start and end dates of an organization's monthly billing cycle""" - now = timezone.now().replace(tzinfo=pytz.UTC) - first_of_this_month = datetime(now.year, now.month, 1, tzinfo=pytz.UTC) + now = timezone.now().replace(tzinfo=ZoneInfo('UTC')) + first_of_this_month = datetime(now.year, now.month, 1, tzinfo=ZoneInfo('UTC')) first_of_next_month = ( first_of_this_month + relativedelta(months=1) @@ -21,36 +24,39 @@ def get_monthly_billing_dates(organization: Union[Organization, None]): # If no organization, just use the calendar month if not organization: return first_of_this_month, first_of_next_month - - # If no active subscription, check for canceled subscription + + # If no active subscription, check for canceled subscription if not (billing_details := organization.active_subscription_billing_details()): if not ( canceled_subscription_anchor := organization.canceled_subscription_billing_cycle_anchor() ): return first_of_this_month, first_of_next_month - - period_end = canceled_subscription_anchor.replace(tzinfo=pytz.UTC) + + period_end = canceled_subscription_anchor.replace(tzinfo=ZoneInfo('UTC')) while period_end < now: period_end += relativedelta(months=1) period_start = period_end - relativedelta(months=1) return period_start, period_end - + if not billing_details.get('billing_cycle_anchor'): return first_of_this_month, first_of_next_month # Subscription is billed monthly, use the current billing period dates if billing_details.get('recurring_interval') == 'month': period_start = billing_details.get('current_period_start').replace( - tzinfo=pytz.UTC + tzinfo=ZoneInfo('UTC') ) 
period_end = billing_details.get('current_period_end').replace( - tzinfo=pytz.UTC + tzinfo=ZoneInfo('UTC') ) return period_start, period_end - # Subscription is billed yearly - count backwards from the end of the current billing year - period_start = billing_details.get('current_period_end').replace(tzinfo=pytz.UTC) + # Subscription is billed yearly - count backwards from the end of the + # current billing year + period_start = billing_details.get('current_period_end').replace( + tzinfo=ZoneInfo('UTC') + ) while period_start > now: period_start -= relativedelta(months=1) period_end = period_start + relativedelta(months=1) @@ -59,8 +65,8 @@ def get_monthly_billing_dates(organization: Union[Organization, None]): def get_yearly_billing_dates(organization: Union[Organization, None]): """Returns start and end dates of an organization's annual billing cycle""" - now = timezone.now().replace(tzinfo=pytz.UTC) - first_of_this_year = datetime(now.year, 1, 1, tzinfo=pytz.UTC) + now = timezone.now().replace(tzinfo=ZoneInfo('UTC')) + first_of_this_year = datetime(now.year, 1, 1, tzinfo=ZoneInfo('UTC')) first_of_next_year = first_of_this_year + relativedelta(years=1) if not organization: @@ -73,15 +79,15 @@ def get_yearly_billing_dates(organization: Union[Organization, None]): # Subscription is billed yearly, use the dates from the subscription if billing_details.get('recurring_interval') == 'year': period_start = billing_details.get('current_period_start').replace( - tzinfo=pytz.UTC + tzinfo=ZoneInfo('UTC') ) period_end = billing_details.get('current_period_end').replace( - tzinfo=pytz.UTC + tzinfo=ZoneInfo('UTC') ) return period_start, period_end # Subscription is monthly, calculate this year's start based on anchor date - period_start = anchor_date.replace(tzinfo=pytz.UTC) + relativedelta(years=1) + period_start = anchor_date.replace(tzinfo=ZoneInfo('UTC')) + relativedelta(years=1) while period_start < now: anchor_date += relativedelta(years=1) period_end = period_start + relativedelta(years=1) diff --git a/kobo/apps/stripe/tests/test_organization_usage.py b/kobo/apps/stripe/tests/test_organization_usage.py index 9de13ff2a9..43b5e8b9d0 100644 --- a/kobo/apps/stripe/tests/test_organization_usage.py +++ b/kobo/apps/stripe/tests/test_organization_usage.py @@ -1,8 +1,11 @@ import timeit import itertools +try: + from zoneinfo import ZoneInfo +except ImportError: + from backports.zoneinfo import ZoneInfo import pytest -import pytz from datetime import datetime from dateutil.relativedelta import relativedelta from django.core.cache import cache @@ -11,6 +14,7 @@ from django.utils import timezone from djstripe.models import Customer from model_bakery import baker +from rest_framework import status from kobo.apps.kobo_auth.shortcuts import User from kobo.apps.organizations.models import Organization, OrganizationUser @@ -24,7 +28,6 @@ ) from kpi.tests.test_usage_calculator import BaseServiceUsageTestCase from kpi.tests.api.v2.test_api_asset_usage import AssetUsageAPITestCase -from rest_framework import status class OrganizationServiceUsageAPIMultiUserTestCase(BaseServiceUsageTestCase): @@ -46,7 +49,9 @@ def setUpTestData(cls): super().setUpTestData() cls.now = timezone.now() - cls.organization = baker.make(Organization, id=cls.org_id, name='test organization') + cls.organization = baker.make( + Organization, id=cls.org_id, name='test organization' + ) cls.organization.add_user(cls.anotheruser, is_admin=True) assets = create_mock_assets([cls.anotheruser], cls.assets_per_user) @@ -195,7 +200,7 @@ def 
test_default_plan_period(self): response = self.client.get(self.detail_url) now = timezone.now() - first_of_month = datetime(now.year, now.month, 1, tzinfo=pytz.UTC) + first_of_month = datetime(now.year, now.month, 1, tzinfo=ZoneInfo('UTC')) first_of_next_month = first_of_month + relativedelta(months=1) assert response.data['total_submission_count']['current_month'] == num_submissions diff --git a/kobo/apps/subsequences/actions/base.py b/kobo/apps/subsequences/actions/base.py index c7993c4756..8b01b9b14a 100644 --- a/kobo/apps/subsequences/actions/base.py +++ b/kobo/apps/subsequences/actions/base.py @@ -1,5 +1,9 @@ import datetime -import pytz +try: + from zoneinfo import ZoneInfo +except ImportError: + from backports.zoneinfo import ZoneInfo + from django.utils import timezone from kobo.apps.subsequences.constants import (GOOGLETS, GOOGLETX) @@ -7,6 +11,7 @@ ACTION_NEEDED = 'ACTION_NEEDED' PASSES = 'PASSES' + class BaseAction: ID = None _destination_field = '_supplementalDetails' @@ -19,7 +24,7 @@ def __init__(self, params): self.load_params(params) def cur_time(self): - return datetime.datetime.now(tz=pytz.UTC).strftime('%Y-%m-%dT%H:%M:%SZ') + return datetime.datetime.now(tz=ZoneInfo('UTC')).strftime('%Y-%m-%dT%H:%M:%SZ') def load_params(self, params): raise NotImplementedError('subclass must define a load_params method') From fd312c083e69ad5739bdc9095de543816c56ccf8 Mon Sep 17 00:00:00 2001 From: James Kiger Date: Tue, 24 Sep 2024 10:27:37 -0400 Subject: [PATCH 20/25] Linting --- kobo/apps/organizations/utils.py | 18 +++++++++++------- 1 file changed, 11 insertions(+), 7 deletions(-) diff --git a/kobo/apps/organizations/utils.py b/kobo/apps/organizations/utils.py index e28042ffa6..a5786f55d7 100644 --- a/kobo/apps/organizations/utils.py +++ b/kobo/apps/organizations/utils.py @@ -16,7 +16,9 @@ def get_monthly_billing_dates(organization: Union[Organization, None]): """Returns start and end dates of an organization's monthly billing cycle""" now = timezone.now().replace(tzinfo=ZoneInfo('UTC')) - first_of_this_month = datetime(now.year, now.month, 1, tzinfo=ZoneInfo('UTC')) + first_of_this_month = datetime( + now.year, now.month, 1, tzinfo=ZoneInfo('UTC') + ) first_of_next_month = ( first_of_this_month + relativedelta(months=1) @@ -25,7 +27,7 @@ def get_monthly_billing_dates(organization: Union[Organization, None]): # If no organization, just use the calendar month if not organization: return first_of_this_month, first_of_next_month - + # If no active subscription, check for canceled subscription if not (billing_details := organization.active_subscription_billing_details()): if not ( @@ -33,7 +35,7 @@ def get_monthly_billing_dates(organization: Union[Organization, None]): := organization.canceled_subscription_billing_cycle_anchor() ): return first_of_this_month, first_of_next_month - + canceled_subscription_anchor = canceled_subscription_anchor.replace( tzinfo=ZoneInfo('UTC') ) @@ -46,7 +48,7 @@ def get_monthly_billing_dates(organization: Union[Organization, None]): canceled_subscription_anchor, ) return period_start, period_end - + if not billing_details.get('billing_cycle_anchor'): return first_of_this_month, first_of_next_month @@ -61,7 +63,9 @@ def get_monthly_billing_dates(organization: Union[Organization, None]): return period_start, period_end # Subscription is billed yearly - count backwards from the end of the current billing year - period_start = billing_details.get('current_period_end').replace(tzinfo=ZoneInfo('UTC')) + period_start = 
billing_details.get('current_period_end').replace( + tzinfo=ZoneInfo('UTC') + ) while period_start > now: period_start -= relativedelta(months=1) period_end = period_start + relativedelta(months=1) @@ -93,8 +97,8 @@ def get_yearly_billing_dates(organization: Union[Organization, None]): # Subscription is monthly, calculate this year's start based on anchor date period_start = anchor_date.replace( - tzinfo=ZoneInfo('UTC')) + relativedelta(years=1 - ) + tzinfo=ZoneInfo('UTC') + ) + relativedelta(years=1) while period_start < now: anchor_date += relativedelta(years=1) period_end = period_start + relativedelta(years=1) From 5b059bf4a7e0037e1402e89cbb4447738e9bb53c Mon Sep 17 00:00:00 2001 From: Olivier Leger Date: Tue, 24 Sep 2024 10:16:18 -0400 Subject: [PATCH 21/25] Remove backport.zoneinfo dependency --- dependencies/pip/dev_requirements.txt | 2 +- dependencies/pip/requirements.in | 2 +- dependencies/pip/requirements.txt | 2 +- pip-compile.sh | 9 +++++---- 4 files changed, 8 insertions(+), 7 deletions(-) diff --git a/dependencies/pip/dev_requirements.txt b/dependencies/pip/dev_requirements.txt index b77f0951c9..271d23c4a3 100644 --- a/dependencies/pip/dev_requirements.txt +++ b/dependencies/pip/dev_requirements.txt @@ -685,4 +685,4 @@ yubico-client==1.13.0 # The following packages are considered to be unsafe in a requirements file: # setuptools -backports-zoneinfo==0.2.1; python_version < '3.9' +# backports-zoneinfo==0.2.1; python_version < '3.9' diff --git a/dependencies/pip/requirements.in b/dependencies/pip/requirements.in index c59f7921b7..ed01f1c4ce 100644 --- a/dependencies/pip/requirements.in +++ b/dependencies/pip/requirements.in @@ -111,4 +111,4 @@ djangorestframework-jsonp pandas # Python 3.8 support -backports.zoneinfo; python_version < '3.9' +# backports.zoneinfo; python_version < '3.9' diff --git a/dependencies/pip/requirements.txt b/dependencies/pip/requirements.txt index a4810f845f..dd585330f3 100644 --- a/dependencies/pip/requirements.txt +++ b/dependencies/pip/requirements.txt @@ -530,4 +530,4 @@ yarl==1.9.4 # via aiohttp yubico-client==1.13.0 # via django-trench -backports-zoneinfo==0.2.1; python_version < '3.9' +# backports-zoneinfo==0.2.1; python_version < '3.9' diff --git a/pip-compile.sh b/pip-compile.sh index d3c554b9e0..68ac90525a 100755 --- a/pip-compile.sh +++ b/pip-compile.sh @@ -12,7 +12,8 @@ do pip-compile "$@" "$in_file" || exit $? 
done for out_file in dependencies/pip/*.txt -do - # Workaround for https://github.com/jazzband/pip-tools/issues/1326 - echo "backports-zoneinfo==0.2.1; python_version < '3.9'" >> "$out_file" -done + +#do +# # Workaround for https://github.com/jazzband/pip-tools/issues/1326 +# echo "backports-zoneinfo==0.2.1; python_version < '3.9'" >> "$out_file" +#done From c02d8a50e1a6a32ae828514e8847478d284fb96e Mon Sep 17 00:00:00 2001 From: James Kiger Date: Tue, 24 Sep 2024 11:57:54 -0400 Subject: [PATCH 22/25] Remove duplicate import from merge --- kobo/apps/organizations/utils.py | 4 ---- kobo/apps/stripe/tests/test_organization_usage.py | 4 ---- 2 files changed, 8 deletions(-) diff --git a/kobo/apps/organizations/utils.py b/kobo/apps/organizations/utils.py index c356171e54..0d17572af4 100644 --- a/kobo/apps/organizations/utils.py +++ b/kobo/apps/organizations/utils.py @@ -6,10 +6,6 @@ from datetime import datetime from dateutil.relativedelta import relativedelta -try: - from zoneinfo import ZoneInfo -except ImportError: - from backports.zoneinfo import ZoneInfo from django.utils import timezone diff --git a/kobo/apps/stripe/tests/test_organization_usage.py b/kobo/apps/stripe/tests/test_organization_usage.py index 5a7a662ad3..c85cc057d2 100644 --- a/kobo/apps/stripe/tests/test_organization_usage.py +++ b/kobo/apps/stripe/tests/test_organization_usage.py @@ -8,10 +8,6 @@ import pytest from datetime import datetime from dateutil.relativedelta import relativedelta -try: - from zoneinfo import ZoneInfo -except ImportError: - from backports.zoneinfo import ZoneInfo from django.core.cache import cache from django.test import override_settings From 7dc0997b04a743cbed1632b290518e419d84a78e Mon Sep 17 00:00:00 2001 From: Olivier Leger Date: Tue, 24 Sep 2024 16:03:25 -0400 Subject: [PATCH 23/25] Remove useless comments --- dependencies/pip/dev_requirements.txt | 1 - dependencies/pip/requirements.in | 3 --- dependencies/pip/requirements.txt | 1 - kpi/views/v2/asset.py | 2 +- pip-compile.sh | 5 ----- 5 files changed, 1 insertion(+), 11 deletions(-) diff --git a/dependencies/pip/dev_requirements.txt b/dependencies/pip/dev_requirements.txt index 271d23c4a3..5b3215fec2 100644 --- a/dependencies/pip/dev_requirements.txt +++ b/dependencies/pip/dev_requirements.txt @@ -685,4 +685,3 @@ yubico-client==1.13.0 # The following packages are considered to be unsafe in a requirements file: # setuptools -# backports-zoneinfo==0.2.1; python_version < '3.9' diff --git a/dependencies/pip/requirements.in b/dependencies/pip/requirements.in index ed01f1c4ce..cd7bd9ea7e 100644 --- a/dependencies/pip/requirements.in +++ b/dependencies/pip/requirements.in @@ -109,6 +109,3 @@ modilabs-python-utils djangorestframework-csv djangorestframework-jsonp pandas - -# Python 3.8 support -# backports.zoneinfo; python_version < '3.9' diff --git a/dependencies/pip/requirements.txt b/dependencies/pip/requirements.txt index dd585330f3..8d4cf907dd 100644 --- a/dependencies/pip/requirements.txt +++ b/dependencies/pip/requirements.txt @@ -530,4 +530,3 @@ yarl==1.9.4 # via aiohttp yubico-client==1.13.0 # via django-trench -# backports-zoneinfo==0.2.1; python_version < '3.9' diff --git a/kpi/views/v2/asset.py b/kpi/views/v2/asset.py index 973dc3275b..b44de7640e 100644 --- a/kpi/views/v2/asset.py +++ b/kpi/views/v2/asset.py @@ -667,7 +667,7 @@ def get_serializer_context(self): # the issue here: https://github.com/kobotoolbox/kpi/issues/2576 queryset = self.__filtered_queryset - # 1) Retrieve all asset IDs of current list + # 1) Retrieve all asset IDs of 
the current list asset_ids = AssetPagination.get_all_asset_ids_from_queryset( queryset ) diff --git a/pip-compile.sh b/pip-compile.sh index 68ac90525a..b4f7b2da70 100755 --- a/pip-compile.sh +++ b/pip-compile.sh @@ -12,8 +12,3 @@ do pip-compile "$@" "$in_file" || exit $? done for out_file in dependencies/pip/*.txt - -#do -# # Workaround for https://github.com/jazzband/pip-tools/issues/1326 -# echo "backports-zoneinfo==0.2.1; python_version < '3.9'" >> "$out_file" -#done From af4e29007cfb47901879885b09e8fb67fae5386a Mon Sep 17 00:00:00 2001 From: James Kiger Date: Wed, 25 Sep 2024 07:13:42 -0400 Subject: [PATCH 24/25] Clearer unit test variables --- kobo/apps/stripe/tests/test_organization_usage.py | 13 ++++++++++--- 1 file changed, 10 insertions(+), 3 deletions(-) diff --git a/kobo/apps/stripe/tests/test_organization_usage.py b/kobo/apps/stripe/tests/test_organization_usage.py index c85cc057d2..8b0670504b 100644 --- a/kobo/apps/stripe/tests/test_organization_usage.py +++ b/kobo/apps/stripe/tests/test_organization_usage.py @@ -318,15 +318,22 @@ def test_plan_canceled_edge_date(self): billing cycle to end on the last day of the next month, but we also need to make sure the cycle starts on the cancelation date """ - cancel_date = datetime(year=2024, month=8, day=31, tzinfo=ZoneInfo('UTC')) - with freeze_time(cancel_date.replace(day=1)): + frozen_datetime_now = datetime( + year=2024, + month=9, + day=1, + tzinfo=ZoneInfo('UTC'), + ) + subscribe_date = frozen_datetime_now.replace(month=8, day=1) + cancel_date = frozen_datetime_now.replace(month=8, day=31) + with freeze_time(subscribe_date): subscription = generate_plan_subscription(self.organization) subscription.status = 'canceled' subscription.ended_at = cancel_date subscription.save() - with freeze_time(cancel_date.replace(month=9, day=1)): + with freeze_time(frozen_datetime_now): response = self.client.get(self.detail_url) current_month_start = datetime.fromisoformat( response.data['current_month_start'] From 658825c5fa21fd69500c82636f9a1a9fbcbc7e31 Mon Sep 17 00:00:00 2001 From: Rebecca Graber Date: Mon, 30 Sep 2024 10:29:22 -0400 Subject: [PATCH 25/25] Change access log endpoint names (#5132) --- kobo/apps/audit_log/urls.py | 4 ++-- kobo/apps/audit_log/views.py | 8 ++++---- 2 files changed, 6 insertions(+), 6 deletions(-) diff --git a/kobo/apps/audit_log/urls.py b/kobo/apps/audit_log/urls.py index 7820bca243..fac5f9ad15 100644 --- a/kobo/apps/audit_log/urls.py +++ b/kobo/apps/audit_log/urls.py @@ -5,9 +5,9 @@ router = DefaultRouter() router.register(r'audit-logs', AuditLogViewSet, basename='audit-log') -router.register(r'access-logs', AccessLogViewSet, basename='access-log') +router.register(r'access-logs', AllAccessLogViewSet, basename='all-access-logs') router.register( - r'access-logs/all', AllAccessLogViewSet, basename='all-access-logs' + r'access-logs/me', AccessLogViewSet, basename='access-log' ) urlpatterns = [] diff --git a/kobo/apps/audit_log/views.py b/kobo/apps/audit_log/views.py index 4d2ab951e0..5a81b54765 100644 --- a/kobo/apps/audit_log/views.py +++ b/kobo/apps/audit_log/views.py @@ -93,12 +93,12 @@ class AllAccessLogViewSet(AuditLogViewSet): Lists all access logs for all users. Only available to superusers.
-    GET /api/v2/access-logs/all
+    GET /api/v2/access-logs/
     
> Example > - > curl -X GET https://[kpi-url]/access-logs/all + > curl -X GET https://[kpi-url]/access-logs/ > Response 200 @@ -144,12 +144,12 @@ class AccessLogViewSet(AuditLogViewSet): Lists all access logs for the authenticated user
-    GET /api/v2/access-logs/
+    GET /api/v2/access-logs/me
     
> Example > - > curl -X GET https://[kpi-url]/access-logs/ + > curl -X GET https://[kpi-url]/access-logs/me > Response 200
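With the rename in place, access-logs/ (basename 'all-access-logs') serves the superuser-only listing of every user's logs, while access-logs/me (basename 'access-log') serves only the requesting user's own logs. A minimal smoke-test sketch for the new route names follows; it assumes the audit-log router is mounted under an 'api_v2' URL namespace (not shown in this patch) and relies on DRF's DefaultRouter adding its standard '-list' suffix to each registered basename.

# Sketch only: verifies the renamed routes resolve and are registered.
# Namespace 'api_v2' is an assumption; adjust to the project's URL conf.
from django.urls import reverse
from rest_framework import status
from rest_framework.test import APITestCase


class AccessLogRouteSmokeTest(APITestCase):

    def test_renamed_routes_resolve(self):
        # basename='all-access-logs' now owns the bare 'access-logs' prefix
        all_logs_url = reverse('api_v2:all-access-logs-list')
        # basename='access-log' moved to the 'access-logs/me' prefix
        my_logs_url = reverse('api_v2:access-log-list')

        assert all_logs_url.endswith('/access-logs/')
        assert my_logs_url.endswith('/access-logs/me/')

        # Anything but a 404 shows the route is actually wired up;
        # an anonymous client would normally get 401/403 here.
        response = self.client.get(my_logs_url)
        assert response.status_code != status.HTTP_404_NOT_FOUND

As a usage note, the same reverse() names would replace any hard-coded '/access-logs/all' URLs in existing tests or clients once this patch lands.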