diff --git a/arches/app/utils/data_management/resources/formats/archesfile.py b/arches/app/utils/data_management/resources/formats/archesfile.py
index e436bceee2c..dddb325c5d3 100644
--- a/arches/app/utils/data_management/resources/formats/archesfile.py
+++ b/arches/app/utils/data_management/resources/formats/archesfile.py
@@ -143,7 +143,7 @@ def replace_source_nodeid(self, tiles, mapping):
                 tile["data"] = new_data
         return tiles
 
-    def import_business_data_without_mapping(self, business_data, reporter, overwrite="append", prevent_indexing=False):
+    def import_business_data_without_mapping(self, business_data, reporter, overwrite="append", prevent_indexing=False, escape_function=False):
         errors = []
         last_resource = None  # only set if prevent_indexing=False
         for resource in business_data["resources"]:
@@ -202,7 +202,9 @@ def update_or_create_tile(src_tile):
                     for tile in [k for k in resource["tiles"] if k["parenttile_id"] is None]:
                         update_or_create_tile(tile)
 
-                resourceinstance.save(index=False)
+                resourceinstance.save(index=False, context={
+                    "escape_function": escape_function
+                })
 
                 if not prevent_indexing:
                     last_resource = self.save_descriptors_and_index(
@@ -229,11 +231,11 @@ def get_blank_tile(self, sourcetilegroup, blanktilecache, tiles, resourceinstanc
             blank_tile = None
         return blank_tile
 
-    def import_business_data(self, business_data, mapping=None, overwrite="append", prevent_indexing=False, transaction_id=None):
+    def import_business_data(self, business_data, mapping=None, overwrite="append", prevent_indexing=False, transaction_id=None, escape_function=False):
         reporter = ResourceImportReporter(business_data)
         try:
             if mapping is None or mapping == "":
-                self.import_business_data_without_mapping(business_data, reporter, overwrite=overwrite, prevent_indexing=prevent_indexing)
+                self.import_business_data_without_mapping(business_data, reporter, overwrite=overwrite, prevent_indexing=prevent_indexing, escape_function=escape_function)
             else:
                 blanktilecache = {}
                 target_nodegroup_cardinalities = {}
diff --git a/arches/app/utils/data_management/resources/importer.py b/arches/app/utils/data_management/resources/importer.py
index 5a4922ec284..180880db626 100644
--- a/arches/app/utils/data_management/resources/importer.py
+++ b/arches/app/utils/data_management/resources/importer.py
@@ -187,11 +187,11 @@ def import_business_data(
         create_collections=False,
         use_multiprocessing=False,
         prevent_indexing=False,
+        escape_function=False,
         transaction_id=None,
     ):
         start = time()
         cursor = connection.cursor()
-
         try:
             if file_format is None:
                 file_format = self.file_format
@@ -204,7 +204,7 @@ def import_business_data(
 
             if file_format == "json":
                 reader.import_business_data(
-                    business_data, mapping=mapping, overwrite=overwrite, prevent_indexing=prevent_indexing, transaction_id=transaction_id
+                    business_data, mapping=mapping, overwrite=overwrite, prevent_indexing=prevent_indexing, transaction_id=transaction_id, escape_function=escape_function
                 )
             elif file_format == "jsonl":
                 with open(self.file[0], "r") as openf:
diff --git a/arches/management/commands/packages.py b/arches/management/commands/packages.py
index 6c05c9e746c..6e32d8184c4 100644
--- a/arches/management/commands/packages.py
+++ b/arches/management/commands/packages.py
@@ -227,6 +227,14 @@ def add_arguments(self, parser):
             help="Prevents indexing the resources or concepts into Elasticsearch. If set to True will override any 'defer_indexing' setting.",
         )
 
+        parser.add_argument(
+            "-ef",
+            "--escape_function",
+            action="store_true",
+            dest="escape_function",
+            help="Passes escape_function=True in the context object handed to functions during import. A function can then choose to skip (escape) its work when the flag is set; this only takes effect for functions that implement the check.",
+        )
+
         parser.add_argument(
             "-create_concepts",
             "--create_concepts",
@@ -325,6 +333,7 @@ def handle(self, *args, **options):
                     use_multiprocessing=options["use_multiprocessing"],
                     force=options["yes"],
                     prevent_indexing=prevent_indexing,
+                    escape_function=options["escape_function"],
                 )
             except Exception as exc:
                 import traceback
@@ -1142,11 +1151,13 @@ def import_business_data(
         use_multiprocessing=False,
         force=False,
         prevent_indexing=False,
+        escape_function=False,
     ):
         """
         Imports business data from all formats. A config file (mapping file) is required for .csv format.
         """
+        print('-------------- importing business data: ', escape_function)
 
         # messages about experimental multiprocessing and JSONL support.
        if data_source.endswith(".jsonl"):
             print(
@@ -1221,6 +1232,7 @@ def import_business_data(
                 except Exception as e:
                     raise Exception("Couldn't save new entry for {language}.".format(language=language)) from e
 
+            print('-------------- before importer call')
             importer.import_business_data(
                 overwrite=overwrite,
                 bulk=bulk_load,
@@ -1228,6 +1240,7 @@ def import_business_data(
                 create_collections=create_collections,
                 use_multiprocessing=use_multiprocessing,
                 prevent_indexing=prevent_indexing,
+                escape_function=escape_function,
                 transaction_id=transaction_id,
             )
         else:
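
Reviewer note: a minimal sketch of how a function might honour the new flag, assuming the context dict passed to `resourceinstance.save(index=False, context={...})` above is forwarded to the function's `save()` handler as its `context` argument. The `EscapableFunction` class, its method signature, and `do_work()` are illustrative assumptions, not part of this diff.

```python
# Illustrative sketch only -- not part of this diff. It assumes the context dict
# given to resourceinstance.save(index=False, context={"escape_function": ...})
# is handed to the function's save() method as the `context` argument.
class EscapableFunction:
    def save(self, tile, request, context=None):
        # Skip the (potentially expensive) work when the import was run with
        # -ef / --escape_function.
        if context and context.get("escape_function"):
            return
        self.do_work(tile)

    def do_work(self, tile):
        # Placeholder for the function's real per-tile behaviour.
        pass
```

With the parser change above, the flag would be supplied alongside the usual import invocation, e.g. `python manage.py packages -o import_business_data -s data.json -ef`.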