diff --git a/arches/app/datatypes/core/geojson_feature_collection.py b/arches/app/datatypes/core/geojson_feature_collection.py
index 8960921013d..1242f306282 100644
--- a/arches/app/datatypes/core/geojson_feature_collection.py
+++ b/arches/app/datatypes/core/geojson_feature_collection.py
@@ -117,15 +117,11 @@ def check_geojson_value(self, value):
                     feature["geometry"]
                 )
                 for new_feature in new_collection["features"]:
-                    new_feature["id"] = (
-                        geojson["id"] if "id" in geojson else str(uuid.uuid4())
-                    )
+                    new_feature["id"] = geojson.get("id", str(uuid.uuid4()))
                 features = features + new_collection["features"]
             else:
                 # keep the feature id if it exists, or generate a fresh one.
-                feature["id"] = (
-                    feature["id"] if "id" in feature else str(uuid.uuid4())
-                )
+                feature["id"] = feature.get("id", str(uuid.uuid4()))
                 features.append(feature)
         geojson["features"] = features
         return geojson
diff --git a/arches/app/media/js/utils/map-popup-provider.js b/arches/app/media/js/utils/map-popup-provider.js
index 8a660438831..2da520e06ce 100644
--- a/arches/app/media/js/utils/map-popup-provider.js
+++ b/arches/app/media/js/utils/map-popup-provider.js
@@ -1,4 +1,5 @@
-define(['arches',
+define([
+    'arches',
     'knockout',
     'templates/views/components/map-popup.htm'
 ], function(arches, ko, popupTemplate) {
@@ -40,6 +41,44 @@ define(['arches',
             return features;
         },
 
+        /**
+         * Enables custom logic for how the popup feature is handled and/or mutated en route to the map filter.
+         * @param popupFeatureObject - the JavaScript object of the feature and its associated contexts (e.g. mapCard).
+         * @requires popupFeatureObject.mapCard.filterByFeatureGeom(),
+         * which is called with a single GeoJSON feature object.
+         */
+        sendFeatureToMapFilter: function(popupFeatureObject)
+        {
+            const foundFeature = this.findPopupFeatureById(popupFeatureObject);
+            popupFeatureObject.mapCard.filterByFeatureGeom(foundFeature);
+        },
+
+        /**
+         * Determines whether to show the "Filter by Feature" button.
+         * @param popupFeatureObject - the JavaScript object of the feature and its associated contexts (e.g. mapCard).
+         * @returns {boolean} - whether to show "Filter by Feature" on the map popup;
+         * typically requires at least one feature with a geometry and/or a featureid/resourceid combination.
+         */
+        showFilterByFeature: function(popupFeatureObject) {
+            const noFeatureId = popupFeatureObject.feature?.properties?.featureid === undefined;
+            if (noFeatureId)
+                return false;
+            return this.findPopupFeatureById(popupFeatureObject) !== null;
+        },
+
+        findPopupFeatureById: function(popupFeatureObject) {
+            let foundFeature = null;
+            const strippedFeatureId = popupFeatureObject.feature.properties.featureid.replace(/-/g,"");
+            for (let geometry of popupFeatureObject.geometries()) {
+                if (geometry.geom && Array.isArray(geometry.geom.features)) {
+                    foundFeature = geometry.geom.features.find(feature => feature.id.replace(/-/g, "") === strippedFeatureId);
+                    if (foundFeature)
+                        break;
+                }
+            }
+            return foundFeature;
+        },
+
     };
     return provider;
 });
diff --git a/arches/app/media/js/viewmodels/map.js b/arches/app/media/js/viewmodels/map.js
index 2eae59c83d2..149dd06169f 100644
--- a/arches/app/media/js/viewmodels/map.js
+++ b/arches/app/media/js/viewmodels/map.js
@@ -343,8 +343,11 @@ define([
         const popupFeatures = features.map(feature => {
             var data = feature.properties;
             var id = data.resourceinstanceid;
+            data.showFilterByFeatureButton = !!params.search;
             data.showEditButton = self.canEdit;
-            const descriptionProperties = ['displayname', 'graph_name', 'map_popup'];
+            data.sendFeatureToMapFilter = mapPopupProvider.sendFeatureToMapFilter.bind(mapPopupProvider);
+            data.showFilterByFeature = mapPopupProvider.showFilterByFeature.bind(mapPopupProvider);
+            const descriptionProperties = ['displayname', 'graph_name', 'map_popup', 'geometries'];
             if (id) {
                 if (!self.resourceLookup[id]){
                     data = _.defaults(data, {
@@ -352,6 +355,7 @@ define([
                         'displayname': '',
                         'graph_name': '',
                         'map_popup': '',
+                        'geometries': [],
                         'feature': feature,
                     });
                     if (data.permissions) {
diff --git a/arches/app/media/js/views/components/search/map-filter.js b/arches/app/media/js/views/components/search/map-filter.js
index 8355290b6df..2f00cce545d 100644
--- a/arches/app/media/js/views/components/search/map-filter.js
+++ b/arches/app/media/js/views/components/search/map-filter.js
@@ -274,6 +274,18 @@ define([
                 }
             }, this);
 
+            this.filterByFeatureGeom = function(feature) {
+                if (feature.geometry.type == 'Point' && this.buffer() == 0) { this.buffer(25); }
+                self.searchGeometries.removeAll();
+                this.draw.deleteAll();
+                this.draw.set({
+                    "type": "FeatureCollection",
+                    "features": [feature]
+                });
+                self.searchGeometries([feature]);
+                self.updateFilter();
+            };
+
             var updateSearchResultPointLayer = function() {
                 var pointSource = self.map().getSource('search-results-points');
                 var agg = ko.unwrap(self.searchAggregations);
diff --git a/arches/app/search/components/map_filter.py b/arches/app/search/components/map_filter.py
index 32bd6cfbcf1..67b2969bb8d 100644
--- a/arches/app/search/components/map_filter.py
+++ b/arches/app/search/components/map_filter.py
@@ -27,7 +27,7 @@ def append_dsl(self, search_query_object, **kwargs):
         permitted_nodegroups = kwargs.get("permitted_nodegroups")
         include_provisional = kwargs.get("include_provisional")
         search_query = Bool()
-        querystring_params = kwargs.get("querystring", "")
+        querystring_params = kwargs.get("querystring", "{}")
         spatial_filter = JSONDeserializer().deserialize(querystring_params)
         if "features" in spatial_filter:
             if len(spatial_filter["features"]) > 0:
@@ -35,53 +35,22 @@
                 feature_geom = spatial_filter["features"][0]["geometry"]
                 feature_properties = {}
                 if "properties" in spatial_filter["features"][0]:
                     feature_properties = spatial_filter["features"][0]["properties"]
-                buffer = {"width": 0, "unit": "ft"}
-                if "buffer" in feature_properties:
-                    buffer = feature_properties["buffer"]
-                search_buffer = _buffer(feature_geom, buffer["width"], buffer["unit"])
-                feature_geom = JSONDeserializer().deserialize(search_buffer.geojson)
-                geoshape = GeoShape(
-                    field="geometries.geom.features.geometry",
-                    type=feature_geom["type"],
-                    coordinates=feature_geom["coordinates"],
-                )
-
-                invert_spatial_search = False
-                if "inverted" in feature_properties:
-                    invert_spatial_search = feature_properties["inverted"]
-
-                spatial_query = Bool()
-                if invert_spatial_search is True:
-                    spatial_query.must_not(geoshape)
-                else:
-                    spatial_query.filter(geoshape)
-                # get the nodegroup_ids that the user has permission to search
-                spatial_query.filter(
-                    Terms(field="geometries.nodegroup_id", terms=permitted_nodegroups)
+                buffered_feature_geom = add_geoshape_query_to_search_query(
+                    feature_geom,
+                    feature_properties,
+                    permitted_nodegroups,
+                    include_provisional,
+                    search_query,
                 )
+                search_query_object["query"].add_query(search_query)
 
-                if include_provisional is False:
-                    spatial_query.filter(
-                        Terms(field="geometries.provisional", terms=["false"])
-                    )
-
-                elif include_provisional == "only provisional":
-                    spatial_query.filter(
-                        Terms(field="geometries.provisional", terms=["true"])
-                    )
-
-                search_query.filter(Nested(path="geometries", query=spatial_query))
-
-        search_query_object["query"].add_query(search_query)
-
-        if self.componentname not in search_query_object:
-            search_query_object[self.componentname] = {}
-
-        try:
-            search_query_object[self.componentname]["search_buffer"] = feature_geom
-        except NameError:
-            logger.info(_("Feature geometry is not defined"))
+                # Add the buffered feature geometry to the search query object
+                if self.componentname not in search_query_object:
+                    search_query_object[self.componentname] = {}
+                search_query_object[self.componentname][
+                    "search_buffer"
+                ] = buffered_feature_geom
 
 
 def _buffer(geojson, width=0, unit="ft"):
@@ -111,3 +80,48 @@
             res = cursor.fetchone()
             geom = GEOSGeometry(res[0], srid=4326)
     return geom
+
+
+def add_geoshape_query_to_search_query(
+    feature_geom,
+    feature_properties,
+    permitted_nodegroups,
+    include_provisional,
+    search_query,
+):
+
+    buffer = {"width": 0, "unit": "ft"}
+    if "buffer" in feature_properties:
+        buffer = feature_properties["buffer"]
+    # feature_geom = spatial_filter["features"][0]["geometry"]
+    search_buffer = _buffer(feature_geom, int(buffer["width"]), buffer["unit"])
+    feature_geom = JSONDeserializer().deserialize(search_buffer.geojson)
+    geoshape = GeoShape(
+        field="geometries.geom.features.geometry",
+        type=feature_geom["type"],
+        coordinates=feature_geom["coordinates"],
+    )
+    invert_spatial_search = False
+    if "inverted" in feature_properties:
+        invert_spatial_search = feature_properties["inverted"]
+
+    spatial_query = Bool()
+    if invert_spatial_search is True:
+        spatial_query.must_not(geoshape)
+    else:
+        spatial_query.filter(geoshape)
+
+    # get the nodegroup_ids that the user has permission to search
+    spatial_query.filter(
+        Terms(field="geometries.nodegroup_id", terms=permitted_nodegroups)
+    )
+
+    if include_provisional is False:
+        spatial_query.filter(Terms(field="geometries.provisional", terms=["false"]))
+
+    elif include_provisional == "only provisional":
+        spatial_query.filter(Terms(field="geometries.provisional", terms=["true"]))
+
+    search_query.filter(Nested(path="geometries", query=spatial_query))
+
+    return feature_geom
diff --git a/arches/app/templates/javascript.htm b/arches/app/templates/javascript.htm
index 331e0c647f6..da286151cf7 100644
--- a/arches/app/templates/javascript.htm
+++ b/arches/app/templates/javascript.htm
@@ -667,6 +667,7 @@
         map-add-line='{% trans "Add line" as mapAddLine %} "{{ mapAddLine|escapejs }}"'
         map-add-polygon='{% trans "Add polygon" as mapAddPolygon %} "{{ mapAddPolygon|escapejs }}"'
         map-select-drawing='{% trans "Select drawing" as mapSelectDrawing %} "{{ mapSelectDrawing|escapejs }}"'
+        filter-by-feature='{% trans "Filter by Map Feature" as filterByFeature %} "{{ filterByFeature|escapejs }}"'
         related-instance-map-sources='{% trans "Related instance map sources" as relatedInstanceMapSources %} "{{ relatedInstanceMapSources|escapejs }}"'
         related-instance-map-source-layers='{% trans "Related instance map source layers (optional)" as relatedInstanceMapSourceLayers %} "{{ relatedInstanceMapSourceLayers|escapejs }}"'
         intersection-layer-configuration='{% trans "Intersection layer configuration" as intersectionLayerConfiguration %} "{{ intersectionLayerConfiguration|escapejs }}"'
diff --git a/arches/app/templates/views/components/map-popup.htm b/arches/app/templates/views/components/map-popup.htm
index 3ff25881bd7..53e0bcd0a7c 100644
--- a/arches/app/templates/views/components/map-popup.htm
+++ b/arches/app/templates/views/components/map-popup.htm
@@ -9,65 +9,71 @@
[The body of this hunk was lost in extraction: only the {% block title %} and {% block body %}
tags of the old popup markup survive. The change replaces 65 lines of popup template markup with 71.]
diff --git a/tests/search/spatial_search_tests.py b/tests/search/spatial_search_tests.py
new file mode 100644
index 00000000000..77b43484a61
--- /dev/null
+++ b/tests/search/spatial_search_tests.py
@@ -0,0 +1,159 @@
+"""
+ARCHES - a program developed to inventory and manage immovable cultural heritage.
+Copyright (C) 2013 J. Paul Getty Trust and World Monuments Fund
+
+This program is free software: you can redistribute it and/or modify
+it under the terms of the GNU Affero General Public License as
+published by the Free Software Foundation, either version 3 of the
+License, or (at your option) any later version.
+
+This program is distributed in the hope that it will be useful,
+but WITHOUT ANY WARRANTY; without even the implied warranty of
+MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+GNU Affero General Public License for more details.
+
+You should have received a copy of the GNU Affero General Public License
+along with this program. If not, see <http://www.gnu.org/licenses/>.
+"""
+
+import os
+from tests.base_test import ArchesTestCase
+from tests.utils.search_test_utils import sync_es, get_response_json
+from django.contrib.auth.models import User, Group
+from django.test.client import Client
+from arches.app.models import models
+from arches.app.models.resource import Resource
+from arches.app.models.tile import Tile
+from arches.app.utils.i18n import LanguageSynchronizer
+from arches.app.utils.data_management.resource_graphs.importer import (
+    import_graph as ResourceGraphImporter,
+)
+from arches.app.utils.betterJSONSerializer import JSONDeserializer
+from arches.app.search.search_engine_factory import SearchEngineFactory
+from arches.app.search.elasticsearch_dsl_builder import Query
+from arches.app.search.mappings import TERMS_INDEX, CONCEPTS_INDEX, RESOURCES_INDEX
+
+# these tests can be run from the command line via
+# python manage.py test tests.search.spatial_search_tests --settings="tests.test_settings"
+
+
+class SpatialSearchTests(ArchesTestCase):
+    @classmethod
+    def setUpClass(cls):
+        super().setUpClass()
+
+        se = SearchEngineFactory().create()
+        q = Query(se=se)
+        for indexname in [TERMS_INDEX, CONCEPTS_INDEX, RESOURCES_INDEX]:
+            q.delete(index=indexname, refresh=True)
+
+        cls.client = Client()
+        cls.client.login(username="admin", password="admin")
+
+        LanguageSynchronizer.synchronize_settings_with_db()
+        models.ResourceInstance.objects.all().delete()
+        with open(
+            os.path.join("tests/fixtures/resource_graphs/Search Test Model.json"), "r"
+        ) as f:
+            archesfile = JSONDeserializer().deserialize(f)
+        ResourceGraphImporter(archesfile["graph"])
+
+        cls.search_model_graphid = "d291a445-fa5f-11e6-afa8-14109fd34195"
+        cls.search_model_cultural_period_nodeid = "7a182580-fa60-11e6-96d1-14109fd34195"
+        cls.search_model_creation_date_nodeid = "1c1d05f5-fa60-11e6-887f-14109fd34195"
+        cls.search_model_destruction_date_nodeid = (
+            "e771b8a1-65fe-11e7-9163-14109fd34195"
+        )
+        cls.search_model_name_nodeid = "2fe14de3-fa61-11e6-897b-14109fd34195"
+        cls.search_model_sensitive_info_nodeid = "57446fae-65ff-11e7-b63a-14109fd34195"
+        cls.search_model_geom_nodeid = "3ebc6785-fa61-11e6-8c85-14109fd34195"
+
+        cls.user = User.objects.create_user(
+            "unpriviliged_user", "unpriviliged_user@archesproject.org", "test"
+        )
+        cls.user.groups.add(Group.objects.get(name="Guest"))
+
+        cls.spatial_filter_geom_resourceid = "cbb1e9df-5110-4f22-933c-9ccbeb57431b"
+        cls.spatial_filter_geom_resource = Resource(
+            graph_id=cls.search_model_graphid,
+            resourceinstanceid=cls.spatial_filter_geom_resourceid,
+        )
+        cls.spatial_filter_geom_resource.save()
+        cls.polygon_feature_id = "2190cb9e-7c57-485c-bf1a-7b6f0389f8b1"
+
+        geom_poly = {
+            "type": "FeatureCollection",
+            "features": [
+                {
+                    "geometry": {
+                        "type": "Polygon",
+                        "coordinates": [
+                            [
+                                [-118.22687435396205, 34.04498354472949],
+                                [-118.22673462509519, 34.045024944460636],
+                                [-118.22661984555208, 34.044757071199754],
+                                [-118.22675979254618, 34.044715607647184],
+                                [-118.22687435396205, 34.04498354472949],
+                            ]
+                        ],
+                    },
+                    "type": "Feature",
+                    "id": cls.polygon_feature_id,
+                    "properties": {},
+                }
+            ],
+        }
+        poly_tile = Tile.get_blank_tile(
+            cls.search_model_geom_nodeid, resourceid=cls.spatial_filter_geom_resourceid
+        )
+        poly_tile.data[cls.search_model_geom_nodeid] = geom_poly
+        poly_tile.save()
+        cls.point_feature_id = "d41e81ac-4a53-4049-b266-c459b7641bc1"
+        geom_point = {
+            "type": "FeatureCollection",
+            "features": [
+                {
+                    "geometry": {
+                        "type": "Point",
+                        "coordinates": [-118.22687435396205, 34.04498354472949],
+                    },
+                    "type": "Feature",
+                    "id": cls.point_feature_id,
+                    "properties": {},
+                }
+            ],
+        }
+        point_tile = Tile.get_blank_tile(
+            cls.search_model_geom_nodeid, resourceid=cls.spatial_filter_geom_resourceid
+        )
+        point_tile.data[cls.search_model_geom_nodeid] = geom_point
+        point_tile.save()
+        sync_es(se)
+
+    @classmethod
+    def tearDownClass(cls):
+        cls.user.delete()
+        Resource.objects.filter(graph_id=cls.search_model_graphid).delete()
+        models.GraphModel.objects.filter(pk=cls.search_model_graphid).delete()
+        super().tearDownClass()
+
+    def test_spatial_search_by_point_buffered(self):
+        spatial_filter = {
+            "type": "FeatureCollection",
+            "features": [
+                {
+                    "type": "Feature",
+                    "properties": {
+                        "inverted": False,
+                        "buffer": {"width": "100", "unit": "ft"},
+                    },
+                    "geometry": {
+                        "coordinates": [-118.22687435396205, 34.04498354472949],
+                        "type": "Point",
+                    },
+                }
+            ],
+        }
+        query = {"map-filter": spatial_filter}
+        response_json = get_response_json(self.client, query=query)
+        self.assertEqual(response_json["results"]["hits"]["total"]["value"], 1)
diff --git a/tests/utils/search_test_utils.py b/tests/utils/search_test_utils.py
index 8e220a2f6ac..1dd389ac969 100644
--- a/tests/utils/search_test_utils.py
+++ b/tests/utils/search_test_utils.py
@@ -1,2 +1,21 @@
+import json
+from arches.app.utils.betterJSONSerializer import JSONSerializer
+from django.contrib.auth.models import User, Group
+
+
 def sync_es(search_engine, index="test_resources"):
     search_engine.es.indices.refresh(index=index)
+
+
+def get_response_json(client, query={}):
+    for filter_type, query_string in list(query.items()):
+        if not isinstance(query_string, str):
+            query_json_string = JSONSerializer().serialize(query_string)
+            query[filter_type] = query_json_string
+    resource_reviewer_group = Group.objects.get(name="Resource Reviewer")
+    test_user = User.objects.get(username="unpriviliged_user")
+    test_user.groups.add(resource_reviewer_group)
+    client.login(username="unpriviliged_user", password="test")
+    response = client.get("/search/resources", query)
+    response_json = json.loads(response.content)
+    return response_json
diff --git a/tests/views/search_tests.py b/tests/views/search_tests.py
index 301b6dcf54c..c3332036bc4 100644
--- a/tests/views/search_tests.py
+++ b/tests/views/search_tests.py
@@ -19,7 +19,7 @@
 import json
 
 from tests.base_test import ArchesTestCase
-from tests.utils.search_test_utils import sync_es
+from tests.utils.search_test_utils import sync_es, get_response_json
 from django.http import HttpRequest
 from django.urls import reverse
 from django.contrib.auth.models import User, Group
@@ -223,7 +223,8 @@ def test_temporal_only_search_1(self):
             "dateNodeId": "",
             "inverted": False,
         }
-        response_json = get_response_json(self.client, temporal_filter=temporal_filter)
+        query = {"time-filter": temporal_filter}
+        response_json = get_response_json(self.client, query=query)
         self.assertEqual(response_json["results"]["hits"]["total"]["value"], 3)
         self.assertCountEqual(
             extract_pks(response_json),
@@ -246,7 +247,8 @@ def test_temporal_only_search_2(self):
             "dateNodeId": "",
             "inverted": True,
         }
-        response_json = get_response_json(self.client, temporal_filter=temporal_filter)
+        query = {"time-filter": temporal_filter}
+        response_json = get_response_json(self.client, query=query)
         self.assertEqual(response_json["results"]["hits"]["total"]["value"], 2)
         self.assertCountEqual(
             extract_pks(response_json),
@@ -268,7 +270,8 @@ def test_temporal_only_search_3(self):
             "dateNodeId": self.search_model_creation_date_nodeid,
             "inverted": False,
         }
-        response_json = get_response_json(self.client, temporal_filter=temporal_filter)
+        query = {"time-filter": temporal_filter}
+        response_json = get_response_json(self.client, query=query)
         self.assertEqual(response_json["results"]["hits"]["total"]["value"], 2)
         self.assertCountEqual(
             extract_pks(response_json),
@@ -290,7 +293,8 @@ def test_temporal_only_search_4(self):
             "dateNodeId": self.search_model_creation_date_nodeid,
             "inverted": True,
         }
-        response_json = get_response_json(self.client, temporal_filter=temporal_filter)
+        query = {"time-filter": temporal_filter}
+        response_json = get_response_json(self.client, query=query)
         self.assertEqual(response_json["results"]["hits"]["total"]["value"], 0)
 
     def test_temporal_only_search_5(self):
@@ -305,7 +309,8 @@ def test_temporal_only_search_5(self):
             "dateNodeId": "",
             "inverted": False,
         }
-        response_json = get_response_json(self.client, temporal_filter=temporal_filter)
+        query = {"time-filter": temporal_filter}
+        response_json = get_response_json(self.client, query=query)
         self.assertEqual(response_json["results"]["hits"]["total"]["value"], 2)
         self.assertCountEqual(
             extract_pks(response_json),
@@ -327,7 +332,8 @@ def test_temporal_only_search_6(self):
             "dateNodeId": "",
             "inverted": True,
         }
-        response_json = get_response_json(self.client, temporal_filter=temporal_filter)
+        query = {"time-filter": temporal_filter}
+        response_json = get_response_json(self.client, query=query)
         self.assertEqual(response_json["results"]["hits"]["total"]["value"], 3)
         self.assertCountEqual(
             extract_pks(response_json),
@@ -350,7 +356,8 @@ def test_temporal_only_search_7(self):
             "dateNodeId": "",
             "inverted": False,
         }
-        response_json = get_response_json(self.client, temporal_filter=temporal_filter)
+        query = {"time-filter": temporal_filter}
+        response_json = get_response_json(self.client, query=query)
         self.assertEqual(response_json["results"]["hits"]["total"]["value"], 0)
 
     def test_temporal_only_search_8(self):
@@ -365,7 +372,8 @@ def test_temporal_only_search_8(self):
             "dateNodeId": "",
             "inverted": True,
         }
-        response_json = get_response_json(self.client, temporal_filter=temporal_filter)
+        query = {"time-filter": temporal_filter}
+        response_json = get_response_json(self.client, query=query)
         self.assertEqual(response_json["results"]["hits"]["total"]["value"], 3)
         self.assertCountEqual(
             extract_pks(response_json),
@@ -388,7 +396,8 @@ def test_temporal_only_search_9(self):
             "dateNodeId": "",
             "inverted": False,
         }
-        response_json = get_response_json(self.client, temporal_filter=temporal_filter)
+        query = {"time-filter": temporal_filter}
+        response_json = get_response_json(self.client, query=query)
         self.assertEqual(response_json["results"]["hits"]["total"]["value"], 3)
         self.assertCountEqual(
             extract_pks(response_json),
@@ -411,7 +420,8 @@ def test_temporal_only_search_10(self):
             "dateNodeId": "",
             "inverted": True,
         }
-        response_json = get_response_json(self.client, temporal_filter=temporal_filter)
+        query = {"time-filter": temporal_filter}
+        response_json = get_response_json(self.client, query=query)
         self.assertEqual(response_json["results"]["hits"]["total"]["value"], 0)
 
     def test_temporal_only_search_11(self):
@@ -426,7 +436,8 @@ def test_temporal_only_search_11(self):
             "dateNodeId": "",
             "inverted": True,
         }
-        response_json = get_response_json(self.client, temporal_filter=temporal_filter)
+        query = {"time-filter": temporal_filter}
+        response_json = get_response_json(self.client, query=query)
         self.assertEqual(response_json["results"]["hits"]["total"]["value"], 2)
         self.assertCountEqual(
             extract_pks(response_json),
@@ -459,9 +470,8 @@ def test_temporal_and_term_search_1(self):
                 "inverted": False,
             }
         ]
-        response_json = get_response_json(
-            self.client, temporal_filter=temporal_filter, term_filter=term_filter
-        )
+        query = {"time-filter": temporal_filter, "term-filter": term_filter}
+        response_json = get_response_json(self.client, query=query)
         self.assertEqual(response_json["results"]["hits"]["total"]["value"], 1)
         self.assertCountEqual(extract_pks(response_json), [str(self.date_resource.pk)])
 
@@ -488,9 +498,8 @@ def test_temporal_and_term_search_2(self):
                 "inverted": False,
             }
         ]
-        response_json = get_response_json(
-            self.client, temporal_filter=temporal_filter, term_filter=term_filter
-        )
+        query = {"time-filter": temporal_filter, "term-filter": term_filter}
+        response_json = get_response_json(self.client, query=query)
         self.assertEqual(response_json["results"]["hits"]["total"]["value"], 0)
 
     def test_term_search_1(self):
@@ -510,7 +519,8 @@ def test_term_search_1(self):
                 "inverted": False,
             }
         ]
-        response_json = get_response_json(self.client, term_filter=term_filter)
+        query = {"term-filter": term_filter}
+        response_json = get_response_json(self.client, query=query)
         self.assertEqual(response_json["results"]["hits"]["total"]["value"], 2)
         self.assertCountEqual(
             extract_pks(response_json),
@@ -534,7 +544,8 @@ def test_term_search_2(self):
                 "inverted": True,
             }
         ]
-        response_json = get_response_json(self.client, term_filter=term_filter)
+        query = {"term-filter": term_filter}
+        response_json = get_response_json(self.client, query=query)
         self.assertEqual(response_json["results"]["hits"]["total"]["value"], 2)
         self.assertCountEqual(
             extract_pks(response_json),
@@ -562,7 +573,8 @@ def test_term_search_3(self):
                 "inverted": False,
             }
         ]
-        response_json = get_response_json(self.client, term_filter=term_filter)
+        query = {"term-filter": term_filter}
+        response_json = get_response_json(self.client, query=query)
         self.assertEqual(response_json["results"]["hits"]["total"]["value"], 1)
         self.assertCountEqual(extract_pks(response_json), [str(self.name_resource.pk)])
 
@@ -584,7 +596,8 @@ def test_term_search_4(self):
                 "inverted": True,
             }
         ]
-        response_json = get_response_json(self.client, term_filter=term_filter)
+        query = {"term-filter": term_filter}
+        response_json = get_response_json(self.client, query=query)
         self.assertEqual(response_json["results"]["hits"]["total"]["value"], 3)
         self.assertCountEqual(
             extract_pks(response_json),
@@ -629,7 +642,8 @@ def test_term_search_on_resource_instance_id(self):
             }
         ]
 
-        response_json = get_response_json(self.client, term_filter=term_filter)
+        query = {"term-filter": term_filter}
+        response_json = get_response_json(self.client, query=query)
         self.assertEqual(response_json["results"]["hits"]["total"]["value"], 1)
 
     def test_concept_search_1(self):
@@ -649,7 +663,8 @@ def test_concept_search_1(self):
                 "inverted": False,
             }
         ]
-        response_json = get_response_json(self.client, term_filter=term_filter)
+        query = {"term-filter": term_filter}
+        response_json = get_response_json(self.client, query=query)
         self.assertEqual(response_json["results"]["hits"]["total"]["value"], 2)
         self.assertCountEqual(
             extract_pks(response_json),
@@ -676,7 +691,8 @@ def test_concept_search_2(self):
                 "inverted": True,
             }
         ]
-        response_json = get_response_json(self.client, term_filter=term_filter)
+        query = {"term-filter": term_filter}
+        response_json = get_response_json(self.client, query=query)
         self.assertEqual(response_json["results"]["hits"]["total"]["value"], 2)
         self.assertCountEqual(
             extract_pks(response_json),
@@ -706,7 +722,8 @@ def test_spatial_search_1(self):
                 }
             ],
         }
-        response_json = get_response_json(self.client, spatial_filter=spatial_filter)
+        query = {"map-filter": spatial_filter}
+        response_json = get_response_json(self.client, query=query)
         self.assertEqual(response_json["results"]["hits"]["total"]["value"], 1)
         self.assertCountEqual(extract_pks(response_json), [str(self.name_resource.pk)])
 
@@ -733,7 +750,8 @@ def test_spatial_search_2(self):
                 }
             ],
         }
-        response_json = get_response_json(self.client, spatial_filter=spatial_filter)
+        query = {"map-filter": spatial_filter}
+        response_json = get_response_json(self.client, query=query)
         self.assertEqual(response_json["results"]["hits"]["total"]["value"], 0)
         # self.assertCountEqual(extract_pks(response_json), [str(self.name_resource.pk)])
 
@@ -752,7 +770,8 @@ def test_temporal_and_permission_search_1(self):
             "dateNodeId": "",
             "inverted": False,
         }
-        response_json = get_response_json(self.client, temporal_filter=temporal_filter)
+        query = {"time-filter": temporal_filter}
+        response_json = get_response_json(self.client, query=query)
         self.assertEqual(response_json["results"]["hits"]["total"]["value"], 2)
         self.assertCountEqual(
             extract_pks(response_json),
@@ -774,7 +793,8 @@ def test_temporal_and_permission_search_2(self):
             "dateNodeId": "",
             "inverted": False,
         }
-        response_json = get_response_json(self.client, temporal_filter=temporal_filter)
+        query = {"time-filter": temporal_filter}
+        response_json = get_response_json(self.client, query=query)
         self.assertEqual(response_json["results"]["hits"]["total"]["value"], 2)
         self.assertCountEqual(
             extract_pks(response_json),
@@ -877,7 +897,8 @@ def test_custom_resource_index(self):
                 "inverted": False,
             }
         ]
-        response_json = get_response_json(self.client, term_filter=term_filter)
+        query = {"term-filter": term_filter}
+        response_json = get_response_json(self.client, query=query)
         self.assertEqual(response_json["results"]["hits"]["total"]["value"], 1)
         term_filter = [
             {
@@ -890,7 +911,8 @@
                 "inverted": True,
             }
         ]
-        response_json = get_response_json(self.client, term_filter=term_filter)
+        query = {"term-filter": term_filter}
+        response_json = get_response_json(self.client, query=query)
         self.assertEqual(response_json["results"]["hits"]["total"]["value"], 3)
 
         term_filter = [
@@ -904,7 +926,8 @@
                 "inverted": False,
             }
         ]
-        response_json = get_response_json(self.client, term_filter=term_filter)
+        query = {"term-filter": term_filter}
+        response_json = get_response_json(self.client, query=query)
         self.assertEqual(response_json["results"]["hits"]["total"]["value"], 4)
 
         term_filter = [
@@ -918,7 +941,8 @@
                 "inverted": True,
             }
         ]
-        response_json = get_response_json(self.client, term_filter=term_filter)
+        query = {"term-filter": term_filter}
+        response_json = get_response_json(self.client, query=query)
         self.assertEqual(response_json["results"]["hits"]["total"]["value"], 0)
 
 
@@ -929,26 +953,6 @@ def extract_pks(response_json):
         ]
 
 
-def get_response_json(
-    client, temporal_filter=None, term_filter=None, spatial_filter=None, query=None
-):
-    # declared here due to mutability issues
-    query = query if query else {}
-    if temporal_filter is not None:
-        query["time-filter"] = JSONSerializer().serialize(temporal_filter)
-    if term_filter is not None:
-        query["term-filter"] = JSONSerializer().serialize(term_filter)
-    if spatial_filter is not None:
-        query["map-filter"] = JSONSerializer().serialize(spatial_filter)
-    resource_reviewer_group = Group.objects.get(name="Resource Reviewer")
-    test_user = User.objects.get(username="unpriviliged_user")
-    test_user.groups.add(resource_reviewer_group)
-    client.login(username="unpriviliged_user", password="test")
-    response = client.get("/search/resources", query)
-    response_json = json.loads(response.content)
-    return response_json
-
-
 class TestEsMappingModifier(EsMappingModifier):
     counter = 1
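
Note on the refactor: a minimal sketch of the calling contract for the helper factored out of append_dsl in arches/app/search/components/map_filter.py above. The literal geometry and buffer values are illustrative only, and _buffer() still needs a live database connection, so treat this as a shape-of-the-call example rather than something runnable in isolation.

    # Illustrative sketch only -- mirrors how append_dsl() now calls the extracted helper.
    from arches.app.search.elasticsearch_dsl_builder import Bool
    from arches.app.search.components.map_filter import add_geoshape_query_to_search_query

    search_query = Bool()
    feature_geom = {"type": "Point", "coordinates": [-118.22687435396205, 34.04498354472949]}
    feature_properties = {"buffer": {"width": "100", "unit": "ft"}, "inverted": False}

    # The helper buffers the geometry, adds the Nested "geometries" filter
    # (respecting nodegroup permissions and provisional-edit settings) to
    # search_query in place, and returns the buffered GeoJSON geometry that
    # append_dsl stores under the component's "search_buffer" key.
    buffered_geom = add_geoshape_query_to_search_query(
        feature_geom,
        feature_properties,
        permitted_nodegroups=[],  # nodegroup ids the requesting user may search
        include_provisional=False,
        search_query=search_query,
    )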