From a81026e1086b53cedfbabe912684a1c9da2ba2db Mon Sep 17 00:00:00 2001 From: Emily Soth Date: Tue, 1 Aug 2023 15:53:43 -0700 Subject: [PATCH 01/13] elevate warnings to errors in pytest, except deprecation warnings #334 --- pyproject.toml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/pyproject.toml b/pyproject.toml index 7be33192b2..8313dc85aa 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -74,3 +74,6 @@ no_lines_before = 'LOCALFOLDER' [tool.setuptools.packages.find] where = ["src"] + +[tool.pytest.ini_options] +filterwarnings = ["error", "ignore::DeprecationWarning"] From 325d3fe17cbede7065f2fc26eeaf2fe2e4ca87b5 Mon Sep 17 00:00:00 2001 From: Emily Soth Date: Tue, 1 Aug 2023 15:54:16 -0700 Subject: [PATCH 02/13] remove warning filter for swy #334 --- .../seasonal_water_yield.py | 21 ------------------- 1 file changed, 21 deletions(-) diff --git a/src/natcap/invest/seasonal_water_yield/seasonal_water_yield.py b/src/natcap/invest/seasonal_water_yield/seasonal_water_yield.py index d7528cd3cd..ac6ac2df25 100644 --- a/src/natcap/invest/seasonal_water_yield/seasonal_water_yield.py +++ b/src/natcap/invest/seasonal_water_yield/seasonal_water_yield.py @@ -535,27 +535,6 @@ def execute(args): Returns: None. """ - # This upgrades warnings to exceptions across this model. - # I found this useful to catch all kinds of weird inputs to the model - # during debugging and think it makes sense to have in production of this - # model too. - try: - warnings.filterwarnings('error') - _execute(args) - finally: - warnings.resetwarnings() - - -def _execute(args): - """Execute the seasonal water yield model. - - Args: - See the parameters for - `natcap.invest.seasonal_water_yield.seasonal_wateryield.execute`. - - Returns: - None - """ LOGGER.info('prepare and test inputs for common errors') # fail early on a missing required rain events table From cf0f862470da736a4b20453d2788c14969038e15 Mon Sep 17 00:00:00 2001 From: Emily Soth Date: Tue, 1 Aug 2023 15:56:09 -0700 Subject: [PATCH 03/13] undo change from wrong branch --- workbench/src/main/main.js | 15 +-------------- 1 file changed, 1 insertion(+), 14 deletions(-) diff --git a/workbench/src/main/main.js b/workbench/src/main/main.js index da45528d50..b4a779c297 100644 --- a/workbench/src/main/main.js +++ b/workbench/src/main/main.js @@ -62,7 +62,6 @@ if (!process.env.PORT) { let mainWindow; let splashScreen; let flaskSubprocess; -let forceQuit = false; export function destroyWindow() { mainWindow = null; @@ -134,15 +133,6 @@ export const createWindow = async () => { logger.error(details); }); - mainWindow.on('close', (event) => { - // 'close' is triggered by the red traffic light button on mac - // override this behavior and just minimize, - // unless we're actually quitting the app - if (process.platform === 'darwin' & !forceQuit) { - event.preventDefault(); - } - }); - mainWindow.on('closed', () => { mainWindow = null; }); @@ -194,20 +184,17 @@ export function main() { createWindow(); } }); - app.on('window-all-closed', async (event) => { + app.on('window-all-closed', async () => { // On OS X it is common for applications and their menu bar // to stay active until the user quits explicitly with Cmd + Q - event.preventDefault(); if (process.platform !== 'darwin') { app.quit(); } }); - let shuttingDown = false; app.on('before-quit', async (event) => { // prevent quitting until after we're done with cleanup, // then programatically quit - forceQuit = true; if (shuttingDown) { return; } event.preventDefault(); shuttingDown = true; From 
060e31b7eaaafefdb745d589486b446e8ef04d32 Mon Sep 17 00:00:00 2001 From: Emily Soth Date: Wed, 2 Aug 2023 09:30:57 -0700 Subject: [PATCH 04/13] fix bugs revealed by elevating warnings #334 --- src/natcap/invest/datastack.py | 5 +- .../invest/forest_carbon_edge_effect.py | 5 +- .../recreation/out_of_core_quadtree.pyx | 3 +- .../invest/recreation/recmodel_client.py | 7 +- .../invest/recreation/recmodel_server.py | 64 ++++++++++--------- .../recreation/recmodel_workspace_fetcher.py | 6 +- tests/test_datastack.py | 48 ++++++++------ tests/test_recreation.py | 24 ++++--- tests/test_usage_logging.py | 6 +- tests/test_wind_energy.py | 6 +- 10 files changed, 99 insertions(+), 75 deletions(-) diff --git a/src/natcap/invest/datastack.py b/src/natcap/invest/datastack.py index 06b7b9a44b..babebf6d71 100644 --- a/src/natcap/invest/datastack.py +++ b/src/natcap/invest/datastack.py @@ -489,8 +489,9 @@ def extract_datastack_archive(datastack_path, dest_dir_path): _tarfile_safe_extract(datastack_path, dest_dir_path) # get the arguments dictionary - arguments_dict = json.load(open( - os.path.join(dest_dir_path, DATASTACK_PARAMETER_FILENAME)))['args'] + with open(os.path.join( + dest_dir_path, DATASTACK_PARAMETER_FILENAME)) as datastack_file: + arguments_dict = json.load(datastack_file)['args'] def _rewrite_paths(args_param): """Converts paths in `args_param` to paths in `dest_dir_path.""" diff --git a/src/natcap/invest/forest_carbon_edge_effect.py b/src/natcap/invest/forest_carbon_edge_effect.py index e38ead6eed..a85aa036f9 100644 --- a/src/natcap/invest/forest_carbon_edge_effect.py +++ b/src/natcap/invest/forest_carbon_edge_effect.py @@ -865,8 +865,9 @@ def _calculate_tropical_forest_edge_carbon_map( # kd_tree.data.shape: (d, 2) # theta_model_parameters.shape: (d, 3) # method_model_parameter.shape: (d,) - kd_tree, theta_model_parameters, method_model_parameter = pickle.load( - open(spatial_index_pickle_path, 'rb')) + with open(spatial_index_pickle_path, 'rb') as spatial_index_pickle_file: + kd_tree, theta_model_parameters, method_model_parameter = pickle.load( + spatial_index_pickle_file) # create output raster and open band for writing # fill nodata, in case we skip entire memory blocks that are non-forest diff --git a/src/natcap/invest/recreation/out_of_core_quadtree.pyx b/src/natcap/invest/recreation/out_of_core_quadtree.pyx index 735a836e39..89e5a2f950 100644 --- a/src/natcap/invest/recreation/out_of_core_quadtree.pyx +++ b/src/natcap/invest/recreation/out_of_core_quadtree.pyx @@ -82,7 +82,8 @@ class OutOfCoreQuadTree(object): """Flush any cached data to disk.""" self.node_data_manager.flush() if self.pickle_filename is not None: - pickle.dump(self, open(self.pickle_filename, 'wb')) + with open(self.pickle_filename, 'wb') as pickle_file: + pickle.dump(self, pickle_file) def build_node_shapes(self, ogr_polygon_layer): """Add features to an ogr.Layer to visualize quadtree segmentation. 
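The hunks in this patch all share one shape: a bare open() whose file object is never explicitly closed is replaced with a context manager. With warnings elevated to errors (patch 01), a file handle that is only closed when the garbage collector finalizes it can emit a ResourceWarning, which now fails the test that triggered it. A minimal sketch of the before/after pattern (the filename here is illustrative, not from the codebase):

    import json

    # Before: the handle is closed only at garbage collection, which can
    # emit a ResourceWarning -- now elevated to a test failure.
    args = json.load(open('parameters.json'))['args']

    # After: the with-block closes the handle deterministically, so the
    # warning never fires.
    with open('parameters.json') as parameters_file:
        args = json.load(parameters_file)['args']
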
diff --git a/src/natcap/invest/recreation/recmodel_client.py b/src/natcap/invest/recreation/recmodel_client.py index 034b24a3fc..0a931b4aeb 100644 --- a/src/natcap/invest/recreation/recmodel_client.py +++ b/src/natcap/invest/recreation/recmodel_client.py @@ -653,7 +653,8 @@ def _retrieve_photo_user_days( aoizip.write(filename, os.path.basename(filename)) # convert shapefile to binary string for serialization - zip_file_binary = open(compressed_aoi_path, 'rb').read() + with open(compressed_aoi_path, 'rb') as aoifile: + zip_file_binary = aoifile.read() # transfer zipped file to server start_time = time.time() @@ -667,8 +668,8 @@ def _retrieve_photo_user_days( f'workspace_id: {workspace_id}') # unpack result - open(compressed_pud_path, 'wb').write( - result_zip_file_binary) + with open(compressed_pud_path, 'wb') as pud_file: + pud_file.write(result_zip_file_binary) temporary_output_dir = tempfile.mkdtemp(dir=output_dir) zipfile.ZipFile(compressed_pud_path, 'r').extractall( temporary_output_dir) diff --git a/src/natcap/invest/recreation/recmodel_server.py b/src/natcap/invest/recreation/recmodel_server.py index b2582df935..302c41599f 100644 --- a/src/natcap/invest/recreation/recmodel_server.py +++ b/src/natcap/invest/recreation/recmodel_server.py @@ -176,7 +176,8 @@ def fetch_workspace_aoi(self, workspace_id): # pylint: disable=no-self-use workspace_path = os.path.join(self.cache_workspace, workspace_id) out_zip_file_path = os.path.join( workspace_path, str('server_in')+'.zip') - return open(out_zip_file_path, 'rb').read() + with open(out_zip_file_path, 'rb') as out_zipfile: + return out_zipfile.read() @_try_except_wrapper("exception in calc_photo_user_days_in_aoi") def calc_photo_user_days_in_aoi( @@ -239,7 +240,8 @@ def calc_photo_user_days_in_aoi( LOGGER.info( 'calc user days complete sending binary back on %s', workspace_path) - return open(aoi_pud_archive_path, 'rb').read(), workspace_id + with open(aoi_pud_archive_path, 'rb') as aoi_pud_archive: + return aoi_pud_archive.read(), workspace_id def _calc_aggregated_points_in_aoi( self, aoi_path, workspace_path, date_range, out_vector_filename): @@ -268,7 +270,8 @@ def _calc_aggregated_points_in_aoi( pud_poly_feature_queue = multiprocessing.Queue(4) n_polytest_processes = multiprocessing.cpu_count() - global_qt = pickle.load(open(self.qt_pickle_filename, 'rb')) + with open(self.qt_pickle_filename, 'rb') as qt_pickle: + global_qt = pickle.load(qt_pickle) aoi_layer = aoi_vector.GetLayer() aoi_extent = aoi_layer.GetExtent() aoi_ref = aoi_layer.GetSpatialRef() @@ -422,39 +425,39 @@ def _calc_aggregated_points_in_aoi( n_poly_tested = 0 monthly_table_path = os.path.join(workspace_path, 'monthly_table.csv') - monthly_table = open(monthly_table_path, 'w') date_range_year = [ date.tolist().timetuple().tm_year for date in date_range] table_headers = [ '%s-%s' % (year, month) for year in range( int(date_range_year[0]), int(date_range_year[1])+1) for month in range(1, 13)] - monthly_table.write('poly_id,' + ','.join(table_headers) + '\n') + with open(monthly_table_path, 'w') as monthly_table: + monthly_table.write('poly_id,' + ','.join(table_headers) + '\n') - while True: - result_tuple = pud_poly_feature_queue.get() - n_poly_tested += 1 - if result_tuple == 'STOP': - n_processes_alive -= 1 - if n_processes_alive == 0: - break - continue - last_time = recmodel_client.delay_op( - last_time, LOGGER_TIME_DELAY, lambda: LOGGER.info( - '%.2f%% of polygons tested', 100 * float(n_poly_tested) / - pud_aoi_layer.GetFeatureCount())) - poly_id, pud_list, 
pud_monthly_set = result_tuple - poly_feat = pud_aoi_layer.GetFeature(poly_id) - for pud_index, pud_id in enumerate(pud_id_suffix_list): - poly_feat.SetField('PUD_%s' % pud_id, pud_list[pud_index]) - pud_aoi_layer.SetFeature(poly_feat) - - line = '%s,' % poly_id - line += ( - ",".join(['%s' % len(pud_monthly_set[header]) - for header in table_headers])) - line += '\n' # final newline - monthly_table.write(line) + while True: + result_tuple = pud_poly_feature_queue.get() + n_poly_tested += 1 + if result_tuple == 'STOP': + n_processes_alive -= 1 + if n_processes_alive == 0: + break + continue + last_time = recmodel_client.delay_op( + last_time, LOGGER_TIME_DELAY, lambda: LOGGER.info( + '%.2f%% of polygons tested', 100 * float(n_poly_tested) / + pud_aoi_layer.GetFeatureCount())) + poly_id, pud_list, pud_monthly_set = result_tuple + poly_feat = pud_aoi_layer.GetFeature(poly_id) + for pud_index, pud_id in enumerate(pud_id_suffix_list): + poly_feat.SetField('PUD_%s' % pud_id, pud_list[pud_index]) + pud_aoi_layer.SetFeature(poly_feat) + + line = '%s,' % poly_id + line += ( + ",".join(['%s' % len(pud_monthly_set[header]) + for header in table_headers])) + line += '\n' # final newline + monthly_table.write(line) LOGGER.info('done with polygon test, syncing to disk') pud_aoi_layer = None @@ -714,7 +717,8 @@ def _calc_poly_pud( """ start_time = time.time() LOGGER.info('in a _calc_poly_process, loading %s', local_qt_pickle_path) - local_qt = pickle.load(open(local_qt_pickle_path, 'rb')) + with open(local_qt_pickle_path, 'rb') as qt_pickle: + local_qt = pickle.load(qt_pickle) LOGGER.info('local qt load took %.2fs', time.time() - start_time) aoi_vector = gdal.OpenEx(aoi_path, gdal.OF_VECTOR) diff --git a/src/natcap/invest/recreation/recmodel_workspace_fetcher.py b/src/natcap/invest/recreation/recmodel_workspace_fetcher.py index 55a6c28ff2..53ecc050b5 100644 --- a/src/natcap/invest/recreation/recmodel_workspace_fetcher.py +++ b/src/natcap/invest/recreation/recmodel_workspace_fetcher.py @@ -50,7 +50,7 @@ def execute(args): args['workspace_id']) # unpack result - open(os.path.join( - output_dir, '%s.zip' % args['workspace_id']), 'wb').write( - workspace_aoi_binary) + with open(os.path.join( + output_dir, '%s.zip' % args['workspace_id']), 'wb') as file: + file.write(workspace_aoi_binary) LOGGER.info("fetched aoi") diff --git a/tests/test_datastack.py b/tests/test_datastack.py index dbf30ec566..bd83fa3daf 100644 --- a/tests/test_datastack.py +++ b/tests/test_datastack.py @@ -132,11 +132,11 @@ def test_collect_simple_parameters(self): self.assertEqual(len(os.listdir(out_directory)), 3) # We expect the workspace to be excluded from the resulting args dict. 
- self.assertEqual( - json.load(open( - os.path.join(out_directory, - datastack.DATASTACK_PARAMETER_FILENAME)))['args'], - {'a': 1, 'b': 'hello there', 'c': 'plain bytestring', 'd': ''}) + with open(os.path.join( + out_directory, datastack.DATASTACK_PARAMETER_FILENAME)) as file: + self.assertEqual( + json.load(file)['args'], + {'a': 1, 'b': 'hello there', 'c': 'plain bytestring', 'd': ''}) def test_collect_rasters(self): """Datastack: test collect GDAL rasters.""" @@ -158,10 +158,10 @@ def test_collect_rasters(self): out_directory = os.path.join(self.workspace, 'extracted_archive') datastack._tarfile_safe_extract(archive_path, out_directory) - archived_params = json.load( - open(os.path.join( + with open(os.path.join( out_directory, - datastack.DATASTACK_PARAMETER_FILENAME)))['args'] + datastack.DATASTACK_PARAMETER_FILENAME)) as datastack_file: + archived_params = json.load(datastack_file)['args'] self.assertEqual(len(archived_params), 1) model_array = pygeoprocessing.raster_to_numpy_array( @@ -200,10 +200,10 @@ def test_collect_vectors(self): out_directory = os.path.join(dest_dir, 'extracted_archive') datastack._tarfile_safe_extract(archive_path, out_directory) - archived_params = json.load( - open(os.path.join( + with open(os.path.join( out_directory, - datastack.DATASTACK_PARAMETER_FILENAME)))['args'] + datastack.DATASTACK_PARAMETER_FILENAME)) as datastack_file: + archived_params = json.load(datastack_file)['args'] _assert_vectors_equal( params['vector'], os.path.join(out_directory, archived_params['vector'])) @@ -242,9 +242,10 @@ def test_nonspatial_files(self): out_directory = os.path.join(self.workspace, 'extracted_archive') datastack._tarfile_safe_extract(archive_path, out_directory) - archived_params = json.load( - open(os.path.join(out_directory, - datastack.DATASTACK_PARAMETER_FILENAME)))['args'] + with open(os.path.join( + out_directory, + datastack.DATASTACK_PARAMETER_FILENAME)) as datastack_file: + archived_params = json.load(datastack_file)['args'] self.assertTrue(filecmp.cmp( params['some_file'], os.path.join(out_directory, archived_params['some_file']), @@ -279,9 +280,10 @@ def test_duplicate_filepaths(self): out_directory = os.path.join(self.workspace, 'extracted_archive') datastack._tarfile_safe_extract(archive_path, out_directory) - archived_params = json.load( - open(os.path.join(out_directory, - datastack.DATASTACK_PARAMETER_FILENAME)))['args'] + with open(os.path.join( + out_directory, + datastack.DATASTACK_PARAMETER_FILENAME)) as datastack_file: + archived_params = json.load(datastack_file)['args'] # Assert that the archived 'foo' and 'bar' params point to the same # file. 
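These test fixes follow from the filterwarnings setting introduced in patch 01. For reference, the warnings-module equivalent of that pytest configuration, as a standalone sketch:

    import warnings

    # Mirrors pyproject.toml's
    #     filterwarnings = ["error", "ignore::DeprecationWarning"]
    # Filters added later take precedence, so DeprecationWarning stays
    # exempt from the blanket error filter.
    warnings.filterwarnings('error')
    warnings.filterwarnings('ignore', category=DeprecationWarning)

    try:
        warnings.warn('suspicious input', UserWarning)
    except UserWarning as error:
        print('elevated to an exception:', error)

    warnings.warn('legacy API', DeprecationWarning)  # ignored, does not raise
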
@@ -473,7 +475,8 @@ def test_relative_parameter_set(self): # make the sample data so filepaths are interpreted correctly for file_base in ('foo', 'bar', 'file1', 'file2'): test_filepath = os.path.join(self.workspace, file_base + '.txt') - open(test_filepath, 'w').write('hello!') + with open(test_filepath, 'w') as file: + file.write('hello!') os.makedirs(params['data_dir']) # Write the parameter set @@ -481,7 +484,8 @@ def test_relative_parameter_set(self): params, modelname, paramset_filename, relative=True) # Check that the written parameter set file contains relative paths - raw_args = json.load(open(paramset_filename))['args'] + with open(paramset_filename) as param_file: + raw_args = json.load(param_file)['args'] self.assertEqual(raw_args['foo'], 'foo.txt') self.assertEqual(raw_args['bar'], 'foo.txt') self.assertEqual(raw_args['file_list'], ['file1.txt', 'file2.txt']) @@ -517,7 +521,8 @@ def test_relative_parameter_set_windows(self): # make the sample data so filepaths are interpreted correctly for base_name in ('foo', 'bar', 'doh'): - open(params[base_name], 'w').write('hello!') + with open(params[base_name], 'w') as file: + file.write('hello!') os.makedirs(params['data_dir']) # Write the parameter set @@ -525,7 +530,8 @@ def test_relative_parameter_set_windows(self): params, modelname, paramset_filename, relative=True) # Check that the written parameter set file contains relative paths - raw_args = json.load(open(paramset_filename))['args'] + with open(paramset_filename) as param_file: + raw_args = json.load(param_file)['args'] self.assertEqual(raw_args['foo'], 'foo.txt') # Expecting linux style path separators for Windows self.assertEqual(raw_args['bar'], 'inter_dir/bar.txt') diff --git a/tests/test_recreation.py b/tests/test_recreation.py index 1d6d62e486..e1083ddca1 100644 --- a/tests/test_recreation.py +++ b/tests/test_recreation.py @@ -236,7 +236,8 @@ def test_workspace_fetcher(self): myzip.write(filename, os.path.basename(filename)) # convert shapefile to binary string for serialization - zip_file_binary = open(aoi_archive_path, 'rb').read() + with open(aoi_archive_path, 'rb') as file: + zip_file_binary = file.read() date_range = (('2005-01-01'), ('2014-12-31')) out_vector_filename = 'test_aoi_for_subset_pud.shp' @@ -286,7 +287,8 @@ def test_local_aggregate_points(self): myzip.write(filename, os.path.basename(filename)) # convert shapefile to binary string for serialization - zip_file_binary = open(aoi_archive_path, 'rb').read() + with open(aoi_archive_path, 'rb') as file: + zip_file_binary = file.read() # transfer zipped file to server date_range = (('2005-01-01'), ('2014-12-31')) @@ -297,7 +299,8 @@ def test_local_aggregate_points(self): # unpack result result_zip_path = os.path.join(self.workspace_dir, 'pud_result.zip') - open(result_zip_path, 'wb').write(zip_result) + with open(result_zip_path, 'wb') as file: + file.write(zip_result) zipfile.ZipFile(result_zip_path, 'r').extractall(self.workspace_dir) result_vector_path = os.path.join( @@ -312,7 +315,8 @@ def test_local_aggregate_points(self): out_workspace_dir = os.path.join(self.workspace_dir, 'workspace_zip') os.makedirs(out_workspace_dir) workspace_zip_path = os.path.join(out_workspace_dir, 'workspace.zip') - open(workspace_zip_path, 'wb').write(workspace_zip_binary) + with open(workspace_zip_path, 'wb') as file: + file.write(workspace_zip_binary) zipfile.ZipFile(workspace_zip_path, 'r').extractall(out_workspace_dir) utils._assert_vectors_equal( aoi_path, @@ -664,11 +668,13 @@ def test_local_aoi(self): 
self.recreation_server._calc_aggregated_points_in_aoi( aoi_path, self.workspace_dir, date_range, out_vector_filename) - output_lines = open(os.path.join( - self.workspace_dir, 'monthly_table.csv'), 'r').readlines() - expected_lines = open(os.path.join( - REGRESSION_DATA, 'expected_monthly_table_for_subset.csv'), - 'r').readlines() + with open(os.path.join( + self.workspace_dir, 'monthly_table.csv'), 'r') as file: + output_lines = file.readlines() + with open(os.path.join( + REGRESSION_DATA, 'expected_monthly_table_for_subset.csv'), + 'r') as file: + expected_lines = file.readlines() if output_lines != expected_lines: raise ValueError( diff --git a/tests/test_usage_logging.py b/tests/test_usage_logging.py index f367450935..98b0d93d94 100644 --- a/tests/test_usage_logging.py +++ b/tests/test_usage_logging.py @@ -84,5 +84,7 @@ def test_bounding_boxes(self): bb_union, [-87.237771, -85.526132, -87.23321, -85.526491]) # Verify that no errors were raised in calculating the bounding boxes. - self.assertTrue('ERROR' not in open(output_logfile).read(), - 'Exception logged when there should not have been.') + with open(output_logfile) as logfile: + self.assertTrue( + 'ERROR' not in logfile.read(), + 'Exception logged when there should not have been.') diff --git a/tests/test_wind_energy.py b/tests/test_wind_energy.py index d9b788c47f..678d1478a8 100644 --- a/tests/test_wind_energy.py +++ b/tests/test_wind_energy.py @@ -202,7 +202,8 @@ def test_wind_data_to_point_vector(self): } wind_data_pickle_path = os.path.join( self.workspace_dir, 'wind_data.pickle') - pickle.dump(wind_data, open(wind_data_pickle_path, 'wb')) + with open(wind_data_pickle_path, 'wb') as file: + pickle.dump(wind_data, file) layer_name = "datatopoint" out_path = os.path.join(self.workspace_dir, 'datatopoint.shp') @@ -253,7 +254,8 @@ def test_wind_data_to_point_vector_360(self): } wind_data_pickle_path = os.path.join( self.workspace_dir, 'wind_data.pickle') - pickle.dump(wind_data, open(wind_data_pickle_path, 'wb')) + with open(wind_data_pickle_path, 'wb') as file: + pickle.dump(wind_data, file) layer_name = "datatopoint" out_path = os.path.join(self.workspace_dir, 'datatopoint.shp') From e3efb52c492272efddeabd9f61b019a4d4b00774 Mon Sep 17 00:00:00 2001 From: Emily Soth Date: Mon, 7 Aug 2023 10:49:52 -0700 Subject: [PATCH 05/13] avoid overflow warning in ndr: use constant float32 nodata value #334 --- src/natcap/invest/ndr/ndr.py | 8 ++------ 1 file changed, 2 insertions(+), 6 deletions(-) diff --git a/src/natcap/invest/ndr/ndr.py b/src/natcap/invest/ndr/ndr.py index fd8d39353b..0e9853612d 100644 --- a/src/natcap/invest/ndr/ndr.py +++ b/src/natcap/invest/ndr/ndr.py @@ -1149,11 +1149,12 @@ def _normalize_raster(base_raster_path_band, target_normalized_raster_path): value_mean = value_sum if value_count > 0.0: value_mean /= value_count + target_nodata = float(numpy.finfo(numpy.float32).min) def _normalize_raster_op(array): """Divide values by mean.""" result = numpy.empty(array.shape, dtype=numpy.float32) - result[:] = numpy.float32(base_nodata) + result[:] = target_nodata valid_mask = slice(None) if base_nodata is not None: @@ -1163,11 +1164,6 @@ def _normalize_raster_op(array): result[valid_mask] /= value_mean return result - # It's possible for base_nodata to extend outside what can be represented - # in a float32, yet GDAL expects a python float. Casting to numpy.float32 - # and back to a python float allows for the nodata value to reflect the - # actual nodata pixel values. 
- target_nodata = float(numpy.float32(base_nodata)) pygeoprocessing.raster_calculator( [base_raster_path_band], _normalize_raster_op, target_normalized_raster_path, gdal.GDT_Float32, From 8caccc3dcc44d98ff30c847d9d3cadca8e9c9a50 Mon Sep 17 00:00:00 2001 From: Emily Soth Date: Mon, 7 Aug 2023 12:45:53 -0700 Subject: [PATCH 06/13] resolve more warnings #334 --- src/natcap/invest/coastal_vulnerability.py | 2 +- tests/test_cli.py | 6 +++--- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/src/natcap/invest/coastal_vulnerability.py b/src/natcap/invest/coastal_vulnerability.py index 7ce14522ba..8d4b4b78c1 100644 --- a/src/natcap/invest/coastal_vulnerability.py +++ b/src/natcap/invest/coastal_vulnerability.py @@ -2840,7 +2840,7 @@ def assemble_results_and_calculate_exposure( for feature in output_layer: shore_id = feature.GetField(SHORE_ID_FIELD) # The R_hab ranks were stored in a CSV, now this dataframe: - rank = habitat_df[habitat_df[SHORE_ID_FIELD] == shore_id][R_hab_name] + rank = habitat_df[habitat_df[SHORE_ID_FIELD] == shore_id][R_hab_name].item() feature.SetField(str(R_hab_name), float(rank)) # The other variables were stored in pickles, now this dict: for fieldname in final_values_dict: diff --git a/tests/test_cli.py b/tests/test_cli.py index 4236453330..038ce5b0f1 100644 --- a/tests/test_cli.py +++ b/tests/test_cli.py @@ -44,7 +44,8 @@ def test_run_coastal_blue_carbon_workspace_in_json(self): os.path.dirname(__file__), '..', 'data', 'invest-test-data', 'coastal_blue_carbon', 'cbc_galveston_bay.invs.json') - datastack_dict = json.load(open(parameter_set_path)) + with open(parameter_set_path) as datastack_file: + datastack_dict = json.load(datastack_file) datastack_dict['args']['workspace_dir'] = self.workspace_dir new_parameter_set_path = os.path.join( self.workspace_dir, 'paramset.invs.json') @@ -58,8 +59,7 @@ def test_run_coastal_blue_carbon_workspace_in_json(self): cli.main([ 'run', 'coastal_blue_carbon', # uses an exact modelname - '--datastack', new_parameter_set_path, - '--headless', # unused, but recognized for backwards compat + '--datastack', new_parameter_set_path ]) patched_model.assert_called_once() From f24fd891b7b29ba6cb4ec6b000d43c71e8df4cbe Mon Sep 17 00:00:00 2001 From: Emily Soth Date: Tue, 8 Aug 2023 15:53:09 -0700 Subject: [PATCH 07/13] reset default warnings filter for warning redirection test #334 --- tests/test_utils.py | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/tests/test_utils.py b/tests/test_utils.py index eb75c186e0..f43e45ae8c 100644 --- a/tests/test_utils.py +++ b/tests/test_utils.py @@ -575,13 +575,15 @@ def test_prepare_workspace(self): from natcap.invest import utils workspace = os.path.join(self.workspace, 'foo') - try: + with warnings.catch_warnings(): + # restore the warnings filter to default, overriding any + # global pytest filter. this preserves the warnings so that + # they may be redirected to the log. 
+ warnings.simplefilter('default') with utils.prepare_workspace(workspace, 'some_model'): warnings.warn('deprecated', UserWarning) gdal.Open('file should not exist') - except Warning as warning_raised: - self.fail('Warning was not captured: %s' % warning_raised) self.assertTrue(os.path.exists(workspace)) logfile_glob = glob.glob(os.path.join(workspace, '*.txt')) From 56b821031bdc59cc52b104163b1a5463d05735dd Mon Sep 17 00:00:00 2001 From: Emily Soth Date: Wed, 23 Aug 2023 16:53:37 -0700 Subject: [PATCH 08/13] rec model server tests: terminate server process after each test --- tests/test_recreation.py | 441 ++++++++++++++++++--------------------- 1 file changed, 200 insertions(+), 241 deletions(-) diff --git a/tests/test_recreation.py b/tests/test_recreation.py index e1083ddca1..efb66d995e 100644 --- a/tests/test_recreation.py +++ b/tests/test_recreation.py @@ -13,6 +13,7 @@ import json import queue import multiprocessing +import time import numpy from osgeo import gdal @@ -140,11 +141,13 @@ def test_basic_operation(self): file_manager.read(1234) -class TestRecServer(unittest.TestCase): - """Tests that set up local rec server on a port and call through.""" +class TestRecServerLoop(unittest.TestCase): + """Tests that use the rec server execute loop running in another process.""" def setUp(self): """Setup workspace.""" + from natcap.invest.recreation import recmodel_server + self.workspace_dir = tempfile.mkdtemp() self.resampled_data_path = os.path.join( self.workspace_dir, 'resampled_data.csv') @@ -152,78 +155,143 @@ def setUp(self): os.path.join(SAMPLE_DATA, 'sample_data.csv'), self.resampled_data_path, resample_factor=10) + # attempt to get an open port; could result in race condition but + # will be okay for a test. if this test ever fails because of port + # in use, that's probably why + sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM) + sock.bind(('', 0)) + self.port = sock.getsockname()[1] + sock.close() + sock = None + + server_args = { + 'hostname': 'localhost', + 'port': self.port, + 'raw_csv_point_data_path': self.resampled_data_path, + 'cache_workspace': self.workspace_dir, + 'min_year': 2008, + 'max_year': 2015, + 'max_points_per_node': 200, + } + + self.server_process = multiprocessing.Process( + target=recmodel_server.execute, args=(server_args,), daemon=False) + self.server_process.start() + time.sleep(5) + def tearDown(self): """Delete workspace.""" + self.server_process.terminate() shutil.rmtree(self.workspace_dir, ignore_errors=True) - def test_hashfile(self): - """Recreation test for hash of file.""" - from natcap.invest.recreation import recmodel_server - file_hash = recmodel_server._hashfile( - self.resampled_data_path, blocksize=2**20, fast_hash=False) - # The exact encoded string that is hashed is dependent on python - # version, with Python 3 including b prefix and \n suffix. - # these hashes are for [py2.7, py3.6] - self.assertIn(file_hash, ['c052e7a0a4c5e528', 'c8054b109d7a9d2a']) + def test_all_metrics_local_server(self): + """Recreation test with all but trivial predictor metrics. - def test_hashfile_fast(self): - """Recreation test for hash and fast hash of file.""" - from natcap.invest.recreation import recmodel_server - file_hash = recmodel_server._hashfile( - self.resampled_data_path, blocksize=2**20, fast_hash=True) - # we can't assert the full hash since it is dependant on the file - # last access time and we can't reliably set that in Python. 
- # instead we just check that at the very least it ends with _fast_hash - self.assertTrue(file_hash.endswith('_fast_hash')) + Executes Recreation model all the way through scenario prediction. + With this 'extra_fields_features' AOI, we also cover two edge cases: + 1) the AOI has a pre-existing field that the model wishes to create. + 2) the AOI has features only covering nodata raster predictor values. + """ + from natcap.invest.recreation import recmodel_client + args = { + 'aoi_path': os.path.join( + SAMPLE_DATA, 'andros_aoi_with_extra_fields_features.shp'), + 'compute_regression': True, + 'start_year': '2008', + 'end_year': '2014', + 'grid_aoi': False, + 'predictor_table_path': os.path.join( + SAMPLE_DATA, 'predictors_all.csv'), + 'scenario_predictor_table_path': os.path.join( + SAMPLE_DATA, 'predictors_all.csv'), + 'results_suffix': '', + 'workspace_dir': self.workspace_dir, + 'hostname': 'localhost', + 'port': self.port, + } + recmodel_client.execute(args) - def test_year_order(self): - """Recreation ensure that end year < start year raise ValueError.""" - from natcap.invest.recreation import recmodel_server + out_grid_vector_path = os.path.join( + args['workspace_dir'], 'predictor_data.shp') + expected_grid_vector_path = os.path.join( + REGRESSION_DATA, 'predictor_data_all_metrics.shp') + utils._assert_vectors_equal( + out_grid_vector_path, expected_grid_vector_path, 1e-3) - with self.assertRaises(ValueError): - # intentionally construct start year > end year - recmodel_server.RecModel( - self.resampled_data_path, - 2014, 2005, os.path.join(self.workspace_dir, 'server_cache')) + out_scenario_path = os.path.join( + args['workspace_dir'], 'scenario_results.shp') + expected_scenario_path = os.path.join( + REGRESSION_DATA, 'scenario_results_all_metrics.shp') + utils._assert_vectors_equal( + out_scenario_path, expected_scenario_path, 1e-3) + + def test_results_suffix_on_serverside_files(self): + """Recreation test suffix gets added to files created on server.""" + from natcap.invest.recreation import recmodel_client + + args = { + 'aoi_path': os.path.join( + SAMPLE_DATA, 'andros_aoi_with_extra_fields_features.shp'), + 'compute_regression': False, + 'start_year': '2014', + 'end_year': '2015', + 'grid_aoi': False, + 'results_suffix': 'hello', + 'workspace_dir': self.workspace_dir, + 'hostname': 'localhost', + 'port': self.port, + } + recmodel_client.execute(args) + + self.assertTrue(os.path.exists( + os.path.join(args['workspace_dir'], 'monthly_table_hello.csv'))) + self.assertTrue(os.path.exists( + os.path.join(args['workspace_dir'], 'pud_results_hello.shp'))) + + @_timeout(30.0) + def test_execute_local_server(self): + """Recreation base regression test on sample data on local server. + + Executes Recreation model all the way through scenario prediction. + With this florida AOI, raster and vector predictors do not + intersect the AOI. This makes for a fast test and incidentally + covers an edge case. 
+ """ + from natcap.invest.recreation import recmodel_client + + args = { + 'aoi_path': os.path.join( + SAMPLE_DATA, 'local_recreation_aoi_florida_utm18n.shp'), + 'cell_size': 40000.0, + 'compute_regression': True, + 'start_year': '2008', + 'end_year': '2014', + 'hostname': 'localhost', + 'port': self.port, + 'grid_aoi': True, + 'grid_type': 'hexagon', + 'predictor_table_path': os.path.join( + SAMPLE_DATA, 'predictors.csv'), + 'results_suffix': '', + 'scenario_predictor_table_path': os.path.join( + SAMPLE_DATA, 'predictors_scenario.csv'), + 'workspace_dir': self.workspace_dir, + } + + recmodel_client.execute(args) + + _assert_regression_results_eq( + args['workspace_dir'], + os.path.join(REGRESSION_DATA, 'file_list_base_florida_aoi.txt'), + os.path.join(args['workspace_dir'], 'scenario_results.shp'), + os.path.join(REGRESSION_DATA, 'local_server_scenario_results.csv')) @_timeout(30.0) def test_workspace_fetcher(self): """Recreation test workspace fetcher on a local Pyro4 empty server.""" - from natcap.invest.recreation import recmodel_server from natcap.invest.recreation import recmodel_workspace_fetcher - # Attempt a few connections, we've had this test be flaky on the - # entire suite run which we suspect is because of a race condition - server_launched = False - for _ in range(3): - try: - sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM) - sock.bind(('', 0)) - port = sock.getsockname()[1] - sock.close() - sock = None - - server_args = { - 'hostname': 'localhost', - 'port': port, - 'raw_csv_point_data_path': self.resampled_data_path, - 'cache_workspace': self.workspace_dir, - 'min_year': 2010, - 'max_year': 2015, - } - - server_thread = threading.Thread( - target=recmodel_server.execute, args=(server_args,)) - server_thread.daemon = True - server_thread.start() - server_launched = True - break - except: - LOGGER.warn("Can't start server process on port %d", port) - if not server_launched: - self.fail("Server didn't start") - - path = "PYRO:natcap.invest.recreation@localhost:%s" % port + path = "PYRO:natcap.invest.recreation@localhost:%s" % self.port LOGGER.info("Local server path %s", path) recreation_server = Pyro4.Proxy(path) aoi_path = os.path.join( @@ -247,7 +315,7 @@ def test_workspace_fetcher(self): fetcher_args = { 'workspace_dir': self.workspace_dir, 'hostname': 'localhost', - 'port': port, + 'port': self.port, 'workspace_id': workspace_id, } try: @@ -269,6 +337,76 @@ def test_workspace_fetcher(self): aoi_path, os.path.join(out_workspace_dir, 'test_aoi_for_subset.shp')) + def test_results_suffix_on_serverside_files(self): + """Recreation test suffix gets added to files created on server.""" + from natcap.invest.recreation import recmodel_client + + args = { + 'aoi_path': os.path.join( + SAMPLE_DATA, 'andros_aoi_with_extra_fields_features.shp'), + 'compute_regression': False, + 'start_year': '2014', + 'end_year': '2015', + 'grid_aoi': False, + 'results_suffix': 'hello', + 'workspace_dir': self.workspace_dir, + 'hostname': 'localhost', + 'port': self.port, + } + recmodel_client.execute(args) + + self.assertTrue(os.path.exists( + os.path.join(args['workspace_dir'], 'monthly_table_hello.csv'))) + self.assertTrue(os.path.exists( + os.path.join(args['workspace_dir'], 'pud_results_hello.shp'))) + + +class TestRecServer(unittest.TestCase): + """Tests for recmodel_server functions and the RecModel object.""" + + def setUp(self): + """Setup workspace.""" + self.workspace_dir = tempfile.mkdtemp() + self.resampled_data_path = os.path.join( + self.workspace_dir, 'resampled_data.csv') 
+ _resample_csv( + os.path.join(SAMPLE_DATA, 'sample_data.csv'), + self.resampled_data_path, resample_factor=10) + + def tearDown(self): + """Delete workspace.""" + shutil.rmtree(self.workspace_dir, ignore_errors=True) + + def test_hashfile(self): + """Recreation test for hash of file.""" + from natcap.invest.recreation import recmodel_server + file_hash = recmodel_server._hashfile( + self.resampled_data_path, blocksize=2**20, fast_hash=False) + # The exact encoded string that is hashed is dependent on python + # version, with Python 3 including b prefix and \n suffix. + # these hashes are for [py2.7, py3.6] + self.assertIn(file_hash, ['c052e7a0a4c5e528', 'c8054b109d7a9d2a']) + + def test_hashfile_fast(self): + """Recreation test for hash and fast hash of file.""" + from natcap.invest.recreation import recmodel_server + file_hash = recmodel_server._hashfile( + self.resampled_data_path, blocksize=2**20, fast_hash=True) + # we can't assert the full hash since it is dependant on the file + # last access time and we can't reliably set that in Python. + # instead we just check that at the very least it ends with _fast_hash + self.assertTrue(file_hash.endswith('_fast_hash')) + + def test_year_order(self): + """Recreation ensure that end year < start year raise ValueError.""" + from natcap.invest.recreation import recmodel_server + + with self.assertRaises(ValueError): + # intentionally construct start year > end year + recmodel_server.RecModel( + self.resampled_data_path, + 2014, 2005, os.path.join(self.workspace_dir, 'server_cache')) + def test_local_aggregate_points(self): """Recreation test single threaded local AOI aggregate calculation.""" from natcap.invest.recreation import recmodel_server @@ -464,184 +602,6 @@ def test_numpy_pickling_queue(self): # assert that no warning was raised self.assertTrue(len(ws) == 0) - @_timeout(30.0) - def test_execute_local_server(self): - """Recreation base regression test on sample data on local server. - - Executes Recreation model all the way through scenario prediction. - With this florida AOI, raster and vector predictors do not - intersect the AOI. This makes for a fast test and incidentally - covers an edge case. - """ - from natcap.invest.recreation import recmodel_client - from natcap.invest.recreation import recmodel_server - - # attempt to get an open port; could result in race condition but - # will be okay for a test. 
if this test ever fails because of port - # in use, that's probably why - sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM) - sock.bind(('', 0)) - port = sock.getsockname()[1] - sock.close() - sock = None - - server_args = { - 'hostname': 'localhost', - 'port': port, - 'raw_csv_point_data_path': self.resampled_data_path, - 'cache_workspace': self.workspace_dir, - 'min_year': 2004, - 'max_year': 2015, - 'max_points_per_node': 200, - } - - server_thread = threading.Thread( - target=recmodel_server.execute, args=(server_args,)) - server_thread.daemon = True - server_thread.start() - - args = { - 'aoi_path': os.path.join( - SAMPLE_DATA, 'local_recreation_aoi_florida_utm18n.shp'), - 'cell_size': 40000.0, - 'compute_regression': True, - 'start_year': '2005', - 'end_year': '2014', - 'hostname': 'localhost', - 'port': port, - 'grid_aoi': True, - 'grid_type': 'hexagon', - 'predictor_table_path': os.path.join( - SAMPLE_DATA, 'predictors.csv'), - 'results_suffix': '', - 'scenario_predictor_table_path': os.path.join( - SAMPLE_DATA, 'predictors_scenario.csv'), - 'workspace_dir': self.workspace_dir, - } - - recmodel_client.execute(args) - - _assert_regression_results_eq( - args['workspace_dir'], - os.path.join(REGRESSION_DATA, 'file_list_base_florida_aoi.txt'), - os.path.join(args['workspace_dir'], 'scenario_results.shp'), - os.path.join(REGRESSION_DATA, 'local_server_scenario_results.csv')) - - def test_all_metrics_local_server(self): - """Recreation test with all but trivial predictor metrics. - - Executes Recreation model all the way through scenario prediction. - With this 'extra_fields_features' AOI, we also cover two edge cases: - 1) the AOI has a pre-existing field that the model wishes to create. - 2) the AOI has features only covering nodata raster predictor values. - """ - from natcap.invest.recreation import recmodel_client - from natcap.invest.recreation import recmodel_server - - # attempt to get an open port; could result in race condition but - # will be okay for a test. 
if this test ever fails because of port - # in use, that's probably why - sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM) - sock.bind(('', 0)) - port = sock.getsockname()[1] - sock.close() - sock = None - - server_args = { - 'hostname': 'localhost', - 'port': port, - 'raw_csv_point_data_path': self.resampled_data_path, - 'cache_workspace': self.workspace_dir, - 'min_year': 2008, - 'max_year': 2015, - 'max_points_per_node': 200, - } - - server_thread = threading.Thread( - target=recmodel_server.execute, args=(server_args,)) - server_thread.daemon = True - server_thread.start() - - args = { - 'aoi_path': os.path.join( - SAMPLE_DATA, 'andros_aoi_with_extra_fields_features.shp'), - 'compute_regression': True, - 'start_year': '2008', - 'end_year': '2014', - 'grid_aoi': False, - 'predictor_table_path': os.path.join( - SAMPLE_DATA, 'predictors_all.csv'), - 'scenario_predictor_table_path': os.path.join( - SAMPLE_DATA, 'predictors_all.csv'), - 'results_suffix': '', - 'workspace_dir': self.workspace_dir, - 'hostname': server_args['hostname'], - 'port': server_args['port'], - } - recmodel_client.execute(args) - - out_grid_vector_path = os.path.join( - args['workspace_dir'], 'predictor_data.shp') - expected_grid_vector_path = os.path.join( - REGRESSION_DATA, 'predictor_data_all_metrics.shp') - utils._assert_vectors_equal( - out_grid_vector_path, expected_grid_vector_path, 1e-3) - - out_scenario_path = os.path.join( - args['workspace_dir'], 'scenario_results.shp') - expected_scenario_path = os.path.join( - REGRESSION_DATA, 'scenario_results_all_metrics.shp') - utils._assert_vectors_equal( - out_scenario_path, expected_scenario_path, 1e-3) - - def test_results_suffix_on_serverside_files(self): - """Recreation test suffix gets added to files created on server.""" - from natcap.invest.recreation import recmodel_client - from natcap.invest.recreation import recmodel_server - - # attempt to get an open port; could result in race condition but - # will be okay for a test. 
if this test ever fails because of port - # in use, that's probably why - sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM) - sock.bind(('', 0)) - port = sock.getsockname()[1] - sock.close() - sock = None - - server_args = { - 'hostname': 'localhost', - 'port': port, - 'raw_csv_point_data_path': self.resampled_data_path, - 'cache_workspace': self.workspace_dir, - 'min_year': 2014, - 'max_year': 2015, - 'max_points_per_node': 200, - } - - server_thread = threading.Thread( - target=recmodel_server.execute, args=(server_args,)) - server_thread.daemon = True - server_thread.start() - - args = { - 'aoi_path': os.path.join( - SAMPLE_DATA, 'andros_aoi_with_extra_fields_features.shp'), - 'compute_regression': False, - 'start_year': '2014', - 'end_year': '2015', - 'grid_aoi': False, - 'results_suffix': 'hello', - 'workspace_dir': self.workspace_dir, - 'hostname': server_args['hostname'], - 'port': server_args['port'], - } - recmodel_client.execute(args) - - self.assertTrue(os.path.exists( - os.path.join(args['workspace_dir'], 'monthly_table_hello.csv'))) - self.assertTrue(os.path.exists( - os.path.join(args['workspace_dir'], 'pud_results_hello.shp'))) - class TestLocalRecServer(unittest.TestCase): """Tests using a local rec server.""" @@ -839,7 +799,6 @@ def test_overlapping_features_in_polygon_predictor(self): with open(predictor_target_path, 'r') as file: data = json.load(file) - print(data) actual_value = list(data.values())[0] expected_value = 1 self.assertEqual(actual_value, expected_value) From 352f0e22d5cfdbd73734c7b9d26fc7db3e8390cf Mon Sep 17 00:00:00 2001 From: Emily Soth Date: Wed, 23 Aug 2023 17:07:25 -0700 Subject: [PATCH 09/13] add comment explaining time.sleep delay --- tests/test_recreation.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/test_recreation.py b/tests/test_recreation.py index efb66d995e..1f7757dfaa 100644 --- a/tests/test_recreation.py +++ b/tests/test_recreation.py @@ -177,7 +177,7 @@ def setUp(self): self.server_process = multiprocessing.Process( target=recmodel_server.execute, args=(server_args,), daemon=False) self.server_process.start() - time.sleep(5) + time.sleep(5) # need a few seconds for the server to be ready def tearDown(self): """Delete workspace.""" From ada6e9ae193dd1081ed563e7e7526fa6d37ea043 Mon Sep 17 00:00:00 2001 From: Emily Soth Date: Wed, 23 Aug 2023 17:27:11 -0700 Subject: [PATCH 10/13] fix merge error --- workbench/src/main/main.js | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/workbench/src/main/main.js b/workbench/src/main/main.js index 95fd22489d..12e60b9cfd 100644 --- a/workbench/src/main/main.js +++ b/workbench/src/main/main.js @@ -60,6 +60,7 @@ if (!process.env.PORT) { let mainWindow; let splashScreen; let flaskSubprocess; +let forceQuit = false; export function destroyWindow() { mainWindow = null; @@ -137,7 +138,7 @@ export const createWindow = async () => { mainWindow.minimize() } }); - + mainWindow.on('closed', () => { mainWindow = null; }); @@ -194,6 +195,7 @@ export function main() { app.on('before-quit', async (event) => { // prevent quitting until after we're done with cleanup, // then programatically quit + forceQuit = true; if (shuttingDown) { return; } event.preventDefault(); shuttingDown = true; From f53f75649d7af3d14b2f6b663ad9e9b410b2194c Mon Sep 17 00:00:00 2001 From: Emily Soth Date: Wed, 6 Sep 2023 14:40:28 -0700 Subject: [PATCH 11/13] remove duplicate rec model test #334 --- tests/test_recreation.py | 23 ----------------------- 1 file changed, 23 deletions(-) diff 
--git a/tests/test_recreation.py b/tests/test_recreation.py index dc53f9745b..0bc6cc31b5 100644 --- a/tests/test_recreation.py +++ b/tests/test_recreation.py @@ -225,29 +225,6 @@ def test_all_metrics_local_server(self): utils._assert_vectors_equal( out_scenario_path, expected_scenario_path, 1e-3) - def test_results_suffix_on_serverside_files(self): - """Recreation test suffix gets added to files created on server.""" - from natcap.invest.recreation import recmodel_client - - args = { - 'aoi_path': os.path.join( - SAMPLE_DATA, 'andros_aoi_with_extra_fields_features.shp'), - 'compute_regression': False, - 'start_year': '2014', - 'end_year': '2015', - 'grid_aoi': False, - 'results_suffix': 'hello', - 'workspace_dir': self.workspace_dir, - 'hostname': 'localhost', - 'port': self.port, - } - recmodel_client.execute(args) - - self.assertTrue(os.path.exists( - os.path.join(args['workspace_dir'], 'monthly_table_hello.csv'))) - self.assertTrue(os.path.exists( - os.path.join(args['workspace_dir'], 'pud_results_hello.shp'))) - @_timeout(30.0) def test_execute_local_server(self): """Recreation base regression test on sample data on local server. From ead9c20447c384eb92189c8efa21106e3eb59e88 Mon Sep 17 00:00:00 2001 From: Emily Soth Date: Wed, 6 Sep 2023 14:41:02 -0700 Subject: [PATCH 12/13] display deprecation warnings in tests as usual #334 --- pyproject.toml | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/pyproject.toml b/pyproject.toml index 8313dc85aa..58ce1b4c62 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -76,4 +76,5 @@ no_lines_before = 'LOCALFOLDER' where = ["src"] [tool.pytest.ini_options] -filterwarnings = ["error", "ignore::DeprecationWarning"] +# raise warnings to errors, except for deprecation warnings +filterwarnings = ["error", "default::DeprecationWarning"] From 21173f045dd1ea09ff24f12dcfa2dc80a00a4bc6 Mon Sep 17 00:00:00 2001 From: Emily Soth Date: Wed, 6 Sep 2023 14:43:44 -0700 Subject: [PATCH 13/13] add comment to rec server test suggesting alternate ways to wait for server #334 --- tests/test_recreation.py | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/tests/test_recreation.py b/tests/test_recreation.py index 0bc6cc31b5..daf11a5fa9 100644 --- a/tests/test_recreation.py +++ b/tests/test_recreation.py @@ -177,7 +177,11 @@ def setUp(self): self.server_process = multiprocessing.Process( target=recmodel_server.execute, args=(server_args,), daemon=False) self.server_process.start() - time.sleep(5) # need a few seconds for the server to be ready + # need a few seconds for the server to be ready + # Dave suggested that if this turns out to be flaky, we could instead + # listen for the stdout from the server process indicating it's done + # initializing, or poll the server and retry multiple times. + time.sleep(5) def tearDown(self): """Delete workspace."""
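The comment added in this last patch sketches two alternatives to the fixed five-second sleep. A possible shape for the polling option, assuming wait_for_server is a new helper (not part of the codebase) and using Pyro4's proxy API:

    import time

    import Pyro4

    def wait_for_server(port, timeout=30.0):
        """Poll the local recreation endpoint until it accepts connections."""
        uri = 'PYRO:natcap.invest.recreation@localhost:%d' % port
        deadline = time.time() + timeout
        while True:
            try:
                # _pyroBind connects immediately rather than waiting for the
                # first remote call; it raises while the server is starting.
                Pyro4.Proxy(uri)._pyroBind()
                return
            except Pyro4.errors.CommunicationError:
                if time.time() > deadline:
                    raise
                time.sleep(0.5)

In setUp this would replace the bare time.sleep(5) with wait_for_server(self.port), trading a fixed delay for a bounded retry loop.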