diff --git a/.github/workflows/github_autotools_intel.yml b/.github/workflows/github_autotools_intel.yml index a09f8c87d3..372af083dc 100644 --- a/.github/workflows/github_autotools_intel.yml +++ b/.github/workflows/github_autotools_intel.yml @@ -17,7 +17,7 @@ jobs: CC: mpiicc FC: mpiifort CFLAGS: "-I/libs/include" - FCFLAGS: "-I/libs/include -g -traceback ${{ matrix.io-flag }}" + FCFLAGS: "-I/libs/include -g -traceback" LDFLAGS: "-L/libs/lib" TEST_VERBOSE: 1 I_MPI_FABRICS: "shm" # needed for mpi in image @@ -55,7 +55,10 @@ jobs: - name: checkout uses: actions/checkout@v4 - name: Configure - run: autoreconf -if ./configure.ac && ./configure --with-yaml + run: | + autoreconf -if ./configure.ac + export LD_LIBRARY_PATH="/libs/lib:$LD_LIBRARY_PATH" + ./configure --with-yaml ${{ matrix.io-flag }} - name: Compile run: make -j || make - name: Run test suite diff --git a/.github/workflows/github_cmake_gnu.yml b/.github/workflows/github_cmake_gnu.yml index de71dcbbdf..8512d5fa8a 100644 --- a/.github/workflows/github_cmake_gnu.yml +++ b/.github/workflows/github_cmake_gnu.yml @@ -15,10 +15,11 @@ jobs: omp-flags: [ -DOPENMP=on, -DOPENMP=off ] libyaml-flag: [ "", -DWITH_YAML=on ] io-flag: [ "", -DUSE_DEPRECATED_IO=on ] + build-type: [ "-DCMAKE_BUILD_TYPE=Release", "-DCMAKE_BUILD_TYPE=Debug" ] container: image: ghcr.io/noaa-gfdl/fms/fms-ci-rocky-gnu:13.2.0 env: - CMAKE_FLAGS: "${{ matrix.omp-flags }} ${{ matrix.io-flag }} ${{ matrix.libyaml-flag }} -D64BIT=on" + CMAKE_FLAGS: "${{ matrix.build-type }} ${{ matrix.omp-flags }} ${{ matrix.io-flag }} ${{ matrix.libyaml-flag }} -D64BIT=on" steps: - name: Checkout code uses: actions/checkout@v4 diff --git a/CHANGELOG.md b/CHANGELOG.md index 3359cc8743..8f7cbe6066 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -6,6 +6,50 @@ and this project uses `yyyy.rr[.pp]`, where `yyyy` is the year a patch is releas `rr` is a sequential release number (starting from `01`), and an optional two-digit sequential patch number (starting from `01`). +## [2024.02] - 2024-07-11 + +### Known Issues +- Diag Manager Rewrite: See [below](#20240102---2024-06-14) for known output file differences regarding the new diag manager. The new diag_manager is disabled by default, so this differences will only be present if `use_modern_diag` is set to true in the `diag_manager_nml`. +- BUILD(HDF5): HDF5 version 1.14.3 generates floating point exceptions, and will cause errors if FMS is built with FPE traps enabled. FPE traps are turned on when using the debug target in mkmf. +- GCC: version 14.1.0 is unsupported due to a bug with strings that has come up previously in earlier versions. This will be caught by the configure script, but will cause compilation errors if using other build systems. + +### Added +- TIME_INTERP: Enables use of `verbose` option in `time_interp_external2` calls from `data_override`. The option is enabled in `data_override_nml` by setting `debug_data_override` to true. (#1516) +- COUPLER: Adds optional argument to `coupler_types_send_data` routine that contains the return statuses for any calls made to the diag_manager's `send_data` routine. (#1530) +- MPP: Adds a separate error log file `warnfile..out` that only holds output from any `mpp_error` calls made during a run (#1544) +### Changed +- DIAG_MANAGER: The `diag_field_log.out` output file of all registered fields will now include the PE number of the root PE at the time of writing (ie. diag_field_log.out.0). This is to prevent overwritting the file in cases where the root PE may change. 
(#1497) + +### Fixed +- CMAKE: Fixes real kind flags being overwritten when using the Debug release type (#1532) +- HORIZ_INTERP: Fixes allocation issues when using method-specific horiz_interp_new routines (such as `horiz_interp_bilinear_new`) by setting `is_allocated` and the `method_type` during initialization for each method. (#1538) + + +### Tag Commit Hashes +- 2024.02-alpha1 5757c7813f1170efd28f5a4206395534894095b4 +- 2024.02-alpha2 5757c7813f1170efd28f5a4206395534894095b4 +- 2024.02-beta1 ca592ef8f47c246f4dc56d348d62235bd0ceaa9d +- 2024.02-beta2 ca592ef8f47c246f4dc56d348d62235bd0ceaa9d + +## [2024.01.02] - 2024-06-14 + +### Known Issues +- Diag Manager Rewrite: + - Expected output file changes: + - If the model run time is less than the output frequency, old diag_manager would write a specific value (9.96921e+36). The new diag_manager will not, so only fill values will be present. + - A `scalar_axis` dimension will not be added to scalar variables + - The `average_*` variables will no longer be added as they are non-standard conventions + - Attributes added via `diag_field_add_attributes` in the old code were saved as `NF90_FLOAT` regardless of precision, but will now be written as the precision that is passed in + - Subregional output will have a global attribute `is_subregional = True` set for non-global history files. + - The `grid_type` and `grid_tile` global attributes will no longer be added for all files, and some differences may be seen in the exact order of the `associated_files` attribute + +- DIAG_MANAGER: When using the `do_diag_field_log` nml option, the output log file may be ovewritten if using a multiple root pe's +- BUILD(HDF5): HDF5 version 1.14.3 generates floating point exceptions, and will cause errors if FMS is built with FPE traps enabled. +- GCC: version 14.1.0 is unsupported due to a bug with strings that has come up previously in earlier versions. This will be caught by the configure script, but will cause compilation errors if using other build systems. + +### Fixed +- DIAG_MANAGER: Fixes incorrect dates being appended to static file names + ## [2024.01.01] - 2024-05-30 ### Known Issues diff --git a/CMAKE_INSTRUCTIONS.md b/CMAKE_INSTRUCTIONS.md index d627f12fa1..7f4858c30a 100644 --- a/CMAKE_INSTRUCTIONS.md +++ b/CMAKE_INSTRUCTIONS.md @@ -50,17 +50,18 @@ make install ``` ### User configurable options: -By default, FMS is built without `OpenMP` and in `single precision (r4)` +By default, FMS is built without `OpenMP`, in `single precision (r4)` and delivered in static library files. The 64BIT and 32BIT precision options will build distinct libraries when enabled with the given default real size, libfms_r4 or libfms_r8. 
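For example, a build that enables the new `-DSHARED_LIBS` option together with the r8 library might be configured as sketched below; the source and install paths are placeholders:
```
# Placeholder paths; adjust to your checkout and install locations
cmake -DSHARED_LIBS=on -D64BIT=on -DCMAKE_INSTALL_PREFIX=/path/to/install /path/to/FMS
make -j
make install
```
With `-D32BIT` left at its default of ON, this produces both `libfms_r4` and `libfms_r8` as shared libraries.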
The following build options are available: ``` --DOPENMP "Build FMS with OpenMP support" DEFAULT: OFF --D32BIT "Build 32-bit (r4) FMS library" DEFAULT: ON --D64BIT "Build 64-bit (r8) FMS library" DEFAULT: OFF --DFPIC "Build with position independent code" DEFAULT: OFF +-DOPENMP "Build FMS with OpenMP support" DEFAULT: OFF +-D32BIT "Build 32-bit (r4) FMS library" DEFAULT: ON +-D64BIT "Build 64-bit (r8) FMS library" DEFAULT: OFF +-DFPIC "Build with position independent code" DEFAULT: OFF +-DSHARED_LIBS "Build shared/dynamic libraries" DEFAULT: OFF -DCONSTANTS "Build with constants parameter definitions" DEFAULT:GFDL OPTIONS:GFS|GEOS|GFDL -DINTERNAL_FILE_NML "Enable compiler definition -DINTERNAL_FILE_NML" DEFAULT: ON diff --git a/CMakeLists.txt b/CMakeLists.txt index 8dca1bdcf5..b7e66e6057 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -21,12 +21,9 @@ cmake_minimum_required(VERSION 3.12 FATAL_ERROR) -# add build type for debug, overrides default flags (set with $FCFLAGS, $CFLAGS) -set(CMAKE_Fortran_FLAGS_DEBUG) - # Define the CMake project project(FMS - VERSION 2024.01.01 + VERSION 2024.02.0 DESCRIPTION "GFDL FMS Library" HOMEPAGE_URL "https://www.gfdl.noaa.gov/fms" LANGUAGES C Fortran) @@ -55,10 +52,11 @@ endif() list(APPEND CMAKE_MODULE_PATH ${CMAKE_CURRENT_SOURCE_DIR}/cmake) # Build options -option(OPENMP "Build FMS with OpenMP support" OFF) -option(32BIT "Build 32-bit (r4) FMS library" ON) -option(64BIT "Build 64-bit (r8) FMS library" OFF) -option(FPIC "Build with position independent code" OFF) +option(OPENMP "Build FMS with OpenMP support" OFF) +option(32BIT "Build 32-bit (r4) FMS library" ON) +option(64BIT "Build 64-bit (r8) FMS library" OFF) +option(FPIC "Build with position independent code" OFF) +option(SHARED_LIBS "Build shared/dynamic libraries" OFF) # Options for compiler definitions option(INTERNAL_FILE_NML "Enable compiler definition -DINTERNAL_FILE_NML" ON) @@ -339,11 +337,8 @@ foreach(kind ${kinds}) target_compile_definitions(${libTgt}_f PRIVATE "${fms_defs}") target_compile_definitions(${libTgt}_f PRIVATE "${${kind}_defs}") - string(TOLOWER ${CMAKE_BUILD_TYPE} build_type) - if (NOT build_type STREQUAL debug) - set_target_properties(${libTgt}_f PROPERTIES COMPILE_FLAGS - "${${kind}_flags}") - endif() + set_target_properties(${libTgt}_f PROPERTIES COMPILE_FLAGS "${${kind}_flags}") + set_target_properties(${libTgt}_f PROPERTIES Fortran_MODULE_DIRECTORY ${moduleDir}) @@ -366,8 +361,15 @@ foreach(kind ${kinds}) endif() # FMS (C + Fortran) - add_library(${libTgt} STATIC $ - $) + if (SHARED_LIBS) + message(STATUS "Shared library target: ${libTgt}") + add_library(${libTgt} SHARED $ + $) + else () + message(STATUS "Static library target: ${libTgt}") + add_library(${libTgt} STATIC $ + $) + endif () target_include_directories(${libTgt} PUBLIC $ @@ -403,7 +405,8 @@ foreach(kind ${kinds}) target_compile_definitions(${libTgt} PRIVATE "${fms_defs}") target_compile_definitions(${libTgt} PRIVATE "${${kind}_defs}") - target_link_libraries(${libTgt} PUBLIC NetCDF::NetCDF_Fortran + target_link_libraries(${libTgt} PUBLIC NetCDF::NetCDF_C + NetCDF::NetCDF_Fortran MPI::MPI_Fortran) if(OpenMP_Fortran_FOUND) diff --git a/configure.ac b/configure.ac index de3a262646..a2699db3e5 100644 --- a/configure.ac +++ b/configure.ac @@ -25,7 +25,7 @@ AC_PREREQ([2.69]) # Initialize with name, version, and support email address. 
AC_INIT([GFDL FMS Library], - [2024.01.01], + [2024.02-dev], [gfdl.climate.model.info@noaa.gov], [FMS], [https://www.github.com/NOAA-GFDL/FMS]) diff --git a/coupler/coupler_types.F90 b/coupler/coupler_types.F90 index 515eb8ed8f..ab616ed981 100644 --- a/coupler/coupler_types.F90 +++ b/coupler/coupler_types.F90 @@ -2944,10 +2944,12 @@ end subroutine CT_set_diags_3d !> @brief Write out all diagnostics of elements of a coupler_2d_bc_type - !! TODO this should really be a function in order to return the status of send_data call - subroutine CT_send_data_2d(var, Time) + subroutine CT_send_data_2d(var, Time, return_statuses) type(coupler_2d_bc_type), intent(in) :: var !< BC_type structure with the diagnostics to write type(time_type), intent(in) :: time !< The current model time + logical, allocatable, optional, intent(out) :: return_statuses(:,:) !< Return status of send data calls + !! first index is index of boundary condition + !! second index is field/value within that boundary condition integer :: m, n logical :: used @@ -2966,18 +2968,33 @@ subroutine CT_send_data_2d(var, Time) ! num_bcs .lt. 1 -> loop doesn't run but shouldn't error out if(associated(var%bc) .or. var%num_bcs .lt. 1) then + + ! allocate array for returned send data statuses + if( present(return_statuses) .and. var%num_bcs .gt. 0) then + allocate(return_statuses(var%num_bcs, var%bc(1)%num_fields)) + endif + do n = 1, var%num_bcs do m = 1, var%bc(n)%num_fields if (var%bc(n)%field(m)%id_diag > 0) then used = send_data(var%bc(n)%field(m)%id_diag, var%bc(n)%field(m)%values, Time) + if(allocated(return_statuses)) return_statuses(n,m) = used endif enddo enddo + else if(associated(var%bc_r4)) then + + ! allocate array for returned send data statuses + if( present(return_statuses) .and. var%num_bcs .gt. 0) then + allocate(return_statuses(var%num_bcs, var%bc_r4(1)%num_fields)) + endif + do n = 1, var%num_bcs do m = 1, var%bc_r4(n)%num_fields if (var%bc_r4(n)%field(m)%id_diag > 0) then used = send_data(var%bc_r4(n)%field(m)%id_diag, var%bc_r4(n)%field(m)%values, Time) + if(allocated(return_statuses)) return_statuses(n,m) = used endif enddo enddo @@ -2988,10 +3005,12 @@ subroutine CT_send_data_2d(var, Time) end subroutine CT_send_data_2d !> @brief Write out all diagnostics of elements of a coupler_3d_bc_type - !! TODO this should really be a function in order to return the status of send_data call - subroutine CT_send_data_3d(var, Time) + subroutine CT_send_data_3d(var, Time, return_statuses) type(coupler_3d_bc_type), intent(in) :: var !< BC_type structure with the diagnostics to write type(time_type), intent(in) :: time !< The current model time + logical, allocatable, optional, intent(out) :: return_statuses(:,:) !< Return status of send data calls + !! first index is index of boundary condition + !! second index is field/value within that boundary condition integer :: m, n logical :: used @@ -3010,18 +3029,32 @@ subroutine CT_send_data_3d(var, Time) ! num_bcs .lt. 1 -> loop doesn't run but shouldn't error out if(associated(var%bc) .or. var%num_bcs .lt. 1) then + + ! allocate array for returned send data statuses + if( present(return_statuses) .and. var%num_bcs .gt. 0) then + allocate(return_statuses(var%num_bcs, var%bc(1)%num_fields)) + endif + do n = 1, var%num_bcs do m = 1, var%bc(n)%num_fields if (var%bc(n)%field(m)%id_diag > 0) then used = send_data(var%bc(n)%field(m)%id_diag, var%bc(n)%field(m)%values, Time) + if(allocated(return_statuses)) return_statuses(n,m) = used endif enddo enddo else if(associated(var%bc_r4)) then + + ! 
allocate array for returned send data statuses + if( present(return_statuses) .and. var%num_bcs .gt. 0) then + allocate(return_statuses(var%num_bcs, var%bc_r4(1)%num_fields)) + endif + do n = 1, var%num_bcs do m = 1, var%bc_r4(n)%num_fields if (var%bc_r4(n)%field(m)%id_diag > 0) then used = send_data(var%bc_r4(n)%field(m)%id_diag, var%bc_r4(n)%field(m)%values, Time) + if(allocated(return_statuses)) return_statuses(n,m) = used endif enddo enddo diff --git a/data_override/README.MD b/data_override/README.MD index f9e19464aa..b35879edf2 100644 --- a/data_override/README.MD +++ b/data_override/README.MD @@ -7,29 +7,29 @@ - [How to use it?](README.MD#2-how-to-use-it) - [Converting legacy data_table to data_table.yaml](README.MD#3-converting-legacy-data_table-to-data_tableyaml) - [Examples](README.MD#4-examples) +- [External Weight File Structure](README.MD#5-external-weight-file-structure) #### 1. YAML Data Table format: Each entry in the data_table has the following key values: -- **gridname:** Name of the grid to interpolate the data to. The acceptable values are "ICE", "OCN", "ATM", and "LND" -- **fieldname_code:** Name of the field as it is in the code to interpolate. -- **fieldname_file:** Name of the field as it is writen in the file. **Required** only if overriding from a file -- **file_name:** Name of the file where the variable is located, including the directory. **Required** only if overriding from a file -- **interpol_method:** Method used to interpolate the field. The acceptable values are "bilinear", "bicubic", and "none". "none" implies that the field in the file is already in the model grid. The LIMA format is no longer supported. **Required** only if overriding from a file +- **grid_name:** Name of the grid to interpolate the data to. The acceptable values are "ICE", "OCN", "ATM", and "LND" +- **fieldname_in_model:** Name of the field as it is in the code to interpolate. +- **override_file:** Optional subsection with key/value pairs defining how to override from a netcdf file. + - **file_name:** Name of the file where the variable is located, including the directory + - **fieldname_in_file:** Name of the field as it is writen in the file + - **interp_method:** Method used to interpolate the field. The acceptable values are "bilinear", "bicubic", and "none". "none" implies that the field in the file is already in the model grid. The LIMA format is no longer supported + - **multi_file:** Optional subsection with key/value pairs to use multiple(3) input netcdf files instead of 1. Note that **file_name** must be the second file in the set when using multiple input netcdf files + - **prev_file_name:** The name of the first file in the set + - **next_file_name:** The name of the third file in the set + - **external_weights:** Optional subsection with key/value pairs defining the external weights file to used for the interpolation. + - **file_name:** Name of the file where the external weights are located, including the directory + - **source:** Name of the source that generated the external weights. The only acceptable value is "fregrid" - **factor:** Factor that will be multiplied after the data is interpolated - -If it is desired to interpolate the data to a region of the model grid. The following **optional** arguments are available. -- **region_type:** The region type. 
The acceptable values are "inside_region" and "outside_region" -- **lon_start:** The starting latitude in the same units as the grid data in the file -- **lon_end:** The ending latitude in the same units as the grid data in the file -- **lat_start:** The starting longitude in the same units as the grid data in the file -- **lon_end:** The ending longitude in the same units as the grid data in the file - -If it is desired to use multiple(3) input netcdf files instead of 1. The following **optional** keys are available. -- **is_multi_file:** Set to `True` is using the multi-file feature -- **prev_file_name:** The name of the first file in the set -- **next_file_name:** The name of the third file in the set - -Note that **file_name** must be the second file in the set. **prev_file_name** and/or **next_file_name** are required if **is_multi_file** is set to `True` +- **subregion:** Optional subsection with key/value pairs that define a subregion of the model grid to interpolate the data to. + - **type:** The region type. The acceptable values are "inside_region" and "outside_region" + - **lon_start:** The starting latitude in the same units as the grid data in the file + - **lon_end:** The ending latitude in the same units as the grid data in the file + - **lat_start:** The starting longitude in the same units as the grid data in the file + - **lon_end:** The ending longitude in the same units as the grid data in the file #### 2. How to use it? In order to use the yaml data format, [libyaml](https://github.com/yaml/libyaml) needs to be installed and linked with FMS. Additionally, FMS must be compiled with -Duse_yaml macro. If using autotools, you can add `--with-yaml`, which will add the macro for you and check that libyaml is linked correctly. @@ -55,21 +55,22 @@ In the **legacy format**, the data_table will look like: In the **yaml format**, the data_table will look like ``` data_table: - - gridname : ICE - fieldname_code : sic_obs - fieldname_file : sic - file_name : INPUT/hadisst_ice.data.nc - interpol_method : bilinear - factor : 0.01 + - grid_name : ICE + fieldname_in_model : sic_obs + override_file: + - file_name : INPUT/hadisst_ice.data.nc + fieldname_in_file : sic + interp_method : bilinear + factor : 0.01 ``` Which corresponds to the following model code: ```F90 call data_override('ICE', 'sic_obs', icec, Spec_Time) ``` where: -- `ICE` corresponds to the gridname in the data_table -- `sic_obs` corresponds to the fieldname_code in the data_table -- `icec` is the variable to write the data to +- `ICE` is the component domain for which the variable is being interpolated and corresponds to the grid_name in the data_table +- `sic_obs` corresponds to the fieldname_in_model in the data_table +- `icec` is the storage array that holds the interpolated data - `Spec_Time` is the time to interpolate the data to. Additionally, it is required to call data_override_init (in this case with the ICE domain). The grid_spec.nc file must also contain the coordinate information for the domain being used. 
@@ -82,15 +83,15 @@ call data_override_init(Ice_domain_in=Ice_domain) In the **legacy format**, the data_table will look like: ``` -"ICE", "sit_obs", "", "INPUT/hadisst_ice.data.nc", "none", 2.0 +"ICE", "sit_obs", "", "INPUT/hadisst_ice.data.nc", "none", 2.0 ``` In the **yaml format**, the data_table will look like: -``` +``` yaml data_table: - - gridname : ICE - fieldname_code : sit_obs - factor : 0.01 + - grid_name : ICE + fieldname_in_model : sit_obs + factor : 0.01 ``` Which corresponds to the following model code: @@ -98,9 +99,9 @@ Which corresponds to the following model code: call data_override('ICE', 'sit_obs', icec, Spec_Time) ``` where: -- `ICE` corresponds to the gridname in the data_table -- `sit_obs` corresponds to the fieldname_code in the data_table -- `icec` is the variable to write the data to +- `ICE` is the component domain for which the variable is being interpolated and corresponds to the grid_name in the data_table +- `sit_obs` corresponds to the fieldname_in_model in the data_table +- `icec` is the storage array that holds the interpolated data - `Spec_Time` is the time to interpolate the data to. Additionally, it is required to call data_override_init (in this case with the ICE domain). The grid_spec.nc file is still required to initialize data_override with the ICE domain. @@ -117,14 +118,15 @@ In the **legacy format**, the data_table will look like: ``` In the **yaml format**, the data_table will look like: -``` +``` yaml data_table: - - gridname : OCN - fieldname_code : runoff - fieldname_file : runoff - file_name : INPUT/runoff.daitren.clim.nc - interpol_method : none - factor : 1.0 + - grid_name : OCN + fieldname_in_model : runoff + override_file: + - file_name : INPUT/runoff.daitren.clim.nc + fieldname_in_file : runoff + interp_method : none + factor : 1.0 ``` Which corresponds to the following model code: @@ -132,9 +134,9 @@ Which corresponds to the following model code: call data_override('OCN', 'runoff', runoff_data, Spec_Time) ``` where: -- `OCN` corresponds to the gridname in the data_table -- `runoff` corresponds to the fieldname_code in the data_table -- `runoff_data` is the variable to write the data to +- `OCN` is the component domain for which the variable is being interpolated and corresponds to the grid_name in the data_table +- `runoff` corresponds to the fieldname_in_model in the data_table +- `runoff_data` is the storage array that holds the interpolated data - `Spec_Time` is the time to interpolate the data to. Additionally, it is required to call data_override_init (in this case with the ocean domain). The grid_spec.nc file is still required to initialize data_override with the ocean domain and to determine if the data in the file is in the same grid as the ocean. @@ -142,3 +144,59 @@ Additionally, it is required to call data_override_init (in this case with the o ```F90 call data_override_init(Ocn_domain_in=Ocn_domain) ``` + +**4.4** The following example uses the multi-file capability +``` yaml +data_table: + - grid_name : ICE + fieldname_in_model : sic_obs + override_file: + - file_name : INPUT/hadisst_ice.data_yr1.nc + fieldname_in_file : sic + interp_method : bilinear + multi_file: + - next_file_name: INPUT/hadisst_ice.data_yr2.nc + prev_file_name: INPUT/hadisst_ice.data_yr0.nc + factor : 0.01 +``` +Data override determines which file to use depending on the model time. This is to prevent having to combine the 3 yearly files into one, since the end of the previous file and the beginning of the next file are needed for yearly simulations. 
+ +**4.5** The following example uses the external weight file capability +``` yaml +data_table: + - grid_name : ICE + fieldname_in_model : sic_obs + override_file: + - file_name : INPUT/hadisst_ice.data.nc + fieldname_in_file : sic + interp_method : bilinear + external_weights: + - file_name: INPUT/remamp_file.nc + source: fregrid + factor : 0.01 +``` + +#### 5. External Weight File Structure + +**5.1** Bilinear weight file example from fregrid + +``` +dimensions: + nlon = 5 ; + nlat = 6 ; + three = 3 ; + four = 4 ; +variables: + int index(three, nlat, nlon) ; + double weight(four, nlat, nlon) ; +``` +- `nlon` and `nlat` must be equal to the size of the global domain. +- `index(1,:,:)` corresponds to the index (i) of the longitudes point in the data file, closest to each model lon, lat +- `index(2,:,:)` corresponds to the index (j) of the lattidude point in the data file, closest to each model lon, lat +- `index(3,:,:)` corresponds to the tile (it should be 1 since data_override does not support interpolation **from** cubesphere grids) + - From there the four corners are (i,j), (i,j+1) (i+1) (i+1,j+1) +- The weights for the four corners + - weight(:,:,1) -> (i,j) + - weight(:,:,2) -> (i,j+1) + - weight(:,:,3) -> (i+1,j) + - weight(:,:,4) -> (i+1,j+1) diff --git a/data_override/include/data_override.inc b/data_override/include/data_override.inc index a5acb1bef1..e79ee5d042 100644 --- a/data_override/include/data_override.inc +++ b/data_override/include/data_override.inc @@ -27,7 +27,7 @@ use constants_mod, only: DEG_TO_RAD use mpp_mod, only : mpp_error, FATAL, WARNING, NOTE, stdout, stdlog, mpp_max use mpp_mod, only : input_nml_file use horiz_interp_mod, only : horiz_interp_init, horiz_interp_new, horiz_interp_type, & - assignment(=) + horiz_interp_read_weights use time_interp_external2_mod, only: time_interp_external_init, & time_interp_external, & time_interp_external_bridge, get_time_axis, & @@ -63,6 +63,9 @@ type data_type character(len=128) :: fieldname_file !< fieldname used in the netcdf data file character(len=512) :: file_name !< name of netCDF data file character(len=128) :: interpol_method !< interpolation method (default "bilinear") + logical :: ext_weights + character(len=128) :: ext_weights_file_name + character(len=128) :: ext_weights_source real(FMS_DATA_OVERRIDE_KIND_) :: factor !< For unit conversion, default=1, see OVERVIEW above real(FMS_DATA_OVERRIDE_KIND_) :: lon_start, lon_end, lat_start, lat_end integer :: region_type @@ -94,10 +97,21 @@ type override_type integer :: is_src, ie_src, js_src, je_src end type override_type +!> Private type for holding horiz_interp_type for a weight file +!! This is needed so that if variables use the same weight file, +!! then we won't have to read the weight file again +!> @ingroup data_override_mod +type fmsExternalWeights_type + character(len=:), allocatable :: weight_filename !< Name of the weight file + type(horiz_interp_type) :: horiz_interp !< Horiz interp type read in from the weight file +end type fmsExternalWeights_type + integer, parameter :: lkind = FMS_DATA_OVERRIDE_KIND_ integer, parameter :: max_table=100, max_array=100 integer :: table_size !< actual size of data table +integer :: nweight_files !< Number of weight files that have been used +type(fmsExternalWeights_type), allocatable, target :: external_weights(:) !< External weights types logical :: module_is_initialized = .FALSE. 
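!> Component model domains registered through data_override_init;
!! they determine the target grid for each data_override call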
type(domain2D) :: ocn_domain,atm_domain,lnd_domain, ice_domain @@ -223,6 +237,8 @@ end if if (file_exists("data_table")) & call mpp_error(FATAL, "You cannot have the legacy data_table if use_data_table_yaml=.true.") call read_table_yaml(data_table) + allocate(external_weights(table_size)) + nweight_files = 0 else if (file_exists("data_table.yaml"))& call mpp_error(FATAL, "You cannot have the yaml data_table if use_data_table_yaml=.false.") @@ -552,6 +568,7 @@ subroutine read_table(data_table) data_entry%lat_end = -1.0_lkind data_entry%region_type = NO_REGION endif + data_entry%ext_weights = .false. data_table(ntable) = data_entry enddo call mpp_error(FATAL,'too many enries in data_table') @@ -570,7 +587,8 @@ subroutine read_table_yaml(data_table) type(data_type), dimension(:), allocatable, intent(out) :: data_table !< Contents of the data_table.yaml integer, allocatable :: entry_id(:) - integer :: nentries + integer :: sub_block_id(1), sub2_block_id(1) + integer :: nentries, mentries integer :: i character(len=50) :: buffer integer :: file_id @@ -585,53 +603,90 @@ subroutine read_table_yaml(data_table) call get_block_ids(file_id, "data_table", entry_id) do i = 1, nentries - call get_value_from_key(file_id, entry_id(i), "gridname", data_table(i)%gridname) - call check_for_valid_gridname(data_table(i)%gridname) - call get_value_from_key(file_id, entry_id(i), "fieldname_code", data_table(i)%fieldname_code) - - data_table(i)%fieldname_file = "" - call get_value_from_key(file_id, entry_id(i), "fieldname_file", data_table(i)%fieldname_file, & - & is_optional=.true.) - - data_table(i)%multifile = .false. - call get_value_from_key(file_id, entry_id(i), "is_multi_file", data_table(i)%multifile, & - & is_optional=.true.) - - if (data_table(i)%multifile) then - data_table(i)%prev_file_name = "" - data_table(i)%next_file_name = "" - call get_value_from_key(file_id, entry_id(i), "prev_file_name", data_table(i)%prev_file_name, & - & is_optional=.true.) - call get_value_from_key(file_id, entry_id(i), "next_file_name", data_table(i)%next_file_name, & - & is_optional=.true.) + call get_value_from_key(file_id, entry_id(i), "factor", data_table(i)%factor) + call get_value_from_key(file_id, entry_id(i), "grid_name", data_table(i)%gridname) + call check_for_valid_gridname(data_table(i)%gridname) + call get_value_from_key(file_id, entry_id(i), "fieldname_in_model", data_table(i)%fieldname_code) + + mentries = get_num_blocks(file_id, "override_file", parent_block_id=entry_id(i)) + data_table(i)%file_name = "" + data_table(i)%fieldname_file = "" + data_table(i)%interpol_method = "none" + data_table(i)%multifile = .false. + data_table(i)%ext_weights = .false. + data_table(i)%region_type = NO_REGION + data_table(i)%prev_file_name = "" + data_table(i)%next_file_name = "" + data_table(i)%ext_weights_file_name = "" + data_table(i)%ext_weights_source = "" + + ! If there is no override_file block, then not overriding from file, so move on to the next entry + if (mentries .eq. 0) cycle + + if(mentries.gt.1) call mpp_error(FATAL, "Too many override_file blocks in data table. 
"//& + "Check your data_table.yaml entry for field:"//trim(data_table(i)%gridname)//":"//& + trim(data_table(i)%fieldname_code)) + call get_block_ids(file_id, "override_file", sub_block_id, parent_block_id=entry_id(i)) + + call get_value_from_key(file_id, sub_block_id(1), "file_name", data_table(i)%file_name) + call get_value_from_key(file_id, sub_block_id(1), "fieldname_in_file", data_table(i)%fieldname_file) + call get_value_from_key(file_id, sub_block_id(1), "interp_method", data_table(i)%interpol_method) + call check_interpol_method(data_table(i)%interpol_method, data_table(i)%file_name, & + & data_table(i)%fieldname_file) + + mentries = get_num_blocks(file_id, "multi_file", parent_block_id=sub_block_id(1)) + if(mentries.gt.1) call mpp_error(FATAL, "Too many multi_file blocks in tata table. "//& + "Check your data_table.yaml entry for field:"//trim(data_table(i)%gridname)//":"//& + trim(data_table(i)%fieldname_code)) + + if(mentries.gt.0) data_table(i)%multifile = .true. + + if (data_table(i)%multifile) then + call get_block_ids(file_id, "multi_file", sub2_block_id, parent_block_id=sub_block_id(1)) + call get_value_from_key(file_id, sub2_block_id(1), "prev_file_name", data_table(i)%prev_file_name) + call get_value_from_key(file_id, sub2_block_id(1), "next_file_name", data_table(i)%next_file_name) if (trim(data_table(i)%prev_file_name) .eq. "" .and. trim(data_table(i)%next_file_name) .eq. "") & call mpp_error(FATAL, "The prev_file_name and next_file_name must be present if is_multi_file. "//& "Check your data_table.yaml entry for field:"//trim(data_table(i)%gridname)//":"//& trim(data_table(i)%fieldname_code)) - endif + endif - data_table(i)%file_name = "" - call get_value_from_key(file_id, entry_id(i), "file_name", data_table(i)%file_name, & - & is_optional=.true.) + mentries = get_num_blocks(file_id, "external_weights", parent_block_id=sub_block_id(1)) + if(mentries.gt.1) call mpp_error(FATAL, "Too many external_weight blocks in data table. "//& + "Check your data_table.yaml entry for field:"//trim(data_table(i)%gridname)//":"//& + trim(data_table(i)%fieldname_code)) + + if(mentries.gt.0) data_table(i)%ext_weights = .true. + + if (data_table(i)%ext_weights) then + call get_block_ids(file_id, "external_weights", sub2_block_id, parent_block_id=sub_block_id(1)) + call get_value_from_key(file_id, sub2_block_id(1), "file_name", data_table(i)%ext_weights_file_name) + call get_value_from_key(file_id, sub2_block_id(1), "source", data_table(i)%ext_weights_source) + if (trim(data_table(i)%ext_weights_file_name) .eq. "" .and. trim(data_table(i)%ext_weights_source) .eq. "") & + call mpp_error(FATAL, "The file_name and source must be present when using external weights"//& + "Check your data_table.yaml entry for field:"//trim(data_table(i)%gridname)//":"//& + trim(data_table(i)%fieldname_code)) + endif - data_table(i)%interpol_method = "none" - call get_value_from_key(file_id, entry_id(i), "interpol_method", data_table(i)%interpol_method, & - & is_optional=.true.) - call check_interpol_method(data_table(i)%interpol_method, data_table(i)%file_name, & - data_table(i)%fieldname_file) + mentries = get_num_blocks(file_id, "subregion", parent_block_id=entry_id(i)) + if(mentries.gt.1) call mpp_error(FATAL, "Too many subregion blocks in data table. 
"//& + "Check your data_table.yaml entry for field:"//trim(data_table(i)%gridname)//":"//& + trim(data_table(i)%fieldname_code)) - call get_value_from_key(file_id, entry_id(i), "factor", data_table(i)%factor) - buffer = "" - call get_value_from_key(file_id, entry_id(i), "region_type", buffer, is_optional=.true.) - call check_and_set_region_type(buffer, data_table(i)%region_type) + buffer = "" + if(mentries.gt.0) then + call get_block_ids(file_id, "subregion", sub_block_id, parent_block_id=entry_id(i)) + call get_value_from_key(file_id, sub_block_id(1), "type", buffer) + endif + call check_and_set_region_type(buffer, data_table(i)%region_type) if (data_table(i)%region_type .ne. NO_REGION) then - call get_value_from_key(file_id, entry_id(i), "lon_start", data_table(i)%lon_start, is_optional=.true.) - call get_value_from_key(file_id, entry_id(i), "lon_end", data_table(i)%lon_end, is_optional=.true.) - call get_value_from_key(file_id, entry_id(i), "lat_start", data_table(i)%lat_start, is_optional=.true.) - call get_value_from_key(file_id, entry_id(i), "lat_end", data_table(i)%lat_end, is_optional=.true.) + call get_value_from_key(file_id, sub_block_id(1), "lon_start", data_table(i)%lon_start) + call get_value_from_key(file_id, sub_block_id(1), "lon_end", data_table(i)%lon_end) + call get_value_from_key(file_id, sub_block_id(1), "lat_start", data_table(i)%lat_start) + call get_value_from_key(file_id, sub_block_id(1), "lat_end", data_table(i)%lat_end) call check_valid_lat_lon(data_table(i)%lon_start, data_table(i)%lon_end, & - data_table(i)%lat_start, data_table(i)%lat_end) + data_table(i)%lat_start, data_table(i)%lat_end) endif end do @@ -863,7 +918,7 @@ subroutine DATA_OVERRIDE_0D_(gridname,fieldname_code,data_out,time,override,data ! record fieldname, gridname in override_array override_array(curr_position)%fieldname = fieldname_code override_array(curr_position)%gridname = gridname - id_time = init_external_field(filename,fieldname,verbose=.false.) + id_time = init_external_field(filename,fieldname,verbose=debug_data_override) if(id_time<0) call mpp_error(FATAL,'data_override:field not found in init_external_field 1') override_array(curr_position)%t_index = id_time else !curr_position >0 @@ -877,7 +932,7 @@ subroutine DATA_OVERRIDE_0D_(gridname,fieldname_code,data_out,time,override,data if_multi1: if (multifile) then id_time_prev = -1 if_prev1: if (trim(prevfilename) /= '') then - id_time_prev = init_external_field(prevfilename,fieldname,verbose=.false.) + id_time_prev = init_external_field(prevfilename,fieldname,verbose=debug_data_override) dims = get_external_field_size(id_time) prev_dims = get_external_field_size(id_time_prev) ! check consistency of spatial dims @@ -890,7 +945,7 @@ subroutine DATA_OVERRIDE_0D_(gridname,fieldname_code,data_out,time,override,data endif if_prev1 id_time_next = -1 if_next1: if (trim(nextfilename) /= '') then - id_time_next = init_external_field(nextfilename,fieldname,verbose=.false.) + id_time_next = init_external_field(nextfilename,fieldname,verbose=debug_data_override) dims = get_external_field_size(id_time) next_dims = get_external_field_size(id_time_next) ! 
check consistency of spatial dims @@ -922,17 +977,17 @@ subroutine DATA_OVERRIDE_0D_(gridname,fieldname_code,data_out,time,override,data prev_dims = get_external_field_size(id_time_prev) if (timelast_record) then if (id_time_next<0) call mpp_error(FATAL,'data_override:next file needed with multifile') if (time>data_table(index1)%time_next_records(1)) call mpp_error(FATAL, & 'data_override: time_interp_external_bridge should only be called to bridge with next file') - call time_interp_external_bridge(id_time, id_time_next,time,data_out,verbose=.false.) + call time_interp_external_bridge(id_time, id_time_next,time,data_out,verbose=debug_data_override) else ! first_record < time < last_record, do not use bridge - call time_interp_external(id_time,time,data_out,verbose=.false.) + call time_interp_external(id_time,time,data_out,verbose=debug_data_override) endif if_time2 else ! standard behavior - call time_interp_external(id_time,time,data_out,verbose=.false.) + call time_interp_external(id_time,time,data_out,verbose=debug_data_override) endif if_multi2 @@ -1032,6 +1087,9 @@ subroutine DATA_OVERRIDE_3D_(gridname,fieldname_code,return_data,time,override,d integer :: endingj !< Ending y index for the compute domain relative to the input buffer integer :: nhalox !< Number of halos in the x direction integer :: nhaloy !< Number of halos in the y direction + logical :: found_weight_file !< .True. if the weight file has already been read + integer :: nglat !< Number of latitudes in the global domain + integer :: nglon !< Number of longitudes in the global domain use_comp_domain = .false. if(.not.module_is_initialized) & @@ -1165,7 +1223,7 @@ subroutine DATA_OVERRIDE_3D_(gridname,fieldname_code,return_data,time,override,d endif if_multi3 !--- we always only pass data on compute domain - id_time = init_external_field(filename,fieldname,domain=domain,verbose=.false., & + id_time = init_external_field(filename,fieldname,domain=domain,verbose=debug_data_override, & use_comp_domain=use_comp_domain, nwindows=nwindows, ongrid=ongrid) ! 
if using consecutive files for data_override, get time axis for previous and next files @@ -1174,7 +1232,7 @@ subroutine DATA_OVERRIDE_3D_(gridname,fieldname_code,return_data,time,override,d id_time_prev = -1 if_prev4:if (trim(prevfilename) /= '') then id_time_prev = init_external_field(prevfilename,fieldname,domain=domain, & - verbose=.false.,use_comp_domain=use_comp_domain, & + verbose=debug_data_override,use_comp_domain=use_comp_domain, & nwindows = nwindows, ongrid=ongrid) dims = get_external_field_size(id_time) prev_dims = get_external_field_size(id_time_prev) @@ -1189,7 +1247,7 @@ subroutine DATA_OVERRIDE_3D_(gridname,fieldname_code,return_data,time,override,d id_time_next = -1 if_next4: if (trim(nextfilename) /= '') then id_time_next = init_external_field(nextfilename,fieldname,domain=domain, & - verbose=.false.,use_comp_domain=use_comp_domain, & + verbose=debug_data_override,use_comp_domain=use_comp_domain, & nwindows = nwindows, ongrid=ongrid) dims = get_external_field_size(id_time) next_dims = get_external_field_size(id_time_next) @@ -1211,7 +1269,7 @@ subroutine DATA_OVERRIDE_3D_(gridname,fieldname_code,return_data,time,override,d override_array(curr_position)%nt_index = id_time_next else !ongrid=false id_time = init_external_field(filename,fieldname,domain=domain, axis_names=axis_names,& - axis_sizes=axis_sizes, verbose=.false.,override=.true.,use_comp_domain=use_comp_domain, & + axis_sizes=axis_sizes, verbose=debug_data_override,override=.true.,use_comp_domain=use_comp_domain, & nwindows = nwindows) ! if using consecutive files for data_override, get time axis for previous and next files @@ -1220,7 +1278,7 @@ subroutine DATA_OVERRIDE_3D_(gridname,fieldname_code,return_data,time,override,d id_time_prev = -1 if_prev5: if (trim(prevfilename) /= '') then id_time_prev = init_external_field(prevfilename,fieldname,domain=domain, axis_names=axis_names,& - axis_sizes=axis_sizes, verbose=.false.,override=.true.,use_comp_domain=use_comp_domain, & + axis_sizes=axis_sizes, verbose=debug_data_override,override=.true.,use_comp_domain=use_comp_domain, & nwindows = nwindows) prev_dims = get_external_field_size(id_time_prev) allocate(data_table(index1)%time_prev_records(prev_dims(4))) @@ -1229,7 +1287,7 @@ subroutine DATA_OVERRIDE_3D_(gridname,fieldname_code,return_data,time,override,d id_time_next = -1 if_next5: if (trim(nextfilename) /= '') then id_time_next = init_external_field(nextfilename,fieldname,domain=domain, axis_names=axis_names,& - axis_sizes=axis_sizes, verbose=.false.,override=.true.,use_comp_domain=use_comp_domain, & + axis_sizes=axis_sizes, verbose=debug_data_override,override=.true.,use_comp_domain=use_comp_domain, & nwindows = nwindows) next_dims = get_external_field_size(id_time_next) allocate(data_table(index1)%time_next_records(next_dims(4))) @@ -1424,18 +1482,45 @@ subroutine DATA_OVERRIDE_3D_(gridname,fieldname_code,return_data,time,override,d call mpp_error(FATAL,'error: gridname not recognized in data_override') end select - select case (data_table(index1)%interpol_method) - case ('bilinear') + if (data_table(index1)%ext_weights) then + found_weight_file = .false. + do i = 1, nweight_files + if (external_weights(i)%weight_filename .eq. trim(data_table(index1)%ext_weights_file_name)) then + override_array(curr_position)%horz_interp(window_id) = external_weights(i)%horiz_interp + found_weight_file = .true. + exit + endif + enddo + + if (.not. 
found_weight_file) then + nweight_files = nweight_files + 1 + external_weights(nweight_files)%weight_filename = trim(data_table(index1)%ext_weights_file_name) + + call mpp_get_global_domain(domain, xsize=nglon, ysize=nglat) + call horiz_interp_read_weights(external_weights(nweight_files)%horiz_interp, & + external_weights(nweight_files)%weight_filename, & + lon_local(isw:iew,jsw:jew), lat_local(isw:iew,jsw:jew), & + override_array(curr_position)%lon_in(is_src:ie_src+1), & + override_array(curr_position)%lat_in(js_src:je_src+1), & + data_table(index1)%ext_weights_source, & + data_table(index1)%interpol_method, isw, iew, jsw, jew, nglon, nglat) + + override_array(curr_position)%horz_interp(window_id) = external_weights(nweight_files)%horiz_interp + endif + else + select case (data_table(index1)%interpol_method) + case ('bilinear') call horiz_interp_new (override_array(curr_position)%horz_interp(window_id), & override_array(curr_position)%lon_in(is_src:ie_src+1), & override_array(curr_position)%lat_in(js_src:je_src+1), & lon_local(isw:iew,jsw:jew), lat_local(isw:iew,jsw:jew), interp_method="bilinear") - case ('bicubic') + case ('bicubic') call horiz_interp_new (override_array(curr_position)%horz_interp(window_id), & override_array(curr_position)%lon_in(is_src:ie_src+1), & override_array(curr_position)%lat_in(js_src:je_src+1), & lon_local(isw:iew,jsw:jew), lat_local(isw:iew,jsw:jew), interp_method="bicubic") - end select + end select + endif override_array(curr_position)%need_compute(window_id) = .false. endif @@ -1481,7 +1566,7 @@ subroutine DATA_OVERRIDE_3D_(gridname,fieldname_code,return_data,time,override,d if (timelast_record) then ! next file must be init and time must be between last record of current file and @@ -1490,14 +1575,14 @@ subroutine DATA_OVERRIDE_3D_(gridname,fieldname_code,return_data,time,override,d if (time>data_table(index1)%time_next_records(1)) call mpp_error(FATAL, & 'data_override: time_interp_external_bridge should only be called to bridge with next file') ! bridge with next file - call time_interp_external_bridge(id_time,id_time_next,time,return_data(:,:,1),verbose=.false., & + call time_interp_external_bridge(id_time,id_time_next,time,return_data(:,:,1),verbose=debug_data_override, & is_in=is_in,ie_in=ie_in,js_in=js_in,je_in=je_in,window_id=window_id) else ! first_record <= time <= last_record, do not use bridge - call time_interp_external(id_time,time,return_data(:,:,1),verbose=.false., & + call time_interp_external(id_time,time,return_data(:,:,1),verbose=debug_data_override, & is_in=is_in,ie_in=ie_in,js_in=js_in,je_in=je_in,window_id=window_id) endif if_time6 else ! standard behavior - call time_interp_external(id_time,time,return_data(:,:,1),verbose=.false., & + call time_interp_external(id_time,time,return_data(:,:,1),verbose=debug_data_override, & is_in=is_in,ie_in=ie_in,js_in=js_in,je_in=je_in,window_id=window_id) endif if_multi6 @@ -1518,8 +1603,9 @@ subroutine DATA_OVERRIDE_3D_(gridname,fieldname_code,return_data,time,override,d 'data_override: time_interp_external_bridge should only be called to bridge with previous file') ! bridge with previous file call time_interp_external_bridge(id_time_prev,id_time,time,& - return_data(startingi:endingi,startingj:endingj,1),verbose=.false., & - is_in=is_in,ie_in=ie_in,js_in=js_in,je_in=je_in,window_id=window_id) + return_data(startingi:endingi,startingj:endingj,1), & + verbose=debug_data_override,is_in=is_in,ie_in=ie_in, & + js_in=js_in,je_in=je_in,window_id=window_id) elseif (time>last_record) then ! 
next file must be init and time must be between last record of current file and ! first record of next file @@ -1528,15 +1614,18 @@ subroutine DATA_OVERRIDE_3D_(gridname,fieldname_code,return_data,time,override,d 'data_override: time_interp_external_bridge should only be called to bridge with next file') ! bridge with next file call time_interp_external_bridge(id_time,id_time_next,time,& - return_data(startingi:endingi,startingj:endingj,1),verbose=.false., & - is_in=is_in,ie_in=ie_in,js_in=js_in,je_in=je_in,window_id=window_id) + return_data(startingi:endingi,startingj:endingj,1), & + verbose=debug_data_override,is_in=is_in,ie_in=ie_in, & + js_in=js_in,je_in=je_in,window_id=window_id) else ! first_record <= time <= last_record, do not use bridge - call time_interp_external(id_time,time,return_data(startingi:endingi,startingj:endingj,1),verbose=.false., & - is_in=is_in,ie_in=ie_in,js_in=js_in,je_in=je_in,window_id=window_id) + call time_interp_external(id_time,time,return_data(startingi:endingi,startingj:endingj,1), & + verbose=debug_data_override,is_in=is_in,ie_in=ie_in, & + js_in=js_in,je_in=je_in,window_id=window_id) endif if_time7 else ! standard behavior - call time_interp_external(id_time,time,return_data(startingi:endingi,startingj:endingj,1),verbose=.false., & - is_in=is_in,ie_in=ie_in,js_in=js_in,je_in=je_in,window_id=window_id) + call time_interp_external(id_time,time,return_data(startingi:endingi,startingj:endingj,1), & + verbose=debug_data_override,is_in=is_in,ie_in=ie_in, & + js_in=js_in,je_in=je_in,window_id=window_id) endif if_multi7 end if @@ -1556,20 +1645,20 @@ subroutine DATA_OVERRIDE_3D_(gridname,fieldname_code,return_data,time,override,d prev_dims = get_external_field_size(id_time_prev) if (timelast_record) then if (id_time_next<0) call mpp_error(FATAL,'data_override:next file needed with multifile') if (time>data_table(index1)%time_next_records(1)) call mpp_error(FATAL, & 'data_override: time_interp_external_bridge should only be called to bridge with next file') - call time_interp_external_bridge(id_time,id_time_next,time,return_data,verbose=.false., & + call time_interp_external_bridge(id_time,id_time_next,time,return_data,verbose=debug_data_override, & is_in=is_in,ie_in=ie_in,js_in=js_in,je_in=je_in,window_id=window_id) else ! first_record <= time <= last_record, do not use bridge - call time_interp_external(id_time,time,return_data,verbose=.false., & + call time_interp_external(id_time,time,return_data,verbose=debug_data_override, & is_in=is_in,ie_in=ie_in,js_in=js_in,je_in=je_in,window_id=window_id) endif if_time8 else ! 
standard behavior - call time_interp_external(id_time,time,return_data,verbose=.false., & + call time_interp_external(id_time,time,return_data,verbose=debug_data_override, & is_in=is_in,ie_in=ie_in,js_in=js_in,je_in=je_in,window_id=window_id) endif if_multi8 @@ -1587,22 +1676,26 @@ subroutine DATA_OVERRIDE_3D_(gridname,fieldname_code,return_data,time,override,d if (timelast_record) then if (id_time_next<0) call mpp_error(FATAL,'data_override:next file needed with multifile') if (time>data_table(index1)%time_next_records(1)) call mpp_error(FATAL, & 'data_override: time_interp_external_bridge should only be called to bridge with next file') call time_interp_external_bridge(id_time,id_time_next,time,& - return_data(startingi:endingi,startingj:endingj,:),verbose=.false., & - is_in=is_in,ie_in=ie_in,js_in=js_in,je_in=je_in,window_id=window_id) + return_data(startingi:endingi,startingj:endingj,:), & + verbose=debug_data_override,is_in=is_in,ie_in=ie_in, & + js_in=js_in,je_in=je_in,window_id=window_id) else ! first_record <= time <= last_record, do not use bridge - call time_interp_external(id_time,time,return_data(startingi:endingi,startingj:endingj,:),verbose=.false., & - is_in=is_in,ie_in=ie_in,js_in=js_in,je_in=je_in,window_id=window_id) + call time_interp_external(id_time,time,return_data(startingi:endingi,startingj:endingj,:), & + verbose=debug_data_override,is_in=is_in,ie_in=ie_in, & + js_in=js_in,je_in=je_in,window_id=window_id) endif if_time9 else ! standard behavior - call time_interp_external(id_time,time,return_data(startingi:endingi,startingj:endingj,:),verbose=.false., & - is_in=is_in,ie_in=ie_in,js_in=js_in,je_in=je_in,window_id=window_id) + call time_interp_external(id_time,time,return_data(startingi:endingi,startingj:endingj,:), & + verbose=debug_data_override,is_in=is_in,ie_in=ie_in, & + js_in=js_in,je_in=je_in,window_id=window_id) endif if_multi9 end if @@ -1622,23 +1715,25 @@ subroutine DATA_OVERRIDE_3D_(gridname,fieldname_code,return_data,time,override,d prev_dims = get_external_field_size(id_time_prev) if (timelast_record) then if (id_time_next<0) call mpp_error(FATAL,'data_override:next file needed with multifile') if (time>data_table(index1)%time_next_records(1)) call mpp_error(FATAL, & 'data_override: time_interp_external_bridge should only be called to bridge with next file') - call time_interp_external_bridge(id_time,id_time_next,time,return_data(:,:,1),verbose=.false., & + call time_interp_external_bridge(id_time,id_time_next,time,return_data(:,:,1), & + verbose=debug_data_override, & horz_interp=override_array(curr_position)%horz_interp(window_id), & is_in=is_in,ie_in=ie_in,js_in=js_in,je_in=je_in,window_id=window_id) else ! first_record <= time <= last_record, do not use bridge - call time_interp_external(id_time,time,return_data(:,:,1),verbose=.false., & + call time_interp_external(id_time,time,return_data(:,:,1),verbose=debug_data_override, & horz_interp=override_array(curr_position)%horz_interp(window_id), & is_in=is_in,ie_in=ie_in,js_in=js_in,je_in=je_in,window_id=window_id) endif if_time10 else ! 
standard behavior - call time_interp_external(id_time,time,return_data(:,:,1),verbose=.false., & + call time_interp_external(id_time,time,return_data(:,:,1),verbose=debug_data_override, & horz_interp=override_array(curr_position)%horz_interp(window_id), & is_in=is_in,ie_in=ie_in,js_in=js_in,je_in=je_in,window_id=window_id) endif if_multi10 @@ -1660,29 +1755,31 @@ subroutine DATA_OVERRIDE_3D_(gridname,fieldname_code,return_data,time,override,d prev_dims = get_external_field_size(id_time_prev) if (timelast_record) then if (id_time_next<0) call mpp_error(FATAL,'data_override:next file needed with multifile') if (time>data_table(index1)%time_next_records(1)) call mpp_error(FATAL, & 'data_override: time_interp_external_bridge should only be called to bridge with next file') - call time_interp_external_bridge(id_time,id_time_next,time,return_data(:,:,1),verbose=.false., & - horz_interp=override_array(curr_position)%horz_interp(window_id), & - mask_out =mask_out(:,:,1), & - is_in=is_in,ie_in=ie_in,js_in=js_in,je_in=je_in,window_id=window_id) + call time_interp_external_bridge(id_time,id_time_next,time,return_data(:,:,1), & + verbose=debug_data_override, & + horz_interp=override_array(curr_position)%horz_interp(window_id), & + mask_out =mask_out(:,:,1), & + is_in=is_in,ie_in=ie_in,js_in=js_in,je_in=je_in,window_id=window_id) else ! first_record <= time <= last_record, do not use bridge - call time_interp_external(id_time,time,return_data(:,:,1),verbose=.false., & - horz_interp=override_array(curr_position)%horz_interp(window_id), & - mask_out =mask_out(:,:,1), & - is_in=is_in,ie_in=ie_in,js_in=js_in,je_in=je_in,window_id=window_id) + call time_interp_external(id_time,time,return_data(:,:,1),verbose=debug_data_override, & + horz_interp=override_array(curr_position)%horz_interp(window_id), & + mask_out =mask_out(:,:,1), & + is_in=is_in,ie_in=ie_in,js_in=js_in,je_in=je_in,window_id=window_id) endif if_time11 else ! standard behavior - call time_interp_external(id_time,time,return_data(:,:,1),verbose=.false., & - horz_interp=override_array(curr_position)%horz_interp(window_id), & - mask_out =mask_out(:,:,1), & - is_in=is_in,ie_in=ie_in,js_in=js_in,je_in=je_in,window_id=window_id) + call time_interp_external(id_time,time,return_data(:,:,1),verbose=debug_data_override, & + horz_interp=override_array(curr_position)%horz_interp(window_id), & + mask_out =mask_out(:,:,1), & + is_in=is_in,ie_in=ie_in,js_in=js_in,je_in=je_in,window_id=window_id) endif if_multi11 where(mask_out(:,:,1)) @@ -1707,23 +1804,23 @@ subroutine DATA_OVERRIDE_3D_(gridname,fieldname_code,return_data,time,override,d prev_dims = get_external_field_size(id_time_prev) if (timelast_record) then if (id_time_next<0) call mpp_error(FATAL,'data_override:next file needed with multifile') if (time>data_table(index1)%time_next_records(1)) call mpp_error(FATAL, & 'data_override: time_interp_external_bridge should only be called to bridge with next file') - call time_interp_external_bridge(id_time,id_time_next,time,return_data,verbose=.false., & + call time_interp_external_bridge(id_time,id_time_next,time,return_data,verbose=debug_data_override, & horz_interp=override_array(curr_position)%horz_interp(window_id), & is_in=is_in,ie_in=ie_in,js_in=js_in,je_in=je_in,window_id=window_id) else ! 
first_record <= time <= last_record, do not use bridge - call time_interp_external(id_time,time,return_data,verbose=.false., & + call time_interp_external(id_time,time,return_data,verbose=debug_data_override, & horz_interp=override_array(curr_position)%horz_interp(window_id), & is_in=is_in,ie_in=ie_in,js_in=js_in,je_in=je_in,window_id=window_id) endif if_time12 else ! standard behavior - call time_interp_external(id_time,time,return_data,verbose=.false., & + call time_interp_external(id_time,time,return_data,verbose=debug_data_override, & horz_interp=override_array(curr_position)%horz_interp(window_id), & is_in=is_in,ie_in=ie_in,js_in=js_in,je_in=je_in,window_id=window_id) endif if_multi12 @@ -1742,7 +1839,7 @@ subroutine DATA_OVERRIDE_3D_(gridname,fieldname_code,return_data,time,override,d prev_dims = get_external_field_size(id_time_prev) if (timedata_table(index1)%time_next_records(1)) call mpp_error(FATAL, & 'data_override: time_interp_external_bridge should only be called to bridge with next file') - call time_interp_external_bridge(id_time,id_time_next,time,return_data,verbose=.false., & + call time_interp_external_bridge(id_time,id_time_next,time,return_data,verbose=debug_data_override, & horz_interp=override_array(curr_position)%horz_interp(window_id), & mask_out =mask_out, & is_in=is_in,ie_in=ie_in,js_in=js_in,je_in=je_in,window_id=window_id) else ! first_record <= time <= last_record, do not use bridge - call time_interp_external(id_time,time,return_data,verbose=.false., & - horz_interp=override_array(curr_position)%horz_interp(window_id), & + call time_interp_external(id_time,time,return_data,verbose=debug_data_override, & + horz_interp=override_array(curr_position)%horz_interp(window_id), & mask_out =mask_out, & is_in=is_in,ie_in=ie_in,js_in=js_in,je_in=je_in,window_id=window_id) endif if_time13 else ! standard behavior - call time_interp_external(id_time,time,return_data,verbose=.false., & - horz_interp=override_array(curr_position)%horz_interp(window_id), & + call time_interp_external(id_time,time,return_data,verbose=debug_data_override, & + horz_interp=override_array(curr_position)%horz_interp(window_id), & mask_out =mask_out, & is_in=is_in,ie_in=ie_in,js_in=js_in,je_in=je_in,window_id=window_id) endif if_multi13 diff --git a/diag_manager/README.md b/diag_manager/README.md index 60ab87bbd5..ddf0dac681 100644 --- a/diag_manager/README.md +++ b/diag_manager/README.md @@ -1,4 +1,4 @@ -The purpose of this document is to document the differences between the old diag manager and the new (modern) diag manager. +The purpose of this document is to document the differences between the old diag manager and the new (modern) diag manager. ## Contents - [1. Diag Table Format](README.md#1-diag-table-format) @@ -10,7 +10,9 @@ The purpose of this document is to document the differences between the old diag - [7. History files data output "changes"](README.md#7-history-files-data-output-changes) ### 1. Diag Table Format -The modern diag manager uses a YAML format instead of the legacy ascii table. A description of the YAML diag table can be found [here](diag_yaml_format.md). +The modern diag manager uses a YAML format instead of the legacy ascii table. A description of the YAML diag table can +be found [here](diag_yaml_format.md). A formal specification, in the form of a JSON schema, can be found in the +[gfdl_msd_schemas](https://github.com/NOAA-GFDL/gfdl_msd_schemas) repository on Github. ### 2. 
Scalar Axis The old diag manager was adding a `scalar_axis` dimension of size 1 for scalar variables @@ -70,7 +72,7 @@ This time_bounds variable is refernced as a variable attribute of time: ### 4. Subregional Files #### A. `is_subregional` global attribute: -Subregional files will have a global NetCDF attribute `is_subregional = True` set for non-global history files. This attribute will be used in PP tools. +Subregional files will have a global NetCDF attribute `is_subregional = True` set for non-global history files. This attribute will be used in PP tools. #### B. Subregional dimension names: In some cases, the old diag manager was adding `sub0X` to the dimension names where X is a number greater than 1. This was causing problems in PP tools that were expecting the dimension to have `sub01` in the name. The new diag manager will not have this problem. diff --git a/diag_manager/diag_data.F90 b/diag_manager/diag_data.F90 index abf08d18f7..80bdc195bf 100644 --- a/diag_manager/diag_data.F90 +++ b/diag_manager/diag_data.F90 @@ -611,8 +611,8 @@ function get_var_type(var) & type is (character(len=*)) var_type = string class default - call mpp_error(FATAL, "get_var_type:: The variable does not have a supported type. "& - &"The supported types are r4, r8, i4, i8 and string.") + call mpp_error(FATAL, "get_var_type:: The variable does not have a supported type. & + &The supported types are r4, r8, i4, i8 and string.") end select end function get_var_type diff --git a/diag_manager/diag_manager.F90 b/diag_manager/diag_manager.F90 index eeab1a5227..be448fcfb6 100644 --- a/diag_manager/diag_manager.F90 +++ b/diag_manager/diag_manager.F90 @@ -246,6 +246,7 @@ MODULE diag_manager_mod USE fms_diag_outfield_mod, ONLY: fmsDiagOutfieldIndex_type, fmsDiagOutfield_type USE fms_diag_fieldbuff_update_mod, ONLY: fieldbuff_update, fieldbuff_copy_missvals, & & fieldbuff_copy_fieldvals + USE fms_string_utils_mod, ONLY: string USE netcdf, ONLY: NF90_INT, NF90_FLOAT, NF90_CHAR @@ -4210,7 +4211,7 @@ SUBROUTINE diag_manager_init(diag_model_subset, time_init, err_msg) END IF if (use_modern_diag) then - CALL fms_diag_object%init(diag_subset_output) + CALL fms_diag_object%init(diag_subset_output, time_init) endif if (.not. use_modern_diag) then CALL parse_diag_table(DIAG_SUBSET=diag_subset_output, ISTAT=mystat, ERR_MSG=err_msg_local) @@ -4224,7 +4225,7 @@ SUBROUTINE diag_manager_init(diag_model_subset, time_init, err_msg) ! open diag field log file IF ( do_diag_field_log.AND.mpp_pe().EQ.mpp_root_pe() ) THEN - open(newunit=diag_log_unit, file='diag_field_log.out', action='WRITE') + open(newunit=diag_log_unit, file='diag_field_log.out.'//string(mpp_pe()), action='WRITE') WRITE (diag_log_unit,'(777a)') & & 'Module', FIELD_LOG_SEPARATOR, 'Field', FIELD_LOG_SEPARATOR, & & 'Long Name', FIELD_LOG_SEPARATOR, 'Units', FIELD_LOG_SEPARATOR, & diff --git a/diag_manager/diag_yaml_format.md b/diag_manager/diag_yaml_format.md index 63ed4630c0..d9e93c3593 100644 --- a/diag_manager/diag_yaml_format.md +++ b/diag_manager/diag_yaml_format.md @@ -14,6 +14,7 @@ The purpose of this document is to explain the diag_table yaml format. - [2.5 Global Meta Data Section](diag_yaml_format.md#25-global-meta-data-section) - [2.6 Sub_region Section](diag_yaml_format.md#26-sub_region-section) - [3. More examples](diag_yaml_format.md#3-more-examples) +- [4. Schema](diag_yaml_format.md#4-schema) ### 1. Converting from legacy ascii diag_table format @@ -99,6 +100,9 @@ Below are some *optional* keys that may be added. 
- **new_file_freq** is a string that defines the frequency and the frequency units (with a space between the frequency number and units) for closing the existing file - **start_time** is an array of 6 integer indicating when to start the file for the first time. It is in the format [year month day hour minute second]. Requires “new_file_freq” - **filename_time** is the time used to set the name of new files when using new_file_freq. The acceptable values are begin (which will use the begining of the file's time bounds), middle (which will use the middle of the file's time bounds), and end (which will use the end of the file's time bounds). The default is middle +- **reduction** is the reduction method that will be used for all the variables in the file. This is overridden if the reduction is specified at the variable level. The acceptable values are average, diurnalXX (where XX is the number of diurnal samples), powXX (where XX is the power level), min, max, none, rms, and sum. +- **kind** is a string that defines the type of variable as it will be written out in the file. This is overridden if the kind is specified at the variable level. Acceptable values are r4, r8, i4, and i8. +- **module** is a string that defines the module where the variable is registered in the model code. This is overridden if the module is specified at the variable level. **Example:** The following will create a new file every 6 hours starting at Jan 1 2020. Variable data will be written to the file every 6 hours. @@ -340,3 +344,8 @@ diag_files: unlimdim: records write_file: false ``` + +### 4. Schema +A formal specification of the file format, in the form of a JSON schema, can be +found in the [gfdl_msd_schemas](https://github.com/NOAA-GFDL/gfdl_msd_schemas) +repository on Github. diff --git a/diag_manager/fms_diag_axis_object.F90 b/diag_manager/fms_diag_axis_object.F90 index a28d22b291..0befd2988a 100644 --- a/diag_manager/fms_diag_axis_object.F90 +++ b/diag_manager/fms_diag_axis_object.F90 @@ -1033,8 +1033,8 @@ subroutine check_if_valid_domain_position(domain_position) select case (domain_position) case (CENTER, NORTH, EAST) case default - call mpp_error(FATAL, "diag_axit_init: Invalid domain_positon. "& - "The acceptable values are NORTH, EAST, CENTER") + call mpp_error(FATAL, "diag_axit_init: Invalid domain_positon. & + &The acceptable values are NORTH, EAST, CENTER") end select end subroutine check_if_valid_domain_position @@ -1045,8 +1045,8 @@ subroutine check_if_valid_direction(direction) select case(direction) case(-1, 0, 1) case default - call mpp_error(FATAL, "diag_axit_init: Invalid direction. "& - "The acceptable values are-1 0 1") + call mpp_error(FATAL, "diag_axit_init: Invalid direction. & + &The acceptable values are-1 0 1") end select end subroutine check_if_valid_direction diff --git a/diag_manager/fms_diag_field_object.F90 b/diag_manager/fms_diag_field_object.F90 index 550037a904..2e582ce438 100644 --- a/diag_manager/fms_diag_field_object.F90 +++ b/diag_manager/fms_diag_field_object.F90 @@ -383,18 +383,18 @@ subroutine fms_register_diag_field_obj & if (present(area)) then if (area < 0) call mpp_error("fms_register_diag_field_obj", & - "The area id passed with field_name"//trim(varname)//" has not been registered."& - "Check that there is a register_diag_field call for the AREA measure and that is in the"& - "diag_table.yaml", FATAL) + "The area id passed with field_name"//trim(varname)//" has not been registered. 
& + &Check that there is a register_diag_field call for the AREA measure and that is in the & + &diag_table.yaml", FATAL) allocate(this%area) this%area = area endif if (present(volume)) then if (volume < 0) call mpp_error("fms_register_diag_field_obj", & - "The volume id passed with field_name"//trim(varname)//" has not been registered."& - "Check that there is a register_diag_field call for the VOLUME measure and that is in the"& - "diag_table.yaml", FATAL) + "The volume id passed with field_name"//trim(varname)//" has not been registered. & + &Check that there is a register_diag_field call for the VOLUME measure and that is in the & + &diag_table.yaml", FATAL) allocate(this%volume) this%volume = volume endif @@ -1610,9 +1610,9 @@ subroutine add_area_volume(this, area, volume) if (area > 0) then this%area = area else - call mpp_error(FATAL, "diag_field_add_cell_measures: the area id is not valid. "& - &"Verify that the area_id passed in to the field:"//this%varname//& - &" is valid and that the field is registered and in the diag_table.yaml") + call mpp_error(FATAL, "diag_field_add_cell_measures: the area id is not valid. & + &Verify that the area_id passed in to the field:"//this%varname// & + " is valid and that the field is registered and in the diag_table.yaml") endif endif @@ -1620,9 +1620,9 @@ subroutine add_area_volume(this, area, volume) if (volume > 0) then this%volume = volume else - call mpp_error(FATAL, "diag_field_add_cell_measures: the volume id is not valid. "& - &"Verify that the volume_id passed in to the field:"//this%varname//& - &" is valid and that the field is registered and in the diag_table.yaml") + call mpp_error(FATAL, "diag_field_add_cell_measures: the volume id is not valid. & + &Verify that the volume_id passed in to the field:"//this%varname// & + " is valid and that the field is registered and in the diag_table.yaml") endif endif diff --git a/diag_manager/fms_diag_file_object.F90 b/diag_manager/fms_diag_file_object.F90 index 060dfd8c3a..269accc298 100644 --- a/diag_manager/fms_diag_file_object.F90 +++ b/diag_manager/fms_diag_file_object.F90 @@ -35,7 +35,7 @@ module fms_diag_file_object_mod get_base_second, time_unit_list, time_average, time_rms, time_max, time_min, time_sum, & time_diurnal, time_power, time_none, avg_name, no_units, pack_size_str, & middle_time, begin_time, end_time, MAX_STR_LEN, index_gridtype, latlon_gridtype, & - null_gridtype, flush_nc_files + null_gridtype, flush_nc_files, diag_init_time use time_manager_mod, only: time_type, operator(>), operator(/=), operator(==), get_date, get_calendar_type, & VALID_CALENDAR_TYPES, operator(>=), date_to_string, & OPERATOR(/), OPERATOR(+), operator(<) @@ -259,9 +259,13 @@ logical function fms_diag_files_object_init (files_array) !> Set the start_time of the file to the base_time and set up the *_output variables obj%done_writing_data = .false. - obj%start_time = get_base_time() - obj%last_output = get_base_time() - obj%model_time = get_base_time() + + !! Set this to the time passed in to diag_manager_init + !! This will be the base_time if nothing was passed in + !! This time is appended to the filename if the prepend_date namelist is .True. + obj%start_time = diag_init_time + obj%last_output = diag_init_time + obj%model_time = diag_init_time obj%next_output = diag_time_inc(obj%start_time, obj%get_file_freq(), obj%get_file_frequnit()) obj%next_next_output = diag_time_inc(obj%next_output, obj%get_file_freq(), obj%get_file_frequnit()) @@ -1003,20 +1007,21 @@ end subroutine define_new_subaxis !! 
So it needs to make sure that the start_time is the same for each variable. The initial value is the base_time subroutine add_start_time(this, start_time) class(fmsDiagFile_type), intent(inout) :: this !< The file object - TYPE(time_type), intent(in) :: start_time !< Start time to add to the fileobj + TYPE(time_type), intent(in) :: start_time !< Start time passed into register_diag_field - !< If the start_time sent in is equal to the base_time return because - !! this%start_time was already set to the base_time - if (start_time .eq. get_base_time()) return + !< If the start_time sent in is equal to the diag_init_time return because + !! this%start_time was already set to the diag_init_time + if (start_time .eq. diag_init_time) return - if (this%start_time .ne. get_base_time()) then - !> If the this%start_time is not equal to the base_time from the diag_table - !! this%start_time was already updated so make sure it is the same or error out + if (this%start_time .ne. diag_init_time) then + !> If the this%start_time is not equal to the diag_init_time from the diag_table + !! this%start_time was already updated so make sure it is the same for the current variable + !! or error out if (this%start_time .ne. start_time)& - call mpp_error(FATAL, "The variables associated with the file:"//this%get_file_fname()//" have"& - &" different start_time") + call mpp_error(FATAL, "The variables associated with the file:"//this%get_file_fname()//" have & + &different start_time") else - !> If the this%start_time is equal to the base_time, + !> If the this%start_time is equal to the diag_init_time, !! simply update it with the start_time and set up the *_output variables this%model_time = start_time this%start_time = start_time diff --git a/diag_manager/fms_diag_input_buffer.F90 b/diag_manager/fms_diag_input_buffer.F90 index 92952ecadc..0a4f0737e0 100644 --- a/diag_manager/fms_diag_input_buffer.F90 +++ b/diag_manager/fms_diag_input_buffer.F90 @@ -133,8 +133,8 @@ function allocate_input_buffer_object(this, input_data, axis_ids, diag_axis) & allocate(integer(kind=i4_kind) :: this%buffer(length(1), length(2), length(3), length(4))) this%buffer = 0_i8_kind class default - err_msg = "The data input is not one of the supported types."& - "Only r4, r8, i4, and i8 types are supported." + err_msg = "The data input is not one of the supported types. & + &Only r4, r8, i4, and i8 types are supported." end select this%weight = 1.0_r8_kind diff --git a/diag_manager/fms_diag_object.F90 b/diag_manager/fms_diag_object.F90 index 70141a0077..c985a6c30d 100644 --- a/diag_manager/fms_diag_object.F90 +++ b/diag_manager/fms_diag_object.F90 @@ -22,7 +22,7 @@ module fms_diag_object_mod &DIAG_FIELD_NOT_FOUND, diag_not_registered, max_axes, TWO_D_DOMAIN, & &get_base_time, NULL_AXIS_ID, get_var_type, diag_not_registered, & &time_none, time_max, time_min, time_sum, time_average, time_diurnal, & - &time_power, time_rms, r8, NO_DOMAIN + &time_power, time_rms, r8, NO_DOMAIN, diag_init_time USE time_manager_mod, ONLY: set_time, set_date, get_time, time_type, OPERATOR(>=), OPERATOR(>),& & OPERATOR(<), OPERATOR(==), OPERATOR(/=), OPERATOR(/), OPERATOR(+), ASSIGNMENT(=), get_date, & @@ -118,14 +118,23 @@ module fms_diag_object_mod !! Reads the diag_table.yaml and fills in the yaml object !! Allocates the diag manager object arrays for files, fields, and buffers !! 
Initializes variables -subroutine fms_diag_object_init (this,diag_subset_output) +subroutine fms_diag_object_init (this,diag_subset_output, time_init) class(fmsDiagObject_type) :: this !< Diag mediator/controller object integer :: diag_subset_output !< Subset of the diag output? + INTEGER, DIMENSION(6), OPTIONAL, INTENT(IN) :: time_init !< Model time diag_manager initialized + #ifdef use_yaml if (this%initialized) return ! allocate(diag_objs(get_num_unique_fields())) CALL diag_yaml_object_init(diag_subset_output) + + !! Doing this here because the base_time is not set until the yaml is parsed + !! if time_init is present, it will be set in diag_manager_init + if (.not. present(time_init)) then + diag_init_time = get_base_time() + endif + this%axes_initialized = fms_diag_axis_object_init(this%diag_axis) this%files_initialized = fms_diag_files_object_init(this%FMS_diag_files) this%fields_initialized = fms_diag_fields_object_init(this%FMS_diag_fields) @@ -502,8 +511,8 @@ FUNCTION fms_diag_axis_init(this, axis_name, axis_data, units, cart_name, axis_l type is (fmsDiagFullAxis_type) if(present(edges)) then if (edges < 0 .or. edges > this%registered_axis) & - call mpp_error(FATAL, "diag_axit_init: The edge axis has not been defined. "& - "Call diag_axis_init for the edge axis first") + call mpp_error(FATAL, "diag_axit_init: The edge axis has not been defined. & + &Call diag_axis_init for the edge axis first") select type (edges_axis => this%diag_axis(edges)%axis) type is (fmsDiagFullAxis_type) edges_name = edges_axis%get_axis_name() diff --git a/diag_manager/fms_diag_yaml.F90 b/diag_manager/fms_diag_yaml.F90 index 17ce86e4d5..07b012ef6f 100644 --- a/diag_manager/fms_diag_yaml.F90 +++ b/diag_manager/fms_diag_yaml.F90 @@ -139,6 +139,15 @@ module fms_diag_yaml_mod !! and values(dim=2) to be !! added as global meta data to !! the file + character (len=:), allocatable :: default_var_precision !< The precision for all of the variables in the file + !! This may be overridden if the precision was defined + !! at the variable level + character (len=:), allocatable :: default_var_reduction !< The reduction for all of the variables in the file + !! This may be overridden if the reduction was defined at + !! the variable level + character (len=:), allocatable :: default_var_module !< The module for all of the variables in the file + !! This may be overridden if the module was defined at the + !! variable level contains !> All getter functions (functions named get_x(), for member field named x) @@ -468,7 +477,8 @@ subroutine diag_yaml_object_init(diag_subset_output) diag_yaml%diag_fields(var_count)%var_axes_names = "" diag_yaml%diag_fields(var_count)%var_file_is_subregional = diag_yaml%diag_files(file_count)%has_file_sub_region() - call fill_in_diag_fields(diag_yaml_id, var_ids(j), diag_yaml%diag_fields(var_count), allow_averages) + call fill_in_diag_fields(diag_yaml_id, diag_yaml%diag_files(file_count), var_ids(j), & + diag_yaml%diag_fields(var_count), allow_averages) !> Save the variable name in the diag_file type diag_yaml%diag_files(file_count)%file_varlist(file_var_count) = diag_yaml%diag_fields(var_count)%var_varname @@ -604,12 +614,19 @@ subroutine fill_in_diag_files(diag_yaml_id, diag_file_id, yaml_fileobj) &" has multiple global_meta blocks") endif + call diag_get_value_from_key(diag_yaml_id, diag_file_id, "reduction", yaml_fileobj%default_var_reduction, & + is_optional=.true.) 
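For illustration, a minimal `diag_table.yaml` file entry using the new file-level `reduction`, `kind`, and `module` defaults (read here in `fill_in_diag_files` and documented in `diag_yaml_format.md` above) might look like the sketch below; the file name, module name, and variable names are hypothetical and not taken from this patch:

```yaml
diag_files:
- file_name: ocean_daily        # hypothetical file name
  freq: 1 days
  time_units: hours
  unlimdim: time
  # File-level defaults: applied to every variable unless overridden per variable
  reduction: average
  kind: r4
  module: ocean_model           # hypothetical module name
  varlist:
  - var_name: sst               # inherits average / r4 / ocean_model
  - var_name: ssh
    reduction: max              # a per-variable value still overrides the file-level default
```

With such a file block, the per-variable `reduction`, `kind`, and `module` keys become optional, which is what the `is_optional=.true.` reads added in `fill_in_diag_fields` below rely on.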
+ call diag_get_value_from_key(diag_yaml_id, diag_file_id, "kind", yaml_fileobj%default_var_precision, & + is_optional=.true.) + call diag_get_value_from_key(diag_yaml_id, diag_file_id, "module", yaml_fileobj%default_var_module, & + is_optional=.true.) end subroutine !> @brief Fills in a diagYamlFilesVar_type with the contents of a variable block in !! diag_table.yaml -subroutine fill_in_diag_fields(diag_file_id, var_id, field, allow_averages) +subroutine fill_in_diag_fields(diag_file_id, yaml_fileobj, var_id, field, allow_averages) integer, intent(in) :: diag_file_id !< Id of the file block in the yaml file + type(diagYamlFiles_type), intent(in) :: yaml_fileobj !< The yaml file obj for the variables integer, intent(in) :: var_id !< Id of the variable block in the yaml file type(diagYamlFilesVar_type), intent(inout) :: field !< diagYamlFilesVar_type obj to read the contents into logical, intent(in) :: allow_averages !< .True. if averages are allowed for this file @@ -623,8 +640,17 @@ subroutine fill_in_diag_fields(diag_file_id, var_id, field, allow_averages) character(len=:), ALLOCATABLE :: buffer !< buffer to store the reduction method as it is read from the yaml call diag_get_value_from_key(diag_file_id, var_id, "var_name", field%var_varname) - call diag_get_value_from_key(diag_file_id, var_id, "reduction", buffer) + + if (yaml_fileobj%default_var_reduction .eq. "") then + !! If there is no default, the reduction method is required + call diag_get_value_from_key(diag_file_id, var_id, "reduction", buffer) + else + call diag_get_value_from_key(diag_file_id, var_id, "reduction", buffer, is_optional=.true.) + !! If the reduction was not set for the variable, use the file-level default + if (trim(buffer) .eq. "") buffer = yaml_fileobj%default_var_reduction + endif call set_field_reduction(field, buffer) + deallocate(buffer) if (.not. allow_averages) then if (field%var_reduction .ne. time_none) & "Check your diag_table.yaml for the field:"//trim(field%var_varname)) endif - call diag_get_value_from_key(diag_file_id, var_id, "module", field%var_module) - deallocate(buffer) - call diag_get_value_from_key(diag_file_id, var_id, "kind", buffer) + if (yaml_fileobj%default_var_module .eq. "") then + call diag_get_value_from_key(diag_file_id, var_id, "module", field%var_module) + else + call diag_get_value_from_key(diag_file_id, var_id, "module", buffer, is_optional=.true.) + !! If the module was not set for the variable, use the file-level default + if (trim(buffer) .eq. "") then + field%var_module = yaml_fileobj%default_var_module + else + field%var_module = trim(buffer) + endif + deallocate(buffer) + endif + + if (yaml_fileobj%default_var_precision .eq. "") then + !! If there is no default, the kind is required + call diag_get_value_from_key(diag_file_id, var_id, "kind", buffer) + else + call diag_get_value_from_key(diag_file_id, var_id, "kind", buffer, is_optional=.true.) + !! If the kind was not set for the variable, use the file-level default + if (trim(buffer) .eq. "") buffer = yaml_fileobj%default_var_precision + endif call set_field_kind(field, buffer) call diag_get_value_from_key(diag_file_id, var_id, "output_name", field%var_outname, is_optional=.true.) @@ -936,8 +980,8 @@ function set_valid_time_units(time_units, error_msg) & time_units_int = DIAG_YEARS case default time_units_int =DIAG_NULL - call mpp_error(FATAL, trim(error_msg)//" is not valid. 
Acceptable values are "& - "seconds, minutes, hours, days, months, years") + call mpp_error(FATAL, trim(error_msg)//" is not valid. Acceptable values are & + &seconds, minutes, hours, days, months, years") end select end function set_valid_time_units diff --git a/diag_manager/schema.diag b/diag_manager/schema.diag deleted file mode 100644 index b232577ff9..0000000000 --- a/diag_manager/schema.diag +++ /dev/null @@ -1,141 +0,0 @@ -{ - "$schema": "http://json-schema.org/draft-04/schema#", - "type": "object", - "required": ["title", "base_date"], - "additionalProperties": false, - "properties": { - "title": { - "type": "string" - }, - "base_date": { - "type": "string" - }, - "diag_files": { - "type": "array", - "items": { - "type": "object", - "required": ["file_name", "freq", "time_units", "unlimdim"], - "additionalProperties": false, - "properties": { - "file_name": { - "type": "string" - }, - "freq": { - "anyOf": [ - {"type": "string"}, - {"type": "number"} - ], - "pattern": "^-[1]{1,1} *[ seconds| minutes| hours| days| months| years]*|^0&|^[1-9]+ [seconds|minutes|hours|days|months|years]{1,1}" - }, - "time_units": { - "type": "string", - "enum": ["seconds", "minutes", "hours", "days", "months", "years"] - }, - "unlimdim": { - "type": "string" - }, - "write_file": { - "type": "boolean" - }, - "global_meta": { - }, - "sub_region": { - "type": "array", - "minItems": 1, - "maxItems": 1, - "required": ["grid_type", "corner1", "corner2", "corner3", "corner4"], - "properties": { - "grid_type": { - "type": "string", - "enum": ["indices", "latlon"] - }, - "corner1": { - "type": "array", - "minItems": 2, - "maxItems": 2, - "items": { - "type": "number" - } - }, - "corner2": { - "type": "array", - "minItems": 2, - "maxItems": 2, - "items": { - "type": "number" - } - }, - "corner3": { - "type": "array", - "minItems": 2, - "maxItems": 2, - "items": { - "type": "number" - } - }, - "corner4": { - "type": "array", - "minItems": 2, - "maxItems": 2, - "items": { - "type": "number" - } - }, - "tile": { - "type": "number" - } - } - }, - "new_file_freq": { - "type": "string", - "pattern": "[0-9]{1,} [a-z]{1,}" - }, - "start_time": { - "type": "string" - }, - "file_duration": { - "type": "string" - }, - "varlist": { - "type": "array", - "items": { - "type": "object", - "required": ["var_name", "reduction", "module", "kind"], - "additionalProperties": false, - "properties": { - "kind": { - "type": "string", - "enum": ["r4", "r8", "i4", "i8"] - }, - "module": { - "type": "string" - }, - "reduction": { - "type": "string", - "pattern": "^average$|^min$|^max$|^none$|^rms$|^sum$|^diurnal[1-9]+|^pow[1-9]+" - }, - "var_name": { - "type": "string" - }, - "write_var": { - "type": "boolean" - }, - "output_name": { - "type": "string" - }, - "long_name": { - "type": "string" - }, - "attributes": { - }, - "zbounds": { - "type": "string" - } - } - } - } - } - } - } - } -} diff --git a/field_manager/field_manager.F90 b/field_manager/field_manager.F90 index 8b56e54fdb..8d9804c2e1 100644 --- a/field_manager/field_manager.F90 +++ b/field_manager/field_manager.F90 @@ -593,7 +593,7 @@ subroutine read_field_table_yaml(nfields, table_name) character(len=fm_string_len) :: tbl_name !< field_table yaml file character(len=fm_string_len) :: method_control !< field_table yaml file integer :: h, i, j, k, l, m !< dummy integer buffer -type (fmTable_t) :: my_table !< the field table +type (fmTable_t) :: my_table !< the field table integer :: model !< model assocaited with the current field character(len=fm_path_name_len) :: list_name !< 
field_manager list name character(len=fm_string_len) :: subparamvalue !< subparam value to be used when defining new name @@ -614,14 +614,12 @@ subroutine read_field_table_yaml(nfields, table_name) return endif +! Construct my_table object +call build_fmTable(my_table, trim(tbl_name)) -! Define my_table object and read in number of fields -my_table = fmTable_t(trim(tbl_name)) -call my_table%get_blocks -call my_table%create_children -do h=1,my_table%nchildren - do i=1,my_table%children(h)%nchildren - do j=1,my_table%children(h)%children(i)%nchildren +do h=1,size(my_table%types) + do i=1,size(my_table%types(h)%models) + do j=1,size(my_table%types(h)%models(i)%variables) num_fields = num_fields + 1 end do end do @@ -630,9 +628,9 @@ subroutine read_field_table_yaml(nfields, table_name) allocate(fields(num_fields)) current_field = 0 -do h=1,my_table%nchildren - do i=1,my_table%children(h)%nchildren - select case (my_table%children(h)%children(i)%name) +do h=1,size(my_table%types) + do i=1,size(my_table%types(h)%models) + select case (my_table%types(h)%models(i)%name) case ('coupler_mod') model = MODEL_COUPLER case ('atmos_mod') @@ -645,58 +643,58 @@ subroutine read_field_table_yaml(nfields, table_name) model = MODEL_ICE case default call mpp_error(FATAL, trim(error_header)//'The model name is unrecognised : & - &'//trim(my_table%children(h)%children(i)%name)) + &'//trim(my_table%types(h)%models(i)%name)) end select - do j=1,my_table%children(h)%children(i)%nchildren + do j=1,size(my_table%types(h)%models(i)%variables) current_field = current_field + 1 - list_name = list_sep//lowercase(trim(my_table%children(h)%children(i)%name))//list_sep//& - lowercase(trim(my_table%children(h)%name))//list_sep//& - lowercase(trim(my_table%children(h)%children(i)%children(j)%name)) + list_name = list_sep//lowercase(trim(my_table%types(h)%models(i)%name))//list_sep//& + lowercase(trim(my_table%types(h)%name))//list_sep//& + lowercase(trim(my_table%types(h)%models(i)%variables(j)%name)) index_list_name = fm_new_list(list_name, create = .true.) if ( index_list_name == NO_FIELD ) & call mpp_error(FATAL, trim(error_header)//'Could not set field list for '//trim(list_name)) fm_success = fm_change_list(list_name) fields(current_field)%model = model - fields(current_field)%field_name = lowercase(trim(my_table%children(h)%children(i)%children(j)%name)) - fields(current_field)%field_type = lowercase(trim(my_table%children(h)%name)) - fields(current_field)%num_methods = size(my_table%children(h)%children(i)%children(j)%key_ids) + fields(current_field)%field_name = lowercase(trim(my_table%types(h)%models(i)%variables(j)%name)) + fields(current_field)%field_type = lowercase(trim(my_table%types(h)%name)) + fields(current_field)%num_methods = size(my_table%types(h)%models(i)%variables(j)%keys) allocate(fields(current_field)%methods(fields(current_field)%num_methods)) if(fields(current_field)%num_methods.gt.0) then - if (my_table%children(h)%children(i)%children(j)%nchildren .gt. 0) subparams = .true. - do k=1,size(my_table%children(h)%children(i)%children(j)%keys) + subparams = (size(my_table%types(h)%models(i)%variables(j)%attributes) .gt. 
0) + do k=1,size(my_table%types(h)%models(i)%variables(j)%keys) fields(current_field)%methods(k)%method_type = & - lowercase(trim(my_table%children(h)%children(i)%children(j)%keys(k))) + lowercase(trim(my_table%types(h)%models(i)%variables(j)%keys(k))) fields(current_field)%methods(k)%method_name = & - lowercase(trim(my_table%children(h)%children(i)%children(j)%values(k))) + lowercase(trim(my_table%types(h)%models(i)%variables(j)%values(k))) if (.not.subparams) then - call new_name_yaml(list_name, my_table%children(h)%children(i)%children(j)%keys(k),& - my_table%children(h)%children(i)%children(j)%values(k) ) + call new_name(list_name, my_table%types(h)%models(i)%variables(j)%keys(k),& + my_table%types(h)%models(i)%variables(j)%values(k) ) else subparamindex=-1 - do l=1,my_table%children(h)%children(i)%children(j)%nchildren - if(lowercase(trim(my_table%children(h)%children(i)%children(j)%children(l)%paramname)).eq.& + do l=1,size(my_table%types(h)%models(i)%variables(j)%attributes) + if(lowercase(trim(my_table%types(h)%models(i)%variables(j)%attributes(l)%paramname)).eq.& lowercase(trim(fields(current_field)%methods(k)%method_type))) then subparamindex = l exit end if end do if (subparamindex.eq.-1) then - call new_name_yaml(list_name, my_table%children(h)%children(i)%children(j)%keys(k),& - my_table%children(h)%children(i)%children(j)%values(k) ) + call new_name(list_name, my_table%types(h)%models(i)%variables(j)%keys(k),& + my_table%types(h)%models(i)%variables(j)%values(k) ) else - do m=1,size(my_table%children(h)%children(i)%children(j)%children(subparamindex)%keys) + do m=1,size(my_table%types(h)%models(i)%variables(j)%attributes(subparamindex)%keys) method_control = " " subparamvalue = " " - if (trim(my_table%children(h)%children(i)%children(j)%values(k)).eq.'fm_yaml_null') then + if (trim(my_table%types(h)%models(i)%variables(j)%values(k)).eq.'fm_yaml_null') then fm_yaml_null = '' else - fm_yaml_null = trim(my_table%children(h)%children(i)%children(j)%values(k))//'/' + fm_yaml_null = trim(my_table%types(h)%models(i)%variables(j)%values(k))//'/' end if - method_control = trim(my_table%children(h)%children(i)%children(j)%keys(k))//"/"//& + method_control = trim(my_table%types(h)%models(i)%variables(j)%keys(k))//"/"//& &trim(fm_yaml_null)//& - &trim(my_table%children(h)%children(i)%children(j)%children(subparamindex)%keys(m)) - subparamvalue = trim(my_table%children(h)%children(i)%children(j)%children(subparamindex)%values(m)) - call new_name_yaml(list_name, method_control, subparamvalue) + &trim(my_table%types(h)%models(i)%variables(j)%attributes(subparamindex)%keys(m)) + subparamvalue = trim(my_table%types(h)%models(i)%variables(j)%attributes(subparamindex)%values(m)) + call new_name(list_name, method_control, subparamvalue) end do end if end if @@ -707,7 +705,6 @@ subroutine read_field_table_yaml(nfields, table_name) end do if (present(nfields)) nfields = num_fields -call my_table%destruct end subroutine read_field_table_yaml !> @brief Subroutine to add new values to list parameters. diff --git a/field_manager/fm_yaml.F90 b/field_manager/fm_yaml.F90 index 32cc63cb1b..3dabc2d093 100644 --- a/field_manager/fm_yaml.F90 +++ b/field_manager/fm_yaml.F90 @@ -16,6 +16,7 @@ !* You should have received a copy of the GNU Lesser General Public !* License along with FMS. If not, see . 
!*********************************************************************** + !> @defgroup fm_yaml_mod fm_yaml_mod !> @ingroup fm_yaml !> @brief Reads entries from a field table yaml into a @@ -31,493 +32,330 @@ !> @{ module fm_yaml_mod #ifdef use_yaml + use yaml_parser_mod +use mpp_mod, only: mpp_error, fatal implicit none private -integer :: i, table_i, type_i, model_i, var_i, var_j, attr_j !< counters - !> @} -! close documentation grouping -!> @brief This type represents the subparameters for a given variable parameter. -!> This type contains the name of the associated parameter, the key / value pairs for this subparameter, -!! and the following methods: getting names and properties, and self destruction. +public :: build_fmTable + +!> @brief This type represents a subparameter block for a given variable parameter. +!> This type contains the name of the associated parameter and the subparameter key/value pairs !> @ingroup fm_yaml_mod type, public :: fmAttr_t - integer :: yfid !< file id of a yaml file integer :: id !< block id of this var character(len=:), allocatable :: paramname !< name of associated parameter - character(len=:), dimension(:), allocatable :: keys !< name of the variable - character(len=:), dimension(:), allocatable :: values !< name of the variable - contains - procedure :: destruct => destruct_fmAttr_t - procedure :: get_names_and_props => get_name_fmAttr_t + character(len=:), dimension(:), allocatable :: keys !< name of the attribute + character(len=:), dimension(:), allocatable :: values !< value of the attribute end type fmAttr_t !> @brief This type represents the entries for a given variable, e.g. dust. -!> This type contains the name of the variable, the block id, the key / value pairs for this variable's parameters, -!! any applicable subparameters, and the following methods: -!! getting blocks, getting names and properties, creating children (subparameters), and self destruction. +!> This type contains the name of the variable, the block id, the key/value pairs for the +!> variable's parameters, and any applicable subparameters !> @ingroup fm_yaml_mod type, public :: fmVar_t - integer :: yfid !< file id of a yaml file integer :: id !< block id of this var character(len=:), allocatable :: name !< name of the variable - integer, dimension(:), allocatable :: key_ids !< key ids for params character(len=:), dimension(:), allocatable :: keys !< names of params character(len=:), dimension(:), allocatable :: values !< values of params - character(len=9) :: blockname="subparams" !< name of the root block - integer :: nchildren !< number of attributes - integer, allocatable :: child_ids(:) !< array of attribute ids - type (fmAttr_t), allocatable :: children(:) !< attributes in this var - contains - procedure :: get_blocks => get_blocks_fmVar_t - procedure :: destruct => destruct_fmVar_t - procedure :: get_names_and_props => get_name_fmVar_t - procedure :: create_children => create_children_fmVar_t + type (fmAttr_t), allocatable :: attributes(:) !< attributes in this var end type fmVar_t !> @brief This type represents the entries for a given model, e.g. land, ocean, atmosphere. -!> This type contains the name of the model, the block id, the variables within this model, -!! and the following methods: getting blocks, getting the name, creating children (variables), and self destruction. 
+!> This type contains the name of the model, the block id, and the variables within this model !> @ingroup fm_yaml_mod type, public :: fmModel_t - integer :: yfid !< file id of a yaml file integer :: id !< block id of this model character(len=:), allocatable :: name !< name of the model - character(len=7) :: blockname="varlist" !< name of the root block - integer :: nchildren !< number of var types - integer, allocatable :: child_ids(:) !< array of var ids - type (fmVar_t), allocatable :: children(:) !< variables in this model - contains - procedure :: get_blocks => get_blocks_fmModel_t - procedure :: destruct => destruct_fmModel_t - procedure :: get_name => get_name_fmModel_t - procedure :: create_children => create_children_fmModel_t + type (fmVar_t), allocatable :: variables(:) !< variables in this model end type fmModel_t !> @brief This type represents the entries for a specific field type, e.g. a tracer. -!> This type contains the name of the field type, the block id, the models within this field type, -!! and the following methods: getting blocks, getting the name, creating children (models), and self destruction. +!> This type contains the name of the field type, the block id, and the models within this field type !> @ingroup fm_yaml_mod type, public :: fmType_t - integer :: yfid !< file id of a yaml file integer :: id !< block id of this type character(len=:), allocatable :: name !< name of the type - character(len=7) :: blockname="modlist" !< name of the root block - integer :: nchildren !< number of model types - integer, allocatable :: child_ids(:) !< array of model ids - type (fmModel_t), allocatable :: children(:) !< models in this type - contains - procedure :: get_blocks => get_blocks_fmType_t - procedure :: destruct => destruct_fmType_t - procedure :: get_name => get_name_fmType_t - procedure :: create_children => create_children_fmType_t + type (fmModel_t), allocatable :: models(:) !< models in this type end type fmType_t -!> @brief This type represents the entirety of the field table. -!> This type contains the file id of the yaml file, the field types within this table, and the following methods: -!! getting blocks, creating children (field types), and self destruction. +!> @brief This type contains the field types within a field table. !> @ingroup fm_yaml_mod type, public :: fmTable_t - integer :: yfid !< file id of a yaml file - character(len=11) :: blockname="field_table" !< name of the root block - integer :: nchildren !< number of field types - integer, allocatable :: child_ids(:) !< array of type ids - type (fmType_t), allocatable :: children(:) !< field types in this table - contains - procedure :: get_blocks => get_blocks_fmTable_t - procedure :: destruct => destruct_fmTable_t - procedure :: create_children => create_children_fmTable_t + type (fmType_t), allocatable :: types(:) !< field types in this table end type fmTable_t -!> @brief Interface to construct the fmTable type. -!> @ingroup fm_yaml_mod -interface fmTable_t - module procedure construct_fmTable_t -end interface fmTable_t - -!> @brief Interface to construct the fmType type. -!> @ingroup fm_yaml_mod -interface fmType_t - module procedure construct_fmType_t -end interface fmType_t - -!> @brief Interface to construct the fmModel type. -!> @ingroup fm_yaml_mod -interface fmModel_t - module procedure construct_fmModel_t -end interface fmModel_t - -!> @brief Interface to construct the fmVar type. 
-!> @ingroup fm_yaml_mod -interface fmVar_t - module procedure construct_fmVar_t -end interface fmVar_t - -!> @brief Interface to construct the fmAttr type. -!> @ingroup fm_yaml_mod -interface fmAttr_t - module procedure construct_fmAttr_t -end interface fmAttr_t - contains !> @addtogroup fm_yaml_mod !> @{ -!> @brief Function to construct the fmTable_t type. -!! -!> Given an optional filename, construct the fmTable type using routines from the yaml parser. -!! @returns the fmTable type -function construct_fmTable_t(filename) result(this) - type (fmTable_t) :: this !< the field table +!> @brief Subroutine to populate an fmTable by reading a yaml file, given an optional filename. +subroutine build_fmTable(fmTable, filename) + type(fmTable_t), intent(out) :: fmTable !< the field table character(len=*), intent(in), optional :: filename !< the name of the yaml file + integer :: yfid !< file id of the yaml file + integer :: ntypes !< number of field types attached to this table + integer :: i !< Loop counter if (.not. present(filename)) then - this%yfid = open_and_parse_file("field_table.yaml") + yfid = open_and_parse_file("field_table.yaml") else - this%yfid = open_and_parse_file(trim(filename)) + yfid = open_and_parse_file(trim(filename)) endif - this%nchildren = get_num_blocks(this%yfid, this%blockname) - allocate(this%child_ids(this%nchildren)) -end function construct_fmTable_t -!> @brief Function to construct the fmType_t type. -!! -!> Given the appropriate block id, construct the fmType type using routines from the yaml parser. -!! @returns the fmType type -function construct_fmType_t(in_yfid, in_id) result(this) - type (fmType_t) :: this !< the type object - integer, intent(in) :: in_yfid !< yaml file id - integer, intent(in) :: in_id !< block_id of type from parent - - this%yfid = in_yfid - this%id = in_id - this%nchildren = get_num_blocks(this%yfid, this%blockname, this%id) - allocate(this%child_ids(this%nchildren)) -end function construct_fmType_t - -!> @brief Function to construct the fmModel_t type. -!! -!> Given the appropriate block id, construct the fmModel type using routines from the yaml parser. -!! @returns the fmModel type -function construct_fmModel_t(in_yfid, in_id) result(this) - type (fmModel_t) :: this !< the model object - integer, intent(in) :: in_yfid !< yaml file id - integer, intent(in) :: in_id !< block_id of model from parent - - this%yfid = in_yfid - this%id = in_id - this%nchildren = get_num_blocks(this%yfid, this%blockname, this%id) - allocate(this%child_ids(this%nchildren)) -end function construct_fmModel_t - -!> @brief Function to construct the fmVar_t type. -!! -!> Given the appropriate block id, construct the fmVar type using routines from the yaml parser. -!! @returns the fmVar type -function construct_fmVar_t(in_yfid, in_id) result(this) - type (fmVar_t) :: this !< the var object - integer, intent(in) :: in_yfid !< yaml file id - integer, intent(in) :: in_id !< block_id of var from parent - - this%yfid = in_yfid - this%id = in_id - this%nchildren = get_num_blocks(this%yfid, this%blockname, this%id) - allocate(this%child_ids(this%nchildren)) -end function construct_fmVar_t - -!> @brief Function to construct the fmAttr_t type. -!! -!> Given the appropriate block id, construct the fmAttr type using routines from the yaml parser. -!! 
@returns the fmAttr type -function construct_fmAttr_t(in_yfid, in_id) result(this) - type (fmAttr_t) :: this !< the var object - integer, intent(in) :: in_yfid !< yaml file id - integer, intent(in) :: in_id !< block_id of var from parent - - this%yfid = in_yfid - this%id = in_id -end function construct_fmAttr_t - -!> @brief Subroutine to destruct the fmTable_t type. -!! -!> Deallocates this type's allocatables and calls the destruct routine for this type's children. -subroutine destruct_fmTable_t(this) - class (fmTable_t) :: this !< the field table - - if (allocated(this%child_ids)) deallocate(this%child_ids) - if (allocated(this%children)) then - do table_i=1,this%nchildren - call destruct_fmType_t(this%children(table_i)) - end do - end if - if (allocated(this%children)) deallocate(this%children) -end subroutine destruct_fmTable_t - -!> @brief Subroutine to destruct the fmType_t type. -!! -!> Deallocates this type's allocatables and calls the destruct routine for this type's children. -subroutine destruct_fmType_t(this) - class (fmType_t) :: this !< type object - - if (allocated(this%name)) deallocate(this%name) - if (allocated(this%child_ids)) deallocate(this%child_ids) - if (allocated(this%children)) then - do type_i=1,this%nchildren - call destruct_fmModel_t(this%children(type_i)) - end do - end if - if (allocated(this%children)) deallocate(this%children) -end subroutine destruct_fmType_t - -!> @brief Subroutine to destruct the fmModel_t type. -!! -!> Deallocates this type's allocatables and calls the destruct routine for this type's children. -subroutine destruct_fmModel_t(this) - class (fmModel_t) :: this !< model object - - if (allocated(this%name)) deallocate(this%name) - if (allocated(this%child_ids)) deallocate(this%child_ids) - if (allocated(this%children)) then - do model_i=1,this%nchildren - call destruct_fmVar_t(this%children(model_i)) - end do - end if - if (allocated(this%children)) deallocate(this%children) -end subroutine destruct_fmModel_t - -!> @brief Subroutine to destruct the fmVar_t type. -!! -!> Deallocates this type's allocatables and calls the destruct routine for this type's children. -subroutine destruct_fmVar_t(this) - class (fmVar_t) :: this !< variable object - - if (allocated(this%name)) deallocate(this%name) - if (allocated(this%key_ids)) deallocate(this%key_ids) - if (allocated(this%keys)) deallocate(this%keys) - if (allocated(this%values)) deallocate(this%values) - if (allocated(this%child_ids)) deallocate(this%child_ids) - if (allocated(this%children)) then - do var_i=1,this%nchildren - call destruct_fmAttr_t(this%children(var_i)) - end do - end if - if (allocated(this%children)) deallocate(this%children) -end subroutine destruct_fmVar_t - -!> @brief Subroutine to destruct the fmAttr_t type. -!! -!> Deallocates this type's allocatables. -subroutine destruct_fmAttr_t(this) - class (fmAttr_t) :: this !< variable object - - if (allocated(this%paramname)) deallocate(this%paramname) - if (allocated(this%keys)) deallocate(this%keys) - if (allocated(this%values)) deallocate(this%values) -end subroutine destruct_fmAttr_t - -!> @brief gets the block ids for children of this type. -subroutine get_blocks_fmTable_t(this) - class (fmTable_t) :: this !< field table object - - call get_block_ids(this%yfid, this%blockname, this%child_ids) -end subroutine get_blocks_fmTable_t - -!> @brief gets the block ids for children of this type. 
-subroutine get_blocks_fmType_t(this) - class (fmType_t) :: this !< type object - - call get_block_ids(this%yfid, this%blockname, this%child_ids, this%id) -end subroutine get_blocks_fmType_t - -!> @brief Gets the name of this field type and adds it to the fmType_t. -!! Note that there should only be one key value pair (which is why the get_key_value call uses key_ids(1)). -subroutine get_name_fmType_t(this) - class (fmType_t) :: this !< type object - integer :: nkeys !< numkeys - integer, allocatable :: key_ids(:) !< array of key ids - character(len=256) :: key_value !< the value of a key - - nkeys = get_nkeys(this%yfid, this%id) - allocate(key_ids(nkeys)) - call get_key_ids(this%yfid, this%id, key_ids) - call get_key_value(this%yfid, key_ids(1), key_value) - this%name = trim(key_value) -end subroutine get_name_fmType_t - -!> @brief gets the block ids for children of this type. -subroutine get_blocks_fmModel_t(this) - class (fmModel_t) :: this !< model object - - call get_block_ids(this%yfid, this%blockname, this%child_ids, this%id) -end subroutine get_blocks_fmModel_t - -!> @brief Gets the name of this model and adds it to the fmModel_t. -!! Note that there should only be one key value pair (which is why the get_key_value call uses key_ids(1)). -subroutine get_name_fmModel_t(this) - class (fmModel_t) :: this !< model object - integer :: nkeys !< numkeys - integer, allocatable :: key_ids(:) !< array of key ids - character(len=256) :: key_value !< the value of a key - - nkeys = get_nkeys(this%yfid, this%id) - allocate(key_ids(nkeys)) - call get_key_ids(this%yfid, this%id, key_ids) - call get_key_value(this%yfid, key_ids(1), key_value) - this%name = trim(key_value) -end subroutine get_name_fmModel_t - -!> @brief gets the block ids for children of this type. -subroutine get_blocks_fmVar_t(this) - class (fmVar_t) :: this !< variable object - - call get_block_ids(this%yfid, this%blockname, this%child_ids, this%id) -end subroutine get_blocks_fmVar_t - -!> @brief Gets the name of this variable as well as the associated parameters and adds them to fmVar_t. -!! Note that the length of the character arrays for the parameter names and values are allocatable. -!! This is why they are read twice. -subroutine get_name_fmVar_t(this) - class (fmVar_t) :: this !< variable object - integer :: nkeys !< numkeys - integer :: maxln !< max string length names - integer :: maxlv !< max string length values - integer, allocatable :: key_ids(:) !< array of key ids - character(len=256) :: key_name !< the name of a key - character(len=256) :: key_value !< the value of a key - - nkeys = get_nkeys(this%yfid, this%id) + ntypes = get_num_blocks(yfid, "field_table", 0) + allocate(fmTable%types(ntypes)) + + ! Gets the block ids for the associated types of fmTable. + call get_block_ids(yfid, "field_table", fmTable%types(:)%id) + + do i=1,ntypes + call build_fmType(fmTable%types(i), yfid) + enddo +end subroutine build_fmTable + +!> @brief Populates an fmType, which is assumed to already have its `id` parameter set. +subroutine build_fmType(fmType, yfid) + type(fmType_t), intent(inout) :: fmType !< type object + integer, intent(in) :: yfid !< file id of the yaml file + integer, dimension(1) :: key_ids !< array of key ids + character(len=256) :: key_name !< the name of a key + character(len=256) :: key_value !< the value of a key + integer :: nmodels !< number of models attached to this type + integer :: i !< Loop counter + + nmodels = get_num_blocks(yfid, "modlist", fmType%id) + allocate(fmType%models(nmodels)) + + ! 
Gets the block ids for the associated models of fmType. + call get_block_ids(yfid, "modlist", fmType%models(:)%id, fmType%id) + + if (get_nkeys(yfid, fmType%id).ne.1) then + call mpp_error(FATAL, "fm_yaml_mod: A single `field_type` key is expected") + endif + + call get_key_ids(yfid, fmType%id, key_ids) + call get_key_name(yfid, key_ids(1), key_name) + call get_key_value(yfid, key_ids(1), key_value) + + if (trim(key_name).ne."field_type") then + call mpp_error(FATAL, "fm_yaml_mod: A single `field_type` key is expected") + endif + + fmType%name = trim(key_value) + + do i=1,nmodels + call build_fmModel(fmType%models(i), yfid) + enddo +end subroutine build_fmType + +!> @brief Populates an fmModel, which is assumed to already have its `id` parameter set. +subroutine build_fmModel(fmModel, yfid) + type(fmModel_t), intent(inout) :: fmModel !< model object + integer, intent(in) :: yfid !< file id of the yaml file + integer, dimension(1) :: key_ids !< array of key ids + character(len=256) :: key_name !< the name of a key + character(len=256) :: key_value !< the value of a key + integer :: nvars !< number of variables attached to this model + integer :: i !< Loop counter + + nvars = get_num_blocks(yfid, "varlist", fmModel%id) + allocate(fmModel%variables(nvars)) + + ! gets the block ids for the associated variables of fmModel. + call get_block_ids(yfid, "varlist", fmModel%variables(:)%id, fmModel%id) + + if (get_nkeys(yfid, fmModel%id).ne.1) then + call mpp_error(FATAL, "fm_yaml_mod: A single `model_type` key is expected") + endif + + call get_key_ids(yfid, fmModel%id, key_ids) + call get_key_name(yfid, key_ids(1), key_name) + call get_key_value(yfid, key_ids(1), key_value) + + if (trim(key_name).ne."model_type") then + call mpp_error(FATAL, "fm_yaml_mod: A single `model_type` key is expected") + endif + + fmModel%name = trim(key_value) + + do i=1,nvars + call build_fmVar(fmModel%variables(i), yfid) + enddo +end subroutine build_fmModel + +!> @brief Populates an fmVar and creates any associated fmAttrs +subroutine build_fmVar(fmVar, yfid) + type(fmVar_t), intent(inout) :: fmVar !< variable object + integer, intent(in) :: yfid !< file id of the yaml file + integer :: nkeys !< number of keys defined for this var + integer, allocatable :: key_ids(:) !< array of key ids + character(len=256) :: key_name !< the name of a key + character(len=256) :: key_value !< the value of a key + integer :: nattrs !< number of attribute blocks attached to this var + integer :: nmethods !< total number of methods attached to this var + integer :: maxln !< max string length of method names + integer :: maxlv !< max string length of method values + character(:), allocatable :: attr_method_keys(:) !< Keys of methods defined in attribute blocks + character(:), allocatable :: attr_method_values(:) !< Values of methods defined in attribute blocks + integer :: i_name !< Index of the key containing the variable's name + integer :: i, j !< Loop indices + + ! Read attribute blocks attached to this variable + call fmVar_read_attrs(fmVar, yfid, attr_method_keys, attr_method_values) + nattrs = size(attr_method_keys) + + nkeys = get_nkeys(yfid, fmVar%id) allocate(key_ids(nkeys)) - call get_key_ids(this%yfid, this%id, key_ids) - call get_key_value(this%yfid, key_ids(1), key_value) - this%name = trim(key_value) - if (nkeys .gt. 
1) then - maxln = 0 - maxlv = 0 - do var_j=2,nkeys - call get_key_name(this%yfid, key_ids(var_j), key_name) - call get_key_value(this%yfid, key_ids(var_j), key_value) + call get_key_ids(yfid, fmVar%id, key_ids) + + maxln = len(attr_method_keys) + maxlv = len(attr_method_values) + i_name = -1 + + do i=1,nkeys + call get_key_name(yfid, key_ids(i), key_name) + call get_key_value(yfid, key_ids(i), key_value) + + if (trim(key_name) .eq. "variable") then + if (i_name .ne. -1) then + call mpp_error(FATAL, "fm_yaml_mod: A variable can have only one `variable` key") + endif + + fmVar%name = trim(key_value) + i_name = i + else maxln = max(maxln, len_trim(key_name)) maxlv = max(maxlv, len_trim(key_value)) - end do - allocate(this%key_ids(nkeys-1)) - allocate(character(len=maxln)::this%keys(nkeys-1)) - allocate(character(len=maxlv)::this%values(nkeys-1)) - do var_j=2,nkeys - this%key_ids(var_j-1) = key_ids(var_j) - call get_key_name(this%yfid, key_ids(var_j), key_name) - call get_key_value(this%yfid, key_ids(var_j), key_value) - this%keys(var_j-1) = trim(key_name) - this%values(var_j-1) = trim(key_value) - end do - else - allocate(this%key_ids(0)) - end if -end subroutine get_name_fmVar_t - -!> @brief Gets the name of the parameter and the key value pairs for the subparameters and adds them to fmAttr_t. -!! Note that the length of the character arrays for the subparameter names and values are allocatable. -!! This is why they are read twice. -subroutine get_name_fmAttr_t(this) - class (fmAttr_t) :: this !< variable object - integer :: nkeys !< numkeys - integer :: maxln !< max string length names - integer :: maxlv !< max string length values - integer, allocatable :: key_ids(:) !< array of key ids - character(len=256) :: key_name !< the name of a key - character(len=256) :: key_value !< the value of a key - character(len=256) :: paramname !< the value of a key - - call get_key_name(this%yfid, this%id-1, paramname) - allocate(character(len=len_trim(paramname))::this%paramname) - this%paramname = trim(paramname) - nkeys = get_nkeys(this%yfid, this%id) - allocate(key_ids(nkeys)) - call get_key_ids(this%yfid, this%id, key_ids) - maxln = 0 - maxlv = 0 - do attr_j=1,nkeys - call get_key_name(this%yfid, key_ids(attr_j), key_name) - call get_key_value(this%yfid, key_ids(attr_j), key_value) - maxln = max(maxln, len_trim(key_name)) - maxlv = max(maxlv, len_trim(key_value)) - end do - allocate(character(len=maxln)::this%keys(nkeys)) - allocate(character(len=maxlv)::this%values(nkeys)) - do attr_j=1,nkeys - call get_key_name(this%yfid, key_ids(attr_j), key_name) - call get_key_value(this%yfid, key_ids(attr_j), key_value) - this%keys(attr_j) = trim(key_name) - this%values(attr_j) = trim(key_value) - end do -end subroutine get_name_fmAttr_t - -!> @brief Creates the children (fmType_t) of this type (fmTable_t). -!! -!! Note that this includes the creation function as well as the routines necessary to populate the child type, -!! including calling the create_children routine for the child type (this makes it somewhat recursive). -subroutine create_children_fmTable_t(this) - class (fmTable_t) :: this !< the field table - - allocate(this%children(this%nchildren)) - do table_i=1,this%nchildren - this%children(table_i) = fmType_t(this%yfid, this%child_ids(table_i)) - call this%children(table_i)%get_blocks - call this%children(table_i)%get_name - call this%children(table_i)%create_children - end do -end subroutine create_children_fmTable_t - -!> @brief Creates the children (fmModel_t) of this type (fmType_t). -!! -!! 
Note that this includes the creation function as well as the routines necessary to populate the child type, -!! including calling the create_children routine for the child type (this makes it somewhat recursive). -subroutine create_children_fmType_t(this) - class (fmType_t) :: this !< type object - - allocate(this%children(this%nchildren)) - do type_i=1,this%nchildren - this%children(type_i) = fmModel_t(this%yfid, this%child_ids(type_i)) - call this%children(type_i)%get_blocks - call this%children(type_i)%get_name - call this%children(type_i)%create_children - end do -end subroutine create_children_fmType_t - -!> @brief Creates the children (fmVar_t) of this type (fmModel_t). -!! -!! Note that this includes the creation function as well as the routines necessary to populate the child type, -!! including calling the create_children routine for the child type (this makes it somewhat recursive). -subroutine create_children_fmModel_t(this) - class (fmModel_t) :: this !< model object - - allocate(this%children(this%nchildren)) - do model_i=1,this%nchildren - this%children(model_i) = fmVar_t(this%yfid, this%child_ids(model_i)) - call this%children(model_i)%get_blocks - call this%children(model_i)%get_names_and_props - call this%children(model_i)%create_children - end do -end subroutine create_children_fmModel_t - -!> @brief Creates the children (fmAttr_t) of this type (fmVar_t). -!! -!! Note that this includes the creation function as well as the routines necessary to populate the child type. -subroutine create_children_fmVar_t(this) - class (fmVar_t) :: this !< var object - - if (this%nchildren .gt. 0) then - allocate(this%children(this%nchildren)) - do var_i=1,this%nchildren - this%children(var_i) = fmAttr_t(this%yfid, this%child_ids(var_i)) - call this%children(var_i)%get_names_and_props - end do - end if -end subroutine create_children_fmVar_t + endif + enddo + + if (i_name .eq. -1) then + call mpp_error(FATAL, "fm_yaml_mod: Every variable must have a `variable` key") + endif + + ! Number of methods is the number of keys (excluding `variable`), plus one for each attribute block. + nmethods = nkeys - 1 + nattrs + + allocate(character(len=maxln)::fmVar%keys(nmethods)) + allocate(character(len=maxlv)::fmVar%values(nmethods)) + + j = 1 + do i=1,nkeys + if (i.eq.i_name) cycle ! Exclude `variable` key + + call get_key_name(yfid, key_ids(i), key_name) + call get_key_value(yfid, key_ids(i), key_value) + fmVar%keys(j) = trim(key_name) + fmVar%values(j) = trim(key_value) + + j = j + 1 + enddo + + ! Add methods defined within attribute blocks. + fmVar%keys(j:) = attr_method_keys + fmVar%values(j:) = attr_method_values +end subroutine build_fmVar + +!> @brief Reads the attribute blocks attached to a variable and populates the associated fmAttr structures. +!! Returns two arrays containing key/value pairs of all methods defined via attribute blocks. 
+subroutine fmVar_read_attrs(fmVar, yfid, method_keys, method_values) + type(fmVar_t), intent(inout) :: fmVar !< variable object + integer, intent(in) :: yfid !< file id of the yaml file + character(:), allocatable, intent(out) :: method_keys(:) !< Method keys (names of attribute blocks) + character(:), allocatable, intent(out) :: method_values(:) !< Method values from attribute blocks + integer :: nattrs !< number of attribute blocks + integer :: nkeys !< number of keys in an attribute block + integer, allocatable :: key_ids(:) !< array of key ids + character(len=256) :: key_name !< the name of a key + character(len=256) :: key_value !< the value of a key + integer :: maxln_m !< max string length of method names + integer :: maxlv_m !< max string length of method values + integer :: maxln_a !< max string length of subparameter names + integer :: maxlv_a !< max string length of subparameter values + integer,allocatable :: name_key_id(:) !< Indices of attribute `value` keys + integer :: i, j, k !< Loop counters + + nattrs = get_num_unique_blocks(yfid, fmVar%id) + allocate(fmVar%attributes(nattrs)) + allocate(name_key_id(nattrs)) + + ! gets the block ids for the associated attributes of fmVar. + call get_unique_block_ids(yfid, fmVar%attributes(:)%id, fmVar%id) + + maxln_m = 0 + maxlv_m = 0 + name_key_id = -1 + + do i=1,nattrs + associate (fmAttr => fmVar%attributes(i)) + call get_block_name(yfid, fmAttr%id, key_value) + fmAttr%paramname = trim(key_value) + + nkeys = get_nkeys(yfid, fmAttr%id) + allocate(key_ids(nkeys)) + call get_key_ids(yfid, fmAttr%id, key_ids) + + maxln_a = 0 + maxlv_a = 0 + + do j=1,nkeys + call get_key_name(yfid, key_ids(j), key_name) + call get_key_value(yfid, key_ids(j), key_value) + + if (trim(key_name) .eq. "value") then + if (name_key_id(i) .ne. -1) then + call mpp_error(FATAL, "fm_yaml_mod: A variable attribute block can only have one `value` key") + endif + + maxln_m = max(maxln_m, len(fmAttr%paramname)) + maxlv_m = max(maxlv_m, len_trim(key_value)) + + name_key_id(i) = key_ids(j) + else + maxln_a = max(maxln_a, len_trim(key_name)) + maxlv_a = max(maxlv_a, len_trim(key_value)) + endif + enddo + + if (name_key_id(i) .eq. -1) then + call mpp_error(FATAL, "fm_yaml_mod: Every variable attribute must have a `value` key") + endif + + allocate(character(len=maxln_a)::fmAttr%keys(nkeys - 1)) + allocate(character(len=maxlv_a)::fmAttr%values(nkeys - 1)) + + k = 1 + do j=1,nkeys + if (key_ids(j).eq.name_key_id(i)) cycle + + call get_key_name(yfid, key_ids(j), key_name) + call get_key_value(yfid, key_ids(j), key_value) + fmAttr%keys(k) = trim(key_name) + fmAttr%values(k) = trim(key_value) + + k = k + 1 + enddo + + deallocate(key_ids) + end associate + enddo + + allocate(character(len=maxln_m)::method_keys(nattrs)) + allocate(character(len=maxlv_m)::method_values(nattrs)) + + do i=1,nattrs + method_keys(i) = fmVar%attributes(i)%paramname + call get_key_value(yfid, name_key_id(i), method_values(i)) + enddo +end subroutine fmVar_read_attrs + #endif end module fm_yaml_mod + !> @} ! close documentation grouping diff --git a/fms2_io/netcdf_io.F90 b/fms2_io/netcdf_io.F90 index 962076d5a5..65f9eccf9e 100644 --- a/fms2_io/netcdf_io.F90 +++ b/fms2_io/netcdf_io.F90 @@ -1090,8 +1090,8 @@ subroutine netcdf_save_restart(fileobj, unlim_dim_level) integer :: i if (.not. 
fileobj%is_restart) then - call error("write_restart:: file "//trim(fileobj%path)//" is not a restart file."& - &" Be sure the file was opened with is_restart=.true.") + call error("write_restart:: file "//trim(fileobj%path)//" is not a restart file. & + &Be sure the file was opened with is_restart=.true.") endif do i = 1, fileobj%num_restart_vars if (associated(fileobj%restart_vars(i)%data0d)) then @@ -1132,8 +1132,8 @@ subroutine netcdf_restore_state(fileobj, unlim_dim_level) integer :: i if (.not. fileobj%is_restart) then - call error("read_restart:: file "//trim(fileobj%path)//" is not a restart file."& - &" Be sure the file was opened with is_restart=.true.") + call error("read_restart:: file "//trim(fileobj%path)//" is not a restart file. & + &Be sure the file was opened with is_restart=.true.") endif do i = 1, fileobj%num_restart_vars if (associated(fileobj%restart_vars(i)%data0d)) then @@ -1283,8 +1283,8 @@ subroutine get_dimension_names(fileobj, names, broadcast) ndims = get_num_dimensions(fileobj, broadcast=.false.) if (ndims .gt. 0) then if (size(names) .ne. ndims) then - call error("'names' has to be the same size of the number of dimensions."& - &" Check your get_dimension_names call for file "//trim(fileobj%path)) + call error("'names' has to be the same size of the number of dimensions. & + &Check your get_dimension_names call for file "//trim(fileobj%path)) endif else call error("get_dimension_names: the file "//trim(fileobj%path)//" does not have any dimensions") @@ -1304,8 +1304,8 @@ subroutine get_dimension_names(fileobj, names, broadcast) if (.not. fileobj%is_root) then if (ndims .gt. 0) then if (size(names) .ne. ndims) then - call error("'names' has to be the same size of the number of dimensions."& - &" Check your get_dimension_names call for file "//trim(fileobj%path)) + call error("'names' has to be the same size of the number of dimensions. & + &Check your get_dimension_names call for file "//trim(fileobj%path)) endif else call error("get_dimension_names: the file "//trim(fileobj%path)//" does not have any dimensions") @@ -1507,8 +1507,8 @@ subroutine get_variable_names(fileobj, names, broadcast) nvars = get_num_variables(fileobj, broadcast=.false.) if (nvars .gt. 0) then if (size(names) .ne. nvars) then - call error("'names' has to be the same size of the number of variables."& - &" Check your get_variable_names call for file "//trim(fileobj%path)) + call error("'names' has to be the same size of the number of variables. & + &Check your get_variable_names call for file "//trim(fileobj%path)) endif else call error("get_variable_names: the file "//trim(fileobj%path)//" does not have any variables") @@ -1528,8 +1528,8 @@ subroutine get_variable_names(fileobj, names, broadcast) if (.not. fileobj%is_root) then if (nvars .gt. 0) then if (size(names) .ne. nvars) then - call error("'names' has to be the same size of the number of variables."& - &" Check your get_variable_names call for file "//trim(fileobj%path)) + call error("'names' has to be the same size of the number of variables. & + &Check your get_variable_names call for file "//trim(fileobj%path)) endif else call error("get_variable_names: the file "//trim(fileobj%path)//" does not have any variables") @@ -1641,9 +1641,9 @@ subroutine get_variable_dimension_names(fileobj, variable_name, dim_names, & call check_netcdf_code(err, append_error_msg) if (ndims .gt. 0) then if (size(dim_names) .ne. 
ndims) then - call error("'names' has to be the same size of the number of dimensions for the variable."& - &" Check your get_variable_dimension_names call for file "//trim(fileobj%path)//& - &" and variable:"//trim(variable_name)) + call error("'names' has to be the same size of the number of dimensions for the variable. & + &Check your get_variable_dimension_names call for file "//trim(fileobj%path)// & + " and variable:"//trim(variable_name)) endif else call error("get_variable_dimension_names: the variable: "//trim(variable_name)//" in file: "//trim(fileobj%path)& @@ -1664,9 +1664,9 @@ subroutine get_variable_dimension_names(fileobj, variable_name, dim_names, & if (.not. fileobj%is_root) then if (ndims .gt. 0) then if (size(dim_names) .ne. ndims) then - call error("'names' has to be the same size of the number of dimensions for the variable."& - &" Check your get_variable_dimension_names call for file "//trim(fileobj%path)//& - &" and variable:"//trim(variable_name)) + call error("'names' has to be the same size of the number of dimensions for the variable. & + & Check your get_variable_dimension_names call for file "//trim(fileobj%path)// & + " and variable:"//trim(variable_name)) endif else call error("get_variable_dimension_names: the variable: "//trim(variable_name)//" in file: "//trim(fileobj%path)& @@ -1707,9 +1707,9 @@ subroutine get_variable_size(fileobj, variable_name, dim_sizes, broadcast) call check_netcdf_code(err, append_error_msg) if (ndims .gt. 0) then if (size(dim_sizes) .ne. ndims) then - call error("'dim_sizes' has to be the same size of the number of dimensions for the variable."& - &" Check your get_variable_size call for file "//trim(fileobj%path)//& - &" and variable:"//trim(variable_name)) + call error("'dim_sizes' has to be the same size of the number of dimensions for the variable. & + &Check your get_variable_size call for file "//trim(fileobj%path)// & + " and variable:"//trim(variable_name)) endif else call error("get_variable_size: the variable: "//trim(variable_name)//" in file: "//trim(fileobj%path)//& @@ -1729,9 +1729,9 @@ subroutine get_variable_size(fileobj, variable_name, dim_sizes, broadcast) if (.not. fileobj%is_root) then if (ndims .gt. 0) then if (size(dim_sizes) .ne. ndims) then - call error("'dim_sizes' has to be the same size of the number of dimensions for the variable."& - &" Check your get_variable_size call for file "//trim(fileobj%path)//& - &" and variable:"//trim(variable_name)) + call error("'dim_sizes' has to be the same size of the number of dimensions for the variable. & + &Check your get_variable_size call for file "//trim(fileobj%path)// & + " and variable:"//trim(variable_name)) endif else call error("get_variable_size: the variable: "//trim(variable_name)//" in file: "//trim(fileobj%path)//& @@ -2227,8 +2227,8 @@ function is_registered_to_restart(fileobj, variable_name) & integer :: i if (.not. fileobj%is_restart) then - call error("file "//trim(fileobj%path)//" is not a restart file. "& - //"Add is_restart=.true. to your open_file call") + call error("file "//trim(fileobj%path)//" is not a restart file. & + &Add is_restart=.true. to your open_file call") endif is_registered = .false. do i = 1, fileobj%num_restart_vars @@ -2320,8 +2320,8 @@ subroutine write_restart_bc(fileobj, unlim_dim_level) integer :: i !< No description if (.not. fileobj%is_restart) then - call error("file "//trim(fileobj%path)//" is not a restart file. "& - &"Add is_restart=.true. 
to your open_file call") + call error("file "//trim(fileobj%path)//" is not a restart file. & + &Add is_restart=.true. to your open_file call") endif !> Loop through the variables, root pe gathers the data from the other pes and writes out the checksum. diff --git a/horiz_interp/horiz_interp.F90 b/horiz_interp/horiz_interp.F90 index 820e9079b9..9a910ccf11 100644 --- a/horiz_interp/horiz_interp.F90 +++ b/horiz_interp/horiz_interp.F90 @@ -49,11 +49,12 @@ module horiz_interp_mod use mpp_mod, only: input_nml_file, WARNING, mpp_pe, mpp_root_pe use constants_mod, only: pi use horiz_interp_type_mod, only: horiz_interp_type, assignment(=) -use horiz_interp_type_mod, only: CONSERVE, BILINEAR, SPHERICA, BICUBIC +use horiz_interp_type_mod, only: CONSERVE, BILINEAR, SPHERICAL, BICUBIC use horiz_interp_conserve_mod, only: horiz_interp_conserve_init, horiz_interp_conserve use horiz_interp_conserve_mod, only: horiz_interp_conserve_new, horiz_interp_conserve_del use horiz_interp_bilinear_mod, only: horiz_interp_bilinear_init, horiz_interp_bilinear use horiz_interp_bilinear_mod, only: horiz_interp_bilinear_new, horiz_interp_bilinear_del +use horiz_interp_bilinear_mod, only: horiz_interp_read_weights_bilinear use horiz_interp_bicubic_mod, only: horiz_interp_bicubic_init, horiz_interp_bicubic use horiz_interp_bicubic_mod, only: horiz_interp_bicubic_new, horiz_interp_bicubic_del use horiz_interp_spherical_mod, only: horiz_interp_spherical_init, horiz_interp_spherical @@ -66,7 +67,7 @@ module horiz_interp_mod !---- interfaces ---- public horiz_interp_type, horiz_interp, horiz_interp_new, horiz_interp_del, & - horiz_interp_init, horiz_interp_end, assignment(=) + horiz_interp_init, horiz_interp_end, assignment(=), horiz_interp_read_weights !> Allocates space and initializes a derived-type variable !! that contains pre-computed interpolation indices and weights. @@ -137,6 +138,12 @@ module horiz_interp_mod module procedure horiz_interp_new_1d_dst_r8 end interface + !> Subroutines for reading in weight files and using that to fill in the horiz_interp type instead + !! calculating it + interface horiz_interp_read_weights + module procedure horiz_interp_read_weights_r4 + module procedure horiz_interp_read_weights_r8 + end interface horiz_interp_read_weights !> Subroutine for performing the horizontal interpolation between two grids. !! 
@@ -294,7 +301,7 @@ subroutine horiz_interp_del ( Interp ) call horiz_interp_bilinear_del(Interp ) case (BICUBIC) call horiz_interp_bicubic_del(Interp ) - case (SPHERICA) + case (SPHERICAL) call horiz_interp_spherical_del(Interp ) end select diff --git a/horiz_interp/horiz_interp_bicubic.F90 b/horiz_interp/horiz_interp_bicubic.F90 index 25ac5c1a54..b4e8778cd1 100644 --- a/horiz_interp/horiz_interp_bicubic.F90 +++ b/horiz_interp/horiz_interp_bicubic.F90 @@ -47,7 +47,7 @@ module horiz_interp_bicubic_mod use mpp_mod, only: mpp_error, FATAL, stdout, mpp_pe, mpp_root_pe use fms_mod, only: write_version_number - use horiz_interp_type_mod, only: horiz_interp_type + use horiz_interp_type_mod, only: horiz_interp_type, BICUBIC use constants_mod, only: PI use platform_mod, only: r4_kind, r8_kind diff --git a/horiz_interp/horiz_interp_bilinear.F90 b/horiz_interp/horiz_interp_bilinear.F90 index 318d2c039b..d8db732b22 100644 --- a/horiz_interp/horiz_interp_bilinear.F90 +++ b/horiz_interp/horiz_interp_bilinear.F90 @@ -32,16 +32,18 @@ module horiz_interp_bilinear_mod use mpp_mod, only: mpp_error, FATAL, stdout, mpp_pe, mpp_root_pe use fms_mod, only: write_version_number use constants_mod, only: PI - use horiz_interp_type_mod, only: horiz_interp_type, stats + use horiz_interp_type_mod, only: horiz_interp_type, stats, BILINEAR use platform_mod, only: r4_kind, r8_kind use axis_utils2_mod, only: nearest_index + use fms2_io_mod, only: open_file, close_file, read_data, FmsNetcdfFile_t, get_dimension_size + use fms_string_utils_mod, only: string implicit none private public :: horiz_interp_bilinear_new, horiz_interp_bilinear, horiz_interp_bilinear_del - public :: horiz_interp_bilinear_init + public :: horiz_interp_bilinear_init, horiz_interp_read_weights_bilinear !> Creates a @ref horiz_interp_type for bilinear interpolation. !> @ingroup horiz_interp_bilinear_mod @@ -52,6 +54,14 @@ module horiz_interp_bilinear_mod module procedure horiz_interp_bilinear_new_2d_r8 end interface + !> Subroutines for reading in weight files and using that to fill in the horiz_interp type instead + !! 
calculating it + !> @ingroup horiz_interp_bilinear_mod + interface horiz_interp_read_weights_bilinear + module procedure horiz_interp_read_weights_bilinear_r4 + module procedure horiz_interp_read_weights_bilinear_r8 + end interface + interface horiz_interp_bilinear module procedure horiz_interp_bilinear_r4 module procedure horiz_interp_bilinear_r8 diff --git a/horiz_interp/horiz_interp_conserve.F90 b/horiz_interp/horiz_interp_conserve.F90 index b1b04a1b34..5f345e9769 100644 --- a/horiz_interp/horiz_interp_conserve.F90 +++ b/horiz_interp/horiz_interp_conserve.F90 @@ -44,7 +44,7 @@ module horiz_interp_conserve_mod use fms_mod, only: write_version_number use grid2_mod, only: get_great_circle_algorithm use constants_mod, only: PI - use horiz_interp_type_mod, only: horiz_interp_type + use horiz_interp_type_mod, only: horiz_interp_type, CONSERVE implicit none diff --git a/horiz_interp/horiz_interp_spherical.F90 b/horiz_interp/horiz_interp_spherical.F90 index 128b7fd47d..28110d343b 100644 --- a/horiz_interp/horiz_interp_spherical.F90 +++ b/horiz_interp/horiz_interp_spherical.F90 @@ -36,7 +36,7 @@ module horiz_interp_spherical_mod use fms_mod, only : write_version_number use fms_mod, only : check_nml_error use constants_mod, only : pi - use horiz_interp_type_mod, only : horiz_interp_type, stats + use horiz_interp_type_mod, only : horiz_interp_type, stats, SPHERICAL implicit none private diff --git a/horiz_interp/horiz_interp_type.F90 b/horiz_interp/horiz_interp_type.F90 index 7f8b300a99..e87870698c 100644 --- a/horiz_interp/horiz_interp_type.F90 +++ b/horiz_interp/horiz_interp_type.F90 @@ -38,10 +38,10 @@ module horiz_interp_type_mod ! parameter to determine interpolation method integer, parameter :: CONSERVE = 1 integer, parameter :: BILINEAR = 2 - integer, parameter :: SPHERICA = 3 + integer, parameter :: SPHERICAL = 3 integer, parameter :: BICUBIC = 4 -public :: CONSERVE, BILINEAR, SPHERICA, BICUBIC +public :: CONSERVE, BILINEAR, SPHERICAL, BICUBIC public :: horiz_interp_type, stats, assignment(=) !> @} diff --git a/horiz_interp/include/horiz_interp.inc b/horiz_interp/include/horiz_interp.inc index ec0540b442..c3fe335b14 100644 --- a/horiz_interp/include/horiz_interp.inc +++ b/horiz_interp/include/horiz_interp.inc @@ -120,7 +120,7 @@ deallocate(lon_src_1d, lat_src_1d, lon_dst_1d, lat_dst_1d) endif case ("spherical") - Interp%interp_method = SPHERICA + Interp%interp_method = SPHERICAL nlon_in = size(lon_in(:)); nlat_in = size(lat_in(:)) nlon_out = size(lon_out(:)); nlat_out = size(lat_out(:)) allocate(lon_src(nlon_in,nlat_in), lat_src(nlon_in,nlat_in)) @@ -246,7 +246,7 @@ deallocate(lon_src_1d,lat_src_1d) endif case ("spherical") - Interp%interp_method = SPHERICA + Interp%interp_method = SPHERICAL nlon_in = size(lon_in(:)); nlat_in = size(lat_in(:)) allocate(lon_src(nlon_in,nlat_in), lat_src(nlon_in,nlat_in)) do i = 1, nlon_in @@ -329,7 +329,7 @@ end if case ("spherical") - Interp%interp_method = SPHERICA + Interp%interp_method = SPHERICAL call horiz_interp_spherical_new ( Interp, lon_in, lat_in, lon_out, lat_out, & num_nbrs, max_dist, src_modulo ) case ("bilinear") @@ -409,7 +409,7 @@ call horiz_interp_bilinear_new ( Interp, lon_in, lat_in, lon_dst, lat_dst, & verbose, src_modulo ) case ("spherical") - Interp%interp_method = SPHERICA + Interp%interp_method = SPHERICAL call horiz_interp_spherical_new ( Interp, lon_in, lat_in, lon_dst, lat_dst, & num_nbrs, max_dist, src_modulo) case default @@ -454,7 +454,7 @@ case(BICUBIC) call horiz_interp_bicubic(Interp,data_in, data_out, verbose, mask_in, 
mask_out, & missing_value, missing_permit ) - case(SPHERICA) + case(SPHERICAL) call horiz_interp_spherical(Interp,data_in, data_out, verbose, mask_in, mask_out, & missing_value ) case default @@ -840,4 +840,52 @@ return end function IS_LAT_LON_ + + !> Subroutine for reading a weight file and using it to fill in the horiz_interp type +!! for the bilinear interpolation method. + subroutine HORIZ_INTERP_READ_WEIGHTS_(Interp, weight_filename, lon_out, lat_out, lon_in, lat_in, & + weight_file_source, interp_method, isw, iew, jsw, jew, nglon, nglat) + type(horiz_interp_type), intent(inout) :: Interp !< Horiz interp type to fill + character(len=*), intent(in) :: weight_filename !< Name of the weight file + real(FMS_HI_KIND_), intent(in) :: lat_out(:,:) !< Output (model) latitude + real(FMS_HI_KIND_), intent(in) :: lon_out(:,:) !< Output (model) longitude + real(FMS_HI_KIND_), intent(in) :: lat_in(:) !< Input (data) latitude + real(FMS_HI_KIND_), intent(in) :: lon_in(:) !< Input (data) longitude + character(len=*), intent(in) :: weight_file_source !< Source of the weight file + character(len=*), intent(in) :: interp_method !< The interp method to use + integer, intent(in) :: isw, iew, jsw, jew !< Starting and ending indices of the compute domain + integer, intent(in) :: nglon !< Number of longitudes in the global domain + integer, intent(in) :: nglat !< Number of latitudes in the global domain + + integer :: i, j !< For do loops + integer :: nlon_in !< Number of longitudes in the data grid + integer :: nlat_in !< Number of latitudes in the data grid + real(FMS_HI_KIND_), allocatable :: lon_src_1d(:) !< Center points of the longitude data grid + real(FMS_HI_KIND_), allocatable :: lat_src_1d(:) !< Center points of the latitude data grid + integer, parameter :: kindl = FMS_HI_KIND_ !< real kind size currently compiling + + select case (trim(interp_method)) + case ("bilinear") + !! This is to reproduce the behavior in horiz_interp_new + !! The subroutine assumes that the data grid (lon_in, lat_in) are + !! the edges and not the centers. + !! Data_override passes in the edges, which are calculated using the axis_edges subroutine + nlon_in = size(lon_in(:))-1; nlat_in = size(lat_in(:))-1 + allocate(lon_src_1d(nlon_in), lat_src_1d(nlat_in)) + do i = 1, nlon_in + lon_src_1d(i) = (lon_in(i) + lon_in(i+1)) * 0.5_kindl + enddo + do j = 1, nlat_in + lat_src_1d(j) = (lat_in(j) + lat_in(j+1)) * 0.5_kindl + enddo + + call horiz_interp_read_weights_bilinear(Interp, weight_filename, lon_out, lat_out, & + lon_src_1d, lat_src_1d, weight_file_source, interp_method, & + isw, iew, jsw, jew, nglon, nglat) + deallocate(lon_src_1d,lat_src_1d) + case default + call mpp_error(FATAL, "Reading weight from file is not supported for the "//& + trim(interp_method)//" method. It is currently only supported for bilinear") + end select + end subroutine HORIZ_INTERP_READ_WEIGHTS_ !> @} diff --git a/horiz_interp/include/horiz_interp_bicubic.inc b/horiz_interp/include/horiz_interp_bicubic.inc index 5ff567dbb8..e4f180c657 100644 --- a/horiz_interp/include/horiz_interp_bicubic.inc +++ b/horiz_interp/include/horiz_interp_bicubic.inc @@ -190,6 +190,8 @@ ! xf > xcu, no valid boundary point') enddo enddo + Interp% HI_KIND_TYPE_ % is_allocated = .true. + Interp%interp_method = BICUBIC end subroutine HORIZ_INTERP_BICUBIC_NEW_1D_S_ !> @brief Creates a new @ref horiz_interp_type @@ -343,11 +345,13 @@ ! xcu, no valid boundary point') enddo enddo + Interp% HI_KIND_TYPE_ % is_allocated = .true.
+ Interp%interp_method = BICUBIC end subroutine HORIZ_INTERP_BICUBIC_NEW_1D_ !> @brief Perform bicubic horizontal interpolation - subroutine HORIZ_INTERP_BICUBIC_NEW_( Interp, data_in, data_out, verbose, mask_in, mask_out, missing_value, & + subroutine HORIZ_INTERP_BICUBIC_( Interp, data_in, data_out, verbose, mask_in, mask_out, missing_value, & & missing_permit) type (horiz_interp_type), intent(in) :: Interp real(FMS_HI_KIND_), intent(in), dimension(:,:) :: data_in @@ -427,7 +431,7 @@ enddo enddo return - end subroutine HORIZ_INTERP_BICUBIC_NEW_ + end subroutine HORIZ_INTERP_BICUBIC_ !--------------------------------------------------------------------------- diff --git a/horiz_interp/include/horiz_interp_bicubic_r4.fh b/horiz_interp/include/horiz_interp_bicubic_r4.fh index 1d3b148480..bc9c0037d7 100644 --- a/horiz_interp/include/horiz_interp_bicubic_r4.fh +++ b/horiz_interp/include/horiz_interp_bicubic_r4.fh @@ -30,8 +30,8 @@ #undef HORIZ_INTERP_BICUBIC_NEW_1D_ #define HORIZ_INTERP_BICUBIC_NEW_1D_ horiz_interp_bicubic_new_1d_r4 -#undef HORIZ_INTERP_BICUBIC_NEW_ -#define HORIZ_INTERP_BICUBIC_NEW_ horiz_interp_bicubic_r4 +#undef HORIZ_INTERP_BICUBIC_ +#define HORIZ_INTERP_BICUBIC_ horiz_interp_bicubic_r4 #undef BCUINT_ #define BCUINT_ bcuint_r4 diff --git a/horiz_interp/include/horiz_interp_bicubic_r8.fh b/horiz_interp/include/horiz_interp_bicubic_r8.fh index d269767726..e37a234bf5 100644 --- a/horiz_interp/include/horiz_interp_bicubic_r8.fh +++ b/horiz_interp/include/horiz_interp_bicubic_r8.fh @@ -30,8 +30,8 @@ #undef HORIZ_INTERP_BICUBIC_NEW_1D_ #define HORIZ_INTERP_BICUBIC_NEW_1D_ horiz_interp_bicubic_new_1d_r8 -#undef HORIZ_INTERP_BICUBIC_NEW_ -#define HORIZ_INTERP_BICUBIC_NEW_ horiz_interp_bicubic_r8 +#undef HORIZ_INTERP_BICUBIC_ +#define HORIZ_INTERP_BICUBIC_ horiz_interp_bicubic_r8 #undef BCUINT_ #define BCUINT_ bcuint_r8 diff --git a/horiz_interp/include/horiz_interp_bilinear.inc b/horiz_interp/include/horiz_interp_bilinear.inc index 9e352d9c31..f998b823f7 100644 --- a/horiz_interp/include/horiz_interp_bilinear.inc +++ b/horiz_interp/include/horiz_interp_bilinear.inc @@ -191,6 +191,8 @@ ' data required between latitudes:', glt_min, glt_max, & ' data set is between latitudes:', lat_in(1), lat_in(nlat_in) endif + Interp% HI_KIND_TYPE_ % is_allocated = .true. + Interp% interp_method = BILINEAR return @@ -396,6 +398,8 @@ enddo enddo + Interp% HI_KIND_TYPE_ % is_allocated = .true. + Interp% interp_method = BILINEAR end subroutine !####################################################################### @@ -1205,4 +1209,93 @@ return end subroutine + + !> Subroutine for reading a weight file and use it to fill in the horiz interp type + !! for the bilinear interpolation method. 
+ subroutine HORIZ_INTERP_READ_WEIGHTS_BILINEAR_(Interp, weight_filename, lon_out, lat_out, lon_in, lat_in, & + weight_file_source, interp_method, isw, iew, jsw, jew, nglon, nglat) + type(horiz_interp_type), intent(inout) :: Interp !< Horiz interp type to fill + character(len=*), intent(in) :: weight_filename !< Name of the weight file + real(FMS_HI_KIND_), target, intent(in) :: lat_out(:,:) !< Output (model) latitude + real(FMS_HI_KIND_), target, intent(in) :: lon_out(:,:) !< Output (model) longitude + real(FMS_HI_KIND_), intent(in) :: lat_in(:) !< Input (data) latitude + real(FMS_HI_KIND_), intent(in) :: lon_in(:) !< Input (data) longitude + character(len=*), intent(in) :: weight_file_source !< Source of the weight file + character(len=*), intent(in) :: interp_method !< The interp method to use + integer, intent(in) :: isw, iew, jsw, jew !< Starting and ending indices of the compute domain + integer, intent(in) :: nglon !< Number of longitudes in the global domain + integer, intent(in) :: nglat !< Number of latitudes in the global domain + + + real(FMS_HI_KIND_), allocatable :: var(:,:,:) !< Dummy variable to read the indices and weight into + type(FmsNetcdfFile_t) :: weight_fileobj !< FMS2_io fileobj for the weight file + integer :: nlon !< Number of longitudes in the model grid as read + !! from the weight file + integer :: nlat !< Number of latitudes in the model grid as read + !! from the weight file + + if (.not. open_file(weight_fileobj, weight_filename, "read" )) & + call mpp_error(FATAL, "Error opening the weight file:"//& + &trim(weight_filename)) + + !< Check that weight file has the correct dimensions + select case (trim(weight_file_source)) + case ("fregrid") + call get_dimension_size(weight_fileobj, "nlon", nlon) + if (nlon .ne. nglon) & + call mpp_error(FATAL, "The nlon from the weight file is not the same as in the input grid."//& + &" From weight file:"//string(nlon)//" from input grid:"//string(size(lon_out,1))) + call get_dimension_size(weight_fileobj, "nlat", nlat) + if (nlat .ne. nglat) & + call mpp_error(FATAL, "The nlat from the weight file is not the same as in the input grid."//& + &" From weight file:"//string(nlat)//" from input grid:"//string(size(lon_out,2))) + case default + call mpp_error(FATAL, trim(weight_file_source)//& + &" is not a supported weight file source. fregrid is the only supported weight file source." ) + end select + + Interp%nlon_src = size(lon_in(:)) ; Interp%nlat_src = size(lat_in(:)) + Interp%nlon_dst = size(lon_out,1); Interp%nlat_dst = size(lon_out,2) + + allocate ( Interp % HI_KIND_TYPE_ % wti (Interp%nlon_dst,Interp%nlat_dst,2), & + Interp % HI_KIND_TYPE_ % wtj (Interp%nlon_dst,Interp%nlat_dst,2), & + Interp % i_lon (Interp%nlon_dst,Interp%nlat_dst,2), & + Interp % j_lat (Interp%nlon_dst,Interp%nlat_dst,2)) + + + !! Three is for lon, lat, tile + !! Currently, interpolation is only supported from lat,lon input data + allocate(var(Interp%nlon_dst,Interp%nlat_dst, 3)) + call read_data(weight_fileobj, "index", var, corner=(/isw, jsw, 1/), edge_lengths=(/iew-isw+1, jew-jsw+1, 3/)) + + !! Each point has a lon (i), and lat(j) index + !!
From there the four corners are (i,j), (i,j+1) (i+1) (i+1,j+1) + Interp % i_lon (:,:,1) = var(:,:,1) + Interp % i_lon (:,:,2) = Interp % i_lon (:,:,1) + 1 + where (Interp % i_lon (:,:,2) > size(lon_in(:))) Interp % i_lon (:,:,2) = 1 + + Interp % j_lat (:,:,1) = var(:,:,2) + Interp % j_lat (:,:,2) = Interp % j_lat (:,:,1) + 1 + where (Interp % j_lat (:,:,2) > size(lat_in(:))) Interp % j_lat (:,:,2) = 1 + + deallocate(var) + + allocate(var(Interp%nlon_dst,Interp%nlat_dst, 4)) + call read_data(weight_fileobj, "weight", var, corner=(/isw, jsw, 1/), edge_lengths=(/iew-isw+1, jew-jsw+1, 4/)) + + !! The weights for the four corners + !! var(:,:,1) -> (i,j) + !! var(:,:,2) -> (i,j+1) + !! var(:,:,3) -> (i+1,j) + !! var(:,:,4) -> (i+1,j+1) + Interp % HI_KIND_TYPE_ % wti = var(:,:,1:2) + Interp % HI_KIND_TYPE_ % wtj = var(:,:,3:4) + deallocate(var) + + Interp% HI_KIND_TYPE_ % is_allocated = .true. + Interp% interp_method = BILINEAR + Interp% I_am_initialized = .True. + call close_file(weight_fileobj) + end subroutine HORIZ_INTERP_READ_WEIGHTS_BILINEAR_ + !> @} diff --git a/horiz_interp/include/horiz_interp_bilinear_r4.fh b/horiz_interp/include/horiz_interp_bilinear_r4.fh index 8880914e43..36c462a057 100644 --- a/horiz_interp/include/horiz_interp_bilinear_r4.fh +++ b/horiz_interp/include/horiz_interp_bilinear_r4.fh @@ -45,5 +45,8 @@ #undef INTERSECT_ #define INTERSECT_ intersect_r4 +#undef HORIZ_INTERP_READ_WEIGHTS_BILINEAR_ +#define HORIZ_INTERP_READ_WEIGHTS_BILINEAR_ horiz_interp_read_weights_bilinear_r4 + #include "horiz_interp_bilinear.inc" !> @} diff --git a/horiz_interp/include/horiz_interp_bilinear_r8.fh b/horiz_interp/include/horiz_interp_bilinear_r8.fh index 37a2e6920b..05187557fc 100644 --- a/horiz_interp/include/horiz_interp_bilinear_r8.fh +++ b/horiz_interp/include/horiz_interp_bilinear_r8.fh @@ -45,5 +45,8 @@ #undef INTERSECT_ #define INTERSECT_ intersect_r8 +#undef HORIZ_INTERP_READ_WEIGHTS_BILINEAR_ +#define HORIZ_INTERP_READ_WEIGHTS_BILINEAR_ horiz_interp_read_weights_bilinear_r8 + #include "horiz_interp_bilinear.inc" !> @} diff --git a/horiz_interp/include/horiz_interp_conserve.inc b/horiz_interp/include/horiz_interp_conserve.inc index 0ec17fcacd..1d2212dabc 100644 --- a/horiz_interp/include/horiz_interp_conserve.inc +++ b/horiz_interp/include/horiz_interp_conserve.inc @@ -215,6 +215,9 @@ subroutine HORIZ_INTERP_CONSERVE_NEW_1DX1D_ ( Interp, lon_in, lat_in, lon_out, l endif !----------------------------------------------------------------------- + Interp% HI_KIND_TYPE_ % is_allocated = .true. + Interp% interp_method = CONSERVE + end subroutine HORIZ_INTERP_CONSERVE_NEW_1DX1D_ !####################################################################### @@ -384,6 +387,9 @@ subroutine HORIZ_INTERP_CONSERVE_NEW_1DX1D_ ( Interp, lon_in, lat_in, lon_out, l deallocate(i_src, j_src, i_dst, j_dst, xgrid_area, dst_area ) + Interp% HI_KIND_TYPE_ % is_allocated = .true. + Interp% interp_method = CONSERVE + end subroutine HORIZ_INTERP_CONSERVE_NEW_1DX2D_ !####################################################################### @@ -493,6 +499,9 @@ subroutine HORIZ_INTERP_CONSERVE_NEW_1DX1D_ ( Interp, lon_in, lat_in, lon_out, l deallocate(i_src, j_src, i_dst, j_dst, xgrid_area, dst_area) + Interp% HI_KIND_TYPE_ % is_allocated = .true. 
+ Interp% interp_method = CONSERVE + end subroutine HORIZ_INTERP_CONSERVE_NEW_2DX1D_ !####################################################################### @@ -600,6 +609,9 @@ subroutine HORIZ_INTERP_CONSERVE_NEW_1DX1D_ ( Interp, lon_in, lat_in, lon_out, l deallocate(i_src, j_src, i_dst, j_dst, xgrid_area, dst_area ) + Interp% HI_KIND_TYPE_ % is_allocated = .true. + Interp% interp_method = CONSERVE + end subroutine HORIZ_INTERP_CONSERVE_NEW_2DX2D_ !######################################################################## diff --git a/horiz_interp/include/horiz_interp_r4.fh b/horiz_interp/include/horiz_interp_r4.fh index a3211ee6e5..89b3e60559 100644 --- a/horiz_interp/include/horiz_interp_r4.fh +++ b/horiz_interp/include/horiz_interp_r4.fh @@ -60,5 +60,8 @@ #undef IS_LAT_LON_ #define IS_LAT_LON_ is_lat_lon_r4 +#undef HORIZ_INTERP_READ_WEIGHTS_ +#define HORIZ_INTERP_READ_WEIGHTS_ horiz_interp_read_weights_r4 + #include "horiz_interp.inc" !> @} diff --git a/horiz_interp/include/horiz_interp_r8.fh b/horiz_interp/include/horiz_interp_r8.fh index 713be92065..312a31403a 100644 --- a/horiz_interp/include/horiz_interp_r8.fh +++ b/horiz_interp/include/horiz_interp_r8.fh @@ -60,5 +60,8 @@ #undef IS_LAT_LON_ #define IS_LAT_LON_ is_lat_lon_r8 +#undef HORIZ_INTERP_READ_WEIGHTS_ +#define HORIZ_INTERP_READ_WEIGHTS_ horiz_interp_read_weights_r8 + #include "horiz_interp.inc" !> @} diff --git a/horiz_interp/include/horiz_interp_spherical.inc b/horiz_interp/include/horiz_interp_spherical.inc index cc00a4264e..f848622a7c 100644 --- a/horiz_interp/include/horiz_interp_spherical.inc +++ b/horiz_interp/include/horiz_interp_spherical.inc @@ -188,6 +188,8 @@ Interp%nlon_src = map_src_xsize; Interp%nlat_src = map_src_ysize Interp%nlon_dst = map_dst_xsize; Interp%nlat_dst = map_dst_ysize + Interp% HI_KIND_TYPE_ % is_allocated = .true. + Interp% interp_method = SPHERICAL return diff --git a/libFMS.F90 b/libFMS.F90 index 42879958f5..9180be32f5 100644 --- a/libFMS.F90 +++ b/libFMS.F90 @@ -414,8 +414,7 @@ module fms fms_horiz_interp_del => horiz_interp_del, fms_horiz_interp_init => horiz_interp_init, & fms_horiz_interp_end => horiz_interp_end use horiz_interp_type_mod, only: FmsHorizInterp_type => horiz_interp_type, & - assignment(=), CONSERVE, BILINEAR, SPHERICA, BICUBIC, & - fms_horiz_interp_type_stats => stats + assignment(=), fms_horiz_interp_type_stats => stats !! used via horiz_interp ! horiz_interp_bicubic_mod, horiz_interp_bilinear_mod ! horiz_interp_conserve_mod, horiz_interp_spherical_mod diff --git a/libFMS/Makefile.am b/libFMS/Makefile.am index 1ab8d13920..507d428451 100644 --- a/libFMS/Makefile.am +++ b/libFMS/Makefile.am @@ -28,7 +28,7 @@ lib_LTLIBRARIES = libFMS.la # These linker flags specify libtool version info. # See http://www.gnu.org/software/libtool/manual/libtool.html#Libtool-versioning # for information regarding incrementing `-version-info`. -libFMS_la_LDFLAGS = -version-info 19:1:0 +libFMS_la_LDFLAGS = -version-info 20:0:0 # Add the convenience libraries to the FMS library. 
libFMS_la_LIBADD = $(top_builddir)/platform/libplatform.la diff --git a/mpp/include/mpp_comm_mpi.inc b/mpp/include/mpp_comm_mpi.inc index d7fd2352ae..155cc722a8 100644 --- a/mpp/include/mpp_comm_mpi.inc +++ b/mpp/include/mpp_comm_mpi.inc @@ -111,6 +111,7 @@ if (t_level == 3) return call mpp_init_logfile() + call mpp_init_warninglog() if (present(alt_input_nml_path)) then call read_input_nml(alt_input_nml_path=alt_input_nml_path) else @@ -205,6 +206,7 @@ subroutine mpp_exit() call mpp_sync() call FLUSH( out_unit ) + close(warn_unit) if( pe.EQ.root_pe )then write( out_unit,'(/a,i6,a)' ) 'Tabulating mpp_clock statistics across ', npes, ' PEs...' diff --git a/mpp/include/mpp_util.inc b/mpp/include/mpp_util.inc index f8458806e6..a86fcba626 100644 --- a/mpp/include/mpp_util.inc +++ b/mpp/include/mpp_util.inc @@ -119,6 +119,35 @@ end do end if end subroutine mpp_init_logfile + + !> Opens the warning log file, called during mpp_init + subroutine mpp_init_warninglog() + logical :: exist + character(len=11) :: this_pe + if( pe.EQ.root_pe )then + write(this_pe,'(a,i6.6,a)') '.',pe,'.out' + inquire( file=trim(warnfile)//this_pe, exist=exist ) + if(exist)then + open(newunit=warn_unit, file=trim(warnfile)//this_pe, status='REPLACE' ) + else + open(newunit=warn_unit, file=trim(warnfile)//this_pe, status='NEW' ) + endif + end if + end subroutine mpp_init_warninglog + + !> @brief This function returns unit number for the warning log + !! if on the root pe, otherwise returns the etc_unit value (usually /dev/null) + function warnlog() + integer :: warnlog + if(.not. module_is_initialized) call mpp_error(FATAL, "mpp_mod: warnlog cannot be called before mpp_init") + if(root_pe .eq. pe) then + warnlog = warn_unit + else + warnlog = etc_unit + endif + return + end function warnlog + !##################################################################### subroutine mpp_set_warn_level(flag) integer, intent(in) :: flag diff --git a/mpp/include/mpp_util_mpi.inc b/mpp/include/mpp_util_mpi.inc index 7d235be83b..688a9c9311 100644 --- a/mpp/include/mpp_util_mpi.inc +++ b/mpp/include/mpp_util_mpi.inc @@ -60,13 +60,21 @@ subroutine mpp_error_basic( errortype, errormsg ) !$OMP CRITICAL (MPP_ERROR_CRITICAL) select case( errortype ) case(NOTE) - if(pe==root_pe)write( out_unit,'(a)' )trim(text) + if(pe==root_pe) then + write( out_unit,'(a)' )trim(text) + write( warn_unit,'(a)' )trim(text) + endif case default errunit = stderr() write( errunit, '(/a/)' )trim(text) - if(pe==root_pe)write( out_unit,'(/a/)' )trim(text) + if(pe==root_pe) then + write( out_unit,'(/a/)' )trim(text) + write( warn_unit,'(/a/)' )trim(text) + endif if( errortype.EQ.FATAL .OR. warnings_are_fatal )then FLUSH(out_unit) + FLUSH(warn_unit) + close(warn_unit) #ifdef __INTEL_COMPILER ! 
Get traceback and return quietly for correct abort call TRACEBACKQQ(user_exit_code=-1) diff --git a/mpp/mpp.F90 b/mpp/mpp.F90 index e12a5d63ae..078c99b955 100644 --- a/mpp/mpp.F90 +++ b/mpp/mpp.F90 @@ -202,7 +202,7 @@ module mpp_mod public :: mpp_init_test_read_namelist, mpp_init_test_etc_unit, mpp_init_test_requests_allocated !--- public interface from mpp_util.h ------------------------------ - public :: stdin, stdout, stderr, stdlog, lowercase, uppercase, mpp_error, mpp_error_state + public :: stdin, stdout, stderr, stdlog, warnlog, lowercase, uppercase, mpp_error, mpp_error_state public :: mpp_set_warn_level, mpp_sync, mpp_sync_self, mpp_pe public :: mpp_npes, mpp_root_pe, mpp_set_root_pe, mpp_declare_pelist public :: mpp_get_current_pelist, mpp_set_current_pelist, mpp_get_current_pelist_name @@ -1273,7 +1273,9 @@ module mpp_mod logical :: mpp_record_timing_data=.TRUE. type(clock),save :: clocks(MAX_CLOCKS) integer :: log_unit, etc_unit - character(len=32) :: configfile='logfile' + integer :: warn_unit !< unit number of the warning log + character(len=32), parameter :: configfile='logfile' + character(len=32), parameter :: warnfile='warnfile' !< base name for warninglog (appends "..out") integer :: peset_num=0, current_peset_num=0 integer :: world_peset_num ! @brief c functions binding !> @ingroup yaml_parser_mod interface @@ -63,11 +72,11 @@ module yaml_parser_mod !> @brief Private c function that opens and parses a yaml file (see yaml_parser_binding.c) !! @return Flag indicating if the read was successful function open_and_parse_file_wrap(filename, file_id) bind(c) & - result(success) + result(error_code) use iso_c_binding, only: c_char, c_int, c_bool character(kind=c_char), intent(in) :: filename(*) !< Filename of the yaml file integer(kind=c_int), intent(out) :: file_id !< File id corresponding to the yaml file that was opened - logical(kind=c_bool) :: success !< Flag indicating if the read was successful + logical(kind=c_int) :: error_code !< Flag indicating the error message (1 if sucessful) end function open_and_parse_file_wrap !> @brief Private c function that checks if a file_id is valid (see yaml_parser_binding.c) @@ -127,6 +136,17 @@ function get_value(file_id, key_id) bind(c) & type(c_ptr) :: key_value end function get_value +!> @brief Private c function that get the block name from a block_id in a yaml file +!! @return String containing the value obtained +function get_block(file_id, block_id) bind(c) & + result(block_name) + use iso_c_binding, only: c_ptr, c_int, c_bool + integer(kind=c_int), intent(in) :: file_id !< File id corresponding to the yaml file that was opened + integer(kind=c_int), intent(in) :: block_id !< Block_id to get the block name for + + type(c_ptr) :: block_name +end function get_block + !> @brief Private c function that determines the value of a key in yaml_file (see yaml_parser_binding.c) !! @return c pointer with the value obtained function get_value_from_key_wrap(file_id, block_id, key_name, success) bind(c) & @@ -194,6 +214,26 @@ function is_valid_block_id(file_id, block_id) bind(c) & logical(kind=c_bool) :: is_valid !< Flag indicating if the file_id is valid end function is_valid_block_id +!> @brief Private c function that determines the number of unique blocks that belong to +!! a parent block with parent_block_id in the yaml file (see yaml_parser_binding.c) +!! 
@return Number of unique blocks +function get_num_unique_blocks_bind(file_id, parent_block_id) bind(c) & + result(nblocks) + use iso_c_binding, only: c_char, c_int, c_bool + integer(kind=c_int), intent(in) :: file_id !< File id of the yaml file to search + integer(kind=c_int) :: parent_block_id !< Id of the parent block + + integer(kind=c_int) :: nblocks +end function get_num_unique_blocks_bind + +!> @brief Private c function that gets the the ids of the unique blocks in the yaml file +!! (see yaml_parser_binding.c) +subroutine get_unique_block_ids_bind(file_id, block_ids, parent_block_id) bind(c) + use iso_c_binding, only: c_char, c_int, c_bool, c_ptr + integer(kind=c_int), intent(in) :: file_id !< File id corresponding to the yaml file that was opened + integer(kind=c_int), intent(inout) :: block_ids(*) !< Id of the parent_block + integer(kind=c_int) :: parent_block_id !< Id of the parent block +end subroutine get_unique_block_ids_bind end interface !> @addtogroup yaml_parser_mod @@ -206,7 +246,7 @@ function open_and_parse_file(filename) & result(file_id) character(len=*), intent(in) :: filename !< Filename of the yaml file - logical :: success !< Flag indicating if the read was successful + integer :: error_code !< Flag indicating any errors in the parsing or 1 if sucessful logical :: yaml_exists !< Flag indicating whether the yaml exists integer :: file_id @@ -217,11 +257,28 @@ function open_and_parse_file(filename) & call mpp_error(NOTE, "The yaml file:"//trim(filename)//" does not exist, hopefully this is your intent!") return end if - success = open_and_parse_file_wrap(trim(filename)//c_null_char, file_id) - if (.not. success) call mpp_error(FATAL, "Error opening the yaml file:"//trim(filename)//". Check the file!") + error_code = open_and_parse_file_wrap(trim(filename)//c_null_char, file_id) + call check_error_code(error_code, filename) end function open_and_parse_file +!> @brief Checks the error code from a open_and_parse_file_wrap function call +subroutine check_error_code(error_code, filename) + integer, intent(in) :: error_code + character(len=*), intent(in) :: filename + + select case (error_code) + case (SUCCESSFUL) + return + case (MISSING_FILE) + call mpp_error(FATAL, "Error opening the yaml file:"//trim(filename)) + case (PARSER_INIT_ERROR) + call mpp_error(FATAL, "Error initializing the parser for the file:"//trim(filename)) + case (INVALID_YAML) + call mpp_error(FATAL, "Error parsing the file:"//trim(filename)//". Check that your yaml file is valid") + end select +end subroutine check_error_code + !> @brief Gets the key from a file id subroutine get_key_name(file_id, key_id, key_name) integer, intent(in) :: key_id !< Id of the key-value pair of interest @@ -463,6 +520,52 @@ subroutine get_key_ids (file_id, block_id, key_ids) call get_key_ids_binding (file_id, block_id, key_ids) end subroutine get_key_ids +!> @brief Gets the number of unique blocks +!! @return The number of unique blocks +function get_num_unique_blocks(file_id, parent_block_id) & + result(nblocks) + integer, intent(in) :: file_id !< File id corresponding to the yaml file that was opened + integer, intent(in), optional :: parent_block_id !< Id of the parent_block + integer :: nblocks + + if (.not. is_valid_file_id(file_id)) call mpp_error(FATAL, & + & "The file id in your get_num_unique_blocks call is invalid! Check your call.") + + if (.not. present(parent_block_id)) then + nblocks = get_num_unique_blocks_bind(file_id, 0) + else + if (.not. 
is_valid_block_id(file_id, parent_block_id)) call mpp_error(FATAL, & + & "The parent_block id in your get_block_ids call is invalid! Check your call.") + nblocks = get_num_unique_blocks_bind(file_id, parent_block_id) + endif +end function + +!> @brief Gets the ids of the unique block ids +subroutine get_unique_block_ids(file_id, block_ids, parent_block_id) + integer, intent(in) :: file_id !< File id corresponding to the yaml file that was opened + integer, intent(inout) :: block_ids(:) !< Ids of each unique block + integer, intent(in), optional :: parent_block_id !< Id of the parent_block + + if (.not. is_valid_file_id(file_id)) call mpp_error(FATAL, & + & "The file id in your get_num_unique_blocks_ids call is invalid! Check your call.") + + if (.not. present(parent_block_id)) then + call get_unique_block_ids_bind(file_id, block_ids, 0) + else + if (.not. is_valid_block_id(file_id, parent_block_id)) call mpp_error(FATAL, & + & "The parent_block id in your get_block_ids call is invalid! Check your call.") + call get_unique_block_ids_bind(file_id, block_ids, parent_block_id) + endif +end subroutine get_unique_block_ids + +!> @brief Gets the block name form the block id +subroutine get_block_name(file_id, block_id, block_name) + integer, intent(in) :: file_id !< File id corresponding to the yaml file that was opened + integer, intent(in) :: block_id !< Id of the block to get the name from + character(len=*), intent(out) :: block_name !< Name of the block + + block_name = fms_c2f_string(get_block(file_id, block_id)) +end subroutine #endif end module yaml_parser_mod !> @} diff --git a/parser/yaml_parser_binding.c b/parser/yaml_parser_binding.c index 28f9e15ee0..778b6267dc 100644 --- a/parser/yaml_parser_binding.c +++ b/parser/yaml_parser_binding.c @@ -97,6 +97,14 @@ char *get_value(int *file_id, int *key_id) return my_files.files[j].keys[*key_id].value; } +/* @brief Private c functions get gets the block name from a block id + @return String containing the value obtained */ +char *get_block(int *file_id, int *block_id) +{ + int j = *file_id; /* To minimize the typing :) */ + return my_files.files[j].keys[*block_id].parent_name; +} + /* @brief Private c function that determines they value of a key in yaml_file @return c pointer with the value obtained */ char *get_value_from_key_wrap(int *file_id, int *block_id, char *key_name, int *sucess) /*, char *key_name) */ @@ -136,6 +144,82 @@ int get_num_blocks_all(int *file_id, char *block_name) return nblocks; } +/* @brief Private c function that determines the number of unique blocks (i.e diag_files, varlist, etc) + @return The number of unique blocks */ +int get_num_unique_blocks_bind(int *file_id, int *parent_block_id) +{ + int nblocks = 0; /* Number of blocks */ + int i; /* For loops */ + int j = *file_id; /* To minimize the typing :) */ + char block_names[my_files.files[j].nkeys][255]; /* Array that stores the names of the unique blocks*/ + bool found; /* True if the block name was already found (i.e it not unqiue)*/ + int k; /* For loops */ + + for ( i = 1; i <= my_files.files[j].nkeys; i++ ) + { + if (my_files.files[j].keys[i].parent_key == *parent_block_id ) + { + if (strcmp(my_files.files[j].keys[i].parent_name, "") == 0){ + continue; + } + found = false; + for (k = 1; k <= nblocks; k++) + { + if (strcmp(block_names[k], my_files.files[j].keys[i].parent_name) == 0) + { + found = true; + break; + } + } + + if (found) continue; + + nblocks = nblocks + 1; + strcpy(block_names[nblocks], my_files.files[j].keys[i].parent_name); + // printf("Block 
names: %s \n", block_names[nblocks]); + } + } + return nblocks; +} + +/* @brief Private c function that determines the ids of the unique blocks (i.e diag_files, varlist, etc) + @return The ids of the unique blocks */ +void get_unique_block_ids_bind(int *file_id, int *block_ids, int *parent_block_id) +{ + int nblocks = 0; /* Number of blocks */ + int i; /* For loops */ + int j = *file_id; /* To minimize the typing :) */ + char block_names[my_files.files[j].nkeys][255]; /* Array that stores the names of the unique blocks*/ + bool found; /* True if the block name was already found (i.e it not unqiue)*/ + int k; /* For loops */ + + for ( i = 1; i <= my_files.files[j].nkeys; i++ ) + { + if (my_files.files[j].keys[i].parent_key == *parent_block_id ) + { + if (strcmp(my_files.files[j].keys[i].parent_name, "") == 0){ + continue; + } + found = false; + for (k = 1; k <= nblocks; k++) + { + if (strcmp(block_names[k], my_files.files[j].keys[i].parent_name) == 0) + { + found = true; + break; + } + } + + if (found) continue; + + nblocks = nblocks + 1; + block_ids[nblocks - 1] = my_files.files[j].keys[i].key_number; + strcpy(block_names[nblocks], my_files.files[j].keys[i].parent_name); + //printf("Block names: %s \n", block_names[nblocks]); + } + } + return; +} /* @brief Private c function that determines the number of blocks with block_name that belong to a parent block with parent_block_id in the yaml file @return Number of blocks with block_name */ @@ -216,7 +300,7 @@ bool is_valid_file_id(int *file_id) /* @brief Private c function that opens and parses a yaml file and saves it in a struct @return Flag indicating if the read was sucessful */ -bool open_and_parse_file_wrap(char *filename, int *file_id) +int open_and_parse_file_wrap(char *filename, int *file_id) { yaml_parser_t parser; yaml_token_t token; @@ -246,9 +330,9 @@ bool open_and_parse_file_wrap(char *filename, int *file_id) /* printf("Opening file: %s.\nThere are %i files opened.\n", filename, j); */ file = fopen(filename, "r"); - if (file == NULL) return false; + if (file == NULL) return -1; - if(!yaml_parser_initialize(&parser)) return false; + if(!yaml_parser_initialize(&parser)) return -2; my_files.files[j].keys = (key_value_pairs*)calloc(1, sizeof(key_value_pairs)); @@ -257,7 +341,9 @@ bool open_and_parse_file_wrap(char *filename, int *file_id) /* Set input file */ yaml_parser_set_input_file(&parser, file); do { - yaml_parser_scan(&parser, &token); + if (!yaml_parser_scan(&parser, &token)) { + return -3; + } switch(token.type) { case YAML_KEY_TOKEN: @@ -336,7 +422,7 @@ bool open_and_parse_file_wrap(char *filename, int *file_id) /* printf("closing file: %s\n", filename); */ fclose(file); - return true; + return 1; } #endif diff --git a/test_fms/coupler/test_atmos_ocean_fluxes.F90 b/test_fms/coupler/test_atmos_ocean_fluxes.F90 index 80a8294251..742ac4c50f 100644 --- a/test_fms/coupler/test_atmos_ocean_fluxes.F90 +++ b/test_fms/coupler/test_atmos_ocean_fluxes.F90 @@ -23,7 +23,7 @@ !! @description This program tests the two main subroutines in atmos_ocean_fluxes. 
program test_atmos_ocean_fluxes - use fms_mod, only: fms_init + use fms_mod, only: fms_init, fms_end use coupler_types_mod, only: coupler_1d_bc_type use field_manager_mod, only: fm_exists, fm_get_value use fm_util_mod, only: fm_util_get_real_array @@ -81,6 +81,7 @@ program test_atmos_ocean_fluxes call test_atmos_ocean_fluxes_init !> checking gas_fluxes, gas_fields_atm, and gas_fields_ice have been initialized correctly call test_coupler_1d_bc_type + call fms_end contains !-------------------------------------- diff --git a/test_fms/coupler/test_coupler.sh b/test_fms/coupler/test_coupler.sh index 030a33269a..4512cca557 100755 --- a/test_fms/coupler/test_coupler.sh +++ b/test_fms/coupler/test_coupler.sh @@ -26,6 +26,7 @@ # Set common test settings. . ../test-lib.sh +rm -f input.nml touch input.nml # diag_table for test @@ -112,6 +113,25 @@ test_expect_success "coupler types interfaces (r8_kind)" ' mpirun -n 4 ./test_coupler_types_r8 ' +# delete lines from the table to make sure we see the difference in the send_data return status +sed -i '8,12{d}' diag_table +sed -i '10,13{d}' diag_table.yaml +sed -i '18,25{d}' diag_table.yaml +cat <<_EOF > input.nml +&test_coupler_types_nml + fail_return_status=.true. +/ +_EOF + + +test_expect_success "coupler types interfaces - check send_data return vals (r4_kind)" ' + mpirun -n 4 ./test_coupler_types_r4 +' + +test_expect_success "coupler types interfaces - check send_data return vals (r8_kind)" ' + mpirun -n 4 ./test_coupler_types_r8 +' + mkdir RESTART test_expect_success "coupler register restart 2D(r4_kind)" ' diff --git a/test_fms/coupler/test_coupler_types.F90 b/test_fms/coupler/test_coupler_types.F90 index 8beb9f4695..4204f768b6 100644 --- a/test_fms/coupler/test_coupler_types.F90 +++ b/test_fms/coupler/test_coupler_types.F90 @@ -31,7 +31,7 @@ program test_coupler_types use fms_mod, only: fms_init, fms_end, stdout, string -use mpp_mod, only: mpp_error, mpp_pe, mpp_root_pe, FATAL, mpp_sync, mpp_init +use mpp_mod, only: mpp_error, mpp_pe, mpp_root_pe, FATAL, mpp_sync, mpp_init, input_nml_file use mpp_domains_mod, only: domain2d, mpp_define_domains, mpp_define_io_domain, mpp_get_data_domain, domain1D use mpp_domains_mod, only: mpp_domains_set_stack_size use coupler_types_mod, only: coupler_3d_bc_type, coupler_2d_bc_type, coupler_1d_bc_type @@ -70,6 +70,12 @@ program test_coupler_types character(len=128) :: chksum_2d, chksum_3d real(FMS_CP_TEST_KIND_), allocatable :: expected_2d(:,:), expected_3d(:,:,:) integer :: err, ncid, dim1D, varid, day +logical, allocatable :: return_stats(:,:) + +logical :: fail_return_status = .false. !< if true checks for one of the coupler_type_send_data calls to fail and + !! return a false value + +NAMELIST /test_coupler_types_nml/ fail_return_status call fms_init call time_manager_init @@ -77,6 +83,9 @@ program test_coupler_types call mpp_init call set_calendar_type(JULIAN) +read(input_nml_file, test_coupler_types_nml, iostat=err) +if(err > 0) call mpp_error(FATAL, "test_coupler_types:: error reading test input nml") + ! basic domain set up nlat=60; nlon=60; nz=12 layout = (/2, 2/) @@ -216,8 +225,22 @@ program test_coupler_types time_t = set_date(1, 1, day) call coupler_type_increment_data(bc_2d_cp, bc_2d_new) ! 
increment _new with cp call coupler_type_increment_data(bc_3d_cp, bc_3d_new) - call coupler_type_send_data(bc_2d_new, time_t) - call coupler_type_send_data(bc_3d_new, time_t) + call coupler_type_send_data(bc_2d_new, time_t, return_stats) + if( fail_return_status ) then + if( ALL(return_stats) ) call mpp_error(FATAL, "test_coupler_types:: send_data calls returned true, "// & + "expected false return value from incorrect diag_table") + else + if( .not. ALL(return_stats) ) call mpp_error(FATAL, & + "test_coupler_types:: coupler_type_send_data returned false with valid diag_table") + endif + call coupler_type_send_data(bc_3d_new, time_t, return_stats) + if( fail_return_status ) then + if( ALL(return_stats) ) call mpp_error(FATAL, "test_coupler_types:: send_data calls returned true, "// & + "expected false return value from incorrect diag_table") + else + if( .not. ALL(return_stats) ) call mpp_error(FATAL, & + "test_coupler_types:: coupler_type_send_data returned false with valid diag_table") + endif enddo time_t = set_date(1, 2, 1) call diag_manager_end(time_t) @@ -314,4 +337,4 @@ subroutine check_field_data_3d(bc_3d, expected) enddo end subroutine check_field_data_3d -end program \ No newline at end of file +end program diff --git a/test_fms/data_override/Makefile.am b/test_fms/data_override/Makefile.am index 69f09540fa..087bd91ea3 100644 --- a/test_fms/data_override/Makefile.am +++ b/test_fms/data_override/Makefile.am @@ -73,11 +73,11 @@ TESTS_ENVIRONMENT= test_input_path="@TEST_INPUT_PATH@" \ # Run the test program. TESTS = test_data_override2.sh test_data_override_init.sh test_data_override2_mono.sh test_data_override2_ongrid.sh \ - test_data_override2_scalar.sh + test_data_override2_scalar.sh test_data_override_weights.sh # Include these files with the distribution. EXTRA_DIST = test_data_override2.sh test_data_override_init.sh test_data_override2_mono.sh test_data_override2_ongrid.sh \ - test_data_override2_scalar.sh + test_data_override2_scalar.sh test_data_override_weights.sh # Clean up CLEANFILES = input.nml *.nc* *.out diag_table data_table data_table.yaml INPUT/* *.dpi *.spi *.dyn *.spl *-files/* diff --git a/test_fms/data_override/test_data_override2_mono.sh b/test_fms/data_override/test_data_override2_mono.sh index cf47a152f9..be1cce4103 100755 --- a/test_fms/data_override/test_data_override2_mono.sh +++ b/test_fms/data_override/test_data_override2_mono.sh @@ -59,17 +59,19 @@ _EOF cat <<_EOF > data_table.yaml data_table: -- gridname: OCN - fieldname_code: runoff_increasing - fieldname_file: runoff - file_name: ./INPUT/bilinear_increasing.nc - interpol_method: bilinear +- grid_name: OCN + fieldname_in_model: runoff_increasing + override_file: + - fieldname_in_file: runoff + file_name: ./INPUT/bilinear_increasing.nc + interp_method: bilinear factor: 1.0 -- gridname: OCN - fieldname_code: runoff_decreasing - fieldname_file: runoff - file_name: ./INPUT/bilinear_decreasing.nc - interpol_method: bilinear +- grid_name: OCN + fieldname_in_model: runoff_decreasing + override_file: + - fieldname_in_file: runoff + file_name: ./INPUT/bilinear_decreasing.nc + interp_method: bilinear factor: 1.0 _EOF diff --git a/test_fms/data_override/test_data_override2_ongrid.sh b/test_fms/data_override/test_data_override2_ongrid.sh index 2e1d7a1b03..e9f36712ce 100755 --- a/test_fms/data_override/test_data_override2_ongrid.sh +++ b/test_fms/data_override/test_data_override2_ongrid.sh @@ -52,12 +52,13 @@ use_data_table_yaml=.True. 
_EOF cat <<_EOF > data_table.yaml data_table: - - gridname : OCN - fieldname_code : runoff - fieldname_file : runoff - file_name : INPUT/runoff.daitren.clim.1440x1080.v20180328.nc - interpol_method : none - factor : 1.0 + - grid_name: OCN + fieldname_in_model: runoff + override_file: + - fieldname_in_file: runoff + file_name: INPUT/runoff.daitren.clim.1440x1080.v20180328.nc + interp_method: none + factor: 1.0 _EOF fi @@ -83,4 +84,4 @@ test_expect_success "data_override get_grid_v1 (${KIND})" ' done rm -rf INPUT *.nc # remove any leftover files to reduce size -test_done \ No newline at end of file +test_done diff --git a/test_fms/data_override/test_data_override2_scalar.sh b/test_fms/data_override/test_data_override2_scalar.sh index faf9aca08f..ac19b2b0a6 100755 --- a/test_fms/data_override/test_data_override2_scalar.sh +++ b/test_fms/data_override/test_data_override2_scalar.sh @@ -48,11 +48,12 @@ use_data_table_yaml=.True. _EOF cat <<_EOF > data_table.yaml data_table: - - gridname : OCN - fieldname_code : co2 - fieldname_file : co2 - file_name : INPUT/scalar.nc - interpol_method : none + - grid_name: OCN + fieldname_in_model: co2 + override_file: + - fieldname_in_file: co2 + file_name: INPUT/scalar.nc + interp_method: none factor : 1.0 _EOF fi @@ -68,4 +69,4 @@ test_expect_success "data_override scalar field (${KIND})" ' done rm -rf INPUT *.nc # remove any leftover files to reduce size -test_done \ No newline at end of file +test_done diff --git a/test_fms/data_override/test_data_override_ongrid.F90 b/test_fms/data_override/test_data_override_ongrid.F90 index 4345bb9f86..a05eb9d6c8 100644 --- a/test_fms/data_override/test_data_override_ongrid.F90 +++ b/test_fms/data_override/test_data_override_ongrid.F90 @@ -39,8 +39,8 @@ program test_data_override_ongrid integer, parameter :: lkind = DO_TEST_KIND_ integer, dimension(2) :: layout = (/2,3/) !< Domain layout -integer :: nlon !< Number of points in x axis -integer :: nlat !< Number of points in y axis +integer :: nlon = 360 !< Number of points in x axis +integer :: nlat = 180 !< Number of points in y axis type(domain2d) :: Domain !< Domain with mask table integer :: is !< Starting x index integer :: ie !< Ending x index @@ -51,9 +51,10 @@ program test_data_override_ongrid integer, parameter :: ongrid = 1 integer, parameter :: bilinear = 2 integer, parameter :: scalar = 3 +integer, parameter :: weight_file = 4 integer :: test_case = ongrid -namelist / test_data_override_ongrid_nml / nhalox, nhaloy, test_case +namelist / test_data_override_ongrid_nml / nhalox, nhaloy, test_case, nlon, nlat, layout call mpp_init call fms2_io_init @@ -61,8 +62,6 @@ program test_data_override_ongrid read (input_nml_file, test_data_override_ongrid_nml, iostat=io_status) if (io_status > 0) call mpp_error(FATAL,'=>test_data_override_ongrid: Error reading input.nml') - - !< Wait for the root PE to catch up call mpp_sync @@ -70,9 +69,6 @@ program test_data_override_ongrid call set_calendar_type(NOLEAP) -nlon = 360 -nlat = 180 - !< Create a domain nlonXnlat with mask call mpp_domains_set_stack_size(17280000) call mpp_define_domains( (/1,nlon,1,nlat/), layout, Domain, xhalo=nhalox, yhalo=nhaloy, name='test_data_override_emc') @@ -86,6 +82,8 @@ program test_data_override_ongrid call generate_bilinear_input_file () case (scalar) call generate_scalar_input_file () +case (weight_file) + call generate_weight_input_file () end select call mpp_sync() @@ -101,6 +99,8 @@ program test_data_override_ongrid call bilinear_test() case (scalar) call scalar_test() +case (weight_file) + 
call weight_file_test() end select call mpp_exit @@ -443,6 +443,99 @@ subroutine bilinear_test() deallocate(runoff_decreasing, runoff_increasing) end subroutine bilinear_test +subroutine generate_weight_input_file() + call create_grid_spec_file () + call create_ocean_mosaic_file() + call create_ocean_hgrid_file() + call create_bilinear_data_file(.true.) + call create_weight_file() +end subroutine + +subroutine create_weight_file() + type(FmsNetcdfFile_t) :: fileobj + real(kind=r8_kind), allocatable :: vdata(:,:,:) + character(len=5) :: dim_names(3) + + dim_names(1) = "nlon" + dim_names(2) = "nlat" + if (open_file(fileobj, "INPUT/remap_file.nc", "overwrite")) then + call register_axis(fileobj, "nlon", nlon) + call register_axis(fileobj, "nlat", nlat) + call register_axis(fileobj, "three", 3) + call register_axis(fileobj, "four", 4) + + dim_names(3) = "three" + call register_field(fileobj, "index", "int", dim_names) + + dim_names(3) = "four" + call register_field(fileobj, "weight", "double", dim_names) + + allocate(vdata(nlon,nlat,3)) + vdata(1,:,1) = 1 + vdata(2,:,1) = 2 + vdata(3,:,1) = 3 + vdata(4,:,1) = 4 + vdata(5,:,1) = 5 + vdata(:,1:2,2) = 1 + vdata(:,3,2) = 2 + vdata(:,4,2) = 3 + vdata(:,5,2) = 4 + vdata(:,6,2) = 5 + vdata(:,:,3) = 1 + call write_data(fileobj, "index", vdata) + deallocate(vdata) + + allocate(vdata(nlon,nlat,4)) + vdata = 0.5_r8_kind + vdata(:,1,3) = 1_r8_kind + vdata(:,6,3) = 1_r8_kind + vdata(:,1,4) = 0_r8_kind + vdata(:,6,4) = 0_r8_kind + + call write_data(fileobj, "weight", vdata) + deallocate(vdata) + + call close_file(fileobj) + endif +end subroutine create_weight_file + +subroutine weight_file_test() + type(time_type) :: Time !< Time + real(lkind), allocatable, dimension(:,:) :: runoff !< Data from normal override + real(lkind), allocatable, dimension(:,:) :: runoff_weight !< Data from weight file override + real(lkind) :: threshold !< Threshold for the difference in answers + + integer :: i, j, k + logical :: success + + allocate(runoff(is:ie,js:je)) + allocate(runoff_weight(is:ie,js:je)) + + runoff = 999_lkind + runoff_weight = 999_lkind + Time = set_date(1,1,4,0,0,0) + call data_override('OCN','runoff_obs',runoff, Time, override=success) + if (.not. success) call mpp_error(FATAL, "Data override failed") + call data_override('OCN','runoff_obs_weights',runoff_weight, Time, override=success) + if (.not. success) call mpp_error(FATAL, "Data override failed") + + threshold = 1e-09 + if (lkind .eq. 4) then + threshold = 1e-03 + endif + + do i = is, ie + do j = js, je + if (abs(runoff(i,j) - runoff_weight(i,j)) .gt. threshold) then + call mpp_error(FATAL, "The data is not the same: "// & + string(i)//","//string(j)//":"// & + string(runoff(i,j))//" vs "//string(runoff_weight(i,j))) + endif + enddo + enddo + deallocate(runoff, runoff_weight) +end subroutine weight_file_test + !> @brief Generates the input for the bilinear data_override test_case subroutine generate_scalar_input_file() if (mpp_pe() .eq. mpp_root_pe()) then diff --git a/test_fms/data_override/test_data_override_weights.sh b/test_fms/data_override/test_data_override_weights.sh new file mode 100755 index 0000000000..a3bc8902e4 --- /dev/null +++ b/test_fms/data_override/test_data_override_weights.sh @@ -0,0 +1,76 @@ +#!/bin/sh + +#*********************************************************************** +#* GNU Lesser General Public License +#* +#* This file is part of the GFDL Flexible Modeling System (FMS). 
+#* +#* FMS is free software: you can redistribute it and/or modify it under +#* the terms of the GNU Lesser General Public License as published by +#* the Free Software Foundation, either version 3 of the License, or (at +#* your option) any later version. +#* +#* FMS is distributed in the hope that it will be useful, but WITHOUT +#* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or +#* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License +#* for more details. +#* +#* You should have received a copy of the GNU Lesser General Public +#* License along with FMS. If not, see . +#*********************************************************************** +# +# Copyright (c) 2019-2021 Ed Hartnett, Uriel Ramirez, Seth Underwood + +# Set common test settings. +. ../test-lib.sh + +output_dir +[ ! -d "INPUT" ] && mkdir -p "INPUT" + +cat <<_EOF > data_table.yaml +data_table: +- grid_name: OCN + fieldname_in_model: runoff_obs + override_file: + - fieldname_in_file: runoff + file_name: ./INPUT/bilinear_increasing.nc + interp_method: bilinear + factor: 1.0 +- grid_name: OCN + fieldname_in_model: runoff_obs_weights + override_file: + - fieldname_in_file: runoff + file_name: ./INPUT/bilinear_increasing.nc + interp_method: bilinear + external_weights: + - file_name: ./INPUT/remap_file.nc + source: fregrid + factor: 1.0 +_EOF + +cat <<_EOF > input.nml +&data_override_nml + use_data_table_yaml = .True. +/ + +&test_data_override_ongrid_nml + test_case = 4 + nlon = 5 + nlat = 6 + layout = 1, 2 +/ +_EOF + +#The test only runs with yaml +if [ -z $parser_skip ]; then + for KIND in r4 r8 + do + rm -rf INPUT/. + test_expect_success "test_data_override with and without weight files -yaml (${KIND})" ' + mpirun -n 2 ../test_data_override_ongrid_${KIND} + ' + done +fi + +rm -rf INPUT +test_done diff --git a/test_fms/diag_manager/Makefile.am b/test_fms/diag_manager/Makefile.am index df5a8a19fa..a224eb2451 100644 --- a/test_fms/diag_manager/Makefile.am +++ b/test_fms/diag_manager/Makefile.am @@ -34,7 +34,7 @@ check_PROGRAMS = test_diag_manager test_diag_manager_time \ check_time_min check_time_max check_time_sum check_time_avg test_diag_diurnal check_time_diurnal \ check_time_pow check_time_rms check_subregional test_cell_measures test_var_masks \ check_var_masks test_multiple_send_data test_diag_out_yaml test_output_every_freq \ - test_dm_weights + test_dm_weights test_prepend_date # This is the source code for the test. 
test_output_every_freq_SOURCES = test_output_every_freq.F90
@@ -64,6 +64,7 @@ check_subregional_SOURCES = check_subregional.F90
test_var_masks_SOURCES = test_var_masks.F90
check_var_masks_SOURCES = check_var_masks.F90
test_multiple_send_data_SOURCES = test_multiple_send_data.F90
+test_prepend_date_SOURCES = test_prepend_date.F90
TEST_EXTENSIONS = .sh
SH_LOG_DRIVER = env AM_TAP_AWK='$(AWK)' $(SHELL) \
@@ -73,7 +74,7 @@ SH_LOG_DRIVER = env AM_TAP_AWK='$(AWK)' $(SHELL) \
TESTS = test_diag_manager2.sh test_time_none.sh test_time_min.sh test_time_max.sh test_time_sum.sh \
test_time_avg.sh test_time_pow.sh test_time_rms.sh test_time_diurnal.sh test_cell_measures.sh \
test_subregional.sh test_var_masks.sh test_multiple_send_data.sh test_output_every_freq.sh \
- test_dm_weights.sh test_flush_nc_file.sh
+ test_dm_weights.sh test_flush_nc_file.sh test_prepend_date.sh
testing_utils.mod: testing_utils.$(OBJEXT)
@@ -81,7 +82,7 @@ testing_utils.mod: testing_utils.$(OBJEXT)
EXTRA_DIST = test_diag_manager2.sh check_crashes.sh test_time_none.sh test_time_min.sh test_time_max.sh \
test_time_sum.sh test_time_avg.sh test_time_pow.sh test_time_rms.sh test_time_diurnal.sh \
test_cell_measures.sh test_subregional.sh test_var_masks.sh test_multiple_send_data.sh \
- test_flush_nc_file.sh test_dm_weights.sh test_output_every_freq.sh
+ test_flush_nc_file.sh test_dm_weights.sh test_output_every_freq.sh test_prepend_date.sh
if USING_YAML
skipflag=""
diff --git a/test_fms/diag_manager/test_diag_manager2.sh b/test_fms/diag_manager/test_diag_manager2.sh
index 629934b0b4..7b280c8855 100755
--- a/test_fms/diag_manager/test_diag_manager2.sh
+++ b/test_fms/diag_manager/test_diag_manager2.sh
@@ -516,6 +516,84 @@ if [ -z "${skipflag}" ]; then
title: test_diag_manager
base_date: 2 1 1 0 0 0
diag_files:
+- file_name: wild_card_name%4yr%2mo%2dy%2hr
+ filename_time: end
+ freq: 6 hours
+ time_units: hours
+ unlimdim: time
+ new_file_freq: 6 hours
+ start_time: 2 1 1 0 0 0
+ file_duration: 12 hours
+ module: test_diag_manager_mod
+ reduction: average
+ kind: r4
+ varlist:
+ - var_name: sst
+ output_name: sst
+ global_meta:
+ - is_a_file: true
+- file_name: normal
+ freq: 24 days
+ time_units: hours
+ unlimdim: records
+# Here the file-level module, kind and reduction are overridden by what's set on the variable
+ module: potato_mod
+ kind: r8
+ reduction: min
+ varlist:
+ - module: test_diag_manager_mod
+ var_name: sst
+ output_name: sst
+ reduction: average
+ kind: r4
+ write_var: true
+ attributes:
+ - do_sst: .true.
+ sub_region: + - grid_type: latlon + corner1: -80, 0 + corner2: -80, 75 + corner3: -60, 0 + corner4: -60, 75 +- file_name: normal2 + freq: -1 + time_units: hours + unlimdim: records + write_file: true + module: test_diag_manager_mod + reduction: none + kind: r4 + varlist: + - var_name: sstt + output_name: sstt + long_name: S S T + - var_name: sstt2 + output_name: sstt2 + long_name: S S T + write_var: false + sub_region: + - grid_type: index + tile: 1 + corner1: 10, 15 + corner2: 20, 15 + corner3: 10, 25 + corner4: 20, 25 +- file_name: normal3 + freq: -1 + time_units: hours + unlimdim: records + write_file: false +_EOF + +my_test_count=`expr $my_test_count + 1` + test_expect_success "diag_yaml test with the simple diag table.yaml (test $my_test_count)" ' + mpirun -n 1 ../test_diag_yaml + ' + + cat <<_EOF > diag_table.yaml +title: test_diag_manager +base_date: 2 1 1 0 0 0 +diag_files: - file_name: wild_card_name%4yr%2mo%2dy%2hr filename_time: end freq: 6 hours diff --git a/test_fms/diag_manager/test_prepend_date.F90 b/test_fms/diag_manager/test_prepend_date.F90 new file mode 100644 index 0000000000..24a5ae2986 --- /dev/null +++ b/test_fms/diag_manager/test_prepend_date.F90 @@ -0,0 +1,124 @@ +!*********************************************************************** +!* GNU Lesser General Public License +!* +!* This file is part of the GFDL Flexible Modeling System (FMS). +!* +!* FMS is free software: you can redistribute it and/or modify it under +!* the terms of the GNU Lesser General Public License as published by +!* the Free Software Foundation, either version 3 of the License, or (at +!* your option) any later version. +!* +!* FMS is distributed in the hope that it will be useful, but WITHOUT +!* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or +!* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License +!* for more details. +!* +!* You should have received a copy of the GNU Lesser General Public +!* License along with FMS. If not, see . +!*********************************************************************** + +!> @brief This programs tests diag manager when the init date is prepended to the file name +program test_prepend_date + + use fms_mod, only: fms_init, fms_end, string + use diag_manager_mod, only: diag_axis_init, send_data, diag_send_complete, diag_manager_set_time_end, & + register_diag_field, diag_manager_init, diag_manager_end, register_static_field, & + diag_axis_init + use time_manager_mod, only: time_type, operator(+), JULIAN, set_time, set_calendar_type, set_date + use mpp_mod, only: FATAL, mpp_error, input_nml_file + use fms2_io_mod, only: FmsNetcdfFile_t, open_file, close_file, read_data, get_dimension_size + use platform_mod, only: r4_kind + + implicit none + + integer :: id_var0, id_var2, id_var1 !< diag field ids + integer :: id_axis1 !< Id for axis + logical :: used !< for send_data calls + integer :: ntimes = 48 !< Number of time steps + real :: vdata !< Buffer to store the data + type(time_type) :: Time !< "Model" time + type(time_type) :: Time_step !< Time step for the "simulation" + integer :: i !< For do loops + logical :: pass_diag_time = .True. !< .True. if passing the time to diag_manager_init + + integer :: io_status !< Status when reading the namelist + + namelist / test_prepend_date_nml / pass_diag_time + + call fms_init + + read (input_nml_file, test_prepend_date_nml, iostat=io_status) + if (io_status > 0) call mpp_error(FATAL,'=>test_prepend_date: Error reading input.nml') + + call set_calendar_type(JULIAN) + + ! 
This is going to be different from the base_date
+ if (pass_diag_time) then
+ call diag_manager_init(time_init=(/2, 1, 1, 0, 0, 0/))
+ else
+ call diag_manager_init()
+ endif
+
+ Time = set_date(2,1,1,0,0,0)
+ Time_step = set_time (3600,0) !< 1 hour
+ call diag_manager_set_time_end(set_date(2,1,3,0,0,0))
+
+ id_axis1 = diag_axis_init('dummy_axis', (/real(1.)/), "mullions", "X")
+ id_var0 = register_diag_field ('ocn_mod', 'var0', Time)
+ id_var2 = register_static_field ('ocn_mod', 'var2', (/id_axis1/))
+
+ ! This is a different start_time, should lead to a crash if the variable is in the diag table yaml
+ id_var1 = register_diag_field ('ocn_mod', 'var1', set_date(2,1,6,0,0,0))
+
+ used = send_data(id_var2, real(123.456))
+ do i = 1, ntimes
+ Time = Time + Time_step
+ vdata = real(i)
+
+ used = send_data(id_var0, vdata, Time) !< Sending data every hour!
+
+ call diag_send_complete(Time_step)
+ enddo
+
+ call diag_manager_end(Time)
+
+ call check_output()
+ call fms_end
+
+ contains
+
+ !> @brief Check the diag manager output
+ subroutine check_output()
+ type(FmsNetcdfFile_t) :: fileobj !< Fms2io fileobj
+ integer :: var_size !< Size of the variable reading
+ real(kind=r4_kind), allocatable :: var_data(:) !< Buffer to read variable data to
+ integer :: j !< For looping
+
+ if (.not. open_file(fileobj, "00020101.test_non_static.nc", "read")) &
+ call mpp_error(FATAL, "Error opening file:00020101.test_non_static.nc to read")
+
+ call get_dimension_size(fileobj, "time", var_size)
+ if (var_size .ne. 48) call mpp_error(FATAL, "The dimension of time in the file:00020101.test_non_static.nc is not the "//&
+ "correct size!")
+ allocate(var_data(var_size))
+ var_data = -999.99
+
+ call read_data(fileobj, "var0", var_data)
+ do j = 1, var_size
+ if (var_data(j) .ne. real(j, kind=r4_kind)) call mpp_error(FATAL, "The variable data for var0 at time level:"//&
+ string(j)//" is not the correct value!")
+ enddo
+
+ call close_file(fileobj)
+
+ if (.not. open_file(fileobj, "00020101.test_static.nc", "read")) &
+ call mpp_error(FATAL, "Error opening file:00020101.test_static.nc to read")
+
+ call read_data(fileobj, "var2", var_data(1))
+ if (var_data(1) .ne. real(123.456, kind=r4_kind)) call mpp_error(FATAL, &
+ "The variable data for var2 is not the correct value!")
+
+ call close_file(fileobj)
+
+ end subroutine check_output
+end program test_prepend_date
diff --git a/test_fms/diag_manager/test_prepend_date.sh b/test_fms/diag_manager/test_prepend_date.sh
new file mode 100755
index 0000000000..13bbf7c77a
--- /dev/null
+++ b/test_fms/diag_manager/test_prepend_date.sh
@@ -0,0 +1,87 @@
+#!/bin/sh
+
+#***********************************************************************
+#* GNU Lesser General Public License
+#*
+#* This file is part of the GFDL Flexible Modeling System (FMS).
+#*
+#* FMS is free software: you can redistribute it and/or modify it under
+#* the terms of the GNU Lesser General Public License as published by
+#* the Free Software Foundation, either version 3 of the License, or (at
+#* your option) any later version.
+#*
+#* FMS is distributed in the hope that it will be useful, but WITHOUT
+#* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
+#* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
+#* for more details.
+#*
+#* You should have received a copy of the GNU Lesser General Public
+#* License along with FMS. If not, see .
+#***********************************************************************
+
+# Set common test settings.
+. 
../test-lib.sh + +if [ -z "${skipflag}" ]; then +# create and enter directory for in/output files +output_dir + +cat <<_EOF > diag_table.yaml +title: test_prepend_date +base_date: 1 1 1 0 0 0 +diag_files: +- file_name: test_non_static + time_units: hours + unlimdim: time + freq: 1 hours + varlist: + - module: ocn_mod + var_name: var0 + reduction: average + kind: r4 +- file_name: test_static + time_units: hours + unlimdim: time + freq: -1 hours + varlist: + - module: ocn_mod + var_name: var2 + reduction: none + kind: r4 +_EOF + +# remove any existing files that would result in false passes during checks +rm -f *.nc +my_test_count=1 +printf "&diag_manager_nml \n use_modern_diag=.true. \n/" | cat > input.nml +test_expect_success "Running diag_manager and checking that the date was prepended correctly (test $my_test_count)" ' + mpirun -n 1 ../test_prepend_date +' + +cat <<_EOF > diag_table.yaml +title: test_prepend_date +base_date: 1 1 1 0 0 0 +diag_files: +- file_name: test_non_static + time_units: hours + unlimdim: time + freq: 1 hours + varlist: + - module: ocn_mod + var_name: var0 + reduction: average + kind: r4 + - module: ocn_mod + var_name: var1 + reduction: average + kind: r4 +_EOF + +printf "&diag_manager_nml \n use_modern_diag=.true. \n/ \n &test_prepend_date_nml \n pass_diag_time=.false. \n /" | cat > input.nml + +test_expect_failure "Running diag_manager with fields that have a different start time (test $my_test_count)" ' + mpirun -n 1 ../test_prepend_date +' + +fi +test_done diff --git a/test_fms/field_manager/test_field_manager2.sh b/test_fms/field_manager/test_field_manager2.sh index d3a165b164..2485701598 100755 --- a/test_fms/field_manager/test_field_manager2.sh +++ b/test_fms/field_manager/test_field_manager2.sh @@ -66,16 +66,16 @@ field_table: - variable: radon longname: radon-222 units: VMR*1E21 - profile_type: fixed - subparams: - - surface_value: 0.0E+00 + profile_type: + - value: fixed + surface_value: 0.0E+00 convection: all - model_type: ocean_mod varlist: - variable: biotic1 - diff_horiz: linear - subparams: - - slope: ok + diff_horiz: + - value: linear + slope: ok longname: biotic one - variable: age_ctl - model_type: land_mod diff --git a/test_fms/fms/Makefile.am b/test_fms/fms/Makefile.am index f1ceef9ed9..8c2e2fb46b 100644 --- a/test_fms/fms/Makefile.am +++ b/test_fms/fms/Makefile.am @@ -49,7 +49,7 @@ TESTS = test_fms2.sh # These will also be included in the distribution. 
EXTRA_DIST = test_fms2.sh -CLEANFILES = input.nml logfile.*.out *.mod *.o *.dpi *.spi *.dyn *.spl +CLEANFILES = input.nml *.out *.mod *.o *.dpi *.spi *.dyn *.spl clean-local: rm -rf RESTART diff --git a/test_fms/horiz_interp/test_horiz_interp.F90 b/test_fms/horiz_interp/test_horiz_interp.F90 index fd0d077a91..c56cf931f8 100644 --- a/test_fms/horiz_interp/test_horiz_interp.F90 +++ b/test_fms/horiz_interp/test_horiz_interp.F90 @@ -38,9 +38,12 @@ program horiz_interp_test use fms_mod, only : check_nml_error, fms_init use horiz_interp_mod, only : horiz_interp_init, horiz_interp_new, horiz_interp_del use horiz_interp_mod, only : horiz_interp, horiz_interp_type -use horiz_interp_spherical_mod, only: horiz_interp_spherical_wght -use horiz_interp_type_mod, only: SPHERICA +use horiz_interp_type_mod, only: SPHERICAL use constants_mod, only : constants_init, PI +use horiz_interp_bilinear_mod, only: horiz_interp_bilinear_new +use horiz_interp_spherical_mod, only: horiz_interp_spherical_wght, horiz_interp_spherical_new +use horiz_interp_bicubic_mod, only: horiz_interp_bicubic_new +use horiz_interp_conserve_mod, only: horiz_interp_conserve_new use platform_mod implicit none @@ -957,28 +960,30 @@ subroutine test_horiz_interp_conserve !> Tests the assignment overload for horiz_interp_type !! creates some new instances of the derived type for the different methods !! and tests equality of fields after initial weiht calculations + !! Also tests creating the types via the method-specific *_new routines to ensure + !! they can be created/deleted without allocation errors. subroutine test_assignment() type(horiz_interp_type) :: Interp_new1, Interp_new2, Interp_cp, intp_3 - !! grid data points - real(HI_TEST_KIND_), allocatable, dimension(:) :: lat_in_1D, lon_in_1D - real(HI_TEST_KIND_), allocatable, dimension(:,:) :: lat_in_2D, lon_in_2D - !! output data points - real(HI_TEST_KIND_), allocatable, dimension(:) :: lat_out_1D, lon_out_1D - real(HI_TEST_KIND_), allocatable, dimension(:,:) :: lat_out_2D, lon_out_2D - real(HI_TEST_KIND_), allocatable, dimension(:) :: lat_out_bil, lon_out_bil - real(HI_TEST_KIND_), allocatable, dimension(:,:) :: lat_in_bil, lon_in_bil - !! array sizes and number of lat/lon per index - real(HI_TEST_KIND_) :: nlon_in, nlat_in - real(HI_TEST_KIND_) :: nlon_out, nlat_out - real(HI_TEST_KIND_) :: dlon_src, dlat_src, dlon_dst, dlat_dst - !! 
parameters for lon/lat setup - real(HI_TEST_KIND_) :: lon_src_beg = 0._lkind, lon_src_end = 360._lkind - real(HI_TEST_KIND_) :: lat_src_beg = -90._lkind, lat_src_end = 90._lkind - real(HI_TEST_KIND_) :: lon_dst_beg = 0.0_lkind, lon_dst_end = 360._lkind - real(HI_TEST_KIND_) :: lat_dst_beg = -90._lkind, lat_dst_end = 90._lkind - real(HI_TEST_KIND_) :: D2R = real(PI,HI_TEST_KIND_)/180._lkind - real(HI_TEST_KIND_) :: R2D = 180._lkind/real(PI,HI_TEST_KIND_) - real(HI_TEST_KIND_), parameter :: SMALL = 1.0e-10_lkind + real(HI_TEST_KIND_), allocatable, dimension(:) :: lat_in_1D, lon_in_1D !< 1D grid data points + real(HI_TEST_KIND_), allocatable, dimension(:,:) :: lat_in_2D, lon_in_2D !< 2D grid data points + real(HI_TEST_KIND_), allocatable, dimension(:) :: lat_out_1D, lon_out_1D !< 1D grid output points + real(HI_TEST_KIND_), allocatable, dimension(:,:) :: lat_out_2D, lon_out_2D !< 2D grid output points + integer :: nlon_in, nlat_in !< array sizes for input grids + integer :: nlon_out, nlat_out !< array sizes for output grids + real(HI_TEST_KIND_) :: dlon_src, dlat_src, dlon_dst, dlat_dst !< lon/lat size per data point + real(HI_TEST_KIND_) :: lon_src_beg = 0._lkind, lon_src_end = 360._lkind!< source grid starting/ending + !! longitudes + real(HI_TEST_KIND_) :: lat_src_beg = -90._lkind, lat_src_end = 90._lkind !< source grid starting/ending + !! latitudes + real(HI_TEST_KIND_) :: lon_dst_beg = 0.0_lkind, lon_dst_end = 360._lkind !< destination grid + !! starting/ending longitudes + real(HI_TEST_KIND_) :: lat_dst_beg = -90._lkind, lat_dst_end = 90._lkind !< destination grid + !! starting/ending latitudes + real(HI_TEST_KIND_) :: D2R = real(PI,HI_TEST_KIND_)/180._lkind !< radians per degree + real(HI_TEST_KIND_) :: R2D = 180._lkind/real(PI,HI_TEST_KIND_) !< degrees per radian + real(HI_TEST_KIND_), allocatable :: lon_src_1d(:), lat_src_1d(:) !< src data used for bicubic test + real(HI_TEST_KIND_), allocatable :: lon_dst_1d(:), lat_dst_1d(:) !< destination data used for bicubic test + ! set up longitude and latitude of source/destination grid. dlon_src = (lon_src_end-lon_src_beg)/real(ni_src, lkind) @@ -1062,6 +1067,15 @@ subroutine test_assignment() call horiz_interp_del(Interp_new1) call horiz_interp_del(Interp_new2) call horiz_interp_del(Interp_cp) + ! test deletion after direct calls + call horiz_interp_conserve_new(Interp_new1, lon_in_1d, lat_in_1d, lon_out_1d, lat_out_1d) + call horiz_interp_del(Interp_new1) + call horiz_interp_conserve_new(Interp_new1, lon_in_1d, lat_in_1d, lon_out_2d, lat_out_2d) + call horiz_interp_del(Interp_new1) + call horiz_interp_conserve_new(Interp_new1, lon_in_2d, lat_in_2d, lon_out_1d, lat_out_1d) + call horiz_interp_del(Interp_new1) + call horiz_interp_conserve_new(Interp_new1, lon_in_2d, lat_in_2d, lon_out_2d, lat_out_2d) + call horiz_interp_del(Interp_new1) ! bicubic only works with 1d src ! 1dx1d @@ -1084,6 +1098,28 @@ subroutine test_assignment() call horiz_interp_del(Interp_new1) call horiz_interp_del(Interp_new2) call horiz_interp_del(Interp_cp) + ! test deletion after direct calls + ! 
this set up is usually done within horiz_interp_new + nlon_in = size(lon_in_1d(:))-1; nlat_in = size(lat_in_1d(:))-1 + nlon_out = size(lon_out_1d(:))-1; nlat_out = size(lat_out_1d(:))-1 + allocate(lon_src_1d(nlon_in), lat_src_1d(nlat_in)) + allocate(lon_dst_1d(nlon_out), lat_dst_1d(nlat_out)) + do i = 1, nlon_in + lon_src_1d(i) = (lon_in_1d(i) + lon_in_1d(i+1)) * 0.5_lkind + enddo + do j = 1, nlat_in + lat_src_1d(j) = (lat_in_1d(j) + lat_in_1d(j+1)) * 0.5_lkind + enddo + do i = 1, nlon_out + lon_dst_1d(i) = (lon_out_1d(i) + lon_out_1d(i+1)) * 0.5_lkind + enddo + do j = 1, nlat_out + lat_dst_1d(j) = (lat_out_1d(j) + lat_out_1d(j+1)) * 0.5_lkind + enddo + call horiz_interp_bicubic_new(Interp_new1, lon_src_1d, lat_src_1d, lon_out_2d, lat_out_2d) + call horiz_interp_del(Interp_new1) + call horiz_interp_bicubic_new(Interp_new1, lon_src_1d, lat_src_1d, lon_dst_1d, lat_dst_1d) + call horiz_interp_del(Interp_new1) deallocate(lon_out_2D, lat_out_2D, lon_in_2D, lat_in_2D) allocate(lon_out_2D(ni_dst, nj_dst), lat_out_2D(ni_dst, nj_dst)) @@ -1117,11 +1153,14 @@ subroutine test_assignment() call horiz_interp_del(Interp_new1) call horiz_interp_del(Interp_new2) call horiz_interp_del(Interp_cp) + ! check deletion after direct calls + call horiz_interp_spherical_new(Interp_new1, lon_in_2d, lat_in_2d, lon_out_2d, lat_out_2d) + call horiz_interp_del(Interp_new1) ! bilinear ! 1dx1d - call horiz_interp_new(Interp_new1, lon_in_1D, lat_in_1D, lon_in_1D, lat_in_1D, interp_method="bilinear") - call horiz_interp_new(Interp_new2, lon_in_1D, lat_in_1D, lon_in_1D, lat_in_1D, interp_method="bilinear") + call horiz_interp_new(Interp_new1, lon_in_1D, lat_in_1D, lon_out_1D, lat_out_1D, interp_method="bilinear") + call horiz_interp_new(Interp_new2, lon_in_1D, lat_in_1D, lon_out_1D, lat_out_1D, interp_method="bilinear") Interp_cp = Interp_new1 call mpp_error(NOTE,"testing horiz_interp_type assignment 1x1d bilinear") call check_type_eq(Interp_cp, Interp_new2) @@ -1130,8 +1169,8 @@ subroutine test_assignment() call horiz_interp_del(Interp_new2) call horiz_interp_del(Interp_cp) ! 1dx2d - call horiz_interp_new(Interp_new1, lon_in_1D, lat_in_1D, lon_in_2D, lat_in_2D, interp_method="bilinear") - call horiz_interp_new(Interp_new2, lon_in_1D, lat_in_1D, lon_in_2D, lat_in_2D, interp_method="bilinear") + call horiz_interp_new(Interp_new1, lon_in_1D, lat_in_1D, lon_out_2D, lat_out_2D, interp_method="bilinear") + call horiz_interp_new(Interp_new2, lon_in_1D, lat_in_1D, lon_out_2D, lat_out_2D, interp_method="bilinear") Interp_cp = Interp_new1 call mpp_error(NOTE,"testing horiz_interp_type assignment 1x2d bilinear") call check_type_eq(Interp_cp, Interp_new2) @@ -1160,8 +1199,8 @@ subroutine test_assignment() call horiz_interp_del(Interp_new2) call horiz_interp_del(Interp_cp) ! 2dx2d - call horiz_interp_new(Interp_new1, lon_in_2D, lat_in_2D, lon_in_2D, lat_in_2D, interp_method="bilinear") - call horiz_interp_new(Interp_new2, lon_in_2D, lat_in_2D, lon_in_2D, lat_in_2D, interp_method="bilinear") + call horiz_interp_new(Interp_new1, lon_in_2D, lat_in_2D, lon_out_2D, lat_out_2D, interp_method="bilinear") + call horiz_interp_new(Interp_new2, lon_in_2D, lat_in_2D, lon_out_2D, lat_out_2D, interp_method="bilinear") Interp_cp = Interp_new1 call mpp_error(NOTE,"testing horiz_interp_type assignment 1x2d bilinear") call check_type_eq(Interp_cp, Interp_new2) @@ -1169,6 +1208,11 @@ subroutine test_assignment() call horiz_interp_del(Interp_new1) call horiz_interp_del(Interp_new2) call horiz_interp_del(Interp_cp) + ! 
check deletion after direct calls + call horiz_interp_bilinear_new(Interp_new1, lon_in_1d, lat_in_1d, lon_out_2d, lat_out_2d) + call horiz_interp_del(Interp_new1) + call horiz_interp_bilinear_new(Interp_new1, lon_in_2d, lat_in_2d, lon_out_2d, lat_out_2d) + call horiz_interp_del(Interp_new1) end subroutine !> helps assignment test with derived type comparisons @@ -1230,7 +1274,7 @@ subroutine check_type_eq(interp_1, interp_2) call mpp_error(FATAL, "Invalid value for copied horiz_interp_type field: mask_in") endif !! only set during spherical - if(interp_1%interp_method .eq. SPHERICA) then + if(interp_1%interp_method .eq. SPHERICAL) then if( interp_2%horizInterpReals4_type%max_src_dist .ne. interp_1%horizInterpReals4_type%max_src_dist) & call mpp_error(FATAL, "Invalid value for copied horiz_interp_type field: max_src_dist") endif @@ -1292,7 +1336,7 @@ subroutine check_type_eq(interp_1, interp_2) endif !! only set during spherical - if(interp_1%interp_method .eq. SPHERICA) then + if(interp_1%interp_method .eq. SPHERICAL) then if( interp_2%horizInterpReals8_type%max_src_dist .ne. interp_1%horizInterpReals8_type%max_src_dist) & call mpp_error(FATAL, "Invalid value for copied horiz_interp_type field: max_src_dist") endif diff --git a/test_fms/mpp/Makefile.am b/test_fms/mpp/Makefile.am index 1d44b9bc93..c131f99545 100644 --- a/test_fms/mpp/Makefile.am +++ b/test_fms/mpp/Makefile.am @@ -67,7 +67,8 @@ check_PROGRAMS = test_mpp \ test_mpp_init_logfile \ test_mpp_clock_begin_end_id \ test_mpp_nesting \ - test_mpp_chksum + test_mpp_chksum \ + test_stdlog # These are the sources for the tests. test_mpp_SOURCES = test_mpp.F90 @@ -133,6 +134,7 @@ test_mpp_init_logfile_SOURCES=test_mpp_init_logfile.F90 test_mpp_clock_begin_end_id_SOURCES=test_mpp_clock_begin_end_id.F90 test_super_grid_SOURCES = test_super_grid.F90 test_mpp_chksum_SOURCES = test_mpp_chksum.F90 +test_stdlog_SOURCES = test_stdlog.F90 # ifort gets a internal error during compilation for this test, issue #1071 # we'll just remove the openmp flag if present since it doesn't use openmp at all @@ -177,7 +179,8 @@ TESTS = test_mpp_domains2.sh \ test_mpp_clock_begin_end_id.sh \ test_super_grid.sh \ test_mpp_nesting.sh \ - test_mpp_chksum.sh + test_mpp_chksum.sh \ + test_stdlog.sh # Define test file extensions and log driver TEST_EXTENSIONS = .sh @@ -221,7 +224,8 @@ EXTRA_DIST = test_mpp_domains2.sh \ test_mpp_clock_begin_end_id.sh \ test_super_grid.sh \ test_mpp_nesting.sh \ - test_mpp_chksum.sh + test_mpp_chksum.sh \ + test_stdlog.sh fill_halo.mod: fill_halo.$(OBJEXT) compare_data_checksums.mod: compare_data_checksums.$(OBJEXT) diff --git a/test_fms/mpp/test_stdlog.F90 b/test_fms/mpp/test_stdlog.F90 new file mode 100644 index 0000000000..61ee8d81c8 --- /dev/null +++ b/test_fms/mpp/test_stdlog.F90 @@ -0,0 +1,97 @@ +!*********************************************************************** +!* GNU Lesser General Public License +!* +!* This file is part of the GFDL Flexible Modeling System (FMS). +!* +!* FMS is free software: you can redistribute it and/or modify it under +!* the terms of the GNU Lesser General Public License as published by +!* the Free Software Foundation, either version 3 of the License, or (at +!* your option) any later version. +!* +!* FMS is distributed in the hope that it will be useful, but WITHOUT +!* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or +!* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License +!* for more details. 
+!*
+!* You should have received a copy of the GNU Lesser General Public
+!* License along with FMS. If not, see .
+!***********************************************************************
+
+!> @file
+!! @brief Unit test for the stdlog and checking warning log functionality
+!! @author Ryan Mulhall
+!! @email gfdl.climate.model.info@noaa.gov
+program test_stdlog
+ use mpp_mod, only : mpp_init, mpp_init_test_peset_allocated, stdlog
+ use mpp_mod, only : mpp_pe, mpp_root_pe, mpp_error, FATAL, WARNING, NOTE
+ use fms_mod, only : input_nml_file, check_nml_error
+
+ implicit none
+
+ integer :: log_unit !< Stores the returned standard log unit number
+ integer :: warn_unit !< Unit number for the warning log file
+ integer :: pe !< pe value
+ integer :: root_pe !< root pe value
+ integer :: ierr !< Error code
+ integer :: io !< Status of the namelist read
+
+ integer :: test_num = 1
+ namelist / test_stdlog_nml / test_num
+
+ call mpp_init()
+
+ read(input_nml_file, nml=test_stdlog_nml, iostat=io)
+ ierr = check_nml_error(io, 'test_stdlog_nml')
+
+ pe = mpp_pe()
+ root_pe = mpp_root_pe()
+ log_unit = stdlog()
+
+ print * , "running test num: ", test_num
+
+ select case(test_num)
+ case(1)
+ call test_write(.false.)
+ case(2)
+ call test_write(.true.)
+ case(3)
+ call check_write()
+ end select
+
+ call MPI_FINALIZE(ierr)
+
+ contains
+
+ subroutine test_write(do_error_test)
+ logical, intent(in) :: do_error_test !< causes a fatal error to check output if true
+
+ write(log_unit, *) "asdf"
+ call mpp_error(NOTE, "test note output")
+ call mpp_error(WARNING, "test warning output")
+ if(do_error_test) call mpp_error(FATAL, "test fatal output")
+ end subroutine test_write
+
+ subroutine check_write()
+ integer :: i, ref_num, u_num_warn
+ character(len=128) :: line
+ character(len=23), parameter :: warn_fname = 'warnfile.000000.out.old'
+ character(len=128) :: ref_line(4)
+
+ ref_line(1) = "NOTE from PE 0: MPP_DOMAINS_SET_STACK_SIZE: stack size set to 32768."
+ ref_line(2) = "NOTE from PE 0: test note output"
+ ref_line(3) = "WARNING from PE 0: test warning output"
+ ref_line(4) = "FATAL from PE 0: test fatal output"
+ open(newunit=u_num_warn, file=warn_fname, status="old", action="read")
+ ref_num = 1
+ do i=1, 7
+ read(u_num_warn, '(A)') line
+ if (trim(line) == '') cycle
+ !! if we're testing with the old io enabled, we'll have some additional output we can skip
+ if (trim(line) == 'NOTE from PE 0: MPP_IO_SET_STACK_SIZE: stack size set to 131072.') cycle
+ if(trim(line) .ne. trim(ref_line(ref_num))) call mpp_error(FATAL, "warnfile output does not match reference data. "&
+ //"reference line: "//trim(ref_line(ref_num)) &
+ //" output line: "//trim(line))
+ ref_num = ref_num + 1
+ enddo
+ close(u_num_warn)
+ end subroutine check_write
+
+end program test_stdlog
+
diff --git a/test_fms/mpp/test_stdlog.sh b/test_fms/mpp/test_stdlog.sh
new file mode 100755
index 0000000000..191ff93bcc
--- /dev/null
+++ b/test_fms/mpp/test_stdlog.sh
@@ -0,0 +1,52 @@
+#!/bin/sh
+
+#***********************************************************************
+# GNU Lesser General Public License
+#
+# This file is part of the GFDL Flexible Modeling System (FMS).
+#
+# FMS is free software: you can redistribute it and/or modify it under
+# the terms of the GNU Lesser General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or (at
+# your option) any later version.
+#
+# FMS is distributed in the hope that it will be useful, but WITHOUT
+# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
+# FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
+# for more details. 
+# +# You should have received a copy of the GNU Lesser General Public +# License along with FMS. If not, see . +#*********************************************************************** + +# This is part of the GFDL FMS package. This is a shell script to +# execute tests in the test_fms/mpp directory. + +# Ryan Mulhall 02/2021 + +# Set common test settings. +. ../test-lib.sh + +output_dir + +# ensure input.nml file present +cat <<_EOF > input.nml +&test_stdlog_nml + test_num = 1 +/ +_EOF +# Run test with one processor +test_expect_success "test stdlog and stdwarn" ' + mpirun -n 2 ../test_stdlog +' +sed -i 's/1/2/' input.nml +test_expect_failure "test stdlog and stdwarn with fatal output" ' + mpirun -n 2 ../test_stdlog +' +# move file so we don't overwrite +mv warnfile.*.out warnfile.000000.out.old +sed -i 's/2/3/' input.nml +test_expect_success "check stdwarn output" ' + mpirun -n 1 ../test_stdlog +' +test_done diff --git a/test_fms/parser/Makefile.am b/test_fms/parser/Makefile.am index ae8c282b99..569f64e901 100644 --- a/test_fms/parser/Makefile.am +++ b/test_fms/parser/Makefile.am @@ -29,7 +29,8 @@ AM_CPPFLAGS = -I${top_srcdir}/include -I$(MODDIR) LDADD = ${top_builddir}/libFMS/libFMS.la # Build this test program. -check_PROGRAMS = parser_demo2 test_yaml_parser check_crashes parser_demo test_output_yaml +check_PROGRAMS = parser_demo2 test_yaml_parser check_crashes parser_demo test_output_yaml \ + generic_blocks # This is the source code for the test. test_yaml_parser_SOURCES = test_yaml_parser.F90 @@ -37,6 +38,7 @@ check_crashes_SOURCES = check_crashes.F90 parser_demo_SOURCES = parser_demo.F90 parser_demo2_SOURCES = parser_demo2.F90 test_output_yaml_SOURCES = test_output_yaml.F90 +generic_blocks_SOURCES = generic_blocks.F90 # Run the test program. TESTS = test_yaml_parser.sh diff --git a/test_fms/parser/generic_blocks.F90 b/test_fms/parser/generic_blocks.F90 new file mode 100644 index 0000000000..d44beb9fcb --- /dev/null +++ b/test_fms/parser/generic_blocks.F90 @@ -0,0 +1,132 @@ +!*********************************************************************** +!* GNU Lesser General Public License +!* +!* This file is part of the GFDL Flexible Modeling System (FMS). +!* +!* FMS is free software: you can redistribute it and/or modify it under +!* the terms of the GNU Lesser General Public License as published by +!* the Free Software Foundation, either version 3 of the License, or (at +!* your option) any later version. +!* +!* FMS is distributed in the hope that it will be useful, but WITHOUT +!* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or +!* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License +!* for more details. +!* +!* You should have received a copy of the GNU Lesser General Public +!* License along with FMS. If not, see . +!*********************************************************************** + +!> @brief This programs tests the subroutines get_num_unique_blocks, get_unique_block_ids, and +!! 
get_block_name +program generic_blocks +#ifdef use_yaml + use fms_mod, only: fms_init, fms_end + use mpp_mod, only: mpp_error, FATAL + use yaml_parser_mod + + implicit none + + integer :: yaml_id !< Id of the yaml file + integer, allocatable :: field_table_ids(:) !< The Ids of the field table entries + integer, allocatable :: modlist_ids(:) !< The ids of the mods entries + integer, allocatable :: varlist_ids(:) !< The ids of the variable entries + integer, allocatable :: block_ids(:) !< The ids of the block entries + integer, allocatable :: misc_block_ids(:) !< The ids of the misc block entries + integer, allocatable :: key_ids(:) !< The ids of the keys + character(len=50) :: variable_name !< The variable name + character(len=50) :: model_type_name !< The model type + character(len=50) :: block_name !< The name of the block + character(len=50) :: key_name !< The name of the key + character(len=50) :: key_value !< The value of the key + character(len=50) :: varnames(2) !< The expected names of the variables + character(len=50) :: blocknames1(1) !< The expected names of the blocks for the first variable + character(len=50) :: blocknames2(2) !< The expected names of the blocks for the second variable + character(len=50) :: keys(5) !< The expected names of the keys + character(len=50) :: values(5) !< The expected names values of they keys + integer :: key_count !< To keep track of the expected answers + + logical :: correct_answer !< True if the answer is correct + integer :: i, j, k, l, m, n !< For do loops + + call fms_init() + varnames(1) = "sphum" + varnames(2) = "soa" + + blocknames1(1) = "profile_type" + blocknames2(1) = "chem_param" + blocknames2(2) = "profile_type" + + key_count = 0 + keys(1) = "value"; values(1) = "fixed" + keys(2) = "surface_value"; values(2) = "3.0e-06" + keys(3) = "value"; values(3) = "aerosol" + keys(4) = "value"; values(4) = "fixed" + keys(5) = "surface_value"; values(5) = "1.0e-32" + + yaml_id = open_and_parse_file("sample.yaml") + allocate(field_table_ids(get_num_blocks(yaml_id, "field_table"))) + call get_block_ids(yaml_id, "field_table", field_table_ids) + do i = 1, size(field_table_ids) + allocate(modlist_ids(get_num_blocks(yaml_id, "modlist", parent_block_id=field_table_ids(i)))) + call get_block_ids(yaml_id, "modlist", modlist_ids, field_table_ids(i)) + + do j = 1, size(modlist_ids) + call get_value_from_key(yaml_id, modlist_ids(j), "model_type", model_type_name) + print *, "Modlist::", trim(model_type_name) + if (trim(model_type_name) .ne. "atmos_mod") & + call mpp_error(FATAL, "Modlist is not the expected result") + + allocate(varlist_ids(get_num_blocks(yaml_id, "varlist", parent_block_id=modlist_ids(j)))) + call get_block_ids(yaml_id, "varlist", varlist_ids, modlist_ids(j)) + + do k = 1, size(varlist_ids) + call get_value_from_key(yaml_id, varlist_ids(k), "variable", variable_name) + print *, "Variable::", trim(variable_name) + if (trim(variable_name) .ne. varnames(k)) & + call mpp_error(FATAL, "Variable is not the expected result") + + allocate(block_ids(get_num_unique_blocks(yaml_id, parent_block_id=varlist_ids(k)))) + call get_unique_block_ids(yaml_id, block_ids, parent_block_id=varlist_ids(k)) + do l = 1, size(block_ids) + call get_block_name(yaml_id, block_ids(l), block_name) + print *, "Block_name::", trim(block_name) + + if (k == 1) then + correct_answer = trim(blocknames1(l)) .eq. trim(block_name) + else + correct_answer = trim(blocknames2(l)) .eq. trim(block_name) + endif + + if (.not. 
correct_answer) call mpp_error(FATAL, "blockname is not the expected result") + allocate(misc_block_ids(get_num_blocks(yaml_id, block_name, parent_block_id=varlist_ids(k)))) + call get_block_ids(yaml_id, block_name, misc_block_ids, parent_block_id=varlist_ids(k)) + do m = 1, size(misc_block_ids) + allocate(key_ids(get_nkeys(yaml_id, misc_block_ids(m)))) + call get_key_ids(yaml_id, misc_block_ids(m), key_ids) + do n = 1, size(key_ids) + key_count = key_count + 1 + call get_key_name(yaml_id, key_ids(n), key_name) + call get_key_value(yaml_id, key_ids(n), key_value) + print *, "KEY:", trim(key_name), " VALUE:", trim(key_value) + + if (trim(key_name) .ne. trim(keys(key_count))) & + call mpp_error(FATAL, "The key is not correct") + + if (trim(key_value) .ne. trim(values(key_count))) & + call mpp_error(FATAL, "The value is not correct") + enddo + deallocate(key_ids) + enddo + deallocate(misc_block_ids) + enddo + deallocate(block_ids) + print *, "---------" + enddo + deallocate(varlist_ids) + enddo + deallocate(modlist_ids) + enddo + call fms_end() +#endif +end program generic_blocks diff --git a/test_fms/parser/parser_demo.F90 b/test_fms/parser/parser_demo.F90 index 5b4ccfd88e..208e41e807 100644 --- a/test_fms/parser/parser_demo.F90 +++ b/test_fms/parser/parser_demo.F90 @@ -38,7 +38,6 @@ program parser_demo real(kind=r8_kind) :: r8_buffer !< Buffer to read r8 to call fms_init -call fms_end diag_yaml_id = open_and_parse_file("diag_table.yaml") print *, "" @@ -113,6 +112,7 @@ program parser_demo print *, "" enddo deallocate(file_ids) +call fms_end #endif end program parser_demo diff --git a/test_fms/parser/parser_demo2.F90 b/test_fms/parser/parser_demo2.F90 index c230559a4e..674ab85fbb 100644 --- a/test_fms/parser/parser_demo2.F90 +++ b/test_fms/parser/parser_demo2.F90 @@ -39,7 +39,6 @@ program parser_demo character(len=255) :: key_name !< The name of a key call fms_init -call fms_end diag_yaml_id = open_and_parse_file("diag_table.yaml") print *, "" @@ -102,6 +101,7 @@ program parser_demo print *, "" enddo deallocate(file_ids) +call fms_end #endif diff --git a/test_fms/parser/test_yaml_parser.sh b/test_fms/parser/test_yaml_parser.sh index 3fb08aca25..e5405baaf3 100755 --- a/test_fms/parser/test_yaml_parser.sh +++ b/test_fms/parser/test_yaml_parser.sh @@ -26,7 +26,7 @@ . ../test-lib.sh if [ ! 
-z $parser_skip ]; then - SKIP_TESTS='test_yaml_parser.[1-23]' + SKIP_TESTS='test_yaml_parser.[1-25]' fi touch input.nml @@ -268,4 +268,65 @@ test_expect_failure "wrong buffer size block id" ' mpirun -n 1 ./check_crashes ' +cat <<_EOF > sample.yaml +field_table: +- field_type: tracer + modlist: + - model_type: atmos_mod + varlist: + - variable: sphum + longname: specific humidity + units: kg/kg + profile_type: + - value: fixed + surface_value: 3.0e-06 + - variable: soa + longname: SOA tracer + units: mmr + convection: all + chem_param: + - value: aerosol + profile_type: + - value: fixed + surface_value: 1.0e-32 +_EOF + +test_expect_success "Generic blocks names" ' + mpirun -n 1 ./generic_blocks +' + +cat <<_EOF > diag_table.yaml +title: c384L49_esm5PIcontrol +baseDate: [1960 1 1 1 1 1 1] +diag_files: +- fileName: "atmos_daily" + freq: 24 + frequnit: hours + timeunit: days + unlimdim: time + varlist: + - varName: tdata + reduction: False + module: mullions + mullions: 10 + fill_value: -999.9 + - varName: pdata + outName:pressure + reduction: False + kind: double + module: "moist" +- fileName: atmos_8xdaily + freq: 3 + frequnit: hours + timeunit: days + unlimdim: time + varlist: + - varName: tdata + reduction: False + module: "moist" +_EOF + +test_expect_failure "Use an invalid yaml" ' + mpirun -n 1 ./parser_demo +' test_done diff --git a/test_fms/tracer_manager/test_tracer_manager2.sh b/test_fms/tracer_manager/test_tracer_manager2.sh index b35122fa3d..0c85ac76a5 100755 --- a/test_fms/tracer_manager/test_tracer_manager2.sh +++ b/test_fms/tracer_manager/test_tracer_manager2.sh @@ -75,25 +75,25 @@ field_table: - variable: radon longname: radon-222 units: VMR*1E21 - profile_type: fixed - subparams: - - surface_value: 0.0e+00 + profile_type: + - value: fixed + surface_value: 0.0e+00 convection: all - model_type: atmos_mod varlist: - variable: immadeup longname: im_made_up_for_testing units: hbar - profile_type: profile - subparams: - - surface_value: 1.02e-12 + profile_type: + - value: profile + surface_value: 1.02e-12 top_value: 1.0e-15 - model_type: ocean_mod varlist: - variable: biotic1 - diff_horiz: linear - subparams: - - slope: ok + diff_horiz: + - value: linear + slope: ok longname: biotic one - variable: age_ctl - model_type: ocean_mod @@ -101,9 +101,9 @@ field_table: - variable: immadeup2 longname: im_made_up2_for_testing units: hbar - profile_type: profile - subparams: - - surface_value: 1.0e-12 + profile_type: + - value: profile + surface_value: 1.0e-12 bottom_value: 1.0e-9 - model_type: land_mod varlist: