From 8de7633fadf3071b7ca85bbb35ed9b3481fe2764 Mon Sep 17 00:00:00 2001 From: Jan-Hendrik Malles Date: Fri, 17 May 2024 09:58:47 +0200 Subject: [PATCH 01/32] Adding fix for ORAS5 --- esmvalcore/cmor/_fixes/oras5/__init__.py | 0 esmvalcore/cmor/_fixes/oras5/_base_fixes.py | 131 +++++++++++++ esmvalcore/cmor/_fixes/oras5/oras5.py | 199 ++++++++++++++++++++ esmvalcore/config-developer.yml | 10 + 4 files changed, 340 insertions(+) create mode 100644 esmvalcore/cmor/_fixes/oras5/__init__.py create mode 100644 esmvalcore/cmor/_fixes/oras5/_base_fixes.py create mode 100644 esmvalcore/cmor/_fixes/oras5/oras5.py diff --git a/esmvalcore/cmor/_fixes/oras5/__init__.py b/esmvalcore/cmor/_fixes/oras5/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/esmvalcore/cmor/_fixes/oras5/_base_fixes.py b/esmvalcore/cmor/_fixes/oras5/_base_fixes.py new file mode 100644 index 0000000000..74fc9d8b4d --- /dev/null +++ b/esmvalcore/cmor/_fixes/oras5/_base_fixes.py @@ -0,0 +1,131 @@ +"""Fix base classes for ORAS5 on-the-fly CMORizer.""" + +import logging +from pathlib import Path + +import iris +import numpy as np +import dask.array as da +import xarray as xr +from iris import Constraint +from iris.experimental.ugrid import Connectivity, Mesh + +from ..icon.icon import IconFix + +logger = logging.getLogger(__name__) + + +class Oras5Fix(IconFix): + """Base class for fixes.""" + + CACHE_DIR = Path.home() / '.esmvaltool' / 'cache' + CACHE_VALIDITY = 7 * 24 * 60 * 60 # [s]; = 1 week + TIMEOUT = 5 * 60 # [s]; = 5 min + GRID_FILE_ATTR = 'grid_file_uri' + + def __init__(self, *args, **kwargs): + """Initialize fix.""" + super().__init__(*args, **kwargs) + self._horizontal_grids = {} + self._meshes = {} + + + def _create_mesh(self, cube): + """Create mesh from horizontal grid file. + + Note + ---- + This functions creates a new :class:`iris.experimental.ugrid.Mesh` from + the ``clat`` (already present in the cube), ``clon`` (already present + in the cube), ``vertex_index``, ``vertex_of_cell``, ``vlat``, and + ``vlon`` variables of the horizontal grid file. + + We do not use :func:`iris.experimental.ugrid.Mesh.from_coords` with the + existing latitude and longitude coordinates here because this would + produce lots of duplicated entries for the node coordinates. The reason + for this is that the node coordinates are constructed from the bounds; + since each node is contained 6 times in the bounds array (each node is + shared by 6 neighboring cells) the number of nodes is 6 times higher + with :func:`iris.experimental.ugrid.Mesh.from_coords` compared to using + the information already present in the horizontal grid file. + + """ + + horizontal_grid = self.get_horizontal_grid(cube) + mesh = horizontal_grid.extract_cube(Constraint('cell_area')) + face_lon = mesh.coord('longitude').core_points().flatten() + face_lat = mesh.coord('latitude').core_points().flatten() + + node_lon = mesh.coord('longitude').core_bounds().flatten() + node_lat = mesh.coord('latitude').core_bounds().flatten() + + # Make the node locations a 2D array + nodes_flat = np.stack([node_lon, node_lat], axis=1) + + # Find the unique nodes to be able to associate them with the faces + # Unfortunately, dask does not support the axis parameter... 
+ nodes_unique, indices = np.unique(nodes_flat, return_inverse=True, + axis=0) + + node_lon = da.from_array(nodes_unique[:,0]) + node_lat = da.from_array(nodes_unique[:,1]) + + n_faces = len(face_lat) + n_vertices = int(len(indices) / n_faces) + + # Reshaping to N_faces x M_nodes array + indices = da.reshape(da.from_array(indices), (n_faces, n_vertices)) + + # Add the mask, which should not have a True entry for ORAS5 + mask = da.full(da.shape(indices), False) + + ### Define the connectivity + connectivity = Connectivity( + indices=da.ma.masked_array(indices,mask=mask), + cf_role='face_node_connectivity', + start_index=0, + location_axis=0, + ) + + face_lon = (face_lon + 360) % 360 + node_lon = (node_lon + 360) % 360 + + # Put everything together to get a U-Grid style mesh + node_lat = iris.coords.AuxCoord(node_lat, standard_name='latitude', + var_name='lat', long_name='latitude', + units='degrees_north') + node_lon = iris.coords.AuxCoord(node_lon, standard_name='longitude', + var_name='lon', long_name='longitude', + units='degrees_east') + face_lat = iris.coords.AuxCoord(face_lat, standard_name='latitude', + var_name='lat', long_name='latitude', + units='degrees_north') + face_lon = iris.coords.AuxCoord(face_lon, standard_name='longitude', + var_name='lon', long_name='longitude', + units='degrees_east') + + mesh = Mesh( + topology_dimension=2, + node_coords_and_axes=[(node_lat, 'y'), (node_lon, 'x')], + connectivities=[connectivity], + face_coords_and_axes=[(face_lat, 'y'), (face_lon, 'x')], + ) + + return mesh + + def _get_grid_from_facet(self): + """Get horizontal grid from user-defined facet `horizontal_grid`.""" + grid_path = self._get_path_from_facet( + 'horizontal_grid', 'Horizontal grid file' + ) + grid_name = grid_path.name + + # If already loaded, return the horizontal grid + if grid_name in self._horizontal_grids: + return self._horizontal_grids[grid_name] + + # Load file + self._horizontal_grids[grid_name] = iris.load_raw(grid_path) + # self._horizontal_grids[grid_name] = xr.open_dataset(grid_path) + logger.debug("Loaded ORAS5 grid file from %s", grid_path) + return self._horizontal_grids[grid_name] \ No newline at end of file diff --git a/esmvalcore/cmor/_fixes/oras5/oras5.py b/esmvalcore/cmor/_fixes/oras5/oras5.py new file mode 100644 index 0000000000..8b9fa71b5a --- /dev/null +++ b/esmvalcore/cmor/_fixes/oras5/oras5.py @@ -0,0 +1,199 @@ +"""On-the-fly CMORizer for ORAS5.""" + +import logging + +import iris +import iris.util +import numpy as np +import dask.array as da +from iris import Constraint +from iris.coords import DimCoord +from iris.cube import CubeList + +from ..shared import fix_ocean_depth_coord + +from ._base_fixes import Oras5Fix +from ..icon.icon import AllVars as AllVars_ICON + +logger = logging.getLogger(__name__) + + +class AllVars(Oras5Fix, AllVars_ICON): + """Fixes for all variables.""" + + def fix_metadata(self, cubes): + + """Fix metadata.""" + cubes = self.add_additional_cubes(cubes) + cube = self.get_cube(cubes) + + # This is just a quick solution for other than horizontal coordinates, + # needs to be adapted to also deal with depth. + time = cube.coord('time') + + # Adding the option to make the irregular (2d) grid unstructured (1d) + # to take advantage of UGRID + if self.extra_facets.get('make_unstructured', True): + # ORAS5 has 1 redundant row and 2 redundant columns that need to be + # removed. 
+ data = cube.core_data()[...,:-1,1:-1].T.flatten() + data = da.reshape(data, (len(time.points), len(data))) + lat_points = cube.coord('latitude').core_points() + lat_points = lat_points[:-1,1:-1].T.flatten() + lon_points = cube.coord('longitude').core_points() + lon_points = lon_points[:-1,1:-1].T.flatten() + + lat_coord = iris.coords.AuxCoord(lat_points, + standard_name='latitude', + units=cube.coord('latitude').units) + lon_coord = iris.coords.AuxCoord(lon_points, + standard_name='longitude', + units=cube.coord('longitude').units) + + # See above concerning additional coordinates and dimensions + new_cube = iris.cube.Cube(data, dim_coords_and_dims=[(time,0)]) + new_cube.add_aux_coord(lat_coord, 1) + new_cube.add_aux_coord(lon_coord, 1) + + new_cube.long_name = cube.long_name + cube = new_cube + + else: + # ORAS5 has 1 redundant row and 2 redundant columns that need to be + # removed. + cube = cube[...,:-1,1:-1] + lon_shape = cube.coord('longitude').points.shape + mesh = self.get_horizontal_grid(cube) + mesh = mesh.extract_cube(Constraint('cell_area')) + lon_bnds = mesh.coord('longitude').bounds + lat_bnds = mesh.coord('latitude').bounds + lon_bnds = np.reshape(lon_bnds, (lon_shape[0], lon_shape[1], + min(lon_bnds.shape))) + lat_bnds = np.reshape(lat_bnds, (lon_shape[0], lon_shape[1], + min(lat_bnds.shape))) + cube.coord('longitude').bounds = lon_bnds + cube.coord('latitude').bounds = lat_bnds + + # Fix time + if self.vardef.has_coord_with_standard_name('time'): + cube = self._fix_time(cube, cubes) + + if cube.coords(axis='Z'): + fix_ocean_depth_coord(cube) + + # Fix latitude + if self.vardef.has_coord_with_standard_name('latitude'): + lat_idx = self._fix_lat(cube) + else: + lat_idx = None + + # Fix longitude + if self.vardef.has_coord_with_standard_name('longitude'): + lon_idx = self._fix_lon(cube) + else: + lon_idx = None + + # Fix unstructured mesh of unstructured grid if present + if self._is_unstructured_grid(lat_idx, lon_idx): + self._fix_mesh(cube, lat_idx) + + # Fix metadata of variable + self.fix_var_metadata(cube) + + return CubeList([cube]) + + def _add_coord_from_grid_file(self, cube, coord_name): + """Add coordinate from grid file to cube. + + Note + ---- + Assumes that the input cube has a single unnamed dimension, which will + be used as dimension for the new coordinate. + + Parameters + ---------- + cube: iris.cube.Cube + ICON data to which the coordinate from the grid file is added. + coord_name: str + Name of the coordinate to add from the grid file. Must be one of + ``'latitude'``, ``'longitude'``. + + Raises + ------ + ValueError + Invalid ``coord_name`` is given; input cube does not contain a + single unnamed dimension that can be used to add the new + coordinate. 
+ + """ + + # Use 'cell_area' as dummy cube to extract desired coordinates + # Note: it might be necessary to expand this when more coord_names are + # supported + horizontal_grid = self.get_horizontal_grid(cube) + if type(horizontal_grid) == iris.cube.CubeList: + grid_cube = horizontal_grid.extract_cube( + Constraint('cell_area')) + coord = grid_cube.coord(coord_name) + else: + if coord_name == 'longitude': + coord = iris.coords.AuxCoord( + points = (horizontal_grid.grid_center_lon + .values), + bounds = (horizontal_grid.grid_corner_lon + .values), + standard_name = 'longitude', + units = 'degrees') + elif coord_name == 'latitude': + coord = iris.coords.AuxCoord( + points = (horizontal_grid.grid_center_lat + .values), + bounds = (horizontal_grid.grid_corner_lat + .values), + standard_name = 'latitude', + units = 'degrees') + + # Find index of mesh dimension (= single unnamed dimension) + n_unnamed_dimensions = cube.ndim - len(cube.dim_coords) + if n_unnamed_dimensions != 1: + raise ValueError( + f"Cannot determine coordinate dimension for coordinate " + f"'{coord_name}', cube does not contain a single unnamed " + f"dimension:\n{cube}") + coord_dims = () + for idx in range(cube.ndim): + if not cube.coords(dimensions=idx, dim_coords=True): + coord_dims = (idx,) + break + + # Adapt coordinate names so that the coordinate can be referenced with + # 'cube.coord(coord_name)'; the exact name will be set at a later stage + coord.standard_name = None + coord.long_name = coord_name + cube.add_aux_coord(coord, coord_dims) + + def _fix_mesh(self, cube, mesh_idx): + """Fix mesh.""" + # Remove any already-present dimensional coordinate describing the mesh + # dimension + if cube.coords(dimensions=mesh_idx, dim_coords=True): + cube.remove_coord(cube.coord(dimensions=mesh_idx, dim_coords=True)) + + # Add dimensional coordinate that describes the mesh dimension + index_coord = DimCoord( + np.arange(cube.shape[mesh_idx[0]]), + var_name='i', + long_name=('first spatial index for variables stored on an ' + 'unstructured grid'), + units='1', + ) + cube.add_dim_coord(index_coord, mesh_idx) + + # If desired, get mesh and replace the original latitude and longitude + # coordinates with their new mesh versions + if self.extra_facets.get('ugrid', True): + mesh = self.get_mesh(cube) + cube.remove_coord('latitude') + cube.remove_coord('longitude') + for mesh_coord in mesh.to_MeshCoords('face'): + cube.add_aux_coord(mesh_coord, mesh_idx) diff --git a/esmvalcore/config-developer.yml b/esmvalcore/config-developer.yml index 9d5f1bc62f..00bf09d17b 100644 --- a/esmvalcore/config-developer.yml +++ b/esmvalcore/config-developer.yml @@ -194,3 +194,13 @@ CESM: output_file: '{project}_{dataset}_{case}_{gcomp}_{scomp}_{type}_{mip}_{short_name}' cmor_type: 'CMIP6' cmor_default_table_prefix: 'CMIP6_' + +ORAS5: + cmor_strict: false + input_dir: + default: '/' + input_file: + default: '*{raw_name}*{version}*.nc' + output_file: '{project}_{dataset}_{version}_{mip}_{short_name}' + cmor_type: 'CMIP6' + cmor_default_table_prefix: 'CMIP6_' From 9aa0986eb21e651eb8adcf02f84906b0a71e0677 Mon Sep 17 00:00:00 2001 From: Jan-Hendrik Malles Date: Wed, 4 Dec 2024 10:34:11 +0100 Subject: [PATCH 02/32] Adjustments for 4D data. 
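This change generalises the flattening in _fix_cube from 2D (time, y, x) to 4D (time, depth, y, x) fields. A minimal dask sketch of the axis handling, for orientation only (not part of the patch; the toy shape (2, 3, 4, 5) is an arbitrary assumption):

    import dask.array as da

    # Toy 4D field with shape (time, depth, y, x) = (2, 3, 4, 5).
    data = da.arange(2 * 3 * 4 * 5).reshape(2, 3, 4, 5)

    # Swap the last two axes first (as the fix does with da.moveaxis), then
    # collapse the horizontal y/x axes into one cell dimension; the leading
    # time and depth dimensions are left untouched.
    cells = da.moveaxis(data, -1, -2).reshape(2, 3, 4 * 5)

    print(cells.shape)  # (2, 3, 20)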
--- esmvalcore/cmor/_fixes/oras5/oras5.py | 26 +++++++++++++++----------- 1 file changed, 15 insertions(+), 11 deletions(-) diff --git a/esmvalcore/cmor/_fixes/oras5/oras5.py b/esmvalcore/cmor/_fixes/oras5/oras5.py index 6517034139..c2d2d06bae 100644 --- a/esmvalcore/cmor/_fixes/oras5/oras5.py +++ b/esmvalcore/cmor/_fixes/oras5/oras5.py @@ -31,8 +31,8 @@ def fix_metadata(self, cubes): if self.vardef.has_coord_with_standard_name("time"): cube = self._fix_time(cube, cubes) - if cube.coords(axis="Z"): - fix_ocean_depth_coord(cube) + # Fix depth + self._fix_depth(cube) # Fix latitude if self.vardef.has_coord_with_standard_name("latitude"): @@ -91,7 +91,7 @@ def _fix_cube(self, cube): if isinstance(coord, iris.coords.DimCoord): dim = cube.coord_dims(coord) coords_add.append((coord, dim)) - data = cube.core_data().T.flatten() + data = da.moveaxis(cube.core_data(), -1, -2).flatten() dim_shape = tuple(cube.data.shape[:-2]) data_shape = tuple(data.shape / np.prod(dim_shape)) data = da.reshape(data, dim_shape + data_shape) @@ -131,16 +131,11 @@ def _add_coord_from_grid_file(self, cube, coord_name): grid_cube = horizontal_grid.extract_cube(Constraint("cell_area")) coord = grid_cube.coord(coord_name) points = coord.core_points().flatten() - bounds = da.moveaxis(da.from_array(coord.core_bounds()), -1, 0) - bounds_shape = np.shape(bounds) - bounds = bounds.flatten() - bounds = da.reshape( - bounds, - (int(bounds_shape[0]), int(len(bounds) / bounds_shape[0])), - ) + bounds = da.from_array(coord.core_bounds()).flatten() + bounds = da.reshape(bounds, (int(len(bounds) / 4), 4)) coord = iris.coords.AuxCoord( points=(points), - bounds=(bounds.T), + bounds=(bounds), standard_name=coord_name, units="degrees", ) @@ -248,3 +243,12 @@ def _fix_mesh(self, cube, mesh_idx): cube.remove_coord("longitude") for mesh_coord in mesh.to_MeshCoords("face"): cube.add_aux_coord(mesh_coord, mesh_idx) + + def _fix_depth(self, cube): + """Fix depth coordinate.""" + for i in range(len(cube.coords())): + if "levels" in cube.coords()[i].name(): + cube.coords()[i].attributes = {"positive": "down"} + + if cube.coords(axis="Z"): + fix_ocean_depth_coord(cube) From 01ef2d4ae88fd3c7eb6a5bc7be2cfb341e5f1952 Mon Sep 17 00:00:00 2001 From: Jan-Hendrik Malles Date: Thu, 5 Dec 2024 10:40:12 +0100 Subject: [PATCH 03/32] Adding mappings and some minor adjustments --- esmvalcore/cmor/_fixes/oras5/_base_fixes.py | 95 +++++++++++++++++++ esmvalcore/cmor/_fixes/oras5/oras5.py | 2 +- .../config/extra_facets/oras5-mappings.yml | 50 ++++++++++ 3 files changed, 146 insertions(+), 1 deletion(-) create mode 100644 esmvalcore/config/extra_facets/oras5-mappings.yml diff --git a/esmvalcore/cmor/_fixes/oras5/_base_fixes.py b/esmvalcore/cmor/_fixes/oras5/_base_fixes.py index 0f53c49f81..63be455018 100644 --- a/esmvalcore/cmor/_fixes/oras5/_base_fixes.py +++ b/esmvalcore/cmor/_fixes/oras5/_base_fixes.py @@ -93,3 +93,98 @@ def _create_mesh(self, cube): ) return mesh + + def get_horizontal_grid(self, cube): + """Get copy of ORAS5 horizontal grid. + + If given, retrieve grid from `horizontal_grid` facet specified by the + user. + + Parameters + ---------- + cube: iris.cube.Cube + Cube for which the ORS5 horizontal grid is retrieved. If the facet + `horizontal_grid` is not specified by the user, it raises a + NotImplementedError. + + Returns + ------- + iris.cube.CubeList + Copy of ORAS5 horizontal grid. + + Raises + ------ + FileNotFoundError + Path specified by `horizontal_grid` facet (absolute or relative to + `auxiliary_data_dir`) does not exist. 
+ NotImplementedError + No `horizontal_grid` facet is defined. + + """ + if self.extra_facets.get("horizontal_grid") is not None: + grid = self._get_grid_from_facet() + else: + raise NotImplementedError( + "Full path to suitable ORAS5 grid must be specified in facet " + "'horizontal_grid'" + ) + + return grid.copy() + + def _get_grid_from_facet(self): + """Get horizontal grid from user-defined facet `horizontal_grid`.""" + grid_path = self._get_path_from_facet( + "horizontal_grid", "Horizontal grid file" + ) + grid_name = grid_path.name + + # If already loaded, return the horizontal grid + if grid_name in self._horizontal_grids: + return self._horizontal_grids[grid_name] + + # Load file + self._horizontal_grids[grid_name] = self._load_cubes(grid_path) + logger.debug("Loaded ORAS5 grid file from %s", grid_path) + return self._horizontal_grids[grid_name] + + def get_mesh(self, cube): + """Get mesh. + + Note + ---- + If possible, this function uses a cached version of the mesh to save + time. + + Parameters + ---------- + cube: iris.cube.Cube + Cube for which the mesh is retrieved. + + Returns + ------- + iris.mesh.MeshXY + Mesh of the cube. + + Raises + ------ + FileNotFoundError + Path specified by `horizontal_grid` facet (absolute or relative to + `auxiliary_data_dir`) does not exist. + NotImplementedError + No `horizontal_grid` facet is defined. + + """ + # Use `horizontal_grid` facet to determine grid name + grid_path = self._get_path_from_facet( + "horizontal_grid", "Horizontal grid file" + ) + grid_name = grid_path.name + + # Reuse mesh if possible + if grid_name in self._meshes: + logger.debug("Reusing ORAS5 mesh for grid %s", grid_name) + else: + logger.debug("Creating ORAS5 mesh for grid %s", grid_name) + self._meshes[grid_name] = self._create_mesh(cube) + + return self._meshes[grid_name] diff --git a/esmvalcore/cmor/_fixes/oras5/oras5.py b/esmvalcore/cmor/_fixes/oras5/oras5.py index c2d2d06bae..993611016d 100644 --- a/esmvalcore/cmor/_fixes/oras5/oras5.py +++ b/esmvalcore/cmor/_fixes/oras5/oras5.py @@ -108,7 +108,7 @@ def _add_coord_from_grid_file(self, cube, coord_name): Parameters ---------- cube: iris.cube.Cube - ICON data to which the coordinate from the grid file is added. + ORAS5 data to which the coordinate from the grid file is added. coord_name: str Name of the coordinate to add from the grid file. Must be one of ``'latitude'``, ``'longitude'``. diff --git a/esmvalcore/config/extra_facets/oras5-mappings.yml b/esmvalcore/config/extra_facets/oras5-mappings.yml new file mode 100644 index 0000000000..e93f9519d8 --- /dev/null +++ b/esmvalcore/config/extra_facets/oras5-mappings.yml @@ -0,0 +1,50 @@ +# Extra facets for native ORAS5 data + +# Notes: +# - All facets can also be specified in the recipes. The values given here are +# only defaults. +# - The facet ``var_type`` has to be specified in the recipe if it is not given +# here and default DRS is used. + +# A complete list of supported keys is given in the documentation (see +# ESMValCore/doc/quickstart/find_data.rst). 
+--- + +ORAS5: + + '*': + # Cell measures + areacella: + latitude: grid_latitude + longitude: grid_longitude + raw_name: cell_area + areacello: + latitude: grid_latitude + longitude: grid_longitude + raw_name: cell_area + + # Variable names and units + uo: + raw_name: vozocrte + raw_units: m/s + vo: + raw_name: vomecrtn + raw_units: m/s + tos: + raw_name: sosstsst + raw_units: degC + sos: + raw_name: sosaline + raw_units: '0.001' + zos: + raw_name: sossheig + raw_units: m + mlotst: + raw_name: somxl010 + raw_unis: m + to: + raw_name: votemper + raw_units: degC + so: + raw_name: vosaline + raw_units: '0.001' \ No newline at end of file From 013b219a750d5a6c812d43fbc2ebdcddb126df7b Mon Sep 17 00:00:00 2001 From: Jan-Hendrik Malles Date: Fri, 24 Jan 2025 09:13:22 +0100 Subject: [PATCH 04/32] Fixing codacy issues --- esmvalcore/cmor/_fixes/oras5/_base_fixes.py | 8 ++++---- esmvalcore/cmor/_fixes/oras5/oras5.py | 21 ++++++++++----------- 2 files changed, 14 insertions(+), 15 deletions(-) diff --git a/esmvalcore/cmor/_fixes/oras5/_base_fixes.py b/esmvalcore/cmor/_fixes/oras5/_base_fixes.py index 63be455018..a4f450b84f 100644 --- a/esmvalcore/cmor/_fixes/oras5/_base_fixes.py +++ b/esmvalcore/cmor/_fixes/oras5/_base_fixes.py @@ -93,7 +93,7 @@ def _create_mesh(self, cube): ) return mesh - + def get_horizontal_grid(self, cube): """Get copy of ORAS5 horizontal grid. @@ -104,7 +104,7 @@ def get_horizontal_grid(self, cube): ---------- cube: iris.cube.Cube Cube for which the ORS5 horizontal grid is retrieved. If the facet - `horizontal_grid` is not specified by the user, it raises a + `horizontal_grid` is not specified by the user, it raises a NotImplementedError. Returns @@ -176,8 +176,8 @@ def get_mesh(self, cube): """ # Use `horizontal_grid` facet to determine grid name grid_path = self._get_path_from_facet( - "horizontal_grid", "Horizontal grid file" - ) + "horizontal_grid", "Horizontal grid file" + ) grid_name = grid_path.name # Reuse mesh if possible diff --git a/esmvalcore/cmor/_fixes/oras5/oras5.py b/esmvalcore/cmor/_fixes/oras5/oras5.py index 993611016d..bba28fc391 100644 --- a/esmvalcore/cmor/_fixes/oras5/oras5.py +++ b/esmvalcore/cmor/_fixes/oras5/oras5.py @@ -85,17 +85,16 @@ def _fix_cube(self, cube): return cube # Data is made unstructured (flattened) - else: - coords_add = [] - for coord in cube.coords(): - if isinstance(coord, iris.coords.DimCoord): - dim = cube.coord_dims(coord) - coords_add.append((coord, dim)) - data = da.moveaxis(cube.core_data(), -1, -2).flatten() - dim_shape = tuple(cube.data.shape[:-2]) - data_shape = tuple(data.shape / np.prod(dim_shape)) - data = da.reshape(data, dim_shape + data_shape) - return iris.cube.Cube(data, dim_coords_and_dims=coords_add) + coords_add = [] + for coord in cube.coords(): + if isinstance(coord, iris.coords.DimCoord): + dim = cube.coord_dims(coord) + coords_add.append((coord, dim)) + data = da.moveaxis(cube.core_data(), -1, -2).flatten() + dim_shape = tuple(cube.data.shape[:-2]) + data_shape = tuple(data.shape / np.prod(dim_shape)) + data = da.reshape(data, dim_shape + data_shape) + return iris.cube.Cube(data, dim_coords_and_dims=coords_add) def _add_coord_from_grid_file(self, cube, coord_name): """Add coordinate from grid file to cube. From 8b877f38734797c1cc63db822844816b29420f5e Mon Sep 17 00:00:00 2001 From: Katja Weigel Date: Tue, 11 Nov 2025 17:58:35 +0100 Subject: [PATCH 05/32] Added integration test. Typo: to should be thetao. 
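The expected time values in the test can be checked by hand: the raw ORAS5 coordinate counts hours since 1900-01-01, while the CMORized cube uses days since 1850-01-01. A short cf_units check of the first raw timestamp, for orientation only (not part of the test itself):

    from cf_units import Unit

    hours_1900 = Unit("hours since 1900-01-01 00:00:00", calendar="gregorian")
    days_1850 = Unit("days since 1850-01-01 00:00:00", calendar="gregorian")

    # 788928 hours after 1900-01-01 is 1990-01-01, i.e. 51134 days after
    # 1850-01-01 -- the first lower time bound expected after fixing.
    print(hours_1900.convert(788928, days_1850))  # 51134.0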
--- .../config/extra_facets/oras5-mappings.yml | 2 +- .../cmor/_fixes/oras5/test_oras5.py | 349 ++++++++++++++++++ 2 files changed, 350 insertions(+), 1 deletion(-) create mode 100644 tests/integration/cmor/_fixes/oras5/test_oras5.py diff --git a/esmvalcore/config/extra_facets/oras5-mappings.yml b/esmvalcore/config/extra_facets/oras5-mappings.yml index e93f9519d8..cd9b013c36 100644 --- a/esmvalcore/config/extra_facets/oras5-mappings.yml +++ b/esmvalcore/config/extra_facets/oras5-mappings.yml @@ -42,7 +42,7 @@ ORAS5: mlotst: raw_name: somxl010 raw_unis: m - to: + thetao: raw_name: votemper raw_units: degC so: diff --git a/tests/integration/cmor/_fixes/oras5/test_oras5.py b/tests/integration/cmor/_fixes/oras5/test_oras5.py new file mode 100644 index 0000000000..a96d0e6aaf --- /dev/null +++ b/tests/integration/cmor/_fixes/oras5/test_oras5.py @@ -0,0 +1,349 @@ +"""Tests for the fixes of ORAS5.""" + +import datetime + +import dask.array as da +import numpy as np +import pytest +from cf_units import Unit +from iris.coords import AuxCoord, DimCoord +from iris.cube import Cube, CubeList + +from esmvalcore.cmor._fixes.fix import Fix, GenericFix +from esmvalcore.cmor._fixes.oras5.oras5 import ( + AllVars) +from esmvalcore.cmor.fix import fix_metadata +from esmvalcore.cmor.table import CMOR_TABLES, get_var_info +from esmvalcore.preprocessor import cmor_check_metadata + +COMMENT = ( + "Contains modified Copernicus Climate Change Service Information " + f"{datetime.datetime.now().year}" +) + +def test_get_frequency_monthly(): + """Test cubes with monthly frequency.""" + time = DimCoord( + [0, 31, 59], + standard_name="time", + units=Unit("hours since 1900-01-01"), + ) + cube = Cube( + [1, 6, 3], + var_name="random_var", + dim_coords_and_dims=[(time, 0)], + ) + cube.coord("time").convert_units("days since 1850-1-1 00:00:00.0") + + +def _oras5_latitude(): + return DimCoord( + np.array([90.0, 0.0, -90.0]), + standard_name="latitude", + long_name="latitude", + var_name="latitude", + units=Unit("degrees"), + ) + + +def _oras5_longitude(): + return DimCoord( + np.array([0, 180, 359.75]), + standard_name="longitude", + long_name="longitude", + var_name="longitude", + units=Unit("degrees"), + circular=True, + ) + + + +def _oras5_time(frequency): + if frequency == "invariant": + timestamps = [788928] # hours since 1900 at 1 january 1990 + elif frequency == "monthly": + timestamps = [788928, 789672, 790344] + else: + raise NotImplementedError(f"Invalid frequency {frequency}") + return DimCoord( + np.array(timestamps, dtype="int32"), + standard_name="time", + long_name="time", + var_name="time", + units=Unit("hours since 1900-01-0100:00:00.0", calendar="gregorian"), + ) + +def _oras5_depth(): + values = np.array( + [ + 0.5, + 6000, + ] + ) + return DimCoord( + values, + axis="Z", + long_name="Vertical T levels", + units=Unit("m"), + var_name="deptht", + attributes={"positive": "down"}, + ) + + +def _oras5_data(frequency): + if frequency == "invariant": + return np.arange(9).reshape(1, 3, 3) + return np.arange(27).reshape(3, 3, 3) + + +def _cmor_latitude(): + return DimCoord( + np.array([-90.0, 0.0, 90.0]), + standard_name="latitude", + long_name="Latitude", + var_name="lat", + units=Unit("degrees_north"), + bounds=np.array([[-90.0, -45.0], [-45.0, 45.0], [45.0, 90.0]]), + ) + + +def _cmor_longitude(): + return DimCoord( + np.array([0, 180, 359.75]), + standard_name="longitude", + long_name="Longitude", + var_name="lon", + units=Unit("degrees_east"), + bounds=np.array([[-0.125, 90.0], [90.0, 269.875], [269.875, 
359.875]]), + circular=True, + ) + + +def _cmor_time(mip, bounds=None, shifted=False): + """Provide expected time coordinate after fixes.""" + if "mon" in mip: + timestamps = np.array([51149.5, 51179.0, 51208.5]) + if bounds is not None: + bounds = np.array( + [[51134.0, 51165.0], [51165.0, 51193.0], [51193.0, 51224.0]] + ) + else: + raise NotImplementedError() + + return DimCoord( + np.array(timestamps, dtype=float), + standard_name="time", + long_name="time", + var_name="time", + units=Unit("days since 1850-1-1 00:00:00", calendar="gregorian"), + bounds=bounds, + ) + + +def _cmor_depth(): + values = np.array( + [ + 0.5, + 6000.0, + ] + ) + return DimCoord( + values, + axis="Z", + long_name="ocean depth coordinate", + standard_name="depth", + units=Unit("m"), + var_name="lev", + attributes={"positive": "down"}, + ) + + +def _cmor_data(mip): + if mip == "fx": + return np.arange(9).reshape(3, 3)[::-1, :] + return np.arange(27).reshape(3, 3, 3)[:, ::-1, :] + + +def oras5_2d(frequency): + cube = Cube( + _oras5_data("monthly"), + long_name=None, + var_name=None, + units="unknown", + dim_coords_and_dims=[ + (time, 0), + (_oras5_latitude(), 1), + (_oras5_longitude(), 2), + ], + ) + return CubeList([cube]) + + +def oras5_3d(frequency): + cube = Cube( + np.ones((3, 2, 3, 3)), + long_name=None, + var_name=None, + units="unknown", + dim_coords_and_dims=[ + (_oras5_time(frequency), 0), + (_oras5_depth(), 1), + (_oras5_latitude(), 2), + (_oras5_longitude(), 3), + ], + ) + return CubeList([cube]) + +def cmor_2d(mip, short_name): + cmor_table = CMOR_TABLES["oras5"] + vardef = cmor_table.get_variable(mip, short_name) + if "mon" in mip: + time = DimCoord( + [-15.5, 15.5, 45.0], + bounds=[[-31.0, 0.0], [0.0, 31.0], [31.0, 59.0]], + standard_name="time", + long_name="time", + var_name="time", + units="days since 1850-01-01", + ) + else: + time = _cmor_time(mip, bounds=True) + cube = Cube( + _cmor_data(mip).astype("float32"), + long_name=vardef.long_name, + var_name=vardef.short_name, + standard_name=vardef.standard_name, + units=Unit(vardef.units), + dim_coords_and_dims=[ + (time, 0), + (_cmor_latitude(), 1), + (_cmor_longitude(), 2), + ], + attributes={"comment": COMMENT}, + ) + return CubeList([cube]) + + +def cmor_3d(mip, short_name): + cmor_table = CMOR_TABLES["oras5"] + vardef = cmor_table.get_variable(mip, short_name) + cube = Cube( + np.ones((3, 2, 3, 3)), + long_name=vardef.long_name, + var_name=vardef.short_name, + standard_name=vardef.standard_name, + units=Unit(vardef.units), + dim_coords_and_dims=[ + (_cmor_time(mip, bounds=True), 0), + (_cmor_depth(), 1), + (_cmor_latitude(), 2), + (_cmor_longitude(), 3), + ], + attributes={"comment": COMMENT}, + ) + return CubeList([cube]) + + +VARIABLES = [ + pytest.param(a, b, c, d, id=c + "_" + d) + for (a, b, c, d) in [ + (oras5_3d("monthly"), cmor_3d("Omon", "so"), "so", "Omon"), + (oras5_3d("monthly"), cmor_3d("Omon", "thetao"), "thetao", "Omon"), + (oras5_3d("monthly"), cmor_3d("Omon", "uo"), "uo", "Omon"), + (oras5_3d("monthly"), cmor_3d("Omon", "vo"), "vo", "Omon"), + (oras5_2d("monthly"), cmor_2d("Omon", "mlotst"), "mlotst", "Omon"), + (oras5_2d("monthly"), cmor_2d("Omon", "tos"), "tos", "Omon"), + (oras5_2d("monthly"), cmor_2d("Omon", "sos"), "sos", "Omon"), + (oras5_2d("monthly"), cmor_2d("Omon", "zos"), "zos", "Omon"), + ] +] + + +@pytest.mark.parametrize("oras5_cubes, cmor_cubes, var, mip", VARIABLES) +def test_cmorization(oras5_cubes, cmor_cubes, var, mip): + """Verify that cmorization results in the expected target cube.""" + fixed_cubes = 
fix_metadata(oras5_cubes, var, "oras5", "oras5", mip) + + assert len(fixed_cubes) == 1 + fixed_cube = fixed_cubes[0] + cmor_cube = cmor_cubes[0] + + # Test that CMOR checks are passing + fixed_cubes = cmor_check_metadata(fixed_cube, "oras5", mip, var) + + if fixed_cube.coords("time"): + for cube in [fixed_cube, cmor_cube]: + coord = cube.coord("time") + coord.points = np.round(coord.points, decimals=7) + if coord.bounds is not None: + coord.bounds = np.round(coord.bounds, decimals=7) + print("Test results for variable/MIP: ", var, mip) + print("cmor_cube:", cmor_cube) + print("fixed_cube:", fixed_cube) + print("cmor_cube data:", cmor_cube.data) + print("fixed_cube data:", fixed_cube.data) + print("cmor_cube coords:") + for coord in cmor_cube.coords(): + print(coord) + print("\n") + print("fixed_cube coords:") + for coord in fixed_cube.coords(): + print(coord) + assert fixed_cube == cmor_cube + + +@pytest.fixture +def unstructured_grid_cubes(): + """Sample cubes with unstructured grid.""" + time = DimCoord( + [0.0, 31.0], standard_name="time", units="days since 1950-01-01" + ) + lat = AuxCoord( + [1.0, 1.0, -1.0, -1.0], standard_name="latitude", units="degrees_north" + ) + lon = AuxCoord( + [179.0, 180.0, 180.0, 179.0], + standard_name="longitude", + units="degrees_east", + ) + cube = Cube( + da.from_array([[0.0, 1.0, 2.0, 3.0], [0.0, 0.0, 0.0, 0.0]]), + standard_name="Sea Surface Salinity", + units="0.001", + dim_coords_and_dims=[(time, 0)], + aux_coords_and_dims=[(lat, 1), (lon, 1)], + ) + return CubeList([cube]) + + +def test_unstructured_grid(unstructured_grid_cubes): + """Test processing unstructured data.""" + fixed_cubes = fix_metadata( + unstructured_grid_cubes, + "sos", + "oras5", + "oras5", + "Omon", + ) + + assert len(fixed_cubes) == 1 + fixed_cube = fixed_cubes[0] + + assert fixed_cube.shape == (2, 4) + + assert fixed_cube.coords("time", dim_coords=True) + assert fixed_cube.coord_dims("time") == (0,) + + assert fixed_cube.coords("latitude", dim_coords=False) + assert fixed_cube.coord_dims("latitude") == (1,) + lat = fixed_cube.coord("latitude") + np.testing.assert_allclose(lat.points, [1, 1, -1, -1]) + assert lat.bounds is None + + assert fixed_cube.coords("longitude", dim_coords=False) + assert fixed_cube.coord_dims("longitude") == (1,) + lon = fixed_cube.coord("longitude") + np.testing.assert_allclose(lon.points, [179, 180, 180, 179]) + assert lon.bounds is None + + From 975e327cf822b45bfb103ea4446ad10205e59be3 Mon Sep 17 00:00:00 2001 From: Katja Weigel Date: Thu, 13 Nov 2025 12:34:05 +0100 Subject: [PATCH 06/32] Remove axis from test. --- tests/integration/cmor/_fixes/oras5/test_oras5.py | 1 - 1 file changed, 1 deletion(-) diff --git a/tests/integration/cmor/_fixes/oras5/test_oras5.py b/tests/integration/cmor/_fixes/oras5/test_oras5.py index a96d0e6aaf..5a7df23008 100644 --- a/tests/integration/cmor/_fixes/oras5/test_oras5.py +++ b/tests/integration/cmor/_fixes/oras5/test_oras5.py @@ -82,7 +82,6 @@ def _oras5_depth(): ) return DimCoord( values, - axis="Z", long_name="Vertical T levels", units=Unit("m"), var_name="deptht", From aa8d621a2856a9e424110e6162e25e8f343535ba Mon Sep 17 00:00:00 2001 From: Katja Weigel Date: Thu, 13 Nov 2025 14:31:41 +0100 Subject: [PATCH 07/32] Change cmor table name to native6. 
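The key passed to CMOR_TABLES has to be a project that ESMValCore already knows about. "native6" ships with the package and reuses the CMIP6 tables, which is presumably why it is used here; an "ORAS5" key only exists once the project entry added in the first patch is read from config-developer.yml. A quick lookup for orientation (illustrative only):

    from esmvalcore.cmor.table import CMOR_TABLES

    vardef = CMOR_TABLES["native6"].get_variable("Omon", "tos")
    print(vardef.short_name, vardef.units)  # tos degC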
--- tests/integration/cmor/_fixes/oras5/test_oras5.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/tests/integration/cmor/_fixes/oras5/test_oras5.py b/tests/integration/cmor/_fixes/oras5/test_oras5.py index 5a7df23008..90f18c8b79 100644 --- a/tests/integration/cmor/_fixes/oras5/test_oras5.py +++ b/tests/integration/cmor/_fixes/oras5/test_oras5.py @@ -194,7 +194,7 @@ def oras5_3d(frequency): return CubeList([cube]) def cmor_2d(mip, short_name): - cmor_table = CMOR_TABLES["oras5"] + cmor_table = CMOR_TABLES["native6"] vardef = cmor_table.get_variable(mip, short_name) if "mon" in mip: time = DimCoord( @@ -224,7 +224,7 @@ def cmor_2d(mip, short_name): def cmor_3d(mip, short_name): - cmor_table = CMOR_TABLES["oras5"] + cmor_table = CMOR_TABLES["native6"] vardef = cmor_table.get_variable(mip, short_name) cube = Cube( np.ones((3, 2, 3, 3)), @@ -268,7 +268,7 @@ def test_cmorization(oras5_cubes, cmor_cubes, var, mip): cmor_cube = cmor_cubes[0] # Test that CMOR checks are passing - fixed_cubes = cmor_check_metadata(fixed_cube, "oras5", mip, var) + fixed_cubes = cmor_check_metadata(fixed_cube, "native6", mip, var) if fixed_cube.coords("time"): for cube in [fixed_cube, cmor_cube]: From c7d670d723527f197383122f358a6ad9c8a7ec0f Mon Sep 17 00:00:00 2001 From: Katja Weigel Date: Thu, 13 Nov 2025 14:45:34 +0100 Subject: [PATCH 08/32] Another axis removed. --- tests/integration/cmor/_fixes/oras5/test_oras5.py | 1 - 1 file changed, 1 deletion(-) diff --git a/tests/integration/cmor/_fixes/oras5/test_oras5.py b/tests/integration/cmor/_fixes/oras5/test_oras5.py index 90f18c8b79..e19553af1b 100644 --- a/tests/integration/cmor/_fixes/oras5/test_oras5.py +++ b/tests/integration/cmor/_fixes/oras5/test_oras5.py @@ -148,7 +148,6 @@ def _cmor_depth(): ) return DimCoord( values, - axis="Z", long_name="ocean depth coordinate", standard_name="depth", units=Unit("m"), From 0086844a5257b11ec6d69722a01c5814f8a38944 Mon Sep 17 00:00:00 2001 From: Katja Weigel Date: Thu, 13 Nov 2025 14:54:59 +0100 Subject: [PATCH 09/32] Add missing time. --- tests/integration/cmor/_fixes/oras5/test_oras5.py | 10 ++++++++-- 1 file changed, 8 insertions(+), 2 deletions(-) diff --git a/tests/integration/cmor/_fixes/oras5/test_oras5.py b/tests/integration/cmor/_fixes/oras5/test_oras5.py index e19553af1b..2e4a700c1d 100644 --- a/tests/integration/cmor/_fixes/oras5/test_oras5.py +++ b/tests/integration/cmor/_fixes/oras5/test_oras5.py @@ -57,7 +57,6 @@ def _oras5_longitude(): ) - def _oras5_time(frequency): if frequency == "invariant": timestamps = [788928] # hours since 1900 at 1 january 1990 @@ -73,6 +72,7 @@ def _oras5_time(frequency): units=Unit("hours since 1900-01-0100:00:00.0", calendar="gregorian"), ) + def _oras5_depth(): values = np.array( [ @@ -162,7 +162,13 @@ def _cmor_data(mip): return np.arange(27).reshape(3, 3, 3)[:, ::-1, :] -def oras5_2d(frequency): +def oras5_2d(frequency): + if frequency == "monthly": + time = DimCoord( + [-31, 0, 31], standard_name="time", units="days since 1850-01-01" + ) + else: + time = _oras5_time(frequency) cube = Cube( _oras5_data("monthly"), long_name=None, From c03c7c34702211bd5c7fe02d937ad6ad1d625897 Mon Sep 17 00:00:00 2001 From: Katja Weigel Date: Thu, 13 Nov 2025 15:08:34 +0100 Subject: [PATCH 10/32] Some more fixes for key related to CMOR. 
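Besides switching the table and project keys to "ORAS5", this replaces the invalid standard name "Sea Surface Salinity" in the unstructured-grid fixture: iris only accepts names from the CF standard name table. A minimal illustration (not part of the change):

    from iris.cube import Cube

    cube = Cube([0.0])
    cube.standard_name = "sea_surface_salinity"  # valid CF standard name
    try:
        cube.standard_name = "Sea Surface Salinity"
    except ValueError:
        print("rejected: not a CF standard name")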
--- tests/integration/cmor/_fixes/oras5/test_oras5.py | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/tests/integration/cmor/_fixes/oras5/test_oras5.py b/tests/integration/cmor/_fixes/oras5/test_oras5.py index 2e4a700c1d..17528820a5 100644 --- a/tests/integration/cmor/_fixes/oras5/test_oras5.py +++ b/tests/integration/cmor/_fixes/oras5/test_oras5.py @@ -199,7 +199,7 @@ def oras5_3d(frequency): return CubeList([cube]) def cmor_2d(mip, short_name): - cmor_table = CMOR_TABLES["native6"] + cmor_table = CMOR_TABLES["ORAS5"] vardef = cmor_table.get_variable(mip, short_name) if "mon" in mip: time = DimCoord( @@ -229,7 +229,7 @@ def cmor_2d(mip, short_name): def cmor_3d(mip, short_name): - cmor_table = CMOR_TABLES["native6"] + cmor_table = CMOR_TABLES["ORAS5"] vardef = cmor_table.get_variable(mip, short_name) cube = Cube( np.ones((3, 2, 3, 3)), @@ -266,14 +266,14 @@ def cmor_3d(mip, short_name): @pytest.mark.parametrize("oras5_cubes, cmor_cubes, var, mip", VARIABLES) def test_cmorization(oras5_cubes, cmor_cubes, var, mip): """Verify that cmorization results in the expected target cube.""" - fixed_cubes = fix_metadata(oras5_cubes, var, "oras5", "oras5", mip) + fixed_cubes = fix_metadata(oras5_cubes, var, "ORAS5", "oras5", mip) assert len(fixed_cubes) == 1 fixed_cube = fixed_cubes[0] cmor_cube = cmor_cubes[0] # Test that CMOR checks are passing - fixed_cubes = cmor_check_metadata(fixed_cube, "native6", mip, var) + fixed_cubes = cmor_check_metadata(fixed_cube, "ORAS5", mip, var) if fixed_cube.coords("time"): for cube in [fixed_cube, cmor_cube]: @@ -312,7 +312,7 @@ def unstructured_grid_cubes(): ) cube = Cube( da.from_array([[0.0, 1.0, 2.0, 3.0], [0.0, 0.0, 0.0, 0.0]]), - standard_name="Sea Surface Salinity", + standard_name="sea_surface_salinity", units="0.001", dim_coords_and_dims=[(time, 0)], aux_coords_and_dims=[(lat, 1), (lon, 1)], @@ -325,7 +325,7 @@ def test_unstructured_grid(unstructured_grid_cubes): fixed_cubes = fix_metadata( unstructured_grid_cubes, "sos", - "oras5", + "ORAS5", "oras5", "Omon", ) From 2c8daa182153091dd08197abeacfbd9213c288c5 Mon Sep 17 00:00:00 2001 From: Katja Weigel Date: Thu, 13 Nov 2025 15:25:43 +0100 Subject: [PATCH 11/32] Change to native names. 
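The test now passes the native ORAS5 variable names to fix_metadata instead of the CMOR short names. For reference, the mapping defined in oras5-mappings.yml (listed here for readability only, not part of the change; the RAW_NAMES name exists only in this snippet):

    # CMOR short name -> native ORAS5 name (from oras5-mappings.yml)
    RAW_NAMES = {
        "uo": "vozocrte",
        "vo": "vomecrtn",
        "tos": "sosstsst",
        "sos": "sosaline",
        "zos": "sossheig",
        "mlotst": "somxl010",
        "thetao": "votemper",
        "so": "vosaline",
    }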
--- .../integration/cmor/_fixes/oras5/test_oras5.py | 16 ++++++++-------- 1 file changed, 8 insertions(+), 8 deletions(-) diff --git a/tests/integration/cmor/_fixes/oras5/test_oras5.py b/tests/integration/cmor/_fixes/oras5/test_oras5.py index 17528820a5..aabb60fe1e 100644 --- a/tests/integration/cmor/_fixes/oras5/test_oras5.py +++ b/tests/integration/cmor/_fixes/oras5/test_oras5.py @@ -251,14 +251,14 @@ def cmor_3d(mip, short_name): VARIABLES = [ pytest.param(a, b, c, d, id=c + "_" + d) for (a, b, c, d) in [ - (oras5_3d("monthly"), cmor_3d("Omon", "so"), "so", "Omon"), - (oras5_3d("monthly"), cmor_3d("Omon", "thetao"), "thetao", "Omon"), - (oras5_3d("monthly"), cmor_3d("Omon", "uo"), "uo", "Omon"), - (oras5_3d("monthly"), cmor_3d("Omon", "vo"), "vo", "Omon"), - (oras5_2d("monthly"), cmor_2d("Omon", "mlotst"), "mlotst", "Omon"), - (oras5_2d("monthly"), cmor_2d("Omon", "tos"), "tos", "Omon"), - (oras5_2d("monthly"), cmor_2d("Omon", "sos"), "sos", "Omon"), - (oras5_2d("monthly"), cmor_2d("Omon", "zos"), "zos", "Omon"), + (oras5_3d("monthly"), cmor_3d("Omon", "so"), "vosaline", "Omon"), + (oras5_3d("monthly"), cmor_3d("Omon", "thetao"), "votemper", "Omon"), + (oras5_3d("monthly"), cmor_3d("Omon", "uo"), "vozocrte", "Omon"), + (oras5_3d("monthly"), cmor_3d("Omon", "vo"), "vozocrte", "Omon"), + (oras5_2d("monthly"), cmor_2d("Omon", "mlotst"), "somxl010", "Omon"), + (oras5_2d("monthly"), cmor_2d("Omon", "tos"), "sosstsst", "Omon"), + (oras5_2d("monthly"), cmor_2d("Omon", "sos"), "sosaline", "Omon"), + (oras5_2d("monthly"), cmor_2d("Omon", "zos"), "sossheig", "Omon"), ] ] From 6d78213bf9ba047ebbae0f5002e2c5370b73fdea Mon Sep 17 00:00:00 2001 From: Katja Weigel Date: Thu, 13 Nov 2025 15:49:33 +0100 Subject: [PATCH 12/32] variable names. --- .../cmor/_fixes/oras5/test_oras5.py | 34 +++++++++++++------ 1 file changed, 24 insertions(+), 10 deletions(-) diff --git a/tests/integration/cmor/_fixes/oras5/test_oras5.py b/tests/integration/cmor/_fixes/oras5/test_oras5.py index aabb60fe1e..bcd953bb47 100644 --- a/tests/integration/cmor/_fixes/oras5/test_oras5.py +++ b/tests/integration/cmor/_fixes/oras5/test_oras5.py @@ -248,21 +248,35 @@ def cmor_3d(mip, short_name): return CubeList([cube]) +# VARIABLES = [ +# pytest.param(a, b, c, d, id=c + "_" + d) +# for (a, b, c, d) in [ +# (oras5_3d("monthly"), cmor_3d("Omon", "so"), "vosaline", "Omon"), +# (oras5_3d("monthly"), cmor_3d("Omon", "thetao"), "votemper", "Omon"), +# (oras5_3d("monthly"), cmor_3d("Omon", "uo"), "vozocrte", "Omon"), +# (oras5_3d("monthly"), cmor_3d("Omon", "vo"), "vozocrte", "Omon"), +# (oras5_2d("monthly"), cmor_2d("Omon", "mlotst"), "somxl010", "Omon"), +# (oras5_2d("monthly"), cmor_2d("Omon", "tos"), "sosstsst", "Omon"), +# (oras5_2d("monthly"), cmor_2d("Omon", "sos"), "sosaline", "Omon"), +# (oras5_2d("monthly"), cmor_2d("Omon", "zos"), "sossheig", "Omon"), +# ] +# ] + + VARIABLES = [ pytest.param(a, b, c, d, id=c + "_" + d) for (a, b, c, d) in [ - (oras5_3d("monthly"), cmor_3d("Omon", "so"), "vosaline", "Omon"), - (oras5_3d("monthly"), cmor_3d("Omon", "thetao"), "votemper", "Omon"), - (oras5_3d("monthly"), cmor_3d("Omon", "uo"), "vozocrte", "Omon"), - (oras5_3d("monthly"), cmor_3d("Omon", "vo"), "vozocrte", "Omon"), - (oras5_2d("monthly"), cmor_2d("Omon", "mlotst"), "somxl010", "Omon"), - (oras5_2d("monthly"), cmor_2d("Omon", "tos"), "sosstsst", "Omon"), - (oras5_2d("monthly"), cmor_2d("Omon", "sos"), "sosaline", "Omon"), - (oras5_2d("monthly"), cmor_2d("Omon", "zos"), "sossheig", "Omon"), + (oras5_3d("monthly"), cmor_3d("Omon", "so"), 
"so", "Omon"), + (oras5_3d("monthly"), cmor_3d("Omon", "thetao"), "thetao", "Omon"), + (oras5_3d("monthly"), cmor_3d("Omon", "uo"), "uo", "Omon"), + (oras5_3d("monthly"), cmor_3d("Omon", "vo"), "vo", "Omon"), + (oras5_2d("monthly"), cmor_2d("Omon", "mlotst"), "mlotst", "Omon"), + (oras5_2d("monthly"), cmor_2d("Omon", "tos"), "tos", "Omon"), + (oras5_2d("monthly"), cmor_2d("Omon", "sos"), "sos", "Omon"), + (oras5_2d("monthly"), cmor_2d("Omon", "zos"), "zos", "Omon"), ] ] - @pytest.mark.parametrize("oras5_cubes, cmor_cubes, var, mip", VARIABLES) def test_cmorization(oras5_cubes, cmor_cubes, var, mip): """Verify that cmorization results in the expected target cube.""" @@ -324,7 +338,7 @@ def test_unstructured_grid(unstructured_grid_cubes): """Test processing unstructured data.""" fixed_cubes = fix_metadata( unstructured_grid_cubes, - "sos", + "sosaline", "ORAS5", "oras5", "Omon", From 3809f2a7edeaee9a379cd768b61145249de65e97 Mon Sep 17 00:00:00 2001 From: Katja Weigel Date: Thu, 13 Nov 2025 15:54:53 +0100 Subject: [PATCH 13/32] Added Omon. --- esmvalcore/config/extra_facets/oras5-mappings.yml | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/esmvalcore/config/extra_facets/oras5-mappings.yml b/esmvalcore/config/extra_facets/oras5-mappings.yml index cd9b013c36..b415a5ef3a 100644 --- a/esmvalcore/config/extra_facets/oras5-mappings.yml +++ b/esmvalcore/config/extra_facets/oras5-mappings.yml @@ -47,4 +47,9 @@ ORAS5: raw_units: degC so: raw_name: vosaline - raw_units: '0.001' \ No newline at end of file + raw_units: '0.001' + + # MIP-specific settings + Omon: + '*': + tres: 1M \ No newline at end of file From f0d2d27be712196ee73923dbe85ed942dde8a694 Mon Sep 17 00:00:00 2001 From: Katja Weigel Date: Thu, 13 Nov 2025 17:48:50 +0100 Subject: [PATCH 14/32] adding tos as def. 
--- .../cmor/_fixes/oras5/test_oras5.py | 66 +++++++++++++++---- 1 file changed, 52 insertions(+), 14 deletions(-) diff --git a/tests/integration/cmor/_fixes/oras5/test_oras5.py b/tests/integration/cmor/_fixes/oras5/test_oras5.py index bcd953bb47..c94a84db93 100644 --- a/tests/integration/cmor/_fixes/oras5/test_oras5.py +++ b/tests/integration/cmor/_fixes/oras5/test_oras5.py @@ -39,9 +39,9 @@ def test_get_frequency_monthly(): def _oras5_latitude(): return DimCoord( np.array([90.0, 0.0, -90.0]), - standard_name="latitude", - long_name="latitude", - var_name="latitude", + standard_name="grid_latitude", + long_name="grid_latitude", + var_name="grid_latitude", units=Unit("degrees"), ) @@ -49,9 +49,9 @@ def _oras5_latitude(): def _oras5_longitude(): return DimCoord( np.array([0, 180, 359.75]), - standard_name="longitude", - long_name="longitude", - var_name="longitude", + standard_name="grid_longitude", + long_name="grid_longitude", + var_name="grid_longitude", units=Unit("degrees"), circular=True, ) @@ -263,17 +263,55 @@ def cmor_3d(mip, short_name): # ] + +def tos_oras5_monthly(): + time = _oras5_time("monthly") + cube = Cube( + _oras5_data("monthly"), + long_name="Sea Surface Temperature", + var_name="sosstsst", + units="degC", + dim_coords_and_dims=[ + (time, 0), + (_oras5_latitude(), 1), + (_oras5_longitude(), 2), + ], + ) + return CubeList([cube]) + + +def tos_cmor_omon(): + cmor_table = CMOR_TABLES["ORAS5"] + vardef = cmor_table.get_variable("Omon", "tas") + time = _cmor_time("Omon", bounds=True) + data = _cmor_data("Omon") + cube = Cube( + data.astype("float32"), + long_name=vardef.long_name, + var_name=vardef.short_name, + standard_name=vardef.standard_name, + units=Unit(vardef.units), + dim_coords_and_dims=[ + (time, 0), + (_cmor_latitude(), 1), + (_cmor_longitude(), 2), + ], + attributes={"comment": COMMENT}, + ) + return CubeList([cube]) + VARIABLES = [ pytest.param(a, b, c, d, id=c + "_" + d) for (a, b, c, d) in [ - (oras5_3d("monthly"), cmor_3d("Omon", "so"), "so", "Omon"), - (oras5_3d("monthly"), cmor_3d("Omon", "thetao"), "thetao", "Omon"), - (oras5_3d("monthly"), cmor_3d("Omon", "uo"), "uo", "Omon"), - (oras5_3d("monthly"), cmor_3d("Omon", "vo"), "vo", "Omon"), - (oras5_2d("monthly"), cmor_2d("Omon", "mlotst"), "mlotst", "Omon"), - (oras5_2d("monthly"), cmor_2d("Omon", "tos"), "tos", "Omon"), - (oras5_2d("monthly"), cmor_2d("Omon", "sos"), "sos", "Omon"), - (oras5_2d("monthly"), cmor_2d("Omon", "zos"), "zos", "Omon"), + # (oras5_3d("monthly"), cmor_3d("Omon", "so"), "so", "Omon"), + # (oras5_3d("monthly"), cmor_3d("Omon", "thetao"), "thetao", "Omon"), + # (oras5_3d("monthly"), cmor_3d("Omon", "uo"), "uo", "Omon"), + # (oras5_3d("monthly"), cmor_3d("Omon", "vo"), "vo", "Omon"), + # (oras5_2d("monthly"), cmor_2d("Omon", "mlotst"), "mlotst", "Omon"), + # (oras5_2d("monthly"), cmor_2d("Omon", "tos"), "tos", "Omon"), + # (oras5_2d("monthly"), cmor_2d("Omon", "sos"), "sosaline", "Omon"), + (tos_oras5_monthly(), tos_cmor_omon(), "tos", "Omon"), + # (oras5_2d("monthly"), cmor_2d("Omon", "zos"), "zos", "Omon"), ] ] From ce0c5db8fb029fe6ba1af9806b4be1776fcd96d1 Mon Sep 17 00:00:00 2001 From: Katja Weigel Date: Thu, 13 Nov 2025 18:10:20 +0100 Subject: [PATCH 15/32] Variable name. 
--- tests/integration/cmor/_fixes/oras5/test_oras5.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/integration/cmor/_fixes/oras5/test_oras5.py b/tests/integration/cmor/_fixes/oras5/test_oras5.py index c94a84db93..6c1836ee1c 100644 --- a/tests/integration/cmor/_fixes/oras5/test_oras5.py +++ b/tests/integration/cmor/_fixes/oras5/test_oras5.py @@ -310,7 +310,7 @@ def tos_cmor_omon(): # (oras5_2d("monthly"), cmor_2d("Omon", "mlotst"), "mlotst", "Omon"), # (oras5_2d("monthly"), cmor_2d("Omon", "tos"), "tos", "Omon"), # (oras5_2d("monthly"), cmor_2d("Omon", "sos"), "sosaline", "Omon"), - (tos_oras5_monthly(), tos_cmor_omon(), "tos", "Omon"), + (tos_oras5_monthly(), tos_cmor_omon(), "sosstsst", "Omon"), # (oras5_2d("monthly"), cmor_2d("Omon", "zos"), "zos", "Omon"), ] ] From f2cca1b70471fdd8b3e593fc71a8c5027029c554 Mon Sep 17 00:00:00 2001 From: Katja Weigel Date: Fri, 14 Nov 2025 14:01:15 +0100 Subject: [PATCH 16/32] Using raw_name. --- tests/integration/cmor/_fixes/oras5/test_oras5.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/tests/integration/cmor/_fixes/oras5/test_oras5.py b/tests/integration/cmor/_fixes/oras5/test_oras5.py index 6c1836ee1c..ac07eceddc 100644 --- a/tests/integration/cmor/_fixes/oras5/test_oras5.py +++ b/tests/integration/cmor/_fixes/oras5/test_oras5.py @@ -264,7 +264,7 @@ def cmor_3d(mip, short_name): -def tos_oras5_monthly(): +def sosstsst_oras5_monthly(): time = _oras5_time("monthly") cube = Cube( _oras5_data("monthly"), @@ -310,7 +310,7 @@ def tos_cmor_omon(): # (oras5_2d("monthly"), cmor_2d("Omon", "mlotst"), "mlotst", "Omon"), # (oras5_2d("monthly"), cmor_2d("Omon", "tos"), "tos", "Omon"), # (oras5_2d("monthly"), cmor_2d("Omon", "sos"), "sosaline", "Omon"), - (tos_oras5_monthly(), tos_cmor_omon(), "sosstsst", "Omon"), + (sosstsst_oras5_monthly(), tos_cmor_omon(), "sosstsst", "Omon"), # (oras5_2d("monthly"), cmor_2d("Omon", "zos"), "zos", "Omon"), ] ] From 92177ab46dd5129b2fd57ac0f95e228e5be7480d Mon Sep 17 00:00:00 2001 From: Katja Weigel Date: Fri, 14 Nov 2025 15:30:35 +0100 Subject: [PATCH 17/32] Typo. --- tests/integration/cmor/_fixes/oras5/test_oras5.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/integration/cmor/_fixes/oras5/test_oras5.py b/tests/integration/cmor/_fixes/oras5/test_oras5.py index ac07eceddc..b4e9bac834 100644 --- a/tests/integration/cmor/_fixes/oras5/test_oras5.py +++ b/tests/integration/cmor/_fixes/oras5/test_oras5.py @@ -282,7 +282,7 @@ def sosstsst_oras5_monthly(): def tos_cmor_omon(): cmor_table = CMOR_TABLES["ORAS5"] - vardef = cmor_table.get_variable("Omon", "tas") + vardef = cmor_table.get_variable("Omon", "tos") time = _cmor_time("Omon", bounds=True) data = _cmor_data("Omon") cube = Cube( From ef989e593e2a91e44497d92aba99494aff66a7d9 Mon Sep 17 00:00:00 2001 From: Katja Weigel Date: Fri, 14 Nov 2025 15:44:16 +0100 Subject: [PATCH 18/32] test raw_name. 
--- tests/integration/cmor/_fixes/oras5/test_oras5.py | 1 + 1 file changed, 1 insertion(+) diff --git a/tests/integration/cmor/_fixes/oras5/test_oras5.py b/tests/integration/cmor/_fixes/oras5/test_oras5.py index b4e9bac834..7859039f41 100644 --- a/tests/integration/cmor/_fixes/oras5/test_oras5.py +++ b/tests/integration/cmor/_fixes/oras5/test_oras5.py @@ -269,6 +269,7 @@ def sosstsst_oras5_monthly(): cube = Cube( _oras5_data("monthly"), long_name="Sea Surface Temperature", + raw_name="sosstsst", var_name="sosstsst", units="degC", dim_coords_and_dims=[ From 9dd1939fa533fa42873aa79d298af3f2649ff6d8 Mon Sep 17 00:00:00 2001 From: Katja Weigel Date: Fri, 14 Nov 2025 16:08:35 +0100 Subject: [PATCH 19/32] Remove raw_name. --- tests/integration/cmor/_fixes/oras5/test_oras5.py | 1 - 1 file changed, 1 deletion(-) diff --git a/tests/integration/cmor/_fixes/oras5/test_oras5.py b/tests/integration/cmor/_fixes/oras5/test_oras5.py index 7859039f41..b4e9bac834 100644 --- a/tests/integration/cmor/_fixes/oras5/test_oras5.py +++ b/tests/integration/cmor/_fixes/oras5/test_oras5.py @@ -269,7 +269,6 @@ def sosstsst_oras5_monthly(): cube = Cube( _oras5_data("monthly"), long_name="Sea Surface Temperature", - raw_name="sosstsst", var_name="sosstsst", units="degC", dim_coords_and_dims=[ From 1adfd7416c993cfe893d23205356342d48c24b35 Mon Sep 17 00:00:00 2001 From: Jan-Hendrik Malles Date: Tue, 25 Nov 2025 11:52:06 +0100 Subject: [PATCH 20/32] New testing approach based on ICON --- .../cmor/_fixes/oras5/test_oras5.py | 2775 +++++++++++++++-- .../cmor/_fixes/test_data/oras5_2d.nc | Bin 0 -> 21889 bytes .../cmor/_fixes/test_data/oras5_3d.nc | Bin 0 -> 76353 bytes .../cmor/_fixes/test_data/oras5_grid.nc | Bin 0 -> 17127 bytes 4 files changed, 2445 insertions(+), 330 deletions(-) create mode 100644 tests/integration/cmor/_fixes/test_data/oras5_2d.nc create mode 100644 tests/integration/cmor/_fixes/test_data/oras5_3d.nc create mode 100644 tests/integration/cmor/_fixes/test_data/oras5_grid.nc diff --git a/tests/integration/cmor/_fixes/oras5/test_oras5.py b/tests/integration/cmor/_fixes/oras5/test_oras5.py index b4e9bac834..8b661ec6f1 100644 --- a/tests/integration/cmor/_fixes/oras5/test_oras5.py +++ b/tests/integration/cmor/_fixes/oras5/test_oras5.py @@ -1,405 +1,2520 @@ -"""Tests for the fixes of ORAS5.""" +"""Test the ICON on-the-fly CMORizer.""" -import datetime +from copy import deepcopy +from datetime import datetime +from pathlib import Path +from unittest import mock -import dask.array as da +import iris import numpy as np import pytest from cf_units import Unit -from iris.coords import AuxCoord, DimCoord +from iris import NameConstraint +from iris.coords import AuxCoord, CellMethod, DimCoord from iris.cube import Cube, CubeList -from esmvalcore.cmor._fixes.fix import Fix, GenericFix +import esmvalcore.cmor._fixes.oras5.oras5 +from esmvalcore.cmor._fixes.fix import GenericFix +from esmvalcore.cmor._fixes.oras5._base_fixes import Oras5Fix from esmvalcore.cmor._fixes.oras5.oras5 import ( - AllVars) -from esmvalcore.cmor.fix import fix_metadata -from esmvalcore.cmor.table import CMOR_TABLES, get_var_info -from esmvalcore.preprocessor import cmor_check_metadata - -COMMENT = ( - "Contains modified Copernicus Climate Change Service Information " - f"{datetime.datetime.now().year}" + AllVars, + # Clwvi, + # Hfls, + # Hfss, + # Rtmt, + # Rtnt, ) +from esmvalcore.cmor.fix import Fix +from esmvalcore.cmor.table import CoordinateInfo, get_var_info +from esmvalcore.config import CFG +from esmvalcore.config._config import 
get_extra_facets +from esmvalcore.dataset import Dataset -def test_get_frequency_monthly(): - """Test cubes with monthly frequency.""" - time = DimCoord( - [0, 31, 59], - standard_name="time", - units=Unit("hours since 1900-01-01"), - ) - cube = Cube( - [1, 6, 3], - var_name="random_var", - dim_coords_and_dims=[(time, 0)], - ) - cube.coord("time").convert_units("days since 1850-1-1 00:00:00.0") +TEST_GRID_FILE_URI = ( + "https://github.com/ESMValGroup/ESMValCore/raw/main/tests/integration/" + "cmor/_fixes/test_data/oras5_grid.nc" +) +TEST_GRID_FILE_NAME = "oras5_grid.nc" -def _oras5_latitude(): - return DimCoord( - np.array([90.0, 0.0, -90.0]), - standard_name="grid_latitude", - long_name="grid_latitude", - var_name="grid_latitude", - units=Unit("degrees"), - ) +@pytest.fixture(autouse=True) +def tmp_cache_dir(monkeypatch, tmp_path): + """Use temporary path as cache directory for all tests in this module.""" + monkeypatch.setattr(Oras5Fix, "CACHE_DIR", tmp_path) -def _oras5_longitude(): - return DimCoord( - np.array([0, 180, 359.75]), - standard_name="grid_longitude", - long_name="grid_longitude", - var_name="grid_longitude", - units=Unit("degrees"), - circular=True, - ) +# Note: test_data_path is defined in tests/integration/cmor/_fixes/conftest.py -def _oras5_time(frequency): - if frequency == "invariant": - timestamps = [788928] # hours since 1900 at 1 january 1990 - elif frequency == "monthly": - timestamps = [788928, 789672, 790344] - else: - raise NotImplementedError(f"Invalid frequency {frequency}") - return DimCoord( - np.array(timestamps, dtype="int32"), - standard_name="time", - long_name="time", - var_name="time", - units=Unit("hours since 1900-01-0100:00:00.0", calendar="gregorian"), - ) +@pytest.fixture +def cubes_2d(test_data_path): + """2D sample cubes.""" + nc_path = test_data_path / "oras5_2d.nc" + return iris.load(str(nc_path)) -def _oras5_depth(): - values = np.array( - [ - 0.5, - 6000, - ] - ) - return DimCoord( - values, - long_name="Vertical T levels", - units=Unit("m"), - var_name="deptht", - attributes={"positive": "down"}, - ) +@pytest.fixture +def cubes_3d(test_data_path): + """3D sample cubes.""" + nc_path = test_data_path / "oras5_3d.nc" + return iris.load(str(nc_path)) -def _oras5_data(frequency): - if frequency == "invariant": - return np.arange(9).reshape(1, 3, 3) - return np.arange(27).reshape(3, 3, 3) +@pytest.fixture +def cubes_grid(test_data_path): + """Grid description sample cubes.""" + nc_path = test_data_path / "oras5_grid.nc" + return iris.load(str(nc_path)) -def _cmor_latitude(): - return DimCoord( - np.array([-90.0, 0.0, 90.0]), - standard_name="latitude", - long_name="Latitude", +@pytest.fixture +def cubes_regular_grid(): + """Cube with regular grid.""" + time_coord = DimCoord( + [0], + var_name="time", + standard_name="time", + units="days since 1850-01-01", + ) + lat_coord = DimCoord( + [0.0, 1.0], var_name="lat", - units=Unit("degrees_north"), - bounds=np.array([[-90.0, -45.0], [-45.0, 45.0], [45.0, 90.0]]), + standard_name="latitude", + long_name="latitude", + units="degrees_north", + ) + lon_coord = DimCoord( + [-1.0, 1.0], + var_name="lon", + standard_name="longitude", + long_name="longitude", + units="degrees_east", + ) + cube = Cube( + [[[0.0, 1.0], [2.0, 3.0]]], + var_name="sosstsst", + units="degC", + dim_coords_and_dims=[(time_coord, 0), (lat_coord, 1), (lon_coord, 2)], ) + return CubeList([cube]) -def _cmor_longitude(): - return DimCoord( - np.array([0, 180, 359.75]), - standard_name="longitude", - long_name="Longitude", 
+@pytest.fixture +def cubes_2d_lat_lon_grid(): + """Cube with 2D latitude and longitude.""" + time_coord = DimCoord( + [0], + var_name="time", + standard_name="time", + units="days since 1850-01-01", + ) + lat_coord = AuxCoord( + [[0.0, 0.0, 0.0], [1.0, 1.0, 1.0]], + var_name="lat", + standard_name="latitude", + long_name="latitude", + units="degrees_north", + ) + lon_coord = AuxCoord( + [[0.0, 1.0, 2.0], [0.0, 1.0, 2.0]], var_name="lon", - units=Unit("degrees_east"), - bounds=np.array([[-0.125, 90.0], [90.0, 269.875], [269.875, 359.875]]), - circular=True, + standard_name="longitude", + long_name="longitude", + units="degrees_east", + ) + cube = Cube( + [[[0.0, 1.0, 2.0], [2.0, 3.0, 4.0]]], + var_name="sosstsst", + units="degC", + dim_coords_and_dims=[(time_coord, 0)], + aux_coords_and_dims=[(lat_coord, (1, 2)), (lon_coord, (1, 2))], ) + return CubeList([cube]) -def _cmor_time(mip, bounds=None, shifted=False): - """Provide expected time coordinate after fixes.""" - if "mon" in mip: - timestamps = np.array([51149.5, 51179.0, 51208.5]) - if bounds is not None: - bounds = np.array( - [[51134.0, 51165.0], [51165.0, 51193.0], [51193.0, 51224.0]] - ) - else: - raise NotImplementedError() - - return DimCoord( - np.array(timestamps, dtype=float), - standard_name="time", - long_name="time", +@pytest.fixture +def simple_unstructured_cube(): + """Create a cube with an unstructured grid.""" + time_coord = DimCoord( + [0], var_name="time", - units=Unit("days since 1850-1-1 00:00:00", calendar="gregorian"), - bounds=bounds, + standard_name="time", + units="days since 1850-01-01", + ) + hdepth_coord = DimCoord([0, 1, 2], var_name="depth") + lat_coord = AuxCoord( + [0.0, 1.0], + var_name="lat", + standard_name="latitude", + long_name="latitude", + units="degrees_north", + ) + lon_coord = AuxCoord( + [0.0, 1.0], + var_name="lon", + standard_name="longitude", + long_name="longitude", + units="degrees_east", + ) + cube = Cube( + [[[0.0, 1.0], [2.0, 3.0], [4.0, 5.0]]], + var_name="votemper", + units="degC", + dim_coords_and_dims=[(time_coord, 0), (height_coord, 1)], + aux_coords_and_dims=[(lat_coord, 2), (lon_coord, 2)], ) + return cube -def _cmor_depth(): - values = np.array( - [ - 0.5, - 6000.0, - ] - ) - return DimCoord( - values, - long_name="ocean depth coordinate", - standard_name="depth", - units=Unit("m"), - var_name="lev", - attributes={"positive": "down"}, +def _get_fix(mip, short_name, fix_name, session=None): + """Load a fix from esmvalcore.cmor._fixes.oras5.oras5.""" + dataset = Dataset( + project="ORAS5", + dataset="ORAS5", + mip=mip, + short_name=short_name, ) + extra_facets = get_extra_facets(dataset, ()) + extra_facets["frequency"] = "mon" + extra_facets["exp"] = "omip" + extra_facets["horizontal_grid"] = '/work/bd1083/b382555/esmvalcore_dev/tests/integration/cmor/_fixes/test_data/oras5_grid.nc' + vardef = get_var_info(project="ORAS5", mip=mip, short_name=short_name) + cls = getattr(esmvalcore.cmor._fixes.oras5.oras5, fix_name) + fix = cls(vardef, extra_facets=extra_facets, session=session) + return fix -def _cmor_data(mip): - if mip == "fx": - return np.arange(9).reshape(3, 3)[::-1, :] - return np.arange(27).reshape(3, 3, 3)[:, ::-1, :] +def get_fix(mip, short_name, session=None): + """Load a variable fix from esmvalcore.cmor._fixes.oras5.oras5.""" + fix_name = short_name[0].upper() + short_name[1:] + return _get_fix(mip, short_name, fix_name, session=session) -def oras5_2d(frequency): - if frequency == "monthly": - time = DimCoord( - [-31, 0, 31], standard_name="time", units="days since 
1850-01-01" - ) - else: - time = _oras5_time(frequency) - cube = Cube( - _oras5_data("monthly"), - long_name=None, - var_name=None, - units="unknown", - dim_coords_and_dims=[ - (time, 0), - (_oras5_latitude(), 1), - (_oras5_longitude(), 2), - ], - ) - return CubeList([cube]) +def get_allvars_fix(mip, short_name, session=None): + """Load the AllVars fix from esmvalcore.cmor._fixes.oras5.oras5.""" + return _get_fix(mip, short_name, "AllVars", session=session) -def oras5_3d(frequency): - cube = Cube( - np.ones((3, 2, 3, 3)), - long_name=None, - var_name=None, - units="unknown", - dim_coords_and_dims=[ - (_oras5_time(frequency), 0), - (_oras5_depth(), 1), - (_oras5_latitude(), 2), - (_oras5_longitude(), 3), - ], - ) - return CubeList([cube]) +def fix_metadata(cubes, mip, short_name, session=None): + """Fix metadata of cubes.""" + fix = get_fix(mip, short_name, session=session) + cubes = fix.fix_metadata(cubes) + fix = get_allvars_fix(mip, short_name, session=session) + cubes = fix.fix_metadata(cubes) + return cubes + + +def fix_data(cube, mip, short_name, session=None): + """Fix data of cube.""" + fix = get_fix(mip, short_name, session=session) + cube = fix.fix_data(cube) + fix = get_allvars_fix(mip, short_name, session=session) + cube = fix.fix_data(cube) + return cube + -def cmor_2d(mip, short_name): - cmor_table = CMOR_TABLES["ORAS5"] - vardef = cmor_table.get_variable(mip, short_name) - if "mon" in mip: - time = DimCoord( - [-15.5, 15.5, 45.0], - bounds=[[-31.0, 0.0], [0.0, 31.0], [31.0, 59.0]], - standard_name="time", - long_name="time", - var_name="time", - units="days since 1850-01-01", +def check_ta_metadata(cubes): + """Check ta metadata.""" + assert len(cubes) == 1 + cube = cubes[0] + assert cube.var_name == "thetao" + assert cube.standard_name == "air_temperature" + assert cube.long_name == "Air Temperature" + assert cube.units == "degC" + assert "positive" not in cube.attributes + return cube + + +def check_tas_metadata(cubes): + """Check tas metadata.""" + assert len(cubes) == 1 + cube = cubes[0] + assert cube.var_name == "tos" + assert cube.standard_name == "sea_surface_temperature" + assert cube.long_name == "Sea Surface Temperature" + assert cube.units == "degC" + # assert "positive" not in cube.attributes + return cube + + +def check_siconc_metadata(cubes, var_name, long_name): + """Check tas metadata.""" + assert len(cubes) == 1 + cube = cubes[0] + assert cube.var_name == var_name + assert cube.standard_name == "sea_ice_area_fraction" + assert cube.long_name == long_name + assert cube.units == "%" + assert "positive" not in cube.attributes + return cube + + +def check_time(cube): + """Check time coordinate of cube.""" + assert cube.coords("time", dim_coords=True) + time = cube.coord("time", dim_coords=True) + assert time.var_name == "time" + assert time.standard_name == "time" + assert time.long_name == "time" + # assert time.units == Unit( + # "days since 1850-01-01", calendar="proleptic_gregorian" + # ) + # np.testing.assert_allclose(time.points, [54770.5]) + # np.testing.assert_allclose(time.bounds, [[54755.0, 54786.0]]) + assert time.attributes == {} + + +def check_model_level_metadata(cube): + """Check metadata of model_level coordinate.""" + assert cube.coords("model level number", dim_coords=True) + height = cube.coord("model level number", dim_coords=True) + assert height.var_name == "model_level" + assert height.standard_name is None + assert height.long_name == "model level number" + assert height.units == "no unit" + assert height.attributes == {"positive": "up"} + 
return height + + +def check_air_pressure_metadata(cube): + """Check metadata of air_pressure coordinate.""" + assert cube.coords("air_pressure", dim_coords=False) + plev = cube.coord("air_pressure", dim_coords=False) + assert plev.var_name == "plev" + assert plev.standard_name == "air_pressure" + assert plev.long_name == "pressure" + assert plev.units == "Pa" + assert plev.attributes == {"positive": "down"} + return plev + + +def check_height(cube, plev_has_bounds=True): + """Check height coordinate of cube.""" + height = check_model_level_metadata(cube) + np.testing.assert_array_equal(height.points, np.arange(47)) + assert height.bounds is None + + plev = check_air_pressure_metadata(cube) + assert cube.coord_dims("air_pressure") == (0, 1, 2) + + np.testing.assert_allclose( + plev.points[0, :4, 0], + [100566.234, 99652.07, 97995.77, 95686.08], + ) + if plev_has_bounds: + np.testing.assert_allclose( + plev.bounds[0, :4, 0], + [ + [100825.04, 100308.09], + [100308.09, 99000.336], + [99000.336, 97001.42], + [97001.42, 94388.59], + ], ) else: - time = _cmor_time(mip, bounds=True) - cube = Cube( - _cmor_data(mip).astype("float32"), - long_name=vardef.long_name, - var_name=vardef.short_name, - standard_name=vardef.standard_name, - units=Unit(vardef.units), - dim_coords_and_dims=[ - (time, 0), - (_cmor_latitude(), 1), - (_cmor_longitude(), 2), + assert plev.bounds is None + + +# def check_heightxm(cube, height_value): +# """Check scalar heightxm coordinate of cube.""" +# assert cube.coords("depth") +# height = cube.coord("depth") +# assert height.var_name == "depth" +# assert height.standard_name == "depth" +# assert height.long_name == "depth" +# assert height.units == "m" +# assert height.attributes == {"positive": "up"} +# np.testing.assert_allclose(height.points, [height_value]) +# assert height.bounds is None + + +def check_lat(cube): + """Check latitude coordinate of cube.""" + assert cube.coords("latitude", dim_coords=False) + lat = cube.coord("latitude", dim_coords=False) + assert lat.var_name == "lat" + assert lat.standard_name == "latitude" + assert lat.long_name == "latitude" + assert lat.units == "degrees_north" + assert lat.attributes == {} + np.testing.assert_allclose( + lat.points, + [-45.0, -45.0, -45.0, -45.0, 45.0, 45.0, 45.0, 45.0], + rtol=1e-5, + ) + np.testing.assert_allclose( + lat.bounds, + [ + [-90.0, 0.0, 0.0], + [-90.0, 0.0, 0.0], + [-90.0, 0.0, 0.0], + [-90.0, 0.0, 0.0], + [0.0, 0.0, 90.0], + [0.0, 0.0, 90.0], + [0.0, 0.0, 90.0], + [0.0, 0.0, 90.0], ], - attributes={"comment": COMMENT}, + rtol=1e-5, ) - return CubeList([cube]) + return lat -def cmor_3d(mip, short_name): - cmor_table = CMOR_TABLES["ORAS5"] - vardef = cmor_table.get_variable(mip, short_name) - cube = Cube( - np.ones((3, 2, 3, 3)), - long_name=vardef.long_name, - var_name=vardef.short_name, - standard_name=vardef.standard_name, - units=Unit(vardef.units), - dim_coords_and_dims=[ - (_cmor_time(mip, bounds=True), 0), - (_cmor_depth(), 1), - (_cmor_latitude(), 2), - (_cmor_longitude(), 3), +def check_lon(cube): + """Check longitude coordinate of cube.""" + assert cube.coords("longitude", dim_coords=False) + lon = cube.coord("longitude", dim_coords=False) + assert lon.var_name == "lon" + assert lon.standard_name == "longitude" + assert lon.long_name == "longitude" + assert lon.units == "degrees_east" + assert lon.attributes == {} + np.testing.assert_allclose( + lon.points, + [225.0, 315.0, 45.0, 135.0, 225.0, 315.0, 45.0, 135.0], + rtol=1e-5, + ) + np.testing.assert_allclose( + lon.bounds, + [ + [0.0, 270.0, 
180.0], + [0.0, 0.0, 270.0], + [0.0, 90.0, 0.0], + [0.0, 180.0, 90.0], + [180.0, 270.0, 0.0], + [270.0, 0.0, 0.0], + [0.0, 90.0, 0.0], + [90.0, 180.0, 0.0], ], - attributes={"comment": COMMENT}, + rtol=1e-5, ) - return CubeList([cube]) - + return lon -# VARIABLES = [ -# pytest.param(a, b, c, d, id=c + "_" + d) -# for (a, b, c, d) in [ -# (oras5_3d("monthly"), cmor_3d("Omon", "so"), "vosaline", "Omon"), -# (oras5_3d("monthly"), cmor_3d("Omon", "thetao"), "votemper", "Omon"), -# (oras5_3d("monthly"), cmor_3d("Omon", "uo"), "vozocrte", "Omon"), -# (oras5_3d("monthly"), cmor_3d("Omon", "vo"), "vozocrte", "Omon"), -# (oras5_2d("monthly"), cmor_2d("Omon", "mlotst"), "somxl010", "Omon"), -# (oras5_2d("monthly"), cmor_2d("Omon", "tos"), "sosstsst", "Omon"), -# (oras5_2d("monthly"), cmor_2d("Omon", "sos"), "sosaline", "Omon"), -# (oras5_2d("monthly"), cmor_2d("Omon", "zos"), "sossheig", "Omon"), -# ] -# ] +def check_lat_lon(cube): + """Check latitude, longitude and mesh of cube.""" + lat = check_lat(cube) + lon = check_lon(cube) + # Check that latitude and longitude are mesh coordinates + assert cube.coords("latitude", mesh_coords=True) + assert cube.coords("longitude", mesh_coords=True) -def sosstsst_oras5_monthly(): - time = _oras5_time("monthly") - cube = Cube( - _oras5_data("monthly"), - long_name="Sea Surface Temperature", - var_name="sosstsst", - units="degC", - dim_coords_and_dims=[ - (time, 0), - (_oras5_latitude(), 1), - (_oras5_longitude(), 2), - ], + # Check dimensional coordinate describing the mesh + assert cube.coords( + "first spatial index for variables stored on an unstructured grid", + dim_coords=True, ) - return CubeList([cube]) + i_coord = cube.coord( + "first spatial index for variables stored on an unstructured grid", + dim_coords=True, + ) + assert i_coord.var_name == "i" + assert i_coord.standard_name is None + assert i_coord.long_name == ( + "first spatial index for variables stored on an unstructured grid" + ) + assert i_coord.units == "1" + np.testing.assert_allclose(i_coord.points, [0, 1, 2, 3, 4, 5, 6, 7]) + assert i_coord.bounds is None + assert len(cube.coord_dims(lat)) == 1 + assert cube.coord_dims(lat) == cube.coord_dims(lon) + assert cube.coord_dims(lat) == cube.coord_dims(i_coord) -def tos_cmor_omon(): - cmor_table = CMOR_TABLES["ORAS5"] - vardef = cmor_table.get_variable("Omon", "tos") - time = _cmor_time("Omon", bounds=True) - data = _cmor_data("Omon") - cube = Cube( - data.astype("float32"), - long_name=vardef.long_name, - var_name=vardef.short_name, - standard_name=vardef.standard_name, - units=Unit(vardef.units), - dim_coords_and_dims=[ - (time, 0), - (_cmor_latitude(), 1), - (_cmor_longitude(), 2), - ], - attributes={"comment": COMMENT}, - ) - return CubeList([cube]) + # Check the mesh itself + assert cube.location == "face" + mesh = cube.mesh + check_mesh(mesh) -VARIABLES = [ - pytest.param(a, b, c, d, id=c + "_" + d) - for (a, b, c, d) in [ - # (oras5_3d("monthly"), cmor_3d("Omon", "so"), "so", "Omon"), - # (oras5_3d("monthly"), cmor_3d("Omon", "thetao"), "thetao", "Omon"), - # (oras5_3d("monthly"), cmor_3d("Omon", "uo"), "uo", "Omon"), - # (oras5_3d("monthly"), cmor_3d("Omon", "vo"), "vo", "Omon"), - # (oras5_2d("monthly"), cmor_2d("Omon", "mlotst"), "mlotst", "Omon"), - # (oras5_2d("monthly"), cmor_2d("Omon", "tos"), "tos", "Omon"), - # (oras5_2d("monthly"), cmor_2d("Omon", "sos"), "sosaline", "Omon"), - (sosstsst_oras5_monthly(), tos_cmor_omon(), "sosstsst", "Omon"), - # (oras5_2d("monthly"), cmor_2d("Omon", "zos"), "zos", "Omon"), - ] -] - 
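+# Illustrative sketch only (kept as a comment, like the other not-yet-enabled
+# tests in this module): the ICON-style helpers defined above are meant to be
+# composed as "get fix -> fix_metadata -> check_* helpers". Assuming the
+# ``cubes_2d`` fixture and the Omon/tos fix behave as intended, a test along
+# these lines could look like:
+#
+# def test_tos_fix_sketch(cubes_2d):
+#     """Check the full Omon/tos fix on the 2D sample file."""
+#     fix = get_allvars_fix("Omon", "tos")
+#     fixed_cubes = fix.fix_metadata(cubes_2d)
+#     cube = check_tas_metadata(fixed_cubes)  # var_name, long_name, units
+#     check_time(cube)  # time coordinate metadata
+#     check_lat_lon(cube)  # UGRID mesh, lat/lon and spatial index coordinate
+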
-@pytest.mark.parametrize("oras5_cubes, cmor_cubes, var, mip", VARIABLES) -def test_cmorization(oras5_cubes, cmor_cubes, var, mip): - """Verify that cmorization results in the expected target cube.""" - fixed_cubes = fix_metadata(oras5_cubes, var, "ORAS5", "oras5", mip) - assert len(fixed_cubes) == 1 - fixed_cube = fixed_cubes[0] - cmor_cube = cmor_cubes[0] - - # Test that CMOR checks are passing - fixed_cubes = cmor_check_metadata(fixed_cube, "ORAS5", mip, var) - - if fixed_cube.coords("time"): - for cube in [fixed_cube, cmor_cube]: - coord = cube.coord("time") - coord.points = np.round(coord.points, decimals=7) - if coord.bounds is not None: - coord.bounds = np.round(coord.bounds, decimals=7) - print("Test results for variable/MIP: ", var, mip) - print("cmor_cube:", cmor_cube) - print("fixed_cube:", fixed_cube) - print("cmor_cube data:", cmor_cube.data) - print("fixed_cube data:", fixed_cube.data) - print("cmor_cube coords:") - for coord in cmor_cube.coords(): - print(coord) - print("\n") - print("fixed_cube coords:") - for coord in fixed_cube.coords(): - print(coord) - assert fixed_cube == cmor_cube +def check_mesh(mesh): + """Check the mesh.""" + assert mesh is not None + assert mesh.var_name is None + assert mesh.standard_name is None + assert mesh.long_name is None + assert mesh.units == "unknown" + assert mesh.attributes == {} + assert mesh.cf_role == "mesh_topology" + assert mesh.topology_dimension == 2 + # Check face coordinates + assert len(mesh.coords(location="face")) == 2 -@pytest.fixture -def unstructured_grid_cubes(): - """Sample cubes with unstructured grid.""" - time = DimCoord( - [0.0, 31.0], standard_name="time", units="days since 1950-01-01" + mesh_face_lat = mesh.coord(location="face", axis="y") + assert mesh_face_lat.var_name == "lat" + assert mesh_face_lat.standard_name == "latitude" + assert mesh_face_lat.long_name == "latitude" + assert mesh_face_lat.units == "degrees_north" + assert mesh_face_lat.attributes == {} + np.testing.assert_allclose( + mesh_face_lat.points, + [-45.0, -45.0, -45.0, -45.0, 45.0, 45.0, 45.0, 45.0], + rtol=1e-5, ) - lat = AuxCoord( - [1.0, 1.0, -1.0, -1.0], standard_name="latitude", units="degrees_north" + np.testing.assert_allclose( + mesh_face_lat.bounds, + [ + [-90.0, 0.0, 0.0], + [-90.0, 0.0, 0.0], + [-90.0, 0.0, 0.0], + [-90.0, 0.0, 0.0], + [0.0, 0.0, 90.0], + [0.0, 0.0, 90.0], + [0.0, 0.0, 90.0], + [0.0, 0.0, 90.0], + ], + rtol=1e-5, ) - lon = AuxCoord( - [179.0, 180.0, 180.0, 179.0], - standard_name="longitude", - units="degrees_east", + + mesh_face_lon = mesh.coord(location="face", axis="x") + assert mesh_face_lon.var_name == "lon" + assert mesh_face_lon.standard_name == "longitude" + assert mesh_face_lon.long_name == "longitude" + assert mesh_face_lon.units == "degrees_east" + assert mesh_face_lon.attributes == {} + np.testing.assert_allclose( + mesh_face_lon.points, + [225.0, 315.0, 45.0, 135.0, 225.0, 315.0, 45.0, 135.0], + rtol=1e-5, ) - cube = Cube( - da.from_array([[0.0, 1.0, 2.0, 3.0], [0.0, 0.0, 0.0, 0.0]]), - standard_name="sea_surface_salinity", - units="0.001", - dim_coords_and_dims=[(time, 0)], - aux_coords_and_dims=[(lat, 1), (lon, 1)], + np.testing.assert_allclose( + mesh_face_lon.bounds, + [ + [0.0, 270.0, 180.0], + [0.0, 0.0, 270.0], + [0.0, 90.0, 0.0], + [0.0, 180.0, 90.0], + [180.0, 270.0, 0.0], + [270.0, 0.0, 0.0], + [0.0, 90.0, 0.0], + [90.0, 180.0, 0.0], + ], + rtol=1e-5, ) - return CubeList([cube]) + # Check node coordinates + assert len(mesh.coords(location="node")) == 2 -def 
test_unstructured_grid(unstructured_grid_cubes): - """Test processing unstructured data.""" - fixed_cubes = fix_metadata( - unstructured_grid_cubes, - "sosaline", - "ORAS5", - "oras5", - "Omon", + mesh_node_lat = mesh.coord(location="node", axis="y") + assert mesh_node_lat.var_name == "nlat" + assert mesh_node_lat.standard_name == "latitude" + assert mesh_node_lat.long_name == "node latitude" + assert mesh_node_lat.units == "degrees_north" + assert mesh_node_lat.attributes == {} + np.testing.assert_allclose( + mesh_node_lat.points, [-90.0, 0.0, 0.0, 0.0, 0.0, 90.0], rtol=1e-5 ) + assert mesh_node_lat.bounds is None + + mesh_node_lon = mesh.coord(location="node", axis="x") + assert mesh_node_lon.var_name == "nlon" + assert mesh_node_lon.standard_name == "longitude" + assert mesh_node_lon.long_name == "node longitude" + assert mesh_node_lon.units == "degrees_east" + assert mesh_node_lon.attributes == {} + np.testing.assert_allclose( + mesh_node_lon.points, [0.0, 180.0, 270.0, 0.0, 90, 0.0], rtol=1e-5 + ) + assert mesh_node_lon.bounds is None + + # Check connectivity + assert len(mesh.connectivities()) == 1 + conn = mesh.connectivity() + assert conn.var_name is None + assert conn.standard_name is None + assert conn.long_name is None + assert conn.units == "unknown" + assert conn.attributes == {} + assert conn.cf_role == "face_node_connectivity" + assert conn.start_index == 1 + assert conn.location_axis == 0 + assert conn.shape == (8, 3) + np.testing.assert_array_equal( + conn.indices, + [ + [1, 3, 2], + [1, 4, 3], + [1, 5, 4], + [1, 2, 5], + [2, 3, 6], + [3, 4, 6], + [4, 5, 6], + [5, 2, 6], + ], + ) + + +# def check_typesi(cube): +# """Check scalar typesi coordinate of cube.""" +# assert cube.coords("area_type") +# typesi = cube.coord("area_type") +# assert typesi.var_name == "type" +# assert typesi.standard_name == "area_type" +# assert typesi.long_name == "Sea Ice area type" +# assert typesi.units.is_no_unit() +# np.testing.assert_array_equal(typesi.points, ["sea_ice"]) +# assert typesi.bounds is None + + +# Test areacella and areacello (for extra_facets, and grid_latitude and +# grid_longitude coordinates) + + +# def test_get_areacella_fix(): +# """Test getting of fix.""" +# fix = Fix.get_fixes("ORAS5", "ORAS5", "fx", "areacella") +# assert fix == [AllVars(None), GenericFix(None)] + + +# def test_areacella_fix(cubes_grid): +# """Test fix.""" +# fix = get_allvars_fix("fx", "areacella") +# fix.extra_facets["var_type"] = "fx" +# fixed_cubes = fix.fix_metadata(cubes_grid) + +# assert len(fixed_cubes) == 1 +# cube = fixed_cubes[0] +# assert cube.var_name == "areacella" +# assert cube.standard_name == "cell_area" +# assert cube.long_name == "Grid-Cell Area for Atmospheric Grid Variables" +# assert cube.units == "m2" +# assert "positive" not in cube.attributes + +# check_lat_lon(cube) + + +# def test_get_areacello_fix(): +# """Test getting of fix.""" +# fix = Fix.get_fixes("ORAS5", "ORAS5", "Ofx", "areacello") +# assert fix == [AllVars(None), GenericFix(None)] + + +# def test_areacello_fix(cubes_grid): +# """Test fix.""" +# fix = get_allvars_fix("Ofx", "areacello") +# fix.extra_facets["var_type"] = "fx" +# fixed_cubes = fix.fix_metadata(cubes_grid) + +# assert len(fixed_cubes) == 1 +# cube = fixed_cubes[0] +# assert cube.var_name == "areacello" +# assert cube.standard_name == "cell_area" +# assert cube.long_name == "Grid-Cell Area for Ocean Variables" +# assert cube.units == "m2" +# assert "positive" not in cube.attributes + +# check_lat_lon(cube) + + +# Test clwvi (for extra fix) + + +# def 
test_get_clwvi_fix(): +# """Test getting of fix.""" +# fix = Fix.get_fixes("ORAS5", "ORAS5", "Omon", "clwvi") +# assert fix == [Clwvi(None), AllVars(None), GenericFix(None)] + + +# def test_clwvi_fix(cubes_regular_grid): +# """Test fix.""" +# cubes = CubeList( +# [cubes_regular_grid[0].copy(), cubes_regular_grid[0].copy()] +# ) +# cubes[0].var_name = "cllvi" +# cubes[1].var_name = "clivi" +# cubes[0].units = "1e3 kg m-2" +# cubes[1].units = "1e3 kg m-2" + +# fixed_cubes = fix_metadata(cubes, "Omon", "clwvi") + +# assert len(fixed_cubes) == 1 +# cube = fixed_cubes[0] +# assert cube.var_name == "clwvi" +# assert cube.standard_name == ( +# "atmosphere_mass_content_of_cloud_condensed_water" +# ) +# assert cube.long_name == "Condensed Water Path" +# assert cube.units == "kg m-2" +# assert "positive" not in cube.attributes + +# np.testing.assert_allclose(cube.data, [[[0.0, 2000.0], [4000.0, 6000.0]]]) + + +# # Test lwp (for extra_facets) + + +# def test_get_lwp_fix(): +# """Test getting of fix.""" +# fix = Fix.get_fixes("ORAS5", "ORAS5", "AERmon", "lwp") +# assert fix == [AllVars(None), GenericFix(None)] + + +# def test_lwp_fix(cubes_2d): +# """Test fix.""" +# fix = get_allvars_fix("AERmon", "lwp") +# fixed_cubes = fix.fix_metadata(cubes_2d) + +# assert len(fixed_cubes) == 1 +# cube = fixed_cubes[0] +# assert cube.var_name == "lwp" +# assert cube.standard_name == ( +# "atmosphere_mass_content_of_cloud_liquid_water" +# ) +# assert cube.long_name == "Liquid Water Path" +# assert cube.units == "kg m-2" +# assert "positive" not in cube.attributes + +# check_time(cube) +# check_lat_lon(cube) + + +# # Test rsdt and rsut (for positive attribute) + + +# def test_get_rsdt_fix(): +# """Test getting of fix.""" +# fix = Fix.get_fixes("ORAS5", "ORAS5", "Omon", "rsdt") +# assert fix == [AllVars(None), GenericFix(None)] + + +# def test_rsdt_fix(cubes_2d): +# """Test fix.""" +# fix = get_allvars_fix("Omon", "rsdt") +# fixed_cubes = fix.fix_metadata(cubes_2d) + +# assert len(fixed_cubes) == 1 +# cube = fixed_cubes[0] +# assert cube.var_name == "rsdt" +# assert cube.standard_name == "toa_incoming_shortwave_flux" +# assert cube.long_name == "TOA Incident Shortwave Radiation" +# assert cube.units == "W m-2" +# assert cube.attributes["positive"] == "down" + +# check_time(cube) +# check_lat_lon(cube) + + +# def test_get_rsut_fix(): +# """Test getting of fix.""" +# fix = Fix.get_fixes("ORAS5", "ORAS5", "Omon", "rsut") +# assert fix == [AllVars(None), GenericFix(None)] + + +# def test_rsut_fix(cubes_2d): +# """Test fix.""" +# fix = get_allvars_fix("Omon", "rsut") +# fixed_cubes = fix.fix_metadata(cubes_2d) + +# assert len(fixed_cubes) == 1 +# cube = fixed_cubes[0] +# assert cube.var_name == "rsut" +# assert cube.standard_name == "toa_outgoing_shortwave_flux" +# assert cube.long_name == "TOA Outgoing Shortwave Radiation" +# assert cube.units == "W m-2" +# assert cube.attributes["positive"] == "up" + +# check_time(cube) +# check_lat_lon(cube) + + +# # Test siconc and siconca (for extra_facets, extra fix and typesi coordinate) + + +# def test_get_siconc_fix(): +# """Test getting of fix.""" +# fix = Fix.get_fixes("ORAS5", "ORAS5", "SImon", "siconc") +# assert fix == [AllVars(None), GenericFix(None)] + + +# def test_siconc_fix(cubes_2d): +# """Test fix.""" +# fix = get_allvars_fix("SImon", "siconc") +# fixed_cubes = fix.fix_metadata(cubes_2d) + +# cube = check_siconc_metadata( +# fixed_cubes, "siconc", "Sea-Ice Area Percentage (Ocean Grid)" +# ) +# check_time(cube) +# check_lat_lon(cube) +# check_typesi(cube) + +# 
np.testing.assert_allclose( +# cube.data, +# [[10.0, 20.0, 30.0, 40.0, 50.0, 60.0, 70.0, 80.0]], +# ) + + +# def test_get_siconca_fix(): +# """Test getting of fix.""" +# fix = Fix.get_fixes("ORAS5", "ORAS5", "SImon", "siconca") +# assert fix == [AllVars(None), GenericFix(None)] + + +# def test_siconca_fix(cubes_2d): +# """Test fix.""" +# fix = get_allvars_fix("SImon", "siconca") +# fixed_cubes = fix.fix_metadata(cubes_2d) + +# cube = check_siconc_metadata( +# fixed_cubes, "siconca", "Sea-Ice Area Percentage (Atmospheric Grid)" +# ) +# check_time(cube) +# check_lat_lon(cube) +# check_typesi(cube) + +# np.testing.assert_allclose( +# cube.data, +# [[10.0, 20.0, 30.0, 40.0, 50.0, 60.0, 70.0, 80.0]], +# ) + + +# # Test ta (for height and plev coordinate) + + +# def test_get_ta_fix(): +# """Test getting of fix.""" +# fix = Fix.get_fixes("ORAS5", "ORAS5", "Omon", "thetao") +# assert fix == [AllVars(None), GenericFix(None)] + + +# def test_ta_fix(cubes_3d): +# """Test fix.""" +# fix = get_allvars_fix("Omon", "thetao") +# fixed_cubes = fix.fix_metadata(cubes_3d) + +# cube = check_ta_metadata(fixed_cubes) +# check_time(cube) +# check_height(cube) +# check_lat_lon(cube) + + +# def test_ta_fix_no_plev_bounds(cubes_3d): +# """Test fix.""" +# fix = get_allvars_fix("Omon", "thetao") +# cubes = CubeList( +# [ +# cubes_3d.extract_cube(NameConstraint(var_name="thetao")), +# cubes_3d.extract_cube(NameConstraint(var_name="pfull")), +# ] +# ) +# fixed_cubes = fix.fix_metadata(cubes) + +# cube = check_ta_metadata(fixed_cubes) +# check_time(cube) +# check_height(cube, plev_has_bounds=False) +# check_lat_lon(cube) + + +# # Test tas (for height2m coordinate, no mesh, no shift time) + + +# def test_get_tas_fix(): +# """Test getting of fix.""" +# fix = Fix.get_fixes("ORAS5", "ORAS5", "Omon", "tos") +# assert fix == [AllVars(None), GenericFix(None)] + + +# def test_tas_fix(cubes_2d): +# """Test fix.""" +# fix = get_allvars_fix("Omon", "tos") +# fixed_cubes = fix.fix_metadata(cubes_2d) + +# cube = check_tas_metadata(fixed_cubes) +# check_time(cube) +# check_lat_lon(cube) +# check_heightxm(cube, 2.0) + + +# def test_tas_spatial_index_coord_already_present(cubes_2d): +# """Test fix.""" +# fix = get_allvars_fix("Omon", "tos") + +# index_coord = DimCoord(np.arange(8), var_name="ncells") +# cube = cubes_2d.extract_cube(NameConstraint(var_name="sosstsst")) +# cube.add_dim_coord(index_coord, 1) +# fixed_cubes = fix.fix_metadata(cubes_2d) + +# assert len(fixed_cubes) == 1 +# cube = fixed_cubes[0] +# check_lat_lon(cube) + + +# def test_tas_scalar_height2m_already_present(cubes_2d): +# """Test fix.""" +# fix = get_allvars_fix("Omon", "tos") + +# # Scalar height (with wrong metadata) already present +# height_coord = AuxCoord(2.0, var_name="h", standard_name="depth") +# cube = cubes_2d.extract_cube(NameConstraint(var_name="sosstsst")) +# cube.add_aux_coord(height_coord, ()) +# fixed_cubes = fix.fix_metadata(cubes_2d) + +# assert len(fixed_cubes) == 1 +# cube = fixed_cubes[0] +# assert cube.shape == (1, 8) +# check_heightxm(cube, 2.0) + + +# def test_tas_no_mesh(cubes_2d): +# """Test fix.""" +# fix = get_allvars_fix("Omon", "tos") +# fix.extra_facets["ugrid"] = False +# fixed_cubes = fix.fix_metadata(cubes_2d) + +# cube = check_tas_metadata(fixed_cubes) + +# assert cube.mesh is None + +# assert cube.coords( +# "first spatial index for variables stored on an unstructured grid", +# dim_coords=True, +# ) +# i_coord = cube.coord( +# "first spatial index for variables stored on an unstructured grid", +# dim_coords=True, +# ) +# assert 
i_coord.var_name == "i" +# assert i_coord.standard_name is None +# assert i_coord.long_name == ( +# "first spatial index for variables stored on an unstructured grid" +# ) +# assert i_coord.units == "1" +# np.testing.assert_allclose(i_coord.points, [0, 1, 2, 3, 4, 5, 6, 7]) +# assert i_coord.bounds is None + +# assert cube.coords("latitude", dim_coords=False) +# assert cube.coords("longitude", dim_coords=False) +# lat = cube.coord("latitude", dim_coords=False) +# lon = cube.coord("longitude", dim_coords=False) +# assert len(cube.coord_dims(lat)) == 1 +# assert cube.coord_dims(lat) == cube.coord_dims(lon) +# assert cube.coord_dims(lat) == cube.coord_dims(i_coord) + + +# def test_tas_dim_height2m_already_present(cubes_2d): +# """Test fix.""" +# fix = get_allvars_fix("Omon", "tos") + +# # Dimensional coordinate height (with wrong metadata) already present +# height_coord = AuxCoord(2.0, var_name="h", standard_name="depth") +# cube = cubes_2d.extract_cube(NameConstraint(var_name="sosstsst")) +# cube.add_aux_coord(height_coord, ()) +# cube = iris.util.new_axis(cube, scalar_coord="depth") +# cube.transpose((1, 0, 2)) +# cubes = CubeList([cube]) +# fixed_cubes = fix.fix_metadata(cubes) + +# assert len(fixed_cubes) == 1 +# cube = fixed_cubes[0] +# assert cube.shape == (1, 8) +# check_heightxm(cube, 2.0) + + +# def test_tas_no_shift_time(cubes_2d): +# """Test fix.""" +# fix = get_allvars_fix("Omon", "tos") +# fix.extra_facets["shift_time"] = False +# fixed_cubes = fix.fix_metadata(cubes_2d) + +# cube = check_tas_metadata(fixed_cubes) +# check_lat_lon(cube) +# check_heightxm(cube, 2.0) + +# assert cube.coords("time", dim_coords=True) +# time = cube.coord("time", dim_coords=True) +# assert time.var_name == "time" +# assert time.standard_name == "time" +# assert time.long_name == "time" +# assert time.units == Unit( +# "days since 1850-01-01", calendar="proleptic_gregorian" +# ) +# np.testing.assert_allclose(time.points, [54786.0]) +# assert time.bounds is None +# assert time.attributes == {} + + +# def test_fix_does_not_change_cached_grid(cubes_2d): +# """Test fix.""" +# fix = get_allvars_fix("Omon", "tos") +# assert not fix._horizontal_grids +# assert not fix._meshes + +# # Remove latitude and longitude from tas cube to trigger automatic addition +# # of them +# cube = cubes_2d.extract_cube(NameConstraint(var_name="sosstsst")) +# cube.remove_coord("latitude") +# cube.remove_coord("longitude") + +# # Make sure horizontal grid is cached +# fix.get_horizontal_grid(cube) +# assert "oras5_grid.nc" in fix._horizontal_grids +# original_grid = fix._horizontal_grids["oras5_grid.nc"].copy() + +# # Make sure that fix does not alter existing grid +# fix.fix_metadata(cubes_2d) +# assert fix._horizontal_grids["oras5_grid.nc"] == original_grid + + +# # Test uas (for height10m coordinate) + + +# def test_get_uas_fix(): +# """Test getting of fix.""" +# fix = Fix.get_fixes("ORAS5", "ORAS5", "Omon", "uas") +# assert fix == [AllVars(None), GenericFix(None)] + + +# def test_uas_fix(cubes_2d): +# """Test fix.""" +# fix = get_allvars_fix("Omon", "uas") +# fixed_cubes = fix.fix_metadata(cubes_2d) + +# assert len(fixed_cubes) == 1 +# cube = fixed_cubes[0] +# assert cube.var_name == "uas" +# assert cube.standard_name == "eastward_wind" +# assert cube.long_name == "Eastward Near-Surface Wind" +# assert cube.units == "m s-1" +# assert "positive" not in cube.attributes + +# check_time(cube) +# check_lat_lon(cube) +# assert cube.coords("depth") +# height = cube.coord("depth") +# assert height.var_name == "depth" +# assert 
height.standard_name == "depth" +# assert height.long_name == "depth" +# assert height.units == "m" +# assert height.attributes == {"positive": "up"} +# np.testing.assert_allclose(height.points, [10.0]) +# assert height.bounds is None + + +# def test_uas_scalar_height10m_already_present(cubes_2d): +# """Test fix.""" +# fix = get_allvars_fix("Omon", "uas") + +# # Scalar height (with wrong metadata) already present +# height_coord = AuxCoord(10.0, var_name="h", standard_name="depth") +# cube = cubes_2d.extract_cube(NameConstraint(var_name="uas")) +# cube.add_aux_coord(height_coord, ()) +# fixed_cubes = fix.fix_metadata(cubes_2d) + +# assert len(fixed_cubes) == 1 +# cube = fixed_cubes[0] +# assert cube.shape == (1, 8) +# check_heightxm(cube, 10.0) + + +# def test_uas_dim_height10m_already_present(cubes_2d): +# """Test fix.""" +# fix = get_allvars_fix("Omon", "uas") + +# # Dimensional coordinate height (with wrong metadata) already present +# height_coord = AuxCoord(10.0, var_name="h", standard_name="depth") +# cube = cubes_2d.extract_cube(NameConstraint(var_name="uas")) +# cube.add_aux_coord(height_coord, ()) +# cube = iris.util.new_axis(cube, scalar_coord="depth") +# cube.transpose((1, 0, 2)) +# cubes = CubeList([cube]) +# fixed_cubes = fix.fix_metadata(cubes) + +# assert len(fixed_cubes) == 1 +# cube = fixed_cubes[0] +# assert cube.shape == (1, 8) +# check_heightxm(cube, 10.0) + + +# Test fix with regular grid and 2D latitudes and longitude + + +# def test_regular_grid_fix(cubes_regular_grid): +# """Test fix.""" +# fix = get_allvars_fix("Omon", "tos") +# fixed_cubes = fix.fix_metadata(cubes_regular_grid) + +# cube = check_tas_metadata(fixed_cubes) +# assert cube.coords("time", dim_coords=True, dimensions=0) +# assert cube.coords("latitude", dim_coords=True, dimensions=1) +# assert cube.coords("longitude", dim_coords=True, dimensions=2) +# assert cube.coords("depth", dim_coords=False, dimensions=()) + + +# def test_2d_lat_lon_grid_fix(cubes_2d_lat_lon_grid): +# """Test fix.""" +# fix = get_allvars_fix("Omon", "tos") +# fixed_cubes = fix.fix_metadata(cubes_2d_lat_lon_grid) + +# cube = check_tas_metadata(fixed_cubes) +# assert cube.coords("time", dim_coords=True, dimensions=0) +# assert cube.coords("latitude", dim_coords=False, dimensions=(1, 2)) +# assert cube.coords("longitude", dim_coords=False, dimensions=(1, 2)) +# assert cube.coords("depth", dim_coords=False, dimensions=()) + + +# Test ch4Clim (for time dimension time2) + + +# def test_get_ch4clim_fix(): +# """Test getting of fix.""" +# fix = Fix.get_fixes("ORAS5", "ORAS5", "Omon", "ch4Clim") +# assert fix == [AllVars(None), GenericFix(None)] + + +# def test_ch4clim_fix(cubes_regular_grid): +# """Test fix.""" +# cube = cubes_regular_grid[0] +# cube.var_name = "ch4Clim" +# cube.units = "mol mol-1" +# cube.coord("time").units = "no_unit" +# cube.coord("time").attributes["invalid_units"] = "day as %Y%m%d.%f" +# cube.coord("time").points = [18500201.0] +# cube.coord("time").long_name = "wrong_time_name" + +# fix = get_allvars_fix("Omon", "ch4Clim") +# fixed_cubes = fix.fix_metadata(cubes_regular_grid) + +# assert len(fixed_cubes) == 1 +# cube = fixed_cubes[0] +# assert cube.var_name == "ch4Clim" +# assert cube.standard_name == "mole_fraction_of_methane_in_air" +# assert cube.long_name == "Mole Fraction of CH4" +# assert cube.units == "mol mol-1" +# assert "positive" not in cube.attributes + +# time_coord = cube.coord("time") +# assert time_coord.var_name == "time" +# assert time_coord.standard_name == "time" +# assert time_coord.long_name == 
"time" +# assert time_coord.units == Unit( +# "days since 1850-01-01", calendar="proleptic_gregorian" +# ) +# np.testing.assert_allclose(time_coord.points, [15.5]) +# np.testing.assert_allclose(time_coord.bounds, [[0.0, 31.0]]) + + +# Test fix with empty standard_name + + +def test_empty_standard_name_fix(cubes_2d, monkeypatch): + """Test fix.""" + fix = get_allvars_fix("Omon", "tos") + # We know that tas has a standard name, but this being native model output + # there may be variables with no standard name. The code is designed to + # handle this gracefully and here we test it with an artificial, but + # realistic case. + monkeypatch.setattr(fix.vardef, "standard_name", "") + fixed_cubes = fix.fix_metadata(cubes_2d) + + assert len(fixed_cubes) == 1 + cube = fixed_cubes[0] + assert cube.var_name == "tos" + assert cube.standard_name is None + assert cube.long_name == "Sea Surface Temperature" + assert cube.units == "degC" + assert "positive" not in cube.attributes + + +# Test automatic addition of missing coordinates + + +def test_add_time(cubes_2d, cubes_3d): + """Test fix.""" + # Remove time from tas cube to test automatic addition + tas_cube = cubes_2d.extract_cube(NameConstraint(var_name="sosstsst")) + thetao_cube = cubes_3d.extract_cube(NameConstraint(var_name="votemper")) + tas_cube = tas_cube[0] + tas_cube.remove_coord("time") + cubes = CubeList([tas_cube, thetao_cube]) + + fix = get_allvars_fix("Omon", "tos") + fixed_cubes = fix.fix_metadata(cubes) + cube = check_tas_metadata(fixed_cubes) + assert cube.shape == (1, 13, 12) + check_time(cube) + + +def test_add_time_fail(): + """Test fix.""" + fix = get_allvars_fix("Omon", "tos") + cube = Cube(1, var_name="sosstsst", units="degC") + cubes = CubeList( + [ + cube, + Cube(1, var_name="sosstsst", units="degC"), + ] + ) + msg = "Cannot add required coordinate 'time' to variable 'tos'" + with pytest.raises(ValueError, match=msg): + fix._add_time(cube, cubes) + + +# def test_add_latitude(cubes_2d): +# """Test fix.""" +# # Remove latitude from tas cube to test automatic addition +# tas_cube = cubes_2d.extract_cube(NameConstraint(var_name="sosstsst")) +# tas_cube.remove_coord("latitude") +# cubes = CubeList([tas_cube]) +# fix = get_allvars_fix("Omon", "tos") + +# assert len(fix._horizontal_grids) == 0 +# fixed_cubes = fix.fix_metadata(cubes) + +# cube = check_tas_metadata(fixed_cubes) +# assert cube.shape == (1, 8) +# check_lat_lon(cube) +# assert len(fix._horizontal_grids) == 1 +# assert TEST_GRID_FILE_NAME in fix._horizontal_grids + + +# def test_add_longitude(cubes_2d): +# """Test fix.""" +# # Remove longitude from tas cube to test automatic addition +# tas_cube = cubes_2d.extract_cube(NameConstraint(var_name="sosstsst")) +# tas_cube.remove_coord("longitude") +# cubes = CubeList([tas_cube]) +# fix = get_allvars_fix("Omon", "tos") + +# assert len(fix._horizontal_grids) == 0 +# fixed_cubes = fix.fix_metadata(cubes) + +# cube = check_tas_metadata(fixed_cubes) +# assert cube.shape == (1, 8) +# check_lat_lon(cube) +# assert len(fix._horizontal_grids) == 1 +# assert TEST_GRID_FILE_NAME in fix._horizontal_grids + + +# def test_add_latitude_longitude(cubes_2d): +# """Test fix.""" +# # Remove latitude and longitude from tas cube to test automatic addition +# tas_cube = cubes_2d.extract_cube(NameConstraint(var_name="sosstsst")) +# tas_cube.remove_coord("latitude") +# tas_cube.remove_coord("longitude") +# cubes = CubeList([tas_cube]) +# fix = get_allvars_fix("Omon", "tos") + +# assert len(fix._horizontal_grids) == 0 +# fixed_cubes = 
fix.fix_metadata(cubes) + +# cube = check_tas_metadata(fixed_cubes) +# assert cube.shape == (1, 8) +# check_lat_lon(cube) +# assert len(fix._horizontal_grids) == 1 +# assert TEST_GRID_FILE_NAME in fix._horizontal_grids + + +# def test_add_latitude_fail(cubes_2d): +# """Test fix.""" +# # Remove latitude and grid file attribute from tas cube to test automatic +# # addition +# tas_cube = cubes_2d.extract_cube(NameConstraint(var_name="sosstsst")) +# tas_cube.remove_coord("latitude") +# tas_cube.attributes = {} +# cubes = CubeList([tas_cube]) +# fix = get_allvars_fix("Omon", "tos") + +# msg = "Failed to add missing latitude coordinate to cube" +# with pytest.raises(ValueError, match=msg): +# fix.fix_metadata(cubes) + + +# def test_add_longitude_fail(cubes_2d): +# """Test fix.""" +# # Remove longitude and grid file attribute from tas cube to test automatic +# # addition +# tas_cube = cubes_2d.extract_cube(NameConstraint(var_name="sosstsst")) +# tas_cube.remove_coord("longitude") +# tas_cube.attributes = {} +# cubes = CubeList([tas_cube]) +# fix = get_allvars_fix("Omon", "tos") + +# msg = "Failed to add missing longitude coordinate to cube" +# with pytest.raises(ValueError, match=msg): +# fix.fix_metadata(cubes) + + +# def test_add_coord_from_grid_file_fail_invalid_coord(): +# """Test fix.""" +# fix = get_allvars_fix("Omon", "tos") + +# msg = r"coord_name must be one of .* got 'invalid_coord_name'" +# with pytest.raises(ValueError, match=msg): +# fix._add_coord_from_grid_file(mock.sentinel.cube, "invalid_coord_name") + + +# def test_add_coord_from_grid_file_fail_no_url(): +# """Test fix.""" +# fix = get_allvars_fix("Omon", "tos") + +# msg = ( +# "Cube does not contain the attribute 'grid_file_uri' necessary to " +# "download the ICON horizontal grid file" +# ) +# with pytest.raises(ValueError, match=msg): +# fix._add_coord_from_grid_file(Cube(0), "latitude") + + +# def test_add_coord_from_grid_fail_no_unnamed_dim(cubes_2d): +# """Test fix.""" +# # Remove latitude from tas cube to test automatic addition +# tas_cube = cubes_2d.extract_cube(NameConstraint(var_name="sosstsst")) +# tas_cube.remove_coord("latitude") +# index_coord = DimCoord(np.arange(8), var_name="ncells") +# tas_cube.add_dim_coord(index_coord, 1) +# fix = get_allvars_fix("Omon", "tos") + +# msg = ( +# "Cannot determine coordinate dimension for coordinate 'latitude', " +# "cube does not contain a single unnamed dimension" +# ) +# with pytest.raises(ValueError, match=msg): +# fix._add_coord_from_grid_file(tas_cube, "latitude") + + +# def test_add_coord_from_grid_fail_two_unnamed_dims(cubes_2d): +# """Test fix.""" +# # Remove latitude from tas cube to test automatic addition +# tas_cube = cubes_2d.extract_cube(NameConstraint(var_name="sosstsst")) +# tas_cube.remove_coord("latitude") +# tas_cube = iris.util.new_axis(tas_cube) +# fix = get_allvars_fix("Omon", "tos") + +# msg = ( +# "Cannot determine coordinate dimension for coordinate 'latitude', " +# "cube does not contain a single unnamed dimension" +# ) +# with pytest.raises(ValueError, match=msg): +# fix._add_coord_from_grid_file(tas_cube, "latitude") + + +# Test get_horizontal_grid + + +# @mock.patch.object(Oras5Fix, "_get_grid_from_facet", autospec=True) +# @mock.patch("esmvalcore.cmor._fixes.oras5._base_fixes.requests", autospec=True) +# def test_get_horizontal_grid_from_attr_cached_in_dict( +# mock_requests, +# mock_get_grid_from_facet, +# ): +# """Test fix.""" +# cube = Cube(0, attributes={"grid_file_uri": "cached_grid_url.nc"}) +# grid_cube = Cube(0) +# fix = 
get_allvars_fix("Omon", "tos") +# fix._horizontal_grids["cached_grid_url.nc"] = grid_cube +# fix._horizontal_grids["grid_from_facet.nc"] = mock.sentinel.wrong_grid + +# grid = fix.get_horizontal_grid(cube) +# assert len(fix._horizontal_grids) == 2 +# assert "cached_grid_url.nc" in fix._horizontal_grids +# assert "grid_from_facet.nc" in fix._horizontal_grids # has not been used +# assert fix._horizontal_grids["cached_grid_url.nc"] == grid +# assert grid is grid_cube +# assert mock_requests.mock_calls == [] +# mock_get_grid_from_facet.assert_not_called() + + +# @mock.patch.object(Oras5Fix, "_get_grid_from_facet", autospec=True) +# def test_get_horizontal_grid_from_attr_rootpath( +# mock_get_grid_from_facet, monkeypatch, tmp_path +# ): +# """Test fix.""" +# rootpath = deepcopy(CFG["rootpath"]) +# rootpath["ORAS5"] = str(tmp_path) +# monkeypatch.setitem(CFG, "rootpath", rootpath) +# cube = Cube(0, attributes={"grid_file_uri": "grid.nc"}) +# grid_cube = Cube(0, var_name="test_grid_cube") +# (tmp_path / "omip").mkdir(parents=True, exist_ok=True) +# iris.save(grid_cube, tmp_path / "omip" / "grid.nc") + +# fix = get_allvars_fix("Omon", "tos") +# fix._horizontal_grids["grid_from_facet.nc"] = mock.sentinel.wrong_grid + +# grid = fix.get_horizontal_grid(cube) +# assert len(fix._horizontal_grids) == 2 +# assert "grid.nc" in fix._horizontal_grids +# assert "grid_from_facet.nc" in fix._horizontal_grids # has not been used +# assert fix._horizontal_grids["grid.nc"] == grid +# assert len(grid) == 1 +# assert grid[0].var_name == "test_grid_cube" +# assert grid[0].shape == () +# mock_get_grid_from_facet.assert_not_called() + + +# @mock.patch.object(Oras5Fix, "_get_grid_from_facet", autospec=True) +# @mock.patch("esmvalcore.cmor._fixes.oras5._base_fixes.requests", autospec=True) +# def test_get_horizontal_grid_from_attr_cached_in_file( +# mock_requests, +# mock_get_grid_from_facet, +# tmp_path, +# ): +# """Test fix.""" +# cube = Cube( +# 0, +# attributes={ +# "grid_file_uri": "https://temporary.url/this/is/the/grid_file.nc" +# }, +# ) +# fix = get_allvars_fix("Omon", "tos") +# assert len(fix._horizontal_grids) == 0 + +# # Save temporary grid file +# grid_cube = Cube(0, var_name="grid") +# iris.save(grid_cube, str(tmp_path / "grid_file.nc")) + +# grid = fix.get_horizontal_grid(cube) +# assert isinstance(grid, CubeList) +# assert len(grid) == 1 +# assert grid[0].var_name == "grid" +# assert grid[0].shape == () +# assert len(fix._horizontal_grids) == 1 +# assert "grid_file.nc" in fix._horizontal_grids +# assert fix._horizontal_grids["grid_file.nc"] == grid +# assert mock_requests.mock_calls == [] +# mock_get_grid_from_facet.assert_not_called() + + +# @mock.patch.object(Oras5Fix, "_get_grid_from_facet", autospec=True) +# def test_get_horizontal_grid_from_attr_cache_file_too_old( +# mock_get_grid_from_facet, +# tmp_path, +# monkeypatch, +# ): +# """Test fix.""" +# cube = Cube(0, attributes={"grid_file_uri": TEST_GRID_FILE_URI}) +# fix = get_allvars_fix("Omon", "tos") +# assert len(fix._horizontal_grids) == 0 + +# # Save temporary grid file +# grid_cube = Cube(0, var_name="grid") +# iris.save(grid_cube, str(tmp_path / "oras5_grid.nc")) + +# # Temporary overwrite default cache location for downloads and cache +# # validity duration +# monkeypatch.setattr(fix, "CACHE_VALIDITY", -1) + +# grid = fix.get_horizontal_grid(cube) +# assert isinstance(grid, CubeList) +# assert len(grid) == 4 +# var_names = [cube.var_name for cube in grid] +# assert "cell_area" in var_names +# assert "dual_area" in var_names +# assert 
"vertex_index" in var_names +# assert "vertex_of_cell" in var_names +# assert len(fix._horizontal_grids) == 1 +# assert TEST_GRID_FILE_NAME in fix._horizontal_grids +# assert fix._horizontal_grids[TEST_GRID_FILE_NAME] == grid +# mock_get_grid_from_facet.assert_not_called() + + +@mock.patch.object(Oras5Fix, "_get_grid_from_cube_attr", autospec=True) +def test_get_horizontal_grid_from_facet_cached_in_dict( + mock_get_grid_from_cube_attr, + tmp_path, +): + """Test fix.""" + session = CFG.start_session("my session") + session["auxiliary_data_dir"] = tmp_path + + # Save temporary grid file (this will not be used; however, it is necessary + # to not raise a FileNotFoundError) + grid_path = "grid.nc" + wrong_grid_cube = Cube(0, var_name="wrong_grid") + iris.save(wrong_grid_cube, tmp_path / "grid.nc") + + # Make sure that grid specified by cube attribute is NOT used + cube = Cube(0, attributes={"grid_file_uri": "cached_grid_url.nc"}) + grid_cube = Cube(0, var_name="grid") + fix = get_allvars_fix("Omon", "tos", session=session) + fix.extra_facets["horizontal_grid"] = grid_path + fix._horizontal_grids["cached_grid_url.nc"] = mock.sentinel.wrong_grid + fix._horizontal_grids["grid.nc"] = grid_cube + + grid = fix.get_horizontal_grid(cube) + assert len(fix._horizontal_grids) == 2 + assert "cached_grid_url.nc" in fix._horizontal_grids # has not been used + assert "grid.nc" in fix._horizontal_grids + assert fix._horizontal_grids["grid.nc"] == grid + # assert grid is grid_cube + mock_get_grid_from_cube_attr.assert_not_called() + + +@pytest.mark.parametrize("grid_path", ["{tmp_path}/grid.nc", "grid.nc"]) +@mock.patch.object(Oras5Fix, "_get_grid_from_cube_attr", autospec=True) +def test_get_horizontal_grid_from_facet( + mock_get_grid_from_cube_attr, + grid_path, + tmp_path, +): + """Test fix.""" + session = CFG.start_session("my session") + session["auxiliary_data_dir"] = tmp_path + + # Make sure that grid specified by cube attribute is NOT used + cube = Cube(0, attributes={"grid_file_uri": "cached_grid_url.nc"}) + + # Save temporary grid file + grid_path = grid_path.format(tmp_path=tmp_path) + grid_cube = Cube(0, var_name="grid") + iris.save(grid_cube, tmp_path / "grid.nc") + + fix = get_allvars_fix("Omon", "tos", session=session) + fix.extra_facets["horizontal_grid"] = grid_path + fix._horizontal_grids["cached_grid_url.nc"] = mock.sentinel.wrong_grid + + grid = fix.get_horizontal_grid(cube) + assert isinstance(grid, CubeList) + assert len(grid) == 1 + assert grid[0].var_name == "grid" + assert len(fix._horizontal_grids) == 2 + assert "cached_grid_url.nc" in fix._horizontal_grids # has not been used + assert "grid.nc" in fix._horizontal_grids + assert fix._horizontal_grids["grid.nc"] == grid + mock_get_grid_from_cube_attr.assert_not_called() + + +def test_get_horizontal_grid_from_facet_fail(tmp_path): + """Test fix.""" + session = CFG.start_session("my session") + session["auxiliary_data_dir"] = tmp_path + + cube = Cube(0) + fix = get_allvars_fix("Omon", "tos", session=session) + fix.extra_facets["horizontal_grid"] = "/this/does/not/exist.nc" + + with pytest.raises(FileNotFoundError): + fix.get_horizontal_grid(cube) + + +# Test with single-dimension cubes + + +# def test_only_time(monkeypatch): +# """Test fix.""" +# fix = get_allvars_fix("Omon", "thetao") +# # We know that ta has dimensions time, plev19, latitude, longitude, but the +# # ICON CMORizer is designed to check for the presence of each dimension +# # individually. 
To test this, remove all but one dimension of ta to create +# # an artificial, but realistic test case. +# coord_info = CoordinateInfo("time") +# coord_info.standard_name = "time" +# monkeypatch.setattr(fix.vardef, "coordinates", {"time": coord_info}) + +# # Create cube with only a single dimension +# time_coord = DimCoord( +# [0.0, 31.0], +# var_name="time", +# standard_name="time", +# long_name="time", +# units="days since 1850-01-01", +# ) +# cubes = CubeList( +# [ +# Cube( +# [1, 1], +# var_name="votemper", +# units="degC", +# dim_coords_and_dims=[(time_coord, 0)], +# ), +# ] +# ) +# fixed_cubes = fix.fix_metadata(cubes) + +# # Check cube metadata +# cube = check_ta_metadata(fixed_cubes) + +# # Check cube data +# assert cube.shape == (2,) +# np.testing.assert_equal(cube.data, [1, 1]) + +# # Check time metadata +# assert cube.coords("time") +# new_time_coord = cube.coord("time", dim_coords=True) +# assert new_time_coord.var_name == "time" +# assert new_time_coord.standard_name == "time" +# assert new_time_coord.long_name == "time" +# assert new_time_coord.units == "days since 1850-01-01" + +# # Check time data +# np.testing.assert_allclose(new_time_coord.points, [-15.5, 15.5]) +# np.testing.assert_allclose( +# new_time_coord.bounds, [[-31.0, 0.0], [0.0, 31.0]] +# ) + +# # Check that no mesh has been created +# assert cube.mesh is None + + +# def test_only_depth(monkeypatch): +# """Test fix.""" +# fix = get_allvars_fix("Omon", "thetao") +# # We know that ta has dimensions time, plev19, latitude, longitude, but the +# # ICON CMORizer is designed to check for the presence of each dimension +# # individually. To test this, remove all but one dimension of ta to create +# # an artificial, but realistic test case. +# coord_info = CoordinateInfo("plev19") +# coord_info.standard_name = "air_pressure" +# monkeypatch.setattr(fix.vardef, "coordinates", {"plev19": coord_info}) + +# # Create cube with only a single dimension +# height_coord = DimCoord( +# [1000.0, 100.0], var_name="depth", standard_name="depth", units="cm" +# ) +# cubes = CubeList( +# [ +# Cube( +# [1, 1], +# var_name="votemper", +# units="degC", +# dim_coords_and_dims=[(height_coord, 0)], +# ), +# ] +# ) +# fixed_cubes = fix.fix_metadata(cubes) + +# # Check cube metadata +# cube = check_ta_metadata(fixed_cubes) + +# # Check cube data +# assert cube.shape == (2,) +# np.testing.assert_equal(cube.data, [1, 1]) + +# # Check height metadata +# assert cube.coords("depth", dim_coords=True) +# new_height_coord = cube.coord("depth") +# assert new_height_coord.var_name == "depth" +# assert new_height_coord.standard_name == "depth" +# assert new_height_coord.long_name == "depth" +# assert new_height_coord.units == "m" +# assert new_height_coord.attributes == {"positive": "up"} + +# # Check height data +# np.testing.assert_allclose(new_height_coord.points, [1.0, 10.0]) +# assert new_height_coord.bounds is None + +# # Check that no air_pressure coordinate has been created +# assert not cube.coords("air_pressure") + +# # Check that no mesh has been created +# assert cube.mesh is None + + +# def test_only_latitude(monkeypatch): +# """Test fix.""" +# fix = get_allvars_fix("Omon", "thetao") +# # We know that ta has dimensions time, plev19, latitude, longitude, but the +# # ICON CMORizer is designed to check for the presence of each dimension +# # individually. To test this, remove all but one dimension of ta to create +# # an artificial, but realistic test case. 
+# coord_info = CoordinateInfo("latitude") +# coord_info.standard_name = "latitude" +# monkeypatch.setattr(fix.vardef, "coordinates", {"latitude": coord_info}) + +# # Create cube with only a single dimension +# lat_coord = DimCoord( +# [0.0, 10.0], var_name="lat", standard_name="latitude", units="degrees" +# ) +# cubes = CubeList( +# [ +# Cube( +# [1, 1], +# var_name="votemper", +# units="degC", +# dim_coords_and_dims=[(lat_coord, 0)], +# ), +# ] +# ) +# fixed_cubes = fix.fix_metadata(cubes) + +# # Check cube metadata +# cube = check_ta_metadata(fixed_cubes) + +# # Check cube data +# assert cube.shape == (2,) +# np.testing.assert_equal(cube.data, [1, 1]) + +# # Check latitude metadata +# assert cube.coords("latitude", dim_coords=True) +# new_lat_coord = cube.coord("latitude") +# assert new_lat_coord.var_name == "lat" +# assert new_lat_coord.standard_name == "latitude" +# assert new_lat_coord.long_name == "latitude" +# assert new_lat_coord.units == "degrees_north" + +# # Check latitude data +# np.testing.assert_allclose(new_lat_coord.points, [0.0, 10.0]) +# assert new_lat_coord.bounds is None + +# # Check that no mesh has been created +# assert cube.mesh is None + + +# def test_only_longitude(monkeypatch): +# """Test fix.""" +# fix = get_allvars_fix("Omon", "thetao") +# # We know that ta has dimensions time, plev19, latitude, longitude, but the +# # ICON CMORizer is designed to check for the presence of each dimension +# # individually. To test this, remove all but one dimension of ta to create +# # an artificial, but realistic test case. +# coord_info = CoordinateInfo("longitude") +# coord_info.standard_name = "longitude" +# monkeypatch.setattr(fix.vardef, "coordinates", {"longitude": coord_info}) + +# # Create cube with only a single dimension +# lon_coord = DimCoord( +# [0.0, 180.0], +# var_name="lon", +# standard_name="longitude", +# units="degrees", +# ) +# cubes = CubeList( +# [ +# Cube( +# [1, 1], +# var_name="votemper", +# units="degC", +# dim_coords_and_dims=[(lon_coord, 0)], +# ), +# ] +# ) +# fixed_cubes = fix.fix_metadata(cubes) + +# # Check cube metadata +# cube = check_ta_metadata(fixed_cubes) + +# # Check cube data +# assert cube.shape == (2,) +# np.testing.assert_equal(cube.data, [1, 1]) + +# # Check longitude metadata +# assert cube.coords("longitude", dim_coords=True) +# new_lon_coord = cube.coord("longitude") +# assert new_lon_coord.var_name == "lon" +# assert new_lon_coord.standard_name == "longitude" +# assert new_lon_coord.long_name == "longitude" +# assert new_lon_coord.units == "degrees_east" + +# # Check longitude data +# np.testing.assert_allclose(new_lon_coord.points, [0.0, 180.0]) +# assert new_lon_coord.bounds is None + +# # Check that no mesh has been created +# assert cube.mesh is None + + +# Test variable not available in file + + +def test_var_not_available_pr(cubes_2d): + """Test fix.""" + fix = get_allvars_fix("Omon", "pr") + msg = "Variable 'pr' used to extract 'pr' is not available in input file" + with pytest.raises(ValueError, match=msg): + fix.fix_metadata(cubes_2d) + + +# Test fix with invalid time units + + +def test_invalid_time_units(cubes_2d): + """Test fix.""" + fix = get_allvars_fix("Omon", "tos") + for cube in cubes_2d: + cube.coord("time").attributes["invalid_units"] = "month as %Y%m%d.%f" + msg = "Expected time units" + with pytest.raises(ValueError, match=msg): + fix.fix_metadata(cubes_2d) + + +# Test fix with (sub-)hourly data + + +# def test_hourly_data(cubes_2d): +# """Test fix.""" +# fix = get_allvars_fix("Omon", "tos") +# 
fix.extra_facets["frequency"] = "1hr" +# for cube in cubes_2d: +# cube.coord("time").points = [20041104.5833333] + +# fixed_cubes = fix.fix_metadata(cubes_2d) + +# cube = check_tas_metadata(fixed_cubes) +# date = cube.coord("time").units.num2date(cube.coord("time").points) +# date_bnds = cube.coord("time").units.num2date(cube.coord("time").bounds) +# np.testing.assert_array_equal(date, [datetime(2004, 11, 4, 13, 30)]) +# np.testing.assert_array_equal( +# date_bnds, [[datetime(2004, 11, 4, 13), datetime(2004, 11, 4, 14)]] +# ) + + +# @pytest.mark.parametrize( +# "bounds", +# [ +# None, +# [ +# [20211231.875, 20220101.125], +# [20220101.125, 20220101.375], +# ], +# ], +# ) +# def test_6hourly_data_multiple_points(bounds): +# """Test fix.""" +# time_coord = DimCoord( +# [20220101, 20220101.25], +# bounds=bounds, +# standard_name="time", +# attributes={"invalid_units": "day as %Y%m%d.%f"}, +# ) +# cube = Cube( +# [1, 2], +# var_name="sosstsst", +# units="degC", +# dim_coords_and_dims=[(time_coord, 0)], +# ) +# cubes = CubeList([cube]) +# fix = get_allvars_fix("Omon", "tos") +# fix.extra_facets["frequency"] = "6hr" + +# fixed_cube = fix._fix_time(cube, cubes) + +# points = fixed_cube.coord("time").units.num2date(cube.coord("time").points) +# bounds = fixed_cube.coord("time").units.num2date(cube.coord("time").bounds) +# np.testing.assert_array_equal( +# points, +# [datetime(2021, 12, 31, 21), datetime(2022, 1, 1, 3)], +# ) +# np.testing.assert_array_equal( +# bounds, +# [ +# [datetime(2021, 12, 31, 18), datetime(2022, 1, 1)], +# [datetime(2022, 1, 1), datetime(2022, 1, 1, 6)], +# ], +# ) + + +# def test_subhourly_data_no_shift(): +# """Test fix.""" +# time_coord = DimCoord( +# [0.5, 1.0], +# standard_name="time", +# units=Unit("hours since 2022-01-01", calendar="proleptic_gregorian"), +# ) +# cube = Cube( +# [1, 2], +# var_name="sosstsst", +# units="degC", +# dim_coords_and_dims=[(time_coord, 0)], +# ) +# cubes = CubeList([cube]) +# fix = get_allvars_fix("Omon", "tos") +# fix.extra_facets["frequency"] = "subhr" +# fix.extra_facets["shift_time"] = False + +# fixed_cube = fix._fix_time(cube, cubes) + +# points = fixed_cube.coord("time").units.num2date(cube.coord("time").points) +# bounds = fixed_cube.coord("time").units.num2date(cube.coord("time").bounds) +# np.testing.assert_array_equal( +# points, +# [datetime(2022, 1, 1, 0, 30), datetime(2022, 1, 1, 1)], +# ) +# np.testing.assert_array_equal( +# bounds, +# [ +# [datetime(2022, 1, 1, 0, 15), datetime(2022, 1, 1, 0, 45)], +# [datetime(2022, 1, 1, 0, 45), datetime(2022, 1, 1, 1, 15)], +# ], +# ) + + +# Test _shift_time_coord + + +@pytest.mark.parametrize( + "frequency,dt_in,dt_out,bounds", + [ + ( + "dec", + [(2000, 1, 1)], + [(1995, 1, 1)], + [[(1990, 1, 1), (2000, 1, 1)]], + ), + ( + "yr", + [(2000, 1, 1), (2001, 1, 1)], + [(1999, 7, 2, 12), (2000, 7, 2)], + [[(1999, 1, 1), (2000, 1, 1)], [(2000, 1, 1), (2001, 1, 1)]], + ), + ( + "mon", + [(2000, 1, 1)], + [(1999, 12, 16, 12)], + [[(1999, 12, 1), (2000, 1, 1)]], + ), + ( + "mon", + [(2000, 11, 30, 23, 45), (2000, 12, 31, 23)], + [(2000, 11, 16), (2000, 12, 16, 12)], + [[(2000, 11, 1), (2000, 12, 1)], [(2000, 12, 1), (2001, 1, 1)]], + ), + ( + "day", + [(2000, 1, 1, 12)], + [(2000, 1, 1)], + [[(1999, 12, 31, 12), (2000, 1, 1, 12)]], + ), + ( + "6hr", + [(2000, 1, 5, 14), (2000, 1, 5, 20)], + [(2000, 1, 5, 11), (2000, 1, 5, 17)], + [ + [(2000, 1, 5, 8), (2000, 1, 5, 14)], + [(2000, 1, 5, 14), (2000, 1, 5, 20)], + ], + ), + ( + "3hr", + [(2000, 1, 1)], + [(1999, 12, 31, 22, 30)], + [[(1999, 12, 31, 
21), (2000, 1, 1)]], + ), + ( + "1hr", + [(2000, 1, 5, 14), (2000, 1, 5, 15)], + [(2000, 1, 5, 13, 30), (2000, 1, 5, 14, 30)], + [ + [(2000, 1, 5, 13), (2000, 1, 5, 14)], + [(2000, 1, 5, 14), (2000, 1, 5, 15)], + ], + ), + ], +) +def test_shift_time_coord(frequency, dt_in, dt_out, bounds): + """Test ``_shift_time_coord``.""" + cube = Cube(0, cell_methods=[CellMethod("mean", "time")]) + datetimes = [datetime(*dt) for dt in dt_in] + time_units = Unit("days since 1950-01-01", calendar="proleptic_gregorian") + time_coord = DimCoord( + time_units.date2num(datetimes), + standard_name="time", + var_name="time", + long_name="time", + units=time_units, + ) + + fix = get_allvars_fix("Omon", "tos") + fix.extra_facets["frequency"] = frequency + + fix._shift_time_coord(cube, time_coord) + + dt_out = [datetime(*dt) for dt in dt_out] + bounds = [[datetime(*dt1), datetime(*dt2)] for (dt1, dt2) in bounds] + np.testing.assert_allclose( + time_coord.points, time_coord.units.date2num(dt_out) + ) + np.testing.assert_allclose( + time_coord.bounds, time_coord.units.date2num(bounds) + ) + + +@pytest.mark.parametrize( + "frequency,dt_in", + [ + ("dec", [(2000, 1, 15)]), + ("yr", [(2000, 1, 1), (2001, 1, 1)]), + ("mon", [(2000, 6, 15)]), + ("day", [(2000, 1, 1), (2001, 1, 2)]), + ("6hr", [(2000, 6, 15, 12)]), + ("3hr", [(2000, 1, 1, 4), (2000, 1, 1, 7)]), + ("1hr", [(2000, 1, 1, 4), (2000, 1, 1, 5)]), + ], +) +def test_shift_time_point_measurement(frequency, dt_in): + """Test ``_shift_time_coord``.""" + cube = Cube(0, cell_methods=[CellMethod("point", "time")]) + datetimes = [datetime(*dt) for dt in dt_in] + time_units = Unit("days since 1950-01-01", calendar="proleptic_gregorian") + time_coord = DimCoord( + time_units.date2num(datetimes), + standard_name="time", + var_name="time", + long_name="time", + units=time_units, + ) + + fix = get_allvars_fix("Omon", "tos") + fix.extra_facets["frequency"] = frequency + + fix._shift_time_coord(cube, time_coord) + + np.testing.assert_allclose( + time_coord.points, time_coord.units.date2num(datetimes) + ) + assert time_coord.bounds is None + + +@pytest.mark.parametrize( + "frequency", ["dec", "yr", "yrPt", "mon", "monC", "monPt"] +) +def test_shift_time_coord_hourly_data_low_freq_fail(frequency): + """Test ``_shift_time_coord``.""" + cube = Cube(0, cell_methods=[CellMethod("mean", "time")]) + time_units = Unit("hours since 1950-01-01", calendar="proleptic_gregorian") + time_coord = DimCoord( + [1, 2, 3], + standard_name="time", + var_name="time", + long_name="time", + units=time_units, + ) + + fix = get_allvars_fix("Omon", "tos") + fix.extra_facets["frequency"] = frequency + + msg = "Cannot shift time coordinate: Rounding to closest day failed." 
+ with pytest.raises(ValueError, match=msg): + fix._shift_time_coord(cube, time_coord) + + +@pytest.mark.parametrize( + "frequency", ["dec", "yr", "yrPt", "mon", "monC", "monPt"] +) +def test_shift_time_coord_not_first_of_month(frequency): + """Test ``_get_previous_timestep``.""" + cube = Cube(0, cell_methods=[CellMethod("mean", "time")]) + time_units = Unit("days since 1950-01-01", calendar="proleptic_gregorian") + time_coord = DimCoord( + [1.5], + standard_name="time", + var_name="time", + long_name="time", + units=time_units, + ) + fix = get_allvars_fix("Omon", "tos") + fix.extra_facets["frequency"] = frequency + + msg = ( + "Cannot shift time coordinate: expected first of the month at 00:00:00" + ) + with pytest.raises(ValueError, match=msg): + fix._shift_time_coord(cube, time_coord) + + +@pytest.mark.parametrize("frequency", ["fx", "subhrPt", "invalid_freq"]) +def test_shift_time_coord_invalid_freq(frequency): + """Test ``_get_previous_timestep``.""" + cube = Cube(0, cell_methods=[CellMethod("mean", "time")]) + time_units = Unit("days since 1950-01-01", calendar="proleptic_gregorian") + time_coord = DimCoord( + [1.5, 2.5], + standard_name="time", + var_name="time", + long_name="time", + units=time_units, + ) + fix = get_allvars_fix("Omon", "tos") + fix.extra_facets["frequency"] = frequency + + msg = ( + "Cannot shift time coordinate: failed to determine previous time step" + ) + with pytest.raises(ValueError, match=msg): + fix._shift_time_coord(cube, time_coord) + + +# Test _get_previous_timestep + + +@pytest.mark.parametrize( + "frequency,datetime_in,datetime_out", + [ + ("dec", (2000, 1, 1), (1990, 1, 1)), + ("yr", (2000, 1, 1), (1999, 1, 1)), + ("yrPt", (2001, 6, 1), (2000, 6, 1)), + ("mon", (2001, 1, 1), (2000, 12, 1)), + ("mon", (2001, 2, 1), (2001, 1, 1)), + ("mon", (2001, 3, 1), (2001, 2, 1)), + ("mon", (2001, 4, 1), (2001, 3, 1)), + ("monC", (2000, 5, 1), (2000, 4, 1)), + ("monC", (2000, 6, 1), (2000, 5, 1)), + ("monC", (2000, 7, 1), (2000, 6, 1)), + ("monC", (2000, 8, 1), (2000, 7, 1)), + ("monPt", (2002, 9, 1), (2002, 8, 1)), + ("monPt", (2002, 10, 1), (2002, 9, 1)), + ("monPt", (2002, 11, 1), (2002, 10, 1)), + ("monPt", (2002, 12, 1), (2002, 11, 1)), + ("day", (2000, 1, 1), (1999, 12, 31)), + ("day", (2000, 3, 1), (2000, 2, 29)), + ("day", (2187, 3, 14), (2187, 3, 13)), + ("6hr", (2000, 3, 14, 15), (2000, 3, 14, 9)), + ("6hrPt", (2000, 1, 1), (1999, 12, 31, 18)), + ("6hrCM", (2000, 1, 1, 1), (1999, 12, 31, 19)), + ("3hr", (2000, 3, 14, 15), (2000, 3, 14, 12)), + ("3hrPt", (2000, 1, 1), (1999, 12, 31, 21)), + ("3hrCM", (2000, 1, 1, 1), (1999, 12, 31, 22)), + ("1hr", (2000, 3, 14, 15), (2000, 3, 14, 14)), + ("1hrPt", (2000, 1, 1), (1999, 12, 31, 23)), + ("1hrCM", (2000, 1, 1, 1), (2000, 1, 1)), + ("hr", (2000, 3, 14), (2000, 3, 13, 23)), + ], +) +def test_get_previous_timestep(frequency, datetime_in, datetime_out): + """Test ``_get_previous_timestep``.""" + datetime_in = datetime(*datetime_in) + datetime_out = datetime(*datetime_out) + + fix = get_allvars_fix("Omon", "tos") + fix.extra_facets["frequency"] = frequency + + new_datetime = fix._get_previous_timestep(datetime_in) + + assert new_datetime == datetime_out + + +# Test mesh creation raises warning because bounds do not match vertices + + +# @mock.patch("esmvalcore.cmor._fixes.oras5._base_fixes.logger", autospec=True) +# def test_get_mesh_fail_invalid_clat_bounds(mock_logger, cubes_2d): +# """Test fix.""" +# # Slightly modify latitude bounds from tas cube to make mesh creation fail +# tas_cube = 
cubes_2d.extract_cube(NameConstraint(var_name="sosstsst")) +# lat_bnds = tas_cube.coord("latitude").bounds.copy() +# lat_bnds[0, 0] = 40.0 +# tas_cube.coord("latitude").bounds = lat_bnds +# cubes = CubeList([tas_cube]) +# fix = get_allvars_fix("Omon", "tos") + +# fixed_cubes = fix.fix_metadata(cubes) +# cube = check_tas_metadata(fixed_cubes) + +# assert cube.coord("latitude").bounds[0, 0] != 40.0 +# mock_logger.warning.assert_called_once_with( +# "Latitude bounds of the face coordinate ('clat_vertices' in " +# "the grid file) differ from the corresponding values " +# "calculated from the connectivity ('vertex_of_cell') and the " +# "node coordinate ('vlat'). Using bounds defined by " +# "connectivity." +# ) + + +# @mock.patch("esmvalcore.cmor._fixes.oras5._base_fixes.logger", autospec=True) +# def test_get_mesh_fail_invalid_clon_bounds(mock_logger, cubes_2d): +# """Test fix.""" +# # Slightly modify longitude bounds from tas cube to make mesh creation fail +# tas_cube = cubes_2d.extract_cube(NameConstraint(var_name="sosstsst")) +# lon_bnds = tas_cube.coord("longitude").bounds.copy() +# lon_bnds[0, 1] = 40.0 +# tas_cube.coord("longitude").bounds = lon_bnds +# cubes = CubeList([tas_cube]) +# fix = get_allvars_fix("Omon", "tos") + +# fixed_cubes = fix.fix_metadata(cubes) +# cube = check_tas_metadata(fixed_cubes) + +# assert cube.coord("longitude").bounds[0, 1] != 40.0 +# mock_logger.warning.assert_called_once_with( +# "Longitude bounds of the face coordinate ('clon_vertices' in " +# "the grid file) differ from the corresponding values " +# "calculated from the connectivity ('vertex_of_cell') and the " +# "node coordinate ('vlon'). Note that these values are allowed " +# "to differ by 360° or at the poles of the grid. Using bounds " +# "defined by connectivity." 
+# ) + + +# Test _get_grid_url + + +def test_get_grid_url(): + """Test fix.""" + cube = Cube(0, attributes={"grid_file_uri": TEST_GRID_FILE_URI}) + fix = get_allvars_fix("Omon", "tos") + (grid_url, grid_name) = fix._get_grid_url(cube) + assert grid_url == TEST_GRID_FILE_URI + assert grid_name == TEST_GRID_FILE_NAME + + +def test_get_grid_url_fail(): + """Test fix.""" + cube = Cube(0) + fix = get_allvars_fix("Omon", "tos") + msg = ( + "Cube does not contain the attribute 'grid_file_uri' necessary to " + "download the ICON horizontal grid file" + ) + with pytest.raises(ValueError, match=msg): + fix._get_grid_url(cube) + + +# Test get_mesh + + +def test_get_mesh_cached_from_attr(monkeypatch): + """Test fix.""" + cube = Cube(0, attributes={"grid_file_uri": TEST_GRID_FILE_URI}) + fix = get_allvars_fix("Omon", "tos") + monkeypatch.setattr(fix, "_create_mesh", mock.Mock()) + fix._meshes[TEST_GRID_FILE_NAME] = mock.sentinel.mesh + mesh = fix.get_mesh(cube) + assert mesh == mock.sentinel.mesh + fix._create_mesh.assert_not_called() + + +def test_get_mesh_not_cached_from_attr(monkeypatch): + """Test fix.""" + cube = Cube(0, attributes={"grid_file_uri": TEST_GRID_FILE_URI}) + fix = get_allvars_fix("Omon", "tos") + monkeypatch.setattr(fix, "_create_mesh", mock.Mock()) + fix.get_mesh(cube) + fix._create_mesh.assert_called_once_with(cube) + + +def test_get_mesh_cached_from_facet(monkeypatch, tmp_path): + """Test fix.""" + session = CFG.start_session("my session") + session["auxiliary_data_dir"] = tmp_path + + # Save temporary grid file (this will not be used; however, it is necessary + # to not raise a FileNotFoundError) + grid_path = "grid.nc" + grid_cube = Cube(0, var_name="grid") + iris.save(grid_cube, tmp_path / "grid.nc") + + cube = Cube(0, attributes={"grid_file_uri": TEST_GRID_FILE_URI}) + fix = get_allvars_fix("Omon", "tos", session=session) + fix.extra_facets["horizontal_grid"] = grid_path + monkeypatch.setattr(fix, "_create_mesh", mock.Mock()) + fix._meshes[TEST_GRID_FILE_NAME] = mock.sentinel.wrong_mesh + fix._meshes["grid.nc"] = mock.sentinel.mesh + + mesh = fix.get_mesh(cube) + + assert mesh == mock.sentinel.mesh + fix._create_mesh.assert_not_called() + + +def test_get_mesh_not_cached_from_facet(monkeypatch, tmp_path): + """Test fix.""" + session = CFG.start_session("my session") + session["auxiliary_data_dir"] = tmp_path + + # Save temporary grid file (this will not be used; however, it is necessary + # to not raise a FileNotFoundError) + grid_path = "grid.nc" + grid_cube = Cube(0, var_name="grid") + iris.save(grid_cube, tmp_path / "grid.nc") + + cube = Cube(0, attributes={"grid_file_uri": TEST_GRID_FILE_URI}) + fix = get_allvars_fix("Omon", "tos", session=session) + fix.extra_facets["horizontal_grid"] = grid_path + monkeypatch.setattr(fix, "_create_mesh", mock.Mock()) + fix._meshes[TEST_GRID_FILE_NAME] = mock.sentinel.wrong_mesh + + fix.get_mesh(cube) + + fix._create_mesh.assert_called_once_with(cube) + + +# Test _get_path_from_facet + + +@pytest.mark.parametrize( + "path,description,output", + [ + ("{tmp_path}/a.nc", None, "{tmp_path}/a.nc"), + ("b.nc", "Grid file", "{tmp_path}/b.nc"), + ], +) +def test_get_path_from_facet(path, description, output, tmp_path): + """Test fix.""" + session = CFG.start_session("my session") + session["auxiliary_data_dir"] = tmp_path + path = path.format(tmp_path=tmp_path) + fix = get_allvars_fix("Omon", "tos", session=session) + fix.extra_facets["test_path"] = path + + # Create empty dummy file + output = output.format(tmp_path=tmp_path) + with open(output, 
"w", encoding="utf-8"): + pass + + out_path = fix._get_path_from_facet("test_path", description=description) + + assert isinstance(out_path, Path) + assert out_path == Path(output.format(tmp_path=tmp_path)) + + +@pytest.mark.parametrize( + "path,description", + [ + ("{tmp_path}/a.nc", None), + ("b.nc", "Grid file"), + ], +) +def test_get_path_from_facet_fail(path, description, tmp_path): + """Test fix.""" + session = CFG.start_session("my session") + session["auxiliary_data_dir"] = tmp_path + path = path.format(tmp_path=tmp_path) + fix = get_allvars_fix("Omon", "tos", session=session) + fix.extra_facets["test_path"] = path + + with pytest.raises(FileNotFoundError, match=description): + fix._get_path_from_facet("test_path", description=description) + + +# Test add_additional_cubes + + +@pytest.mark.parametrize("facet", ["zg_file", "zghalf_file"]) +@pytest.mark.parametrize("path", ["{tmp_path}/a.nc", "a.nc"]) +def test_add_additional_cubes(path, facet, tmp_path): + """Test fix.""" + session = CFG.start_session("my session") + session["auxiliary_data_dir"] = tmp_path + path = path.format(tmp_path=tmp_path) + fix = get_allvars_fix("Omon", "tos", session=session) + fix.extra_facets[facet] = path + + # Save temporary cube + cube = Cube(0, var_name=facet) + iris.save(cube, tmp_path / "a.nc") + + cubes = CubeList([]) + new_cubes = fix.add_additional_cubes(cubes) + + assert new_cubes is cubes + assert len(cubes) == 1 + assert cubes[0].var_name == facet + + +@pytest.mark.parametrize("facet", ["zg_file", "zghalf_file"]) +@pytest.mark.parametrize("path", ["{tmp_path}/a.nc", "a.nc"]) +def test_add_additional_cubes_fail(path, facet, tmp_path): + """Test fix.""" + session = CFG.start_session("my session") + session["auxiliary_data_dir"] = tmp_path + path = path.format(tmp_path=tmp_path) + fix = get_allvars_fix("Omon", "tos", session=session) + fix.extra_facets[facet] = path + + cubes = CubeList([]) + with pytest.raises(FileNotFoundError, match="File"): + fix.add_additional_cubes(cubes) + + +# Test _fix_height + + +# @pytest.mark.parametrize("bounds", [True, False]) +# def test_fix_height_plev(bounds, simple_unstructured_cube): +# """Test fix.""" +# cube = simple_unstructured_cube[:, 1:, :] +# pfull_cube = simple_unstructured_cube[:, 1:, :] +# pfull_cube.var_name = "pfull" +# pfull_cube.units = "Pa" +# cubes = CubeList([cube, pfull_cube]) +# if bounds: +# phalf_cube = simple_unstructured_cube.copy() +# phalf_cube.var_name = "phalf" +# phalf_cube.units = "Pa" +# cubes.append(phalf_cube) +# fix = get_allvars_fix("Omon", "thetao") + +# fixed_cube = fix._fix_height(cube, cubes) + +# expected_data = [[[4.0, 5.0], [2.0, 3.0]]] +# np.testing.assert_allclose(fixed_cube.data, expected_data) + +# height = check_model_level_metadata(fixed_cube) +# np.testing.assert_array_equal(height.points, [0, 1]) +# assert height.bounds is None + +# plev = check_air_pressure_metadata(fixed_cube) +# assert fixed_cube.coord_dims("air_pressure") == (0, 1, 2) +# np.testing.assert_allclose(plev.points, expected_data) +# if bounds: +# expected_bnds = [[[[4.0, 2.0], [5.0, 3.0]], [[2.0, 0.0], [3.0, 1.0]]]] +# np.testing.assert_allclose(plev.bounds, expected_bnds) +# else: +# assert plev.bounds is None + + +# @pytest.mark.parametrize("bounds", [True, False]) +# def test_fix_height_alt16(bounds, simple_unstructured_cube): +# """Test fix.""" +# cube = simple_unstructured_cube[:, 1:, :] +# zg_cube = simple_unstructured_cube[0, 1:, :] +# zg_cube.var_name = "zg" +# zg_cube.units = "m" +# cubes = CubeList([cube, zg_cube]) +# if bounds: +# 
zghalf_cube = simple_unstructured_cube[0, :, :] +# zghalf_cube.var_name = "zghalf" +# zghalf_cube.units = "m" +# cubes.append(zghalf_cube) +# fix = get_allvars_fix("Omon", "thetao") + +# fixed_cube = fix._fix_height(cube, cubes) + +# expected_data = [[[4.0, 5.0], [2.0, 3.0]]] +# np.testing.assert_allclose(fixed_cube.data, expected_data) + +# height = check_model_level_metadata(fixed_cube) +# np.testing.assert_array_equal(height.points, [0, 1]) +# assert height.bounds is None + +# assert fixed_cube.coords("altitude", dim_coords=False) +# alt16 = fixed_cube.coord("altitude", dim_coords=False) +# assert alt16.var_name == "alt16" +# assert alt16.standard_name == "altitude" +# assert alt16.long_name == "altitude" +# assert alt16.units == "m" +# assert alt16.attributes == {"positive": "up"} +# assert fixed_cube.coord_dims("altitude") == (1, 2) +# np.testing.assert_allclose(alt16.points, expected_data[0]) +# if bounds: +# expected_bnds = [[[4.0, 2.0], [5.0, 3.0]], [[2.0, 0.0], [3.0, 1.0]]] +# np.testing.assert_allclose(alt16.bounds, expected_bnds) +# else: +# assert alt16.bounds is None + + +# Test hfls (for extra fix) + + +# def test_get_hfls_fix(): +# """Test getting of fix.""" +# fix = Fix.get_fixes("ORAS5", "ORAS5", "Omon", "hfls") +# assert fix == [Hfls(None), AllVars(None), GenericFix(None)] + + +# def test_hfls_fix(cubes_regular_grid): +# """Test fix.""" +# cubes = CubeList([cubes_regular_grid[0].copy()]) +# cubes[0].var_name = "hfls" +# cubes[0].units = "W m-2" + +# fixed_cubes = fix_metadata(cubes, "Omon", "hfls") + +# assert len(fixed_cubes) == 1 +# cube = fixed_cubes[0] +# assert cube.var_name == "hfls" +# assert cube.standard_name == "surface_upward_latent_heat_flux" +# assert cube.long_name == "Surface Upward Latent Heat Flux" +# assert cube.units == "W m-2" +# assert cube.attributes["positive"] == "up" + +# fixed_cube = fix_data(cube, "Omon", "hfls") + +# np.testing.assert_allclose(fixed_cube.data, [[[0.0, -1.0], [-2.0, -3.0]]]) + + +# # Test hfss (for extra fix) + + +# def test_get_hfss_fix(): +# """Test getting of fix.""" +# fix = Fix.get_fixes("ORAS5", "ORAS5", "Omon", "hfss") +# assert fix == [Hfss(None), AllVars(None), GenericFix(None)] + + +# def test_hfss_fix(cubes_regular_grid): +# """Test fix.""" +# cubes = CubeList([cubes_regular_grid[0].copy()]) +# cubes[0].var_name = "hfss" +# cubes[0].units = "W m-2" + +# fixed_cubes = fix_metadata(cubes, "Omon", "hfss") + +# assert len(fixed_cubes) == 1 +# cube = fixed_cubes[0] +# assert cube.var_name == "hfss" +# assert cube.standard_name == "surface_upward_sensible_heat_flux" +# assert cube.long_name == "Surface Upward Sensible Heat Flux" +# assert cube.units == "W m-2" +# assert cube.attributes["positive"] == "up" + +# fixed_cube = fix_data(cube, "Omon", "hfss") + +# np.testing.assert_allclose(fixed_cube.data, [[[0.0, -1.0], [-2.0, -3.0]]]) + + +# # Test rtnt (for extra fix) + + +# def test_get_rtnt_fix(): +# """Test getting of fix.""" +# fix = Fix.get_fixes("ORAS5", "ORAS5", "Omon", "rtnt") +# assert fix == [Rtnt(None), AllVars(None), GenericFix(None)] + + +# def test_rtnt_fix(cubes_regular_grid): +# """Test fix.""" +# cubes = CubeList( +# [ +# cubes_regular_grid[0].copy(), +# cubes_regular_grid[0].copy(), +# cubes_regular_grid[0].copy(), +# ] +# ) +# cubes[0].var_name = "rsdt" +# cubes[1].var_name = "rsut" +# cubes[2].var_name = "rlut" +# cubes[0].units = "W m-2" +# cubes[1].units = "W m-2" +# cubes[2].units = "W m-2" + +# fixed_cubes = fix_metadata(cubes, "Omon", "rtnt") + +# assert len(fixed_cubes) == 1 +# cube = fixed_cubes[0] 
+# assert cube.var_name == "rtnt" +# assert cube.standard_name is None +# assert cube.long_name == "TOA Net downward Total Radiation" +# assert cube.units == "W m-2" +# assert cube.attributes["positive"] == "down" + +# np.testing.assert_allclose(cube.data, [[[0.0, -1.0], [-2.0, -3.0]]]) + + +# # Test rtmt (for extra fix) - assert len(fixed_cubes) == 1 - fixed_cube = fixed_cubes[0] - assert fixed_cube.shape == (2, 4) +# def test_get_rtmt_fix(): +# """Test getting of fix.""" +# fix = Fix.get_fixes("ORAS5", "ORAS5", "Omon", "rtmt") +# assert fix == [Rtmt(None), AllVars(None), GenericFix(None)] - assert fixed_cube.coords("time", dim_coords=True) - assert fixed_cube.coord_dims("time") == (0,) - assert fixed_cube.coords("latitude", dim_coords=False) - assert fixed_cube.coord_dims("latitude") == (1,) - lat = fixed_cube.coord("latitude") - np.testing.assert_allclose(lat.points, [1, 1, -1, -1]) - assert lat.bounds is None +# def test_rtmt_fix(cubes_regular_grid): +# """Test fix.""" +# cubes = CubeList( +# [ +# cubes_regular_grid[0].copy(), +# cubes_regular_grid[0].copy(), +# cubes_regular_grid[0].copy(), +# ] +# ) +# cubes[0].var_name = "rsdt" +# cubes[1].var_name = "rsut" +# cubes[2].var_name = "rlut" +# cubes[0].units = "W m-2" +# cubes[1].units = "W m-2" +# cubes[2].units = "W m-2" - assert fixed_cube.coords("longitude", dim_coords=False) - assert fixed_cube.coord_dims("longitude") == (1,) - lon = fixed_cube.coord("longitude") - np.testing.assert_allclose(lon.points, [179, 180, 180, 179]) - assert lon.bounds is None +# fixed_cubes = fix_metadata(cubes, "Omon", "rtmt") +# assert len(fixed_cubes) == 1 +# cube = fixed_cubes[0] +# assert cube.var_name == "rtmt" +# assert cube.standard_name == ( +# "net_downward_radiative_flux_at_top_of_atmosphere_model" +# ) +# assert cube.long_name == "Net Downward Radiative Flux at Top of Model" +# assert cube.units == "W m-2" +# assert cube.attributes["positive"] == "down" +# np.testing.assert_allclose(cube.data, [[[0.0, -1.0], [-2.0, -3.0]]]) diff --git a/tests/integration/cmor/_fixes/test_data/oras5_2d.nc b/tests/integration/cmor/_fixes/test_data/oras5_2d.nc new file mode 100644 index 0000000000000000000000000000000000000000..49aee8c167bfe7fd824e10bd4db8836025022a9f GIT binary patch literal 21889 zcmeHPYitzP6+XLbz?j8`lmG$LOvnR+@!BSi6`t{Wy^Bfw0NV*|K-vzwvv|tx44Rq6 zCZPfnexwwlHmNWrBuFI^L_vwtR;VREN|&~fHqfY~m6BhHP*ow4G(S*_qNenm``F#F zaWGq)h;*)e=g!=F?wxbLb6dBt{<$nOFhP{bIz!V6Klq4b$`rc39-u2g7f&EKk8T1GgmNeYh?vF}&2V(nu;Ck6 zFdm~HV0(52L1bu7q&22=ZPqN+F)dr!qFFXJr8I^XhZZT- zZDvN>tZvkld17yO zbSJH`_Y)ArWQS=ued(=AMpF%CQLECUsY*}QN~sA=N$FZTX$MzOUjs0KH({GuE8%XV zt2^BDfKsb$P!pT_RV%4z1CC~}UQ|aT9cyFrgEk-ldBk+XMozK>8DpkfFcZyM=4N6f zf_PfUfN3cmT2jwuYP(efL0GG)PM>BeG1Jl#s_oc{cmckh;IR>({{H@umdNy{Lb~Au zlK|uJ#TA@+OG{~%W(fYH;3{sx?N``@V+R6 zi5+<1&(7?+z=s_8eADnP;P+vm8w_%=1*Xv;o2op7yisU|#!1k46$jh6N-7D|6bAQ| z?YGaNu?B&OFg47`;jl>G5@|UNJ_z{zYdSaqw!xe3=g(isb<_Qi|Eep}_6VO!);P{Y ztKdcUYC;|riH=NRB9Y3ctDoKX!8U5H{5HxC_-4wI0cbag{o3n?iN>&-LbsmHW@47SJ z?2K}lu^kJ?u@xjx!MnYC7N>ts)RWyFU+7rAhx~^6f&G&cH_{&N>0xt)RwSCZt*bhs zojt3%I^#W&aC=lNRmTOseN-C*HAC}%)Ntgx#_O}|sTnU^DTr`^SUi^rit%iK`^pGR z3cjr=jxk>ec_wT9&G@sv=g#q@EFL$%d?> z7^YJw+A>sb4UO=(zO}nevOpMrSlr|u7WL{3-}N`xt-6y|A^ILjp&`6 zf8`_3KiV8{;!MMJ1P8cWMV=)9J_5 zbXLRdW;kZWo0H@Y;NvPn%;LGDEy{Y4wf=+ARz5LBBqU%A_vXPFa67>_P35j<` zR+9zDa&*+MkvU^7M$&KytJv~I3sEhM3jO8GJhreenR;M(L#UxKbU(GY__qGH&+pow zDFv3qT#Rvy5#RxB8iV+TCiVOw`3W%9Q51;l{-J&;PokaPYcP z&E*{tVwc85X@26yMAMkfP~$|W+x~KdS_n4Z6k{S9@VF?Su ziITcKPmQ)maxofaP7I26r8!YUzo8qoCYr|0wI&km#nwc!_EmL$b}rV_6NQV+^#chm 
z0o=~4y}vm7>+eS6%x>9)5RpwUsXD5yIRyh|-cJI`shkT|VU9+ie7dciS=IyovZbs^E(#*1NJP>-pb{ntS-dFHQz9MN0Reijfw2e`n+_Tz zmGO*wUm1-V3gxzGqbHp0e1tQA0{1uABUY(ep2@Cb&yO-uKGVbKc9`YJ>0&3^TwL=laT54M!n=r zj=iR>y~n>?&g7ei&H~AqoWBpA-tlLNAH=%bRx|6qpREfbUrvRS;#saKVqK8yL%tO` zRq`1l+~Dlsvk1#MyVaO3t(WA-SH2N#>1}I8j*#1_n4PR%iYq^b%Dj)#zYZKd$fza~ z742O}6=Fn1RN;BQn-J^4w%{Z$5^t1m7MYlo!D<))+Tu7*Pd4*1;Xwm3J^tLLT`j%s zG30J7QMPdxwaY7JR=|5yrHOVN^d32?Oab!Hg=XI5<=uDQ0~e?YPN#BA);%4rg()PE z`+#{UI@4#xpoS>0w=^~1eRIzmID|K_gs;432I{)u5K&-{$Y*EjtBTDRcwAHn1TDDY zc}P*C5r9cV_??M|po*%Xmm|Ynf9VaNEdW!7G%JMrS`v63_qWB232{ zSVb4iIQ!1UAHQFv^C$1hNog7eaN?HkbNA`Cye@ROw;j2v z(JP8w^c^k3{&OH}X(PDZUANY-1GDT(B!CMzJFJ^VS~s*fX=c%RPdM$bY0JjOgQ>jrfF8+;U=szXU zB2N8d|G|*|*M&rp11Kq|$$gHfgfphXQqv$YvTkH_+t$$y5)ru}?@Jm9qxW2tBT=R# zLg}eUTYK>{aa{aNzZOR6AdZx`eCX|tMtul?JR!+co`J8|!Enq?K~PeVCn%Zbk4e1P zyLt$C2zUs12zUs12zUs12zUs12;71QxJ@)C0h-y5t|LEuz zFmiY=&l|hz7i)o$6&56Kc^&SMA1P#18%@A!`I@|G(gLp$8b|zK!fy_LK2!@FZs9FY zhiZZMHS-dw{gvsNSJyWPE$~ii z1I;F+jy>`6X9~PaRgeej9#i_Rr3}>165((EzHb@K6jew)z*M?V5e2qEvX1{?=Wcj~ zGO!Ab3#QV2h^k=sNn-+}!97YDsIVnX^7P9ezsa?cVJhYd+eP*%)cAT~%n&Hn0t?)s zSkzD~RhL9vL&P;2z$NG6q2&_zrMA_tjak3EhDVFQr)b>vqKAPu251a0tBCxt44XnP zKY4Wh4OvBKPop4Hy3f1gT>twe0koPjV?+LvAY>AyO|{sU;6^mn*EYor|Ju&AKYu{% zU)lZz<`_C6NcQsmEPTZPPXw&k))Jikym2L+3J}>;AAuy6vgiff}3QOm@ dbLAKsiQY}^J$VRt2zUs12zUs12;7PY{1>%RD$)P| literal 0 HcmV?d00001 diff --git a/tests/integration/cmor/_fixes/test_data/oras5_3d.nc b/tests/integration/cmor/_fixes/test_data/oras5_3d.nc new file mode 100644 index 0000000000000000000000000000000000000000..07baebb0e0ae71b4c9e693158e1a1199ca273eb8 GIT binary patch literal 76353 zcmeI44RDmj8OQf>A%t)VK@gN`Igmj>hy+u_@|F8|LzECANx}Nja3&YHnaf?wT>@GJ zp^jFhQlt_@Ok_mI`dJxqq|{N*)+$y-%S^S}j_run`h}x*9JJEs*>|7I9r-Xhp8`aO0@|xJ3;e6r9 zw(CY$SLQnHNAe`gq;#yTNWPGw_T0X@Syw+lKg*#D$#OUx^G#w2ox{1tL*1j5o!6~K z=BklvDAj;Ge&U>D$3M9y_ax5hbkq&Bi z-%#QYHHPX{A?duR23O7|aC`CV*B(0=su5H$xf4Z(wo?8n=hkty3{?t;qTv$C05z97 zpIxac8LUcm^l$H8?jnFqp?VPBStLbJCy3~X+j6GyPPv&09>4i~=wMS~Z zq(1!Z_YfuH({}&1eG7f%^K(Y?TO8S<;;uNMTw51yiNu4=KQ+Te-R`QLl#0i!QL}f1 zIGm9LaV``y_N({60ost5$8Luc|rP6l{)%f-$XQW9cz>S8K4D6>9dh z$)sbX9kWY{&MTTWd3k-qcV-tC6_*s9!v($4az*&+D;;zF<9|E1V(+BV3tng~{lnEe zOMkrd<lQt-@L@*kDs;JfA1S%|Kh#t{Er>k=-)K6-Cr1fz<=kBkNb;ne$l_8&99;u9;%F_sp>JCi*SUzJ=D9wo9ya>z7%NcEqgpudJ~; zhFoXO_H3~3UUjpz@yV^$fx#Wtip%b@wmov6wW@HJ)!Ou!_4{kLCNlAtG;B>|;uG_> zCNlB46SpQZ@ssnmCNlA7zt@(?#1H*lTOt#`|Ngc_CO-0$wnQfW-Nv>=Cf-}wmdM0k zRn(Tq#NSlVmdM0k^Wm06CSIwnUUdz5bt{GB)iw&rtJ^3fueMW2UfoV1dDTuKdE|p! 
z{x5mquj>DnOHS#=alNx>(J4!coHrIzDfjCWf(q{U!RL-)?mMNN^`E#5-*Lv8j(>r9MI@lzaVBjN%-`7*RQ5@sQ zwigk!Hj4yWYr}zf-4HvaUEFpvx z>vmq{OPwTC>d7^os~kPmi^T(x`apAiZ6web)Kfi{DYuX67K*pj2VJzXbJBl#_S`TW zjV$Y$xIz+7;AK*BQP&a)#q9}XzA3dnxU4xCjMYY>&GF@~$-MS;QfusKXJ*GF?j$<>)fw^^CW z3e|o{e+MP6dcPnh1xqatr6y3gX@=xl;CbR)z7~!i{AkD7$5X#;U8pj4qzg?o{sY>* z$*iQJ(sIT)-F2L~cVw!5i!I02?`XuwCJl0Sn|^4PqbAT#X1+cC8_MkLGP30kGkyU0bvMPO6tsY{gljwMesSm^hv0%J&tVtT1R4zZa^~tSx7&c!g z60+@N$QP&t5xI|2hp?0MJKkBmyR5p}Qgf+q**VC9JXAkT+)?`YF{q&#Pc#?&g0HLL z*xK!toURu)&$qBbH^_V-rExqivlH{RyjtSLcr62x#7lb7FNv4*j2}CR7b}Tqti+4) zU1lK?FXfxhak3mK-!5ILsggh<5JCcw03-kjKmw2eBmfCO0+0YC00}?>C$I#{s>}UK ziKS9j{}PH|vQ?E@N!CWOExO72%8oWKX(wTO<-uqEJB&7_VoHoQaxX?3v1c?CPW}z; zF^YUJbWS2^Dv}hVzi%Y8PBs^c zy+}e+G}|xx;wW1(r(Lu*fzOh>PlTG*dTR*jTsD#orRe$zo2J@N2PF;d1L~f7PNy2g zso3;HsiqyB>Vx__VzzonE2O(6v~zHk($!<8=^{}J2|q3yo!_N)B}czE{Ku?tm)cD~ zI@PZ4fA4;6ZIoXZ9Hhj5(KUPUug_HIM`aRJ?N#0&f1eS4$ldHW83yGTIy|DZIPrFC^$pQTZe-ZiOxokmky zAE!Z+225JNreToQ!G~V^wi!3Nv;+~ENWkd-T2@5*fsH?mzubA)tqyQuq?xRDTHTj$ z1bqcK;jiD>GE1Fevaoh*WnRZAs0`M}+8fudS3lcQjIE)?812^jq^U`3>_Dl4=8+`q`#OH=g!VopIb2Oc4=f82X+8YOx_(4>q>(f>Os zX`24e;W9{{^^{}ztuP5<*>T(Xc$fjdqT=$rnVUR9|*s!|bAsQ{_8eoh4^t;1^$ z?$Mijj#JA}Y+oc$R_^yE56 zv`A(3X-k~_+HbZ;v4_ouyz1r(E#`wMb%C(E#vKl}2E(yrGduSivJ^ONW3pWV+ES=D zILf1oc^l!e+T$ZqmryJja!*x(g9Lh#fYyIGtELl$+E)+$M*7dw{iI@KqW^4tVHHX} zd)C)czS>po>(5=r%v?d%)?Xa%Gn$AWdyNPWy53q|UJ z?&9-iOq(+G+$qJ=Yo<>9uKstH{_7+gp58D?<4TZsMC+kKc0badj(+49$d`Px6 z5Kc+m-T&C2`(bB`pH)1)oE6QXcs~wVTy@3;PIZ#0cSA!TD(o7_qRe{}%#!!sLzGbx}k$xQ&;Xds9F$3CW)O-H0q}~@Cb_}KqE&Gm_|Q*5CA`v0JL{@X5z_oESuNe5J8eNc&g#wJ8t6500n%Gz*2u| zb5J!9>sCQxN(9aTY?BYlSaIRz`qoyZGEy_Ov2no#k(Sm4i&~E_FXkc!Mn9ydqU+}xEcBOIm0# z#uY?+aDo{aALCB@aa1gmjJ2f_H9;MwPKTx+KmUox77DO0pN7OHWBcWJZsdI3R?$LP zpIRsa_6e%SV+-ZFi8|VQ?{80v&H=8dV)h*UmdC1n;$OKqIKAQeT{qoM1^AC6p3f6K zk>6cO&BH(~Ea~sCQHC9Z_O5acTCf%(YtMhJXV4~)pciK>nW*H0bf$fk1ced7Q+j5R z(S8`c9&?wL^F+qOonOygomomH12{j@(!K{WLh?qFnAVKUpOjw#mYZ-u&)FX;sFl&jPSzvH`B9)59GKtuOx*uHR8+yQ!y+y#f51yPrWduSzdk(=V zm?q(Q$y91-EVVjuNUhN#A#<<90QvQS(|AhlWAUQ;sn@u8(Yh75j6daJFr4b_ou8W* zv|a=RAey>IBdKKut8}W2>Rv%FG0Z|LOrMMC8F#jgR~kBrBs?Wrr{I@WbNM8C0Z^s| z1k!$4U$3jx0t(Nb5?vXT@v8fc*7@$8pV~@Lv=JT0`6T_^r(^dJXY1M*_RkM5YF2!; z0pOO?J1P4+N?@QKP!Rqw-Tm{J-NP68*$%$Q*E#qiFQ2!w{J9Q!>14>kmvZVIe5tp= z!IyHv4!)Gr=-^8^nhkGO$S*f`HQ;K%)qtx3R|BpFTn)Gya5dm+z}0}O0apXZz6J{I zWMTx@W(H-jN1MXw+=~~bGfU-1@#CLAT((eIfY5?P_VWs~E1)$j+7iAn(|KhglTB*Z zLlxFd7Kp17nXY7Kx^CL!w6fIK)z-|anKpSHHXYCgDJw`StCb$ z&ndT|^(l{*HF-jrZ7)?-NBC~rz| zKh|SN-6(HLum$Teq;8Zqq_=O#*~?buYllwtZyuP#2(9jbDL$SZ?){N zJ1o1Q#j@AdSoUodmhI+ob#Sye;5C*OZw_7^i#tfH=EF2!dmYqO0%9qT9maIMfPz}Q zF&?E-0LSZ?95EEM%T_sPm}B<*H{EyQe%!14IRN5*?1v||*bclWs#q;`GA?p2M6|W@ zD)>Y>r<8&{CG3Jkml!#x#<7&_q{Ln8xkj8K?^YccrM7J$-nRWi$K7Oh-096cyxaHvXJpGn2&wT%DH>ID7j-5RaxSxA6)%Ks_e(OYT!ferj6ZaJlT=G~U?zN8^ zaUaWGjHpsUO+tryvwcVF)kVZTB8P^$Pu%x_IhO4&?Dfs8;=-PMxP`DM->ot1-`zK< zmUKt3FM;v(tm=n2QpNTAW(|Gd0PpNu^DxCSY%=8HR%a=IORm2AQj)tleP=U&1~a^# zTH<(p9*<0X&RZLO@%w+9LzV+upQrQsMB0z#U;(wx=st(6w-QU|Nhw9TUNQX$Xr-l{ zs_mJjqdYGMf;?IKd9rR#q_cR4SF<&>R%Z#MOebpTs>Fo|rCS7r>5wB`k7*+WQ!6KA z9#0d3WnkOSK_&(nSmOd4Z+!f{!UESfaDl5hg1|Pqe?g1a6xtXqiOh>EiL`_xQFTY< zY-|(hJRZx&x)Rym%PR3?Moyl2Q1%8D8&+^+c50_QeE8o@ z$1e;6;bC69tBlMja-dm$*}iW#tcGW;`z`z*<^%6tIpg`t&12cF4qP43_wC}~*w#Ve zj`W!zZ6pDL>WN1Eb?l8Tp2=KH+Y*a=NmmGB0+;ldNpHMPEgU%Gz0@qmdr6D&UeeNX V*$Yd{CG8!u^F8@T-+%kw#-DoS=M(?{ literal 0 HcmV?d00001 From 3e24968ac61fd273a3e4909cc36a0e833072bd16 Mon Sep 17 00:00:00 2001 From: Jan-Hendrik Malles Date: Tue, 25 Nov 2025 13:45:05 +0100 Subject: [PATCH 21/32] Remove hardcoded path to grid file --- 
 tests/integration/cmor/_fixes/oras5/test_oras5.py | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)

diff --git a/tests/integration/cmor/_fixes/oras5/test_oras5.py b/tests/integration/cmor/_fixes/oras5/test_oras5.py
index 8b661ec6f1..e11029e768 100644
--- a/tests/integration/cmor/_fixes/oras5/test_oras5.py
+++ b/tests/integration/cmor/_fixes/oras5/test_oras5.py
@@ -177,7 +177,8 @@ def _get_fix(mip, short_name, fix_name, session=None):
     extra_facets = get_extra_facets(dataset, ())
     extra_facets["frequency"] = "mon"
     extra_facets["exp"] = "omip"
-    extra_facets["horizontal_grid"] = '/work/bd1083/b382555/esmvalcore_dev/tests/integration/cmor/_fixes/test_data/oras5_grid.nc'
+    test_data_path = Path(__file__).resolve().parent.parent / "test_data"
+    extra_facets["horizontal_grid"] = str(test_data_path / "oras5_grid.nc")
     vardef = get_var_info(project="ORAS5", mip=mip, short_name=short_name)
     cls = getattr(esmvalcore.cmor._fixes.oras5.oras5, fix_name)
     fix = cls(vardef, extra_facets=extra_facets, session=session)

From 1cfe6cb9dfa72cda3d4f75c796f6aca261c11c90 Mon Sep 17 00:00:00 2001
From: Jan-Hendrik Malles
Date: Tue, 25 Nov 2025 13:57:43 +0100
Subject: [PATCH 22/32] Accommodating changes in main

---
 .../configurations/data-native-oras5.yml      |  9 +++
 .../defaults/extra_facets_oras5.yml           | 58 +++++++++++++++++++
 .../cmor/_fixes/oras5/test_oras5.py           | 15 +----
 3 files changed, 69 insertions(+), 13 deletions(-)
 create mode 100644 esmvalcore/config/configurations/data-native-oras5.yml
 create mode 100644 esmvalcore/config/configurations/defaults/extra_facets_oras5.yml

diff --git a/esmvalcore/config/configurations/data-native-oras5.yml b/esmvalcore/config/configurations/data-native-oras5.yml
new file mode 100644
index 0000000000..76edb768aa
--- /dev/null
+++ b/esmvalcore/config/configurations/data-native-oras5.yml
@@ -0,0 +1,9 @@
+# Read data from the ICON model in its native format.
+projects:
+  ORAS5:
+    data:
+      oras5: &oras5
+        type: "esmvalcore.local.LocalDataSource"
+        rootpath: ~/climate_data
+        dirname_template: '/'
+        filename_template: '*{raw_name}*{version}*.nc'
\ No newline at end of file
diff --git a/esmvalcore/config/configurations/defaults/extra_facets_oras5.yml b/esmvalcore/config/configurations/defaults/extra_facets_oras5.yml
new file mode 100644
index 0000000000..d13c6a6bda
--- /dev/null
+++ b/esmvalcore/config/configurations/defaults/extra_facets_oras5.yml
@@ -0,0 +1,58 @@
+# Extra facets for native ORAS5 data
+
+# Notes:
+# - All facets can also be specified in the recipes. The values given here are
+#   only defaults.
+# - The facet ``var_type`` has to be specified in the recipe if it is not given
+#   here and default DRS is used.
+
+# A complete list of supported keys is given in the documentation (see
+# ESMValCore/doc/quickstart/find_data.rst).
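+#
+# Illustrative sketch only (not taken from the documentation referenced
+# above): since all of these facets can also be set in a recipe, overriding a
+# default could look roughly like the snippet below. The recipe layout is an
+# assumption; the facet values shown are the defaults from this file.
+#
+#   variables:
+#     tos:
+#       project: ORAS5
+#       mip: Omon
+#       raw_name: sosstsst
+#       raw_units: degC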
+--- + +projects: + ORAS5: + extra_facets: + ORAS5: + '*': + # Cell measures + areacella: + latitude: grid_latitude + longitude: grid_longitude + raw_name: cell_area + areacello: + latitude: grid_latitude + longitude: grid_longitude + raw_name: cell_area + + # Variable names and units + uo: + raw_name: vozocrte + raw_units: m/s + vo: + raw_name: vomecrtn + raw_units: m/s + tos: + raw_name: sosstsst + raw_units: degC + sos: + raw_name: sosaline + raw_units: '0.001' + zos: + raw_name: sossheig + raw_units: m + mlotst: + raw_name: somxl010 + raw_units: m + thetao: + raw_name: votemper + raw_units: degC + so: + raw_name: vosaline + raw_units: '0.001' + hfds: + raw_name: sohefldo + raw_units: W/m^2 + tauuo: + raw_name: sozotaux + raw_units: N/m^2 \ No newline at end of file diff --git a/tests/integration/cmor/_fixes/oras5/test_oras5.py b/tests/integration/cmor/_fixes/oras5/test_oras5.py index e11029e768..a2861e48ad 100644 --- a/tests/integration/cmor/_fixes/oras5/test_oras5.py +++ b/tests/integration/cmor/_fixes/oras5/test_oras5.py @@ -14,20 +14,9 @@ from iris.cube import Cube, CubeList import esmvalcore.cmor._fixes.oras5.oras5 -from esmvalcore.cmor._fixes.fix import GenericFix from esmvalcore.cmor._fixes.oras5._base_fixes import Oras5Fix -from esmvalcore.cmor._fixes.oras5.oras5 import ( - AllVars, - # Clwvi, - # Hfls, - # Hfss, - # Rtmt, - # Rtnt, -) -from esmvalcore.cmor.fix import Fix -from esmvalcore.cmor.table import CoordinateInfo, get_var_info +from esmvalcore.cmor.table import get_var_info from esmvalcore.config import CFG -from esmvalcore.config._config import get_extra_facets from esmvalcore.dataset import Dataset TEST_GRID_FILE_URI = ( @@ -174,7 +163,7 @@ def _get_fix(mip, short_name, fix_name, session=None): mip=mip, short_name=short_name, ) - extra_facets = get_extra_facets(dataset, ()) + extra_facets = dataset._get_extra_facets() extra_facets["frequency"] = "mon" extra_facets["exp"] = "omip" test_data_path = Path(__file__).resolve().parent.parent / "test_data" From 68637848f1ee6038a39ba097be6fe9912b705789 Mon Sep 17 00:00:00 2001 From: Jan-Hendrik Malles Date: Tue, 25 Nov 2025 15:09:30 +0100 Subject: [PATCH 23/32] Small adjustments --- esmvalcore/config/configurations/data-native-oras5.yml | 2 +- .../config/configurations/defaults/extra_facets_oras5.yml | 4 +--- tests/integration/recipe/test_recipe.py | 2 +- 3 files changed, 3 insertions(+), 5 deletions(-) diff --git a/esmvalcore/config/configurations/data-native-oras5.yml b/esmvalcore/config/configurations/data-native-oras5.yml index 76edb768aa..b24958e47b 100644 --- a/esmvalcore/config/configurations/data-native-oras5.yml +++ b/esmvalcore/config/configurations/data-native-oras5.yml @@ -1,4 +1,4 @@ -# Read data from the ICON model in its native format. +# Read data from ORAS5 data in its native format. projects: ORAS5: data: diff --git a/esmvalcore/config/configurations/defaults/extra_facets_oras5.yml b/esmvalcore/config/configurations/defaults/extra_facets_oras5.yml index d13c6a6bda..03832d97ac 100644 --- a/esmvalcore/config/configurations/defaults/extra_facets_oras5.yml +++ b/esmvalcore/config/configurations/defaults/extra_facets_oras5.yml @@ -1,10 +1,8 @@ -# Extra facets for native ORAS5 data +# Extra facets for native ORAS5 data. # Notes: # - All facets can also be specified in the recipes. The values given here are # only defaults. -# - The facet ``var_type`` has to be specified in the recipe if it is not given -# here and default DRS is used. 
# A complete list of supported keys is given in the documentation (see # ESMValCore/doc/quickstart/find_data.rst). diff --git a/tests/integration/recipe/test_recipe.py b/tests/integration/recipe/test_recipe.py index 10801a671e..a03e92e817 100644 --- a/tests/integration/recipe/test_recipe.py +++ b/tests/integration/recipe/test_recipe.py @@ -3824,7 +3824,7 @@ def test_align_metadata_invalid_project(tmp_path, patched_datafinder, session): "align_metadata failed: \"No CMOR tables available for project 'ZZZ'. " "The following tables are available: custom, CMIP6, CMIP5, CMIP3, OBS, " "OBS6, native6, obs4MIPs, ana4mips, EMAC, CORDEX, IPSLCM, ICON, CESM, " - 'ACCESS."' + 'ACCESS, ORAS5."' ) with pytest.raises(RecipeError) as exc: get_recipe(tmp_path, content, session) From 21c78f6754a63f3a05d5064f7597534e493e3eab Mon Sep 17 00:00:00 2001 From: Jan-Hendrik Malles Date: Tue, 25 Nov 2025 16:07:23 +0100 Subject: [PATCH 24/32] More tests --- esmvalcore/cmor/_fixes/oras5/_base_fixes.py | 8 +- esmvalcore/cmor/_fixes/oras5/oras5.py | 6 +- .../cmor/_fixes/oras5/test_oras5.py | 361 +++++++++--------- 3 files changed, 192 insertions(+), 183 deletions(-) diff --git a/esmvalcore/cmor/_fixes/oras5/_base_fixes.py b/esmvalcore/cmor/_fixes/oras5/_base_fixes.py index a4f450b84f..0efb03b4ea 100644 --- a/esmvalcore/cmor/_fixes/oras5/_base_fixes.py +++ b/esmvalcore/cmor/_fixes/oras5/_base_fixes.py @@ -73,15 +73,15 @@ def _create_mesh(self, cube): node_lat = iris.coords.AuxCoord( node_lat, standard_name="latitude", - var_name="lat", - long_name="latitude", + var_name="nlat", + long_name="node latitude", units="degrees", ) node_lon = iris.coords.AuxCoord( node_lon, standard_name="longitude", - var_name="lon", - long_name="longitude", + var_name="nlon", + long_name="node longitude", units="degrees", ) diff --git a/esmvalcore/cmor/_fixes/oras5/oras5.py b/esmvalcore/cmor/_fixes/oras5/oras5.py index bba28fc391..ce715dc83e 100644 --- a/esmvalcore/cmor/_fixes/oras5/oras5.py +++ b/esmvalcore/cmor/_fixes/oras5/oras5.py @@ -92,8 +92,10 @@ def _fix_cube(self, cube): coords_add.append((coord, dim)) data = da.moveaxis(cube.core_data(), -1, -2).flatten() dim_shape = tuple(cube.data.shape[:-2]) - data_shape = tuple(data.shape / np.prod(dim_shape)) - data = da.reshape(data, dim_shape + data_shape) + data_shape = data.shape / np.prod(dim_shape) + data_shape = tuple(map(int, data_shape)) + print(dim_shape, data_shape) + data = np.reshape(data, dim_shape + data_shape) return iris.cube.Cube(data, dim_coords_and_dims=coords_add) def _add_coord_from_grid_file(self, cube, coord_name): diff --git a/tests/integration/cmor/_fixes/oras5/test_oras5.py b/tests/integration/cmor/_fixes/oras5/test_oras5.py index a2861e48ad..28562344c3 100644 --- a/tests/integration/cmor/_fixes/oras5/test_oras5.py +++ b/tests/integration/cmor/_fixes/oras5/test_oras5.py @@ -14,7 +14,10 @@ from iris.cube import Cube, CubeList import esmvalcore.cmor._fixes.oras5.oras5 +from esmvalcore.cmor._fixes.fix import GenericFix from esmvalcore.cmor._fixes.oras5._base_fixes import Oras5Fix +from esmvalcore.cmor._fixes.icon.icon import AllVars +from esmvalcore.cmor.fix import Fix from esmvalcore.cmor.table import get_var_info from esmvalcore.config import CFG from esmvalcore.dataset import Dataset @@ -168,6 +171,7 @@ def _get_fix(mip, short_name, fix_name, session=None): extra_facets["exp"] = "omip" test_data_path = Path(__file__).resolve().parent.parent / "test_data" extra_facets["horizontal_grid"] = str(test_data_path / "oras5_grid.nc") + extra_facets["ugrid"] = True vardef = 
get_var_info(project="ORAS5", mip=mip, short_name=short_name) cls = getattr(esmvalcore.cmor._fixes.oras5.oras5, fix_name) fix = cls(vardef, extra_facets=extra_facets, session=session) @@ -203,20 +207,20 @@ def fix_data(cube, mip, short_name, session=None): return cube -def check_ta_metadata(cubes): - """Check ta metadata.""" +def check_thetao_metadata(cubes): + """Check thetao metadata.""" assert len(cubes) == 1 cube = cubes[0] assert cube.var_name == "thetao" - assert cube.standard_name == "air_temperature" - assert cube.long_name == "Air Temperature" + assert cube.standard_name == "sea_water_potential_temperature" + assert cube.long_name == "Sea Water Potential Temperature" assert cube.units == "degC" assert "positive" not in cube.attributes return cube -def check_tas_metadata(cubes): - """Check tas metadata.""" +def check_tos_metadata(cubes): + """Check tos metadata.""" assert len(cubes) == 1 cube = cubes[0] assert cube.var_name == "tos" @@ -227,16 +231,16 @@ def check_tas_metadata(cubes): return cube -def check_siconc_metadata(cubes, var_name, long_name): - """Check tas metadata.""" - assert len(cubes) == 1 - cube = cubes[0] - assert cube.var_name == var_name - assert cube.standard_name == "sea_ice_area_fraction" - assert cube.long_name == long_name - assert cube.units == "%" - assert "positive" not in cube.attributes - return cube +# def check_siconc_metadata(cubes, var_name, long_name): +# """Check tas metadata.""" +# assert len(cubes) == 1 +# cube = cubes[0] +# assert cube.var_name == var_name +# assert cube.standard_name == "sea_ice_area_fraction" +# assert cube.long_name == long_name +# assert cube.units == "%" +# assert "positive" not in cube.attributes +# return cube def check_time(cube): @@ -256,13 +260,13 @@ def check_time(cube): def check_model_level_metadata(cube): """Check metadata of model_level coordinate.""" - assert cube.coords("model level number", dim_coords=True) - height = cube.coord("model level number", dim_coords=True) - assert height.var_name == "model_level" + assert cube.coords("depth", dim_coords=True) + height = cube.coord("depth", dim_coords=True) + assert height.var_name == "lev" assert height.standard_name is None assert height.long_name == "model level number" - assert height.units == "no unit" - assert height.attributes == {"positive": "up"} + assert height.units == "m" + assert height.attributes == {"positive": "down"} return height @@ -326,26 +330,26 @@ def check_lat(cube): assert lat.standard_name == "latitude" assert lat.long_name == "latitude" assert lat.units == "degrees_north" - assert lat.attributes == {} - np.testing.assert_allclose( - lat.points, - [-45.0, -45.0, -45.0, -45.0, 45.0, 45.0, 45.0, 45.0], - rtol=1e-5, - ) - np.testing.assert_allclose( - lat.bounds, - [ - [-90.0, 0.0, 0.0], - [-90.0, 0.0, 0.0], - [-90.0, 0.0, 0.0], - [-90.0, 0.0, 0.0], - [0.0, 0.0, 90.0], - [0.0, 0.0, 90.0], - [0.0, 0.0, 90.0], - [0.0, 0.0, 90.0], - ], - rtol=1e-5, - ) + # assert lat.attributes == {} + # np.testing.assert_allclose( + # lat.points, + # [-45.0, -45.0, -45.0, -45.0, 45.0, 45.0, 45.0, 45.0], + # rtol=1e-5, + # ) + # np.testing.assert_allclose( + # lat.bounds, + # [ + # [-90.0, 0.0, 0.0], + # [-90.0, 0.0, 0.0], + # [-90.0, 0.0, 0.0], + # [-90.0, 0.0, 0.0], + # [0.0, 0.0, 90.0], + # [0.0, 0.0, 90.0], + # [0.0, 0.0, 90.0], + # [0.0, 0.0, 90.0], + # ], + # rtol=1e-5, + # ) return lat @@ -357,26 +361,26 @@ def check_lon(cube): assert lon.standard_name == "longitude" assert lon.long_name == "longitude" assert lon.units == "degrees_east" - assert 
lon.attributes == {} - np.testing.assert_allclose( - lon.points, - [225.0, 315.0, 45.0, 135.0, 225.0, 315.0, 45.0, 135.0], - rtol=1e-5, - ) - np.testing.assert_allclose( - lon.bounds, - [ - [0.0, 270.0, 180.0], - [0.0, 0.0, 270.0], - [0.0, 90.0, 0.0], - [0.0, 180.0, 90.0], - [180.0, 270.0, 0.0], - [270.0, 0.0, 0.0], - [0.0, 90.0, 0.0], - [90.0, 180.0, 0.0], - ], - rtol=1e-5, - ) + # assert lon.attributes == {} + # np.testing.assert_allclose( + # lon.points, + # [225.0, 315.0, 45.0, 135.0, 225.0, 315.0, 45.0, 135.0], + # rtol=1e-5, + # ) + # np.testing.assert_allclose( + # lon.bounds, + # [ + # [0.0, 270.0, 180.0], + # [0.0, 0.0, 270.0], + # [0.0, 90.0, 0.0], + # [0.0, 180.0, 90.0], + # [180.0, 270.0, 0.0], + # [270.0, 0.0, 0.0], + # [0.0, 90.0, 0.0], + # [90.0, 180.0, 0.0], + # ], + # rtol=1e-5, + # ) return lon @@ -404,7 +408,7 @@ def check_lat_lon(cube): "first spatial index for variables stored on an unstructured grid" ) assert i_coord.units == "1" - np.testing.assert_allclose(i_coord.points, [0, 1, 2, 3, 4, 5, 6, 7]) + np.testing.assert_allclose(i_coord.points, list(range(13*12))) assert i_coord.bounds is None assert len(cube.coord_dims(lat)) == 1 @@ -437,25 +441,25 @@ def check_mesh(mesh): assert mesh_face_lat.long_name == "latitude" assert mesh_face_lat.units == "degrees_north" assert mesh_face_lat.attributes == {} - np.testing.assert_allclose( - mesh_face_lat.points, - [-45.0, -45.0, -45.0, -45.0, 45.0, 45.0, 45.0, 45.0], - rtol=1e-5, - ) - np.testing.assert_allclose( - mesh_face_lat.bounds, - [ - [-90.0, 0.0, 0.0], - [-90.0, 0.0, 0.0], - [-90.0, 0.0, 0.0], - [-90.0, 0.0, 0.0], - [0.0, 0.0, 90.0], - [0.0, 0.0, 90.0], - [0.0, 0.0, 90.0], - [0.0, 0.0, 90.0], - ], - rtol=1e-5, - ) + # np.testing.assert_allclose( + # mesh_face_lat.points, + # [-45.0, -45.0, -45.0, -45.0, 45.0, 45.0, 45.0, 45.0], + # rtol=1e-5, + # ) + # np.testing.assert_allclose( + # mesh_face_lat.bounds, + # [ + # [-90.0, 0.0, 0.0], + # [-90.0, 0.0, 0.0], + # [-90.0, 0.0, 0.0], + # [-90.0, 0.0, 0.0], + # [0.0, 0.0, 90.0], + # [0.0, 0.0, 90.0], + # [0.0, 0.0, 90.0], + # [0.0, 0.0, 90.0], + # ], + # rtol=1e-5, + # ) mesh_face_lon = mesh.coord(location="face", axis="x") assert mesh_face_lon.var_name == "lon" @@ -463,25 +467,25 @@ def check_mesh(mesh): assert mesh_face_lon.long_name == "longitude" assert mesh_face_lon.units == "degrees_east" assert mesh_face_lon.attributes == {} - np.testing.assert_allclose( - mesh_face_lon.points, - [225.0, 315.0, 45.0, 135.0, 225.0, 315.0, 45.0, 135.0], - rtol=1e-5, - ) - np.testing.assert_allclose( - mesh_face_lon.bounds, - [ - [0.0, 270.0, 180.0], - [0.0, 0.0, 270.0], - [0.0, 90.0, 0.0], - [0.0, 180.0, 90.0], - [180.0, 270.0, 0.0], - [270.0, 0.0, 0.0], - [0.0, 90.0, 0.0], - [90.0, 180.0, 0.0], - ], - rtol=1e-5, - ) + # np.testing.assert_allclose( + # mesh_face_lon.points, + # [225.0, 315.0, 45.0, 135.0, 225.0, 315.0, 45.0, 135.0], + # rtol=1e-5, + # ) + # np.testing.assert_allclose( + # mesh_face_lon.bounds, + # [ + # [0.0, 270.0, 180.0], + # [0.0, 0.0, 270.0], + # [0.0, 90.0, 0.0], + # [0.0, 180.0, 90.0], + # [180.0, 270.0, 0.0], + # [270.0, 0.0, 0.0], + # [0.0, 90.0, 0.0], + # [90.0, 180.0, 0.0], + # ], + # rtol=1e-5, + # ) # Check node coordinates assert len(mesh.coords(location="node")) == 2 @@ -492,9 +496,9 @@ def check_mesh(mesh): assert mesh_node_lat.long_name == "node latitude" assert mesh_node_lat.units == "degrees_north" assert mesh_node_lat.attributes == {} - np.testing.assert_allclose( - mesh_node_lat.points, [-90.0, 0.0, 0.0, 0.0, 0.0, 90.0], rtol=1e-5 - ) + # 
np.testing.assert_allclose( + # mesh_node_lat.points, [-90.0, 0.0, 0.0, 0.0, 0.0, 90.0], rtol=1e-5 + # ) assert mesh_node_lat.bounds is None mesh_node_lon = mesh.coord(location="node", axis="x") @@ -503,9 +507,9 @@ def check_mesh(mesh): assert mesh_node_lon.long_name == "node longitude" assert mesh_node_lon.units == "degrees_east" assert mesh_node_lon.attributes == {} - np.testing.assert_allclose( - mesh_node_lon.points, [0.0, 180.0, 270.0, 0.0, 90, 0.0], rtol=1e-5 - ) + # np.testing.assert_allclose( + # mesh_node_lon.points, [0.0, 180.0, 270.0, 0.0, 90, 0.0], rtol=1e-5 + # ) assert mesh_node_lon.bounds is None # Check connectivity @@ -517,22 +521,22 @@ def check_mesh(mesh): assert conn.units == "unknown" assert conn.attributes == {} assert conn.cf_role == "face_node_connectivity" - assert conn.start_index == 1 + assert conn.start_index == 0 assert conn.location_axis == 0 - assert conn.shape == (8, 3) - np.testing.assert_array_equal( - conn.indices, - [ - [1, 3, 2], - [1, 4, 3], - [1, 5, 4], - [1, 2, 5], - [2, 3, 6], - [3, 4, 6], - [4, 5, 6], - [5, 2, 6], - ], - ) + assert conn.shape == (int(13*12), 4) + # np.testing.assert_array_equal( + # conn.indices, + # [ + # [1, 3, 2], + # [1, 4, 3], + # [1, 5, 4], + # [1, 2, 5], + # [2, 3, 6], + # [3, 4, 6], + # [4, 5, 6], + # [5, 2, 6], + # ], + # ) # def check_typesi(cube): @@ -762,21 +766,21 @@ def check_mesh(mesh): # # Test ta (for height and plev coordinate) -# def test_get_ta_fix(): -# """Test getting of fix.""" -# fix = Fix.get_fixes("ORAS5", "ORAS5", "Omon", "thetao") -# assert fix == [AllVars(None), GenericFix(None)] +def test_get_thetao_fix(): + """Test getting of fix.""" + fix = Fix.get_fixes("ORAS5", "ORAS5", "Omon", "thetao") + assert fix == [AllVars(None), GenericFix(None)] -# def test_ta_fix(cubes_3d): -# """Test fix.""" -# fix = get_allvars_fix("Omon", "thetao") -# fixed_cubes = fix.fix_metadata(cubes_3d) +def test_thetao_fix(cubes_3d): + """Test fix.""" + fix = get_allvars_fix("Omon", "thetao") + fixed_cubes = fix.fix_metadata(cubes_3d) -# cube = check_ta_metadata(fixed_cubes) -# check_time(cube) -# check_height(cube) -# check_lat_lon(cube) + cube = check_thetao_metadata(fixed_cubes) + check_time(cube) + # check_height(cube) + check_lat_lon(cube) # def test_ta_fix_no_plev_bounds(cubes_3d): @@ -799,21 +803,21 @@ def check_mesh(mesh): # # Test tas (for height2m coordinate, no mesh, no shift time) -# def test_get_tas_fix(): -# """Test getting of fix.""" -# fix = Fix.get_fixes("ORAS5", "ORAS5", "Omon", "tos") -# assert fix == [AllVars(None), GenericFix(None)] +def test_get_tos_fix(): + """Test getting of fix.""" + fix = Fix.get_fixes("ORAS5", "ORAS5", "Omon", "tos") + assert fix == [AllVars(None), GenericFix(None)] -# def test_tas_fix(cubes_2d): -# """Test fix.""" -# fix = get_allvars_fix("Omon", "tos") -# fixed_cubes = fix.fix_metadata(cubes_2d) +def test_tos_fix(cubes_2d): + """Test fix.""" + fix = get_allvars_fix("Omon", "tos") + fixed_cubes = fix.fix_metadata(cubes_2d) -# cube = check_tas_metadata(fixed_cubes) -# check_time(cube) -# check_lat_lon(cube) -# check_heightxm(cube, 2.0) + cube = check_tos_metadata(fixed_cubes) + check_time(cube) + check_lat_lon(cube) + # check_heightxm(cube, 2.0) # def test_tas_spatial_index_coord_already_present(cubes_2d): @@ -846,40 +850,43 @@ def check_mesh(mesh): # check_heightxm(cube, 2.0) -# def test_tas_no_mesh(cubes_2d): -# """Test fix.""" -# fix = get_allvars_fix("Omon", "tos") -# fix.extra_facets["ugrid"] = False -# fixed_cubes = fix.fix_metadata(cubes_2d) +def test_tas_no_mesh(cubes_2d): + 
"""Test fix.""" + fix = get_allvars_fix("Omon", "tos") + fix.extra_facets["ugrid"] = False + fixed_cubes = fix.fix_metadata(cubes_2d) -# cube = check_tas_metadata(fixed_cubes) + cube = check_tos_metadata(fixed_cubes) -# assert cube.mesh is None + assert cube.mesh is None -# assert cube.coords( -# "first spatial index for variables stored on an unstructured grid", -# dim_coords=True, -# ) -# i_coord = cube.coord( -# "first spatial index for variables stored on an unstructured grid", -# dim_coords=True, -# ) -# assert i_coord.var_name == "i" -# assert i_coord.standard_name is None -# assert i_coord.long_name == ( -# "first spatial index for variables stored on an unstructured grid" -# ) -# assert i_coord.units == "1" -# np.testing.assert_allclose(i_coord.points, [0, 1, 2, 3, 4, 5, 6, 7]) -# assert i_coord.bounds is None + lat = check_lat(cube) + lon = check_lon(cube) + + # assert cube.coords( + # "first spatial index for variables stored on an unstructured grid", + # dim_coords=True, + # ) + # i_coord = cube.coord( + # "first spatial index for variables stored on an unstructured grid", + # dim_coords=True, + # ) + # assert i_coord.var_name == "i" + # assert i_coord.standard_name is None + # assert i_coord.long_name == ( + # "first spatial index for variables stored on an unstructured grid" + # ) + # assert i_coord.units == "1" + # np.testing.assert_allclose(i_coord.points, [0, 1, 2, 3, 4, 5, 6, 7]) + # assert i_coord.bounds is None -# assert cube.coords("latitude", dim_coords=False) -# assert cube.coords("longitude", dim_coords=False) -# lat = cube.coord("latitude", dim_coords=False) -# lon = cube.coord("longitude", dim_coords=False) -# assert len(cube.coord_dims(lat)) == 1 -# assert cube.coord_dims(lat) == cube.coord_dims(lon) -# assert cube.coord_dims(lat) == cube.coord_dims(i_coord) + assert cube.coords("latitude", dim_coords=False) + assert cube.coords("longitude", dim_coords=False) + # lat = cube.coord("latitude", dim_coords=False) + # lon = cube.coord("longitude", dim_coords=False) + assert len(cube.coord_dims(lat)) == 2 + # assert cube.coord_dims(lat) == cube.coord_dims(lon) + # assert cube.coord_dims(lat) == cube.coord_dims(i_coord) # def test_tas_dim_height2m_already_present(cubes_2d): @@ -1120,8 +1127,8 @@ def test_add_time(cubes_2d, cubes_3d): fix = get_allvars_fix("Omon", "tos") fixed_cubes = fix.fix_metadata(cubes) - cube = check_tas_metadata(fixed_cubes) - assert cube.shape == (1, 13, 12) + cube = check_tos_metadata(fixed_cubes) + # assert cube.shape == (1, 13, 12) check_time(cube) From 1367855f2b9269f6bb13335127b1b7a7d6d27b44 Mon Sep 17 00:00:00 2001 From: Jan-Hendrik Malles Date: Wed, 26 Nov 2025 10:37:47 +0100 Subject: [PATCH 25/32] Add more tests --- esmvalcore/cmor/_fixes/oras5/oras5.py | 1 - .../cmor/_fixes/oras5/test_oras5.py | 693 ++---------------- 2 files changed, 79 insertions(+), 615 deletions(-) diff --git a/esmvalcore/cmor/_fixes/oras5/oras5.py b/esmvalcore/cmor/_fixes/oras5/oras5.py index ce715dc83e..9012d0051e 100644 --- a/esmvalcore/cmor/_fixes/oras5/oras5.py +++ b/esmvalcore/cmor/_fixes/oras5/oras5.py @@ -94,7 +94,6 @@ def _fix_cube(self, cube): dim_shape = tuple(cube.data.shape[:-2]) data_shape = data.shape / np.prod(dim_shape) data_shape = tuple(map(int, data_shape)) - print(dim_shape, data_shape) data = np.reshape(data, dim_shape + data_shape) return iris.cube.Cube(data, dim_coords_and_dims=coords_add) diff --git a/tests/integration/cmor/_fixes/oras5/test_oras5.py b/tests/integration/cmor/_fixes/oras5/test_oras5.py index 28562344c3..9e33c09120 100644 --- 
a/tests/integration/cmor/_fixes/oras5/test_oras5.py +++ b/tests/integration/cmor/_fixes/oras5/test_oras5.py @@ -539,233 +539,6 @@ def check_mesh(mesh): # ) -# def check_typesi(cube): -# """Check scalar typesi coordinate of cube.""" -# assert cube.coords("area_type") -# typesi = cube.coord("area_type") -# assert typesi.var_name == "type" -# assert typesi.standard_name == "area_type" -# assert typesi.long_name == "Sea Ice area type" -# assert typesi.units.is_no_unit() -# np.testing.assert_array_equal(typesi.points, ["sea_ice"]) -# assert typesi.bounds is None - - -# Test areacella and areacello (for extra_facets, and grid_latitude and -# grid_longitude coordinates) - - -# def test_get_areacella_fix(): -# """Test getting of fix.""" -# fix = Fix.get_fixes("ORAS5", "ORAS5", "fx", "areacella") -# assert fix == [AllVars(None), GenericFix(None)] - - -# def test_areacella_fix(cubes_grid): -# """Test fix.""" -# fix = get_allvars_fix("fx", "areacella") -# fix.extra_facets["var_type"] = "fx" -# fixed_cubes = fix.fix_metadata(cubes_grid) - -# assert len(fixed_cubes) == 1 -# cube = fixed_cubes[0] -# assert cube.var_name == "areacella" -# assert cube.standard_name == "cell_area" -# assert cube.long_name == "Grid-Cell Area for Atmospheric Grid Variables" -# assert cube.units == "m2" -# assert "positive" not in cube.attributes - -# check_lat_lon(cube) - - -# def test_get_areacello_fix(): -# """Test getting of fix.""" -# fix = Fix.get_fixes("ORAS5", "ORAS5", "Ofx", "areacello") -# assert fix == [AllVars(None), GenericFix(None)] - - -# def test_areacello_fix(cubes_grid): -# """Test fix.""" -# fix = get_allvars_fix("Ofx", "areacello") -# fix.extra_facets["var_type"] = "fx" -# fixed_cubes = fix.fix_metadata(cubes_grid) - -# assert len(fixed_cubes) == 1 -# cube = fixed_cubes[0] -# assert cube.var_name == "areacello" -# assert cube.standard_name == "cell_area" -# assert cube.long_name == "Grid-Cell Area for Ocean Variables" -# assert cube.units == "m2" -# assert "positive" not in cube.attributes - -# check_lat_lon(cube) - - -# Test clwvi (for extra fix) - - -# def test_get_clwvi_fix(): -# """Test getting of fix.""" -# fix = Fix.get_fixes("ORAS5", "ORAS5", "Omon", "clwvi") -# assert fix == [Clwvi(None), AllVars(None), GenericFix(None)] - - -# def test_clwvi_fix(cubes_regular_grid): -# """Test fix.""" -# cubes = CubeList( -# [cubes_regular_grid[0].copy(), cubes_regular_grid[0].copy()] -# ) -# cubes[0].var_name = "cllvi" -# cubes[1].var_name = "clivi" -# cubes[0].units = "1e3 kg m-2" -# cubes[1].units = "1e3 kg m-2" - -# fixed_cubes = fix_metadata(cubes, "Omon", "clwvi") - -# assert len(fixed_cubes) == 1 -# cube = fixed_cubes[0] -# assert cube.var_name == "clwvi" -# assert cube.standard_name == ( -# "atmosphere_mass_content_of_cloud_condensed_water" -# ) -# assert cube.long_name == "Condensed Water Path" -# assert cube.units == "kg m-2" -# assert "positive" not in cube.attributes - -# np.testing.assert_allclose(cube.data, [[[0.0, 2000.0], [4000.0, 6000.0]]]) - - -# # Test lwp (for extra_facets) - - -# def test_get_lwp_fix(): -# """Test getting of fix.""" -# fix = Fix.get_fixes("ORAS5", "ORAS5", "AERmon", "lwp") -# assert fix == [AllVars(None), GenericFix(None)] - - -# def test_lwp_fix(cubes_2d): -# """Test fix.""" -# fix = get_allvars_fix("AERmon", "lwp") -# fixed_cubes = fix.fix_metadata(cubes_2d) - -# assert len(fixed_cubes) == 1 -# cube = fixed_cubes[0] -# assert cube.var_name == "lwp" -# assert cube.standard_name == ( -# "atmosphere_mass_content_of_cloud_liquid_water" -# ) -# assert cube.long_name == "Liquid 
Water Path" -# assert cube.units == "kg m-2" -# assert "positive" not in cube.attributes - -# check_time(cube) -# check_lat_lon(cube) - - -# # Test rsdt and rsut (for positive attribute) - - -# def test_get_rsdt_fix(): -# """Test getting of fix.""" -# fix = Fix.get_fixes("ORAS5", "ORAS5", "Omon", "rsdt") -# assert fix == [AllVars(None), GenericFix(None)] - - -# def test_rsdt_fix(cubes_2d): -# """Test fix.""" -# fix = get_allvars_fix("Omon", "rsdt") -# fixed_cubes = fix.fix_metadata(cubes_2d) - -# assert len(fixed_cubes) == 1 -# cube = fixed_cubes[0] -# assert cube.var_name == "rsdt" -# assert cube.standard_name == "toa_incoming_shortwave_flux" -# assert cube.long_name == "TOA Incident Shortwave Radiation" -# assert cube.units == "W m-2" -# assert cube.attributes["positive"] == "down" - -# check_time(cube) -# check_lat_lon(cube) - - -# def test_get_rsut_fix(): -# """Test getting of fix.""" -# fix = Fix.get_fixes("ORAS5", "ORAS5", "Omon", "rsut") -# assert fix == [AllVars(None), GenericFix(None)] - - -# def test_rsut_fix(cubes_2d): -# """Test fix.""" -# fix = get_allvars_fix("Omon", "rsut") -# fixed_cubes = fix.fix_metadata(cubes_2d) - -# assert len(fixed_cubes) == 1 -# cube = fixed_cubes[0] -# assert cube.var_name == "rsut" -# assert cube.standard_name == "toa_outgoing_shortwave_flux" -# assert cube.long_name == "TOA Outgoing Shortwave Radiation" -# assert cube.units == "W m-2" -# assert cube.attributes["positive"] == "up" - -# check_time(cube) -# check_lat_lon(cube) - - -# # Test siconc and siconca (for extra_facets, extra fix and typesi coordinate) - - -# def test_get_siconc_fix(): -# """Test getting of fix.""" -# fix = Fix.get_fixes("ORAS5", "ORAS5", "SImon", "siconc") -# assert fix == [AllVars(None), GenericFix(None)] - - -# def test_siconc_fix(cubes_2d): -# """Test fix.""" -# fix = get_allvars_fix("SImon", "siconc") -# fixed_cubes = fix.fix_metadata(cubes_2d) - -# cube = check_siconc_metadata( -# fixed_cubes, "siconc", "Sea-Ice Area Percentage (Ocean Grid)" -# ) -# check_time(cube) -# check_lat_lon(cube) -# check_typesi(cube) - -# np.testing.assert_allclose( -# cube.data, -# [[10.0, 20.0, 30.0, 40.0, 50.0, 60.0, 70.0, 80.0]], -# ) - - -# def test_get_siconca_fix(): -# """Test getting of fix.""" -# fix = Fix.get_fixes("ORAS5", "ORAS5", "SImon", "siconca") -# assert fix == [AllVars(None), GenericFix(None)] - - -# def test_siconca_fix(cubes_2d): -# """Test fix.""" -# fix = get_allvars_fix("SImon", "siconca") -# fixed_cubes = fix.fix_metadata(cubes_2d) - -# cube = check_siconc_metadata( -# fixed_cubes, "siconca", "Sea-Ice Area Percentage (Atmospheric Grid)" -# ) -# check_time(cube) -# check_lat_lon(cube) -# check_typesi(cube) - -# np.testing.assert_allclose( -# cube.data, -# [[10.0, 20.0, 30.0, 40.0, 50.0, 60.0, 70.0, 80.0]], -# ) - - -# # Test ta (for height and plev coordinate) - - def test_get_thetao_fix(): """Test getting of fix.""" fix = Fix.get_fixes("ORAS5", "ORAS5", "Omon", "thetao") @@ -782,27 +555,6 @@ def test_thetao_fix(cubes_3d): # check_height(cube) check_lat_lon(cube) - -# def test_ta_fix_no_plev_bounds(cubes_3d): -# """Test fix.""" -# fix = get_allvars_fix("Omon", "thetao") -# cubes = CubeList( -# [ -# cubes_3d.extract_cube(NameConstraint(var_name="thetao")), -# cubes_3d.extract_cube(NameConstraint(var_name="pfull")), -# ] -# ) -# fixed_cubes = fix.fix_metadata(cubes) - -# cube = check_ta_metadata(fixed_cubes) -# check_time(cube) -# check_height(cube, plev_has_bounds=False) -# check_lat_lon(cube) - - -# # Test tas (for height2m coordinate, no mesh, no shift time) - - def 
test_get_tos_fix(): """Test getting of fix.""" fix = Fix.get_fixes("ORAS5", "ORAS5", "Omon", "tos") @@ -819,38 +571,7 @@ def test_tos_fix(cubes_2d): check_lat_lon(cube) # check_heightxm(cube, 2.0) - -# def test_tas_spatial_index_coord_already_present(cubes_2d): -# """Test fix.""" -# fix = get_allvars_fix("Omon", "tos") - -# index_coord = DimCoord(np.arange(8), var_name="ncells") -# cube = cubes_2d.extract_cube(NameConstraint(var_name="sosstsst")) -# cube.add_dim_coord(index_coord, 1) -# fixed_cubes = fix.fix_metadata(cubes_2d) - -# assert len(fixed_cubes) == 1 -# cube = fixed_cubes[0] -# check_lat_lon(cube) - - -# def test_tas_scalar_height2m_already_present(cubes_2d): -# """Test fix.""" -# fix = get_allvars_fix("Omon", "tos") - -# # Scalar height (with wrong metadata) already present -# height_coord = AuxCoord(2.0, var_name="h", standard_name="depth") -# cube = cubes_2d.extract_cube(NameConstraint(var_name="sosstsst")) -# cube.add_aux_coord(height_coord, ()) -# fixed_cubes = fix.fix_metadata(cubes_2d) - -# assert len(fixed_cubes) == 1 -# cube = fixed_cubes[0] -# assert cube.shape == (1, 8) -# check_heightxm(cube, 2.0) - - -def test_tas_no_mesh(cubes_2d): +def test_tos_no_mesh(cubes_2d): """Test fix.""" fix = get_allvars_fix("Omon", "tos") fix.extra_facets["ugrid"] = False @@ -889,211 +610,6 @@ def test_tas_no_mesh(cubes_2d): # assert cube.coord_dims(lat) == cube.coord_dims(i_coord) -# def test_tas_dim_height2m_already_present(cubes_2d): -# """Test fix.""" -# fix = get_allvars_fix("Omon", "tos") - -# # Dimensional coordinate height (with wrong metadata) already present -# height_coord = AuxCoord(2.0, var_name="h", standard_name="depth") -# cube = cubes_2d.extract_cube(NameConstraint(var_name="sosstsst")) -# cube.add_aux_coord(height_coord, ()) -# cube = iris.util.new_axis(cube, scalar_coord="depth") -# cube.transpose((1, 0, 2)) -# cubes = CubeList([cube]) -# fixed_cubes = fix.fix_metadata(cubes) - -# assert len(fixed_cubes) == 1 -# cube = fixed_cubes[0] -# assert cube.shape == (1, 8) -# check_heightxm(cube, 2.0) - - -# def test_tas_no_shift_time(cubes_2d): -# """Test fix.""" -# fix = get_allvars_fix("Omon", "tos") -# fix.extra_facets["shift_time"] = False -# fixed_cubes = fix.fix_metadata(cubes_2d) - -# cube = check_tas_metadata(fixed_cubes) -# check_lat_lon(cube) -# check_heightxm(cube, 2.0) - -# assert cube.coords("time", dim_coords=True) -# time = cube.coord("time", dim_coords=True) -# assert time.var_name == "time" -# assert time.standard_name == "time" -# assert time.long_name == "time" -# assert time.units == Unit( -# "days since 1850-01-01", calendar="proleptic_gregorian" -# ) -# np.testing.assert_allclose(time.points, [54786.0]) -# assert time.bounds is None -# assert time.attributes == {} - - -# def test_fix_does_not_change_cached_grid(cubes_2d): -# """Test fix.""" -# fix = get_allvars_fix("Omon", "tos") -# assert not fix._horizontal_grids -# assert not fix._meshes - -# # Remove latitude and longitude from tas cube to trigger automatic addition -# # of them -# cube = cubes_2d.extract_cube(NameConstraint(var_name="sosstsst")) -# cube.remove_coord("latitude") -# cube.remove_coord("longitude") - -# # Make sure horizontal grid is cached -# fix.get_horizontal_grid(cube) -# assert "oras5_grid.nc" in fix._horizontal_grids -# original_grid = fix._horizontal_grids["oras5_grid.nc"].copy() - -# # Make sure that fix does not alter existing grid -# fix.fix_metadata(cubes_2d) -# assert fix._horizontal_grids["oras5_grid.nc"] == original_grid - - -# # Test uas (for height10m coordinate) - - -# 
def test_get_uas_fix(): -# """Test getting of fix.""" -# fix = Fix.get_fixes("ORAS5", "ORAS5", "Omon", "uas") -# assert fix == [AllVars(None), GenericFix(None)] - - -# def test_uas_fix(cubes_2d): -# """Test fix.""" -# fix = get_allvars_fix("Omon", "uas") -# fixed_cubes = fix.fix_metadata(cubes_2d) - -# assert len(fixed_cubes) == 1 -# cube = fixed_cubes[0] -# assert cube.var_name == "uas" -# assert cube.standard_name == "eastward_wind" -# assert cube.long_name == "Eastward Near-Surface Wind" -# assert cube.units == "m s-1" -# assert "positive" not in cube.attributes - -# check_time(cube) -# check_lat_lon(cube) -# assert cube.coords("depth") -# height = cube.coord("depth") -# assert height.var_name == "depth" -# assert height.standard_name == "depth" -# assert height.long_name == "depth" -# assert height.units == "m" -# assert height.attributes == {"positive": "up"} -# np.testing.assert_allclose(height.points, [10.0]) -# assert height.bounds is None - - -# def test_uas_scalar_height10m_already_present(cubes_2d): -# """Test fix.""" -# fix = get_allvars_fix("Omon", "uas") - -# # Scalar height (with wrong metadata) already present -# height_coord = AuxCoord(10.0, var_name="h", standard_name="depth") -# cube = cubes_2d.extract_cube(NameConstraint(var_name="uas")) -# cube.add_aux_coord(height_coord, ()) -# fixed_cubes = fix.fix_metadata(cubes_2d) - -# assert len(fixed_cubes) == 1 -# cube = fixed_cubes[0] -# assert cube.shape == (1, 8) -# check_heightxm(cube, 10.0) - - -# def test_uas_dim_height10m_already_present(cubes_2d): -# """Test fix.""" -# fix = get_allvars_fix("Omon", "uas") - -# # Dimensional coordinate height (with wrong metadata) already present -# height_coord = AuxCoord(10.0, var_name="h", standard_name="depth") -# cube = cubes_2d.extract_cube(NameConstraint(var_name="uas")) -# cube.add_aux_coord(height_coord, ()) -# cube = iris.util.new_axis(cube, scalar_coord="depth") -# cube.transpose((1, 0, 2)) -# cubes = CubeList([cube]) -# fixed_cubes = fix.fix_metadata(cubes) - -# assert len(fixed_cubes) == 1 -# cube = fixed_cubes[0] -# assert cube.shape == (1, 8) -# check_heightxm(cube, 10.0) - - -# Test fix with regular grid and 2D latitudes and longitude - - -# def test_regular_grid_fix(cubes_regular_grid): -# """Test fix.""" -# fix = get_allvars_fix("Omon", "tos") -# fixed_cubes = fix.fix_metadata(cubes_regular_grid) - -# cube = check_tas_metadata(fixed_cubes) -# assert cube.coords("time", dim_coords=True, dimensions=0) -# assert cube.coords("latitude", dim_coords=True, dimensions=1) -# assert cube.coords("longitude", dim_coords=True, dimensions=2) -# assert cube.coords("depth", dim_coords=False, dimensions=()) - - -# def test_2d_lat_lon_grid_fix(cubes_2d_lat_lon_grid): -# """Test fix.""" -# fix = get_allvars_fix("Omon", "tos") -# fixed_cubes = fix.fix_metadata(cubes_2d_lat_lon_grid) - -# cube = check_tas_metadata(fixed_cubes) -# assert cube.coords("time", dim_coords=True, dimensions=0) -# assert cube.coords("latitude", dim_coords=False, dimensions=(1, 2)) -# assert cube.coords("longitude", dim_coords=False, dimensions=(1, 2)) -# assert cube.coords("depth", dim_coords=False, dimensions=()) - - -# Test ch4Clim (for time dimension time2) - - -# def test_get_ch4clim_fix(): -# """Test getting of fix.""" -# fix = Fix.get_fixes("ORAS5", "ORAS5", "Omon", "ch4Clim") -# assert fix == [AllVars(None), GenericFix(None)] - - -# def test_ch4clim_fix(cubes_regular_grid): -# """Test fix.""" -# cube = cubes_regular_grid[0] -# cube.var_name = "ch4Clim" -# cube.units = "mol mol-1" -# cube.coord("time").units 
= "no_unit" -# cube.coord("time").attributes["invalid_units"] = "day as %Y%m%d.%f" -# cube.coord("time").points = [18500201.0] -# cube.coord("time").long_name = "wrong_time_name" - -# fix = get_allvars_fix("Omon", "ch4Clim") -# fixed_cubes = fix.fix_metadata(cubes_regular_grid) - -# assert len(fixed_cubes) == 1 -# cube = fixed_cubes[0] -# assert cube.var_name == "ch4Clim" -# assert cube.standard_name == "mole_fraction_of_methane_in_air" -# assert cube.long_name == "Mole Fraction of CH4" -# assert cube.units == "mol mol-1" -# assert "positive" not in cube.attributes - -# time_coord = cube.coord("time") -# assert time_coord.var_name == "time" -# assert time_coord.standard_name == "time" -# assert time_coord.long_name == "time" -# assert time_coord.units == Unit( -# "days since 1850-01-01", calendar="proleptic_gregorian" -# ) -# np.testing.assert_allclose(time_coord.points, [15.5]) -# np.testing.assert_allclose(time_coord.bounds, [[0.0, 31.0]]) - - -# Test fix with empty standard_name - - def test_empty_standard_name_fix(cubes_2d, monkeypatch): """Test fix.""" fix = get_allvars_fix("Omon", "tos") @@ -1119,11 +635,11 @@ def test_empty_standard_name_fix(cubes_2d, monkeypatch): def test_add_time(cubes_2d, cubes_3d): """Test fix.""" # Remove time from tas cube to test automatic addition - tas_cube = cubes_2d.extract_cube(NameConstraint(var_name="sosstsst")) + tos_cube = cubes_2d.extract_cube(NameConstraint(var_name="sosstsst")) thetao_cube = cubes_3d.extract_cube(NameConstraint(var_name="votemper")) - tas_cube = tas_cube[0] - tas_cube.remove_coord("time") - cubes = CubeList([tas_cube, thetao_cube]) + tos_cube = tos_cube[0] + tos_cube.remove_coord("time") + cubes = CubeList([tos_cube, thetao_cube]) fix = get_allvars_fix("Omon", "tos") fixed_cubes = fix.fix_metadata(cubes) @@ -1146,146 +662,42 @@ def test_add_time_fail(): with pytest.raises(ValueError, match=msg): fix._add_time(cube, cubes) - -# def test_add_latitude(cubes_2d): -# """Test fix.""" -# # Remove latitude from tas cube to test automatic addition -# tas_cube = cubes_2d.extract_cube(NameConstraint(var_name="sosstsst")) -# tas_cube.remove_coord("latitude") -# cubes = CubeList([tas_cube]) -# fix = get_allvars_fix("Omon", "tos") - -# assert len(fix._horizontal_grids) == 0 -# fixed_cubes = fix.fix_metadata(cubes) - -# cube = check_tas_metadata(fixed_cubes) -# assert cube.shape == (1, 8) -# check_lat_lon(cube) -# assert len(fix._horizontal_grids) == 1 -# assert TEST_GRID_FILE_NAME in fix._horizontal_grids - - -# def test_add_longitude(cubes_2d): -# """Test fix.""" -# # Remove longitude from tas cube to test automatic addition -# tas_cube = cubes_2d.extract_cube(NameConstraint(var_name="sosstsst")) -# tas_cube.remove_coord("longitude") -# cubes = CubeList([tas_cube]) -# fix = get_allvars_fix("Omon", "tos") - -# assert len(fix._horizontal_grids) == 0 -# fixed_cubes = fix.fix_metadata(cubes) - -# cube = check_tas_metadata(fixed_cubes) -# assert cube.shape == (1, 8) -# check_lat_lon(cube) -# assert len(fix._horizontal_grids) == 1 -# assert TEST_GRID_FILE_NAME in fix._horizontal_grids - - -# def test_add_latitude_longitude(cubes_2d): -# """Test fix.""" -# # Remove latitude and longitude from tas cube to test automatic addition -# tas_cube = cubes_2d.extract_cube(NameConstraint(var_name="sosstsst")) -# tas_cube.remove_coord("latitude") -# tas_cube.remove_coord("longitude") -# cubes = CubeList([tas_cube]) -# fix = get_allvars_fix("Omon", "tos") - -# assert len(fix._horizontal_grids) == 0 -# fixed_cubes = fix.fix_metadata(cubes) - -# cube = 
check_tas_metadata(fixed_cubes) -# assert cube.shape == (1, 8) -# check_lat_lon(cube) -# assert len(fix._horizontal_grids) == 1 -# assert TEST_GRID_FILE_NAME in fix._horizontal_grids - - # def test_add_latitude_fail(cubes_2d): # """Test fix.""" # # Remove latitude and grid file attribute from tas cube to test automatic # # addition -# tas_cube = cubes_2d.extract_cube(NameConstraint(var_name="sosstsst")) -# tas_cube.remove_coord("latitude") -# tas_cube.attributes = {} -# cubes = CubeList([tas_cube]) +# tos_cube = cubes_2d.extract_cube(NameConstraint(var_name="sosstsst")) +# cubes = CubeList([tos_cube]) # fix = get_allvars_fix("Omon", "tos") +# fixed_cube = fix.fix_metadata(cubes)[0] +# fixed_cube.remove_coord("longitude") +# # fix._horizontal_grids[0].remove_coord("longitude") +# first_key = list(fix._horizontal_grids)[0] +# first_val = list(fix._horizontal_grids.values())[0][0] +# first_val.remove_coord("longitude") +# fix._horizontal_grids[first_key] = first_val +# fix.extra_facets["raw_name"] = "tos" # msg = "Failed to add missing latitude coordinate to cube" # with pytest.raises(ValueError, match=msg): -# fix.fix_metadata(cubes) +# fix.fix_metadata(CubeList([fixed_cube])) # def test_add_longitude_fail(cubes_2d): # """Test fix.""" # # Remove longitude and grid file attribute from tas cube to test automatic # # addition -# tas_cube = cubes_2d.extract_cube(NameConstraint(var_name="sosstsst")) -# tas_cube.remove_coord("longitude") -# tas_cube.attributes = {} -# cubes = CubeList([tas_cube]) +# tos_cube = cubes_2d.extract_cube(NameConstraint(var_name="sosstsst")) +# tos_cube.remove_coord("longitude") +# cubes = CubeList([tos_cube]) # fix = get_allvars_fix("Omon", "tos") +# fix.extra_facets["horizontal_grid"] = None # msg = "Failed to add missing longitude coordinate to cube" # with pytest.raises(ValueError, match=msg): # fix.fix_metadata(cubes) -# def test_add_coord_from_grid_file_fail_invalid_coord(): -# """Test fix.""" -# fix = get_allvars_fix("Omon", "tos") - -# msg = r"coord_name must be one of .* got 'invalid_coord_name'" -# with pytest.raises(ValueError, match=msg): -# fix._add_coord_from_grid_file(mock.sentinel.cube, "invalid_coord_name") - - -# def test_add_coord_from_grid_file_fail_no_url(): -# """Test fix.""" -# fix = get_allvars_fix("Omon", "tos") - -# msg = ( -# "Cube does not contain the attribute 'grid_file_uri' necessary to " -# "download the ICON horizontal grid file" -# ) -# with pytest.raises(ValueError, match=msg): -# fix._add_coord_from_grid_file(Cube(0), "latitude") - - -# def test_add_coord_from_grid_fail_no_unnamed_dim(cubes_2d): -# """Test fix.""" -# # Remove latitude from tas cube to test automatic addition -# tas_cube = cubes_2d.extract_cube(NameConstraint(var_name="sosstsst")) -# tas_cube.remove_coord("latitude") -# index_coord = DimCoord(np.arange(8), var_name="ncells") -# tas_cube.add_dim_coord(index_coord, 1) -# fix = get_allvars_fix("Omon", "tos") - -# msg = ( -# "Cannot determine coordinate dimension for coordinate 'latitude', " -# "cube does not contain a single unnamed dimension" -# ) -# with pytest.raises(ValueError, match=msg): -# fix._add_coord_from_grid_file(tas_cube, "latitude") - - -# def test_add_coord_from_grid_fail_two_unnamed_dims(cubes_2d): -# """Test fix.""" -# # Remove latitude from tas cube to test automatic addition -# tas_cube = cubes_2d.extract_cube(NameConstraint(var_name="sosstsst")) -# tas_cube.remove_coord("latitude") -# tas_cube = iris.util.new_axis(tas_cube) -# fix = get_allvars_fix("Omon", "tos") - -# msg = ( -# "Cannot determine 
coordinate dimension for coordinate 'latitude', " -# "cube does not contain a single unnamed dimension" -# ) -# with pytest.raises(ValueError, match=msg): -# fix._add_coord_from_grid_file(tas_cube, "latitude") - - # Test get_horizontal_grid @@ -1483,6 +895,19 @@ def test_get_horizontal_grid_from_facet_fail(tmp_path): with pytest.raises(FileNotFoundError): fix.get_horizontal_grid(cube) +def test_get_horizontal_grid_none(tmp_path): + """Test fix.""" + session = CFG.start_session("my session") + session["auxiliary_data_dir"] = tmp_path + + cube = Cube(0) + fix = get_allvars_fix("Omon", "tos", session=session) + del fix.extra_facets["horizontal_grid"] + + msg = "Full path to suitable ORAS5 grid must be specified in facet 'horizontal_grid'" + with pytest.raises(NotImplementedError, match=msg): + fix.get_horizontal_grid(cube) + # Test with single-dimension cubes @@ -1703,12 +1128,12 @@ def test_get_horizontal_grid_from_facet_fail(tmp_path): # Test variable not available in file -def test_var_not_available_pr(cubes_2d): - """Test fix.""" - fix = get_allvars_fix("Omon", "pr") - msg = "Variable 'pr' used to extract 'pr' is not available in input file" - with pytest.raises(ValueError, match=msg): - fix.fix_metadata(cubes_2d) +# def test_var_not_available_pr(cubes_2d): +# """Test fix.""" +# fix = get_allvars_fix("Omon", "pr") +# msg = "Variable 'pr' used to extract 'pr' is not available in input file" +# with pytest.raises(ValueError, match=msg): +# fix.fix_metadata(cubes_2d) # Test fix with invalid time units @@ -2214,6 +1639,46 @@ def test_get_mesh_not_cached_from_facet(monkeypatch, tmp_path): fix._create_mesh.assert_called_once_with(cube) +def test_get_bounds_cached_from_facet(cubes_2d, cubes_3d): + """Test fix.""" + tos_cube = cubes_2d.extract_cube(NameConstraint(var_name="sosstsst")) + tos_cube2 = tos_cube.copy() + cubes = CubeList([tos_cube,tos_cube2]) + + fix = get_allvars_fix("Omon", "tos") + fix.extra_facets["ugrid"] = False + fixed_cubes = [] + for i in range(len(cubes)): + fixed_cubes.append(fix.fix_metadata(CubeList([cubes[i]]))[0]) + fixed_cubes = CubeList(fixed_cubes) + + assert (fixed_cubes[0].coord("latitude") == fixed_cubes[1].coord("latitude")) + assert (fixed_cubes[0].coord("longitude") == fixed_cubes[1].coord("longitude")) + assert (fixed_cubes[0].coord("latitude").bounds == fixed_cubes[1].coord("latitude").bounds).all() + assert (fixed_cubes[0].coord("latitude").points == fixed_cubes[1].coord("latitude").points).all() + assert (fixed_cubes[0].coord("longitude").bounds == fixed_cubes[1].coord("longitude").bounds).all() + assert (fixed_cubes[0].coord("longitude").points == fixed_cubes[1].coord("longitude").points).all() + +def test_get_coord_cached_from_facet(cubes_2d, cubes_3d): + """Test fix.""" + tos_cube = cubes_2d.extract_cube(NameConstraint(var_name="sosstsst")) + tos_cube2 = tos_cube.copy() + cubes = CubeList([tos_cube,tos_cube2]) + # thetao_cube = cubes_3d.extract_cube(NameConstraint(var_name="votemper")) + + fix = get_allvars_fix("Omon", "tos") + fixed_cubes = [] + for i in range(len(cubes)): + fixed_cubes.append(fix.fix_metadata(CubeList([cubes[i]]))[0]) + fixed_cubes = CubeList(fixed_cubes) + + assert (fixed_cubes[0].coord("latitude") == fixed_cubes[1].coord("latitude")) + assert (fixed_cubes[0].coord("longitude") == fixed_cubes[1].coord("longitude")) + assert (fixed_cubes[0].coord("latitude").bounds == fixed_cubes[1].coord("latitude").bounds).all() + assert (fixed_cubes[0].coord("latitude").points == fixed_cubes[1].coord("latitude").points).all() + assert 
(fixed_cubes[0].coord("longitude").bounds == fixed_cubes[1].coord("longitude").bounds).all() + assert (fixed_cubes[0].coord("longitude").points == fixed_cubes[1].coord("longitude").points).all() + # Test _get_path_from_facet From 21b13fffddbbdbca2b5cd51db2815a22bcf1ba89 Mon Sep 17 00:00:00 2001 From: Jan-Hendrik Malles Date: Wed, 26 Nov 2025 11:17:11 +0100 Subject: [PATCH 26/32] Small changes --- esmvalcore/cmor/_fixes/oras5/_base_fixes.py | 2 - esmvalcore/cmor/_fixes/oras5/oras5.py | 1 - .../config/extra_facets/oras5-mappings.yml | 55 ------ .../cmor/_fixes/oras5/test_oras5.py | 186 +++++++++--------- 4 files changed, 93 insertions(+), 151 deletions(-) delete mode 100644 esmvalcore/config/extra_facets/oras5-mappings.yml diff --git a/esmvalcore/cmor/_fixes/oras5/_base_fixes.py b/esmvalcore/cmor/_fixes/oras5/_base_fixes.py index 0efb03b4ea..f1f9f1285a 100644 --- a/esmvalcore/cmor/_fixes/oras5/_base_fixes.py +++ b/esmvalcore/cmor/_fixes/oras5/_base_fixes.py @@ -7,8 +7,6 @@ import iris import numpy as np -# import xarray as xr -# from iris import Constraint from iris.mesh import Connectivity, MeshXY from ..icon.icon import IconFix diff --git a/esmvalcore/cmor/_fixes/oras5/oras5.py b/esmvalcore/cmor/_fixes/oras5/oras5.py index 9012d0051e..ecabdab99e 100644 --- a/esmvalcore/cmor/_fixes/oras5/oras5.py +++ b/esmvalcore/cmor/_fixes/oras5/oras5.py @@ -4,7 +4,6 @@ import dask.array as da import iris -import iris.util import numpy as np from iris import Constraint from iris.coords import DimCoord diff --git a/esmvalcore/config/extra_facets/oras5-mappings.yml b/esmvalcore/config/extra_facets/oras5-mappings.yml deleted file mode 100644 index b415a5ef3a..0000000000 --- a/esmvalcore/config/extra_facets/oras5-mappings.yml +++ /dev/null @@ -1,55 +0,0 @@ -# Extra facets for native ORAS5 data - -# Notes: -# - All facets can also be specified in the recipes. The values given here are -# only defaults. -# - The facet ``var_type`` has to be specified in the recipe if it is not given -# here and default DRS is used. - -# A complete list of supported keys is given in the documentation (see -# ESMValCore/doc/quickstart/find_data.rst). 
---- - -ORAS5: - - '*': - # Cell measures - areacella: - latitude: grid_latitude - longitude: grid_longitude - raw_name: cell_area - areacello: - latitude: grid_latitude - longitude: grid_longitude - raw_name: cell_area - - # Variable names and units - uo: - raw_name: vozocrte - raw_units: m/s - vo: - raw_name: vomecrtn - raw_units: m/s - tos: - raw_name: sosstsst - raw_units: degC - sos: - raw_name: sosaline - raw_units: '0.001' - zos: - raw_name: sossheig - raw_units: m - mlotst: - raw_name: somxl010 - raw_unis: m - thetao: - raw_name: votemper - raw_units: degC - so: - raw_name: vosaline - raw_units: '0.001' - - # MIP-specific settings - Omon: - '*': - tres: 1M \ No newline at end of file diff --git a/tests/integration/cmor/_fixes/oras5/test_oras5.py b/tests/integration/cmor/_fixes/oras5/test_oras5.py index 9e33c09120..31f3f87c6a 100644 --- a/tests/integration/cmor/_fixes/oras5/test_oras5.py +++ b/tests/integration/cmor/_fixes/oras5/test_oras5.py @@ -59,103 +59,103 @@ def cubes_grid(test_data_path): return iris.load(str(nc_path)) -@pytest.fixture -def cubes_regular_grid(): - """Cube with regular grid.""" - time_coord = DimCoord( - [0], - var_name="time", - standard_name="time", - units="days since 1850-01-01", - ) - lat_coord = DimCoord( - [0.0, 1.0], - var_name="lat", - standard_name="latitude", - long_name="latitude", - units="degrees_north", - ) - lon_coord = DimCoord( - [-1.0, 1.0], - var_name="lon", - standard_name="longitude", - long_name="longitude", - units="degrees_east", - ) - cube = Cube( - [[[0.0, 1.0], [2.0, 3.0]]], - var_name="sosstsst", - units="degC", - dim_coords_and_dims=[(time_coord, 0), (lat_coord, 1), (lon_coord, 2)], - ) - return CubeList([cube]) +# @pytest.fixture +# def cubes_regular_grid(): +# """Cube with regular grid.""" +# time_coord = DimCoord( +# [0], +# var_name="time", +# standard_name="time", +# units="days since 1850-01-01", +# ) +# lat_coord = DimCoord( +# [0.0, 1.0], +# var_name="lat", +# standard_name="latitude", +# long_name="latitude", +# units="degrees_north", +# ) +# lon_coord = DimCoord( +# [-1.0, 1.0], +# var_name="lon", +# standard_name="longitude", +# long_name="longitude", +# units="degrees_east", +# ) +# cube = Cube( +# [[[0.0, 1.0], [2.0, 3.0]]], +# var_name="sosstsst", +# units="degC", +# dim_coords_and_dims=[(time_coord, 0), (lat_coord, 1), (lon_coord, 2)], +# ) +# return CubeList([cube]) -@pytest.fixture -def cubes_2d_lat_lon_grid(): - """Cube with 2D latitude and longitude.""" - time_coord = DimCoord( - [0], - var_name="time", - standard_name="time", - units="days since 1850-01-01", - ) - lat_coord = AuxCoord( - [[0.0, 0.0, 0.0], [1.0, 1.0, 1.0]], - var_name="lat", - standard_name="latitude", - long_name="latitude", - units="degrees_north", - ) - lon_coord = AuxCoord( - [[0.0, 1.0, 2.0], [0.0, 1.0, 2.0]], - var_name="lon", - standard_name="longitude", - long_name="longitude", - units="degrees_east", - ) - cube = Cube( - [[[0.0, 1.0, 2.0], [2.0, 3.0, 4.0]]], - var_name="sosstsst", - units="degC", - dim_coords_and_dims=[(time_coord, 0)], - aux_coords_and_dims=[(lat_coord, (1, 2)), (lon_coord, (1, 2))], - ) - return CubeList([cube]) +# @pytest.fixture +# def cubes_2d_lat_lon_grid(): +# """Cube with 2D latitude and longitude.""" +# time_coord = DimCoord( +# [0], +# var_name="time", +# standard_name="time", +# units="days since 1850-01-01", +# ) +# lat_coord = AuxCoord( +# [[0.0, 0.0, 0.0], [1.0, 1.0, 1.0]], +# var_name="lat", +# standard_name="latitude", +# long_name="latitude", +# units="degrees_north", +# ) +# lon_coord = AuxCoord( +# 
[[0.0, 1.0, 2.0], [0.0, 1.0, 2.0]], +# var_name="lon", +# standard_name="longitude", +# long_name="longitude", +# units="degrees_east", +# ) +# cube = Cube( +# [[[0.0, 1.0, 2.0], [2.0, 3.0, 4.0]]], +# var_name="sosstsst", +# units="degC", +# dim_coords_and_dims=[(time_coord, 0)], +# aux_coords_and_dims=[(lat_coord, (1, 2)), (lon_coord, (1, 2))], +# ) +# return CubeList([cube]) -@pytest.fixture -def simple_unstructured_cube(): - """Create a cube with an unstructured grid.""" - time_coord = DimCoord( - [0], - var_name="time", - standard_name="time", - units="days since 1850-01-01", - ) - hdepth_coord = DimCoord([0, 1, 2], var_name="depth") - lat_coord = AuxCoord( - [0.0, 1.0], - var_name="lat", - standard_name="latitude", - long_name="latitude", - units="degrees_north", - ) - lon_coord = AuxCoord( - [0.0, 1.0], - var_name="lon", - standard_name="longitude", - long_name="longitude", - units="degrees_east", - ) - cube = Cube( - [[[0.0, 1.0], [2.0, 3.0], [4.0, 5.0]]], - var_name="votemper", - units="degC", - dim_coords_and_dims=[(time_coord, 0), (height_coord, 1)], - aux_coords_and_dims=[(lat_coord, 2), (lon_coord, 2)], - ) - return cube +# @pytest.fixture +# def simple_unstructured_cube(): +# """Create a cube with an unstructured grid.""" +# time_coord = DimCoord( +# [0], +# var_name="time", +# standard_name="time", +# units="days since 1850-01-01", +# ) +# depth_coord = DimCoord([0, 1, 2], var_name="depth") +# lat_coord = AuxCoord( +# [0.0, 1.0], +# var_name="lat", +# standard_name="latitude", +# long_name="latitude", +# units="degrees_north", +# ) +# lon_coord = AuxCoord( +# [0.0, 1.0], +# var_name="lon", +# standard_name="longitude", +# long_name="longitude", +# units="degrees_east", +# ) +# cube = Cube( +# [[[0.0, 1.0], [2.0, 3.0], [4.0, 5.0]]], +# var_name="votemper", +# units="degC", +# dim_coords_and_dims=[(time_coord, 0), (height_coord, 1)], +# aux_coords_and_dims=[(lat_coord, 2), (lon_coord, 2)], +# ) +# return cube def _get_fix(mip, short_name, fix_name, session=None): From 69f3cd7bc25e27821f3e73c0a2aeebd93e08a75b Mon Sep 17 00:00:00 2001 From: Jan-Hendrik Malles Date: Wed, 26 Nov 2025 15:10:46 +0100 Subject: [PATCH 27/32] Some formatting --- .../configurations/data-native-oras5.yml | 2 +- .../defaults/extra_facets_oras5.yml | 4 +- .../cmor/_fixes/oras5/test_oras5.py | 68 ++++++++++++++----- 3 files changed, 54 insertions(+), 20 deletions(-) diff --git a/esmvalcore/config/configurations/data-native-oras5.yml b/esmvalcore/config/configurations/data-native-oras5.yml index b24958e47b..9645aac38f 100644 --- a/esmvalcore/config/configurations/data-native-oras5.yml +++ b/esmvalcore/config/configurations/data-native-oras5.yml @@ -6,4 +6,4 @@ projects: type: "esmvalcore.local.LocalDataSource" rootpath: ~/climate_data dirname_template: '/' - filename_template: '*{raw_name}*{version}*.nc' \ No newline at end of file + filename_template: '*{raw_name}*{version}*.nc' diff --git a/esmvalcore/config/configurations/defaults/extra_facets_oras5.yml b/esmvalcore/config/configurations/defaults/extra_facets_oras5.yml index 03832d97ac..1b8f27c149 100644 --- a/esmvalcore/config/configurations/defaults/extra_facets_oras5.yml +++ b/esmvalcore/config/configurations/defaults/extra_facets_oras5.yml @@ -24,7 +24,7 @@ projects: raw_name: cell_area # Variable names and units - uo: + uo: raw_name: vozocrte raw_units: m/s vo: @@ -53,4 +53,4 @@ projects: raw_units: W/m^2 tauuo: raw_name: sozotaux - raw_units: N/m^2 \ No newline at end of file + raw_units: N/m^2 diff --git 
a/tests/integration/cmor/_fixes/oras5/test_oras5.py b/tests/integration/cmor/_fixes/oras5/test_oras5.py index 31f3f87c6a..4c13b60c9c 100644 --- a/tests/integration/cmor/_fixes/oras5/test_oras5.py +++ b/tests/integration/cmor/_fixes/oras5/test_oras5.py @@ -408,7 +408,7 @@ def check_lat_lon(cube): "first spatial index for variables stored on an unstructured grid" ) assert i_coord.units == "1" - np.testing.assert_allclose(i_coord.points, list(range(13*12))) + np.testing.assert_allclose(i_coord.points, list(range(13 * 12))) assert i_coord.bounds is None assert len(cube.coord_dims(lat)) == 1 @@ -523,7 +523,7 @@ def check_mesh(mesh): assert conn.cf_role == "face_node_connectivity" assert conn.start_index == 0 assert conn.location_axis == 0 - assert conn.shape == (int(13*12), 4) + assert conn.shape == (int(13 * 12), 4) # np.testing.assert_array_equal( # conn.indices, # [ @@ -555,6 +555,7 @@ def test_thetao_fix(cubes_3d): # check_height(cube) check_lat_lon(cube) + def test_get_tos_fix(): """Test getting of fix.""" fix = Fix.get_fixes("ORAS5", "ORAS5", "Omon", "tos") @@ -571,6 +572,7 @@ def test_tos_fix(cubes_2d): check_lat_lon(cube) # check_heightxm(cube, 2.0) + def test_tos_no_mesh(cubes_2d): """Test fix.""" fix = get_allvars_fix("Omon", "tos") @@ -662,6 +664,7 @@ def test_add_time_fail(): with pytest.raises(ValueError, match=msg): fix._add_time(cube, cubes) + # def test_add_latitude_fail(cubes_2d): # """Test fix.""" # # Remove latitude and grid file attribute from tas cube to test automatic @@ -895,6 +898,7 @@ def test_get_horizontal_grid_from_facet_fail(tmp_path): with pytest.raises(FileNotFoundError): fix.get_horizontal_grid(cube) + def test_get_horizontal_grid_none(tmp_path): """Test fix.""" session = CFG.start_session("my session") @@ -903,7 +907,7 @@ def test_get_horizontal_grid_none(tmp_path): cube = Cube(0) fix = get_allvars_fix("Omon", "tos", session=session) del fix.extra_facets["horizontal_grid"] - + msg = "Full path to suitable ORAS5 grid must be specified in facet 'horizontal_grid'" with pytest.raises(NotImplementedError, match=msg): fix.get_horizontal_grid(cube) @@ -1643,7 +1647,7 @@ def test_get_bounds_cached_from_facet(cubes_2d, cubes_3d): """Test fix.""" tos_cube = cubes_2d.extract_cube(NameConstraint(var_name="sosstsst")) tos_cube2 = tos_cube.copy() - cubes = CubeList([tos_cube,tos_cube2]) + cubes = CubeList([tos_cube, tos_cube2]) fix = get_allvars_fix("Omon", "tos") fix.extra_facets["ugrid"] = False @@ -1652,18 +1656,33 @@ def test_get_bounds_cached_from_facet(cubes_2d, cubes_3d): fixed_cubes.append(fix.fix_metadata(CubeList([cubes[i]]))[0]) fixed_cubes = CubeList(fixed_cubes) - assert (fixed_cubes[0].coord("latitude") == fixed_cubes[1].coord("latitude")) - assert (fixed_cubes[0].coord("longitude") == fixed_cubes[1].coord("longitude")) - assert (fixed_cubes[0].coord("latitude").bounds == fixed_cubes[1].coord("latitude").bounds).all() - assert (fixed_cubes[0].coord("latitude").points == fixed_cubes[1].coord("latitude").points).all() - assert (fixed_cubes[0].coord("longitude").bounds == fixed_cubes[1].coord("longitude").bounds).all() - assert (fixed_cubes[0].coord("longitude").points == fixed_cubes[1].coord("longitude").points).all() + assert fixed_cubes[0].coord("latitude") == fixed_cubes[1].coord("latitude") + assert fixed_cubes[0].coord("longitude") == fixed_cubes[1].coord( + "longitude" + ) + assert ( + fixed_cubes[0].coord("latitude").bounds + == fixed_cubes[1].coord("latitude").bounds + ).all() + assert ( + fixed_cubes[0].coord("latitude").points + == 
fixed_cubes[1].coord("latitude").points + ).all() + assert ( + fixed_cubes[0].coord("longitude").bounds + == fixed_cubes[1].coord("longitude").bounds + ).all() + assert ( + fixed_cubes[0].coord("longitude").points + == fixed_cubes[1].coord("longitude").points + ).all() + def test_get_coord_cached_from_facet(cubes_2d, cubes_3d): """Test fix.""" tos_cube = cubes_2d.extract_cube(NameConstraint(var_name="sosstsst")) tos_cube2 = tos_cube.copy() - cubes = CubeList([tos_cube,tos_cube2]) + cubes = CubeList([tos_cube, tos_cube2]) # thetao_cube = cubes_3d.extract_cube(NameConstraint(var_name="votemper")) fix = get_allvars_fix("Omon", "tos") @@ -1672,12 +1691,27 @@ def test_get_coord_cached_from_facet(cubes_2d, cubes_3d): fixed_cubes.append(fix.fix_metadata(CubeList([cubes[i]]))[0]) fixed_cubes = CubeList(fixed_cubes) - assert (fixed_cubes[0].coord("latitude") == fixed_cubes[1].coord("latitude")) - assert (fixed_cubes[0].coord("longitude") == fixed_cubes[1].coord("longitude")) - assert (fixed_cubes[0].coord("latitude").bounds == fixed_cubes[1].coord("latitude").bounds).all() - assert (fixed_cubes[0].coord("latitude").points == fixed_cubes[1].coord("latitude").points).all() - assert (fixed_cubes[0].coord("longitude").bounds == fixed_cubes[1].coord("longitude").bounds).all() - assert (fixed_cubes[0].coord("longitude").points == fixed_cubes[1].coord("longitude").points).all() + assert fixed_cubes[0].coord("latitude") == fixed_cubes[1].coord("latitude") + assert fixed_cubes[0].coord("longitude") == fixed_cubes[1].coord( + "longitude" + ) + assert ( + fixed_cubes[0].coord("latitude").bounds + == fixed_cubes[1].coord("latitude").bounds + ).all() + assert ( + fixed_cubes[0].coord("latitude").points + == fixed_cubes[1].coord("latitude").points + ).all() + assert ( + fixed_cubes[0].coord("longitude").bounds + == fixed_cubes[1].coord("longitude").bounds + ).all() + assert ( + fixed_cubes[0].coord("longitude").points + == fixed_cubes[1].coord("longitude").points + ).all() + # Test _get_path_from_facet From d81ce6b78e71bf1bcb912123aa84ac41589bb4f9 Mon Sep 17 00:00:00 2001 From: Jan-Hendrik Malles Date: Fri, 28 Nov 2025 13:59:55 +0100 Subject: [PATCH 28/32] Make pre-commit run --- .../cmor/_fixes/{oras5 => native6}/oras5.py | 206 +- esmvalcore/cmor/_fixes/oras5/__init__.py | 0 esmvalcore/cmor/_fixes/oras5/_base_fixes.py | 188 -- esmvalcore/config-developer.yml | 2 +- .../configurations/data-native-oras5.yml | 9 - .../defaults/extra_facets_native6.yml | 37 + .../defaults/extra_facets_oras5.yml | 56 - .../cmor/_fixes/native6/test_oras5.py | 1001 ++++++++ .../cmor/_fixes/oras5/test_oras5.py | 2016 ----------------- 9 files changed, 1234 insertions(+), 2281 deletions(-) rename esmvalcore/cmor/_fixes/{oras5 => native6}/oras5.py (58%) delete mode 100644 esmvalcore/cmor/_fixes/oras5/__init__.py delete mode 100644 esmvalcore/cmor/_fixes/oras5/_base_fixes.py delete mode 100644 esmvalcore/config/configurations/data-native-oras5.yml delete mode 100644 esmvalcore/config/configurations/defaults/extra_facets_oras5.yml create mode 100644 tests/integration/cmor/_fixes/native6/test_oras5.py delete mode 100644 tests/integration/cmor/_fixes/oras5/test_oras5.py diff --git a/esmvalcore/cmor/_fixes/oras5/oras5.py b/esmvalcore/cmor/_fixes/native6/oras5.py similarity index 58% rename from esmvalcore/cmor/_fixes/oras5/oras5.py rename to esmvalcore/cmor/_fixes/native6/oras5.py index ecabdab99e..868dd349e3 100644 --- a/esmvalcore/cmor/_fixes/oras5/oras5.py +++ b/esmvalcore/cmor/_fixes/native6/oras5.py @@ -1,6 +1,9 @@ """On-the-fly CMORizer 
for ORAS5.""" +from __future__ import annotations + import logging +from typing import TYPE_CHECKING import dask.array as da import iris @@ -8,18 +11,196 @@ from iris import Constraint from iris.coords import DimCoord from iris.cube import CubeList +from iris.mesh import Connectivity, MeshXY + +from esmvalcore.cmor._fixes.icon.icon import AllVars as AllVars_ICON +from esmvalcore.cmor._fixes.icon.icon import IconFix +from esmvalcore.cmor._fixes.shared import fix_ocean_depth_coord -from ..icon.icon import AllVars as AllVars_ICON -from ..shared import fix_ocean_depth_coord -from ._base_fixes import Oras5Fix +if TYPE_CHECKING: + from iris.cube import Cube logger = logging.getLogger(__name__) +class Oras5Fix(IconFix): + """Base class for all ORAS5 fixes.""" + + def __init__(self, *args, **kwargs): + """Initialize ORAS5 fix.""" + super().__init__(*args, **kwargs) + self._horizontal_grids: dict[str, CubeList] = {} + self._meshes: dict[str, MeshXY] = {} + + def _create_mesh(self, cube: Cube) -> MeshXY: + """Create mesh from horizontal grid file.""" + # Get coordinates + face_lon = cube.coord("longitude") + face_lat = cube.coord("latitude") + node_lon = cube.coord("longitude").bounds.T.flatten() + node_lat = cube.coord("latitude").bounds.T.flatten() + + # Make the node locations a 2D array + nodes_flat = np.stack([node_lon, node_lat], axis=1) + + # Find the unique nodes to be able to associate them with the faces + # Unfortunately, dask does not support the axis parameter... + nodes_unique, indices = np.unique( + nodes_flat, + return_inverse=True, + axis=0, + ) + + # Get the unique nodes as dask arrays + node_lon = da.from_array(nodes_unique[:, 0]) + node_lat = da.from_array(nodes_unique[:, 1]) + + # Get dimensions (N_faces x M_nodes) + n_faces = len(face_lat.core_points()) + n_nodes = int(len(indices) / n_faces) + + # Reshape indices to N_faces x M_nodes dask array + indices = da.reshape(da.from_array(indices), (n_nodes, n_faces)).T + + # Create the necessary mask + mask = da.full(da.shape(indices), False) + + # Define the connectivity + connectivity = Connectivity( + indices=da.ma.masked_array(indices, mask=mask), + cf_role="face_node_connectivity", + start_index=0, + location_axis=0, + ) + + # Put everything together to get a U-Grid style mesh + node_lat = iris.coords.AuxCoord( + node_lat, + standard_name="latitude", + var_name="nlat", + long_name="node latitude", + units="degrees", + ) + node_lon = iris.coords.AuxCoord( + node_lon, + standard_name="longitude", + var_name="nlon", + long_name="node longitude", + units="degrees", + ) + + return MeshXY( + topology_dimension=2, + node_coords_and_axes=[(node_lat, "y"), (node_lon, "x")], + connectivities=[connectivity], + face_coords_and_axes=[(face_lat, "y"), (face_lon, "x")], + ) + + def get_horizontal_grid(self, cube: Cube) -> CubeList: + """Get copy of ORAS5 horizontal grid. + + If given, retrieve grid from `horizontal_grid` facet specified by the + user. + + Parameters + ---------- + cube: iris.cube.Cube + Cube for which the ORS5 horizontal grid is retrieved. If the facet + `horizontal_grid` is not specified by the user, it raises a + NotImplementedError. + + Returns + ------- + iris.cube.CubeList + Copy of ORAS5 horizontal grid. + + Raises + ------ + FileNotFoundError + Path specified by `horizontal_grid` facet (absolute or relative to + `auxiliary_data_dir`) does not exist. + NotImplementedError + No `horizontal_grid` facet is defined. 
+ + """ + if self.extra_facets.get("horizontal_grid") is not None: + grid = self._get_grid_from_facet() + else: + msg = ( + f"Full path to suitable ORAS5 grid must be specified in facet " + f"'horizontal_grid' for cube: {cube}" + ) + raise NotImplementedError( + msg, + ) + + return grid + + def _get_grid_from_facet(self) -> CubeList: + """Get horizontal grid from user-defined facet `horizontal_grid`.""" + grid_path = self._get_path_from_facet( + "horizontal_grid", + "Horizontal grid file", + ) + grid_name = grid_path.name + + # If already loaded, return the horizontal grid + if grid_name in self._horizontal_grids: + return self._horizontal_grids[grid_name] + + # Load file + self._horizontal_grids[grid_name] = self._load_cubes(grid_path) + logger.debug("Loaded ORAS5 grid file from %s", grid_path) + return self._horizontal_grids[grid_name] + + def get_mesh(self, cube: Cube) -> MeshXY: + """Get mesh. + + Note + ---- + If possible, this function uses a cached version of the mesh to save + time. + + Parameters + ---------- + cube: iris.cube.Cube + Cube for which the mesh is retrieved. + + Returns + ------- + iris.mesh.MeshXY + Mesh of the cube. + + Raises + ------ + FileNotFoundError + Path specified by `horizontal_grid` facet (absolute or relative to + `auxiliary_data_dir`) does not exist. + NotImplementedError + No `horizontal_grid` facet is defined. + + """ + # Use `horizontal_grid` facet to determine grid name + grid_path = self._get_path_from_facet( + "horizontal_grid", + "Horizontal grid file", + ) + grid_name = grid_path.name + + # Reuse mesh if possible + if grid_name in self._meshes: + logger.debug("Reusing ORAS5 mesh for grid %s", grid_name) + else: + logger.debug("Creating ORAS5 mesh for grid %s", grid_name) + self._meshes[grid_name] = self._create_mesh(cube) + + return self._meshes[grid_name] + + class AllVars(Oras5Fix, AllVars_ICON): """Fixes for all variables.""" - def fix_metadata(self, cubes): + def fix_metadata(self, cubes: CubeList) -> CubeList: """Fix metadata.""" cubes = self.add_additional_cubes(cubes) cube = self.get_cube(cubes) @@ -54,7 +235,7 @@ def fix_metadata(self, cubes): return CubeList([cube]) - def _fix_cube(self, cube): + def _fix_cube(self, cube: Cube) -> Cube: """Remove redundant cells and predetermine how to handle grid.""" # Remove redundant cells cube = cube[..., :-1, 1:-1] @@ -96,7 +277,7 @@ def _fix_cube(self, cube): data = np.reshape(data, dim_shape + data_shape) return iris.cube.Cube(data, dim_coords_and_dims=coords_add) - def _add_coord_from_grid_file(self, cube, coord_name): + def _add_coord_from_grid_file(self, cube: Cube, coord_name: str) -> None: """Add coordinate from grid file to cube. Note. 
@@ -143,12 +324,15 @@ def _add_coord_from_grid_file(self, cube, coord_name): # Find index of mesh dimension (= single unnamed dimension) n_unnamed_dimensions = cube.ndim - len(cube.dim_coords) if n_unnamed_dimensions != 1: - raise ValueError( + msg = ( f"Cannot determine coordinate dimension for coordinate " f"'{coord_name}', cube does not contain a single unnamed " f"dimension:\n{cube}" ) - coord_dims = () + raise ValueError( + msg, + ) + coord_dims: tuple[()] | tuple[int] = () for idx in range(cube.ndim): if not cube.coords(dimensions=idx, dim_coords=True): coord_dims = (idx,) @@ -160,7 +344,7 @@ def _add_coord_from_grid_file(self, cube, coord_name): coord.long_name = coord_name cube.add_aux_coord(coord, coord_dims) - def _fix_lat(self, cube): + def _fix_lat(self, cube: Cube) -> tuple[int, ...]: """Fix latitude coordinate of cube.""" lat_name = self.extra_facets.get("latitude", "latitude") @@ -177,7 +361,7 @@ def _fix_lat(self, cube): return cube.coord_dims(lat) - def _fix_lon(self, cube): + def _fix_lon(self, cube: Cube) -> tuple[int, ...]: """Fix longitude coordinate of cube.""" lon_name = self.extra_facets.get("longitude", "longitude") @@ -195,7 +379,7 @@ def _fix_lon(self, cube): return cube.coord_dims(lon) - def _fix_time(self, cube, cubes): + def _fix_time(self, cube: Cube, cubes: CubeList) -> Cube: """Fix time coordinate of cube.""" # Add time coordinate if not already present if not cube.coords("time"): diff --git a/esmvalcore/cmor/_fixes/oras5/__init__.py b/esmvalcore/cmor/_fixes/oras5/__init__.py deleted file mode 100644 index e69de29bb2..0000000000 diff --git a/esmvalcore/cmor/_fixes/oras5/_base_fixes.py b/esmvalcore/cmor/_fixes/oras5/_base_fixes.py deleted file mode 100644 index f1f9f1285a..0000000000 --- a/esmvalcore/cmor/_fixes/oras5/_base_fixes.py +++ /dev/null @@ -1,188 +0,0 @@ -"""Fix base classes for ORAS5 on-the-fly CMORizer.""" - -import logging -from pathlib import Path - -import dask.array as da -import iris -import numpy as np - -from iris.mesh import Connectivity, MeshXY - -from ..icon.icon import IconFix - -logger = logging.getLogger(__name__) - - -class Oras5Fix(IconFix): - """Base class for all ORAS5 fixes.""" - - CACHE_DIR = Path.home() / ".esmvaltool" / "cache" - CACHE_VALIDITY = 7 * 24 * 60 * 60 # [s]; = 1 week - TIMEOUT = 5 * 60 # [s]; = 5 min - GRID_FILE_ATTR = "grid_file_uri" - - def __init__(self, *args, **kwargs): - """Initialize ORAS5 fix.""" - super().__init__(*args, **kwargs) - self._horizontal_grids = {} - self._meshes = {} - - def _create_mesh(self, cube): - """Create mesh from horizontal grid file.""" - # Get coordinates - face_lon = cube.coord("longitude") - face_lat = cube.coord("latitude") - node_lon = cube.coord("longitude").bounds.T.flatten() - node_lat = cube.coord("latitude").bounds.T.flatten() - - # Make the node locations a 2D array - nodes_flat = np.stack([node_lon, node_lat], axis=1) - - # Find the unique nodes to be able to associate them with the faces - # Unfortunately, dask does not support the axis parameter... 
- nodes_unique, indices = np.unique( - nodes_flat, return_inverse=True, axis=0 - ) - - # Get the unique nodes as dask arrays - node_lon = da.from_array(nodes_unique[:, 0]) - node_lat = da.from_array(nodes_unique[:, 1]) - - # Get dimensions (N_faces x M_nodes) - n_faces = len(face_lat.core_points()) - n_nodes = int(len(indices) / n_faces) - - # Reshape indices to N_faces x M_nodes dask array - indices = da.reshape(da.from_array(indices), (n_nodes, n_faces)).T - - # Create the necessary mask - mask = da.full(da.shape(indices), False) - - # Define the connectivity - connectivity = Connectivity( - indices=da.ma.masked_array(indices, mask=mask), - cf_role="face_node_connectivity", - start_index=0, - location_axis=0, - ) - - # Put everything together to get a U-Grid style mesh - node_lat = iris.coords.AuxCoord( - node_lat, - standard_name="latitude", - var_name="nlat", - long_name="node latitude", - units="degrees", - ) - node_lon = iris.coords.AuxCoord( - node_lon, - standard_name="longitude", - var_name="nlon", - long_name="node longitude", - units="degrees", - ) - - mesh = MeshXY( - topology_dimension=2, - node_coords_and_axes=[(node_lat, "y"), (node_lon, "x")], - connectivities=[connectivity], - face_coords_and_axes=[(face_lat, "y"), (face_lon, "x")], - ) - - return mesh - - def get_horizontal_grid(self, cube): - """Get copy of ORAS5 horizontal grid. - - If given, retrieve grid from `horizontal_grid` facet specified by the - user. - - Parameters - ---------- - cube: iris.cube.Cube - Cube for which the ORS5 horizontal grid is retrieved. If the facet - `horizontal_grid` is not specified by the user, it raises a - NotImplementedError. - - Returns - ------- - iris.cube.CubeList - Copy of ORAS5 horizontal grid. - - Raises - ------ - FileNotFoundError - Path specified by `horizontal_grid` facet (absolute or relative to - `auxiliary_data_dir`) does not exist. - NotImplementedError - No `horizontal_grid` facet is defined. - - """ - if self.extra_facets.get("horizontal_grid") is not None: - grid = self._get_grid_from_facet() - else: - raise NotImplementedError( - "Full path to suitable ORAS5 grid must be specified in facet " - "'horizontal_grid'" - ) - - return grid.copy() - - def _get_grid_from_facet(self): - """Get horizontal grid from user-defined facet `horizontal_grid`.""" - grid_path = self._get_path_from_facet( - "horizontal_grid", "Horizontal grid file" - ) - grid_name = grid_path.name - - # If already loaded, return the horizontal grid - if grid_name in self._horizontal_grids: - return self._horizontal_grids[grid_name] - - # Load file - self._horizontal_grids[grid_name] = self._load_cubes(grid_path) - logger.debug("Loaded ORAS5 grid file from %s", grid_path) - return self._horizontal_grids[grid_name] - - def get_mesh(self, cube): - """Get mesh. - - Note - ---- - If possible, this function uses a cached version of the mesh to save - time. - - Parameters - ---------- - cube: iris.cube.Cube - Cube for which the mesh is retrieved. - - Returns - ------- - iris.mesh.MeshXY - Mesh of the cube. - - Raises - ------ - FileNotFoundError - Path specified by `horizontal_grid` facet (absolute or relative to - `auxiliary_data_dir`) does not exist. - NotImplementedError - No `horizontal_grid` facet is defined. 
- - """ - # Use `horizontal_grid` facet to determine grid name - grid_path = self._get_path_from_facet( - "horizontal_grid", "Horizontal grid file" - ) - grid_name = grid_path.name - - # Reuse mesh if possible - if grid_name in self._meshes: - logger.debug("Reusing ORAS5 mesh for grid %s", grid_name) - else: - logger.debug("Creating ORAS5 mesh for grid %s", grid_name) - self._meshes[grid_name] = self._create_mesh(cube) - - return self._meshes[grid_name] diff --git a/esmvalcore/config-developer.yml b/esmvalcore/config-developer.yml index 6d838534af..a9ef59ec52 100644 --- a/esmvalcore/config-developer.yml +++ b/esmvalcore/config-developer.yml @@ -219,4 +219,4 @@ ORAS5: default: '*{raw_name}*{version}*.nc' output_file: '{project}_{dataset}_{version}_{mip}_{short_name}' cmor_type: 'CMIP6' - cmor_default_table_prefix: 'CMIP6_' \ No newline at end of file + cmor_default_table_prefix: 'CMIP6_' diff --git a/esmvalcore/config/configurations/data-native-oras5.yml b/esmvalcore/config/configurations/data-native-oras5.yml deleted file mode 100644 index 9645aac38f..0000000000 --- a/esmvalcore/config/configurations/data-native-oras5.yml +++ /dev/null @@ -1,9 +0,0 @@ -# Read data from ORAS5 data in its native format. -projects: - ORAS5: - data: - oras5: &oras5 - type: "esmvalcore.local.LocalDataSource" - rootpath: ~/climate_data - dirname_template: '/' - filename_template: '*{raw_name}*{version}*.nc' diff --git a/esmvalcore/config/configurations/defaults/extra_facets_native6.yml b/esmvalcore/config/configurations/defaults/extra_facets_native6.yml index 6e56e49544..b5834785ba 100644 --- a/esmvalcore/config/configurations/defaults/extra_facets_native6.yml +++ b/esmvalcore/config/configurations/defaults/extra_facets_native6.yml @@ -196,3 +196,40 @@ projects: SImon: '*': tres: 1M + + ORAS5: + '*': + '*': + tier: 3 + #'*': + # Variable names and units + uo: + raw_name: vozocrte + raw_units: m/s + vo: + raw_name: vomecrtn + raw_units: m/s + tos: + raw_name: sosstsst + raw_units: degC + sos: + raw_name: sosaline + raw_units: '0.001' + zos: + raw_name: sossheig + raw_units: m + mlotst: + raw_name: somxl010 + raw_units: m + thetao: + raw_name: votemper + raw_units: degC + so: + raw_name: vosaline + raw_units: '0.001' + hfds: + raw_name: sohefldo + raw_units: W/m^2 + tauuo: + raw_name: sozotaux + raw_units: N/m^2 diff --git a/esmvalcore/config/configurations/defaults/extra_facets_oras5.yml b/esmvalcore/config/configurations/defaults/extra_facets_oras5.yml deleted file mode 100644 index 1b8f27c149..0000000000 --- a/esmvalcore/config/configurations/defaults/extra_facets_oras5.yml +++ /dev/null @@ -1,56 +0,0 @@ -# Extra facets for native ORAS5 data. - -# Notes: -# - All facets can also be specified in the recipes. The values given here are -# only defaults. - -# A complete list of supported keys is given in the documentation (see -# ESMValCore/doc/quickstart/find_data.rst). 
---- - -projects: - ORAS5: - extra_facets: - ORAS5: - '*': - # Cell measures - areacella: - latitude: grid_latitude - longitude: grid_longitude - raw_name: cell_area - areacello: - latitude: grid_latitude - longitude: grid_longitude - raw_name: cell_area - - # Variable names and units - uo: - raw_name: vozocrte - raw_units: m/s - vo: - raw_name: vomecrtn - raw_units: m/s - tos: - raw_name: sosstsst - raw_units: degC - sos: - raw_name: sosaline - raw_units: '0.001' - zos: - raw_name: sossheig - raw_units: m - mlotst: - raw_name: somxl010 - raw_units: m - thetao: - raw_name: votemper - raw_units: degC - so: - raw_name: vosaline - raw_units: '0.001' - hfds: - raw_name: sohefldo - raw_units: W/m^2 - tauuo: - raw_name: sozotaux - raw_units: N/m^2 diff --git a/tests/integration/cmor/_fixes/native6/test_oras5.py b/tests/integration/cmor/_fixes/native6/test_oras5.py new file mode 100644 index 0000000000..99c5e89d97 --- /dev/null +++ b/tests/integration/cmor/_fixes/native6/test_oras5.py @@ -0,0 +1,1001 @@ +"""Test the ICON on-the-fly CMORizer.""" + +from datetime import datetime +from pathlib import Path +from unittest import mock + +import iris +import numpy as np +import pytest +from cf_units import Unit +from iris import NameConstraint +from iris.coords import CellMethod, DimCoord +from iris.cube import Cube, CubeList + +import esmvalcore.cmor._fixes.native6.oras5 +from esmvalcore.cmor._fixes.fix import GenericFix +from esmvalcore.cmor._fixes.native6.oras5 import AllVars, Oras5Fix +from esmvalcore.cmor.fix import Fix +from esmvalcore.cmor.table import get_var_info +from esmvalcore.config import CFG +from esmvalcore.dataset import Dataset + +TEST_GRID_FILE_URI = ( + "https://github.com/ESMValGroup/ESMValCore/raw/main/tests/integration/" + "cmor/_fixes/test_data/oras5_grid.nc" +) +TEST_GRID_FILE_NAME = "oras5_grid.nc" + + +@pytest.fixture(autouse=True) +def tmp_cache_dir(monkeypatch, tmp_path): + """Use temporary path as cache directory for all tests in this module.""" + monkeypatch.setattr(Oras5Fix, "CACHE_DIR", tmp_path) + + +# Note that test_data_path is defined in tests/integration/cmor/_fixes/conftest.py + + +@pytest.fixture +def cubes_2d(test_data_path): + """2D sample cubes.""" + nc_path = test_data_path / "oras5_2d.nc" + return iris.load(str(nc_path)) + + +@pytest.fixture +def cubes_3d(test_data_path): + """3D sample cubes.""" + nc_path = test_data_path / "oras5_3d.nc" + return iris.load(str(nc_path)) + + +@pytest.fixture +def cubes_grid(test_data_path): + """Grid description sample cubes.""" + nc_path = test_data_path / "oras5_grid.nc" + return iris.load(str(nc_path)) + + +def _get_fix(mip, short_name, fix_name, session=None): + """Load a fix from esmvalcore.cmor._fixes.native6.oras5.""" + dataset = Dataset( + project="native6", + dataset="ORAS5", + mip=mip, + short_name=short_name, + ) + extra_facets = dataset._get_extra_facets() + extra_facets["frequency"] = "mon" + extra_facets["exp"] = "omip" + test_data_path = Path(__file__).resolve().parent.parent / "test_data" + extra_facets["horizontal_grid"] = str(test_data_path / "oras5_grid.nc") + extra_facets["ugrid"] = True + vardef = get_var_info(project="native6", mip=mip, short_name=short_name) + cls = getattr(esmvalcore.cmor._fixes.native6.oras5, fix_name) + return cls(vardef, extra_facets=extra_facets, session=session) + + +def get_fix(mip, short_name, session=None): + """Load a variable fix from esmvalcore.cmor._fixes.native6.oras5.""" + fix_name = short_name[0].upper() + short_name[1:] + return _get_fix(mip, short_name, fix_name, 
session=session) + + +def get_allvars_fix(mip, short_name, session=None): + """Load the AllVars fix from esmvalcore.cmor._fixes.native6.oras5.""" + return _get_fix(mip, short_name, "AllVars", session=session) + + +def fix_metadata(cubes, mip, short_name, session=None): + """Fix metadata of cubes.""" + fix = get_fix(mip, short_name, session=session) + cubes = fix.fix_metadata(cubes) + fix = get_allvars_fix(mip, short_name, session=session) + return fix.fix_metadata(cubes) + + +def fix_data(cube, mip, short_name, session=None): + """Fix data of cube.""" + fix = get_fix(mip, short_name, session=session) + cube = fix.fix_data(cube) + fix = get_allvars_fix(mip, short_name, session=session) + return fix.fix_data(cube) + + +def check_thetao_metadata(cubes): + """Check thetao metadata.""" + assert len(cubes) == 1 + cube = cubes[0] + assert cube.var_name == "thetao" + assert cube.standard_name == "sea_water_potential_temperature" + assert cube.long_name == "Sea Water Potential Temperature" + assert cube.units == "degC" + assert "positive" not in cube.attributes + return cube + + +def check_tos_metadata(cubes): + """Check tos metadata.""" + assert len(cubes) == 1 + cube = cubes[0] + assert cube.var_name == "tos" + assert cube.standard_name == "sea_surface_temperature" + assert cube.long_name == "Sea Surface Temperature" + assert cube.units == "degC" + # assert "positive" not in cube.attributes + return cube + + +def check_time(cube): + """Check time coordinate of cube.""" + assert cube.coords("time", dim_coords=True) + time = cube.coord("time", dim_coords=True) + assert time.var_name == "time" + assert time.standard_name == "time" + assert time.long_name == "time" + assert time.attributes == {} + + +def check_model_level_metadata(cube): + """Check metadata of model_level coordinate.""" + assert cube.coords("depth", dim_coords=True) + height = cube.coord("depth", dim_coords=True) + assert height.var_name == "lev" + assert height.standard_name is None + assert height.long_name == "model level number" + assert height.units == "m" + assert height.attributes == {"positive": "down"} + return height + + +def check_air_pressure_metadata(cube): + """Check metadata of air_pressure coordinate.""" + assert cube.coords("air_pressure", dim_coords=False) + plev = cube.coord("air_pressure", dim_coords=False) + assert plev.var_name == "plev" + assert plev.standard_name == "air_pressure" + assert plev.long_name == "pressure" + assert plev.units == "Pa" + assert plev.attributes == {"positive": "down"} + return plev + + +def check_lat(cube): + """Check latitude coordinate of cube.""" + assert cube.coords("latitude", dim_coords=False) + lat = cube.coord("latitude", dim_coords=False) + assert lat.var_name == "lat" + assert lat.standard_name == "latitude" + assert lat.long_name == "latitude" + assert lat.units == "degrees_north" + return lat + + +def check_lon(cube): + """Check longitude coordinate of cube.""" + assert cube.coords("longitude", dim_coords=False) + lon = cube.coord("longitude", dim_coords=False) + assert lon.var_name == "lon" + assert lon.standard_name == "longitude" + assert lon.long_name == "longitude" + assert lon.units == "degrees_east" + return lon + + +def check_lat_lon(cube): + """Check latitude, longitude and mesh of cube.""" + lat = check_lat(cube) + lon = check_lon(cube) + + # Check that latitude and longitude are mesh coordinates + assert cube.coords("latitude", mesh_coords=True) + assert cube.coords("longitude", mesh_coords=True) + + # Check dimensional coordinate describing the mesh + assert 
cube.coords( + "first spatial index for variables stored on an unstructured grid", + dim_coords=True, + ) + i_coord = cube.coord( + "first spatial index for variables stored on an unstructured grid", + dim_coords=True, + ) + assert i_coord.var_name == "i" + assert i_coord.standard_name is None + assert i_coord.long_name == ( + "first spatial index for variables stored on an unstructured grid" + ) + assert i_coord.units == "1" + np.testing.assert_allclose(i_coord.points, list(range(13 * 12))) + assert i_coord.bounds is None + + assert len(cube.coord_dims(lat)) == 1 + assert cube.coord_dims(lat) == cube.coord_dims(lon) + assert cube.coord_dims(lat) == cube.coord_dims(i_coord) + + # Check the mesh itself + assert cube.location == "face" + mesh = cube.mesh + check_mesh(mesh) + return lat, lon + + +def check_mesh(mesh): + """Check the mesh.""" + assert mesh is not None + assert mesh.var_name is None + assert mesh.standard_name is None + assert mesh.long_name is None + assert mesh.units == "unknown" + assert mesh.attributes == {} + assert mesh.cf_role == "mesh_topology" + assert mesh.topology_dimension == 2 + + # Check face coordinates + assert len(mesh.coords(location="face")) == 2 + + mesh_face_lat = mesh.coord(location="face", axis="y") + assert mesh_face_lat.var_name == "lat" + assert mesh_face_lat.standard_name == "latitude" + assert mesh_face_lat.long_name == "latitude" + assert mesh_face_lat.units == "degrees_north" + assert mesh_face_lat.attributes == {} + + mesh_face_lon = mesh.coord(location="face", axis="x") + assert mesh_face_lon.var_name == "lon" + assert mesh_face_lon.standard_name == "longitude" + assert mesh_face_lon.long_name == "longitude" + assert mesh_face_lon.units == "degrees_east" + assert mesh_face_lon.attributes == {} + + # Check node coordinates + assert len(mesh.coords(location="node")) == 2 + + mesh_node_lat = mesh.coord(location="node", axis="y") + assert mesh_node_lat.var_name == "nlat" + assert mesh_node_lat.standard_name == "latitude" + assert mesh_node_lat.long_name == "node latitude" + assert mesh_node_lat.units == "degrees_north" + assert mesh_node_lat.attributes == {} + assert mesh_node_lat.bounds is None + + mesh_node_lon = mesh.coord(location="node", axis="x") + assert mesh_node_lon.var_name == "nlon" + assert mesh_node_lon.standard_name == "longitude" + assert mesh_node_lon.long_name == "node longitude" + assert mesh_node_lon.units == "degrees_east" + assert mesh_node_lon.attributes == {} + assert mesh_node_lon.bounds is None + + # Check connectivity + assert len(mesh.connectivities()) == 1 + conn = mesh.connectivity() + assert conn.var_name is None + assert conn.standard_name is None + assert conn.long_name is None + assert conn.units == "unknown" + assert conn.attributes == {} + assert conn.cf_role == "face_node_connectivity" + assert conn.start_index == 0 + assert conn.location_axis == 0 + assert conn.shape == ((13 * 12), 4) + + +def test_get_thetao_fix(): + """Test getting of fix.""" + fix = Fix.get_fixes("native6", "ORAS5", "Omon", "thetao") + assert fix == [AllVars(None), GenericFix(None)] + + +def test_thetao_fix(cubes_3d): + """Test fix.""" + fix = get_allvars_fix("Omon", "thetao") + fixed_cubes = fix.fix_metadata(cubes_3d) + + cube = check_thetao_metadata(fixed_cubes) + check_time(cube) + check_lat_lon(cube) + assert cube.shape == (1, 75, 13 * 12) + + +def test_get_tos_fix(): + """Test getting of fix.""" + fix = Fix.get_fixes("native6", "ORAS5", "Omon", "tos") + assert fix == [AllVars(None), GenericFix(None)] + + +def test_tos_fix(cubes_2d): + 
"""Test fix.""" + fix = get_allvars_fix("Omon", "tos") + fixed_cubes = fix.fix_metadata(cubes_2d) + + cube = check_tos_metadata(fixed_cubes) + check_time(cube) + lat, lon = check_lat_lon(cube) + + assert cube.coords("latitude", dim_coords=False) + assert cube.coords("longitude", dim_coords=False) + assert len(cube.coord_dims(lat)) == 1 + assert len(cube.coord_dims(lon)) == 1 + assert cube.shape == (1, 13 * 12) + + +def test_tos_no_mesh(cubes_2d): + """Test fix.""" + fix = get_allvars_fix("Omon", "tos") + fix.extra_facets["ugrid"] = False + fixed_cubes = fix.fix_metadata(cubes_2d) + + cube = check_tos_metadata(fixed_cubes) + + assert cube.mesh is None + + lat = check_lat(cube) + lon = check_lon(cube) + + assert cube.coords("latitude", dim_coords=False) + assert cube.coords("longitude", dim_coords=False) + assert len(cube.coord_dims(lat)) == 2 + assert len(cube.coord_dims(lon)) == 2 + assert cube.shape == (1, 13, 12) + + +def test_tos_no_mesh_unstructured(cubes_2d): + """Test fix.""" + fix = get_allvars_fix("Omon", "tos") + fix.extra_facets["ugrid"] = False + fix.extra_facets["make_unstructured"] = True + fixed_cubes = fix.fix_metadata(cubes_2d) + + cube = check_tos_metadata(fixed_cubes) + + assert cube.mesh is None + + lat = check_lat(cube) + lon = check_lon(cube) + + assert cube.coords("latitude", dim_coords=False) + assert cube.coords("longitude", dim_coords=False) + assert len(cube.coord_dims(lat)) == 1 + assert len(cube.coord_dims(lon)) == 1 + assert cube.shape == (1, 13 * 12) + + +def test_empty_standard_name_fix(cubes_2d, monkeypatch): + """Test fix.""" + fix = get_allvars_fix("Omon", "tos") + # We know that tas has a standard name, but this being native model output + # there may be variables with no standard name. The code is designed to + # handle this gracefully and here we test it with an artificial, but + # realistic case. 
+    monkeypatch.setattr(fix.vardef, "standard_name", "")
+    fixed_cubes = fix.fix_metadata(cubes_2d)
+
+    assert len(fixed_cubes) == 1
+    cube = fixed_cubes[0]
+    assert cube.var_name == "tos"
+    assert cube.standard_name is None
+    assert cube.long_name == "Sea Surface Temperature"
+    assert cube.units == "degC"
+    assert "positive" not in cube.attributes
+
+
+# Test automatic addition of missing coordinates
+
+
+def test_add_time(cubes_2d, cubes_3d):
+    """Test fix."""
+    # Remove time from tos cube to test automatic addition
+    tos_cube = cubes_2d.extract_cube(NameConstraint(var_name="sosstsst"))
+    thetao_cube = cubes_3d.extract_cube(NameConstraint(var_name="votemper"))
+    tos_cube = tos_cube[0]
+    tos_cube.remove_coord("time")
+    cubes = CubeList([tos_cube, thetao_cube])
+
+    fix = get_allvars_fix("Omon", "tos")
+    fixed_cubes = fix.fix_metadata(cubes)
+    cube = check_tos_metadata(fixed_cubes)
+    assert cube.shape == (1, 13 * 12)
+    check_time(cube)
+
+
+def test_add_time_fail():
+    """Test fix."""
+    fix = get_allvars_fix("Omon", "tos")
+    cube = Cube(1, var_name="sosstsst", units="degC")
+    cubes = CubeList(
+        [
+            cube,
+            Cube(1, var_name="sosstsst", units="degC"),
+        ],
+    )
+    msg = "Cannot add required coordinate 'time' to variable 'tos'"
+    with pytest.raises(ValueError, match=msg):
+        fix._add_time(cube, cubes)
+
+
+@mock.patch.object(Oras5Fix, "_get_grid_from_cube_attr", autospec=True)
+def test_get_horizontal_grid_from_facet_cached_in_dict(
+    mock_get_grid_from_cube_attr,
+    tmp_path,
+):
+    """Test fix."""
+    session = CFG.start_session("my session")
+    session["auxiliary_data_dir"] = tmp_path
+
+    # Save temporary grid file (it is not used by this test, but it must exist
+    # so that no FileNotFoundError is raised)
+    grid_path = "grid.nc"
+    wrong_grid_cube = Cube(0, var_name="wrong_grid")
+    iris.save(wrong_grid_cube, tmp_path / "grid.nc")
+
+    # Make sure that the grid specified by the cube attribute is NOT used
+    cube = Cube(0, attributes={"grid_file_uri": "cached_grid_url.nc"})
+    grid_cube = Cube(0, var_name="grid")
+    fix = get_allvars_fix("Omon", "tos", session=session)
+    fix.extra_facets["horizontal_grid"] = grid_path
+    fix._horizontal_grids["cached_grid_url.nc"] = mock.sentinel.wrong_grid
+    fix._horizontal_grids[grid_path] = grid_cube
+
+    grid = fix.get_horizontal_grid(cube)
+    assert len(fix._horizontal_grids) == 2
+    assert "cached_grid_url.nc" in fix._horizontal_grids  # has not been used
+    assert grid_path in fix._horizontal_grids
+    assert fix._horizontal_grids[grid_path] == grid
+    assert grid is grid_cube
+    mock_get_grid_from_cube_attr.assert_not_called()
+
+
+@pytest.mark.parametrize("grid_path", ["{tmp_path}/grid.nc", "grid.nc"])
+@mock.patch.object(Oras5Fix, "_get_grid_from_cube_attr", autospec=True)
+def test_get_horizontal_grid_from_facet(
+    mock_get_grid_from_cube_attr,
+    grid_path,
+    tmp_path,
+):
+    """Test fix."""
+    session = CFG.start_session("my session")
+    session["auxiliary_data_dir"] = tmp_path
+
+    # Make sure that the grid specified by the cube attribute is NOT used
+    cube = Cube(0, attributes={"grid_file_uri": "cached_grid_url.nc"})
+
+    # Save temporary grid file
+    grid_path = grid_path.format(tmp_path=tmp_path)
+    grid_cube = Cube(0, var_name="grid")
+    iris.save(grid_cube, tmp_path / "grid.nc")
+
+    fix = get_allvars_fix("Omon", "tos", session=session)
+    fix.extra_facets["horizontal_grid"] = grid_path
+    fix._horizontal_grids["cached_grid_url.nc"] = mock.sentinel.wrong_grid
+
+    grid = fix.get_horizontal_grid(cube)
+    assert isinstance(grid, CubeList)
+    assert len(grid) == 1
+    assert grid[0].var_name == "grid"
+
assert len(fix._horizontal_grids) == 2 + assert "cached_grid_url.nc" in fix._horizontal_grids # has not been used + assert "grid.nc" in fix._horizontal_grids + assert fix._horizontal_grids["grid.nc"] == grid + mock_get_grid_from_cube_attr.assert_not_called() + + +def test_get_horizontal_grid_from_facet_fail(tmp_path): + """Test fix.""" + session = CFG.start_session("my session") + session["auxiliary_data_dir"] = tmp_path + + cube = Cube(0) + fix = get_allvars_fix("Omon", "tos", session=session) + fix.extra_facets["horizontal_grid"] = "/this/does/not/exist.nc" + + with pytest.raises(FileNotFoundError): + fix.get_horizontal_grid(cube) + + +def test_get_horizontal_grid_none(tmp_path): + """Test fix.""" + session = CFG.start_session("my session") + session["auxiliary_data_dir"] = tmp_path + + cube = Cube(0) + fix = get_allvars_fix("Omon", "tos", session=session) + del fix.extra_facets["horizontal_grid"] + + msg = "Full path to suitable ORAS5 grid must be specified in facet 'horizontal_grid'" + with pytest.raises(NotImplementedError, match=msg): + fix.get_horizontal_grid(cube) + + +@pytest.mark.parametrize( + ("frequency", "dt_in", "dt_out", "bounds"), + [ + ( + "dec", + [(2000, 1, 1)], + [(1995, 1, 1)], + [[(1990, 1, 1), (2000, 1, 1)]], + ), + ( + "yr", + [(2000, 1, 1), (2001, 1, 1)], + [(1999, 7, 2, 12), (2000, 7, 2)], + [[(1999, 1, 1), (2000, 1, 1)], [(2000, 1, 1), (2001, 1, 1)]], + ), + ( + "mon", + [(2000, 1, 1)], + [(1999, 12, 16, 12)], + [[(1999, 12, 1), (2000, 1, 1)]], + ), + ( + "mon", + [(2000, 11, 30, 23, 45), (2000, 12, 31, 23)], + [(2000, 11, 16), (2000, 12, 16, 12)], + [[(2000, 11, 1), (2000, 12, 1)], [(2000, 12, 1), (2001, 1, 1)]], + ), + ( + "day", + [(2000, 1, 1, 12)], + [(2000, 1, 1)], + [[(1999, 12, 31, 12), (2000, 1, 1, 12)]], + ), + ( + "6hr", + [(2000, 1, 5, 14), (2000, 1, 5, 20)], + [(2000, 1, 5, 11), (2000, 1, 5, 17)], + [ + [(2000, 1, 5, 8), (2000, 1, 5, 14)], + [(2000, 1, 5, 14), (2000, 1, 5, 20)], + ], + ), + ( + "3hr", + [(2000, 1, 1)], + [(1999, 12, 31, 22, 30)], + [[(1999, 12, 31, 21), (2000, 1, 1)]], + ), + ( + "1hr", + [(2000, 1, 5, 14), (2000, 1, 5, 15)], + [(2000, 1, 5, 13, 30), (2000, 1, 5, 14, 30)], + [ + [(2000, 1, 5, 13), (2000, 1, 5, 14)], + [(2000, 1, 5, 14), (2000, 1, 5, 15)], + ], + ), + ], +) +def test_shift_time_coord(frequency, dt_in, dt_out, bounds): + """Test ``_shift_time_coord``.""" + cube = Cube(0, cell_methods=[CellMethod("mean", "time")]) + datetimes = [datetime(*dt) for dt in dt_in] + time_units = Unit("days since 1950-01-01", calendar="proleptic_gregorian") + time_coord = DimCoord( + time_units.date2num(datetimes), + standard_name="time", + var_name="time", + long_name="time", + units=time_units, + ) + + fix = get_allvars_fix("Omon", "tos") + fix.extra_facets["frequency"] = frequency + + fix._shift_time_coord(cube, time_coord) + + dt_out = [datetime(*dt) for dt in dt_out] + bounds = [[datetime(*dt1), datetime(*dt2)] for (dt1, dt2) in bounds] + np.testing.assert_allclose( + time_coord.points, + time_coord.units.date2num(dt_out), + ) + np.testing.assert_allclose( + time_coord.bounds, + time_coord.units.date2num(bounds), + ) + + +@pytest.mark.parametrize( + ("frequency", "dt_in"), + [ + ("dec", [(2000, 1, 15)]), + ("yr", [(2000, 1, 1), (2001, 1, 1)]), + ("mon", [(2000, 6, 15)]), + ("day", [(2000, 1, 1), (2001, 1, 2)]), + ("6hr", [(2000, 6, 15, 12)]), + ("3hr", [(2000, 1, 1, 4), (2000, 1, 1, 7)]), + ("1hr", [(2000, 1, 1, 4), (2000, 1, 1, 5)]), + ], +) +def test_shift_time_point_measurement(frequency, dt_in): + """Test ``_shift_time_coord``.""" + 
cube = Cube(0, cell_methods=[CellMethod("point", "time")]) + datetimes = [datetime(*dt) for dt in dt_in] + time_units = Unit("days since 1950-01-01", calendar="proleptic_gregorian") + time_coord = DimCoord( + time_units.date2num(datetimes), + standard_name="time", + var_name="time", + long_name="time", + units=time_units, + ) + + fix = get_allvars_fix("Omon", "tos") + fix.extra_facets["frequency"] = frequency + + fix._shift_time_coord(cube, time_coord) + + np.testing.assert_allclose( + time_coord.points, + time_coord.units.date2num(datetimes), + ) + assert time_coord.bounds is None + + +@pytest.mark.parametrize( + "frequency", + ["dec", "yr", "yrPt", "mon", "monC", "monPt"], +) +def test_shift_time_coord_hourly_data_low_freq_fail(frequency): + """Test ``_shift_time_coord``.""" + cube = Cube(0, cell_methods=[CellMethod("mean", "time")]) + time_units = Unit("hours since 1950-01-01", calendar="proleptic_gregorian") + time_coord = DimCoord( + [1, 2, 3], + standard_name="time", + var_name="time", + long_name="time", + units=time_units, + ) + + fix = get_allvars_fix("Omon", "tos") + fix.extra_facets["frequency"] = frequency + + msg = "Cannot shift time coordinate: Rounding to closest day failed." + with pytest.raises(ValueError, match=msg): + fix._shift_time_coord(cube, time_coord) + + +@pytest.mark.parametrize( + "frequency", + ["dec", "yr", "yrPt", "mon", "monC", "monPt"], +) +def test_shift_time_coord_not_first_of_month(frequency): + """Test ``_get_previous_timestep``.""" + cube = Cube(0, cell_methods=[CellMethod("mean", "time")]) + time_units = Unit("days since 1950-01-01", calendar="proleptic_gregorian") + time_coord = DimCoord( + [1.5], + standard_name="time", + var_name="time", + long_name="time", + units=time_units, + ) + fix = get_allvars_fix("Omon", "tos") + fix.extra_facets["frequency"] = frequency + + msg = ( + "Cannot shift time coordinate: expected first of the month at 00:00:00" + ) + with pytest.raises(ValueError, match=msg): + fix._shift_time_coord(cube, time_coord) + + +@pytest.mark.parametrize("frequency", ["fx", "subhrPt", "invalid_freq"]) +def test_shift_time_coord_invalid_freq(frequency): + """Test ``_get_previous_timestep``.""" + cube = Cube(0, cell_methods=[CellMethod("mean", "time")]) + time_units = Unit("days since 1950-01-01", calendar="proleptic_gregorian") + time_coord = DimCoord( + [1.5, 2.5], + standard_name="time", + var_name="time", + long_name="time", + units=time_units, + ) + fix = get_allvars_fix("Omon", "tos") + fix.extra_facets["frequency"] = frequency + + msg = ( + "Cannot shift time coordinate: failed to determine previous time step" + ) + with pytest.raises(ValueError, match=msg): + fix._shift_time_coord(cube, time_coord) + + +# Test _get_previous_timestep + + +@pytest.mark.parametrize( + ("frequency", "datetime_in", "datetime_out"), + [ + ("dec", (2000, 1, 1), (1990, 1, 1)), + ("yr", (2000, 1, 1), (1999, 1, 1)), + ("yrPt", (2001, 6, 1), (2000, 6, 1)), + ("mon", (2001, 1, 1), (2000, 12, 1)), + ("mon", (2001, 2, 1), (2001, 1, 1)), + ("mon", (2001, 3, 1), (2001, 2, 1)), + ("mon", (2001, 4, 1), (2001, 3, 1)), + ("monC", (2000, 5, 1), (2000, 4, 1)), + ("monC", (2000, 6, 1), (2000, 5, 1)), + ("monC", (2000, 7, 1), (2000, 6, 1)), + ("monC", (2000, 8, 1), (2000, 7, 1)), + ("monPt", (2002, 9, 1), (2002, 8, 1)), + ("monPt", (2002, 10, 1), (2002, 9, 1)), + ("monPt", (2002, 11, 1), (2002, 10, 1)), + ("monPt", (2002, 12, 1), (2002, 11, 1)), + ("day", (2000, 1, 1), (1999, 12, 31)), + ("day", (2000, 3, 1), (2000, 2, 29)), + ("day", (2187, 3, 14), (2187, 3, 13)), + 
("6hr", (2000, 3, 14, 15), (2000, 3, 14, 9)), + ("6hrPt", (2000, 1, 1), (1999, 12, 31, 18)), + ("6hrCM", (2000, 1, 1, 1), (1999, 12, 31, 19)), + ("3hr", (2000, 3, 14, 15), (2000, 3, 14, 12)), + ("3hrPt", (2000, 1, 1), (1999, 12, 31, 21)), + ("3hrCM", (2000, 1, 1, 1), (1999, 12, 31, 22)), + ("1hr", (2000, 3, 14, 15), (2000, 3, 14, 14)), + ("1hrPt", (2000, 1, 1), (1999, 12, 31, 23)), + ("1hrCM", (2000, 1, 1, 1), (2000, 1, 1)), + ("hr", (2000, 3, 14), (2000, 3, 13, 23)), + ], +) +def test_get_previous_timestep(frequency, datetime_in, datetime_out): + """Test ``_get_previous_timestep``.""" + datetime_in = datetime(*datetime_in) + datetime_out = datetime(*datetime_out) + + fix = get_allvars_fix("Omon", "tos") + fix.extra_facets["frequency"] = frequency + + new_datetime = fix._get_previous_timestep(datetime_in) + + assert new_datetime == datetime_out + + +def test_get_grid_url(): + """Test fix.""" + cube = Cube(0, attributes={"grid_file_uri": TEST_GRID_FILE_URI}) + fix = get_allvars_fix("Omon", "tos") + (grid_url, grid_name) = fix._get_grid_url(cube) + assert grid_url == TEST_GRID_FILE_URI + assert grid_name == TEST_GRID_FILE_NAME + + +def test_get_grid_url_fail(): + """Test fix.""" + cube = Cube(0) + fix = get_allvars_fix("Omon", "tos") + msg = ( + "Cube does not contain the attribute 'grid_file_uri' necessary to " + "download the ICON horizontal grid file" + ) + with pytest.raises(ValueError, match=msg): + fix._get_grid_url(cube) + + +# Test get_mesh + + +def test_get_mesh_cached_from_attr(monkeypatch): + """Test fix.""" + cube = Cube(0, attributes={"grid_file_uri": TEST_GRID_FILE_URI}) + fix = get_allvars_fix("Omon", "tos") + monkeypatch.setattr(fix, "_create_mesh", mock.Mock()) + fix._meshes[TEST_GRID_FILE_NAME] = mock.sentinel.mesh + mesh = fix.get_mesh(cube) + assert mesh == mock.sentinel.mesh + fix._create_mesh.assert_not_called() + + +def test_get_mesh_not_cached_from_attr(monkeypatch): + """Test fix.""" + cube = Cube(0, attributes={"grid_file_uri": TEST_GRID_FILE_URI}) + fix = get_allvars_fix("Omon", "tos") + monkeypatch.setattr(fix, "_create_mesh", mock.Mock()) + fix.get_mesh(cube) + fix._create_mesh.assert_called_once_with(cube) + + +def test_get_mesh_cached_from_facet(monkeypatch, tmp_path): + """Test fix.""" + session = CFG.start_session("my session") + session["auxiliary_data_dir"] = tmp_path + + # Save temporary grid file (this will not be used; however, it is necessary + # to not raise a FileNotFoundError) + grid_path = "grid.nc" + grid_cube = Cube(0, var_name="grid") + iris.save(grid_cube, tmp_path / "grid.nc") + + cube = Cube(0, attributes={"grid_file_uri": TEST_GRID_FILE_URI}) + fix = get_allvars_fix("Omon", "tos", session=session) + fix.extra_facets["horizontal_grid"] = grid_path + monkeypatch.setattr(fix, "_create_mesh", mock.Mock()) + fix._meshes[TEST_GRID_FILE_NAME] = mock.sentinel.wrong_mesh + fix._meshes["grid.nc"] = mock.sentinel.mesh + + mesh = fix.get_mesh(cube) + + assert mesh == mock.sentinel.mesh + fix._create_mesh.assert_not_called() + + +def test_get_mesh_not_cached_from_facet(monkeypatch, tmp_path): + """Test fix.""" + session = CFG.start_session("my session") + session["auxiliary_data_dir"] = tmp_path + + # Save temporary grid file (this will not be used; however, it is necessary + # to not raise a FileNotFoundError) + grid_path = "grid.nc" + grid_cube = Cube(0, var_name="grid") + iris.save(grid_cube, tmp_path / "grid.nc") + + cube = Cube(0, attributes={"grid_file_uri": TEST_GRID_FILE_URI}) + fix = get_allvars_fix("Omon", "tos", session=session) + 
fix.extra_facets["horizontal_grid"] = grid_path + monkeypatch.setattr(fix, "_create_mesh", mock.Mock()) + fix._meshes[TEST_GRID_FILE_NAME] = mock.sentinel.wrong_mesh + + fix.get_mesh(cube) + + fix._create_mesh.assert_called_once_with(cube) + + +def test_get_bounds_cached_from_facet(cubes_2d, cubes_3d): + """Test fix.""" + tos_cube = cubes_2d.extract_cube(NameConstraint(var_name="sosstsst")) + tos_cube2 = tos_cube.copy() + cubes = CubeList([tos_cube, tos_cube2]) + + fix = get_allvars_fix("Omon", "tos") + fix.extra_facets["ugrid"] = False + fixed_cubes = [] + for i in range(len(cubes)): + fixed_cubes.append(fix.fix_metadata(CubeList([cubes[i]]))[0]) + fixed_cubes = CubeList(fixed_cubes) + + assert fixed_cubes[0].coord("latitude") == fixed_cubes[1].coord("latitude") + assert fixed_cubes[0].coord("longitude") == fixed_cubes[1].coord( + "longitude", + ) + assert ( + fixed_cubes[0].coord("latitude").bounds + == fixed_cubes[1].coord("latitude").bounds + ).all() + assert ( + fixed_cubes[0].coord("latitude").points + == fixed_cubes[1].coord("latitude").points + ).all() + assert ( + fixed_cubes[0].coord("longitude").bounds + == fixed_cubes[1].coord("longitude").bounds + ).all() + assert ( + fixed_cubes[0].coord("longitude").points + == fixed_cubes[1].coord("longitude").points + ).all() + + +def test_get_coord_cached_from_facet(cubes_2d, cubes_3d): + """Test fix.""" + tos_cube = cubes_2d.extract_cube(NameConstraint(var_name="sosstsst")) + tos_cube2 = tos_cube.copy() + cubes = CubeList([tos_cube, tos_cube2]) + + fix = get_allvars_fix("Omon", "tos") + fixed_cubes = [] + for i in range(len(cubes)): + fixed_cubes.append(fix.fix_metadata(CubeList([cubes[i]]))[0]) + fixed_cubes = CubeList(fixed_cubes) + + assert fixed_cubes[0].coord("latitude") == fixed_cubes[1].coord("latitude") + assert fixed_cubes[0].coord("longitude") == fixed_cubes[1].coord( + "longitude", + ) + assert ( + fixed_cubes[0].coord("latitude").bounds + == fixed_cubes[1].coord("latitude").bounds + ).all() + assert ( + fixed_cubes[0].coord("latitude").points + == fixed_cubes[1].coord("latitude").points + ).all() + assert ( + fixed_cubes[0].coord("longitude").bounds + == fixed_cubes[1].coord("longitude").bounds + ).all() + assert ( + fixed_cubes[0].coord("longitude").points + == fixed_cubes[1].coord("longitude").points + ).all() + + +# Test _get_path_from_facet + + +@pytest.mark.parametrize( + ("path", "description", "output"), + [ + ("{tmp_path}/a.nc", None, "{tmp_path}/a.nc"), + ("b.nc", "Grid file", "{tmp_path}/b.nc"), + ], +) +def test_get_path_from_facet(path, description, output, tmp_path): + """Test fix.""" + session = CFG.start_session("my session") + session["auxiliary_data_dir"] = tmp_path + path = path.format(tmp_path=tmp_path) + fix = get_allvars_fix("Omon", "tos", session=session) + fix.extra_facets["test_path"] = path + + # Create empty dummy file + output = output.format(tmp_path=tmp_path) + with open(output, "w", encoding="utf-8"): + pass + + out_path = fix._get_path_from_facet("test_path", description=description) + + assert isinstance(out_path, Path) + assert out_path == Path(output.format(tmp_path=tmp_path)) + + +@pytest.mark.parametrize( + ("path", "description"), + [ + ("{tmp_path}/a.nc", None), + ("b.nc", "Grid file"), + ], +) +def test_get_path_from_facet_fail(path, description, tmp_path): + """Test fix.""" + session = CFG.start_session("my session") + session["auxiliary_data_dir"] = tmp_path + path = path.format(tmp_path=tmp_path) + fix = get_allvars_fix("Omon", "tos", session=session) + fix.extra_facets["test_path"] = 
path + + with pytest.raises(FileNotFoundError, match=description): + fix._get_path_from_facet("test_path", description=description) + + +# Test add_additional_cubes + + +@pytest.mark.parametrize("facet", ["zg_file", "zghalf_file"]) +@pytest.mark.parametrize("path", ["{tmp_path}/a.nc", "a.nc"]) +def test_add_additional_cubes(path, facet, tmp_path): + """Test fix.""" + session = CFG.start_session("my session") + session["auxiliary_data_dir"] = tmp_path + path = path.format(tmp_path=tmp_path) + fix = get_allvars_fix("Omon", "tos", session=session) + fix.extra_facets[facet] = path + + # Save temporary cube + cube = Cube(0, var_name=facet) + iris.save(cube, tmp_path / "a.nc") + + cubes = CubeList([]) + new_cubes = fix.add_additional_cubes(cubes) + + assert new_cubes is cubes + assert len(cubes) == 1 + assert cubes[0].var_name == facet + + +@pytest.mark.parametrize("facet", ["zg_file", "zghalf_file"]) +@pytest.mark.parametrize("path", ["{tmp_path}/a.nc", "a.nc"]) +def test_add_additional_cubes_fail(path, facet, tmp_path): + """Test fix.""" + session = CFG.start_session("my session") + session["auxiliary_data_dir"] = tmp_path + path = path.format(tmp_path=tmp_path) + fix = get_allvars_fix("Omon", "tos", session=session) + fix.extra_facets[facet] = path + + cubes = CubeList([]) + with pytest.raises(FileNotFoundError, match="File"): + fix.add_additional_cubes(cubes) diff --git a/tests/integration/cmor/_fixes/oras5/test_oras5.py b/tests/integration/cmor/_fixes/oras5/test_oras5.py deleted file mode 100644 index 4c13b60c9c..0000000000 --- a/tests/integration/cmor/_fixes/oras5/test_oras5.py +++ /dev/null @@ -1,2016 +0,0 @@ -"""Test the ICON on-the-fly CMORizer.""" - -from copy import deepcopy -from datetime import datetime -from pathlib import Path -from unittest import mock - -import iris -import numpy as np -import pytest -from cf_units import Unit -from iris import NameConstraint -from iris.coords import AuxCoord, CellMethod, DimCoord -from iris.cube import Cube, CubeList - -import esmvalcore.cmor._fixes.oras5.oras5 -from esmvalcore.cmor._fixes.fix import GenericFix -from esmvalcore.cmor._fixes.oras5._base_fixes import Oras5Fix -from esmvalcore.cmor._fixes.icon.icon import AllVars -from esmvalcore.cmor.fix import Fix -from esmvalcore.cmor.table import get_var_info -from esmvalcore.config import CFG -from esmvalcore.dataset import Dataset - -TEST_GRID_FILE_URI = ( - "https://github.com/ESMValGroup/ESMValCore/raw/main/tests/integration/" - "cmor/_fixes/test_data/oras5_grid.nc" -) -TEST_GRID_FILE_NAME = "oras5_grid.nc" - - -@pytest.fixture(autouse=True) -def tmp_cache_dir(monkeypatch, tmp_path): - """Use temporary path as cache directory for all tests in this module.""" - monkeypatch.setattr(Oras5Fix, "CACHE_DIR", tmp_path) - - -# Note: test_data_path is defined in tests/integration/cmor/_fixes/conftest.py - - -@pytest.fixture -def cubes_2d(test_data_path): - """2D sample cubes.""" - nc_path = test_data_path / "oras5_2d.nc" - return iris.load(str(nc_path)) - - -@pytest.fixture -def cubes_3d(test_data_path): - """3D sample cubes.""" - nc_path = test_data_path / "oras5_3d.nc" - return iris.load(str(nc_path)) - - -@pytest.fixture -def cubes_grid(test_data_path): - """Grid description sample cubes.""" - nc_path = test_data_path / "oras5_grid.nc" - return iris.load(str(nc_path)) - - -# @pytest.fixture -# def cubes_regular_grid(): -# """Cube with regular grid.""" -# time_coord = DimCoord( -# [0], -# var_name="time", -# standard_name="time", -# units="days since 1850-01-01", -# ) -# lat_coord = DimCoord( -# 
[0.0, 1.0], -# var_name="lat", -# standard_name="latitude", -# long_name="latitude", -# units="degrees_north", -# ) -# lon_coord = DimCoord( -# [-1.0, 1.0], -# var_name="lon", -# standard_name="longitude", -# long_name="longitude", -# units="degrees_east", -# ) -# cube = Cube( -# [[[0.0, 1.0], [2.0, 3.0]]], -# var_name="sosstsst", -# units="degC", -# dim_coords_and_dims=[(time_coord, 0), (lat_coord, 1), (lon_coord, 2)], -# ) -# return CubeList([cube]) - - -# @pytest.fixture -# def cubes_2d_lat_lon_grid(): -# """Cube with 2D latitude and longitude.""" -# time_coord = DimCoord( -# [0], -# var_name="time", -# standard_name="time", -# units="days since 1850-01-01", -# ) -# lat_coord = AuxCoord( -# [[0.0, 0.0, 0.0], [1.0, 1.0, 1.0]], -# var_name="lat", -# standard_name="latitude", -# long_name="latitude", -# units="degrees_north", -# ) -# lon_coord = AuxCoord( -# [[0.0, 1.0, 2.0], [0.0, 1.0, 2.0]], -# var_name="lon", -# standard_name="longitude", -# long_name="longitude", -# units="degrees_east", -# ) -# cube = Cube( -# [[[0.0, 1.0, 2.0], [2.0, 3.0, 4.0]]], -# var_name="sosstsst", -# units="degC", -# dim_coords_and_dims=[(time_coord, 0)], -# aux_coords_and_dims=[(lat_coord, (1, 2)), (lon_coord, (1, 2))], -# ) -# return CubeList([cube]) - - -# @pytest.fixture -# def simple_unstructured_cube(): -# """Create a cube with an unstructured grid.""" -# time_coord = DimCoord( -# [0], -# var_name="time", -# standard_name="time", -# units="days since 1850-01-01", -# ) -# depth_coord = DimCoord([0, 1, 2], var_name="depth") -# lat_coord = AuxCoord( -# [0.0, 1.0], -# var_name="lat", -# standard_name="latitude", -# long_name="latitude", -# units="degrees_north", -# ) -# lon_coord = AuxCoord( -# [0.0, 1.0], -# var_name="lon", -# standard_name="longitude", -# long_name="longitude", -# units="degrees_east", -# ) -# cube = Cube( -# [[[0.0, 1.0], [2.0, 3.0], [4.0, 5.0]]], -# var_name="votemper", -# units="degC", -# dim_coords_and_dims=[(time_coord, 0), (height_coord, 1)], -# aux_coords_and_dims=[(lat_coord, 2), (lon_coord, 2)], -# ) -# return cube - - -def _get_fix(mip, short_name, fix_name, session=None): - """Load a fix from esmvalcore.cmor._fixes.oras5.oras5.""" - dataset = Dataset( - project="ORAS5", - dataset="ORAS5", - mip=mip, - short_name=short_name, - ) - extra_facets = dataset._get_extra_facets() - extra_facets["frequency"] = "mon" - extra_facets["exp"] = "omip" - test_data_path = Path(__file__).resolve().parent.parent / "test_data" - extra_facets["horizontal_grid"] = str(test_data_path / "oras5_grid.nc") - extra_facets["ugrid"] = True - vardef = get_var_info(project="ORAS5", mip=mip, short_name=short_name) - cls = getattr(esmvalcore.cmor._fixes.oras5.oras5, fix_name) - fix = cls(vardef, extra_facets=extra_facets, session=session) - return fix - - -def get_fix(mip, short_name, session=None): - """Load a variable fix from esmvalcore.cmor._fixes.oras5.oras5.""" - fix_name = short_name[0].upper() + short_name[1:] - return _get_fix(mip, short_name, fix_name, session=session) - - -def get_allvars_fix(mip, short_name, session=None): - """Load the AllVars fix from esmvalcore.cmor._fixes.oras5.oras5.""" - return _get_fix(mip, short_name, "AllVars", session=session) - - -def fix_metadata(cubes, mip, short_name, session=None): - """Fix metadata of cubes.""" - fix = get_fix(mip, short_name, session=session) - cubes = fix.fix_metadata(cubes) - fix = get_allvars_fix(mip, short_name, session=session) - cubes = fix.fix_metadata(cubes) - return cubes - - -def fix_data(cube, mip, short_name, session=None): - """Fix data 
of cube.""" - fix = get_fix(mip, short_name, session=session) - cube = fix.fix_data(cube) - fix = get_allvars_fix(mip, short_name, session=session) - cube = fix.fix_data(cube) - return cube - - -def check_thetao_metadata(cubes): - """Check thetao metadata.""" - assert len(cubes) == 1 - cube = cubes[0] - assert cube.var_name == "thetao" - assert cube.standard_name == "sea_water_potential_temperature" - assert cube.long_name == "Sea Water Potential Temperature" - assert cube.units == "degC" - assert "positive" not in cube.attributes - return cube - - -def check_tos_metadata(cubes): - """Check tos metadata.""" - assert len(cubes) == 1 - cube = cubes[0] - assert cube.var_name == "tos" - assert cube.standard_name == "sea_surface_temperature" - assert cube.long_name == "Sea Surface Temperature" - assert cube.units == "degC" - # assert "positive" not in cube.attributes - return cube - - -# def check_siconc_metadata(cubes, var_name, long_name): -# """Check tas metadata.""" -# assert len(cubes) == 1 -# cube = cubes[0] -# assert cube.var_name == var_name -# assert cube.standard_name == "sea_ice_area_fraction" -# assert cube.long_name == long_name -# assert cube.units == "%" -# assert "positive" not in cube.attributes -# return cube - - -def check_time(cube): - """Check time coordinate of cube.""" - assert cube.coords("time", dim_coords=True) - time = cube.coord("time", dim_coords=True) - assert time.var_name == "time" - assert time.standard_name == "time" - assert time.long_name == "time" - # assert time.units == Unit( - # "days since 1850-01-01", calendar="proleptic_gregorian" - # ) - # np.testing.assert_allclose(time.points, [54770.5]) - # np.testing.assert_allclose(time.bounds, [[54755.0, 54786.0]]) - assert time.attributes == {} - - -def check_model_level_metadata(cube): - """Check metadata of model_level coordinate.""" - assert cube.coords("depth", dim_coords=True) - height = cube.coord("depth", dim_coords=True) - assert height.var_name == "lev" - assert height.standard_name is None - assert height.long_name == "model level number" - assert height.units == "m" - assert height.attributes == {"positive": "down"} - return height - - -def check_air_pressure_metadata(cube): - """Check metadata of air_pressure coordinate.""" - assert cube.coords("air_pressure", dim_coords=False) - plev = cube.coord("air_pressure", dim_coords=False) - assert plev.var_name == "plev" - assert plev.standard_name == "air_pressure" - assert plev.long_name == "pressure" - assert plev.units == "Pa" - assert plev.attributes == {"positive": "down"} - return plev - - -def check_height(cube, plev_has_bounds=True): - """Check height coordinate of cube.""" - height = check_model_level_metadata(cube) - np.testing.assert_array_equal(height.points, np.arange(47)) - assert height.bounds is None - - plev = check_air_pressure_metadata(cube) - assert cube.coord_dims("air_pressure") == (0, 1, 2) - - np.testing.assert_allclose( - plev.points[0, :4, 0], - [100566.234, 99652.07, 97995.77, 95686.08], - ) - if plev_has_bounds: - np.testing.assert_allclose( - plev.bounds[0, :4, 0], - [ - [100825.04, 100308.09], - [100308.09, 99000.336], - [99000.336, 97001.42], - [97001.42, 94388.59], - ], - ) - else: - assert plev.bounds is None - - -# def check_heightxm(cube, height_value): -# """Check scalar heightxm coordinate of cube.""" -# assert cube.coords("depth") -# height = cube.coord("depth") -# assert height.var_name == "depth" -# assert height.standard_name == "depth" -# assert height.long_name == "depth" -# assert height.units == "m" -# assert 
height.attributes == {"positive": "up"} -# np.testing.assert_allclose(height.points, [height_value]) -# assert height.bounds is None - - -def check_lat(cube): - """Check latitude coordinate of cube.""" - assert cube.coords("latitude", dim_coords=False) - lat = cube.coord("latitude", dim_coords=False) - assert lat.var_name == "lat" - assert lat.standard_name == "latitude" - assert lat.long_name == "latitude" - assert lat.units == "degrees_north" - # assert lat.attributes == {} - # np.testing.assert_allclose( - # lat.points, - # [-45.0, -45.0, -45.0, -45.0, 45.0, 45.0, 45.0, 45.0], - # rtol=1e-5, - # ) - # np.testing.assert_allclose( - # lat.bounds, - # [ - # [-90.0, 0.0, 0.0], - # [-90.0, 0.0, 0.0], - # [-90.0, 0.0, 0.0], - # [-90.0, 0.0, 0.0], - # [0.0, 0.0, 90.0], - # [0.0, 0.0, 90.0], - # [0.0, 0.0, 90.0], - # [0.0, 0.0, 90.0], - # ], - # rtol=1e-5, - # ) - return lat - - -def check_lon(cube): - """Check longitude coordinate of cube.""" - assert cube.coords("longitude", dim_coords=False) - lon = cube.coord("longitude", dim_coords=False) - assert lon.var_name == "lon" - assert lon.standard_name == "longitude" - assert lon.long_name == "longitude" - assert lon.units == "degrees_east" - # assert lon.attributes == {} - # np.testing.assert_allclose( - # lon.points, - # [225.0, 315.0, 45.0, 135.0, 225.0, 315.0, 45.0, 135.0], - # rtol=1e-5, - # ) - # np.testing.assert_allclose( - # lon.bounds, - # [ - # [0.0, 270.0, 180.0], - # [0.0, 0.0, 270.0], - # [0.0, 90.0, 0.0], - # [0.0, 180.0, 90.0], - # [180.0, 270.0, 0.0], - # [270.0, 0.0, 0.0], - # [0.0, 90.0, 0.0], - # [90.0, 180.0, 0.0], - # ], - # rtol=1e-5, - # ) - return lon - - -def check_lat_lon(cube): - """Check latitude, longitude and mesh of cube.""" - lat = check_lat(cube) - lon = check_lon(cube) - - # Check that latitude and longitude are mesh coordinates - assert cube.coords("latitude", mesh_coords=True) - assert cube.coords("longitude", mesh_coords=True) - - # Check dimensional coordinate describing the mesh - assert cube.coords( - "first spatial index for variables stored on an unstructured grid", - dim_coords=True, - ) - i_coord = cube.coord( - "first spatial index for variables stored on an unstructured grid", - dim_coords=True, - ) - assert i_coord.var_name == "i" - assert i_coord.standard_name is None - assert i_coord.long_name == ( - "first spatial index for variables stored on an unstructured grid" - ) - assert i_coord.units == "1" - np.testing.assert_allclose(i_coord.points, list(range(13 * 12))) - assert i_coord.bounds is None - - assert len(cube.coord_dims(lat)) == 1 - assert cube.coord_dims(lat) == cube.coord_dims(lon) - assert cube.coord_dims(lat) == cube.coord_dims(i_coord) - - # Check the mesh itself - assert cube.location == "face" - mesh = cube.mesh - check_mesh(mesh) - - -def check_mesh(mesh): - """Check the mesh.""" - assert mesh is not None - assert mesh.var_name is None - assert mesh.standard_name is None - assert mesh.long_name is None - assert mesh.units == "unknown" - assert mesh.attributes == {} - assert mesh.cf_role == "mesh_topology" - assert mesh.topology_dimension == 2 - - # Check face coordinates - assert len(mesh.coords(location="face")) == 2 - - mesh_face_lat = mesh.coord(location="face", axis="y") - assert mesh_face_lat.var_name == "lat" - assert mesh_face_lat.standard_name == "latitude" - assert mesh_face_lat.long_name == "latitude" - assert mesh_face_lat.units == "degrees_north" - assert mesh_face_lat.attributes == {} - # np.testing.assert_allclose( - # mesh_face_lat.points, - # [-45.0, -45.0, -45.0, 
-45.0, 45.0, 45.0, 45.0, 45.0], - # rtol=1e-5, - # ) - # np.testing.assert_allclose( - # mesh_face_lat.bounds, - # [ - # [-90.0, 0.0, 0.0], - # [-90.0, 0.0, 0.0], - # [-90.0, 0.0, 0.0], - # [-90.0, 0.0, 0.0], - # [0.0, 0.0, 90.0], - # [0.0, 0.0, 90.0], - # [0.0, 0.0, 90.0], - # [0.0, 0.0, 90.0], - # ], - # rtol=1e-5, - # ) - - mesh_face_lon = mesh.coord(location="face", axis="x") - assert mesh_face_lon.var_name == "lon" - assert mesh_face_lon.standard_name == "longitude" - assert mesh_face_lon.long_name == "longitude" - assert mesh_face_lon.units == "degrees_east" - assert mesh_face_lon.attributes == {} - # np.testing.assert_allclose( - # mesh_face_lon.points, - # [225.0, 315.0, 45.0, 135.0, 225.0, 315.0, 45.0, 135.0], - # rtol=1e-5, - # ) - # np.testing.assert_allclose( - # mesh_face_lon.bounds, - # [ - # [0.0, 270.0, 180.0], - # [0.0, 0.0, 270.0], - # [0.0, 90.0, 0.0], - # [0.0, 180.0, 90.0], - # [180.0, 270.0, 0.0], - # [270.0, 0.0, 0.0], - # [0.0, 90.0, 0.0], - # [90.0, 180.0, 0.0], - # ], - # rtol=1e-5, - # ) - - # Check node coordinates - assert len(mesh.coords(location="node")) == 2 - - mesh_node_lat = mesh.coord(location="node", axis="y") - assert mesh_node_lat.var_name == "nlat" - assert mesh_node_lat.standard_name == "latitude" - assert mesh_node_lat.long_name == "node latitude" - assert mesh_node_lat.units == "degrees_north" - assert mesh_node_lat.attributes == {} - # np.testing.assert_allclose( - # mesh_node_lat.points, [-90.0, 0.0, 0.0, 0.0, 0.0, 90.0], rtol=1e-5 - # ) - assert mesh_node_lat.bounds is None - - mesh_node_lon = mesh.coord(location="node", axis="x") - assert mesh_node_lon.var_name == "nlon" - assert mesh_node_lon.standard_name == "longitude" - assert mesh_node_lon.long_name == "node longitude" - assert mesh_node_lon.units == "degrees_east" - assert mesh_node_lon.attributes == {} - # np.testing.assert_allclose( - # mesh_node_lon.points, [0.0, 180.0, 270.0, 0.0, 90, 0.0], rtol=1e-5 - # ) - assert mesh_node_lon.bounds is None - - # Check connectivity - assert len(mesh.connectivities()) == 1 - conn = mesh.connectivity() - assert conn.var_name is None - assert conn.standard_name is None - assert conn.long_name is None - assert conn.units == "unknown" - assert conn.attributes == {} - assert conn.cf_role == "face_node_connectivity" - assert conn.start_index == 0 - assert conn.location_axis == 0 - assert conn.shape == (int(13 * 12), 4) - # np.testing.assert_array_equal( - # conn.indices, - # [ - # [1, 3, 2], - # [1, 4, 3], - # [1, 5, 4], - # [1, 2, 5], - # [2, 3, 6], - # [3, 4, 6], - # [4, 5, 6], - # [5, 2, 6], - # ], - # ) - - -def test_get_thetao_fix(): - """Test getting of fix.""" - fix = Fix.get_fixes("ORAS5", "ORAS5", "Omon", "thetao") - assert fix == [AllVars(None), GenericFix(None)] - - -def test_thetao_fix(cubes_3d): - """Test fix.""" - fix = get_allvars_fix("Omon", "thetao") - fixed_cubes = fix.fix_metadata(cubes_3d) - - cube = check_thetao_metadata(fixed_cubes) - check_time(cube) - # check_height(cube) - check_lat_lon(cube) - - -def test_get_tos_fix(): - """Test getting of fix.""" - fix = Fix.get_fixes("ORAS5", "ORAS5", "Omon", "tos") - assert fix == [AllVars(None), GenericFix(None)] - - -def test_tos_fix(cubes_2d): - """Test fix.""" - fix = get_allvars_fix("Omon", "tos") - fixed_cubes = fix.fix_metadata(cubes_2d) - - cube = check_tos_metadata(fixed_cubes) - check_time(cube) - check_lat_lon(cube) - # check_heightxm(cube, 2.0) - - -def test_tos_no_mesh(cubes_2d): - """Test fix.""" - fix = get_allvars_fix("Omon", "tos") - fix.extra_facets["ugrid"] = False - 
fixed_cubes = fix.fix_metadata(cubes_2d) - - cube = check_tos_metadata(fixed_cubes) - - assert cube.mesh is None - - lat = check_lat(cube) - lon = check_lon(cube) - - # assert cube.coords( - # "first spatial index for variables stored on an unstructured grid", - # dim_coords=True, - # ) - # i_coord = cube.coord( - # "first spatial index for variables stored on an unstructured grid", - # dim_coords=True, - # ) - # assert i_coord.var_name == "i" - # assert i_coord.standard_name is None - # assert i_coord.long_name == ( - # "first spatial index for variables stored on an unstructured grid" - # ) - # assert i_coord.units == "1" - # np.testing.assert_allclose(i_coord.points, [0, 1, 2, 3, 4, 5, 6, 7]) - # assert i_coord.bounds is None - - assert cube.coords("latitude", dim_coords=False) - assert cube.coords("longitude", dim_coords=False) - # lat = cube.coord("latitude", dim_coords=False) - # lon = cube.coord("longitude", dim_coords=False) - assert len(cube.coord_dims(lat)) == 2 - # assert cube.coord_dims(lat) == cube.coord_dims(lon) - # assert cube.coord_dims(lat) == cube.coord_dims(i_coord) - - -def test_empty_standard_name_fix(cubes_2d, monkeypatch): - """Test fix.""" - fix = get_allvars_fix("Omon", "tos") - # We know that tas has a standard name, but this being native model output - # there may be variables with no standard name. The code is designed to - # handle this gracefully and here we test it with an artificial, but - # realistic case. - monkeypatch.setattr(fix.vardef, "standard_name", "") - fixed_cubes = fix.fix_metadata(cubes_2d) - - assert len(fixed_cubes) == 1 - cube = fixed_cubes[0] - assert cube.var_name == "tos" - assert cube.standard_name is None - assert cube.long_name == "Sea Surface Temperature" - assert cube.units == "degC" - assert "positive" not in cube.attributes - - -# Test automatic addition of missing coordinates - - -def test_add_time(cubes_2d, cubes_3d): - """Test fix.""" - # Remove time from tas cube to test automatic addition - tos_cube = cubes_2d.extract_cube(NameConstraint(var_name="sosstsst")) - thetao_cube = cubes_3d.extract_cube(NameConstraint(var_name="votemper")) - tos_cube = tos_cube[0] - tos_cube.remove_coord("time") - cubes = CubeList([tos_cube, thetao_cube]) - - fix = get_allvars_fix("Omon", "tos") - fixed_cubes = fix.fix_metadata(cubes) - cube = check_tos_metadata(fixed_cubes) - # assert cube.shape == (1, 13, 12) - check_time(cube) - - -def test_add_time_fail(): - """Test fix.""" - fix = get_allvars_fix("Omon", "tos") - cube = Cube(1, var_name="sosstsst", units="degC") - cubes = CubeList( - [ - cube, - Cube(1, var_name="sosstsst", units="degC"), - ] - ) - msg = "Cannot add required coordinate 'time' to variable 'tos'" - with pytest.raises(ValueError, match=msg): - fix._add_time(cube, cubes) - - -# def test_add_latitude_fail(cubes_2d): -# """Test fix.""" -# # Remove latitude and grid file attribute from tas cube to test automatic -# # addition -# tos_cube = cubes_2d.extract_cube(NameConstraint(var_name="sosstsst")) -# cubes = CubeList([tos_cube]) -# fix = get_allvars_fix("Omon", "tos") -# fixed_cube = fix.fix_metadata(cubes)[0] -# fixed_cube.remove_coord("longitude") -# # fix._horizontal_grids[0].remove_coord("longitude") -# first_key = list(fix._horizontal_grids)[0] -# first_val = list(fix._horizontal_grids.values())[0][0] -# first_val.remove_coord("longitude") -# fix._horizontal_grids[first_key] = first_val -# fix.extra_facets["raw_name"] = "tos" - -# msg = "Failed to add missing latitude coordinate to cube" -# with pytest.raises(ValueError, 
match=msg): -# fix.fix_metadata(CubeList([fixed_cube])) - - -# def test_add_longitude_fail(cubes_2d): -# """Test fix.""" -# # Remove longitude and grid file attribute from tas cube to test automatic -# # addition -# tos_cube = cubes_2d.extract_cube(NameConstraint(var_name="sosstsst")) -# tos_cube.remove_coord("longitude") -# cubes = CubeList([tos_cube]) -# fix = get_allvars_fix("Omon", "tos") -# fix.extra_facets["horizontal_grid"] = None - -# msg = "Failed to add missing longitude coordinate to cube" -# with pytest.raises(ValueError, match=msg): -# fix.fix_metadata(cubes) - - -# Test get_horizontal_grid - - -# @mock.patch.object(Oras5Fix, "_get_grid_from_facet", autospec=True) -# @mock.patch("esmvalcore.cmor._fixes.oras5._base_fixes.requests", autospec=True) -# def test_get_horizontal_grid_from_attr_cached_in_dict( -# mock_requests, -# mock_get_grid_from_facet, -# ): -# """Test fix.""" -# cube = Cube(0, attributes={"grid_file_uri": "cached_grid_url.nc"}) -# grid_cube = Cube(0) -# fix = get_allvars_fix("Omon", "tos") -# fix._horizontal_grids["cached_grid_url.nc"] = grid_cube -# fix._horizontal_grids["grid_from_facet.nc"] = mock.sentinel.wrong_grid - -# grid = fix.get_horizontal_grid(cube) -# assert len(fix._horizontal_grids) == 2 -# assert "cached_grid_url.nc" in fix._horizontal_grids -# assert "grid_from_facet.nc" in fix._horizontal_grids # has not been used -# assert fix._horizontal_grids["cached_grid_url.nc"] == grid -# assert grid is grid_cube -# assert mock_requests.mock_calls == [] -# mock_get_grid_from_facet.assert_not_called() - - -# @mock.patch.object(Oras5Fix, "_get_grid_from_facet", autospec=True) -# def test_get_horizontal_grid_from_attr_rootpath( -# mock_get_grid_from_facet, monkeypatch, tmp_path -# ): -# """Test fix.""" -# rootpath = deepcopy(CFG["rootpath"]) -# rootpath["ORAS5"] = str(tmp_path) -# monkeypatch.setitem(CFG, "rootpath", rootpath) -# cube = Cube(0, attributes={"grid_file_uri": "grid.nc"}) -# grid_cube = Cube(0, var_name="test_grid_cube") -# (tmp_path / "omip").mkdir(parents=True, exist_ok=True) -# iris.save(grid_cube, tmp_path / "omip" / "grid.nc") - -# fix = get_allvars_fix("Omon", "tos") -# fix._horizontal_grids["grid_from_facet.nc"] = mock.sentinel.wrong_grid - -# grid = fix.get_horizontal_grid(cube) -# assert len(fix._horizontal_grids) == 2 -# assert "grid.nc" in fix._horizontal_grids -# assert "grid_from_facet.nc" in fix._horizontal_grids # has not been used -# assert fix._horizontal_grids["grid.nc"] == grid -# assert len(grid) == 1 -# assert grid[0].var_name == "test_grid_cube" -# assert grid[0].shape == () -# mock_get_grid_from_facet.assert_not_called() - - -# @mock.patch.object(Oras5Fix, "_get_grid_from_facet", autospec=True) -# @mock.patch("esmvalcore.cmor._fixes.oras5._base_fixes.requests", autospec=True) -# def test_get_horizontal_grid_from_attr_cached_in_file( -# mock_requests, -# mock_get_grid_from_facet, -# tmp_path, -# ): -# """Test fix.""" -# cube = Cube( -# 0, -# attributes={ -# "grid_file_uri": "https://temporary.url/this/is/the/grid_file.nc" -# }, -# ) -# fix = get_allvars_fix("Omon", "tos") -# assert len(fix._horizontal_grids) == 0 - -# # Save temporary grid file -# grid_cube = Cube(0, var_name="grid") -# iris.save(grid_cube, str(tmp_path / "grid_file.nc")) - -# grid = fix.get_horizontal_grid(cube) -# assert isinstance(grid, CubeList) -# assert len(grid) == 1 -# assert grid[0].var_name == "grid" -# assert grid[0].shape == () -# assert len(fix._horizontal_grids) == 1 -# assert "grid_file.nc" in fix._horizontal_grids -# assert 
fix._horizontal_grids["grid_file.nc"] == grid -# assert mock_requests.mock_calls == [] -# mock_get_grid_from_facet.assert_not_called() - - -# @mock.patch.object(Oras5Fix, "_get_grid_from_facet", autospec=True) -# def test_get_horizontal_grid_from_attr_cache_file_too_old( -# mock_get_grid_from_facet, -# tmp_path, -# monkeypatch, -# ): -# """Test fix.""" -# cube = Cube(0, attributes={"grid_file_uri": TEST_GRID_FILE_URI}) -# fix = get_allvars_fix("Omon", "tos") -# assert len(fix._horizontal_grids) == 0 - -# # Save temporary grid file -# grid_cube = Cube(0, var_name="grid") -# iris.save(grid_cube, str(tmp_path / "oras5_grid.nc")) - -# # Temporary overwrite default cache location for downloads and cache -# # validity duration -# monkeypatch.setattr(fix, "CACHE_VALIDITY", -1) - -# grid = fix.get_horizontal_grid(cube) -# assert isinstance(grid, CubeList) -# assert len(grid) == 4 -# var_names = [cube.var_name for cube in grid] -# assert "cell_area" in var_names -# assert "dual_area" in var_names -# assert "vertex_index" in var_names -# assert "vertex_of_cell" in var_names -# assert len(fix._horizontal_grids) == 1 -# assert TEST_GRID_FILE_NAME in fix._horizontal_grids -# assert fix._horizontal_grids[TEST_GRID_FILE_NAME] == grid -# mock_get_grid_from_facet.assert_not_called() - - -@mock.patch.object(Oras5Fix, "_get_grid_from_cube_attr", autospec=True) -def test_get_horizontal_grid_from_facet_cached_in_dict( - mock_get_grid_from_cube_attr, - tmp_path, -): - """Test fix.""" - session = CFG.start_session("my session") - session["auxiliary_data_dir"] = tmp_path - - # Save temporary grid file (this will not be used; however, it is necessary - # to not raise a FileNotFoundError) - grid_path = "grid.nc" - wrong_grid_cube = Cube(0, var_name="wrong_grid") - iris.save(wrong_grid_cube, tmp_path / "grid.nc") - - # Make sure that grid specified by cube attribute is NOT used - cube = Cube(0, attributes={"grid_file_uri": "cached_grid_url.nc"}) - grid_cube = Cube(0, var_name="grid") - fix = get_allvars_fix("Omon", "tos", session=session) - fix.extra_facets["horizontal_grid"] = grid_path - fix._horizontal_grids["cached_grid_url.nc"] = mock.sentinel.wrong_grid - fix._horizontal_grids["grid.nc"] = grid_cube - - grid = fix.get_horizontal_grid(cube) - assert len(fix._horizontal_grids) == 2 - assert "cached_grid_url.nc" in fix._horizontal_grids # has not been used - assert "grid.nc" in fix._horizontal_grids - assert fix._horizontal_grids["grid.nc"] == grid - # assert grid is grid_cube - mock_get_grid_from_cube_attr.assert_not_called() - - -@pytest.mark.parametrize("grid_path", ["{tmp_path}/grid.nc", "grid.nc"]) -@mock.patch.object(Oras5Fix, "_get_grid_from_cube_attr", autospec=True) -def test_get_horizontal_grid_from_facet( - mock_get_grid_from_cube_attr, - grid_path, - tmp_path, -): - """Test fix.""" - session = CFG.start_session("my session") - session["auxiliary_data_dir"] = tmp_path - - # Make sure that grid specified by cube attribute is NOT used - cube = Cube(0, attributes={"grid_file_uri": "cached_grid_url.nc"}) - - # Save temporary grid file - grid_path = grid_path.format(tmp_path=tmp_path) - grid_cube = Cube(0, var_name="grid") - iris.save(grid_cube, tmp_path / "grid.nc") - - fix = get_allvars_fix("Omon", "tos", session=session) - fix.extra_facets["horizontal_grid"] = grid_path - fix._horizontal_grids["cached_grid_url.nc"] = mock.sentinel.wrong_grid - - grid = fix.get_horizontal_grid(cube) - assert isinstance(grid, CubeList) - assert len(grid) == 1 - assert grid[0].var_name == "grid" - assert 
len(fix._horizontal_grids) == 2 - assert "cached_grid_url.nc" in fix._horizontal_grids # has not been used - assert "grid.nc" in fix._horizontal_grids - assert fix._horizontal_grids["grid.nc"] == grid - mock_get_grid_from_cube_attr.assert_not_called() - - -def test_get_horizontal_grid_from_facet_fail(tmp_path): - """Test fix.""" - session = CFG.start_session("my session") - session["auxiliary_data_dir"] = tmp_path - - cube = Cube(0) - fix = get_allvars_fix("Omon", "tos", session=session) - fix.extra_facets["horizontal_grid"] = "/this/does/not/exist.nc" - - with pytest.raises(FileNotFoundError): - fix.get_horizontal_grid(cube) - - -def test_get_horizontal_grid_none(tmp_path): - """Test fix.""" - session = CFG.start_session("my session") - session["auxiliary_data_dir"] = tmp_path - - cube = Cube(0) - fix = get_allvars_fix("Omon", "tos", session=session) - del fix.extra_facets["horizontal_grid"] - - msg = "Full path to suitable ORAS5 grid must be specified in facet 'horizontal_grid'" - with pytest.raises(NotImplementedError, match=msg): - fix.get_horizontal_grid(cube) - - -# Test with single-dimension cubes - - -# def test_only_time(monkeypatch): -# """Test fix.""" -# fix = get_allvars_fix("Omon", "thetao") -# # We know that ta has dimensions time, plev19, latitude, longitude, but the -# # ICON CMORizer is designed to check for the presence of each dimension -# # individually. To test this, remove all but one dimension of ta to create -# # an artificial, but realistic test case. -# coord_info = CoordinateInfo("time") -# coord_info.standard_name = "time" -# monkeypatch.setattr(fix.vardef, "coordinates", {"time": coord_info}) - -# # Create cube with only a single dimension -# time_coord = DimCoord( -# [0.0, 31.0], -# var_name="time", -# standard_name="time", -# long_name="time", -# units="days since 1850-01-01", -# ) -# cubes = CubeList( -# [ -# Cube( -# [1, 1], -# var_name="votemper", -# units="degC", -# dim_coords_and_dims=[(time_coord, 0)], -# ), -# ] -# ) -# fixed_cubes = fix.fix_metadata(cubes) - -# # Check cube metadata -# cube = check_ta_metadata(fixed_cubes) - -# # Check cube data -# assert cube.shape == (2,) -# np.testing.assert_equal(cube.data, [1, 1]) - -# # Check time metadata -# assert cube.coords("time") -# new_time_coord = cube.coord("time", dim_coords=True) -# assert new_time_coord.var_name == "time" -# assert new_time_coord.standard_name == "time" -# assert new_time_coord.long_name == "time" -# assert new_time_coord.units == "days since 1850-01-01" - -# # Check time data -# np.testing.assert_allclose(new_time_coord.points, [-15.5, 15.5]) -# np.testing.assert_allclose( -# new_time_coord.bounds, [[-31.0, 0.0], [0.0, 31.0]] -# ) - -# # Check that no mesh has been created -# assert cube.mesh is None - - -# def test_only_depth(monkeypatch): -# """Test fix.""" -# fix = get_allvars_fix("Omon", "thetao") -# # We know that ta has dimensions time, plev19, latitude, longitude, but the -# # ICON CMORizer is designed to check for the presence of each dimension -# # individually. To test this, remove all but one dimension of ta to create -# # an artificial, but realistic test case. 
-# coord_info = CoordinateInfo("plev19") -# coord_info.standard_name = "air_pressure" -# monkeypatch.setattr(fix.vardef, "coordinates", {"plev19": coord_info}) - -# # Create cube with only a single dimension -# height_coord = DimCoord( -# [1000.0, 100.0], var_name="depth", standard_name="depth", units="cm" -# ) -# cubes = CubeList( -# [ -# Cube( -# [1, 1], -# var_name="votemper", -# units="degC", -# dim_coords_and_dims=[(height_coord, 0)], -# ), -# ] -# ) -# fixed_cubes = fix.fix_metadata(cubes) - -# # Check cube metadata -# cube = check_ta_metadata(fixed_cubes) - -# # Check cube data -# assert cube.shape == (2,) -# np.testing.assert_equal(cube.data, [1, 1]) - -# # Check height metadata -# assert cube.coords("depth", dim_coords=True) -# new_height_coord = cube.coord("depth") -# assert new_height_coord.var_name == "depth" -# assert new_height_coord.standard_name == "depth" -# assert new_height_coord.long_name == "depth" -# assert new_height_coord.units == "m" -# assert new_height_coord.attributes == {"positive": "up"} - -# # Check height data -# np.testing.assert_allclose(new_height_coord.points, [1.0, 10.0]) -# assert new_height_coord.bounds is None - -# # Check that no air_pressure coordinate has been created -# assert not cube.coords("air_pressure") - -# # Check that no mesh has been created -# assert cube.mesh is None - - -# def test_only_latitude(monkeypatch): -# """Test fix.""" -# fix = get_allvars_fix("Omon", "thetao") -# # We know that ta has dimensions time, plev19, latitude, longitude, but the -# # ICON CMORizer is designed to check for the presence of each dimension -# # individually. To test this, remove all but one dimension of ta to create -# # an artificial, but realistic test case. -# coord_info = CoordinateInfo("latitude") -# coord_info.standard_name = "latitude" -# monkeypatch.setattr(fix.vardef, "coordinates", {"latitude": coord_info}) - -# # Create cube with only a single dimension -# lat_coord = DimCoord( -# [0.0, 10.0], var_name="lat", standard_name="latitude", units="degrees" -# ) -# cubes = CubeList( -# [ -# Cube( -# [1, 1], -# var_name="votemper", -# units="degC", -# dim_coords_and_dims=[(lat_coord, 0)], -# ), -# ] -# ) -# fixed_cubes = fix.fix_metadata(cubes) - -# # Check cube metadata -# cube = check_ta_metadata(fixed_cubes) - -# # Check cube data -# assert cube.shape == (2,) -# np.testing.assert_equal(cube.data, [1, 1]) - -# # Check latitude metadata -# assert cube.coords("latitude", dim_coords=True) -# new_lat_coord = cube.coord("latitude") -# assert new_lat_coord.var_name == "lat" -# assert new_lat_coord.standard_name == "latitude" -# assert new_lat_coord.long_name == "latitude" -# assert new_lat_coord.units == "degrees_north" - -# # Check latitude data -# np.testing.assert_allclose(new_lat_coord.points, [0.0, 10.0]) -# assert new_lat_coord.bounds is None - -# # Check that no mesh has been created -# assert cube.mesh is None - - -# def test_only_longitude(monkeypatch): -# """Test fix.""" -# fix = get_allvars_fix("Omon", "thetao") -# # We know that ta has dimensions time, plev19, latitude, longitude, but the -# # ICON CMORizer is designed to check for the presence of each dimension -# # individually. To test this, remove all but one dimension of ta to create -# # an artificial, but realistic test case. 
-# coord_info = CoordinateInfo("longitude") -# coord_info.standard_name = "longitude" -# monkeypatch.setattr(fix.vardef, "coordinates", {"longitude": coord_info}) - -# # Create cube with only a single dimension -# lon_coord = DimCoord( -# [0.0, 180.0], -# var_name="lon", -# standard_name="longitude", -# units="degrees", -# ) -# cubes = CubeList( -# [ -# Cube( -# [1, 1], -# var_name="votemper", -# units="degC", -# dim_coords_and_dims=[(lon_coord, 0)], -# ), -# ] -# ) -# fixed_cubes = fix.fix_metadata(cubes) - -# # Check cube metadata -# cube = check_ta_metadata(fixed_cubes) - -# # Check cube data -# assert cube.shape == (2,) -# np.testing.assert_equal(cube.data, [1, 1]) - -# # Check longitude metadata -# assert cube.coords("longitude", dim_coords=True) -# new_lon_coord = cube.coord("longitude") -# assert new_lon_coord.var_name == "lon" -# assert new_lon_coord.standard_name == "longitude" -# assert new_lon_coord.long_name == "longitude" -# assert new_lon_coord.units == "degrees_east" - -# # Check longitude data -# np.testing.assert_allclose(new_lon_coord.points, [0.0, 180.0]) -# assert new_lon_coord.bounds is None - -# # Check that no mesh has been created -# assert cube.mesh is None - - -# Test variable not available in file - - -# def test_var_not_available_pr(cubes_2d): -# """Test fix.""" -# fix = get_allvars_fix("Omon", "pr") -# msg = "Variable 'pr' used to extract 'pr' is not available in input file" -# with pytest.raises(ValueError, match=msg): -# fix.fix_metadata(cubes_2d) - - -# Test fix with invalid time units - - -def test_invalid_time_units(cubes_2d): - """Test fix.""" - fix = get_allvars_fix("Omon", "tos") - for cube in cubes_2d: - cube.coord("time").attributes["invalid_units"] = "month as %Y%m%d.%f" - msg = "Expected time units" - with pytest.raises(ValueError, match=msg): - fix.fix_metadata(cubes_2d) - - -# Test fix with (sub-)hourly data - - -# def test_hourly_data(cubes_2d): -# """Test fix.""" -# fix = get_allvars_fix("Omon", "tos") -# fix.extra_facets["frequency"] = "1hr" -# for cube in cubes_2d: -# cube.coord("time").points = [20041104.5833333] - -# fixed_cubes = fix.fix_metadata(cubes_2d) - -# cube = check_tas_metadata(fixed_cubes) -# date = cube.coord("time").units.num2date(cube.coord("time").points) -# date_bnds = cube.coord("time").units.num2date(cube.coord("time").bounds) -# np.testing.assert_array_equal(date, [datetime(2004, 11, 4, 13, 30)]) -# np.testing.assert_array_equal( -# date_bnds, [[datetime(2004, 11, 4, 13), datetime(2004, 11, 4, 14)]] -# ) - - -# @pytest.mark.parametrize( -# "bounds", -# [ -# None, -# [ -# [20211231.875, 20220101.125], -# [20220101.125, 20220101.375], -# ], -# ], -# ) -# def test_6hourly_data_multiple_points(bounds): -# """Test fix.""" -# time_coord = DimCoord( -# [20220101, 20220101.25], -# bounds=bounds, -# standard_name="time", -# attributes={"invalid_units": "day as %Y%m%d.%f"}, -# ) -# cube = Cube( -# [1, 2], -# var_name="sosstsst", -# units="degC", -# dim_coords_and_dims=[(time_coord, 0)], -# ) -# cubes = CubeList([cube]) -# fix = get_allvars_fix("Omon", "tos") -# fix.extra_facets["frequency"] = "6hr" - -# fixed_cube = fix._fix_time(cube, cubes) - -# points = fixed_cube.coord("time").units.num2date(cube.coord("time").points) -# bounds = fixed_cube.coord("time").units.num2date(cube.coord("time").bounds) -# np.testing.assert_array_equal( -# points, -# [datetime(2021, 12, 31, 21), datetime(2022, 1, 1, 3)], -# ) -# np.testing.assert_array_equal( -# bounds, -# [ -# [datetime(2021, 12, 31, 18), datetime(2022, 1, 1)], -# [datetime(2022, 1, 
1), datetime(2022, 1, 1, 6)], -# ], -# ) - - -# def test_subhourly_data_no_shift(): -# """Test fix.""" -# time_coord = DimCoord( -# [0.5, 1.0], -# standard_name="time", -# units=Unit("hours since 2022-01-01", calendar="proleptic_gregorian"), -# ) -# cube = Cube( -# [1, 2], -# var_name="sosstsst", -# units="degC", -# dim_coords_and_dims=[(time_coord, 0)], -# ) -# cubes = CubeList([cube]) -# fix = get_allvars_fix("Omon", "tos") -# fix.extra_facets["frequency"] = "subhr" -# fix.extra_facets["shift_time"] = False - -# fixed_cube = fix._fix_time(cube, cubes) - -# points = fixed_cube.coord("time").units.num2date(cube.coord("time").points) -# bounds = fixed_cube.coord("time").units.num2date(cube.coord("time").bounds) -# np.testing.assert_array_equal( -# points, -# [datetime(2022, 1, 1, 0, 30), datetime(2022, 1, 1, 1)], -# ) -# np.testing.assert_array_equal( -# bounds, -# [ -# [datetime(2022, 1, 1, 0, 15), datetime(2022, 1, 1, 0, 45)], -# [datetime(2022, 1, 1, 0, 45), datetime(2022, 1, 1, 1, 15)], -# ], -# ) - - -# Test _shift_time_coord - - -@pytest.mark.parametrize( - "frequency,dt_in,dt_out,bounds", - [ - ( - "dec", - [(2000, 1, 1)], - [(1995, 1, 1)], - [[(1990, 1, 1), (2000, 1, 1)]], - ), - ( - "yr", - [(2000, 1, 1), (2001, 1, 1)], - [(1999, 7, 2, 12), (2000, 7, 2)], - [[(1999, 1, 1), (2000, 1, 1)], [(2000, 1, 1), (2001, 1, 1)]], - ), - ( - "mon", - [(2000, 1, 1)], - [(1999, 12, 16, 12)], - [[(1999, 12, 1), (2000, 1, 1)]], - ), - ( - "mon", - [(2000, 11, 30, 23, 45), (2000, 12, 31, 23)], - [(2000, 11, 16), (2000, 12, 16, 12)], - [[(2000, 11, 1), (2000, 12, 1)], [(2000, 12, 1), (2001, 1, 1)]], - ), - ( - "day", - [(2000, 1, 1, 12)], - [(2000, 1, 1)], - [[(1999, 12, 31, 12), (2000, 1, 1, 12)]], - ), - ( - "6hr", - [(2000, 1, 5, 14), (2000, 1, 5, 20)], - [(2000, 1, 5, 11), (2000, 1, 5, 17)], - [ - [(2000, 1, 5, 8), (2000, 1, 5, 14)], - [(2000, 1, 5, 14), (2000, 1, 5, 20)], - ], - ), - ( - "3hr", - [(2000, 1, 1)], - [(1999, 12, 31, 22, 30)], - [[(1999, 12, 31, 21), (2000, 1, 1)]], - ), - ( - "1hr", - [(2000, 1, 5, 14), (2000, 1, 5, 15)], - [(2000, 1, 5, 13, 30), (2000, 1, 5, 14, 30)], - [ - [(2000, 1, 5, 13), (2000, 1, 5, 14)], - [(2000, 1, 5, 14), (2000, 1, 5, 15)], - ], - ), - ], -) -def test_shift_time_coord(frequency, dt_in, dt_out, bounds): - """Test ``_shift_time_coord``.""" - cube = Cube(0, cell_methods=[CellMethod("mean", "time")]) - datetimes = [datetime(*dt) for dt in dt_in] - time_units = Unit("days since 1950-01-01", calendar="proleptic_gregorian") - time_coord = DimCoord( - time_units.date2num(datetimes), - standard_name="time", - var_name="time", - long_name="time", - units=time_units, - ) - - fix = get_allvars_fix("Omon", "tos") - fix.extra_facets["frequency"] = frequency - - fix._shift_time_coord(cube, time_coord) - - dt_out = [datetime(*dt) for dt in dt_out] - bounds = [[datetime(*dt1), datetime(*dt2)] for (dt1, dt2) in bounds] - np.testing.assert_allclose( - time_coord.points, time_coord.units.date2num(dt_out) - ) - np.testing.assert_allclose( - time_coord.bounds, time_coord.units.date2num(bounds) - ) - - -@pytest.mark.parametrize( - "frequency,dt_in", - [ - ("dec", [(2000, 1, 15)]), - ("yr", [(2000, 1, 1), (2001, 1, 1)]), - ("mon", [(2000, 6, 15)]), - ("day", [(2000, 1, 1), (2001, 1, 2)]), - ("6hr", [(2000, 6, 15, 12)]), - ("3hr", [(2000, 1, 1, 4), (2000, 1, 1, 7)]), - ("1hr", [(2000, 1, 1, 4), (2000, 1, 1, 5)]), - ], -) -def test_shift_time_point_measurement(frequency, dt_in): - """Test ``_shift_time_coord``.""" - cube = Cube(0, cell_methods=[CellMethod("point", "time")]) - 
datetimes = [datetime(*dt) for dt in dt_in] - time_units = Unit("days since 1950-01-01", calendar="proleptic_gregorian") - time_coord = DimCoord( - time_units.date2num(datetimes), - standard_name="time", - var_name="time", - long_name="time", - units=time_units, - ) - - fix = get_allvars_fix("Omon", "tos") - fix.extra_facets["frequency"] = frequency - - fix._shift_time_coord(cube, time_coord) - - np.testing.assert_allclose( - time_coord.points, time_coord.units.date2num(datetimes) - ) - assert time_coord.bounds is None - - -@pytest.mark.parametrize( - "frequency", ["dec", "yr", "yrPt", "mon", "monC", "monPt"] -) -def test_shift_time_coord_hourly_data_low_freq_fail(frequency): - """Test ``_shift_time_coord``.""" - cube = Cube(0, cell_methods=[CellMethod("mean", "time")]) - time_units = Unit("hours since 1950-01-01", calendar="proleptic_gregorian") - time_coord = DimCoord( - [1, 2, 3], - standard_name="time", - var_name="time", - long_name="time", - units=time_units, - ) - - fix = get_allvars_fix("Omon", "tos") - fix.extra_facets["frequency"] = frequency - - msg = "Cannot shift time coordinate: Rounding to closest day failed." - with pytest.raises(ValueError, match=msg): - fix._shift_time_coord(cube, time_coord) - - -@pytest.mark.parametrize( - "frequency", ["dec", "yr", "yrPt", "mon", "monC", "monPt"] -) -def test_shift_time_coord_not_first_of_month(frequency): - """Test ``_get_previous_timestep``.""" - cube = Cube(0, cell_methods=[CellMethod("mean", "time")]) - time_units = Unit("days since 1950-01-01", calendar="proleptic_gregorian") - time_coord = DimCoord( - [1.5], - standard_name="time", - var_name="time", - long_name="time", - units=time_units, - ) - fix = get_allvars_fix("Omon", "tos") - fix.extra_facets["frequency"] = frequency - - msg = ( - "Cannot shift time coordinate: expected first of the month at 00:00:00" - ) - with pytest.raises(ValueError, match=msg): - fix._shift_time_coord(cube, time_coord) - - -@pytest.mark.parametrize("frequency", ["fx", "subhrPt", "invalid_freq"]) -def test_shift_time_coord_invalid_freq(frequency): - """Test ``_get_previous_timestep``.""" - cube = Cube(0, cell_methods=[CellMethod("mean", "time")]) - time_units = Unit("days since 1950-01-01", calendar="proleptic_gregorian") - time_coord = DimCoord( - [1.5, 2.5], - standard_name="time", - var_name="time", - long_name="time", - units=time_units, - ) - fix = get_allvars_fix("Omon", "tos") - fix.extra_facets["frequency"] = frequency - - msg = ( - "Cannot shift time coordinate: failed to determine previous time step" - ) - with pytest.raises(ValueError, match=msg): - fix._shift_time_coord(cube, time_coord) - - -# Test _get_previous_timestep - - -@pytest.mark.parametrize( - "frequency,datetime_in,datetime_out", - [ - ("dec", (2000, 1, 1), (1990, 1, 1)), - ("yr", (2000, 1, 1), (1999, 1, 1)), - ("yrPt", (2001, 6, 1), (2000, 6, 1)), - ("mon", (2001, 1, 1), (2000, 12, 1)), - ("mon", (2001, 2, 1), (2001, 1, 1)), - ("mon", (2001, 3, 1), (2001, 2, 1)), - ("mon", (2001, 4, 1), (2001, 3, 1)), - ("monC", (2000, 5, 1), (2000, 4, 1)), - ("monC", (2000, 6, 1), (2000, 5, 1)), - ("monC", (2000, 7, 1), (2000, 6, 1)), - ("monC", (2000, 8, 1), (2000, 7, 1)), - ("monPt", (2002, 9, 1), (2002, 8, 1)), - ("monPt", (2002, 10, 1), (2002, 9, 1)), - ("monPt", (2002, 11, 1), (2002, 10, 1)), - ("monPt", (2002, 12, 1), (2002, 11, 1)), - ("day", (2000, 1, 1), (1999, 12, 31)), - ("day", (2000, 3, 1), (2000, 2, 29)), - ("day", (2187, 3, 14), (2187, 3, 13)), - ("6hr", (2000, 3, 14, 15), (2000, 3, 14, 9)), - ("6hrPt", (2000, 1, 1), (1999, 12, 
31, 18)), - ("6hrCM", (2000, 1, 1, 1), (1999, 12, 31, 19)), - ("3hr", (2000, 3, 14, 15), (2000, 3, 14, 12)), - ("3hrPt", (2000, 1, 1), (1999, 12, 31, 21)), - ("3hrCM", (2000, 1, 1, 1), (1999, 12, 31, 22)), - ("1hr", (2000, 3, 14, 15), (2000, 3, 14, 14)), - ("1hrPt", (2000, 1, 1), (1999, 12, 31, 23)), - ("1hrCM", (2000, 1, 1, 1), (2000, 1, 1)), - ("hr", (2000, 3, 14), (2000, 3, 13, 23)), - ], -) -def test_get_previous_timestep(frequency, datetime_in, datetime_out): - """Test ``_get_previous_timestep``.""" - datetime_in = datetime(*datetime_in) - datetime_out = datetime(*datetime_out) - - fix = get_allvars_fix("Omon", "tos") - fix.extra_facets["frequency"] = frequency - - new_datetime = fix._get_previous_timestep(datetime_in) - - assert new_datetime == datetime_out - - -# Test mesh creation raises warning because bounds do not match vertices - - -# @mock.patch("esmvalcore.cmor._fixes.oras5._base_fixes.logger", autospec=True) -# def test_get_mesh_fail_invalid_clat_bounds(mock_logger, cubes_2d): -# """Test fix.""" -# # Slightly modify latitude bounds from tas cube to make mesh creation fail -# tas_cube = cubes_2d.extract_cube(NameConstraint(var_name="sosstsst")) -# lat_bnds = tas_cube.coord("latitude").bounds.copy() -# lat_bnds[0, 0] = 40.0 -# tas_cube.coord("latitude").bounds = lat_bnds -# cubes = CubeList([tas_cube]) -# fix = get_allvars_fix("Omon", "tos") - -# fixed_cubes = fix.fix_metadata(cubes) -# cube = check_tas_metadata(fixed_cubes) - -# assert cube.coord("latitude").bounds[0, 0] != 40.0 -# mock_logger.warning.assert_called_once_with( -# "Latitude bounds of the face coordinate ('clat_vertices' in " -# "the grid file) differ from the corresponding values " -# "calculated from the connectivity ('vertex_of_cell') and the " -# "node coordinate ('vlat'). Using bounds defined by " -# "connectivity." -# ) - - -# @mock.patch("esmvalcore.cmor._fixes.oras5._base_fixes.logger", autospec=True) -# def test_get_mesh_fail_invalid_clon_bounds(mock_logger, cubes_2d): -# """Test fix.""" -# # Slightly modify longitude bounds from tas cube to make mesh creation fail -# tas_cube = cubes_2d.extract_cube(NameConstraint(var_name="sosstsst")) -# lon_bnds = tas_cube.coord("longitude").bounds.copy() -# lon_bnds[0, 1] = 40.0 -# tas_cube.coord("longitude").bounds = lon_bnds -# cubes = CubeList([tas_cube]) -# fix = get_allvars_fix("Omon", "tos") - -# fixed_cubes = fix.fix_metadata(cubes) -# cube = check_tas_metadata(fixed_cubes) - -# assert cube.coord("longitude").bounds[0, 1] != 40.0 -# mock_logger.warning.assert_called_once_with( -# "Longitude bounds of the face coordinate ('clon_vertices' in " -# "the grid file) differ from the corresponding values " -# "calculated from the connectivity ('vertex_of_cell') and the " -# "node coordinate ('vlon'). Note that these values are allowed " -# "to differ by 360° or at the poles of the grid. Using bounds " -# "defined by connectivity." 
-# ) - - -# Test _get_grid_url - - -def test_get_grid_url(): - """Test fix.""" - cube = Cube(0, attributes={"grid_file_uri": TEST_GRID_FILE_URI}) - fix = get_allvars_fix("Omon", "tos") - (grid_url, grid_name) = fix._get_grid_url(cube) - assert grid_url == TEST_GRID_FILE_URI - assert grid_name == TEST_GRID_FILE_NAME - - -def test_get_grid_url_fail(): - """Test fix.""" - cube = Cube(0) - fix = get_allvars_fix("Omon", "tos") - msg = ( - "Cube does not contain the attribute 'grid_file_uri' necessary to " - "download the ICON horizontal grid file" - ) - with pytest.raises(ValueError, match=msg): - fix._get_grid_url(cube) - - -# Test get_mesh - - -def test_get_mesh_cached_from_attr(monkeypatch): - """Test fix.""" - cube = Cube(0, attributes={"grid_file_uri": TEST_GRID_FILE_URI}) - fix = get_allvars_fix("Omon", "tos") - monkeypatch.setattr(fix, "_create_mesh", mock.Mock()) - fix._meshes[TEST_GRID_FILE_NAME] = mock.sentinel.mesh - mesh = fix.get_mesh(cube) - assert mesh == mock.sentinel.mesh - fix._create_mesh.assert_not_called() - - -def test_get_mesh_not_cached_from_attr(monkeypatch): - """Test fix.""" - cube = Cube(0, attributes={"grid_file_uri": TEST_GRID_FILE_URI}) - fix = get_allvars_fix("Omon", "tos") - monkeypatch.setattr(fix, "_create_mesh", mock.Mock()) - fix.get_mesh(cube) - fix._create_mesh.assert_called_once_with(cube) - - -def test_get_mesh_cached_from_facet(monkeypatch, tmp_path): - """Test fix.""" - session = CFG.start_session("my session") - session["auxiliary_data_dir"] = tmp_path - - # Save temporary grid file (this will not be used; however, it is necessary - # to not raise a FileNotFoundError) - grid_path = "grid.nc" - grid_cube = Cube(0, var_name="grid") - iris.save(grid_cube, tmp_path / "grid.nc") - - cube = Cube(0, attributes={"grid_file_uri": TEST_GRID_FILE_URI}) - fix = get_allvars_fix("Omon", "tos", session=session) - fix.extra_facets["horizontal_grid"] = grid_path - monkeypatch.setattr(fix, "_create_mesh", mock.Mock()) - fix._meshes[TEST_GRID_FILE_NAME] = mock.sentinel.wrong_mesh - fix._meshes["grid.nc"] = mock.sentinel.mesh - - mesh = fix.get_mesh(cube) - - assert mesh == mock.sentinel.mesh - fix._create_mesh.assert_not_called() - - -def test_get_mesh_not_cached_from_facet(monkeypatch, tmp_path): - """Test fix.""" - session = CFG.start_session("my session") - session["auxiliary_data_dir"] = tmp_path - - # Save temporary grid file (this will not be used; however, it is necessary - # to not raise a FileNotFoundError) - grid_path = "grid.nc" - grid_cube = Cube(0, var_name="grid") - iris.save(grid_cube, tmp_path / "grid.nc") - - cube = Cube(0, attributes={"grid_file_uri": TEST_GRID_FILE_URI}) - fix = get_allvars_fix("Omon", "tos", session=session) - fix.extra_facets["horizontal_grid"] = grid_path - monkeypatch.setattr(fix, "_create_mesh", mock.Mock()) - fix._meshes[TEST_GRID_FILE_NAME] = mock.sentinel.wrong_mesh - - fix.get_mesh(cube) - - fix._create_mesh.assert_called_once_with(cube) - - -def test_get_bounds_cached_from_facet(cubes_2d, cubes_3d): - """Test fix.""" - tos_cube = cubes_2d.extract_cube(NameConstraint(var_name="sosstsst")) - tos_cube2 = tos_cube.copy() - cubes = CubeList([tos_cube, tos_cube2]) - - fix = get_allvars_fix("Omon", "tos") - fix.extra_facets["ugrid"] = False - fixed_cubes = [] - for i in range(len(cubes)): - fixed_cubes.append(fix.fix_metadata(CubeList([cubes[i]]))[0]) - fixed_cubes = CubeList(fixed_cubes) - - assert fixed_cubes[0].coord("latitude") == fixed_cubes[1].coord("latitude") - assert fixed_cubes[0].coord("longitude") == 
fixed_cubes[1].coord( - "longitude" - ) - assert ( - fixed_cubes[0].coord("latitude").bounds - == fixed_cubes[1].coord("latitude").bounds - ).all() - assert ( - fixed_cubes[0].coord("latitude").points - == fixed_cubes[1].coord("latitude").points - ).all() - assert ( - fixed_cubes[0].coord("longitude").bounds - == fixed_cubes[1].coord("longitude").bounds - ).all() - assert ( - fixed_cubes[0].coord("longitude").points - == fixed_cubes[1].coord("longitude").points - ).all() - - -def test_get_coord_cached_from_facet(cubes_2d, cubes_3d): - """Test fix.""" - tos_cube = cubes_2d.extract_cube(NameConstraint(var_name="sosstsst")) - tos_cube2 = tos_cube.copy() - cubes = CubeList([tos_cube, tos_cube2]) - # thetao_cube = cubes_3d.extract_cube(NameConstraint(var_name="votemper")) - - fix = get_allvars_fix("Omon", "tos") - fixed_cubes = [] - for i in range(len(cubes)): - fixed_cubes.append(fix.fix_metadata(CubeList([cubes[i]]))[0]) - fixed_cubes = CubeList(fixed_cubes) - - assert fixed_cubes[0].coord("latitude") == fixed_cubes[1].coord("latitude") - assert fixed_cubes[0].coord("longitude") == fixed_cubes[1].coord( - "longitude" - ) - assert ( - fixed_cubes[0].coord("latitude").bounds - == fixed_cubes[1].coord("latitude").bounds - ).all() - assert ( - fixed_cubes[0].coord("latitude").points - == fixed_cubes[1].coord("latitude").points - ).all() - assert ( - fixed_cubes[0].coord("longitude").bounds - == fixed_cubes[1].coord("longitude").bounds - ).all() - assert ( - fixed_cubes[0].coord("longitude").points - == fixed_cubes[1].coord("longitude").points - ).all() - - -# Test _get_path_from_facet - - -@pytest.mark.parametrize( - "path,description,output", - [ - ("{tmp_path}/a.nc", None, "{tmp_path}/a.nc"), - ("b.nc", "Grid file", "{tmp_path}/b.nc"), - ], -) -def test_get_path_from_facet(path, description, output, tmp_path): - """Test fix.""" - session = CFG.start_session("my session") - session["auxiliary_data_dir"] = tmp_path - path = path.format(tmp_path=tmp_path) - fix = get_allvars_fix("Omon", "tos", session=session) - fix.extra_facets["test_path"] = path - - # Create empty dummy file - output = output.format(tmp_path=tmp_path) - with open(output, "w", encoding="utf-8"): - pass - - out_path = fix._get_path_from_facet("test_path", description=description) - - assert isinstance(out_path, Path) - assert out_path == Path(output.format(tmp_path=tmp_path)) - - -@pytest.mark.parametrize( - "path,description", - [ - ("{tmp_path}/a.nc", None), - ("b.nc", "Grid file"), - ], -) -def test_get_path_from_facet_fail(path, description, tmp_path): - """Test fix.""" - session = CFG.start_session("my session") - session["auxiliary_data_dir"] = tmp_path - path = path.format(tmp_path=tmp_path) - fix = get_allvars_fix("Omon", "tos", session=session) - fix.extra_facets["test_path"] = path - - with pytest.raises(FileNotFoundError, match=description): - fix._get_path_from_facet("test_path", description=description) - - -# Test add_additional_cubes - - -@pytest.mark.parametrize("facet", ["zg_file", "zghalf_file"]) -@pytest.mark.parametrize("path", ["{tmp_path}/a.nc", "a.nc"]) -def test_add_additional_cubes(path, facet, tmp_path): - """Test fix.""" - session = CFG.start_session("my session") - session["auxiliary_data_dir"] = tmp_path - path = path.format(tmp_path=tmp_path) - fix = get_allvars_fix("Omon", "tos", session=session) - fix.extra_facets[facet] = path - - # Save temporary cube - cube = Cube(0, var_name=facet) - iris.save(cube, tmp_path / "a.nc") - - cubes = CubeList([]) - new_cubes = fix.add_additional_cubes(cubes) - - 
assert new_cubes is cubes - assert len(cubes) == 1 - assert cubes[0].var_name == facet - - -@pytest.mark.parametrize("facet", ["zg_file", "zghalf_file"]) -@pytest.mark.parametrize("path", ["{tmp_path}/a.nc", "a.nc"]) -def test_add_additional_cubes_fail(path, facet, tmp_path): - """Test fix.""" - session = CFG.start_session("my session") - session["auxiliary_data_dir"] = tmp_path - path = path.format(tmp_path=tmp_path) - fix = get_allvars_fix("Omon", "tos", session=session) - fix.extra_facets[facet] = path - - cubes = CubeList([]) - with pytest.raises(FileNotFoundError, match="File"): - fix.add_additional_cubes(cubes) - - -# Test _fix_height - - -# @pytest.mark.parametrize("bounds", [True, False]) -# def test_fix_height_plev(bounds, simple_unstructured_cube): -# """Test fix.""" -# cube = simple_unstructured_cube[:, 1:, :] -# pfull_cube = simple_unstructured_cube[:, 1:, :] -# pfull_cube.var_name = "pfull" -# pfull_cube.units = "Pa" -# cubes = CubeList([cube, pfull_cube]) -# if bounds: -# phalf_cube = simple_unstructured_cube.copy() -# phalf_cube.var_name = "phalf" -# phalf_cube.units = "Pa" -# cubes.append(phalf_cube) -# fix = get_allvars_fix("Omon", "thetao") - -# fixed_cube = fix._fix_height(cube, cubes) - -# expected_data = [[[4.0, 5.0], [2.0, 3.0]]] -# np.testing.assert_allclose(fixed_cube.data, expected_data) - -# height = check_model_level_metadata(fixed_cube) -# np.testing.assert_array_equal(height.points, [0, 1]) -# assert height.bounds is None - -# plev = check_air_pressure_metadata(fixed_cube) -# assert fixed_cube.coord_dims("air_pressure") == (0, 1, 2) -# np.testing.assert_allclose(plev.points, expected_data) -# if bounds: -# expected_bnds = [[[[4.0, 2.0], [5.0, 3.0]], [[2.0, 0.0], [3.0, 1.0]]]] -# np.testing.assert_allclose(plev.bounds, expected_bnds) -# else: -# assert plev.bounds is None - - -# @pytest.mark.parametrize("bounds", [True, False]) -# def test_fix_height_alt16(bounds, simple_unstructured_cube): -# """Test fix.""" -# cube = simple_unstructured_cube[:, 1:, :] -# zg_cube = simple_unstructured_cube[0, 1:, :] -# zg_cube.var_name = "zg" -# zg_cube.units = "m" -# cubes = CubeList([cube, zg_cube]) -# if bounds: -# zghalf_cube = simple_unstructured_cube[0, :, :] -# zghalf_cube.var_name = "zghalf" -# zghalf_cube.units = "m" -# cubes.append(zghalf_cube) -# fix = get_allvars_fix("Omon", "thetao") - -# fixed_cube = fix._fix_height(cube, cubes) - -# expected_data = [[[4.0, 5.0], [2.0, 3.0]]] -# np.testing.assert_allclose(fixed_cube.data, expected_data) - -# height = check_model_level_metadata(fixed_cube) -# np.testing.assert_array_equal(height.points, [0, 1]) -# assert height.bounds is None - -# assert fixed_cube.coords("altitude", dim_coords=False) -# alt16 = fixed_cube.coord("altitude", dim_coords=False) -# assert alt16.var_name == "alt16" -# assert alt16.standard_name == "altitude" -# assert alt16.long_name == "altitude" -# assert alt16.units == "m" -# assert alt16.attributes == {"positive": "up"} -# assert fixed_cube.coord_dims("altitude") == (1, 2) -# np.testing.assert_allclose(alt16.points, expected_data[0]) -# if bounds: -# expected_bnds = [[[4.0, 2.0], [5.0, 3.0]], [[2.0, 0.0], [3.0, 1.0]]] -# np.testing.assert_allclose(alt16.bounds, expected_bnds) -# else: -# assert alt16.bounds is None - - -# Test hfls (for extra fix) - - -# def test_get_hfls_fix(): -# """Test getting of fix.""" -# fix = Fix.get_fixes("ORAS5", "ORAS5", "Omon", "hfls") -# assert fix == [Hfls(None), AllVars(None), GenericFix(None)] - - -# def test_hfls_fix(cubes_regular_grid): -# """Test fix.""" -# cubes 
= CubeList([cubes_regular_grid[0].copy()]) -# cubes[0].var_name = "hfls" -# cubes[0].units = "W m-2" - -# fixed_cubes = fix_metadata(cubes, "Omon", "hfls") - -# assert len(fixed_cubes) == 1 -# cube = fixed_cubes[0] -# assert cube.var_name == "hfls" -# assert cube.standard_name == "surface_upward_latent_heat_flux" -# assert cube.long_name == "Surface Upward Latent Heat Flux" -# assert cube.units == "W m-2" -# assert cube.attributes["positive"] == "up" - -# fixed_cube = fix_data(cube, "Omon", "hfls") - -# np.testing.assert_allclose(fixed_cube.data, [[[0.0, -1.0], [-2.0, -3.0]]]) - - -# # Test hfss (for extra fix) - - -# def test_get_hfss_fix(): -# """Test getting of fix.""" -# fix = Fix.get_fixes("ORAS5", "ORAS5", "Omon", "hfss") -# assert fix == [Hfss(None), AllVars(None), GenericFix(None)] - - -# def test_hfss_fix(cubes_regular_grid): -# """Test fix.""" -# cubes = CubeList([cubes_regular_grid[0].copy()]) -# cubes[0].var_name = "hfss" -# cubes[0].units = "W m-2" - -# fixed_cubes = fix_metadata(cubes, "Omon", "hfss") - -# assert len(fixed_cubes) == 1 -# cube = fixed_cubes[0] -# assert cube.var_name == "hfss" -# assert cube.standard_name == "surface_upward_sensible_heat_flux" -# assert cube.long_name == "Surface Upward Sensible Heat Flux" -# assert cube.units == "W m-2" -# assert cube.attributes["positive"] == "up" - -# fixed_cube = fix_data(cube, "Omon", "hfss") - -# np.testing.assert_allclose(fixed_cube.data, [[[0.0, -1.0], [-2.0, -3.0]]]) - - -# # Test rtnt (for extra fix) - - -# def test_get_rtnt_fix(): -# """Test getting of fix.""" -# fix = Fix.get_fixes("ORAS5", "ORAS5", "Omon", "rtnt") -# assert fix == [Rtnt(None), AllVars(None), GenericFix(None)] - - -# def test_rtnt_fix(cubes_regular_grid): -# """Test fix.""" -# cubes = CubeList( -# [ -# cubes_regular_grid[0].copy(), -# cubes_regular_grid[0].copy(), -# cubes_regular_grid[0].copy(), -# ] -# ) -# cubes[0].var_name = "rsdt" -# cubes[1].var_name = "rsut" -# cubes[2].var_name = "rlut" -# cubes[0].units = "W m-2" -# cubes[1].units = "W m-2" -# cubes[2].units = "W m-2" - -# fixed_cubes = fix_metadata(cubes, "Omon", "rtnt") - -# assert len(fixed_cubes) == 1 -# cube = fixed_cubes[0] -# assert cube.var_name == "rtnt" -# assert cube.standard_name is None -# assert cube.long_name == "TOA Net downward Total Radiation" -# assert cube.units == "W m-2" -# assert cube.attributes["positive"] == "down" - -# np.testing.assert_allclose(cube.data, [[[0.0, -1.0], [-2.0, -3.0]]]) - - -# # Test rtmt (for extra fix) - - -# def test_get_rtmt_fix(): -# """Test getting of fix.""" -# fix = Fix.get_fixes("ORAS5", "ORAS5", "Omon", "rtmt") -# assert fix == [Rtmt(None), AllVars(None), GenericFix(None)] - - -# def test_rtmt_fix(cubes_regular_grid): -# """Test fix.""" -# cubes = CubeList( -# [ -# cubes_regular_grid[0].copy(), -# cubes_regular_grid[0].copy(), -# cubes_regular_grid[0].copy(), -# ] -# ) -# cubes[0].var_name = "rsdt" -# cubes[1].var_name = "rsut" -# cubes[2].var_name = "rlut" -# cubes[0].units = "W m-2" -# cubes[1].units = "W m-2" -# cubes[2].units = "W m-2" - -# fixed_cubes = fix_metadata(cubes, "Omon", "rtmt") - -# assert len(fixed_cubes) == 1 -# cube = fixed_cubes[0] -# assert cube.var_name == "rtmt" -# assert cube.standard_name == ( -# "net_downward_radiative_flux_at_top_of_atmosphere_model" -# ) -# assert cube.long_name == "Net Downward Radiative Flux at Top of Model" -# assert cube.units == "W m-2" -# assert cube.attributes["positive"] == "down" - -# np.testing.assert_allclose(cube.data, [[[0.0, -1.0], [-2.0, -3.0]]]) From 
6b1f0747011a390ead47899f61f07f910ec05d93 Mon Sep 17 00:00:00 2001 From: Jan-Hendrik Malles Date: Mon, 1 Dec 2025 15:31:47 +0100 Subject: [PATCH 29/32] Update documentation --- doc/develop/fixing_data.rst | 2 ++ doc/quickstart/find_data.rst | 20 ++++++++++++++++++++ 2 files changed, 22 insertions(+) diff --git a/doc/develop/fixing_data.rst b/doc/develop/fixing_data.rst index 5e5be11d5f..cbb83717df 100644 --- a/doc/develop/fixing_data.rst +++ b/doc/develop/fixing_data.rst @@ -467,6 +467,8 @@ it is CMOR-compliant), use :ref:`dataset fixes `. This is where the actual CMORization takes place. For example, a ``native6`` dataset fix for ERA5 is located `here `__, +the ORAS5 fix is located `here +`__, and the ``ICON`` fix is located `here `__. diff --git a/doc/quickstart/find_data.rst b/doc/quickstart/find_data.rst index 939e65f378..37628ef5bc 100644 --- a/doc/quickstart/find_data.rst +++ b/doc/quickstart/find_data.rst @@ -242,6 +242,26 @@ For more info: http://www.gloh2o.org/ Data for the version ``V220`` can be downloaded from: https://hydrology.princeton.edu/data/hylkeb/MSWEP_V220/. +.. _read_native_oras5: + +ORAS5 (in netCDF format downloaded from the CDS) +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +ORAS5 data can be downloaded from the Copernicus Climate Data Store (CDS) +`CDS `__. +Place the files in the ``/Tier3/ORAS5/single_levels`` or ``/Tier3/ORAS5/all_levels`` +subdirectory (depending on whether it is 2D or 3D data) of your ``rootpath`` that you have +configured for the ``native6`` project (assuming you are using the default DRS +for ``native6`` described :ref:`above `). + +- Supported variables: ``uo``, ``vo``, ``tos``, ``sos``, ``zos``, + ``mlotst``, ``thetao``, ``so``, ``hfds``, ``tauuo``. +- Tier: 3 + +.. note:: For ORAS5 the rotated meridional and zonal velocities are available. + Per default ESMValCore expects that you use those. Otherwise, you need to specify + the ``raw_name`` of the unrotated velocities in the recipe. + .. _read_native_models: Supported native models From 596188dbf07ff79535feb58ed2b222edb6fc8f08 Mon Sep 17 00:00:00 2001 From: Jan-Hendrik Malles Date: Tue, 2 Dec 2025 12:48:52 +0100 Subject: [PATCH 30/32] Fixing docu --- doc/quickstart/find_data.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/quickstart/find_data.rst b/doc/quickstart/find_data.rst index 37628ef5bc..8a9a0bbe48 100644 --- a/doc/quickstart/find_data.rst +++ b/doc/quickstart/find_data.rst @@ -245,7 +245,7 @@ Data for the version ``V220`` can be downloaded from: https://hydrology.princeto .. _read_native_oras5: ORAS5 (in netCDF format downloaded from the CDS) -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ ORAS5 data can be downloaded from the Copernicus Climate Data Store (CDS) `CDS `__. From b4a8089678afc7222717064e35268a38b4ce83ef Mon Sep 17 00:00:00 2001 From: Jan-Hendrik Malles Date: Tue, 2 Dec 2025 13:31:59 +0100 Subject: [PATCH 31/32] Amend documentation --- doc/quickstart/find_data.rst | 19 +++++++++++++------ 1 file changed, 13 insertions(+), 6 deletions(-) diff --git a/doc/quickstart/find_data.rst b/doc/quickstart/find_data.rst index 8a9a0bbe48..596e44feb6 100644 --- a/doc/quickstart/find_data.rst +++ b/doc/quickstart/find_data.rst @@ -244,20 +244,27 @@ Data for the version ``V220`` can be downloaded from: https://hydrology.princeto .. 
_read_native_oras5: -ORAS5 (in netCDF format downloaded from the CDS) -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +ORAS5 +^^^^^ -ORAS5 data can be downloaded from the Copernicus Climate Data Store (CDS) -`CDS `__. +ORAS5 data can be downloaded in netCDF from the Copernicus Climate Data Store +`(CDS) `__. Place the files in the ``/Tier3/ORAS5/single_levels`` or ``/Tier3/ORAS5/all_levels`` subdirectory (depending on whether it is 2D or 3D data) of your ``rootpath`` that you have configured for the ``native6`` project (assuming you are using the default DRS for ``native6`` described :ref:`above `). -- Supported variables: ``uo``, ``vo``, ``tos``, ``sos``, ``zos``, - ``mlotst``, ``thetao``, ``so``, ``hfds``, ``tauuo``. +- Supported variables: ``uo``, ``vo``, ``tos``, ``sos``, ``zos``, ``mlotst``, +``thetao``, ``so``, ``hfds``, ``tauuo``. - Tier: 3 +.. note:: For ORAS5 you need the grid files to read the data properly, which + can be downloaded `here `__. + Be aware that for the variables ``uo``, and ``tauuo`` you need to use the + ``oras5_mesh_u.nc`` grid and for the variable ``vo`` the ``oras5_mesh_v.nc`` + grid, respectively. For all other supported variables you need the + ``oras5_mesh_T.nc`` grid file. + .. note:: For ORAS5 the rotated meridional and zonal velocities are available. Per default ESMValCore expects that you use those. Otherwise, you need to specify the ``raw_name`` of the unrotated velocities in the recipe. From 9f61cb448f3871f4d96ab85b8e1532101ce72a52 Mon Sep 17 00:00:00 2001 From: Jan-Hendrik Malles Date: Tue, 2 Dec 2025 13:39:25 +0100 Subject: [PATCH 32/32] Fix docu --- doc/quickstart/find_data.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/quickstart/find_data.rst b/doc/quickstart/find_data.rst index 596e44feb6..3b6a154e45 100644 --- a/doc/quickstart/find_data.rst +++ b/doc/quickstart/find_data.rst @@ -255,7 +255,7 @@ configured for the ``native6`` project (assuming you are using the default DRS for ``native6`` described :ref:`above `). - Supported variables: ``uo``, ``vo``, ``tos``, ``sos``, ``zos``, ``mlotst``, -``thetao``, ``so``, ``hfds``, ``tauuo``. + ``thetao``, ``so``, ``hfds``, ``tauuo``. - Tier: 3 .. note:: For ORAS5 you need the grid files to read the data properly, which
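
The documentation added in the patches above explains where to place the ORAS5 files, which variables are supported, and which mesh file each variable needs, but it does not show the fixes being exercised directly. Below is a minimal, illustrative sketch of how the registered ORAS5 fixes could be applied by hand, mirroring the ``Fix.get_fixes`` and ``fix_metadata`` calls used in the tests earlier in this series. The input file name, the ``horizontal_grid`` value, and the import path of ``Fix`` are assumptions made for the example only; in normal use ESMValCore applies these fixes automatically while running a recipe::

    # Illustrative sketch only; file names and facet values are placeholders.
    import iris

    from esmvalcore.cmor._fixes.fix import Fix  # import path assumed

    # Raw ORAS5 file downloaded from the CDS (hypothetical name).
    cubes = iris.load("sosstsst_oras5_example.nc")

    # Retrieve the fixes registered for ORAS5, as done in the tests above
    # (variable fix, AllVars, and the generic fix).
    fixes = Fix.get_fixes("ORAS5", "ORAS5", "Omon", "tos")

    for fix in fixes:
        # The on-the-fly CMORizer needs the horizontal grid file (see the
        # documentation note above); the path given here is a placeholder.
        fix.extra_facets["horizontal_grid"] = "oras5_mesh_T.nc"
        cubes = fix.fix_metadata(cubes)

    print(cubes)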