diff --git a/.github/workflows/build_workflow.yml b/.github/workflows/build_workflow.yml
index 76e2964de..fe2405b7c 100644
--- a/.github/workflows/build_workflow.yml
+++ b/.github/workflows/build_workflow.yml
@@ -84,13 +84,13 @@ jobs:
           conda install conda conda-build
           conda build -m "conda_package/ci/linux_64_python${{ matrix.python-version }}.____cpython.yaml" "conda_package/recipe"
           # Add local build channel first for subsequent micromamba usage
-          micromamba config append channels "$CONDA_PREFIX/conda-bld"
-          micromamba config append channels conda-forge
       - name: Create Test Environment
         run: |
           micromamba create -y -n mpas_tools_test \
-            python=${{ env.BUILD_PYTHON_VERSION }} \
+            -c ${CONDA_PREFIX}/conda-bld \
+            -c conda-forge \
+            python=${{ matrix.python-version }} \
             mpas_tools \
             sphinx \
             mock \
@@ -100,4 +100,4 @@ jobs:
         run: |
           micromamba activate mpas_tools_test
           cd conda_package/docs
-          DOCS_VERSION=test make versioned-html
+          DOCS_VERSION=test make versioned-html WERROR=1
diff --git a/conda_package/docs/Makefile b/conda_package/docs/Makefile
index f74cca1c3..ad648fd56 100644
--- a/conda_package/docs/Makefile
+++ b/conda_package/docs/Makefile
@@ -2,16 +2,25 @@
 #
 # You can set these variables from the command line.
-SPHINXOPTS    =
+SPHINXOPTS    ?=
 SPHINXBUILD   = sphinx-build
 SPHINXPROJ    = mpas_tools
 SOURCEDIR     = .
 BUILDDIR      = _build
 
+# Select behavior via an environment/CLI variable
+#   make versioned-html            # normal
+#   make versioned-html WERROR=1   # treat warnings as errors
+ifeq ($(WERROR),1)
+    SPHINXWARNOPTS = -W --keep-going
+else
+    SPHINXWARNOPTS =
+endif
+
 # Build into a versioned subdirectory
 versioned-html:
-	@echo "Building version: $(DOCS_VERSION)"
-	$(SPHINXBUILD) -b html "$(SOURCEDIR)" "$(BUILDDIR)/html/$(DOCS_VERSION)"
+	@echo "Building version: $(DOCS_VERSION) (WERROR=$(WERROR))"
+	$(SPHINXBUILD) $(SPHINXWARNOPTS) -b html "$(SOURCEDIR)" "$(BUILDDIR)/html/$(DOCS_VERSION)"
 	@echo "Build finished. The HTML pages are in $(BUILDDIR)/html/$(DOCS_VERSION)."
 	@echo "Setting up shared version switcher for local preview..."
 	mkdir -p _build/html/shared
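Note (not part of the patch itself): a minimal sketch of how the new WERROR toggle above can be exercised locally; the DOCS_VERSION value is arbitrary. With WERROR=1, sphinx-build receives -W --keep-going, so all warnings are reported and any warning causes a nonzero exit.

    cd conda_package/docs
    DOCS_VERSION=test make versioned-html             # warnings are only reported
    DOCS_VERSION=test make versioned-html WERROR=1    # any warning fails the build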
diff --git a/conda_package/docs/api.rst b/conda_package/docs/api.rst
index 288d5172c..bc3c95c48 100644
--- a/conda_package/docs/api.rst
+++ b/conda_package/docs/api.rst
@@ -1,3 +1,5 @@
+.. _api_reference:
+
 #############
 API reference
 #############
@@ -375,8 +377,8 @@ Visualization
    :toctree: generated/
 
    MpasToXdmf
-   MpasToXdmf.load()
-   MpasToXdmf.convert_to_xdmf()
+   MpasToXdmf.load
+   MpasToXdmf.convert_to_xdmf
    main
 
 .. currentmodule:: mpas_tools.viz.mesh_to_triangles
diff --git a/conda_package/docs/conf.py b/conda_package/docs/conf.py
index f8e89e2ca..5ea6d9eab 100644
--- a/conda_package/docs/conf.py
+++ b/conda_package/docs/conf.py
@@ -179,7 +179,7 @@
 # Example configuration for intersphinx: refer to the Python standard library.
 intersphinx_mapping = {
-    'python': ('https://docs.python.org/', None),
+    'python': ('https://docs.python.org/3/', None),
     'numpy': ('http://docs.scipy.org/doc/numpy/', None),
     'xarray': ('http://xarray.pydata.org/en/stable/', None),
     'geometric_features':
diff --git a/conda_package/docs/interpolation.rst b/conda_package/docs/interpolation.rst
index 87b0b6cf8..507db44be 100644
--- a/conda_package/docs/interpolation.rst
+++ b/conda_package/docs/interpolation.rst
@@ -1,8 +1,8 @@
-.. _mesh_interpolation:
-
 .. |---| unicode:: U+2014  .. em dash, trimming surrounding whitespace
    :trim:
 
+.. _mesh_interpolation:
+
 *************
 Interpolation
 *************
diff --git a/conda_package/docs/mesh_conversion.rst b/conda_package/docs/mesh_conversion.rst
index 0e554e690..16f62b4b4 100644
--- a/conda_package/docs/mesh_conversion.rst
+++ b/conda_package/docs/mesh_conversion.rst
@@ -266,12 +266,26 @@ Extensibility and Limitations
 - For advanced use cases (e.g., custom mask types or additional properties),
   see the source code and docstrings for guidance.
 
-See also the API documentation for :py:mod:`mpas_tools.mesh.mask` for further details.
+See also the API documentation for :py:mod:`mpas_tools.mesh.mask` for further
+details.
 
-See also the API documentation for :py:mod:`mpas_tools.mesh.mask` for further details.
+.. code-block::
+
+    $ compute_mpas_region_masks --help
+    usage: compute_mpas_region_masks [-h] -m MESH_FILE_NAME -g GEOJSON_FILE_NAME
+                                     -o MASK_FILE_NAME
+                                     [-t MASK_TYPES [MASK_TYPES ...]]
+                                     [-c CHUNK_SIZE] [--show_progress]
+                                     [-s SUBDIVISION]
+                                     [--process_count PROCESS_COUNT]
+                                     [--multiprocessing_method MULTIPROCESSING_METHOD]
+
+    optional arguments:
+      -h, --help            show this help message and exit
+      -m MESH_FILE_NAME, --mesh_file_name MESH_FILE_NAME
+                            An MPAS mesh file
       -g GEOJSON_FILE_NAME, --geojson_file_name GEOJSON_FILE_NAME
-                            An Geojson file containing mask regions
+                            A GeoJSON file containing mask regions
       -o MASK_FILE_NAME, --mask_file_name MASK_FILE_NAME
                             An output MPAS region masks file
       -t MASK_TYPES [MASK_TYPES ...], --mask_types MASK_TYPES [MASK_TYPES ...]
@@ -367,7 +381,7 @@ The command-line tool takes the following arguments:
      -m MESH_FILE_NAME, --mesh_file_name MESH_FILE_NAME
                            An MPAS mesh file
      -g GEOJSON_FILE_NAME, --geojson_file_name GEOJSON_FILE_NAME
-                           An Geojson file containing transects
+                           A GeoJSON file containing transects
      -o MASK_FILE_NAME, --mask_file_name MASK_FILE_NAME
                            An output MPAS transect masks file
      -t MASK_TYPES [MASK_TYPES ...], --mask_types MASK_TYPES [MASK_TYPES ...]
@@ -423,7 +437,7 @@ The command-line tool takes the following arguments:
      -m MESH_FILE_NAME, --mesh_file_name MESH_FILE_NAME
                            An MPAS mesh file
      -g GEOJSON_FILE_NAME, --geojson_file_name GEOJSON_FILE_NAME
-                           An Geojson file containing points at which to start
+                           A GeoJSON file containing points at which to start
                            the flood fill
      -o MASK_FILE_NAME, --mask_file_name MASK_FILE_NAME
                            An output MPAS region masks file
@@ -461,7 +475,7 @@ The command-line tool takes the following arguments:
      --lon LON             The name of the longitude coordinate
      --lat LAT             The name of the latitude coordinate
      -g GEOJSON_FILE_NAME, --geojson_file_name GEOJSON_FILE_NAME
-                           An Geojson file containing mask regions
+                           A GeoJSON file containing mask regions
      -o MASK_FILE_NAME, --mask_file_name MASK_FILE_NAME
                            An output MPAS region masks file
      -c CHUNK_SIZE, --chunk_size CHUNK_SIZE
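Note (not part of the patch itself): to complement the help text above, a rough example invocation of the region-mask tool; the file names, mask type, and process count are illustrative assumptions, not taken from the repository.

    compute_mpas_region_masks -m mesh.nc -g regions.geojson -o region_masks.nc \
        -t cell --process_count 4 --show_progress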
diff --git a/conda_package/docs/mesh_creation.rst b/conda_package/docs/mesh_creation.rst
index 1978c090a..0a602e23c 100644
--- a/conda_package/docs/mesh_creation.rst
+++ b/conda_package/docs/mesh_creation.rst
@@ -1,8 +1,8 @@
-.. _mesh_creation:
-
 .. |---| unicode:: U+2014  .. em dash, trimming surrounding whitespace
    :trim:
 
+.. _mesh_creation:
+
 *************
 Mesh Creation
 *************
diff --git a/conda_package/docs/mpas_to_xdmf.rst b/conda_package/docs/mpas_to_xdmf.rst
index 0581b58ca..82dd47ee2 100644
--- a/conda_package/docs/mpas_to_xdmf.rst
+++ b/conda_package/docs/mpas_to_xdmf.rst
@@ -26,12 +26,15 @@ The converter can be used via its command-line interface
 ``mpas_to_xdmf`` or as a Python library.
 
 .. note::
-   Special variable keys:
-   - ``allOnCells``: all variables with dimension ``nCells``
-   - ``allOnEdges``: all variables with dimension ``nEdges``
-   - ``allOnVertices``: all variables with dimension ``nVertices``
-   Extra dimensions (e.g., ``nVertLevels``) can be sliced using the ``-d``
-   CLI option or the ``extra_dims`` argument in Python.
+
+   Special variable keys:
+
+   - ``allOnCells``: all variables with dimension ``nCells``
+   - ``allOnEdges``: all variables with dimension ``nEdges``
+   - ``allOnVertices``: all variables with dimension ``nVertices``
+
+   Extra dimensions (e.g., ``nVertLevels``) can be sliced using the ``-d``
+   CLI option or the ``extra_dims`` argument in Python.
 
 Command-Line Arguments
 ----------------------
diff --git a/conda_package/docs/releasing.rst b/conda_package/docs/releasing.rst
index d7abc7b60..300482c79 100644
--- a/conda_package/docs/releasing.rst
+++ b/conda_package/docs/releasing.rst
@@ -17,13 +17,15 @@ Version Bump and Dependency Updates
      - ``conda_package/mpas_tools/__init__.py``
      - ``conda_package/recipe/meta.yaml``
 
-   - Make sure the version follows [semantic versioning](https://semver.org/).
+   - Make sure the version follows semantic versioning (see
+     https://semver.org/).
      For release candidates, use versions like ``1.3.0rc1`` (no ``v`` prefix).
 
 2. **Check and Update Dependencies**
 
    - Ensure that dependencies and their constraints are up-to-date and
     consistent in:
+
      - ``conda_package/recipe/meta.yaml`` (dependencies for the conda-forge
        release)
      - ``conda_package/pyproject.toml`` (dependencies for PyPI; used as a
diff --git a/conda_package/mpas_tools/mesh/mask.py b/conda_package/mpas_tools/mesh/mask.py
index bbefe792d..793a36cb2 100644
--- a/conda_package/mpas_tools/mesh/mask.py
+++ b/conda_package/mpas_tools/mesh/mask.py
@@ -111,7 +111,9 @@ def compute_mpas_region_masks(
         logger.info(f'  Computing {maskType} masks:')
 
         # create shapely geometry for lon and lat
-        points = [shapely.geometry.Point(x, y) for x, y in zip(lon, lat)]
+        points = [
+            shapely.geometry.Point(x, y) for x, y in zip(lon, lat, strict=True)
+        ]
         regionNames, masks, properties = _compute_region_masks(
             fcMask,
             points,
@@ -171,7 +173,7 @@ def entry_point_compute_mpas_region_masks():
         dest='geojson_file_name',
         type=str,
         required=True,
-        help='An Geojson file containing mask regions',
+        help='A GeoJSON file containing mask regions',
     )
     parser.add_argument(
         '-o',
@@ -418,7 +420,7 @@ def entry_point_compute_mpas_transect_masks():
         dest='geojson_file_name',
         type=str,
         required=True,
-        help='An Geojson file containing transects',
+        help='A GeoJSON file containing transects',
     )
     parser.add_argument(
         '-o',
@@ -612,7 +614,7 @@ def entry_point_compute_mpas_flood_fill_mask():
         dest='geojson_file_name',
         type=str,
         required=True,
-        help='An Geojson file containing points at which to '
+        help='A GeoJSON file containing points at which to '
         'start the flood fill',
     )
     parser.add_argument(
         '-o',
@@ -708,7 +710,9 @@ def compute_lon_lat_region_masks(
     Lat = Lat.ravel()
 
     # create shapely geometry for lon and lat
-    points = [shapely.geometry.Point(x, y) for x, y in zip(Lon, Lat)]
+    points = [
+        shapely.geometry.Point(x, y) for x, y in zip(Lon, Lat, strict=True)
+    ]
     regionNames, masks, properties = _compute_region_masks(
         fcMask,
         points,
@@ -783,7 +787,7 @@ def entry_point_compute_lon_lat_region_masks():
         dest='geojson_file_name',
         type=str,
         required=True,
-        help='An Geojson file containing mask regions',
+        help='A GeoJSON file containing mask regions',
     )
     parser.add_argument(
         '-o',
@@ -932,7 +936,8 @@ def compute_projection_grid_region_masks(
 
     # create shapely geometry for lon and lat
     points = [
-        shapely.geometry.Point(x, y) for x, y in zip(lon.ravel(), lat.ravel())
+        shapely.geometry.Point(x, y)
+        for x, y in zip(lon.ravel(), lat.ravel(), strict=True)
     ]
     regionNames, masks, properties = _compute_region_masks(
         fcMask,
@@ -1005,7 +1010,7 @@ def entry_point_compute_projection_grid_region_masks():
         dest='geojson_file_name',
         type=str,
         required=True,
-        help='An Geojson file containing mask regions',
+        help='A GeoJSON file containing mask regions',
     )
     parser.add_argument(
         '-o',
@@ -1349,7 +1354,7 @@ def _compute_transect_masks(
             if subdivisionResolution is None:
                 new_coords.append(coords)
             else:
-                lon, lat = zip(*coords)
+                lon, lat = zip(*coords, strict=True)
                 x, y, z = lon_lat_to_cartesian(
                     lon, lat, earthRadius, degrees=True
                 )
@@ -1359,7 +1364,9 @@
                 lon, lat = cartesian_to_lon_lat(
                     x, y, z, earthRadius, degrees=True
                 )
-                new_coords.append([list(a) for a in zip(lon, lat)])
+                new_coords.append(
+                    [list(a) for a in zip(lon, lat, strict=True)]
+                )
 
         if geom_type == 'LineString':
             shape = shapely.geometry.LineString(new_coords[0])
@@ -1471,7 +1478,7 @@ def _get_polygons(dsMesh, maskType):
 
     polygons = []
     for index in range(lon.shape[0]):
-        coords = zip(lon[index, :], lat[index, :])
+        coords = zip(lon[index, :], lat[index, :], strict=True)
         polygons.append(shapely.geometry.Polygon(coords))
 
     return polygons, nPolygons, duplicatePolygons
@@ -1608,14 +1615,17 @@
     local_voe[local_mask] = local_v
 
     graph = Graph(
-        n=len(unique_vertices), edges=zip(local_voe[:, 0], local_voe[:, 1])
+        n=len(unique_vertices),
+        edges=zip(local_voe[:, 0], local_voe[:, 1], strict=True),
     )
     graph.vs['distance'] = distance
     graph.vs['lon'] = unique_lon
     graph.vs['lat'] = unique_lat
     graph.vs['vertices'] = numpy.arange(len(unique_vertices))
     graph.es['edges'] = edge_indices
-    graph.es['vertices'] = [(v0, v1) for v0, v1 in zip(voe[:, 0], voe[:, 1])]
+    graph.es['vertices'] = [
+        (v0, v1) for v0, v1 in zip(voe[:, 0], voe[:, 1], strict=True)
+    ]
 
     edgeSign = numpy.zeros(edgeMask.shape, dtype=numpy.int32)
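Note (not part of the patch itself): the strict=True arguments added throughout mask.py use behavior available in Python 3.10 and later, where zip() raises ValueError on a length mismatch rather than silently truncating. A quick illustration from the command line:

    python -c "list(zip([1, 2, 3], [1, 2], strict=True))"    # raises ValueError
    python -c "print(list(zip([1, 2, 3], [1, 2])))"          # silently drops the 3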
diff --git a/conda_package/mpas_tools/viz/mpas_to_xdmf/mpas_to_xdmf.py b/conda_package/mpas_tools/viz/mpas_to_xdmf/mpas_to_xdmf.py
index 6f555a841..3499f74d0 100644
--- a/conda_package/mpas_tools/viz/mpas_to_xdmf/mpas_to_xdmf.py
+++ b/conda_package/mpas_tools/viz/mpas_to_xdmf/mpas_to_xdmf.py
@@ -16,6 +16,7 @@
 Example Usage
 -------------
 Python:
+
 >>> from mpas_tools.viz.mpas_to_xdmf.mpas_to_xdmf import MpasToXdmf
 >>> converter = MpasToXdmf()
 >>> converter.load(mesh_filename="mesh.nc", time_series_filenames="output.*.nc",
@@ -23,6 +24,7 @@
 >>> converter.convert_to_xdmf(out_dir="output_dir", extra_dims={"nVertLevels": [0, 1, 2]})
 
 Command line:
+
 $ mpas_to_xdmf -m mesh.nc -t output.*.nc -v temperature salinity -o output_dir -d nVertLevels=0:3
 
 See Also
@@ -92,8 +94,7 @@ def load(
         xtime_var=None,
     ):
         """
-        Load the MPAS mesh file and optionally combine it with time series
-        files into a single xarray Dataset.
+        Load the MPAS mesh file and optional time series.
 
         Parameters
         ----------
@@ -104,12 +105,16 @@
             files. If None, only the mesh file is used.
         variables : list of str, optional
            List of variables to convert. Special keys:
-            - 'allOnCells': all variables with dimension 'nCells'
-            - 'allOnEdges': all variables with dimension 'nEdges'
-            - 'allOnVertices': all variables with dimension 'nVertices'
+
+            * ``"allOnCells"``: all variables with dimension ``"nCells"``.
+            * ``"allOnEdges"``: all variables with dimension ``"nEdges"``.
+            * ``"allOnVertices"``: all variables with dimension
+              ``"nVertices"``.
+
             If None, all variables are included.
         xtime_var : str, optional
-            Name of the variable containing time information (e.g., 'xtime').
+            Name of the variable containing time information (e.g.,
+            ``"xtime"``).
         """
         self.ds_mesh, self.ds = _load_dataset(
             mesh_filename=mesh_filename,
@@ -128,8 +133,8 @@ def convert_to_xdmf(self, out_dir, extra_dims=None, quiet=False):
             Directory where XDMF and HDF5 files will be saved.
         extra_dims : dict, optional
             Dictionary mapping extra dimensions to their selected indices.
-            Example: {'nVertLevels': [0, 1, 2]}
-            If None, all indices are included.
+            Example - ``{'nVertLevels': [0, 1, 2]}``. If None, all indices are
+            included.
         quiet : bool, optional
             If True, suppress progress output.