diff --git a/.github/workflows/benchmarks_run.yml b/.github/workflows/benchmarks_run.yml index 5bc2fba6ec..0d9d7f0756 100644 --- a/.github/workflows/benchmarks_run.yml +++ b/.github/workflows/benchmarks_run.yml @@ -46,7 +46,7 @@ jobs: - name: Cache environment directories id: cache-env-dir - uses: actions/cache@v3 + uses: actions/cache@v4 with: path: | .nox @@ -56,7 +56,7 @@ jobs: - name: Cache test data directory id: cache-test-data - uses: actions/cache@v3 + uses: actions/cache@v4 with: path: | ${{ env.IRIS_TEST_DATA_PATH }} diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index ded673f008..11a5d42889 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -29,7 +29,7 @@ repos: - id: no-commit-to-branch - repo: https://github.com/astral-sh/ruff-pre-commit - rev: "v0.1.11" + rev: "v0.1.13" hooks: - id: ruff types: [file, python] diff --git a/.readthedocs.yml b/.readthedocs.yml index af1a8f6303..d82bd513ca 100644 --- a/.readthedocs.yml +++ b/.readthedocs.yml @@ -1,9 +1,9 @@ version: 2 build: - os: ubuntu-20.04 + os: "ubuntu-22.04" tools: - python: mambaforge-4.10 + python: "mambaforge-22.9" jobs: post_checkout: # The SciTools/iris repository is shallow i.e., has a .git/shallow, diff --git a/.ruff.toml b/.ruff.toml index ff7ce543be..5d78ecdb57 100644 --- a/.ruff.toml +++ b/.ruff.toml @@ -29,8 +29,6 @@ lint.ignore = [ "D102", # Missing docstring in public method # (D-3) Temporary, before an initial review, either fix ocurrences or move to (2). "D103", # Missing docstring in public function - "D205", # 1 blank line required between summary line and description - "D401", # First line of docstring should be in imperative mood: ... 
# pyupgrade (UP) # https://docs.astral.sh/ruff/rules/#pyupgrade-up diff --git a/benchmarks/asv_delegated_conda.py b/benchmarks/asv_delegated_conda.py index b0bdd6b64b..c8070b063a 100644 --- a/benchmarks/asv_delegated_conda.py +++ b/benchmarks/asv_delegated_conda.py @@ -2,8 +2,9 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -"""ASV plug-in providing an alternative :class:`asv.plugins.conda.Conda` -subclass that manages the Conda environment via custom user scripts. +"""ASV plug-in providing an alternative :class:`asv.plugins.conda.Conda` subclass. + +Manages the Conda environment via custom user scripts. """ @@ -42,7 +43,9 @@ def __init__( requirements: dict, tagged_env_vars: dict, ) -> None: - """Parameters + """__init__. + + Parameters ---------- conf : Config instance diff --git a/benchmarks/benchmarks/__init__.py b/benchmarks/benchmarks/__init__.py index 14edb2eda7..87a77fa5a4 100644 --- a/benchmarks/benchmarks/__init__.py +++ b/benchmarks/benchmarks/__init__.py @@ -10,7 +10,7 @@ def disable_repeat_between_setup(benchmark_object): - """Decorator for benchmarks where object persistence would be inappropriate. + """Benchmarks where object persistence would be inappropriate (decorator). E.g: * Benchmarking data realisation @@ -36,7 +36,9 @@ def disable_repeat_between_setup(benchmark_object): class TrackAddedMemoryAllocation: - """Context manager which measures by how much process resident memory grew, + """Measures by how much process resident memory grew, during execution. + + Context manager which measures by how much process resident memory grew, during execution of its enclosed code block. Obviously limited as to what it actually measures : Relies on the current @@ -84,7 +86,7 @@ def addedmem_mb(self): @staticmethod def decorator(decorated_func): - """Decorates this benchmark to track growth in resident memory during execution. 
+ """Benchmark to track growth in resident memory during execution. Intended for use on ASV ``track_`` benchmarks. Applies the :class:`TrackAddedMemoryAllocation` context manager to the benchmark @@ -105,7 +107,9 @@ def _wrapper(*args, **kwargs): def on_demand_benchmark(benchmark_object): - """Decorator. Disables these benchmark(s) unless ON_DEMAND_BENCHARKS env var is set. + """Disables these benchmark(s) unless ON_DEMAND_BENCHARKS env var is set. + + This is a decorator. For benchmarks that, for whatever reason, should not be run by default. E.g: diff --git a/benchmarks/benchmarks/aux_factory.py b/benchmarks/benchmarks/aux_factory.py index e5ed710941..2da93351ee 100644 --- a/benchmarks/benchmarks/aux_factory.py +++ b/benchmarks/benchmarks/aux_factory.py @@ -16,7 +16,9 @@ class FactoryCommon: # * make class an ABC # * remove NotImplementedError # * combine setup_common into setup - """A base class running a generalised suite of benchmarks for any factory. + """Run a generalised suite of benchmarks for any factory. + + A base class running a generalised suite of benchmarks for any factory. Factory to be specified in a subclass. ASV will run the benchmarks within this class for any subclasses. diff --git a/benchmarks/benchmarks/coords.py b/benchmarks/benchmarks/coords.py index 1d920ac357..d1f7631e00 100644 --- a/benchmarks/benchmarks/coords.py +++ b/benchmarks/benchmarks/coords.py @@ -23,7 +23,9 @@ class CoordCommon: # * make class an ABC # * remove NotImplementedError # * combine setup_common into setup - """A base class running a generalised suite of benchmarks for any coord. + """Run a generalised suite of benchmarks for any coord. + + A base class running a generalised suite of benchmarks for any coord. Coord to be specified in a subclass. ASV will run the benchmarks within this class for any subclasses. 
diff --git a/benchmarks/benchmarks/cperf/equality.py b/benchmarks/benchmarks/cperf/equality.py index 97ab9d9b5f..16f8c10aab 100644 --- a/benchmarks/benchmarks/cperf/equality.py +++ b/benchmarks/benchmarks/cperf/equality.py @@ -8,8 +8,11 @@ class EqualityMixin(SingleDiagnosticMixin): - r"""Uses :class:`SingleDiagnosticMixin` as the realistic case will be comparing + r"""Use :class:`SingleDiagnosticMixin` as the realistic case. + + Uses :class:`SingleDiagnosticMixin` as the realistic case will be comparing :class:`~iris.cube.Cube`\\ s that have been loaded from file. + """ # Cut down the parent parameters. @@ -23,9 +26,7 @@ def setup(self, file_type, three_d=False, three_times=False): @on_demand_benchmark class CubeEquality(EqualityMixin): - r"""Benchmark time and memory costs of comparing LFRic and UM - :class:`~iris.cube.Cube`\\ s. - """ + r"""Benchmark time & memory costs of comparing LFRic & UM :class:`~iris.cube.Cube`\\ s.""" def _comparison(self): _ = self.cube == self.other_cube diff --git a/benchmarks/benchmarks/cperf/load.py b/benchmarks/benchmarks/cperf/load.py index bcb1ff117b..cafc4631c0 100644 --- a/benchmarks/benchmarks/cperf/load.py +++ b/benchmarks/benchmarks/cperf/load.py @@ -10,7 +10,8 @@ @on_demand_benchmark class SingleDiagnosticLoad(SingleDiagnosticMixin): def time_load(self, _, __, ___): - """The 'real world comparison' + """Perform a 'real world comparison'. + * UM coords are always realised (DimCoords). * LFRic coords are not realised by default (MeshCoords). 
@@ -25,7 +26,7 @@ def time_load(self, _, __, ___): assert coord.has_lazy_bounds() == expecting_lazy_coords def time_load_w_realised_coords(self, _, __, ___): - """A valuable extra comparison where both UM and LFRic coords are realised.""" + """Valuable extra comparison where both UM and LFRic coords are realised.""" cube = self.load() for coord_name in "longitude", "latitude": coord = cube.coord(coord_name) diff --git a/benchmarks/benchmarks/cperf/save.py b/benchmarks/benchmarks/cperf/save.py index 28898225d6..2d60f920c4 100644 --- a/benchmarks/benchmarks/cperf/save.py +++ b/benchmarks/benchmarks/cperf/save.py @@ -14,6 +14,7 @@ @on_demand_benchmark class NetcdfSave: """Benchmark time and memory costs of saving ~large-ish data cubes to netcdf. + Parametrised by file type. """ diff --git a/benchmarks/benchmarks/cube.py b/benchmarks/benchmarks/cube.py index d017e5477e..4548d4c28d 100644 --- a/benchmarks/benchmarks/cube.py +++ b/benchmarks/benchmarks/cube.py @@ -28,7 +28,9 @@ class ComponentCommon: # * make class an ABC # * remove NotImplementedError # * combine setup_common into setup - """A base class running a generalised suite of benchmarks for cubes that + """Run a generalised suite of benchmarks for cubes. + + A base class running a generalised suite of benchmarks for cubes that include a specified component (e.g. Coord, CellMeasure etc.). Component to be specified in a subclass. @@ -43,7 +45,7 @@ def setup(self): raise NotImplementedError def create(self): - """Generic cube creation. + """Create a cube (generic). cube_kwargs allow dynamic inclusion of different components; specified in subclasses. 
diff --git a/benchmarks/benchmarks/experimental/ugrid/__init__.py b/benchmarks/benchmarks/experimental/ugrid/__init__.py index 960734e1c6..c2335990aa 100644 --- a/benchmarks/benchmarks/experimental/ugrid/__init__.py +++ b/benchmarks/benchmarks/experimental/ugrid/__init__.py @@ -15,7 +15,9 @@ class UGridCommon: - """A base class running a generalised suite of benchmarks for any ugrid object. + """Run a generalised suite of benchmarks for any ugrid object. + + A base class running a generalised suite of benchmarks for any ugrid object. Object to be specified in a subclass. ASV will run the benchmarks within this class for any subclasses. diff --git a/benchmarks/benchmarks/experimental/ugrid/regions_combine.py b/benchmarks/benchmarks/experimental/ugrid/regions_combine.py index b47082590b..10711d0349 100644 --- a/benchmarks/benchmarks/experimental/ugrid/regions_combine.py +++ b/benchmarks/benchmarks/experimental/ugrid/regions_combine.py @@ -2,14 +2,18 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -"""Benchmarks stages of operation of the function +"""Benchmarks stages of operation. + +Benchmarks stages of operation of the function :func:`iris.experimental.ugrid.utils.recombine_submeshes`. Where possible benchmarks should be parameterised for two sizes of input data: - * minimal: enables detection of regressions in parts of the run-time that do - NOT scale with data size. - * large: large enough to exclusively detect regressions in parts of the - run-time that scale with data size. + +* minimal: enables detection of regressions in parts of the run-time that do + NOT scale with data size. + +* large: large enough to exclusively detect regressions in parts of the + run-time that scale with data size. 
""" import os @@ -88,7 +92,7 @@ def setup_cache(self): ) def setup(self, n_cubesphere, imaginary_data=True, create_result_cube=True): - """The combine-tests "standard" setup operation. + """Combine-tests "standard" setup operation. Load the source cubes (full-mesh + region) from disk. These are specific to the cubesize parameter. @@ -193,10 +197,13 @@ def track_addedmem_compute_data(self, n_cubesphere): class CombineRegionsSaveData(MixinCombineRegions): - """Test saving *only*, having replaced the input cube data with 'imaginary' + """Test saving *only*. + + Test saving *only*, having replaced the input cube data with 'imaginary' array data, so that input data is not loaded from disk during the save operation. + """ def time_save(self, n_cubesphere): @@ -219,6 +226,7 @@ def track_filesize_saved(self, n_cubesphere): class CombineRegionsFileStreamedCalc(MixinCombineRegions): """Test the whole cost of file-to-file streaming. + Uses the combined cube which is based on lazy data loading from the region cubes on disk. """ diff --git a/benchmarks/benchmarks/generate_data/stock.py b/benchmarks/benchmarks/generate_data/stock.py index a04eff6701..17f3b23f92 100644 --- a/benchmarks/benchmarks/generate_data/stock.py +++ b/benchmarks/benchmarks/generate_data/stock.py @@ -49,7 +49,7 @@ def _external(func_name_, temp_file_dir, **kwargs_): def create_file__xios_2d_face_half_levels( temp_file_dir, dataset_name, n_faces=866, n_times=1 ): - """Wrapper for :meth:`iris.tests.stock.netcdf.create_file__xios_2d_face_half_levels`. + """Create file wrapper for :meth:`iris.tests.stock.netcdf.create_file__xios_2d_face_half_levels`. Have taken control of temp_file_dir @@ -67,7 +67,7 @@ def create_file__xios_2d_face_half_levels( def create_file__xios_3d_face_half_levels( temp_file_dir, dataset_name, n_faces=866, n_times=1, n_levels=38 ): - """Wrapper for :meth:`iris.tests.stock.netcdf.create_file__xios_3d_face_half_levels`. 
+ """Create file wrapper for :meth:`iris.tests.stock.netcdf.create_file__xios_3d_face_half_levels`. Have taken control of temp_file_dir @@ -84,7 +84,7 @@ def create_file__xios_3d_face_half_levels( def sample_mesh(n_nodes=None, n_faces=None, n_edges=None, lazy_values=False): - """Wrapper for :meth:iris.tests.stock.mesh.sample_mesh`.""" + """Sample mesh wrapper for :meth:iris.tests.stock.mesh.sample_mesh`.""" def _external(*args, **kwargs): from iris.experimental.ugrid import save_mesh @@ -112,7 +112,7 @@ def _external(*args, **kwargs): def sample_meshcoord(sample_mesh_kwargs=None, location="face", axis="x"): - """Wrapper for :meth:`iris.tests.stock.mesh.sample_meshcoord`. + """Sample meshcoord wrapper for :meth:`iris.tests.stock.mesh.sample_meshcoord`. Parameters deviate from the original as cannot pass a :class:`iris.experimental.ugrid.Mesh to the separate Python instance - must diff --git a/benchmarks/benchmarks/generate_data/ugrid.py b/benchmarks/benchmarks/generate_data/ugrid.py index d96e991c59..0ac349d882 100644 --- a/benchmarks/benchmarks/generate_data/ugrid.py +++ b/benchmarks/benchmarks/generate_data/ugrid.py @@ -14,7 +14,9 @@ def generate_cube_like_2d_cubesphere(n_cube: int, with_mesh: bool, output_path: str): - """Construct and save to file an LFRIc cubesphere-like cube for a given + """Construct and save to file an LFRIc cubesphere-like cube. + + Construct and save to file an LFRIc cubesphere-like cube for a given cubesphere size, *or* a simpler structured (UM-like) cube of equivalent size. @@ -54,7 +56,9 @@ def generate_cube_like_2d_cubesphere(n_cube: int, with_mesh: bool, output_path: def make_cube_like_2d_cubesphere(n_cube: int, with_mesh: bool): - """Generate an LFRIc cubesphere-like cube for a given cubesphere size, + """Generate an LFRIc cubesphere-like cube. + + Generate an LFRIc cubesphere-like cube for a given cubesphere size, *or* a simpler structured (UM-like) cube of equivalent size. 
All the cube data, coords and mesh content are LAZY, and produced without @@ -155,7 +159,9 @@ def _external(xy_dims_, save_path_): def make_cubesphere_testfile(c_size, n_levels=0, n_times=1): - """Build a C cubesphere testfile in a given directory, with a standard naming. + """Build a C cubesphere testfile in a given directory. + + Build a C cubesphere testfile in a given directory, with a standard naming. If n_levels > 0 specified: 3d file with the specified number of levels. Return the file path. diff --git a/benchmarks/benchmarks/import_iris.py b/benchmarks/benchmarks/import_iris.py index 51679ce7df..566ffca78b 100644 --- a/benchmarks/benchmarks/import_iris.py +++ b/benchmarks/benchmarks/import_iris.py @@ -31,8 +31,7 @@ class Iris: @staticmethod def _import(module_name, reset_colormaps=False): - """Have experimented with adding sleep() commands into the imported - modules. + """Have experimented with adding sleep() commands into the imported modules. The results reveal: diff --git a/benchmarks/benchmarks/sperf/combine_regions.py b/benchmarks/benchmarks/sperf/combine_regions.py index 27a3560d51..7d677ed74f 100644 --- a/benchmarks/benchmarks/sperf/combine_regions.py +++ b/benchmarks/benchmarks/sperf/combine_regions.py @@ -83,7 +83,7 @@ def setup_cache(self): ) def setup(self, n_cubesphere, imaginary_data=True, create_result_cube=True): - """The combine-tests "standard" setup operation. + """Combine-tests "standard" setup operation. Load the source cubes (full-mesh + region) from disk. These are specific to the cubesize parameter. @@ -195,7 +195,9 @@ def track_addedmem_compute_data(self, n_cubesphere): @on_demand_benchmark class SaveData(Mixin): - """Test saving *only*, having replaced the input cube data with 'imaginary' + """Test saving *only*. + + Test saving *only*, having replaced the input cube data with 'imaginary' array data, so that input data is not loaded from disk during the save operation. 
@@ -217,8 +219,10 @@ def track_filesize_saved(self, n_cubesphere): @on_demand_benchmark class FileStreamedCalc(Mixin): """Test the whole cost of file-to-file streaming. + Uses the combined cube which is based on lazy data loading from the region cubes on disk. + """ def setup(self, n_cubesphere, imaginary_data=False, create_result_cube=True): diff --git a/benchmarks/benchmarks/sperf/equality.py b/benchmarks/benchmarks/sperf/equality.py index 4d6e43d8b9..339687a22c 100644 --- a/benchmarks/benchmarks/sperf/equality.py +++ b/benchmarks/benchmarks/sperf/equality.py @@ -9,8 +9,10 @@ @on_demand_benchmark class CubeEquality(FileMixin): - r"""Benchmark time and memory costs of comparing :class:`~iris.cube.Cube`\\ s - with attached :class:`~iris.experimental.ugrid.mesh.Mesh`\\ es. + r"""Benchmark time and memory costs. + + Benchmark time and memory costs of comparing :class:`~iris.cube.Cube`\\ s + with attached :class:`~iris.experimental.ugrid.mesh.Mesh`\\ es. Uses :class:`FileMixin` as the realistic case will be comparing :class:`~iris.cube.Cube`\\ s that have been loaded from file. diff --git a/benchmarks/benchmarks/stats.py b/benchmarks/benchmarks/stats.py new file mode 100644 index 0000000000..0530431900 --- /dev/null +++ b/benchmarks/benchmarks/stats.py @@ -0,0 +1,38 @@ +# Copyright Iris contributors +# +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. +"""Stats benchmark tests.""" + +import iris +from iris.analysis.stats import pearsonr +import iris.tests + + +class PearsonR: + def setup(self): + cube_temp = iris.load_cube( + iris.tests.get_data_path( + ("NetCDF", "global", "xyt", "SMALL_total_column_co2.nc") + ) + ) + + # Make data non-lazy. 
+ cube_temp.data + + self.cube_a = cube_temp[:6] + self.cube_b = cube_temp[20:26] + self.cube_b.replace_coord(self.cube_a.coord("time")) + for name in ["latitude", "longitude"]: + self.cube_b.coord(name).guess_bounds() + self.weights = iris.analysis.cartography.area_weights(self.cube_b) + + def time_real(self): + pearsonr(self.cube_a, self.cube_b, weights=self.weights) + + def time_lazy(self): + for cube in self.cube_a, self.cube_b: + cube.data = cube.lazy_data() + + result = pearsonr(self.cube_a, self.cube_b, weights=self.weights) + result.data diff --git a/benchmarks/bm_runner.py b/benchmarks/bm_runner.py index 741ecb8580..10dc5f469a 100644 --- a/benchmarks/bm_runner.py +++ b/benchmarks/bm_runner.py @@ -332,7 +332,7 @@ def add_asv_arguments(self) -> None: @staticmethod @abstractmethod def func(args: argparse.Namespace): - """The function to return when the subparser is parsed. + """Return when the subparser is parsed. `func` is then called, performing the user's selected sub-command. diff --git a/docs/gallery_code/general/plot_SOI_filtering.py b/docs/gallery_code/general/plot_SOI_filtering.py index 5082162068..32fe7c6ff7 100644 --- a/docs/gallery_code/general/plot_SOI_filtering.py +++ b/docs/gallery_code/general/plot_SOI_filtering.py @@ -1,4 +1,5 @@ -"""Applying a Filter to a Time-Series +""" +Applying a Filter to a Time-Series ================================== This example demonstrates low pass filtering a time-series by applying a @@ -17,7 +18,7 @@ Trenberth K. E. (1984) Signal Versus Noise in the Southern Oscillation. 
Monthly Weather Review, Vol 112, pp 326-332 -""" # noqa: D400 +""" # noqa: D205, D212, D400 import matplotlib.pyplot as plt import numpy as np diff --git a/docs/gallery_code/general/plot_anomaly_log_colouring.py b/docs/gallery_code/general/plot_anomaly_log_colouring.py index 98751b0dbf..16210173c5 100644 --- a/docs/gallery_code/general/plot_anomaly_log_colouring.py +++ b/docs/gallery_code/general/plot_anomaly_log_colouring.py @@ -1,4 +1,5 @@ -"""Colouring Anomaly Data With Logarithmic Scaling +""" +Colouring Anomaly Data With Logarithmic Scaling =============================================== In this example, we need to plot anomaly data where the values have a @@ -22,7 +23,7 @@ and :obj:`matplotlib.pyplot.pcolormesh`). See also: https://en.wikipedia.org/wiki/False_color#Pseudocolor. -""" # noqa: D400 +""" # noqa: D205, D212, D400 import cartopy.crs as ccrs import matplotlib.colors as mcols diff --git a/docs/gallery_code/general/plot_coriolis.py b/docs/gallery_code/general/plot_coriolis.py index 3d5aa853e3..905108abfd 100644 --- a/docs/gallery_code/general/plot_coriolis.py +++ b/docs/gallery_code/general/plot_coriolis.py @@ -1,11 +1,12 @@ -"""Deriving the Coriolis Frequency Over the Globe +""" +Deriving the Coriolis Frequency Over the Globe ============================================== This code computes the Coriolis frequency and stores it in a cube with associated metadata. It then plots the Coriolis frequency on an orthographic projection. 
-""" # noqa: D400 +""" # noqa: D205, D212, D400 import cartopy.crs as ccrs import matplotlib.pyplot as plt diff --git a/docs/gallery_code/general/plot_cross_section.py b/docs/gallery_code/general/plot_cross_section.py index 8eac3955b1..8e5bee85ed 100644 --- a/docs/gallery_code/general/plot_cross_section.py +++ b/docs/gallery_code/general/plot_cross_section.py @@ -1,10 +1,11 @@ -"""Cross Section Plots +""" +Cross Section Plots =================== This example demonstrates contour plots of a cross-sectioned multi-dimensional cube which features a hybrid height vertical coordinate system. -""" # noqa: D400 +""" # noqa: D205, D212, D400 import matplotlib.pyplot as plt diff --git a/docs/gallery_code/general/plot_custom_aggregation.py b/docs/gallery_code/general/plot_custom_aggregation.py index 521d0069b9..540f785ed6 100644 --- a/docs/gallery_code/general/plot_custom_aggregation.py +++ b/docs/gallery_code/general/plot_custom_aggregation.py @@ -1,4 +1,5 @@ -"""Calculating a Custom Statistic +""" +Calculating a Custom Statistic ============================== This example shows how to define and use a custom @@ -11,7 +12,7 @@ over North America, and we want to calculate in how many years these exceed a certain temperature over a spell of 5 years or more. -""" # noqa: D400 +""" # noqa: D205, D212, D400 import matplotlib.pyplot as plt import numpy as np @@ -27,25 +28,24 @@ # Note: in order to meet the requirements of iris.analysis.Aggregator, it must # do the calculation over an arbitrary (given) data axis. def count_spells(data, threshold, axis, spell_length): - """Function to calculate the number of points in a sequence where the value + """Calculate the number of points in a sequence. + + Function to calculate the number of points in a sequence where the value has exceeded a threshold value for at least a certain number of timepoints. Generalised to operate on multiple time sequences arranged on a specific axis of a multidimensional array. 
- Args: - - * data (array): + Parameters + ---------- + data : array raw data to be compared with value threshold. - - * threshold (float): + threshold : float threshold point for 'significant' datapoints. - - * axis (int): + axis : int number of the array dimension mapping the time sequences. (Can also be negative, e.g. '-1' means last dimension) - - * spell_length (int): + spell_length : int number of consecutive times at which value > threshold to "count". """ diff --git a/docs/gallery_code/general/plot_custom_file_loading.py b/docs/gallery_code/general/plot_custom_file_loading.py index 81aad4a15a..06de887614 100644 --- a/docs/gallery_code/general/plot_custom_file_loading.py +++ b/docs/gallery_code/general/plot_custom_file_loading.py @@ -1,5 +1,6 @@ -"""Loading a Cube From a Custom File Format -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +""" +Loading a Cube From a Custom File Format +======================================== This example shows how a custom text file can be loaded using the standard Iris load mechanism. @@ -8,25 +9,25 @@ ` for the file format. To create a format specification we need to define the following: -* format_name - Some text that describes the format specification we are +* **format_name** - Some text that describes the format specification we are creating -* file_element - FileElement object describing the element which identifies +* **file_element** - FileElement object describing the element which identifies this FormatSpecification. Possible values are: - ``iris.io.format_picker.MagicNumber(n, o)`` - The n bytes from the file at offset o. + * ``iris.io.format_picker.MagicNumber(n, o)`` + The n bytes from the file at offset o. - ``iris.io.format_picker.FileExtension()`` - The file's extension. + * ``iris.io.format_picker.FileExtension()`` + The file extension. - ``iris.io.format_picker.LeadingLine()`` - The first line of the file. + * ``iris.io.format_picker.LeadingLine()`` + The first line of the file. 
-* file_element_value - The value that the file_element should take if a file +* **file_element_value** - The value that the file_element should take if a file matches this FormatSpecification -* handler (optional) - A generator function that will be called when the file +* **handler** (optional) - A generator function that will be called when the file specification has been identified. This function is provided by the user and provides the means to parse the whole file. If no handler function is provided, then identification is still possible without any handling. @@ -40,7 +41,7 @@ The handler function must be defined as generator which yields each cube as they are produced. -* priority (optional) - Integer giving a priority for considering this +* **priority** (optional) - Integer giving a priority for considering this specification where higher priority means sooner consideration In the following example, the function :func:`load_NAME_III` has been defined @@ -52,7 +53,7 @@ function which automatically invokes the ``FormatSpecification`` we defined. The cube returned from the load function is then used to produce a plot. -""" # noqa: D400 +""" # noqa: D205, D212, D400 import datetime @@ -89,7 +90,7 @@ def load_NAME_III(filename): - """Loads the Met Office's NAME III grid output files. + """Load the Met Office's NAME III grid output files. Loads the Met Office's NAME III grid output files returning headers, column definitions and data arrays as 3 separate lists. 
@@ -179,7 +180,7 @@ def load_NAME_III(filename): def NAME_to_cube(filenames, callback): - """Returns a generator of cubes given a list of filenames and a callback.""" + """Return a generator of cubes given a list of filenames and a callback.""" for filename in filenames: header, column_headings, data_arrays = load_NAME_III(filename) diff --git a/docs/gallery_code/general/plot_global_map.py b/docs/gallery_code/general/plot_global_map.py index 9634fc1458..60ac200a43 100644 --- a/docs/gallery_code/general/plot_global_map.py +++ b/docs/gallery_code/general/plot_global_map.py @@ -1,10 +1,11 @@ -"""Quickplot of a 2D Cube on a Map +""" +Quickplot of a 2D Cube on a Map =============================== This example demonstrates a contour plot of global air temperature. The plot title and the labels for the axes are automatically derived from the metadata. -""" # noqa: D400 +""" # noqa: D205, D212, D400 import cartopy.crs as ccrs import matplotlib.pyplot as plt diff --git a/docs/gallery_code/general/plot_inset.py b/docs/gallery_code/general/plot_inset.py index 7b7e04c7d3..5edd375743 100644 --- a/docs/gallery_code/general/plot_inset.py +++ b/docs/gallery_code/general/plot_inset.py @@ -1,11 +1,12 @@ -"""Test Data Showing Inset Plots +""" +Test Data Showing Inset Plots ============================= This example demonstrates the use of a single 3D data cube with time, latitude and longitude dimensions to plot a temperature series for a single latitude coordinate, with an inset plot of the data region. 
-""" # noqa: D400 +""" # noqa: D205, D212, D400 import cartopy.crs as ccrs import matplotlib.pyplot as plt diff --git a/docs/gallery_code/general/plot_lineplot_with_legend.py b/docs/gallery_code/general/plot_lineplot_with_legend.py index 981e9694ec..d1b3acd912 100644 --- a/docs/gallery_code/general/plot_lineplot_with_legend.py +++ b/docs/gallery_code/general/plot_lineplot_with_legend.py @@ -1,7 +1,8 @@ -"""Multi-Line Temperature Profile Plot -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +""" +Multi-Line Temperature Profile Plot +=================================== -""" # noqa: D400 +""" # noqa: D205, D212, D400 import matplotlib.pyplot as plt diff --git a/docs/gallery_code/general/plot_polar_stereo.py b/docs/gallery_code/general/plot_polar_stereo.py index 0cb4e533fa..99abbd0ae0 100644 --- a/docs/gallery_code/general/plot_polar_stereo.py +++ b/docs/gallery_code/general/plot_polar_stereo.py @@ -1,10 +1,11 @@ -"""Example of a Polar Stereographic Plot +""" +Example of a Polar Stereographic Plot ===================================== Demonstrates plotting data that are defined on a polar stereographic projection. -""" # noqa: D400 +""" # noqa: D205, D212, D400 import matplotlib.pyplot as plt diff --git a/docs/gallery_code/general/plot_polynomial_fit.py b/docs/gallery_code/general/plot_polynomial_fit.py index 951f17209e..37cc4e283b 100644 --- a/docs/gallery_code/general/plot_polynomial_fit.py +++ b/docs/gallery_code/general/plot_polynomial_fit.py @@ -1,11 +1,12 @@ -"""Fitting a Polynomial +""" +Fitting a Polynomial ==================== This example demonstrates computing a polynomial fit to 1D data from an Iris cube, adding the fit to the cube's metadata, and plotting both the 1D data and the fit. 
-""" # noqa: D400 +""" # noqa: D205, D212, D400 import matplotlib.pyplot as plt import numpy as np diff --git a/docs/gallery_code/general/plot_projections_and_annotations.py b/docs/gallery_code/general/plot_projections_and_annotations.py index 1a4701837a..6e8ba5a5af 100644 --- a/docs/gallery_code/general/plot_projections_and_annotations.py +++ b/docs/gallery_code/general/plot_projections_and_annotations.py @@ -1,4 +1,5 @@ -"""Plotting in Different Projections +""" +Plotting in Different Projections ================================= This example shows how to overlay data and graphics in different projections, @@ -11,7 +12,7 @@ We plot these over a specified region, in two different map projections. -""" # noqa: D400 +""" # noqa: D205, D212, D400 import cartopy.crs as ccrs import matplotlib.pyplot as plt diff --git a/docs/gallery_code/general/plot_rotated_pole_mapping.py b/docs/gallery_code/general/plot_rotated_pole_mapping.py index 3674e89e28..60b187ee56 100644 --- a/docs/gallery_code/general/plot_rotated_pole_mapping.py +++ b/docs/gallery_code/general/plot_rotated_pole_mapping.py @@ -1,15 +1,16 @@ -"""Rotated Pole Mapping +""" +Rotated Pole Mapping ===================== This example uses several visualisation methods to achieve an array of differing images, including: - * Visualisation of point based data - * Contouring of point based data - * Block plot of contiguous bounded data - * Non native projection and a Natural Earth shaded relief image underlay +* Visualisation of point based data +* Contouring of point based data +* Block plot of contiguous bounded data +* Non native projection and a Natural Earth shaded relief image underlay -""" # noqa: D400 +""" # noqa: D205, D212, D400 import cartopy.crs as ccrs import matplotlib.pyplot as plt diff --git a/docs/gallery_code/general/plot_zonal_means.py b/docs/gallery_code/general/plot_zonal_means.py index 47a7127d06..d4ec1eb1fc 100644 --- a/docs/gallery_code/general/plot_zonal_means.py +++ 
b/docs/gallery_code/general/plot_zonal_means.py @@ -1,7 +1,11 @@ -"""Zonal Mean Diagram of Air Temperature +""" +Zonal Mean Diagram of Air Temperature ===================================== -This example demonstrates aligning a linear plot and a cartographic plot using Matplotlib. -""" # noqa: D400 + +This example demonstrates aligning a linear plot and a cartographic plot +using Matplotlib. + +""" # noqa: D205, D212, D400 import cartopy.crs as ccrs import matplotlib.pyplot as plt diff --git a/docs/gallery_code/meteorology/plot_COP_1d.py b/docs/gallery_code/meteorology/plot_COP_1d.py index d5ef2a9990..84addd140a 100644 --- a/docs/gallery_code/meteorology/plot_COP_1d.py +++ b/docs/gallery_code/meteorology/plot_COP_1d.py @@ -1,4 +1,5 @@ -"""Global Average Annual Temperature Plot +""" +Global Average Annual Temperature Plot ====================================== Produces a time-series plot of North American temperature forecasts for 2 @@ -25,7 +26,7 @@ Further details on the aggregation functionality being used in this example can be found in :ref:`cube-statistics`. -""" # noqa: D400 +""" # noqa: D205, D212, D400 import matplotlib.pyplot as plt import numpy as np diff --git a/docs/gallery_code/meteorology/plot_COP_maps.py b/docs/gallery_code/meteorology/plot_COP_maps.py index 5536823fb4..1c5e865a8f 100644 --- a/docs/gallery_code/meteorology/plot_COP_maps.py +++ b/docs/gallery_code/meteorology/plot_COP_maps.py @@ -1,4 +1,5 @@ -"""Global Average Annual Temperature Maps +""" +Global Average Annual Temperature Maps ====================================== Produces maps of global temperature forecasts from the A1B and E1 scenarios. @@ -18,7 +19,7 @@ Analyses, and Scenarios. Eos Trans. AGU, Vol 90, No. 21, doi:10.1029/2009EO210001. 
-""" # noqa: D400 +""" # noqa: D205, D212, D400 import os.path @@ -31,7 +32,7 @@ def cop_metadata_callback(cube, field, filename): - """Function which adds an "Experiment" coordinate which comes from the filename.""" + """Add an "Experiment" coordinate which comes from the filename.""" # Extract the experiment name (such as A1B or E1) from the filename (in # this case it is just the start of the file name, before the first "."). fname = os.path.basename(filename) # filename without path. diff --git a/docs/gallery_code/meteorology/plot_TEC.py b/docs/gallery_code/meteorology/plot_TEC.py index cb642af588..e6269eaf9b 100644 --- a/docs/gallery_code/meteorology/plot_TEC.py +++ b/docs/gallery_code/meteorology/plot_TEC.py @@ -1,4 +1,5 @@ -"""Ionosphere Space Weather +""" +Ionosphere Space Weather ======================== This space weather example plots a filled contour of rotated pole point @@ -8,7 +9,7 @@ The plot exhibits an interesting outline effect due to excluding data values below a certain threshold. -""" # noqa: D400 +""" # noqa: D205, D212, D400 import matplotlib.pyplot as plt import numpy.ma as ma diff --git a/docs/gallery_code/meteorology/plot_deriving_phenomena.py b/docs/gallery_code/meteorology/plot_deriving_phenomena.py index ef78d2f1c9..81a05be9b9 100644 --- a/docs/gallery_code/meteorology/plot_deriving_phenomena.py +++ b/docs/gallery_code/meteorology/plot_deriving_phenomena.py @@ -1,4 +1,5 @@ -"""Deriving Exner Pressure and Air Temperature +""" +Deriving Exner Pressure and Air Temperature =========================================== This example shows some processing of cubes in order to derive further related @@ -7,7 +8,7 @@ specific humidity. Finally, the two new cubes are presented side-by-side in a plot. 
-""" # noqa: D400 +""" # noqa: D205, D212, D400 import matplotlib.pyplot as plt import matplotlib.ticker @@ -20,7 +21,9 @@ def limit_colorbar_ticks(contour_object): - """Takes a contour object which has an associated colorbar and limits the + """Limit colobar number of ticks. + + Take a contour object which has an associated colorbar and limits the number of ticks on the colorbar to 4. """ diff --git a/docs/gallery_code/meteorology/plot_hovmoller.py b/docs/gallery_code/meteorology/plot_hovmoller.py index 6c1f1a800a..829b370d78 100644 --- a/docs/gallery_code/meteorology/plot_hovmoller.py +++ b/docs/gallery_code/meteorology/plot_hovmoller.py @@ -1,4 +1,5 @@ -"""Hovmoller Diagram of Monthly Surface Temperature +""" +Hovmoller Diagram of Monthly Surface Temperature ================================================ This example demonstrates the creation of a Hovmoller diagram with fine control @@ -6,7 +7,7 @@ and has been pre-processed to calculate the monthly mean sea surface temperature. -""" # noqa: D400 +""" # noqa: D205, D212, D400 import matplotlib.dates as mdates import matplotlib.pyplot as plt diff --git a/docs/gallery_code/meteorology/plot_lagged_ensemble.py b/docs/gallery_code/meteorology/plot_lagged_ensemble.py index a8887238d4..7c34572136 100644 --- a/docs/gallery_code/meteorology/plot_lagged_ensemble.py +++ b/docs/gallery_code/meteorology/plot_lagged_ensemble.py @@ -1,4 +1,5 @@ -"""Seasonal Ensemble Model Plots +""" +Seasonal Ensemble Model Plots ============================= This example demonstrates the loading of a lagged ensemble dataset from the @@ -15,7 +16,7 @@ better approach would be to take the climatological mean, calibrated to the model, from each ensemble member. 
-""" # noqa: D400 +""" # noqa: D205, D212, D400 import matplotlib.pyplot as plt import matplotlib.ticker @@ -26,7 +27,9 @@ def realization_metadata(cube, field, fname): - """A function which modifies the cube's metadata to add a "realization" + """Modify the cube's metadata to add a "realization" coordinate. + + A function which modifies the cube's metadata to add a "realization" (ensemble member) coordinate from the filename if one doesn't already exist in the cube. diff --git a/docs/gallery_code/meteorology/plot_wind_barbs.py b/docs/gallery_code/meteorology/plot_wind_barbs.py index 9745a40db2..4126d1f995 100644 --- a/docs/gallery_code/meteorology/plot_wind_barbs.py +++ b/docs/gallery_code/meteorology/plot_wind_barbs.py @@ -1,4 +1,5 @@ -"""Plotting Wind Direction Using Barbs +""" +Plotting Wind Direction Using Barbs =================================== This example demonstrates using barbs to plot wind speed contours and wind @@ -8,7 +9,7 @@ The magnitude of the wind in the original data is low and so doesn't illustrate the full range of barbs. The wind is scaled to simulate a storm that better illustrates the range of barbs that are available. -""" # noqa: D400 +""" # noqa: D205, D212, D400 import matplotlib.pyplot as plt diff --git a/docs/gallery_code/meteorology/plot_wind_speed.py b/docs/gallery_code/meteorology/plot_wind_speed.py index dde87824fd..5310ad937d 100644 --- a/docs/gallery_code/meteorology/plot_wind_speed.py +++ b/docs/gallery_code/meteorology/plot_wind_speed.py @@ -1,4 +1,5 @@ -"""Plotting Wind Direction Using Quiver +""" +Plotting Wind Direction Using Quiver ==================================== This example demonstrates using quiver to plot wind speed contours and wind @@ -8,7 +9,7 @@ For the second plot, the data used for the arrows is normalised to produce arrows with a uniform size on the plot. 
-""" # noqa: D400 +""" # noqa: D205, D212, D400 import cartopy.feature as cfeat import matplotlib.pyplot as plt diff --git a/docs/gallery_code/oceanography/plot_atlantic_profiles.py b/docs/gallery_code/oceanography/plot_atlantic_profiles.py index 736ddbe7fb..a43fb7f8cb 100644 --- a/docs/gallery_code/oceanography/plot_atlantic_profiles.py +++ b/docs/gallery_code/oceanography/plot_atlantic_profiles.py @@ -1,4 +1,5 @@ -"""Oceanographic Profiles and T-S Diagrams +""" +Oceanographic Profiles and T-S Diagrams ======================================= This example demonstrates how to plot vertical profiles of different @@ -13,7 +14,7 @@ presence of the attribute positive=down on the depth coordinate. This means depth values intuitively increase downward on the y-axis. -""" # noqa: D400 +""" # noqa: D205, D212, D400 import matplotlib.pyplot as plt diff --git a/docs/gallery_code/oceanography/plot_load_nemo.py b/docs/gallery_code/oceanography/plot_load_nemo.py index 77c95e2353..36ff363a15 100644 --- a/docs/gallery_code/oceanography/plot_load_nemo.py +++ b/docs/gallery_code/oceanography/plot_load_nemo.py @@ -1,11 +1,13 @@ -"""Load a Time Series of Data From the NEMO Model +""" +Load a Time Series of Data From the NEMO Model ============================================== This example demonstrates how to load multiple files containing data output by the NEMO model and combine them into a time series in a single cube. The different time dimensions in these files can prevent Iris from concatenating them without the intervention shown here. 
-""" # noqa: D400 + +""" # noqa: D205, D212, D400 import matplotlib.pyplot as plt diff --git a/docs/gallery_code/oceanography/plot_orca_projection.py b/docs/gallery_code/oceanography/plot_orca_projection.py index 33e3ecac46..bb68056cb3 100644 --- a/docs/gallery_code/oceanography/plot_orca_projection.py +++ b/docs/gallery_code/oceanography/plot_orca_projection.py @@ -1,4 +1,5 @@ -"""Tri-Polar Grid Projected Plotting +""" +Tri-Polar Grid Projected Plotting ================================= This example demonstrates cell plots of data on the semi-structured ORCA2 model @@ -9,7 +10,7 @@ Second four pcolormesh plots are created from this projected dataset, using different projections for the output image. -""" # noqa: D400 +""" # noqa: D205, D212, D400 import cartopy.crs as ccrs import matplotlib.pyplot as plt diff --git a/docs/gallery_tests/conftest.py b/docs/gallery_tests/conftest.py index b1b83b7f42..564a2892a2 100644 --- a/docs/gallery_tests/conftest.py +++ b/docs/gallery_tests/conftest.py @@ -18,7 +18,7 @@ @pytest.fixture def image_setup_teardown(): - """Setup and teardown fixture. + """Perform setup and teardown fixture. Ensures all figures are closed before and after test to prevent one test polluting another if it fails with a figure unclosed. @@ -31,7 +31,9 @@ def image_setup_teardown(): @pytest.fixture def import_patches(monkeypatch): - """Replace plt.show() with a function that does nothing, also add all the + """Replace plt.show() with a function that does nothing, also add to sys.path. + + Replace plt.show() with a function that does nothing, also add all the gallery examples to sys.path. """ @@ -50,7 +52,9 @@ def no_show(): @pytest.fixture def iris_future_defaults(): - """Create a fixture which resets all the iris.FUTURE settings to the defaults, + """Create a fixture which resets all the iris.FUTURE settings to the defaults. 
+ + Create a fixture which resets all the iris.FUTURE settings to the defaults, as otherwise changes made in one test can affect subsequent ones. """ diff --git a/docs/gallery_tests/test_gallery_examples.py b/docs/gallery_tests/test_gallery_examples.py index 37f2858ee9..39e8fe0507 100644 --- a/docs/gallery_tests/test_gallery_examples.py +++ b/docs/gallery_tests/test_gallery_examples.py @@ -17,7 +17,7 @@ def gallery_examples(): - """Generator to yield all current gallery examples.""" + """Entry point for generator to yield all current gallery examples.""" for example_file in GALLERY_DIR.glob("*/plot*.py"): yield example_file.stem diff --git a/docs/src/further_topics/filtering_warnings.rst b/docs/src/further_topics/filtering_warnings.rst index ef743fa9f1..fb1e09e98e 100644 --- a/docs/src/further_topics/filtering_warnings.rst +++ b/docs/src/further_topics/filtering_warnings.rst @@ -47,9 +47,9 @@ Warnings: >>> my_operation() ... - iris/coord_systems.py:432: IrisUserWarning: Setting inverse_flattening does not affect other properties of the GeogCS object. To change other properties set them explicitly or create a new GeogCS instance. + iris/coord_systems.py:442: IrisUserWarning: Setting inverse_flattening does not affect other properties of the GeogCS object. To change other properties set them explicitly or create a new GeogCS instance. warnings.warn(wmsg, category=iris.exceptions.IrisUserWarning) - iris/coord_systems.py:770: IrisDefaultingWarning: Discarding false_easting and false_northing that are not used by Cartopy. + iris/coord_systems.py:782: IrisDefaultingWarning: Discarding false_easting and false_northing that are not used by Cartopy. warnings.warn( Warnings can be suppressed using the Python warnings filter with the ``ignore`` @@ -110,7 +110,7 @@ You can target specific Warning messages, e.g. ... warnings.filterwarnings("ignore", message="Discarding false_easting") ... my_operation() ... 
- iris/coord_systems.py:432: IrisUserWarning: Setting inverse_flattening does not affect other properties of the GeogCS object. To change other properties set them explicitly or create a new GeogCS instance. + iris/coord_systems.py:442: IrisUserWarning: Setting inverse_flattening does not affect other properties of the GeogCS object. To change other properties set them explicitly or create a new GeogCS instance. warnings.warn(wmsg, category=iris.exceptions.IrisUserWarning) :: @@ -128,9 +128,9 @@ Or you can target Warnings raised by specific lines of specific modules, e.g. ... warnings.filterwarnings("ignore", module="iris.coord_systems", lineno=449) ... my_operation() ... - iris/coord_systems.py:432: IrisUserWarning: Setting inverse_flattening does not affect other properties of the GeogCS object. To change other properties set them explicitly or create a new GeogCS instance. + iris/coord_systems.py:442: IrisUserWarning: Setting inverse_flattening does not affect other properties of the GeogCS object. To change other properties set them explicitly or create a new GeogCS instance. warnings.warn(wmsg, category=iris.exceptions.IrisUserWarning) - iris/coord_systems.py:770: IrisDefaultingWarning: Discarding false_easting and false_northing that are not used by Cartopy. + iris/coord_systems.py:782: IrisDefaultingWarning: Discarding false_easting and false_northing that are not used by Cartopy. warnings.warn( :: @@ -190,7 +190,7 @@ module during execution: ... ) ... my_operation() ... - iris/coord_systems.py:432: IrisUserWarning: Setting inverse_flattening does not affect other properties of the GeogCS object. To change other properties set them explicitly or create a new GeogCS instance. + iris/coord_systems.py:442: IrisUserWarning: Setting inverse_flattening does not affect other properties of the GeogCS object. To change other properties set them explicitly or create a new GeogCS instance. 
warnings.warn(wmsg, category=iris.exceptions.IrisUserWarning) ---- diff --git a/docs/src/whatsnew/latest.rst b/docs/src/whatsnew/latest.rst index f7037f4ae5..6e6b1b5732 100644 --- a/docs/src/whatsnew/latest.rst +++ b/docs/src/whatsnew/latest.rst @@ -83,6 +83,10 @@ This document explains the changes made to Iris for this release #. `@rcomer`_ and `@trexfeathers`_ (reviewer) added handling for realization coordinates when saving pp files (:issue:`4747`, :pull:`5568`) +#. `@ESadek-MO`_ has updated + :mod:`iris.fileformats._nc_load_rules.helpers` to lessen warning duplication. + (:issue:`5536`, :pull:`5685`) + 💣 Incompatible Changes ======================= @@ -103,6 +107,11 @@ This document explains the changes made to Iris for this release #. `@bouweandela`_ changed :func:`iris.coords.Coord.cell` so it does not realize all coordinate data and only loads a single cell instead. (:pull:`5693`) +#. `@rcomer`_ and `@trexfeathers`_ (reviewer) modified + :func:`~iris.analysis.stats.pearsonr` so it preserves lazy data in all cases + and also runs a little faster. (:pull:`5638`) + + 🔥 Deprecations =============== @@ -118,6 +127,9 @@ This document explains the changes made to Iris for this release #. `@bjlittle`_ enforced the minimum pin of ``numpy>1.22`` in accordance with the `NEP29 Drop Schedule`_. (:pull:`5668`) +#. `@bjlittle`_ updated ``ubuntu`` and ``mambaforge`` to the latest versions for ``readthedocs`` + (:pull:`5702`) + 📚 Documentation ================ diff --git a/lib/iris/_concatenate.py b/lib/iris/_concatenate.py index b5c92fd6f9..214cfa00cc 100644 --- a/lib/iris/_concatenate.py +++ b/lib/iris/_concatenate.py @@ -34,7 +34,9 @@ class _CoordAndDims(namedtuple("CoordAndDims", ["coord", "dims"])): - """Container for a coordinate and the associated data dimension(s) + """Container for a coordinate and the associated data dimension(s). + + Container for a coordinate and the associated data dimension(s) spanned over a :class:`iris.cube.Cube`. 
Parameters @@ -54,8 +56,7 @@ class _CoordMetaData( ["defn", "dims", "points_dtype", "bounds_dtype", "kwargs"], ) ): - """Container for the metadata that defines a dimension or auxiliary - coordinate. + """Container for the metadata that defines a dimension or auxiliary coordinate. Parameters ---------- @@ -150,7 +151,9 @@ def name(self): class _DerivedCoordAndDims( namedtuple("DerivedCoordAndDims", ["coord", "dims", "aux_factory"]) ): - """Container for a derived coordinate, the associated AuxCoordFactory, and the + """Container for a derived coordinate and dimensions(s). + + Container for a derived coordinate, the associated AuxCoordFactory, and the associated data dimension(s) spanned over a :class:`iris.cube.Cube`. Parameters @@ -175,8 +178,7 @@ def __eq__(self, other): class _OtherMetaData(namedtuple("OtherMetaData", ["defn", "dims"])): - """Container for the metadata that defines a cell measure or ancillary - variable. + """Container for the metadata that defines a cell measure or ancillary variable. Parameters ---------- @@ -229,7 +231,9 @@ def name(self): class _SkeletonCube(namedtuple("SkeletonCube", ["signature", "data"])): - """Basis of a source-cube, containing the associated coordinate metadata, + """Basis of a source-cube. + + Basis of a source-cube, containing the associated coordinate metadata, coordinates and cube data payload. Parameters @@ -260,8 +264,7 @@ class _Extent(namedtuple("Extent", ["min", "max"])): class _CoordExtent(namedtuple("CoordExtent", ["points", "bounds"])): - """Container representing the points and bounds extent of a one dimensional - coordinate. + """Container representing the points and bounds extent of a one dimensional coordinate. Parameters ---------- @@ -374,14 +377,15 @@ def _none_sort(item): class _CubeSignature: - """Template for identifying a specific type of :class:`iris.cube.Cube` based + """Template for identifying a specific type of :class:`iris.cube.Cube`. 
+ + Template for identifying a specific type of :class:`iris.cube.Cube` based on its metadata, coordinates and cell_measures. """ def __init__(self, cube): - """Represents the cube metadata and associated coordinate metadata that - allows suitable cubes for concatenation to be identified. + """Represent the cube metadata and associated coordinate metadata. Parameters ---------- @@ -472,7 +476,9 @@ def name_key_func(factory): self.derived_coords_and_dims.append(coord_and_dims) def _coordinate_differences(self, other, attr, reason="metadata"): - """Determine the names of the coordinates that differ between `self` and + """Determine the names of the coordinates that differ. + + Determine the names of the coordinates that differ between `self` and `other` for a coordinate attribute on a _CubeSignature. Parameters @@ -599,13 +605,12 @@ def match(self, other, error_on_mismatch): class _CoordSignature: - """Template for identifying a specific type of :class:`iris.cube.Cube` based - on its coordinates. - - """ + """Template for identifying a specific type of :class:`iris.cube.Cube` based on its coordinates.""" def __init__(self, cube_signature): - """Represents the coordinate metadata required to identify suitable + """Represent the coordinate metadata. + + Represent the coordinate metadata required to identify suitable non-overlapping :class:`iris.cube.Cube` source-cubes for concatenation over a common single dimension. @@ -657,8 +662,7 @@ def _cmp(coord, other): return result, candidate_axis def candidate_axis(self, other): - """Determine the candidate axis of concatenation with the - given coordinate signature. + """Determine the candidate axis of concatenation with the given coordinate signature. If a candidate axis is found, then the coordinate signatures are compatible. @@ -723,14 +727,10 @@ def _calculate_extents(self): class _ProtoCube: - """Framework for concatenating multiple source-cubes over one - common dimension. 
- - """ + """Framework for concatenating multiple source-cubes over one common dimension.""" def __init__(self, cube): - """Create a new _ProtoCube from the given cube and record the cube - as a source-cube. + """Create a new _ProtoCube from the given cube and record the cube as a source-cube. Parameters ---------- @@ -762,7 +762,9 @@ def axis(self): return self._axis def concatenate(self): - """Concatenates all the source-cubes registered with the + """Concatenates all the source-cubes registered with the :class:`_ProtoCube`. + + Concatenates all the source-cubes registered with the :class:`_ProtoCube` over the nominated common dimension. Returns @@ -992,8 +994,7 @@ def register( return match def _add_skeleton(self, coord_signature, data): - """Create and add the source-cube skeleton to the - :class:`_ProtoCube`. + """Create and add the source-cube skeleton to the :class:`_ProtoCube`. Parameters ---------- @@ -1010,7 +1011,9 @@ def _add_skeleton(self, coord_signature, data): self._skeletons.append(skeleton) def _build_aux_coordinates(self): - """Generate the auxiliary coordinates with associated dimension(s) + """Generate the auxiliary coordinates with associated dimension(s) mapping. + + Generate the auxiliary coordinates with associated dimension(s) mapping for the new concatenated cube. Returns @@ -1082,7 +1085,9 @@ def _build_scalar_coordinates(self): return scalar_coords def _build_cell_measures(self): - """Generate the cell measures with associated dimension(s) + """Generate the cell measures with associated dimension(s) mapping. + + Generate the cell measures with associated dimension(s) mapping for the new concatenated cube. Returns @@ -1120,7 +1125,9 @@ def _build_cell_measures(self): return cell_measures_and_dims def _build_ancillary_variables(self): - """Generate the ancillary variables with associated dimension(s) + """Generate the ancillary variables with associated dimension(s) mapping. 
+ + Generate the ancillary variables with associated dimension(s) mapping for the new concatenated cube. Returns @@ -1243,11 +1250,14 @@ def _build_data(self): return data def _build_dim_coordinates(self): - """Generate the dimension coordinates with associated dimension + """Generate the dimension coordinates. + + Generate the dimension coordinates with associated dimension mapping for the new concatenated cube. - Return: - A list of dimension coordinate and dimension tuple pairs. + Returns + ------- + A list of dimension coordinate and dimension tuple pairs. """ # Setup convenience hooks. @@ -1291,7 +1301,9 @@ def _build_dim_coordinates(self): return dim_coords_and_dims def _sequence(self, extent, axis): - """Determine whether the given extent can be sequenced along with + """Determine whether the extent can be sequenced. + + Determine whether the given extent can be sequenced along with all the extents of the source-cubes already registered with this :class:`_ProtoCube` into non-overlapping segments for the given axis. diff --git a/lib/iris/_data_manager.py b/lib/iris/_data_manager.py index 45f1a6b3e2..6db1d16889 100644 --- a/lib/iris/_data_manager.py +++ b/lib/iris/_data_manager.py @@ -37,10 +37,7 @@ def __init__(self, data): self._assert_axioms() def __copy__(self): - """Forbid :class:`~iris._data_manager.DataManager` instance - shallow-copy support. - - """ + """Forbid :class:`~iris._data_manager.DataManager` instance shallow-copy support.""" name = type(self).__name__ emsg = ( "Shallow-copy of {!r} is not permitted. Use " @@ -49,8 +46,7 @@ def __copy__(self): raise copy.Error(emsg.format(name, name)) def __deepcopy__(self, memo): - """Allow :class:`~iris._data_manager.DataManager` instance - deepcopy support. + """Allow :class:`~iris._data_manager.DataManager` instance deepcopy support. Parameters ---------- @@ -62,6 +58,8 @@ def __deepcopy__(self, memo): def __eq__(self, other): """Perform :class:`~iris._data_manager.DataManager` instance equality. 
+ + Perform :class:`~iris._data_manager.DataManager` instance equality. Note that, this is explicitly not a lazy operation and will load any lazy payload to determine the equality result. @@ -95,6 +93,8 @@ def __eq__(self, other): def __ne__(self, other): """Perform :class:`~iris._data_manager.DataManager` instance inequality. + + Perform :class:`~iris._data_manager.DataManager` instance inequality. Note that, this is explicitly not a lazy operation and will load any lazy payload to determine the inequality result. @@ -117,7 +117,7 @@ def __ne__(self, other): return result def __repr__(self): - """Returns an string representation of the instance.""" + """Return an string representation of the instance.""" fmt = "{cls}({data!r})" result = fmt.format(data=self.core_data(), cls=type(self).__name__) @@ -133,8 +133,7 @@ def _assert_axioms(self): assert state, emsg.format("" if is_lazy else "no ", "" if is_real else "no ") def _deepcopy(self, memo, data=None): - """Perform a deepcopy of the :class:`~iris._data_manager.DataManager` - instance. + """Perform a deepcopy of the :class:`~iris._data_manager.DataManager` instance. Parameters ---------- @@ -204,7 +203,9 @@ def data(self): @data.setter def data(self, data): - """Replaces the currently managed data with the specified data, which must + """Replace the currently managed data with the specified data. + + Replace the currently managed data with the specified data, which must be of an equivalent shape. Note that, the only shape promotion permitted is for 0-dimensional @@ -268,8 +269,7 @@ def shape(self): return self.core_data().shape def copy(self, data=None): - """Returns a deep copy of this :class:`~iris._data_manager.DataManager` - instance. + """Return a deep copy of this :class:`~iris._data_manager.DataManager` instance. 
Parameters ---------- @@ -285,7 +285,9 @@ def copy(self, data=None): return self._deepcopy(memo, data=data) def core_data(self): - """If real data is being managed, then return the :class:`~numpy.ndarray` + """Provide real data or lazy data. + + If real data is being managed, then return the :class:`~numpy.ndarray` or :class:`numpy.ma.core.MaskedArray`. Otherwise, return the lazy :class:`~dask.array.core.Array`. diff --git a/lib/iris/_lazy_data.py b/lib/iris/_lazy_data.py index 55a4222675..b430a4d682 100644 --- a/lib/iris/_lazy_data.py +++ b/lib/iris/_lazy_data.py @@ -42,7 +42,9 @@ def is_lazy_data(data): def is_lazy_masked_data(data): - """Return True if the argument is both an Iris 'lazy' data array and the + """Determine whether managed data is lazy and masked. + + Return True if the argument is both an Iris 'lazy' data array and the underlying array is of masked type. Otherwise return False. """ @@ -326,6 +328,8 @@ def _co_realise_lazy_arrays(arrays): def as_concrete_data(data): """Return the actual content of a lazy array, as a numpy array. + + Return the actual content of a lazy array, as a numpy array. If the input data is a NumPy `ndarray` or masked array, return it unchanged. diff --git a/lib/iris/_merge.py b/lib/iris/_merge.py index 79427d401a..7705dd6bea 100644 --- a/lib/iris/_merge.py +++ b/lib/iris/_merge.py @@ -77,7 +77,9 @@ class _CoordMetaData( class _CoordAndDims(namedtuple("CoordAndDims", ["coord", "dims"])): - """Container for a coordinate and the associated data dimension/s + """Container for a coordinate and the associated data dimension/s. + + Container for a coordinate and the associated data dimension/s spanned over a :class:`iris.cube.Cube`. Parameters @@ -96,7 +98,9 @@ class _CoordAndDims(namedtuple("CoordAndDims", ["coord", "dims"])): class _ScalarCoordPayload( namedtuple("ScalarCoordPayload", ["defns", "values", "metadata"]) ): - """Container for all scalar coordinate data and metadata represented + """Container for the payload. 
+ + Container for all scalar coordinate data and metadata represented within a :class:`iris.cube.Cube`. All scalar coordinate related data is sorted into ascending order @@ -123,7 +127,9 @@ class _ScalarCoordPayload( class _VectorCoordPayload( namedtuple("VectorCoordPayload", ["dim_coords_and_dims", "aux_coords_and_dims"]) ): - """Container for all vector coordinate data and metadata represented + """Container for the payload. + + Container for all vector coordinate data and metadata represented within a :class:`iris.cube.Cube`. Parameters @@ -145,7 +151,9 @@ class _VectorCoordPayload( class _CoordPayload(namedtuple("CoordPayload", ["scalar", "vector", "factory_defns"])): - """Container for all the scalar and vector coordinate data and + """Container for the payload. + + Container for all the scalar and vector coordinate data and metadata, and auxiliary coordinate factories represented within a :class:`iris.cube.Cube`. @@ -200,7 +208,9 @@ def _coords_msgs(msgs, coord_group, defns_a, defns_b): ) def match_signature(self, signature, error_on_mismatch): - """Return whether this _CoordPayload matches the corresponding + """Check if _CoordPayload matches the corresponding aspects of a _CoordSignature. + + Return whether this _CoordPayload matches the corresponding aspects of a _CoordSignature. Parameters @@ -273,7 +283,9 @@ class _CoordSignature( ], ) ): - """Criterion for identifying a specific type of :class:`iris.cube.Cube` + """Criterion for identifying a specific type of :class:`iris.cube.Cube`. + + Criterion for identifying a specific type of :class:`iris.cube.Cube` based on its scalar and vector coordinate data and metadata, and auxiliary coordinate factories. @@ -311,8 +323,7 @@ class _CubeSignature( ], ) ): - """Criterion for identifying a specific type of :class:`iris.cube.Cube` - based on its metadata. + """Criterion for identifying specific type of :class:`iris.cube.Cube` based on its metadata. 
Parameters ---------- @@ -435,7 +446,9 @@ def match(self, other, error_on_mismatch): class _Skeleton(namedtuple("Skeleton", ["scalar_values", "data"])): - """Basis of a source-cube, containing the associated scalar coordinate values + """Basis of a source-cube. + + Basis of a source-cube, containing the associated scalar coordinate values and data payload of a :class:`iris.cube.Cube`. Parameters @@ -470,7 +483,9 @@ class _FactoryDefn(namedtuple("_FactoryDefn", ["class_", "dependency_defns"])): class _Relation(namedtuple("Relation", ["separable", "inseparable"])): - """Categorisation of the candidate dimensions belonging to a + """Categorisation of the candidate dimensions. + + Categorisation of the candidate dimensions belonging to a :class:`ProtoCube` into separable 'independent' dimensions, and inseparable dependent dimensions. @@ -490,7 +505,9 @@ class _Relation(namedtuple("Relation", ["separable", "inseparable"])): def _is_combination(name): - """Determine whether the candidate dimension is an 'invented' combination + """Determine whether the candidate dimension is an 'invented' combination. + + Determine whether the candidate dimension is an 'invented' combination of candidate dimensions. Parameters @@ -507,7 +524,9 @@ def _is_combination(name): def build_indexes(positions): - r"""Construct a mapping for each candidate dimension that maps for each + r"""Construct a mapping for each candidate dimension. + + Construct a mapping for each candidate dimension that maps for each of its scalar values the set of values for each of the other candidate dimensions. @@ -600,7 +619,9 @@ def _separable_pair(name, index): def _separable(name, indexes): - """Determine the candidate dimensions that are separable and + """Determine the candidate dimensions that are separable and inseparable. + + Determine the candidate dimensions that are separable and inseparable relative to the provided candidate dimension. 
A candidate dimension X and Y are separable if each scalar @@ -635,7 +656,9 @@ def _separable(name, indexes): def derive_relation_matrix(indexes): - """Construct a mapping for each candidate dimension that specifies + """Construct a mapping for each candidate dimension. + + Construct a mapping for each candidate dimension that specifies which of the other candidate dimensions are separable or inseparable. A candidate dimension X and Y are separable if each scalar value of @@ -737,7 +760,9 @@ def _derive_separable_group(relation_matrix, group): def _is_dependent(dependent, independent, positions, function_mapping=None): - """Determine whether there exists a one-to-one functional relationship + """Determine whether there exists a one-to-one functional relationship. + + Determine whether there exists a one-to-one functional relationship between the independent candidate dimension/s and the dependent candidate dimension. @@ -782,7 +807,9 @@ def _is_dependent(dependent, independent, positions, function_mapping=None): def _derive_consistent_groups(relation_matrix, separable_group): - """Determine the largest combinations of candidate dimensions within the + """Determine the largest combinations of candidate dimensions. + + Determine the largest combinations of candidate dimensions within the separable group that are self consistently separable from one another. If the candidate dimension A is separable from the candidate dimensions @@ -830,7 +857,9 @@ def _derive_consistent_groups(relation_matrix, separable_group): def _build_separable_group( space, group, separable_consistent_groups, positions, function_matrix ): - """Update the space with the first separable consistent group that + """Update the space with the first separable consistent group. + + Update the space with the first separable consistent group that satisfies a valid functional relationship with all other candidate dimensions in the group. 
@@ -891,7 +920,9 @@ def _build_separable_group( def _build_inseparable_group(space, group, positions, function_matrix): - """Update the space with the first valid scalar functional relationship + """Update the space with the first valid scalar functional relationship. + + Update the space with the first valid scalar functional relationship between a candidate dimension within the group and all other candidate dimensions. @@ -956,7 +987,9 @@ def _build_inseparable_group(space, group, positions, function_matrix): def _build_combination_group(space, group, positions, function_matrix): - """Update the space with the new combined or invented dimension + """Update the space with the new combined or invented dimension. + + Update the space with the new combined or invented dimension that each member of this inseparable group depends on. As no functional relationship between members of the group can be @@ -1065,10 +1098,7 @@ def derive_space(groups, relation_matrix, positions, function_matrix=None): class ProtoCube: - """Framework for merging source-cubes into one or more higher - dimensional cubes. - - """ + """Framework for merging source-cubes into one or more higher dimensional cubes.""" def __init__(self, cube): """Create a new ProtoCube from the given cube. @@ -1152,8 +1182,7 @@ def _report_duplicate(self, nd_indexes, group_by_nd_index): raise iris.exceptions.DuplicateDataError(msg) def merge(self, unique=True): - """Returns the list of cubes resulting from merging the registered - source-cubes. + """Return the list of cubes resulting from merging the registered source-cubes. Parameters ---------- @@ -1251,7 +1280,9 @@ def merge(self, unique=True): return merged_cubes def register(self, cube, error_on_mismatch=False): - """Add a compatible :class:`iris.cube.Cube` as a source-cube for + """Add a compatible :class:`iris.cube.Cube` as a source for merging. + + Add a compatible :class:`iris.cube.Cube` as a source-cube for merging under this :class:`ProtoCube`. 
A cube will be deemed compatible based on the signature of the @@ -1288,7 +1319,7 @@ def register(self, cube, error_on_mismatch=False): return match def _guess_axis(self, name): - """Returns a "best guess" axis name of the candidate dimension. + """Return a "best guess" axis name of the candidate dimension. Heuristic categoration of the candidate dimension (i.e. scalar_defn index) into either label 'T', 'Z', 'Y', 'X' @@ -1316,7 +1347,9 @@ return axis def _define_space(self, space, positions, indexes, function_matrix): - """Given the derived :class:`ProtoCube` space, define this space in + """Define space. + + Given the derived :class:`ProtoCube` space, define this space in terms of its dimensionality, shape, coordinates and associated coordinate to space dimension mappings. @@ -1489,7 +1522,9 @@ def name_in_independents(): self._shape.extend(signature.data_shape) def _get_cube(self, data): - """Return a fully constructed cube for the given data, containing + """Generate fully constructed cube. + + Return a fully constructed cube for the given data, containing all its coordinates and metadata. """ @@ -1529,10 +1564,7 @@ return cube def _nd_index(self, position): - """Returns the n-dimensional index of this source-cube (position), - within the merged cube. - - """ + """Return the n-dimensional index of the source-cube, within the merged cube.""" index = [] # Determine the index of the source-cube cell for each dimension. @@ -1550,7 +1582,9 @@ return tuple(index) def _build_coordinates(self): - """Build the dimension and auxiliary coordinates for the final + """Build the dimension and auxiliary coordinates. + + Build the dimension and auxiliary coordinates for the final merged cube given that the final dimensionality of the target merged cube is known and the associated dimension/s that each coordinate maps onto in that merged cube. 
diff --git a/lib/iris/_representation/cube_printout.py b/lib/iris/_representation/cube_printout.py index 9cbf43f300..47c0b8b631 100644 --- a/lib/iris/_representation/cube_printout.py +++ b/lib/iris/_representation/cube_printout.py @@ -9,7 +9,9 @@ class Table: - """A container of text strings in rows + columns, that can format its content + """A container of text strings in rows and columns. + + A container of text strings in rows + columns, that can format its content into a string per row, with contents in columns of fixed width. Supports left- or right- aligned columns, alignment being set "per row". @@ -116,7 +118,9 @@ def __str__(self): class CubePrinter: - """An object created from a + """An object created from a cube summary. + + An object created from a :class:`iris._representation.CubeSummary`, which provides text printout of a :class:`iris.cube.Cube`. @@ -130,7 +134,7 @@ class CubePrinter: N_INDENT_EXTRA = 4 def __init__(self, cube_or_summary): - """An object that provides a printout of a cube. + """Object that provides a printout of a cube. Parameters ---------- diff --git a/lib/iris/analysis/_area_weighted.py b/lib/iris/analysis/_area_weighted.py index 24c608e98e..af07b5e069 100644 --- a/lib/iris/analysis/_area_weighted.py +++ b/lib/iris/analysis/_area_weighted.py @@ -21,8 +21,7 @@ class AreaWeightedRegridder: """Provide support for performing area-weighted regridding.""" def __init__(self, src_grid_cube, target_grid_cube, mdtol=1): - """Create an area-weighted regridder for conversions between the source - and target grids. + """Create an area-weighted regridder for conversions between the source and target grids. Parameters ---------- @@ -75,8 +74,7 @@ def __init__(self, src_grid_cube, target_grid_cube, mdtol=1): ) = _regrid_info def __call__(self, cube): - """Regrid this :class:`~iris.cube.Cube` onto the target grid of - this :class:`AreaWeightedRegridder`. + """Regrid :class:`~iris.cube.Cube` onto target grid :class:`AreaWeightedRegridder`. 
The given cube must be defined with the same grid as the source grid used to create this :class:`AreaWeightedRegridder`. diff --git a/lib/iris/analysis/_grid_angles.py b/lib/iris/analysis/_grid_angles.py index d50f55125f..109e83d73b 100644 --- a/lib/iris/analysis/_grid_angles.py +++ b/lib/iris/analysis/_grid_angles.py @@ -2,7 +2,10 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -"""Code to implement vector rotation by angles, and inferring gridcell angles + +"""Implement vector rotation by angles. + +Code to implement vector rotation by angles, and inferring gridcell angles from coordinate points and bounds. """ @@ -67,7 +70,9 @@ def _latlon_from_xyz(xyz): def _angle(p, q, r): - """Estimate grid-angles to true-Eastward direction from positions in the same + """Estimate grid-angles to true-Eastward direction. + + Estimate grid-angles to true-Eastward direction from positions in the same grid row, but at increasing column (grid-Eastward) positions. {P, Q, R} are locations of consecutive points in the same grid row. diff --git a/lib/iris/analysis/_interpolation.py b/lib/iris/analysis/_interpolation.py index 518b57e63a..59863e3485 100644 --- a/lib/iris/analysis/_interpolation.py +++ b/lib/iris/analysis/_interpolation.py @@ -57,7 +57,8 @@ def convert_date(date): def extend_circular_coord(coord, points): - """Return coordinates points with a shape extended by one + """Return coordinate points with a shape extended by one. + This is common when dealing with circular coordinates. """ @@ -67,7 +68,9 @@ def extend_circular_coord(coord, points): def extend_circular_coord_and_data(coord, data, coord_dim): - """Return coordinate points and a data array with a shape extended by one + """Return coordinate points and data with a shape extended by one in the provided axis. + + Return coordinate points and a data array with a shape extended by one in the coord_dim axis. 
This is common when dealing with circular coordinates. @@ -155,10 +158,7 @@ def get_xy_coords(cube, dim_coords=False): def snapshot_grid(cube): - """Helper function that returns deep copies of lateral (dimension) coordinates - from a cube. - - """ + """Return deep copies of lateral (dimension) coordinates from a cube.""" x, y = get_xy_dim_coords(cube) return x.copy(), y.copy() @@ -251,10 +251,7 @@ def extrapolation_mode(self): return self._mode def _account_for_circular(self, points, data): - """Extend the given data array, and re-centralise coordinate points - for circular (1D) coordinates. - - """ + """Extend data array, and re-centralise coordinate points for circular (1D) coordinates.""" from iris.analysis.cartography import wrap_lons for circular, modulus, index, dim, offset in self._circulars: @@ -383,7 +380,9 @@ def _resample_coord(self, sample_points, coord, coord_dims): return new_coord def _setup(self): - """Perform initial start-up configuration and validation based on the + """Perform initial start-up configuration and validation. + + Perform initial start-up configuration and validation based on the cube and the specified coordinates to be interpolated over. """ @@ -434,7 +433,9 @@ def _setup(self): self._validate() def _validate(self): - """Perform all sanity checks to ensure that the interpolation request + """Perform checks to ensure interpolation request is valid. + + Perform all sanity checks to ensure that the interpolation request over the cube with the specified coordinates is valid and can be performed. @@ -458,10 +459,7 @@ def _validate(self): raise ValueError(msg.format(coord.name())) def _interpolated_dtype(self, dtype): - """Determine the minimum base dtype required by the - underlying interpolator. 
- - """ + """Determine the minimum base dtype required by the underlying interpolator.""" if self._method == "nearest": result = dtype else: @@ -469,7 +467,9 @@ def _interpolated_dtype(self, dtype): return result def _points(self, sample_points, data, data_dims=None): - """Interpolate the given data values at the specified list of orthogonal + """Interpolate at the specified points. + + Interpolate the given data values at the specified list of orthogonal (coord, points) pairs. Parameters diff --git a/lib/iris/analysis/_regrid.py b/lib/iris/analysis/_regrid.py index 0f50e47f5b..321194d1fc 100644 --- a/lib/iris/analysis/_regrid.py +++ b/lib/iris/analysis/_regrid.py @@ -376,8 +376,7 @@ class CurvilinearRegridder: """ def __init__(self, src_grid_cube, target_grid_cube, weights=None): - """Create a regridder for conversions between the source - and target grids. + """Create a regridder for conversions between the source and target grids. Parameters ---------- @@ -408,8 +407,7 @@ def __init__(self, src_grid_cube, target_grid_cube, weights=None): @staticmethod def _get_horizontal_coord(cube, axis): - """Gets the horizontal coordinate on the supplied cube along the - specified axis. + """Get the horizontal coordinate on the supplied cube along the specified axis. Parameters ---------- @@ -433,7 +431,9 @@ def _get_horizontal_coord(cube, axis): return coords[0] def __call__(self, src): - """Regrid the supplied :class:`~iris.cube.Cube` on to the target grid of + """Regrid onto the target grid. + + Regrid the supplied :class:`~iris.cube.Cube` on to the target grid of this :class:`_CurvilinearRegridder`. The given cube must be defined with the same grid as the source @@ -494,8 +494,7 @@ class RectilinearRegridder: """ def __init__(self, src_grid_cube, tgt_grid_cube, method, extrapolation_mode): - """Create a regridder for conversions between the source - and target grids. + """Create a regridder for conversions between the source and target grids. 
Parameters ---------- @@ -556,7 +555,9 @@ def extrapolation_mode(self): @staticmethod def _sample_grid(src_coord_system, grid_x_coord, grid_y_coord): - """Convert the rectilinear grid coordinates to a curvilinear grid in + """Convert the rectilinear grid to a curvilinear grid. + + Convert the rectilinear grid coordinates to a curvilinear grid in the source coordinate system. The `grid_x_coord` and `grid_y_coord` must share a common coordinate @@ -851,7 +852,9 @@ def _check_units(self, coord): raise ValueError(msg) def __call__(self, src): - """Regrid this :class:`~iris.cube.Cube` on to the target grid of + """Regrid onto target grid. + + Regrid this :class:`~iris.cube.Cube` on to the target grid of this :class:`RectilinearRegridder`. The given cube must be defined with the same grid as the source diff --git a/lib/iris/analysis/cartography.py b/lib/iris/analysis/cartography.py index 78b903c971..58c10c1926 100644 --- a/lib/iris/analysis/cartography.py +++ b/lib/iris/analysis/cartography.py @@ -83,7 +83,12 @@ def wrap_lons(lons, base, period): def unrotate_pole(rotated_lons, rotated_lats, pole_lon, pole_lat): - """Convert arrays of rotated-pole longitudes and latitudes to unrotated + """Convert rotated-pole to unrotated longitudes and latitudes. + + ``pole_lat`` should describe the location of the rotated pole that + describes the arrays of rotated-pole longitudes and latitudes. + + Convert arrays of rotated-pole longitudes and latitudes to unrotated arrays of longitudes and latitudes. The values of ``pole_lon`` and ``pole_lat`` should describe the location of the rotated pole that describes the arrays of rotated-pole longitudes and latitudes. @@ -128,8 +133,9 @@ def unrotate_pole(rotated_lons, rotated_lats, pole_lon, pole_lat): def rotate_pole(lons, lats, pole_lon, pole_lat): - """Convert arrays of longitudes and latitudes to arrays of rotated-pole - longitudes and latitudes. 
The values of ``pole_lon`` and ``pole_lat`` + """Convert unrotated longitudes and latitudes to rotated-pole. + + The values of ``pole_lon`` and ``pole_lat`` should describe the rotated pole that the arrays of longitudes and latitudes are to be rotated onto. @@ -370,7 +376,7 @@ def _quadrant_area(radian_lat_bounds, radian_lon_bounds, radius_of_earth): def area_weights(cube, normalize=False): - r"""Returns an array of area weights, with the same dimensions as the cube. + r"""Return an array of area weights, with the same dimensions as the cube. This is a 2D lat/lon area weights array, repeated over the non lat/lon dimensions. @@ -490,7 +496,9 @@ def area_weights(cube, normalize=False): def cosine_latitude_weights(cube): - r"""Returns an array of latitude weights, with the same dimensions as + r"""Calculate cosine latitude weights, with the same dimensions as the cube. + + Return an array of latitude weights, with the same dimensions as the cube. The weights are the cosine of latitude. These are n-dimensional latitude weights repeated over the dimensions @@ -881,8 +889,7 @@ def project(cube, target_proj, nx=None, ny=None): def _transform_xy(crs_from, x, y, crs_to): - """Shorthand function to transform 2d points between coordinate - reference systems. + """Shorthand function to transform 2d points between coordinate reference systems. Parameters ---------- @@ -989,7 +996,9 @@ def _crs_distance_differentials(crs, x, y): def _transform_distance_vectors(u_dist, v_dist, ds, dx2, dy2): - """Transform distance vectors from one coordinate reference system to + """Transform distance vectors to another coordinate reference system. + + Transform distance vectors from one coordinate reference system to another, preserving magnitude and physical direction. 
Parameters @@ -1022,7 +1031,9 @@ def _transform_distance_vectors(u_dist, v_dist, ds, dx2, dy2): def _transform_distance_vectors_tolerance_mask(src_crs, x, y, tgt_crs, ds, dx2, dy2): - """Return a mask that can be applied to data array to mask elements + """Return a mask that can be applied to data array to mask elements. + + Return a mask that can be applied to data array to mask elements where the magnitude of vectors are not preserved due to numerical errors introduced by the transformation between coordinate systems. diff --git a/lib/iris/analysis/geometry.py b/lib/iris/analysis/geometry.py index 0e44990155..a76498bf45 100644 --- a/lib/iris/analysis/geometry.py +++ b/lib/iris/analysis/geometry.py @@ -18,10 +18,11 @@ def _extract_relevant_cube_slice(cube, geometry): - """Given a shapely geometry object, this helper method returns - the tuple + """Calculate geometry intersection with spatial region defined by cube. + + This helper method returns the tuple (subcube, x_coord_of_subcube, y_coord_of_subcube, - (min_x_index, min_y_index, max_x_index, max_y_index)). + (min_x_index, min_y_index, max_x_index, max_y_index)). If cube and geometry don't overlap, returns None. @@ -134,7 +135,9 @@ def _extract_relevant_cube_slice(cube, geometry): def geometry_area_weights(cube, geometry, normalize=False): - """Returns the array of weights corresponding to the area of overlap between + """Return the array of weights corresponding to the area of overlap. + + Return the array of weights corresponding to the area of overlap between the cells of cube's horizontal grid, and the given shapely geometry. The returned array is suitable for use with :const:`iris.analysis.MEAN`. 
diff --git a/lib/iris/analysis/maths.py b/lib/iris/analysis/maths.py index 50b8b21a28..216d7e3892 100644 --- a/lib/iris/analysis/maths.py +++ b/lib/iris/analysis/maths.py @@ -30,7 +30,9 @@ @lru_cache(maxsize=128, typed=True) def _output_dtype(op, first_dtype, second_dtype=None, in_place=False): - """Get the numpy dtype corresponding to the result of applying a unary or + """Get the numpy dtype. + + Get the numpy dtype corresponding to the result of applying a unary or binary operation to arguments of specified dtype. Parameters @@ -204,7 +206,9 @@ def _assert_is_cube(cube): @_lenient_client(services=SERVICES) def add(cube, other, dim=None, in_place=False): - """Calculate the sum of two cubes, or the sum of a cube and a coordinate or + """Calculate the sum. + + Calculate the sum of two cubes, or the sum of a cube and a coordinate or array or scalar value. When summing two cubes, they must both have the same coordinate systems and @@ -257,7 +261,9 @@ def add(cube, other, dim=None, in_place=False): @_lenient_client(services=SERVICES) def subtract(cube, other, dim=None, in_place=False): - """Calculate the difference between two cubes, or the difference between + """Calculate the difference. + + Calculate the difference between two cubes, or the difference between a cube and a coordinate or array or scalar value. When differencing two cubes, they must both have the same coordinate systems @@ -317,8 +323,7 @@ def _add_subtract_common( dim=None, in_place=False, ): - """Function which shares common code between addition and subtraction - of cubes. + """Share common code between addition and subtraction of cubes. Parameters ---------- @@ -366,7 +371,9 @@ def _add_subtract_common( @_lenient_client(services=SERVICES) def multiply(cube, other, dim=None, in_place=False): - """Calculate the product of two cubes, or the product of a cube and a coordinate + """Calculate the product. 
+ + Calculate the product of two cubes, or the product of a cube and a coordinate or array or scalar value. When multiplying two cubes, they must both have the same coordinate systems @@ -431,7 +438,9 @@ def multiply(cube, other, dim=None, in_place=False): def _inplace_common_checks(cube, other, math_op): - """Check whether an inplace math operation can take place between `cube` and + """Check if an inplace math operation can take place. + + Check whether an inplace math operation can take place between `cube` and `other`. It cannot if `cube` has integer data and `other` has float data as the operation will always produce float data that cannot be 'safely' cast back to the integer data of `cube`. @@ -450,7 +459,9 @@ def _inplace_common_checks(cube, other, math_op): @_lenient_client(services=SERVICES) def divide(cube, other, dim=None, in_place=False): - """Calculate the ratio of two cubes, or the ratio of a cube and a coordinate + """Calculate the ratio. + + Calculate the ratio of two cubes, or the ratio of a cube and a coordinate or array or scalar value. When dividing a cube by another cube, they must both have the same coordinate @@ -521,7 +532,7 @@ def divide(cube, other, dim=None, in_place=False): def exponentiate(cube, exponent, in_place=False): - """Returns the result of the given cube to the power of a scalar. + """Return the result of the given cube to the power of a scalar. Parameters ---------- @@ -692,7 +703,9 @@ def log10(cube, in_place=False): def apply_ufunc(ufunc, cube, other=None, new_unit=None, new_name=None, in_place=False): - """Apply a `numpy universal function + """Apply a `numpy universal function <https://docs.scipy.org/doc/numpy/reference/ufuncs.html>`_ to a cube. + + Apply a `numpy universal function <https://docs.scipy.org/doc/numpy/reference/ufuncs.html>`_ to a cube or pair of cubes. @@ -808,7 +821,7 @@ def _binary_op_common( in_place=False, sanitise_metadata=True, ): - """Function which shares common code between binary operations. 
Parameters ---------- @@ -958,7 +971,9 @@ def _broadcast_cube_coord_data(cube, other, operation_name, dim=None): def _sanitise_metadata(cube, unit): - """As part of the maths metadata contract, clear the necessary or + """Clear appropriate metadata from the resultant cube. + + As part of the maths metadata contract, clear the necessary or unsupported metadata from the resultant cube of the maths operation. """ @@ -1157,7 +1172,7 @@ def __call__( new_name=None, **kwargs_data_func, ): - """Applies the ifunc to the cube(s). + """Apply the ifunc to the cube(s). Parameters ---------- diff --git a/lib/iris/analysis/stats.py b/lib/iris/analysis/stats.py index 33eb171336..f014dd5013 100644 --- a/lib/iris/analysis/stats.py +++ b/lib/iris/analysis/stats.py @@ -4,13 +4,16 @@ # See LICENSE in the root of the repository for full licensing details. """Statistical operations between cubes.""" +import dask.array as da import numpy as np -import numpy.ma as ma import iris -from iris.util import broadcast_to_shape +from iris.common import SERVICES, Resolve +from iris.common.lenient import _lenient_client +from iris.util import _mask_array +@_lenient_client(services=SERVICES) def pearsonr( cube_a, cube_b, @@ -19,35 +22,36 @@ def pearsonr( mdtol=1.0, common_mask=False, ): - """Calculate the Pearson's r correlation coefficient over specified - dimensions. + """Calculate the Pearson's r correlation coefficient over specified dimensions. Parameters ---------- - cube_a, cube_b : cubes + cube_a, cube_b : :class:`iris.cube.Cube` Cubes between which the correlation will be calculated. The cubes should either be the same shape and have the same dimension coordinates - or one cube should be broadcastable to the other. + or one cube should be broadcastable to the other. Broadcasting rules + are the same as those for cube arithmetic (see :ref:`cube maths`). corr_coords : str or list of str The cube coordinate name(s) over which to calculate correlations. 
If no names are provided then correlation will be calculated over all common cube dimensions. weights : :class:`numpy.ndarray`, optional - Weights array of same shape as (the smaller of) cube_a and cube_b. Note - that latitude/longitude area weights can be calculated using + Weights array of same shape as (the smaller of) `cube_a` and `cube_b`. + Note that latitude/longitude area weights can be calculated using :func:`iris.analysis.cartography.area_weights`. mdtol : float, default=1.0 Tolerance of missing data. The missing data fraction is calculated - based on the number of grid cells masked in both cube_a and cube_b. If - this fraction exceed mdtol, the returned value in the corresponding - cell is masked. mdtol=0 means no missing data is tolerated while - mdtol=1 means the resulting element will be masked if and only if all - contributing elements are masked in cube_a or cube_b. Defaults to 1. + based on the number of grid cells masked in both `cube_a` and `cube_b`. + If this fraction exceed `mdtol`, the returned value in the + corresponding cell is masked. `mdtol` =0 means no missing data is + tolerated while `mdtol` =1 means the resulting element will be masked + if and only if all contributing elements are masked in `cube_a` or + `cube_b`. common_mask : bool, default=False - If True, applies a common mask to cube_a and cube_b so only cells which - are unmasked in both cubes contribute to the calculation. If False, the - variance for each cube is calculated from all available cells. Defaults - to False. + If ``True``, applies a common mask to cube_a and cube_b so only cells + which are unmasked in both cubes contribute to the calculation. If + ``False``, the variance for each cube is calculated from all available + cells. Returns ------- @@ -57,19 +61,19 @@ def pearsonr( cubes. 
For example providing two time/altitude/latitude/longitude cubes and - corr_coords of 'latitude' and 'longitude' will result in a + `corr_coords` of 'latitude' and 'longitude' will result in a time/altitude cube describing the latitude/longitude (i.e. pattern) correlation at each time/altitude point. Notes ----- + If either of the input cubes has lazy data, the result will have lazy data. + Reference: https://en.wikipedia.org/wiki/Pearson_correlation_coefficient - This operation is non-lazy. - """ - # Assign larger cube to cube_1 + # Assign larger cube to cube_1 for simplicity. if cube_b.ndim > cube_a.ndim: cube_1 = cube_b cube_2 = cube_a @@ -79,90 +83,88 @@ def pearsonr( smaller_shape = cube_2.shape - dim_coords_1 = [coord.name() for coord in cube_1.dim_coords] - dim_coords_2 = [coord.name() for coord in cube_2.dim_coords] - common_dim_coords = list(set(dim_coords_1) & set(dim_coords_2)) + # Get the broadcast, auto-transposed safe versions of the cube operands. + resolver = Resolve(cube_1, cube_2) + lhs_cube_resolved = resolver.lhs_cube_resolved + rhs_cube_resolved = resolver.rhs_cube_resolved + + if lhs_cube_resolved.has_lazy_data() or rhs_cube_resolved.has_lazy_data(): + al = da + array_lhs = lhs_cube_resolved.lazy_data() + array_rhs = rhs_cube_resolved.lazy_data() + else: + al = np + array_lhs = lhs_cube_resolved.data + array_rhs = rhs_cube_resolved.data + # If no coords passed then set to all common dimcoords of cubes. if corr_coords is None: - corr_coords = common_dim_coords - - def _ones_like(cube): - # Return a copy of cube with the same mask, but all data values set to 1. - # The operation is non-lazy. - # For safety we also discard any cell-measures and ancillary-variables, to - # avoid cube arithmetic possibly objecting to them, or inadvertently retaining - # them in the result where they might be inappropriate. 
- ones_cube = cube.copy() - ones_cube.data = np.ones_like(cube.data) - ones_cube.rename("unknown") - ones_cube.units = 1 - for cm in ones_cube.cell_measures(): - ones_cube.remove_cell_measure(cm) - for av in ones_cube.ancillary_variables(): - ones_cube.remove_ancillary_variable(av) - return ones_cube + dim_coords_1 = {coord.name() for coord in lhs_cube_resolved.dim_coords} + dim_coords_2 = {coord.name() for coord in rhs_cube_resolved.dim_coords} + corr_coords = list(dim_coords_1.intersection(dim_coords_2)) + + # Interpret coords as array dimensions. + corr_dims = set() + if isinstance(corr_coords, str): + corr_coords = [corr_coords] + for coord in corr_coords: + corr_dims.update(lhs_cube_resolved.coord_dims(coord)) + + corr_dims = tuple(corr_dims) # Match up data masks if required. if common_mask: - # Create a cube of 1's with a common mask. - if ma.is_masked(cube_2.data): - mask_cube = _ones_like(cube_2) - else: - mask_cube = 1.0 - if ma.is_masked(cube_1.data): - # Take a slice to avoid unnecessary broadcasting of cube_2. - slice_coords = [ - dim_coords_1[i] - for i in range(cube_1.ndim) - if dim_coords_1[i] not in common_dim_coords - and np.array_equal( - cube_1.data.mask.any(axis=i), cube_1.data.mask.all(axis=i) - ) - ] - cube_1_slice = next(cube_1.slices_over(slice_coords)) - mask_cube = _ones_like(cube_1_slice) * mask_cube - # Apply common mask to data. - if isinstance(mask_cube, iris.cube.Cube): - cube_1 = cube_1 * mask_cube - cube_2 = mask_cube * cube_2 - dim_coords_2 = [coord.name() for coord in cube_2.dim_coords] - - # Broadcast weights to shape of cubes if necessary. - if weights is None or cube_1.shape == smaller_shape: - weights_1 = weights - weights_2 = weights + mask_lhs = al.ma.getmaskarray(array_lhs) + if al is np: + # Reduce all invariant dimensions of mask_lhs to length 1. This avoids + # unnecessary broadcasting of array_rhs. 
+ index = tuple( + slice(0, 1) + if np.array_equal(mask_lhs.any(axis=dim), mask_lhs.all(axis=dim)) + else slice(None) + for dim in range(mask_lhs.ndim) + ) + mask_lhs = mask_lhs[index] + + array_rhs = _mask_array(array_rhs, mask_lhs) + array_lhs = _mask_array(array_lhs, al.ma.getmaskarray(array_rhs)) + + # Broadcast weights to shape of arrays if necessary. + if weights is None: + weights_lhs = weights_rhs = None else: if weights.shape != smaller_shape: - raise ValueError( - "weights array should have dimensions {}".format(smaller_shape) - ) + msg = f"weights array should have dimensions {smaller_shape}" + raise ValueError(msg) - dims_1_common = [ - i for i in range(cube_1.ndim) if dim_coords_1[i] in common_dim_coords - ] - weights_1 = broadcast_to_shape(weights, cube_1.shape, dims_1_common) - if cube_2.shape != smaller_shape: - dims_2_common = [ - i for i in range(cube_2.ndim) if dim_coords_2[i] in common_dim_coords - ] - weights_2 = broadcast_to_shape(weights, cube_2.shape, dims_2_common) - else: - weights_2 = weights + wt_resolver = Resolve(cube_1, cube_2.copy(weights)) + weights = wt_resolver.rhs_cube_resolved.data + weights_rhs = np.broadcast_to(weights, array_rhs.shape) + weights_lhs = np.broadcast_to(weights, array_lhs.shape) # Calculate correlations. 
- s1 = cube_1 - cube_1.collapsed(corr_coords, iris.analysis.MEAN, weights=weights_1) - s2 = cube_2 - cube_2.collapsed(corr_coords, iris.analysis.MEAN, weights=weights_2) - - covar = (s1 * s2).collapsed( - corr_coords, iris.analysis.SUM, weights=weights_1, mdtol=mdtol + s_lhs = array_lhs - al.ma.average( + array_lhs, axis=corr_dims, weights=weights_lhs, keepdims=True + ) + s_rhs = array_rhs - al.ma.average( + array_rhs, axis=corr_dims, weights=weights_rhs, keepdims=True ) - var_1 = (s1**2).collapsed(corr_coords, iris.analysis.SUM, weights=weights_1) - var_2 = (s2**2).collapsed(corr_coords, iris.analysis.SUM, weights=weights_2) - denom = iris.analysis.maths.apply_ufunc( - np.sqrt, var_1 * var_2, new_unit=covar.units + s_prod = resolver.cube(s_lhs * s_rhs) + + # Use cube collapsed method as it takes care of coordinate collapsing and missing + # data tolerance. + covar = s_prod.collapsed( + corr_coords, iris.analysis.SUM, weights=weights_lhs, mdtol=mdtol ) + + var_lhs = iris.analysis._sum(s_lhs**2, axis=corr_dims, weights=weights_lhs) + var_rhs = iris.analysis._sum(s_rhs**2, axis=corr_dims, weights=weights_rhs) + + denom = np.sqrt(var_lhs * var_rhs) + corr_cube = covar / denom corr_cube.rename("Pearson's r") + corr_cube.units = 1 return corr_cube diff --git a/lib/iris/analysis/trajectory.py b/lib/iris/analysis/trajectory.py index c46fa2ebea..9adb409970 100644 --- a/lib/iris/analysis/trajectory.py +++ b/lib/iris/analysis/trajectory.py @@ -2,10 +2,7 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -"""Defines a Trajectory class, and a routine to extract a sub-cube along a -trajectory. 
- -""" +"""Defines a Trajectory class, and a routine to extract a sub-cube along a trajectory.""" import math @@ -40,7 +37,7 @@ class Trajectory: """A series of given waypoints with pre-calculated sample points.""" def __init__(self, waypoints, sample_count=10): - """Defines a trajectory using a sequence of waypoints. + """Define a trajectory using a sequence of waypoints. Parameters ---------- @@ -115,8 +112,7 @@ def __repr__(self): ) def _get_interp_points(self): - """Translate `self.sampled_points` to the format expected by the - interpolator. + """Translate `self.sampled_points` to the format expected by the interpolator. Returns ------- @@ -132,7 +128,9 @@ def _get_interp_points(self): return [(k, v) for k, v in points.items()] def _src_cube_anon_dims(self, cube): - """A helper method to locate the index of anonymous dimensions on the + """Locate the index of anonymous dimensions. + + A helper method to locate the index of anonymous dimensions on the interpolation target, ``cube``. Returns @@ -144,7 +142,9 @@ def _src_cube_anon_dims(self, cube): return list(set(range(cube.ndim)) - set(named_dims)) def interpolate(self, cube, method=None): - """Calls :func:`~iris.analysis.trajectory.interpolate` to interpolate + """Interpolate ``cube`` on the defined trajectory. + + Call :func:`~iris.analysis.trajectory.interpolate` to interpolate ``cube`` on the defined trajectory. Assumes that the coordinate names supplied in the waypoints @@ -480,6 +480,7 @@ def _ll_to_cart(lon, lat): def _cartesian_sample_points(sample_points, sample_point_coord_names): """Replace geographic lat/lon with cartesian xyz. + Generates coords suitable for nearest point calculations with `scipy.spatial.cKDTree`. 
@@ -529,7 +530,9 @@ def _cartesian_sample_points(sample_points, sample_point_coord_names): def _nearest_neighbour_indices_ndcoords(cube, sample_points, cache=None): - """Returns the indices to select the data value(s) closest to the given + """Calculate the cube nearest neighbour indices for the samples. + + Return the indices to select the data value(s) closest to the given coordinate point values. 'sample_points' is of the form [[coord-or-coord-name, point-value(s)]*]. @@ -696,7 +699,9 @@ def _nearest_neighbour_indices_ndcoords(cube, sample_points, cache=None): class UnstructuredNearestNeigbourRegridder: - """Encapsulate the operation of :meth:`iris.analysis.trajectory.interpolate` + """Encapsulate the operation of :meth:`iris.analysis.trajectory.interpolate`. + + Encapsulate the operation of :meth:`iris.analysis.trajectory.interpolate` with given source and target grids. This is the type used by the :class:`~iris.analysis.UnstructuredNearest` @@ -707,7 +712,9 @@ class UnstructuredNearestNeigbourRegridder: # TODO: cache the necessary bits of the operation so reuse can actually # be more efficient. def __init__(self, src_cube, target_grid_cube): - """A nearest-neighbour regridder to perform regridding from the source + """Nearest-neighbour regridder. + + A nearest-neighbour regridder to perform regridding from the source grid to the target grid. This can then be applied to any source data with the same structure as diff --git a/lib/iris/common/_split_attribute_dicts.py b/lib/iris/common/_split_attribute_dicts.py index 95dbcbb7b3..17b3014fb1 100644 --- a/lib/iris/common/_split_attribute_dicts.py +++ b/lib/iris/common/_split_attribute_dicts.py @@ -2,8 +2,7 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -"""Dictionary operations for dealing with the CubeAttrsDict "split"-style attribute -dictionaries. 
+"""Dictionary operations for dealing with the CubeAttrsDict "split"-style attribute dictionaries. The idea here is to convert a split-dictionary into a "plain" one for calculations, whose keys are all pairs of the form ('global', ) or ('local', ). @@ -67,7 +66,9 @@ def _convert_pairedkeys_dict_to_splitattrs(dic): def adjust_for_split_attribute_dictionaries(operation): - """Decorator to make a function of attribute-dictionaries work with split attributes. + """Generate attribute-dictionaries to work with split attributes. + + Decorator to make a function of attribute-dictionaries work with split attributes. The wrapped function of attribute-dictionaries is currently always one of "equals", "combine" or "difference", with signatures like : @@ -92,6 +93,7 @@ def adjust_for_split_attribute_dictionaries(operation): "Split" dictionaries are all of class :class:`~iris.cube.CubeAttrsDict`, since the only usage of 'split' attribute dictionaries is in Cubes (i.e. they are not used for cube components). + """ @wraps(operation) diff --git a/lib/iris/common/lenient.py b/lib/iris/common/lenient.py index d8fb5e8bfc..4822d88b92 100644 --- a/lib/iris/common/lenient.py +++ b/lib/iris/common/lenient.py @@ -28,24 +28,26 @@ def _lenient_client(*dargs, services=None): - """Decorator that allows a client function/method to declare at runtime that + """Allow a client function/method to declare at runtime. + + Decorator that allows a client function/method to declare at runtime that it is executing and requires lenient behaviour from a prior registered lenient service function/method. 
- This decorator supports being called with no arguments e.g., + This decorator supports being called with no arguments e.g:: @_lenient_client() def func(): pass - This is equivalent to using it as a simple naked decorator e.g., + This is equivalent to using it as a simple naked decorator e.g:: @_lenient_client def func() pass Alternatively, this decorator supports the lenient client explicitly - declaring the lenient services that it wishes to use e.g., + declaring the lenient services that it wishes to use e.g:: @_lenient_client(services=(service1, service2, ...) def func(): @@ -87,7 +89,9 @@ def func(): @wraps(func) def lenient_client_inner_naked(*args, **kwargs): - """Closure wrapper function to register the wrapped function/method + """Closure wrapper function. + + Closure wrapper function to register the wrapped function/method as active at runtime before executing it. """ @@ -107,7 +111,9 @@ def lenient_client_inner_naked(*args, **kwargs): def lenient_client_outer(func): @wraps(func) def lenient_client_inner(*args, **kwargs): - """Closure wrapper function to register the wrapped function/method + """Closure wrapper function. + + Closure wrapper function to register the wrapped function/method as active at runtime before executing it. """ @@ -123,18 +129,20 @@ def lenient_client_inner(*args, **kwargs): def _lenient_service(*dargs): - """Decorator that allows a function/method to declare that it supports lenient + """Implement the lenient service protocol. + + Decorator that allows a function/method to declare that it supports lenient behaviour as a service. Registration is at Python interpreter parse time. 
- The decorator supports being called with no arguments e.g., + The decorator supports being called with no arguments e.g:: @_lenient_service() def func(): pass - This is equivalent to using it as a simple naked decorator e.g., + This is equivalent to using it as a simple naked decorator e.g:: @_lenient_service def func(): @@ -215,7 +223,7 @@ def _qualname(func): class Lenient(threading.local): def __init__(self, **kwargs): - """A container for managing the run-time lenient features and options. + """Container for managing the run-time lenient features and options. Parameters ---------- @@ -281,7 +289,9 @@ def _init(self): @contextmanager def context(self, **kwargs): - """Return a context manager which allows temporary modification of the + """Context manager supporting temporary modification of lenient state. + + Return a context manager which allows temporary modification of the lenient option state within the scope of the context manager. On entry to the context manager, all provided keyword arguments are @@ -320,7 +330,9 @@ def configure_state(state): class _Lenient(threading.local): def __init__(self, *args, **kwargs): - """A container for managing the run-time lenient services and client + """Container for managing the run-time lenient services and client options. + + A container for managing the run-time lenient services and client options for pre-defined functions/methods. Parameters @@ -354,7 +366,9 @@ def __init__(self, *args, **kwargs): self.register_client(client, services) def __call__(self, func): - """Determine whether it is valid for the function/method to provide a + """Determine whether it is valid for the function/method to provide a lenient service. + + Determine whether it is valid for the function/method to provide a lenient service at runtime to the actively executing lenient client. 
Parameters @@ -433,7 +447,9 @@ def __setitem__(self, name, value): @contextmanager def context(self, *args, **kwargs): - """Return a context manager which allows temporary modification of + """Context manager supporting temporary modification of lenient state. + + Return a context manager which allows temporary modification of the lenient option state for the active thread. On entry to the context manager, all provided keyword arguments are @@ -526,7 +542,9 @@ def enable(self, state): self.__dict__["enable"] = state def register_client(self, func, services, append=False): - """Add the provided mapping of lenient client function/method to + """Add the lenient client to service mapping. + + Add the provided mapping of lenient client function/method to required lenient service function/methods. Parameters @@ -565,8 +583,7 @@ def register_client(self, func, services, append=False): self.__dict__[func] = services def register_service(self, func): - """Add the provided function/method as providing a lenient service and - activate it. + """Add the provided function/method as providing a lenient service and activate it. Parameters ---------- diff --git a/lib/iris/common/metadata.py b/lib/iris/common/metadata.py index 27b3329621..92c3d34f3e 100644 --- a/lib/iris/common/metadata.py +++ b/lib/iris/common/metadata.py @@ -94,7 +94,9 @@ def hexdigest(item): class _NamedTupleMeta(ABCMeta): - """Meta-class to support the convenience of creating a namedtuple from + """Meta-class convenience for creating a namedtuple. + + Meta-class to support the convenience of creating a namedtuple from names/members of the metadata class hierarchy. """ @@ -239,7 +241,7 @@ def __str__(self): return f"{type(self).__name__}({', '.join(field_strings)})" def _api_common(self, other, func_service, func_operation, action, lenient=None): - """Common entry-point for lenient metadata API methods. + """Perform common entry-point for lenient metadata API methods. 
Parameters ---------- @@ -575,8 +577,7 @@ def _is_attributes(field, left, right): @lenient_service def combine(self, other, lenient=None): - """Return a new metadata instance created by combining each of the - associated metadata members. + """Return a new metadata instance created by combining each of the associated metadata members. Parameters ---------- @@ -598,7 +599,9 @@ def combine(self, other, lenient=None): @lenient_service def difference(self, other, lenient=None): - """Return a new metadata instance created by performing a difference + """Perform lenient metadata difference operation. + + Return a new metadata instance created by performing a difference comparison between each of the associated metadata members. A metadata member returned with a value of "None" indicates that there @@ -650,7 +653,9 @@ def equal(self, other, lenient=None): @classmethod def from_metadata(cls, other): - """Convert the provided metadata instance from a different type + """Convert metadata instance to this metadata type. + + Convert the provided metadata instance from a different type to this metadata type, using only the relevant metadata members. Non-common metadata members are set to ``None``. @@ -678,7 +683,7 @@ def from_metadata(cls, other): return result def name(self, default=None, token=False): - """Returns a string name representing the identity of the metadata. + """Return a string name representing the identity of the metadata. First it tries standard name, then it tries the long name, then the NetCDF variable name, before falling-back to a default value, @@ -721,7 +726,9 @@ def _check(item): @classmethod def token(cls, name): - """Determine whether the provided name is a valid NetCDF name and thus + """Verify validity of provided NetCDF name. + + Determine whether the provided name is a valid NetCDF name and thus safe to represent a single parsable token. 
Parameters @@ -1124,7 +1131,9 @@ def _difference_lenient(self, other): @property def _names(self): - """A tuple containing the value of each name participating in the identity + """A tuple containing the value of each name participating in the identity of a cube. + + A tuple containing the value of each name participating in the identity of a :class:`iris.cube.Cube`. This includes the standard name, long name, NetCDF variable name, and the STASH from the attributes dictionary. @@ -1335,8 +1344,7 @@ def metadata_filter( attributes=None, axis=None, ): - """Filter a collection of objects by their metadata to fit the given metadata - criteria. + """Filter a collection of objects by their metadata to fit the given metadata criteria. Criteria can be either specific properties or other objects with metadata to be matched. @@ -1492,7 +1500,9 @@ def __ne__(self, other): return match def __reduce__(self): - """Dynamically created classes at runtime cannot be pickled, due to not + """Use the __reduce__ interface to allow 'pickle' to recreate this class instance. + + Dynamically created classes at runtime cannot be pickled, due to not being defined at the top level of a module. As a result, we require to use the __reduce__ interface to allow 'pickle' to recreate this class instance, and dump and load instance state successfully. @@ -1551,7 +1561,9 @@ def values(self): def metadata_manager_factory(cls, **kwargs): - """A class instance factory function responsible for manufacturing + """Manufacturing metadata instances. + + A class instance factory function responsible for manufacturing metadata instances dynamically at runtime. 
The factory instances returned by the factory are capable of managing diff --git a/lib/iris/common/mixin.py b/lib/iris/common/mixin.py index 762d1b1fe6..2d9605de83 100644 --- a/lib/iris/common/mixin.py +++ b/lib/iris/common/mixin.py @@ -119,7 +119,7 @@ def __setitem__(self, key, value): dict.__setitem__(self, key, value) def update(self, other, **kwargs): - """Standard ``dict.update()`` operation.""" + """Perform standard ``dict.update()`` operation.""" # Gather incoming keys keys = [] if hasattr(other, "keys"): @@ -143,7 +143,7 @@ def name(self, default=None, token=None): return self._metadata_manager.name(default=default, token=token) def rename(self, name): - """Changes the human-readable name. + """Change the human-readable name. If 'name' is a valid standard name it will assign it to :attr:`standard_name`, otherwise it will assign it to diff --git a/lib/iris/common/resolve.py b/lib/iris/common/resolve.py index b35397ee58..1ded7e2603 100644 --- a/lib/iris/common/resolve.py +++ b/lib/iris/common/resolve.py @@ -2,7 +2,9 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -"""Provides the infrastructure to support the analysis, identification and +"""Resolve metadata common between two cubes. + +Provides the infrastructure to support the analysis, identification and combination of metadata common between two :class:`~iris.cube.Cube` operands into a single resultant :class:`~iris.cube.Cube`, which will be auto-transposed, and with the appropriate broadcast shape. @@ -97,7 +99,9 @@ def create_coord(self, metadata): class Resolve: - """At present, :class:`~iris.common.resolve.Resolve` is used by Iris solely + """Resolve the metadata of two cubes into one cube. 
+ + At present, :class:`~iris.common.resolve.Resolve` is used by Iris solely during cube maths to combine a left-hand :class:`~iris.cube.Cube` operand and a right-hand :class:`~iris.cube.Cube` operand into a resultant :class:`~iris.cube.Cube` with common metadata, suitably auto-transposed @@ -208,7 +212,9 @@ class Resolve: """ # noqa: D214, D406, D407, D410, D411 def __init__(self, lhs=None, rhs=None): - """Resolve the provided ``lhs`` :class:`~iris.cube.Cube` operand and + """Resolve the cube operands. + + Resolve the provided ``lhs`` :class:`~iris.cube.Cube` operand and ``rhs`` :class:`~iris.cube.Cube` operand to determine the metadata that is common between them, and the auto-transposed, broadcast shape of the resultant :class:`~iris.cube.Cube`. @@ -323,7 +329,9 @@ def __init__(self, lhs=None, rhs=None): self(lhs, rhs) def __call__(self, lhs, rhs): - """Resolve the ``lhs`` :class:`~iris.cube.Cube` operand and ``rhs`` + """Resolve the cube operands. + + Resolve the ``lhs`` :class:`~iris.cube.Cube` operand and ``rhs`` :class:`~iris.cube.Cube` operand metadata. Involves determining all the common coordinate metadata shared between @@ -378,7 +386,9 @@ def __call__(self, lhs, rhs): return self def _as_compatible_cubes(self): - """Determine whether the ``src`` and ``tgt`` :class:`~iris.cube.Cube` can + """Transpose and/or broadcast operands. + + Determine whether the ``src`` and ``tgt`` :class:`~iris.cube.Cube` can be transposed and/or broadcast successfully together. If compatible, the ``_broadcast_shape`` of the resultant resolved cube is @@ -487,7 +497,9 @@ def _aux_coverage( common_aux_metadata, common_scalar_metadata, ): - """Determine the dimensions covered by each of the local and common + """Perform auxiliary coordinate coverage. + + Determine the dimensions covered by each of the local and common auxiliary coordinates of the provided :class:`~iris.cube.Cube`. 
The cube dimensions not covered by any of the auxiliary coordinates is @@ -554,7 +566,9 @@ def _aux_coverage( @staticmethod def _aux_mapping(src_coverage, tgt_coverage): - """Establish the mapping of dimensions from the ``src`` to ``tgt`` + """Perform auxiliary coordinate dimension mapping. + + Establish the mapping of dimensions from the ``src`` to ``tgt`` :class:`~iris.cube.Cube` using the auxiliary coordinate metadata common between each of the operands. @@ -620,7 +634,9 @@ def _aux_mapping(src_coverage, tgt_coverage): @staticmethod def _categorise_items(cube): - """Inspect the provided :class:`~iris.cube.Cube` and group its + """Categorise the cube metadata. + + Inspect the provided :class:`~iris.cube.Cube` and group its coordinates and associated metadata into dimension, auxiliary and scalar categories. @@ -668,7 +684,9 @@ def _create_prepared_item( bounds=None, container=None, ): - """Convenience method that creates a :class:`~iris.common.resolve._PreparedItem` + """Package metadata in preparation for resolution. + + Convenience method that creates a :class:`~iris.common.resolve._PreparedItem` containing the data and metadata required to construct and attach a coordinate to the resultant resolved cube. @@ -790,7 +808,9 @@ def _show(items, heading): @staticmethod def _dim_coverage(cube, cube_items_dim, common_dim_metadata): - """Determine the dimensions covered by each of the local and common + """Perform dimension coordinate coverage. + + Determine the dimensions covered by each of the local and common dimension coordinates of the provided :class:`~iris.cube.Cube`. The cube dimensions not covered by any of the dimension coordinates is @@ -840,7 +860,9 @@ def _dim_coverage(cube, cube_items_dim, common_dim_metadata): @staticmethod def _dim_mapping(src_coverage, tgt_coverage): - """Establish the mapping of dimensions from the ``src`` to ``tgt`` + """Perform dimension coordinate dimension mapping. 
+ + Establish the mapping of dimensions from the ``src`` to ``tgt`` :class:`~iris.cube.Cube` using the dimension coordinate metadata common between each of the operands. @@ -897,7 +919,9 @@ def _free_mapping( src_aux_coverage, tgt_aux_coverage, ): - """Attempt to update the :attr:`~iris.common.resolve.Resolve.mapping` with + """Associate free dimensions to covered dimensions. + + Attempt to update the :attr:`~iris.common.resolve.Resolve.mapping` with ``src`` to ``tgt`` :class:`~iris.cube.Cube` mappings from unmapped ``src`` dimensions that are free from coordinate metadata coverage to ``tgt`` dimensions that have local metadata coverage (i.e., is not common between @@ -1071,7 +1095,9 @@ def _pop(item, items): logger.debug(f"mapping free dimensions gives, mapping={self.mapping}") def _metadata_coverage(self): - """Using the pre-categorised metadata of the cubes, determine the dimensions + """Determine free and covered dimensions. + + Using the pre-categorised metadata of the cubes, determine the dimensions covered by their associated dimension and auxiliary coordinates, and which dimensions are free of metadata coverage. @@ -1117,7 +1143,9 @@ def _metadata_coverage(self): ) def _metadata_mapping(self): - """Ensure that each ``src`` :class:`~iris.cube.Cube` dimension is mapped to an associated + """Identify equivalent dimensions using metadata. + + Ensure that each ``src`` :class:`~iris.cube.Cube` dimension is mapped to an associated ``tgt`` :class:`~iris.cube.Cube` dimension using the common dim and aux coordinate metadata. If the common metadata does not result in a full mapping of ``src`` to ``tgt`` dimensions @@ -1225,7 +1253,9 @@ def _metadata_mapping(self): self._as_compatible_cubes() def _metadata_prepare(self): - """Populate the :attr:`~iris.common.resolve.Resolve.prepared_category` and + """Consolidate metadata for resolved cube. 
+ + Populate the :attr:`~iris.common.resolve.Resolve.prepared_category` and :attr:`~iris.common.resolve.Resolve.prepared_factories` with the necessary metadata to be constructed and attached to the resulting resolved :class:`~iris.cube.Cube`. @@ -1285,7 +1315,9 @@ def _metadata_prepare(self): self._prepare_factory_payload(src_cube, src_category_local) def _metadata_resolve(self): - """Categorise the coordinate metadata of the cubes into three distinct + """Categorise the coordinate metadata. + + Categorise the coordinate metadata of the cubes into three distinct groups; metadata from coordinates only available (local) on the LHS cube, metadata from coordinates only available (local) on the RHS cube, and metadata from coordinates common to both the LHS and RHS @@ -1393,7 +1425,9 @@ def _prepare_common_aux_payload( prepared_items, ignore_mismatch=None, ): - """Populate the ``prepared_items`` with a :class:`~iris.common.resolve._PreparedItem` containing + """Consolidate common auxiliary coordinate metadata. + + Populate the ``prepared_items`` with a :class:`~iris.common.resolve._PreparedItem` containing the necessary metadata for each auxiliary coordinate to be constructed and attached to the resulting resolved :class:`~iris.cube.Cube`. @@ -1516,7 +1550,9 @@ def _prepare_common_aux_payload( def _prepare_common_dim_payload( self, src_coverage, tgt_coverage, ignore_mismatch=None ): - """Populate the ``items_dim`` member of :attr:`~iris.common.resolve.Resolve.prepared_category_items` + """Consolidate common dimension coordinate metadata. + + Populate the ``items_dim`` member of :attr:`~iris.common.resolve.Resolve.prepared_category_items` with a :class:`~iris.common.resolve._PreparedItem` containing the necessary metadata for each :class:`~iris.coords.DimCoord` to be constructed and attached to the resulting resolved :class:`~iris.cube.Cube`. 
@@ -1575,7 +1611,9 @@ def _prepare_common_dim_payload( def _get_prepared_item( self, metadata, category_local, from_src=True, from_local=False ): - """Find the :attr:`~iris.common.resolve._PreparedItem` from the + """Find the :attr:`~iris.common.resolve._PreparedItem`. + + Find the :attr:`~iris.common.resolve._PreparedItem` from the :attr:`~iris.common.resolve.Resolve.prepared_category` that matches the provided ``metadata``. Alternatively, the ``category_local`` is searched to find a :class:`~iris.common.resolve._Item` @@ -1651,9 +1689,12 @@ def _get_prepared_item( return result def _prepare_factory_payload(self, cube, category_local, from_src=True): - """Populate the :attr:`~iris.common.resolve.Resolve.prepared_factories` with a :class:`~iris.common.resolve._PreparedFactory` - containing the necessary metadata for each ``src`` and/or ``tgt`` auxiliary factory to be constructed and - attached to the resulting resolved :class:`~iris.cube.Cube`. + """Consolidate common factory metadata. + + Populate the :attr:`~iris.common.resolve.Resolve.prepared_factories` with a + :class:`~iris.common.resolve._PreparedFactory` containing the necessary + metadata for each ``src`` and/or ``tgt`` auxiliary factory to be constructed + and attached to the resulting resolved :class:`~iris.cube.Cube`. .. note:: @@ -1727,7 +1768,9 @@ def _prepare_factory_payload(self, cube, category_local, from_src=True): logger.debug(dmsg) def _prepare_local_payload_aux(self, src_aux_coverage, tgt_aux_coverage): - """Populate the ``items_aux`` member of :attr:`~iris.common.resolve.Resolve.prepared_category_items` + """Consolidate local auxiliary coordinate metadata. + + Populate the ``items_aux`` member of :attr:`~iris.common.resolve.Resolve.prepared_category_items` with a :class:`~iris.common.resolve._PreparedItem` containing the necessary metadata for each ``src`` or ``tgt`` local auxiliary coordinate to be constructed and attached to the resulting resolved :class:`~iris.cube.Cube`. 
@@ -1802,7 +1845,9 @@ def _prepare_local_payload_aux(self, src_aux_coverage, tgt_aux_coverage): logger.debug(dmsg) def _prepare_local_payload_dim(self, src_dim_coverage, tgt_dim_coverage): - """Populate the ``items_dim`` member of :attr:`~iris.common.resolve.Resolve.prepared_category_items` + """Consolidate local dimension coordinate metadata. + + Populate the ``items_dim`` member of :attr:`~iris.common.resolve.Resolve.prepared_category_items` with a :class:`~iris.common.resolve._PreparedItem` containing the necessary metadata for each ``src`` or ``tgt`` local :class:`~iris.coords.DimCoord` to be constructed and attached to the resulting resolved :class:`~iris.cube.Cube`. @@ -1879,7 +1924,9 @@ def _prepare_local_payload_dim(self, src_dim_coverage, tgt_dim_coverage): self.prepared_category.items_dim.append(prepared_item) def _prepare_local_payload_scalar(self, src_aux_coverage, tgt_aux_coverage): - """Populate the ``items_scalar`` member of :attr:`~iris.common.resolve.Resolve.prepared_category_items` + """Consolidate local scalar coordinate metadata. + + Populate the ``items_scalar`` member of :attr:`~iris.common.resolve.Resolve.prepared_category_items` with a :class:`~iris.common.resolve._PreparedItem` containing the necessary metadata for each ``src`` or ``tgt`` local scalar coordinate to be constructed and attached to the resulting resolved :class:`~iris.cube.Cube`. @@ -1933,7 +1980,9 @@ def _prepare_local_payload( tgt_dim_coverage, tgt_aux_coverage, ): - """Populate the :attr:`~iris.common.resolve.Resolve.prepared_category_items` with a + """Consolidate the local metadata. + + Populate the :attr:`~iris.common.resolve.Resolve.prepared_category_items` with a :class:`~iris.common.resolve._PreparedItem` containing the necessary metadata from the ``src`` and/or ``tgt`` :class:`~iris.cube.Cube` for each coordinate to be constructed and attached to the resulting resolved :class:`~iris.cube.Cube`. 
@@ -1966,7 +2015,9 @@ def _prepare_local_payload( def _prepare_points_and_bounds( self, src_coord, tgt_coord, src_dims, tgt_dims, ignore_mismatch=None ): - """Compare the points and bounds of the ``src`` and ``tgt`` coordinates to ensure + """Consolidate points and bounds. + + Compare the points and bounds of the ``src`` and ``tgt`` coordinates to ensure that they are equivalent, taking into account broadcasting when appropriate. .. note:: @@ -2238,7 +2289,9 @@ def _tgt_cube_prepare(self, data): cube.remove_ancillary_variable(av) def cube(self, data, in_place=False): - """Create the resultant :class:`~iris.cube.Cube` from the resolved ``lhs`` + """Create the resultant resolved cube. + + Create the resultant :class:`~iris.cube.Cube` from the resolved ``lhs`` and ``rhs`` :class:`~iris.cube.Cube` operands, using the provided ``data``. @@ -2403,7 +2456,9 @@ def cube(self, data, in_place=False): @property def mapped(self): - """Boolean state representing whether **all** ``src`` :class:`~iris.cube.Cube` + """Whether all ``src`` dimensions have been mapped. + + Boolean state representing whether **all** ``src`` :class:`~iris.cube.Cube` dimensions have been associated with relevant ``tgt`` :class:`~iris.cube.Cube` dimensions. @@ -2481,7 +2536,9 @@ def mapped(self): @property def shape(self): - """Proposed shape of the final resolved cube given the ``lhs`` + """Proposed shape of the final resolved cube. + + Proposed shape of the final resolved cube given the ``lhs`` :class:`~iris.cube.Cube` operand and the ``rhs`` :class:`~iris.cube.Cube` operand. diff --git a/lib/iris/coord_categorisation.py b/lib/iris/coord_categorisation.py index a17dba6519..96407632c5 100644 --- a/lib/iris/coord_categorisation.py +++ b/lib/iris/coord_categorisation.py @@ -147,10 +147,7 @@ def add_day_of_month(cube, coord, name="day_of_month"): def add_day_of_year(cube, coord, name="day_of_year"): - """Add a categorical day-of-year coordinate, values 1..365 - (1..366 in leap years). 
- - """ + """Add a categorical day-of-year coordinate, values 1..365 (1..366 in leap years).""" # Note: cftime.datetime objects return a normal tuple from timetuple(), # unlike datetime.datetime objects that return a namedtuple. # Index the time tuple (element 7 is day of year) instead of using named @@ -207,10 +204,7 @@ def add_hour(cube, coord, name="hour"): def _months_in_season(season): - """Returns a list of month numbers corresponding to each month in the - given season. - - """ + """Return a list of month numbers corresponding to each month in the given season.""" cyclic_months = "jfmamjjasondjfmamjjasond" m0 = cyclic_months.find(season.lower()) if m0 < 0: @@ -300,8 +294,7 @@ def _month_season_numbers(seasons): def add_season(cube, coord, name="season", seasons=("djf", "mam", "jja", "son")): - """Add a categorical season-of-year coordinate, with user specified - seasons. + """Add a categorical season-of-year coordinate, with user specified seasons. Parameters ---------- @@ -336,7 +329,9 @@ def _season(coord, value): def add_season_number( cube, coord, name="season_number", seasons=("djf", "mam", "jja", "son") ): - """Add a categorical season-of-year coordinate, values 0..N-1 where + """Add a categorical season-of-year coordinate. + + Add a categorical season-of-year coordinate, values 0..N-1 where N is the number of user specified seasons. Parameters @@ -416,8 +411,7 @@ def _season_year(coord, value): def add_season_membership(cube, coord, season, name="season_membership"): - """Add a categorical season membership coordinate for a user specified - season. + """Add a categorical season membership coordinate for a user specified season. The coordinate has the value True for every time that is within the given season, and the value False otherwise. 
diff --git a/lib/iris/coord_systems.py b/lib/iris/coord_systems.py index 7f0af71548..adc9895ed9 100644 --- a/lib/iris/coord_systems.py +++ b/lib/iris/coord_systems.py @@ -79,7 +79,7 @@ def __ne__(self, other): return not (self == other) def xml_element(self, doc, attrs=None): - """Default behaviour for coord systems.""" + """Perform default behaviour for coord systems.""" # attrs - optional list of (k,v) items, used for alternate output xml_element_name = type(self).__name__ @@ -115,10 +115,7 @@ def _ellipsoid_to_globe(ellipsoid, globe_default): @abstractmethod def as_cartopy_crs(self): - """Return a cartopy CRS representing our native coordinate - system. - - """ + """Return a cartopy CRS representing our native coordinate system.""" pass @abstractmethod @@ -141,7 +138,9 @@ def as_cartopy_projection(self): class GeogCS(CoordSystem): - """A geographic (ellipsoidal) coordinate system, defined by the shape of + """A geographic (ellipsoidal) coordinate system. + + A geographic (ellipsoidal) coordinate system, defined by the shape of the Earth and a prime meridian. """ @@ -362,7 +361,9 @@ class that invalidates the cache. return ccrs.Geodetic(self._globe) def _wipe_cached_properties(self): - """Wipes the cached properties on the object as part of any update to a + """Wipes the cached properties on the object. + + Wipes the cached properties on the object as part of any update to a value that invalidates the cache. """ try: @@ -383,9 +384,12 @@ def semi_major_axis(self): @semi_major_axis.setter def semi_major_axis(self, value): - """Setting this property to a different value invalidates the current datum + """Assign semi_major_axis. + + Setting this property to a different value invalidates the current datum (if any) because a datum encodes a specific semi-major axis. This also invalidates the cached `cartopy.Globe` and `cartopy.CRS`. 
+ """ value = float(value) if not np.isclose(self.semi_major_axis, value): @@ -402,9 +406,12 @@ def semi_minor_axis(self): @semi_minor_axis.setter def semi_minor_axis(self, value): - """Setting this property to a different value invalidates the current datum + """Assign semi_minor_axis. + + Setting this property to a different value invalidates the current datum (if any) because a datum encodes a specific semi-minor axis. This also invalidates the cached `cartopy.Globe` and `cartopy.CRS`. + """ value = float(value) if not np.isclose(self.semi_minor_axis, value): @@ -421,8 +428,11 @@ def inverse_flattening(self): @inverse_flattening.setter def inverse_flattening(self, value): - """Setting this property to a different value does not affect the behaviour + """Assign inverse_flattening. + + Setting this property to a different value does not affect the behaviour of this object any further than the value of this property. + """ wmsg = ( "Setting inverse_flattening does not affect other properties of " @@ -443,10 +453,13 @@ def datum(self): @datum.setter def datum(self, value): - """Setting this property to a different value invalidates the current + """Assign datum. + + Setting this property to a different value invalidates the current values of the ellipsoid measurements because a datum encodes its own ellipse. This also invalidates the cached `cartopy.Globe` and `cartopy.CRS`. + """ if self._datum != value: self._semi_major_axis = None @@ -483,8 +496,7 @@ def __init__( north_pole_grid_longitude=None, ellipsoid=None, ): - """Constructs a coordinate system with rotated pole, on an - optional :class:`GeogCS`. + """Construct a coordinate system with rotated pole, on an optional :class:`GeogCS`. Args: @@ -592,7 +604,7 @@ def __init__( scale_factor_at_central_meridian=None, ellipsoid=None, ): - """Constructs a TransverseMercator object. + """Construct a TransverseMercator object. 
Args: @@ -713,7 +725,7 @@ def __init__( false_northing=None, ellipsoid=None, ): - """Constructs an Orthographic coord system. + """Construct an Orthographic coord system. Args: @@ -797,7 +809,7 @@ def __init__( false_northing=None, ellipsoid=None, ): - """Constructs a Vertical Perspective coord system. + """Construct a Vertical Perspective coord system. Args: @@ -889,7 +901,7 @@ def __init__( false_northing=None, ellipsoid=None, ): - """Constructs a Geostationary coord system. + """Construct a Geostationary coord system. Args: @@ -993,7 +1005,7 @@ def __init__( ellipsoid=None, scale_factor_at_projection_origin=None, ): - """Constructs a Stereographic coord system. + """Construct a Stereographic coord system. Parameters ---------- @@ -1165,7 +1177,7 @@ def __init__( secant_latitudes=None, ellipsoid=None, ): - """Constructs a LambertConformal coord system. + """Construct a LambertConformal coord system. Kwargs: @@ -1271,7 +1283,7 @@ def __init__( false_easting=None, false_northing=None, ): - """Constructs a Mercator coord system. + """Construct a Mercator coord system. Kwargs: @@ -1376,7 +1388,7 @@ def __init__( false_northing=None, ellipsoid=None, ): - """Constructs a Lambert Azimuthal Equal Area coord system. + """Construct a Lambert Azimuthal Equal Area coord system. Kwargs: @@ -1457,7 +1469,7 @@ def __init__( standard_parallels=None, ellipsoid=None, ): - """Constructs a Albers Conical Equal Area coord system. + """Construct an Albers Conical Equal Area coord system. Kwargs: @@ -1562,7 +1574,7 @@ def __init__( scale_factor_at_projection_origin=None, ellipsoid=None, ): - """Constructs an ObliqueMercator object. + """Construct an ObliqueMercator object. Parameters ---------- @@ -1677,7 +1689,7 @@ def __init__( scale_factor_at_projection_origin=None, ellipsoid=None, ): - """Constructs a RotatedMercator object. + """Construct a RotatedMercator object.
Parameters ---------- diff --git a/lib/iris/coords.py b/lib/iris/coords.py index 689fe7b0a0..89c7434a15 100644 --- a/lib/iris/coords.py +++ b/lib/iris/coords.py @@ -67,7 +67,7 @@ def __init__( units=None, attributes=None, ): - """Constructs a single dimensional metadata object. + """Construct a single dimensional metadata object. Parameters ---------- @@ -119,8 +119,7 @@ def __init__( self._bounds_dm = None # Only ever set on Coord-derived instances. def __getitem__(self, keys): - """Returns a new dimensional metadata whose values are obtained by - conventional array indexing. + """Return a new dimensional metadata whose values are obtained by conventional array indexing. .. note:: @@ -157,7 +156,7 @@ def __getitem__(self, keys): return new_metadata def copy(self, values=None): - """Returns a copy of this dimensional metadata object. + """Return a copy of this dimensional metadata object. Parameters ---------- @@ -235,14 +234,11 @@ def _values(self, values): self._values_dm.data = values def _lazy_values(self): - """Returns a lazy array representing the dimensional metadata values.""" + """Return a lazy array representing the dimensional metadata values.""" return self._values_dm.lazy_data() def _core_values(self): - """The values array of this dimensional metadata which may be a NumPy - array or a dask array. - - """ + """Value array of this dimensional metadata which may be a NumPy array or a dask array.""" result = self._values_dm.core_data() if not _lazy.is_lazy_data(result): result = result.view() @@ -250,10 +246,7 @@ def _core_values(self): return result def _has_lazy_values(self): - """Returns a boolean indicating whether the metadata's values array is a - lazy dask array or not. 
- - """ + """Indicate whether the metadata's values array is a lazy dask array or not.""" return self._values_dm.has_lazy_data() def summary( @@ -626,7 +619,7 @@ def __hash__(self): return hash(id(self)) def __binary_operator__(self, other, mode_constant): - """Common code which is called by add, sub, mul and div. + """Perform common code which is called by add, sub, mul and div. Mode constant is one of ADD, SUB, MUL, DIV, RDIV @@ -752,10 +745,7 @@ def pointwise_convert(values): self.units = unit def is_compatible(self, other, ignore=None): - """Return whether the current dimensional metadata object is compatible - with another. - - """ + """Return whether the current dimensional metadata object is compatible with another.""" compatible = self.name() == other.name() and self.units == other.units if compatible: @@ -773,25 +763,16 @@ def is_compatible(self, other, ignore=None): @property def dtype(self): - """The NumPy dtype of the current dimensional metadata object, as - specified by its values. - - """ + """The NumPy dtype of the current dimensional metadata object, as specified by its values.""" return self._values_dm.dtype @property def ndim(self): - """Return the number of dimensions of the current dimensional metadata - object. - - """ + """Return the number of dimensions of the current dimensional metadata object.""" return self._values_dm.ndim def has_bounds(self): - """Return a boolean indicating whether the current dimensional metadata - object has a bounds array. - - """ + """Indicate whether the current dimensional metadata object has a bounds array.""" # Allows for code to handle unbounded dimensional metadata agnostic of # whether the metadata is a coordinate or not. return False @@ -802,7 +783,9 @@ def shape(self): return self._values_dm.shape def xml_element(self, doc): - """Create the :class:`xml.dom.minidom.Element` that describes this + """Create XML element. 
+ + Create the :class:`xml.dom.minidom.Element` that describes this :class:`_DimensionalMetadata`. Parameters @@ -897,10 +880,7 @@ def _xml_array_repr(data): return result def _value_type_name(self): - """A simple, readable name for the data type of the dimensional metadata - values. - - """ + """Provide a simple name for the data type of the dimensional metadata values.""" dtype = self._core_values().dtype kind = dtype.kind if kind in "SU": @@ -925,7 +905,7 @@ def __init__( units=None, attributes=None, ): - """Constructs a single ancillary variable. + """Construct a single ancillary variable. Parameters ---------- @@ -983,17 +963,16 @@ def lazy_data(self): return super()._lazy_values() def core_data(self): - """The data array at the core of this ancillary variable, which may be a + """Return data array at the core of this ancillary variable. + + The data array at the core of this ancillary variable, which may be a NumPy array or a dask array. """ return super()._core_values() def has_lazy_data(self): - """Return a boolean indicating whether the ancillary variable's data array - is a lazy dask array or not. - - """ + """Indicate whether the ancillary variable's data array is a lazy dask array or not.""" return super()._has_lazy_values() def cube_dims(self, cube): @@ -1006,7 +985,9 @@ def cube_dims(self, cube): class CellMeasure(AncillaryVariable): - """A CF Cell Measure, providing area or volume properties of a cell + """A CF Cell Measure, providing area or volume properties of a cell. + + A CF Cell Measure, providing area or volume properties of a cell where these cannot be inferred from the Coordinates and Coordinate Reference System. @@ -1022,7 +1003,7 @@ def __init__( attributes=None, measure=None, ): - """Constructs a single cell measure. + """Construct a single cell measure. 
Parameters ---------- @@ -1084,8 +1065,7 @@ def cube_dims(self, cube): return cube.cell_measure_dims(self) def xml_element(self, doc): - """Create the :class:`xml.dom.minidom.Element` that describes this - :class:`CellMeasure`. + """Create the :class:`xml.dom.minidom.Element` that describes this :class:`CellMeasure`. Parameters ---------- @@ -1131,8 +1111,7 @@ def __new__( min_inclusive=True, max_inclusive=True, ): - """Create a CoordExtent for the specified coordinate and range of - values. + """Create a CoordExtent for the specified coordinate and range of values. Parameters ---------- @@ -1168,7 +1147,7 @@ def __new__( def _get_2d_coord_bound_grid(bounds): - """Creates a grid using the bounds of a 2D coordinate with 4 sided cells. + """Create a grid using the bounds of a 2D coordinate with 4 sided cells. Assumes that the four vertices of the cells are in an anti-clockwise order (bottom-left, bottom-right, top-right, top-left). @@ -1212,7 +1191,9 @@ def _get_2d_coord_bound_grid(bounds): class Cell(namedtuple("Cell", ["point", "bound"])): - """An immutable representation of a single cell of a coordinate, including the + """A coordinate cell containing a single point, or point and bounds. + + An immutable representation of a single cell of a coordinate, including the sample point and/or boundary position. Notes on cell comparison: @@ -1296,10 +1277,7 @@ def __hash__(self): return hash((self.point, bound)) def __eq__(self, other): - """Compares Cell equality depending on the type of the object to be - compared. - - """ + """Compare Cell equality depending on the type of the object to be compared.""" if isinstance(other, (int, float, np.number)) or hasattr(other, "timetuple"): if self.bound is not None: return self.contains_point(other) @@ -1326,13 +1304,15 @@ def __ne__(self, other): return result def __common_cmp__(self, other, operator_method): - """Common method called by the rich comparison operators. The method of + """Common equality comparison. 
+ + Common method called by the rich comparison operators. The method of checking equality depends on the type of the object to be compared. Cell vs Cell comparison is used to define a strict order. Non-Cell vs Cell comparison is used to define Constraint matching. - """ + """ # noqa: D401 if (isinstance(other, list) and len(other) == 1) or ( isinstance(other, np.ndarray) and other.shape == (1,) ): @@ -1431,8 +1411,7 @@ def __str__(self): return str(self.point) def contains_point(self, point): - """For a bounded cell, returns whether the given point lies within the - bounds. + """For a bounded cell, returns whether the given point lies within the bounds. .. note:: The test carried out is equivalent to min(bound) <= point <= max(bound). @@ -1528,7 +1507,7 @@ def __init__( self._ignore_axis = DEFAULT_IGNORE_AXIS def copy(self, points=None, bounds=None): - """Returns a copy of this coordinate. + """Return a copy of this coordinate. points : A points array for the new coordinate. @@ -1600,7 +1579,9 @@ def points(self, points): @property def bounds(self): - """The coordinate bounds values, as a NumPy array, + """Coordinate bounds values. + + The coordinate bounds values, as a NumPy array, or None if no bound values are defined. .. note:: The shape of the bound array should be: ``points.shape + @@ -1639,7 +1620,9 @@ def coord_system(self, value): @property def climatological(self): - """A boolean that controls whether the coordinate is a climatological + """Flag for representing a climatological time axis. + + A boolean that controls whether the coordinate is a climatological time axis, in which case the bounds represent a climatological period rather than a normal period. @@ -1674,8 +1657,7 @@ def climatological(self, value): @property def ignore_axis(self): - """A boolean that controls whether guess_coord_axis acts on this - coordinate. + """A boolean that controls whether guess_coord_axis acts on this coordinate. 
Defaults to False, and when set to True it will be skipped by guess_coord_axis. @@ -1729,17 +1711,11 @@ def lazy_bounds(self): return lazy_bounds def core_points(self): - """The points array at the core of this coord, which may be a NumPy array - or a dask array. - - """ + """Return the points array at the core of this coord, which may be a NumPy array or a dask array.""" return super()._core_values() def core_bounds(self): - """The points array at the core of this coord, which may be a NumPy array - or a dask array. - - """ + """Return the bounds array at the core of this coord, which may be a NumPy array or a dask array.""" result = None if self.has_bounds(): result = self._bounds_dm.core_data() @@ -1748,14 +1724,13 @@ def core_bounds(self): return result def has_lazy_points(self): - """Return a boolean indicating whether the coord's points array is a - lazy dask array or not. - - """ + """Return a boolean indicating whether the coord's points array is a lazy dask array or not.""" return super()._has_lazy_values() def has_lazy_bounds(self): - """Return a boolean indicating whether the coord's bounds array is a + """Whether coordinate bounds are lazy. + + Return a boolean indicating whether the coord's bounds array is a lazy dask array or not. """ @@ -1782,8 +1757,7 @@ def cube_dims(self, cube): return cube.coord_dims(self) def convert_units(self, unit): - r"""Change the coordinate's units, converting the values in its points - and bounds arrays. + r"""Change the coordinate's units, converting the values in its points and bounds arrays. For example, if a coordinate's :attr:`~iris.coords.Coord.units` attribute is set to radians then:: @@ -1801,7 +1775,7 @@ def convert_units(self, unit): super().convert_units(unit=unit) def cells(self): - """Returns an iterable of Cell instances for this Coord. + """Return an iterable of Cell instances for this Coord.
For example:: @@ -1851,7 +1825,7 @@ def _sanity_check_bounds(self): ) def _discontiguity_in_bounds(self, rtol=1e-5, atol=1e-8): - """Checks that the bounds of the coordinate are contiguous. + """Check that the bounds of the coordinate are contiguous. rtol : float, optional Relative tolerance that is used when checking contiguity. Defaults @@ -1941,7 +1915,9 @@ def mod360_adjust(compare_axis): return contiguous, diffs def is_contiguous(self, rtol=1e-05, atol=1e-08): - """Return True if, and only if, this Coord is bounded with contiguous + """Whether coordinate has contiguous bounds. + + Return True if, and only if, this Coord is bounded with contiguous bounds to within the specified relative and absolute tolerances. 1D coords are contiguous if the upper bound of a cell aligns, @@ -1971,7 +1947,9 @@ def is_contiguous(self, rtol=1e-05, atol=1e-08): return contiguous def contiguous_bounds(self): - """Returns the N+1 bound values for a contiguous bounded 1D coordinate + """Contiguous bounds of 1D coordinate. + + Return the N+1 bound values for a contiguous bounded 1D coordinate of length N, or the (N+1, M+1) bound values for a contiguous bounded 2D coordinate of shape (N, M). @@ -2065,7 +2043,9 @@ def is_compatible(self, other, ignore=None): @property def bounds_dtype(self): - """The NumPy dtype of the coord's bounds. Will be `None` if the coord + """The NumPy dtype of the coordinates bounds. + + The NumPy dtype of the coord's bounds. Will be `None` if the coord does not have bounds. """ @@ -2087,8 +2067,11 @@ def has_bounds(self): return self._bounds_dm is not None def cell(self, index): - """Return the single :class:`Cell` instance which results from slicing the + """Point/bound cell at the given coordinate index. + + Return the single :class:`Cell` instance which results from slicing the points/bounds with the given index. 
+ """ index = iris.util._build_full_slice_given_keys(index, self.ndim) @@ -2111,8 +2094,7 @@ def cell(self, index): return Cell(point, bound) def collapsed(self, dims_to_collapse=None): - """Returns a copy of this coordinate, which has been collapsed along - the specified dimensions. + """Return a copy of this coordinate, which has been collapsed along the specified dimensions. Replaces the points & bounds with a simple bounded region. """ @@ -2304,7 +2286,7 @@ def guess_bounds(self, bound_position=0.5): self.bounds = self._guess_bounds(bound_position) def intersect(self, other, return_indices=False): - """Returns a new coordinate from the intersection of two coordinates. + """Return a new coordinate from the intersection of two coordinates. Both coordinates must be compatible as defined by :meth:`~iris.coords.Coord.is_compatible`. @@ -2349,7 +2331,7 @@ def intersect(self, other, return_indices=False): return self[self_intersect_indices] def nearest_neighbour_index(self, point): - """Returns the index of the cell nearest to the given point. + """Return the index of the cell nearest to the given point. Only works for one-dimensional coordinates. @@ -2444,8 +2426,7 @@ def nearest_neighbour_index(self, point): return result_index def xml_element(self, doc): - """Create the :class:`xml.dom.minidom.Element` that describes this - :class:`Coord`. + """Create the :class:`xml.dom.minidom.Element` that describes this :class:`Coord`. Parameters ---------- @@ -2503,8 +2484,7 @@ def from_regular( climatological=False, with_bounds=False, ): - """Create a :class:`DimCoord` with regularly spaced points, and - optionally bounds. + """Create a :class:`DimCoord` with regularly spaced points, and optionally bounds. The majority of the arguments are defined as for :class:`Coord`, but those which differ are defined below. 
@@ -2839,8 +2819,7 @@ def is_monotonic(self): return True def xml_element(self, doc): - """Create the :class:`xml.dom.minidom.Element` that describes this - :class:`DimCoord`. + """Create the :class:`xml.dom.minidom.Element` that describes this :class:`DimCoord`. Parameters ---------- @@ -3016,8 +2995,7 @@ def __add__(self, other): return NotImplemented def xml_element(self, doc): - """Create the :class:`xml.dom.minidom.Element` that describes this - :class:`CellMethod`. + """Create the :class:`xml.dom.minidom.Element` that describes this :class:`CellMethod`. Parameters ---------- diff --git a/lib/iris/cube.py b/lib/iris/cube.py index 691349f170..5997eaacf5 100644 --- a/lib/iris/cube.py +++ b/lib/iris/cube.py @@ -66,17 +66,13 @@ def __len__(self): return len(self.cubes) def add(self, cube): - """Adds the appropriate (sub)cube to the list of cubes where it - matches the constraint. - - """ + """Add the appropriate (sub)cube to the list of cubes where it matches the constraint.""" sub_cube = self.constraint.extract(cube) if sub_cube is not None: self.cubes.append(sub_cube) def merged(self, unique=False): - """Returns a new :class:`_CubeFilter` by merging the list of - cubes. + """Return a new :class:`_CubeFilter` by merging the list of cubes. Parameters ---------- @@ -93,10 +89,7 @@ class _CubeFilterCollection: @staticmethod def from_cubes(cubes, constraints=None): - """Creates a new collection from an iterable of cubes, and some - optional constraints. - - """ + """Create a new collection from an iterable of cubes, and some optional constraints.""" constraints = iris._constraints.list_of_constraints(constraints) pairs = [_CubeFilter(constraint) for constraint in constraints] collection = _CubeFilterCollection(pairs) @@ -108,26 +101,19 @@ def __init__(self, pairs): self.pairs = pairs def add_cube(self, cube): - """Adds the given :class:`~iris.cube.Cube` to all of the relevant - constraint pairs. 
- - """ + """Add the given :class:`~iris.cube.Cube` to all of the relevant constraint pairs.""" for pair in self.pairs: pair.add(cube) def cubes(self): - """Returns all the cubes in this collection concatenated into a - single :class:`CubeList`. - - """ + """Return all the cubes in this collection concatenated into a single :class:`CubeList`.""" result = CubeList() for pair in self.pairs: result.extend(pair.cubes) return result def merged(self, unique=False): - """Returns a new :class:`_CubeFilterCollection` by merging all the cube - lists of this collection. + """Return a new :class:`_CubeFilterCollection` by merging all the cube lists of this collection. Parameters ---------- @@ -140,10 +126,7 @@ def merged(self, unique=False): class CubeList(list): - """All the functionality of a standard :class:`list` with added "Cube" - context. - - """ + """All the functionality of a standard :class:`list` with added "Cube" context.""" def __init__(self, *args, **kwargs): """Given an iterable of cubes, return a CubeList instance.""" @@ -154,7 +137,7 @@ def __init__(self, *args, **kwargs): self._assert_is_cube(cube) def __str__(self): - """Runs short :meth:`Cube.summary` on every cube.""" + """Run short :meth:`Cube.summary` on every cube.""" result = [ "%s: %s" % (i, cube.summary(shorten=True)) for i, cube in enumerate(self) ] @@ -165,7 +148,7 @@ def __str__(self): return result def __repr__(self): - """Runs repr on every cube.""" + """Run repr on every cube.""" return "[%s]" % ",\n".join([repr(cube) for cube in self]) @staticmethod @@ -258,8 +241,7 @@ def xml(self, checksum=False, order=True, byteorder=True): return doc.toprettyxml(indent=" ") def extract(self, constraints): - """Filter each of the cubes which can be filtered by the given - constraints. + """Filter each of the cubes which can be filtered by the given constraints. This method iterates over each constraint given, and subsets each of the cubes in this CubeList where possible. 
Thus, a CubeList of length @@ -276,6 +258,8 @@ def extract(self, constraints): def extract_cube(self, constraint): """Extract a single cube from a CubeList, and return it. + + Extract a single cube from a CubeList, and return it. Raise an error if the extract produces no cubes, or more than one. Parameters @@ -296,6 +280,8 @@ def extract_cube(self, constraint): def extract_cubes(self, constraints): """Extract specific cubes from a CubeList, one for each given constraint. + + Extract specific cubes from a CubeList, one for each given constraint. Each constraint must produce exactly one cube, otherwise an error is raised. @@ -351,7 +337,9 @@ def _extract_and_merge(cubes, constraints, strict=False, return_single_cube=Fals return result def extract_overlapping(self, coord_names): - """Returns a :class:`CubeList` of cubes extracted over regions + """Return a :class:`CubeList` of cubes extracted over regions. + + Return a :class:`CubeList` of cubes extracted over regions where the coordinates overlap, for the coordinates in coord_names. @@ -378,8 +366,7 @@ def overlap_fn(cell): return self.extract(iris.Constraint(coord_values=coord_values)) def merge_cube(self): - """Return the merged contents of the :class:`CubeList` as a single - :class:`Cube`. + """Return the merged contents of the :class:`CubeList` as a single :class:`Cube`. If it is not possible to merge the `CubeList` into a single `Cube`, a :class:`~iris.exceptions.MergeError` will be raised @@ -414,8 +401,7 @@ def merge_cube(self): return merged_cube def merge(self, unique=True): - """Returns the :class:`CubeList` resulting from merging this - :class:`CubeList`. + """Return the :class:`CubeList` resulting from merging this :class:`CubeList`. Parameters ---------- @@ -517,8 +503,7 @@ def concatenate_cube( check_ancils=True, check_derived_coords=True, ): - """Return the concatenated contents of the :class:`CubeList` as a single - :class:`Cube`. 
+ """Return the concatenated contents of the :class:`CubeList` as a single :class:`Cube`. If it is not possible to concatenate the `CubeList` into a single `Cube`, a :class:`~iris.exceptions.ConcatenateError` will be raised @@ -754,7 +739,9 @@ def _is_single_item(testee): class CubeAttrsDict(MutableMapping): - r"""A :class:`dict`\\-like object for :attr:`iris.cube.Cube.attributes`, + r"""A :class:`dict`\\-like object for :attr:`iris.cube.Cube.attributes`. + + A :class:`dict`\\-like object for :attr:`iris.cube.Cube.attributes`, providing unified user access to combined cube "local" and "global" attributes dictionaries, with the access behaviour of an ordinary (single) dictionary. @@ -1128,7 +1115,9 @@ class Cube(CFVariableMixin): @classmethod def _sort_xml_attrs(cls, doc): - """Takes an xml document and returns a copy with all element + """Return a copy with all element attributes sorted in alphabetical order. + + Take an xml document and returns a copy with all element attributes sorted in alphabetical order. This is a private utility method required by iris to maintain @@ -1193,7 +1182,7 @@ def __init__( cell_measures_and_dims=None, ancillary_variables_and_dims=None, ): - """Creates a cube with data and optional metadata. + """Create a cube with data and optional metadata. Not typically used - normally cubes are obtained by loading data (e.g. :func:`iris.load`) or from manipulating existing cubes. @@ -1330,7 +1319,9 @@ def __init__( @property def _names(self): - """A tuple containing the value of each name participating in the identity + """Tuple containing the value of each name participating in the identity of a :class:`iris.cube.Cube`. + + A tuple containing the value of each name participating in the identity of a :class:`iris.cube.Cube`. This includes the standard name, long name, NetCDF variable name, and the STASH from the attributes dictionary. 
@@ -1347,14 +1338,18 @@ def attributes(self) -> CubeAttrsDict: @attributes.setter def attributes(self, attributes: Optional[Mapping]): - """An override to CfVariableMixin.attributes.setter, which ensures that Cube + """Override to CfVariableMixin.attributes.setter. + + An override to CfVariableMixin.attributes.setter, which ensures that Cube attributes are stored in a way which distinguishes global + local ones. """ self._metadata_manager.attributes = CubeAttrsDict(attributes or {}) def _dimensional_metadata(self, name_or_dimensional_metadata): - """Return a single _DimensionalMetadata instance that matches the given + """Return a single _DimensionalMetadata instance that matches. + + Return a single _DimensionalMetadata instance that matches the given name_or_dimensional_metadata. If one is not found, raise an error. """ @@ -1470,7 +1465,7 @@ def add_cell_method(self, cell_method): self.cell_methods += (cell_method,) def add_aux_coord(self, coord, data_dims=None): - """Adds a CF auxiliary coordinate to the cube. + """Add a CF auxiliary coordinate to the cube. Parameters ---------- @@ -1579,7 +1574,7 @@ def _add_unique_aux_coord(self, coord, data_dims): self._aux_coords_and_dims.append((coord, data_dims)) def add_aux_factory(self, aux_factory): - """Adds an auxiliary coordinate factory to the cube. + """Add an auxiliary coordinate factory to the cube. Parameters ---------- @@ -1611,7 +1606,7 @@ def coordsonly(coords_and_dims): self._aux_factories.append(aux_factory) def add_cell_measure(self, cell_measure, data_dims=None): - """Adds a CF cell measure to the cube. + """Add a CF cell measure to the cube. Parameters ---------- @@ -1644,7 +1639,7 @@ def add_cell_measure(self, cell_measure, data_dims=None): ) def add_ancillary_variable(self, ancillary_variable, data_dims=None): - """Adds a CF ancillary variable to the cube. + """Add a CF ancillary variable to the cube. 
Parameters ---------- @@ -1743,7 +1738,7 @@ def _add_unique_dim_coord(self, dim_coord, data_dim): self._dim_coords_and_dims.append((dim_coord, int(data_dim))) def remove_aux_factory(self, aux_factory): - """Removes the given auxiliary coordinate factory from the cube.""" + """Remove the given auxiliary coordinate factory from the cube.""" self._aux_factories.remove(aux_factory) def _remove_coord(self, coord): @@ -1762,7 +1757,7 @@ def _remove_coord(self, coord): self.remove_aux_factory(aux_factory) def remove_coord(self, coord): - """Removes a coordinate from the cube. + """Remove a coordinate from the cube. Parameters ---------- @@ -1782,7 +1777,7 @@ def remove_coord(self, coord): factory.update(coord) def remove_cell_measure(self, cell_measure): - """Removes a cell measure from the cube. + """Remove a cell measure from the cube. Parameters ---------- @@ -1819,7 +1814,7 @@ def remove_cell_measure(self, cell_measure): ] def remove_ancillary_variable(self, ancillary_variable): - """Removes an ancillary variable from the cube. + """Remove an ancillary variable from the cube. Parameters ---------- @@ -1850,8 +1845,7 @@ def replace_coord(self, new_coord): factory.update(old_coord, new_coord) def coord_dims(self, coord): - """Returns a tuple of the data dimensions relevant to the given - coordinate. + """Return a tuple of the data dimensions relevant to the given coordinate. When searching for the given coordinate in the cube the comparison is made using coordinate metadata equality. Hence the given coordinate @@ -1911,8 +1905,7 @@ def matcher(factory): return match def cell_measure_dims(self, cell_measure): - """Returns a tuple of the data dimensions relevant to the given - CellMeasure. + """Return a tuple of the data dimensions relevant to the given CellMeasure. 
Parameters ---------- @@ -1934,8 +1927,7 @@ def cell_measure_dims(self, cell_measure): return matches[0] def ancillary_variable_dims(self, ancillary_variable): - """Returns a tuple of the data dimensions relevant to the given - AncillaryVariable. + """Return a tuple of the data dimensions relevant to the given AncillaryVariable. Parameters ---------- @@ -1961,7 +1953,9 @@ def ancillary_variable_dims(self, ancillary_variable): return matches[0] def aux_factory(self, name=None, standard_name=None, long_name=None, var_name=None): - """Returns the single coordinate factory that matches the criteria, + """Return the single coordinate factory that matches the criteria. + + Return the single coordinate factory that matches the criteria, or raises an error if not found. Parameters @@ -2038,8 +2032,7 @@ def coords( dim_coords=None, mesh_coords=None, ): - r"""Return a list of coordinates from the :class:`Cube` that match the - provided criteria. + r"""Return a list of coordinates from the :class:`Cube` that match the provided criteria. Parameters ---------- @@ -2195,8 +2188,7 @@ def coord( dim_coords=None, mesh_coords=None, ): - r"""Return a single coordinate from the :class:`Cube` that matches the - provided criteria. + r"""Return a single coordinate from the :class:`Cube` that matches the provided criteria. Parameters ---------- @@ -2363,7 +2355,9 @@ def _any_meshcoord(self): @property def mesh(self): - r"""Return the unstructured :class:`~iris.experimental.ugrid.Mesh` + r"""Return the unstructured :class:`~iris.experimental.ugrid.Mesh` associated with the cube. + + Return the unstructured :class:`~iris.experimental.ugrid.Mesh` associated with the cube, if the cube has any :class:`~iris.experimental.ugrid.MeshCoord`\\ s, or ``None`` if it has none. @@ -2383,7 +2377,9 @@ def mesh(self): @property def location(self): - r"""Return the mesh "location" of the cube data, if the cube has any + r"""Return the mesh "location" of the cube data. 
+ + Return the mesh "location" of the cube data, if the cube has any :class:`~iris.experimental.ugrid.MeshCoord`\\ s, or ``None`` if it has none. @@ -2401,7 +2397,9 @@ def location(self): return result def mesh_dim(self): - r"""Return the cube dimension of the mesh, if the cube has any + r"""Return the cube dimension of the mesh. + + Return the cube dimension of the mesh, if the cube has any :class:`~iris.experimental.ugrid.MeshCoord`\\ s, or ``None`` if it has none. @@ -2458,8 +2456,7 @@ def cell_measures(self, name_or_cell_measure=None): return cell_measures def cell_measure(self, name_or_cell_measure=None): - """Return a single cell_measure given the same arguments as - :meth:`Cube.cell_measures`. + """Return a single cell_measure given the same arguments as :meth:`Cube.cell_measures`. Notes ----- @@ -2507,8 +2504,7 @@ def cell_measure(self, name_or_cell_measure=None): return cell_measures[0] def ancillary_variables(self, name_or_ancillary_variable=None): - """Return a list of ancillary variable in this cube fitting the given - criteria. + """Return a list of ancillary variable in this cube fitting the given criteria. Parameters ---------- @@ -2547,8 +2543,7 @@ def ancillary_variables(self, name_or_ancillary_variable=None): return ancillary_variables def ancillary_variable(self, name_or_ancillary_variable=None): - """Return a single ancillary_variable given the same arguments as - :meth:`Cube.ancillary_variables`. + """Return a single ancillary_variable given the same arguments as :meth:`Cube.ancillary_variables`. Notes ----- @@ -2600,7 +2595,9 @@ def ancillary_variable(self, name_or_ancillary_variable=None): @property def cell_methods(self): - """Tuple of :class:`iris.coords.CellMethod` representing the processing + """Tuple of :class:`iris.coords.CellMethod`. + + Tuple of :class:`iris.coords.CellMethod` representing the processing done on the phenomenon. 
""" @@ -2626,7 +2623,9 @@ def cell_methods(self, cell_methods: Iterable): self._metadata_manager.cell_methods = cell_methods def core_data(self): - """Retrieve the data array of this :class:`~iris.cube.Cube` in its + """Retrieve the data array of this :class:`~iris.cube.Cube`. + + Retrieve the data array of this :class:`~iris.cube.Cube` in its current state, which will either be real or lazy. If this :class:`~iris.cube.Cube` has lazy data, accessing its data @@ -2644,10 +2643,7 @@ def shape(self): @property def dtype(self): - """The data type of the values in the data array of this - :class:`~iris.cube.Cube`. - - """ + """The data type of the values in the data array of this :class:`~iris.cube.Cube`.""" return self._data_manager.dtype @property @@ -2656,7 +2652,9 @@ def ndim(self): return self._data_manager.ndim def lazy_data(self): - """Return a "lazy array" representing the Cube data. A lazy array + """Return a "lazy array" representing the Cube data. + + Return a "lazy array" representing the Cube data. A lazy array describes an array whose data values have not been loaded into memory from disk. @@ -2679,8 +2677,7 @@ def lazy_data(self): @property def data(self): - """The :class:`numpy.ndarray` representing the multi-dimensional data of - the cube. + """The :class:`numpy.ndarray` representing the multi-dimensional data of the cube. Notes ----- @@ -2753,10 +2750,7 @@ def dim_coords(self): @property def aux_coords(self): - """Return a tuple of all the auxiliary coordinates, ordered by - dimension(s). - - """ + """Return a tuple of all the auxiliary coordinates, ordered by dimension(s).""" return tuple( ( coord @@ -2769,10 +2763,7 @@ def aux_coords(self): @property def derived_coords(self): - """Return a tuple of all the coordinates generated by the coordinate - factories. 
- - """ + """Return a tuple of all the coordinates generated by the coordinate factories.""" return tuple( factory.make_coord(self.coord_dims) for factory in sorted( @@ -2786,7 +2777,9 @@ def aux_factories(self): return tuple(self._aux_factories) def summary(self, shorten=False, name_padding=35): - """String summary of the Cube with name+units, a list of dim coord names + """Summary of the Cube. + + String summary of the Cube with name+units, a list of dim coord names versus length and, optionally, a summary of all other components. Parameters @@ -2823,7 +2816,9 @@ def _repr_html_(self): __iter__ = None def __getitem__(self, keys): - """Cube indexing (through use of square bracket notation) has been + """Cube indexing has been implemented at the data level. + + Cube indexing (through use of square bracket notation) has been implemented at the data level. That is, the indices provided to this method should be aligned to the data of the cube, and thus the indices requested must be applicable directly to the cube.data attribute. All @@ -2930,7 +2925,9 @@ def new_ancillary_variable_dims(av_): return cube def subset(self, coord): - """Get a subset of the cube by providing the desired resultant + """Get a subset of the cube by providing the desired resultant coordinate. + + Get a subset of the cube by providing the desired resultant coordinate. If the coordinate provided applies to the whole cube; the whole cube is returned. As such, the operation is not strict. @@ -2985,8 +2982,7 @@ def extract(self, constraint): return constraint.extract(self) def intersection(self, *args, **kwargs): - """Return the intersection of the cube with specified coordinate - ranges. + """Return the intersection of the cube with specified coordinate ranges. 
Coordinate ranges can be specified as: @@ -3440,7 +3436,9 @@ def slices_over(self, ref_to_slice): return self.slices(opposite_dims, ordered=False) def slices(self, ref_to_slice, ordered=True): - """Return an iterator of all subcubes given the coordinates or dimension + """Return an iterator of all subcubes given the coordinates or dimension indices. + + Return an iterator of all subcubes given the coordinates or dimension indices desired to be present in each subcube. Parameters @@ -3589,7 +3587,7 @@ def remap_cube_metadata(metadata_and_dims): ) def xml(self, checksum=False, order=True, byteorder=True): - """Returns a fully valid CubeML string representation of the Cube.""" + """Return a fully valid CubeML string representation of the Cube.""" doc = Document() cube_xml_element = self._xml_element( @@ -3762,7 +3760,7 @@ def _order(array): return cube_xml_element def copy(self, data=None): - """Returns a deep copy of this cube. + """Return a deep copy of this cube. Parameters ---------- @@ -3930,8 +3928,7 @@ def __neg__(self): # END OPERATOR OVERLOADS def collapsed(self, coords, aggregator, **kwargs): - """Collapse one or more dimensions over the cube given the coordinate/s - and an aggregation. + """Collapse one or more dimensions over the cube given the coordinate/s and an aggregation. Examples of aggregations that may be used include :data:`~iris.analysis.COUNT` and :data:`~iris.analysis.MAX`. @@ -4455,7 +4452,9 @@ def aggregated_by(self, coords, aggregator, climatological=False, **kwargs): return aggregateby_cube def rolling_window(self, coord, aggregator, window, **kwargs): - """Perform rolling window aggregation on a cube given a coordinate, an + """Perform rolling window aggregation on a cube. + + Perform rolling window aggregation on a cube given a coordinate, an aggregation method and a window size. 
Parameters @@ -4660,7 +4659,9 @@ def rolling_window(self, coord, aggregator, window, **kwargs): return result def interpolate(self, sample_points, scheme, collapse_scalar=True): - """Interpolate from this :class:`~iris.cube.Cube` to the given + """Interpolate from this :class:`~iris.cube.Cube` to the given sample points. + + Interpolate from this :class:`~iris.cube.Cube` to the given sample points using the given interpolation scheme. Parameters @@ -4745,7 +4746,9 @@ def interpolate(self, sample_points, scheme, collapse_scalar=True): return interp(points, collapse_scalar=collapse_scalar) def regrid(self, grid, scheme): - r"""Regrid this :class:`~iris.cube.Cube` on to the given target `grid` + r"""Regrid this :class:`~iris.cube.Cube` on to the given target `grid`. + + Regrid this :class:`~iris.cube.Cube` on to the given target `grid` using the given regridding `scheme`. Parameters @@ -4863,7 +4866,9 @@ def keys(self): def sorted_axes(axes): - """Returns the axis names sorted alphabetically, with the exception that + """Return the axis names sorted alphabetically. + + Return the axis names sorted alphabetically, with the exception that 't', 'z', 'y', and, 'x' are sorted to the end. """ diff --git a/lib/iris/exceptions.py b/lib/iris/exceptions.py index c869c1c6c8..854785ef56 100644 --- a/lib/iris/exceptions.py +++ b/lib/iris/exceptions.py @@ -67,10 +67,7 @@ class InvalidCubeError(IrisError): class ConstraintMismatchError(IrisError): - """Raised when a constraint operation has failed to find the correct number - of results. - - """ + """Raised when a constraint operation has failed to find the correct number of results.""" pass @@ -98,18 +95,17 @@ class IgnoreCubeException(IrisError): class ConcatenateError(IrisError): - """Raised when concatenate is expected to produce a single cube, but fails to - do so. 
- - """ + """Raised when concatenate is expected to produce a single cube, but fails to do so.""" def __init__(self, differences): - """Creates a ConcatenateError with a list of textual descriptions of - the differences which prevented a concatenate. + """Create a ConcatenateError with a list of textual descriptions of differences. - Args: + Create a ConcatenateError with a list of textual descriptions of + the differences which prevented a concatenate. - * differences: + Parameters + ---------- + differences : list of str The list of strings which describe the differences. """ @@ -122,18 +118,17 @@ def __str__(self): class MergeError(IrisError): - """Raised when merge is expected to produce a single cube, but fails to - do so. - - """ + """Raised when merge is expected to produce a single cube, but fails to do so.""" def __init__(self, differences): - """Creates a MergeError with a list of textual descriptions of - the differences which prevented a merge. + """Create a MergeError with a list of textual descriptions of the differences. - Args: + Creates a MergeError with a list of textual descriptions of + the differences which prevented a merge. - * differences: + Parameters + ---------- + differences : list of str The list of strings which describe the differences. """ diff --git a/lib/iris/experimental/raster.py b/lib/iris/experimental/raster.py index 16421947f6..ba7efc68b0 100644 --- a/lib/iris/experimental/raster.py +++ b/lib/iris/experimental/raster.py @@ -2,8 +2,7 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -"""Experimental module for importing/exporting raster data from Iris cubes using -the GDAL library. +"""Experimental module for importing/exporting raster data from Iris cubes using the GDAL library. See also: `GDAL - Geospatial Data Abstraction Library `_. 
@@ -42,20 +41,31 @@ def _gdal_write_array(x_min, x_step, y_max, y_step, coord_system, data, fname, ftype): """Use GDAL WriteArray to export data as a 32-bit raster image. + Requires the array data to be of the form: North-at-top and West-on-left. - Args: - * x_min: Minimum X coordinate bounds value. - * x_step: Change in X coordinate per cell. - * y_max: Maximum Y coordinate bounds value. - * y_step: Change in Y coordinate per cell. - * coord_system (iris.coord_systems.CoordSystem): - Coordinate system for X and Y. - * data (numpy.ndarray): 2d array of values to export - * fname (string): Output file name. - * ftype (string): Export file type. - + Parameters + ---------- + x_min : + Minimum X coordinate bounds value. + x_step : + Change in X coordinate per cell. + y_max : + Maximum Y coordinate bounds value. + y_step : + Change in Y coordinate per cell. + coord_system : iris.coord_systems.CoordSystem + Coordinate system for X and Y. + data : numpy.ndarray + 2d array of values to export + fname : str + Output file name. + ftype : str + Export file type. + + Notes + ----- .. note:: Projection information is currently not written to the output. @@ -97,7 +107,7 @@ def _gdal_write_array(x_min, x_step, y_max, y_step, coord_system, data, fname, f def export_geotiff(cube, fname): - """Writes cube data to raster file format as a PixelIsArea GeoTiff image. + """Write cube data to raster file format as a PixelIsArea GeoTiff image. .. deprecated:: 3.2.0 @@ -107,11 +117,16 @@ def export_geotiff(cube, fname): Developers to discuss how to retain it (which could include reversing the deprecation). - Args: - * cube (Cube): The 2D regularly gridded cube slice to be exported. - The cube must have regular, contiguous bounds. - * fname (string): Output file name. + Parameters + ---------- + cube : Cube + The 2D regularly gridded cube slice to be exported. + The cube must have regular, contiguous bounds. + fname : str + Output file name. + Notes + ----- .. 
note:: For more details on GeoTiff specification and PixelIsArea, see: diff --git a/lib/iris/experimental/regrid.py b/lib/iris/experimental/regrid.py index b17c0cfeeb..74b356722f 100644 --- a/lib/iris/experimental/regrid.py +++ b/lib/iris/experimental/regrid.py @@ -51,7 +51,9 @@ def regrid_area_weighted_rectilinear_src_and_grid(src_cube, grid_cube, mdtol=0): - """Return a new cube with data values calculated using the area weighted + """Regrid using the area weighted mean of data values. + + Return a new cube with data values calculated using the area weighted mean of data values from src_grid regridded onto the horizontal grid of grid_cube. @@ -123,7 +125,9 @@ def regrid_area_weighted_rectilinear_src_and_grid(src_cube, grid_cube, mdtol=0): def regrid_weighted_curvilinear_to_rectilinear(src_cube, weights, grid_cube): - r"""Return a new cube with the data values calculated using the weighted + r"""Regrid using the weighted mean and the weights. + + Return a new cube with the data values calculated using the weighted mean of data values from :data:`src_cube` and the weights from :data:`weights` regridded onto the horizontal grid of :data:`grid_cube`. @@ -215,8 +219,7 @@ class PointInCell: """ def __init__(self, weights=None): - """Point-in-cell regridding scheme suitable for regridding over one - or more orthogonal coordinates. + """Point-in-cell regridding scheme for regridding over one or more orthogonal coordinates. .. warning:: @@ -239,8 +242,7 @@ class _ProjectedUnstructuredRegridder: """Regridding that uses scipy.interpolate.griddata.""" def __init__(self, src_cube, tgt_grid_cube, method, projection=None): - """Create a regridder for conversions between the source - and target grids. + """Create a regridder for conversions between the source and target grids. Parameters ---------- @@ -393,8 +395,7 @@ def _create_cube( grid_y_coord, regrid_callback, ): - """Return a new Cube for the result of regridding the source Cube onto - the new grid. 
+ """Return a new Cube for the result of regridding the source Cube onto the new grid. All the metadata and coordinates of the result Cube are copied from the source Cube, with two exceptions: @@ -520,7 +521,9 @@ def regrid_reference_surface( return result def __call__(self, src_cube): - """Regrid this :class:`~iris.cube.Cube` on to the target grid of + """Regrid to the target grid. + + Regrid this :class:`~iris.cube.Cube` on to the target grid of this :class:`UnstructuredProjectedRegridder`. The given cube must be defined with the same grid as the source @@ -614,7 +617,9 @@ class ProjectedUnstructuredLinear: """ def __init__(self, projection=None): - """Linear regridding scheme that uses scipy.interpolate.griddata on + """Linear regridding scheme. + + Linear regridding scheme that uses scipy.interpolate.griddata on projected unstructured data. .. note:: @@ -644,7 +649,9 @@ def __init__(self, projection=None): warn_deprecated(wmsg) def regridder(self, src_cube, target_grid): - """Creates a linear regridder to perform regridding, using + """Create a linear regridder to perform regridding. + + Creates a linear regridder to perform regridding, using scipy.interpolate.griddata from unstructured source points to the target grid. The regridding calculation is performed in the given projection. @@ -698,8 +705,7 @@ class ProjectedUnstructuredNearest: """ def __init__(self, projection=None): - """Nearest regridding scheme that uses scipy.interpolate.griddata on - projected unstructured data. + """Nearest regridding scheme that uses scipy.interpolate.griddata on projected unstructured data. .. note:: @@ -730,7 +736,9 @@ def __init__(self, projection=None): warn_deprecated(wmsg) def regridder(self, src_cube, target_grid): - """Creates a nearest-neighbour regridder to perform regridding, using + """Create a nearest-neighbour regridder to perform regridding. 
+ + Create a nearest-neighbour regridder to perform regridding, using scipy.interpolate.griddata from unstructured source points to the target grid. The regridding calculation is performed in the given projection. diff --git a/lib/iris/experimental/representation.py b/lib/iris/experimental/representation.py index 6b25023fa2..0b98c24395 100644 --- a/lib/iris/experimental/representation.py +++ b/lib/iris/experimental/representation.py @@ -116,7 +116,9 @@ def __init__(self, cube): self.units = escape(str(self.cube.units)) def _get_dim_names(self): - """Get dimension-describing coordinate names, or '--' if no coordinate] + """Get dimension-describing coordinate names. + + Get dimension-describing coordinate names, or '--' if no coordinate] describes the dimension. Note: borrows from `cube.summary`. @@ -146,7 +148,9 @@ def _get_lines(self): return self.cube_str.split("\n") def _get_bits(self, bits): - """Parse the body content (`bits`) of the cube string in preparation for + """Parse the body content (`bits`) of the cube string. + + Parse the body content (`bits`) of the cube string in preparation for being converted into table rows. """ @@ -175,7 +179,9 @@ def _get_bits(self, bits): self.sections_data[str_heading_name] = content def _make_header(self): - """Make the table header. This is similar to the summary of the cube, + """Make the table header. + + Make the table header. This is similar to the summary of the cube, but does not include dim shapes. These are included on the next table row down, and produced with `make_shapes_row`. @@ -199,16 +205,19 @@ def _make_shapes_row(self): return "\n".join(cell for cell in cells) def _make_row(self, title, body=None, col_span=0): - """Produce one row for the table body; i.e. + """Produce one row for the table body. + + For example:: + Coord namex-.... - `body` contains the content for each cell not in the left-most (title) - column. - If None, indicates this row is a title row (see below). - `title` contains the row heading. 
If `body` is None, indicates - that the row contains a sub-heading; - e.g. 'Dimension coordinates:'. - `col_span` indicates how many columns the string should span. + * `body` contains the content for each cell not in the left-most (title) + column. + If None, indicates this row is a title row (see below). + * `title` contains the row heading. If `body` is None, indicates + that the row contains a sub-heading; + e.g. 'Dimension coordinates:'. + * `col_span` indicates how many columns the string should span. """ row = [''] @@ -279,7 +288,7 @@ def _make_content(self): return "\n".join(element for element in elements) def repr_html(self): - """The `repr` interface for Jupyter.""" + """Represent html, the `repr` interface for Jupyter.""" # Deal with the header first. header = self._make_header() diff --git a/lib/iris/experimental/stratify.py b/lib/iris/experimental/stratify.py index d79102cc08..3e5fb1da9f 100644 --- a/lib/iris/experimental/stratify.py +++ b/lib/iris/experimental/stratify.py @@ -2,10 +2,7 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -"""Routines for putting data on new strata (aka. isosurfaces), often in the -Z direction. - -""" +"""Routines for putting data on new strata (aka. isosurfaces), often in the Z direction.""" from functools import partial @@ -17,7 +14,7 @@ def _copy_coords_without_z_dim(src, tgt, z_dim): - """Helper function to copy across non z-dimenson coordinates between cubes. + """Copy across non z-dimenson coordinates between cubes. Parameters ---------- @@ -52,7 +49,9 @@ def _copy_coords_without_z_dim(src, tgt, z_dim): def relevel(cube, src_levels, tgt_levels, axis=None, interpolator=None): - """Interpolate the cube onto the specified target levels, given the + """Perform vertical interpolation. + + Interpolate the cube onto the specified target levels, given the source levels of the cube. 
For example, suppose we have two datasets `P(i,j,k)` and `H(i,j,k)` @@ -64,8 +63,8 @@ def relevel(cube, src_levels, tgt_levels, axis=None, interpolator=None): that are generally monotonic in the direction of interpolation, such as height/pressure or salinity/depth. - Args: - + Parameters + ---------- cube : :class:`~iris.cube.Cube` The phenomenon data to be re-levelled. diff --git a/lib/iris/experimental/ugrid/__init__.py b/lib/iris/experimental/ugrid/__init__.py index 7cae55a1bd..ccdf05a387 100644 --- a/lib/iris/experimental/ugrid/__init__.py +++ b/lib/iris/experimental/ugrid/__init__.py @@ -3,8 +3,9 @@ # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -"""Infra-structure for unstructured mesh support, based on -CF UGRID Conventions (v1.0), https://ugrid-conventions.github.io/ugrid-conventions/. +"""Infra-structure for unstructured mesh support. + +Based on CF UGRID Conventions (v1.0), https://ugrid-conventions.github.io/ugrid-conventions/. .. note:: diff --git a/lib/iris/experimental/ugrid/cf.py b/lib/iris/experimental/ugrid/cf.py index bd4c076ba5..d00fd6ef24 100644 --- a/lib/iris/experimental/ugrid/cf.py +++ b/lib/iris/experimental/ugrid/cf.py @@ -16,7 +16,9 @@ class CFUGridConnectivityVariable(cf.CFVariable): - """A CF_UGRID connectivity variable points to an index variable identifying + """A CF_UGRID connectivity variable. + + A CF_UGRID connectivity variable points to an index variable identifying for every element (edge/face/volume) the indices of its corner nodes. The connectivity array will thus be a matrix of size n-elements x n-corners. For the indexing one may use either 0- or 1-based indexing; the convention @@ -86,7 +88,9 @@ def identify(cls, variables, ignore=None, target=None, warn=True): class CFUGridAuxiliaryCoordinateVariable(cf.CFVariable): - """A CF-UGRID auxiliary coordinate variable is a CF-netCDF auxiliary + """A CF-UGRID auxiliary coordinate variable. 
+ + A CF-UGRID auxiliary coordinate variable is a CF-netCDF auxiliary coordinate variable representing the element (node/edge/face/volume) locations (latitude, longitude or other spatial coordinates, and optional elevation or other coordinates). These auxiliary coordinate variables will @@ -163,7 +167,9 @@ def identify(cls, variables, ignore=None, target=None, warn=True): class CFUGridMeshVariable(cf.CFVariable): - """A CF-UGRID mesh variable is a dummy variable for storing topology + """A CF-UGRID mesh variable is a dummy variable for storing topology information as attributes. + + A CF-UGRID mesh variable is a dummy variable for storing topology information as attributes. The mesh variable has the ``cf_role`` 'mesh_topology'. @@ -230,7 +236,9 @@ def identify(cls, variables, ignore=None, target=None, warn=True): class CFUGridGroup(cf.CFGroup): - """Represents a collection of 'NetCDF Climate and Forecast (CF) Metadata + """Represents a collection of CF Metadata Conventions variables and netCDF global attributes. + + Represents a collection of 'NetCDF Climate and Forecast (CF) Metadata Conventions' variables and netCDF global attributes. Specialisation of :class:`~iris.fileformats.cf.CFGroup` that includes extra @@ -255,10 +263,7 @@ def meshes(self): @property def non_data_variable_names(self): - """:class:`set` of the names of the CF-netCDF/CF-UGRID variables that are - not the data pay-load. - - """ + """:class:`set` of names of the CF-netCDF/CF-UGRID variables that are not the data pay-load.""" extra_variables = (self.connectivities, self.ugrid_coords, self.meshes) extra_result = set() for variable in extra_variables: diff --git a/lib/iris/experimental/ugrid/load.py b/lib/iris/experimental/ugrid/load.py index 07cc20a65a..b8665a499e 100644 --- a/lib/iris/experimental/ugrid/load.py +++ b/lib/iris/experimental/ugrid/load.py @@ -3,7 +3,9 @@ # This file is part of Iris and is released under the BSD license. 
# See LICENSE in the root of the repository for full licensing details.
-r"""Extensions to Iris' NetCDF loading to allow the construction of
+r"""Allow the construction of :class:`~iris.experimental.ugrid.mesh.Mesh`\\ es.
+
+Extensions to Iris' NetCDF loading to allow the construction of
 :class:`~iris.experimental.ugrid.mesh.Mesh`\\ es from UGRID data in the file.

 Eventual destination: :mod:`iris.fileformats.netcdf`.

@@ -48,7 +50,9 @@ class _WarnComboCfDefaultingIgnoring(_WarnComboCfDefaulting, IrisIgnoringWarning

 class ParseUGridOnLoad(threading.local):
     def __init__(self):
-        """A flag for dictating whether to use the experimental UGRID-aware
+        """Thread-safe state to enable UGRID-aware NetCDF loading.
+
+        A flag for dictating whether to use the experimental UGRID-aware
         version of Iris NetCDF loading. Object is thread-safe.

         Use via the run-time switch
@@ -97,7 +101,7 @@ def context(self):


 def _meshes_from_cf(cf_reader):
-    """Common behaviour for extracting meshes from a CFReader.
+    """Mesh from cf, common behaviour for extracting meshes from a CFReader.

     Simple now, but expected to increase in complexity as Mesh sharing develops.

@@ -230,7 +234,9 @@ def load_meshes(uris, var_name=None):


 def _build_aux_coord(coord_var, file_path):
-    """Construct a :class:`~iris.coords.AuxCoord` from a given
+    """Construct a :class:`~iris.coords.AuxCoord`.
+
+    Construct a :class:`~iris.coords.AuxCoord` from a given
     :class:`~iris.experimental.ugrid.cf.CFUGridAuxiliaryCoordinateVariable`,
     and guess its mesh axis.

@@ -283,7 +289,9 @@


 def _build_connectivity(connectivity_var, file_path, element_dims):
-    """Construct a :class:`~iris.experimental.ugrid.mesh.Connectivity` from a
+    """Construct a :class:`~iris.experimental.ugrid.mesh.Connectivity`.
+
+    Construct a :class:`~iris.experimental.ugrid.mesh.Connectivity` from a
     given :class:`~iris.experimental.ugrid.cf.CFUGridConnectivityVariable`,
     and identify the name of its first dimension.
@@ -325,10 +333,12 @@ def _build_connectivity(connectivity_var, file_path, element_dims): def _build_mesh(cf, mesh_var, file_path): - """Construct a :class:`~iris.experimental.ugrid.mesh.Mesh` from a given + """Construct a :class:`~iris.experimental.ugrid.mesh.Mesh`. + + Construct a :class:`~iris.experimental.ugrid.mesh.Mesh` from a given :class:`~iris.experimental.ugrid.cf.CFUGridMeshVariable`. - todo: integrate with standard loading API post-pyke. + TODO: integrate with standard loading API post-pyke. """ # TODO: integrate with standard saving API when no longer 'experimental'. @@ -458,11 +468,13 @@ def _build_mesh(cf, mesh_var, file_path): def _build_mesh_coords(mesh, cf_var): - """Construct a tuple of :class:`~iris.experimental.ugrid.mesh.MeshCoord` using + """Construct a tuple of :class:`~iris.experimental.ugrid.mesh.MeshCoord`. + + Construct a tuple of :class:`~iris.experimental.ugrid.mesh.MeshCoord` using from a given :class:`~iris.experimental.ugrid.mesh.Mesh` and :class:`~iris.fileformats.cf.CFVariable`. - todo: integrate with standard loading API post-pyke. + TODO: integrate with standard loading API post-pyke. """ # TODO: integrate with standard saving API when no longer 'experimental'. diff --git a/lib/iris/experimental/ugrid/mesh.py b/lib/iris/experimental/ugrid/mesh.py index 79e67593d2..147b1fd513 100644 --- a/lib/iris/experimental/ugrid/mesh.py +++ b/lib/iris/experimental/ugrid/mesh.py @@ -3,7 +3,7 @@ # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -"""Iris' data model representation of CF UGrid's Mesh and its constituent parts. +"""Iris data model representation of CF UGrid's Mesh and its constituent parts. Eventual destination: dedicated module in :mod:`iris` root. @@ -84,7 +84,9 @@ class Connectivity(_DimensionalMetadata): - """A CF-UGRID topology connectivity, describing the topological relationship + """CF-UGRID topology. 
+ + A CF-UGRID topology connectivity, describing the topological relationship between two types of mesh element. One or more connectivities make up a CF-UGRID topology - a constituent of a CF-UGRID mesh. @@ -117,7 +119,7 @@ def __init__( start_index=0, location_axis=0, ): - """Constructs a single connectivity. + """Construct a single connectivity. Parameters ---------- @@ -215,8 +217,8 @@ def _values(self, values): @property def cf_role(self): - """The category of topological relationship that this connectivity - describes. + """The category of topological relationship that this connectivity describes. + **Read-only** - validity of :attr:`indices` is dependent on :attr:`cf_role`. A new :class:`Connectivity` must therefore be defined if a different :attr:`cf_role` is needed. @@ -226,7 +228,9 @@ def cf_role(self): @property def location(self): - """Derived from the connectivity's :attr:`cf_role` - the first part, e.g. + """Derived from the connectivity's :attr:`cf_role`. + + Derived from the connectivity's :attr:`cf_role` - the first part, e.g. ``face`` in ``face_node_connectivity``. Refers to the elements that vary along the :attr:`location_axis` of the connectivity's :attr:`indices` array. @@ -236,7 +240,9 @@ def location(self): @property def connected(self): - """Derived from the connectivity's :attr:`cf_role` - the second part, e.g. + """Derived from the connectivity's :attr:`cf_role`. + + Derived from the connectivity's :attr:`cf_role` - the second part, e.g. ``node`` in ``face_node_connectivity``. Refers to the elements indexed by the values in the connectivity's :attr:`indices` array. @@ -245,8 +251,8 @@ def connected(self): @property def start_index(self): - """The base value of the connectivity's :attr:`indices` array; either - ``0`` or ``1``. + """The base value of the connectivity's :attr:`indices` array; either ``0`` or ``1``. + **Read-only** - validity of :attr:`indices` is dependent on :attr:`start_index`. 
A new :class:`Connectivity` must therefore be defined if a different :attr:`start_index` is needed. @@ -256,7 +262,9 @@ def start_index(self): @property def location_axis(self): - """The axis of the connectivity's :attr:`indices` array that varies + """The axis of the connectivity's :attr:`indices` array. + + The axis of the connectivity's :attr:`indices` array that varies over the connectivity's :attr:`location` elements. Either ``0`` or ``1``. **Read-only** - validity of :attr:`indices` is dependent on :attr:`location_axis`. Use :meth:`transpose` to create a new, transposed @@ -267,7 +275,9 @@ def location_axis(self): @property def connected_axis(self): - """Derived as the alternate value of :attr:`location_axis` - each must + """Derived as the alternate value of :attr:`location_axis`. + + Derived as the alternate value of :attr:`location_axis` - each must equal either ``0`` or ``1``. The axis of the connectivity's :attr:`indices` array that varies over the :attr:`connected` elements associated with each :attr:`location` element. @@ -277,11 +287,14 @@ def connected_axis(self): @property def indices(self): - """The index values describing the topological relationship of the + """The index values describing the topological relationship of the connectivity. + + The index values describing the topological relationship of the connectivity, as a NumPy array. Masked points indicate a :attr:`location` element with fewer :attr:`connected` elements than other :attr:`location` elements described in this array - unused index 'slots' are masked. + **Read-only** - index values are only meaningful when combined with an appropriate :attr:`cf_role`, :attr:`start_index` and :attr:`location_axis`. A new :class:`Connectivity` must therefore be @@ -392,7 +405,9 @@ def indices_error(message): ) def validate_indices(self): - """Perform a thorough validity check of this connectivity's + """Perform a thorough validity check of this connectivity's :attr:`indices`. 
+ + Perform a thorough validity check of this connectivity's :attr:`indices`. Includes checking the number of :attr:`connected` elements associated with each :attr:`location` element (specified using masks on the :attr:`indices` array) against the :attr:`cf_role`. @@ -436,7 +451,9 @@ def __eq__(self, other): return eq def transpose(self): - """Create a new :class:`Connectivity`, identical to this one but with the + """Transpose :class:`Connectivity`. + + Create a new :class:`Connectivity`, identical to this one but with the :attr:`indices` array transposed and the :attr:`location_axis` value flipped. Returns @@ -477,7 +494,9 @@ def lazy_indices(self): return super()._lazy_values() def core_indices(self): - """The indices array at the core of this connectivity, which may be a + """Return the indices array at the core of this connectivity. + + The indices array at the core of this connectivity, which may be a NumPy array or a Dask array. Returns @@ -488,8 +507,7 @@ def core_indices(self): return super()._core_values() def has_lazy_indices(self): - """Return a boolean indicating whether the connectivity's :attr:`indices` - array is a lazy Dask array or not. + """Check if the connectivity's :attr:`indices` array is a lazy Dask array or not. Returns ------- @@ -499,7 +517,9 @@ def has_lazy_indices(self): return super()._has_lazy_values() def lazy_location_lengths(self): - """Return a lazy array representing the number of :attr:`connected` + """Return a lazy array representing the number of :attr:`connected` elements. + + Return a lazy array representing the number of :attr:`connected` elements associated with each of the connectivity's :attr:`location` elements, accounting for masks if present. @@ -524,7 +544,9 @@ def lazy_location_lengths(self): return max_location_size - location_mask_counts def location_lengths(self): - """Return a NumPy array representing the number of :attr:`connected` + """Return a NumPy array representing the number of :attr:`connected` elements. 
+ + Return a NumPy array representing the number of :attr:`connected` elements associated with each of the connectivity's :attr:`location` elements, accounting for masks if present. @@ -554,7 +576,9 @@ def xml_element(self, doc): class Mesh(CFVariableMixin): - """A container representing the UGRID ``cf_role`` ``mesh_topology``, supporting + """A container representing the UGRID ``cf_role`` ``mesh_topology``. + + A container representing the UGRID ``cf_role`` ``mesh_topology``, supporting 1D network, 2D triangular, and 2D flexible mesh topologies. .. note:: @@ -669,8 +693,7 @@ def normalise(element, axis): @classmethod def from_coords(cls, *coords): - r"""Construct a :class:`Mesh` by derivation from one or more - :class:`~iris.coords.Coord`\\ s. + r"""Construct a :class:`Mesh` by derivation from one or more :class:`~iris.coords.Coord`\\ s. The :attr:`~Mesh.topology_dimension`, :class:`~iris.coords.Coord` membership and :class:`Connectivity` membership are all determined @@ -1069,10 +1092,7 @@ def _set_dimension_names(self, node, edge, face, reset=False): @property def all_connectivities(self): - """All the :class:`~iris.experimental.ugrid.mesh.Connectivity` instances - of the :class:`Mesh`. - - """ + """All the :class:`~iris.experimental.ugrid.mesh.Connectivity` instances of the :class:`Mesh`.""" return self._connectivity_manager.all_members @property @@ -1082,7 +1102,9 @@ def all_coords(self): @property def boundary_node_connectivity(self): - """The *optional* UGRID ``boundary_node_connectivity`` + """The *optional* UGRID ``boundary_node_connectivity`` :class:`~iris.experimental.ugrid.mesh.Connectivity`. + + The *optional* UGRID ``boundary_node_connectivity`` :class:`~iris.experimental.ugrid.mesh.Connectivity` of the :class:`Mesh`. @@ -1091,18 +1113,12 @@ def boundary_node_connectivity(self): @property def edge_coords(self): - """The *optional* UGRID ``edge`` :class:`~iris.coords.AuxCoord` coordinates - of the :class:`Mesh`. 
- - """ + """The *optional* UGRID ``edge`` :class:`~iris.coords.AuxCoord` coordinates of the :class:`Mesh`.""" return self._coord_manager.edge_coords @property def edge_dimension(self): - """The *optionally required* UGRID NetCDF variable name for the ``edge`` - dimension. - - """ + """The *optionally required* UGRID NetCDF variable name for the ``edge`` dimension.""" return self._metadata_manager.edge_dimension @edge_dimension.setter @@ -1115,7 +1131,9 @@ def edge_dimension(self, name): @property def edge_face_connectivity(self): - """The *optional* UGRID ``edge_face_connectivity`` + """The *optional* UGRID ``edge_face_connectivity`` :class:`~iris.experimental.ugrid.mesh.Connectivity`. + + The *optional* UGRID ``edge_face_connectivity`` :class:`~iris.experimental.ugrid.mesh.Connectivity` of the :class:`Mesh`. @@ -1124,7 +1142,9 @@ def edge_face_connectivity(self): @property def edge_node_connectivity(self): - """The UGRID ``edge_node_connectivity`` + """The UGRID ``edge_node_connectivity`` :class:`~iris.experimental.ugrid.mesh.Connectivity`. + + The UGRID ``edge_node_connectivity`` :class:`~iris.experimental.ugrid.mesh.Connectivity` of the :class:`Mesh`, which is **required** for :attr:`Mesh.topology_dimension` of ``1``, and *optionally required* for @@ -1135,18 +1155,12 @@ def edge_node_connectivity(self): @property def face_coords(self): - """The *optional* UGRID ``face`` :class:`~iris.coords.AuxCoord` coordinates - of the :class:`Mesh`. - - """ + """The *optional* UGRID ``face`` :class:`~iris.coords.AuxCoord` coordinates of the :class:`Mesh`.""" return self._coord_manager.face_coords @property def face_dimension(self): - """The *optionally required* UGRID NetCDF variable name for the ``face`` - dimension. 
- - """ + """The *optional* UGRID NetCDF variable name for the ``face`` dimension.""" return self._metadata_manager.face_dimension @face_dimension.setter @@ -1168,7 +1182,9 @@ def face_dimension(self, name): @property def face_edge_connectivity(self): - """The *optional* UGRID ``face_edge_connectivity`` + """The *optional* UGRID ``face_edge_connectivity``:class:`~iris.experimental.ugrid.mesh.Connectivity`. + + The *optional* UGRID ``face_edge_connectivity`` :class:`~iris.experimental.ugrid.mesh.Connectivity` of the :class:`Mesh`. @@ -1178,7 +1194,9 @@ def face_edge_connectivity(self): @property def face_face_connectivity(self): - """The *optional* UGRID ``face_face_connectivity`` + """The *optional* UGRID ``face_face_connectivity`` :class:`~iris.experimental.ugrid.mesh.Connectivity`. + + The *optional* UGRID ``face_face_connectivity`` :class:`~iris.experimental.ugrid.mesh.Connectivity` of the :class:`Mesh`. @@ -1187,7 +1205,9 @@ def face_face_connectivity(self): @property def face_node_connectivity(self): - """The UGRID ``face_node_connectivity`` + """Return ``face_node_connectivity``:class:`~iris.experimental.ugrid.mesh.Connectivity`. + + The UGRID ``face_node_connectivity`` :class:`~iris.experimental.ugrid.mesh.Connectivity` of the :class:`Mesh`, which is **required** for :attr:`Mesh.topology_dimension` of ``2``, and *optionally required* for :attr:`Mesh.topology_dimension` @@ -1198,10 +1218,7 @@ def face_node_connectivity(self): @property def node_coords(self): - """The **required** UGRID ``node`` :class:`~iris.coords.AuxCoord` coordinates - of the :class:`Mesh`. - - """ + """The **required** UGRID ``node`` :class:`~iris.coords.AuxCoord` coordinates of the :class:`Mesh`.""" return self._coord_manager.node_coords @property @@ -1283,7 +1300,9 @@ def connectivities( contains_edge=None, contains_face=None, ): - """Return all :class:`~iris.experimental.ugrid.mesh.Connectivity` + """Return all :class:`~iris.experimental.ugrid.mesh.Connectivity`. 
+ + Return all :class:`~iris.experimental.ugrid.mesh.Connectivity` instances from the :class:`Mesh` that match the provided criteria. Criteria can be either specific properties or other objects with @@ -1371,7 +1390,9 @@ def connectivity( contains_edge=None, contains_face=None, ): - """Return a single :class:`~iris.experimental.ugrid.mesh.Connectivity` + """Return a single :class:`~iris.experimental.ugrid.mesh.Connectivity`. + + Return a single :class:`~iris.experimental.ugrid.mesh.Connectivity` from the :class:`Mesh` that matches the provided criteria. Criteria can be either specific properties or other objects with @@ -1465,7 +1486,9 @@ def coord( include_edges=None, include_faces=None, ): - """Return a single :class:`~iris.coords.AuxCoord` coordinate from the + """Return a single :class:`~iris.coords.AuxCoord` coordinate. + + Return a single :class:`~iris.coords.AuxCoord` coordinate from the :class:`Mesh` that matches the provided criteria. Criteria can be either specific properties or other objects with @@ -1548,7 +1571,9 @@ def coords( include_edges=None, include_faces=None, ): - """Return all :class:`~iris.coords.AuxCoord` coordinates from the :class:`Mesh` that + """Return all :class:`~iris.coords.AuxCoord` coordinates from the :class:`Mesh`. + + Return all :class:`~iris.coords.AuxCoord` coordinates from the :class:`Mesh` that match the provided criteria. Criteria can be either specific properties or other objects with @@ -1626,7 +1651,9 @@ def remove_connectivities( contains_edge=None, contains_face=None, ): - """Remove one or more :class:`~iris.experimental.ugrid.mesh.Connectivity` + """Remove one or more :class:`~iris.experimental.ugrid.mesh.Connectivity`. + + Remove one or more :class:`~iris.experimental.ugrid.mesh.Connectivity` from the :class:`Mesh` that match the provided criteria. 
Criteria can be either specific properties or other objects with @@ -1710,7 +1737,9 @@ def remove_coords( include_edges=None, include_faces=None, ): - """Remove one or more :class:`~iris.coords.AuxCoord` from the :class:`Mesh` + """Remove one or more :class:`~iris.coords.AuxCoord` from the :class:`Mesh`. + + Remove one or more :class:`~iris.coords.AuxCoord` from the :class:`Mesh` that match the provided criteria. Criteria can be either specific properties or other objects with @@ -1780,8 +1809,7 @@ def remove_coords( return self._coord_manager.remove(**kwargs) def xml_element(self, doc): - """Create the :class:`xml.dom.minidom.Element` that describes this - :class:`Mesh`. + """Create the :class:`xml.dom.minidom.Element` that describes this :class:`Mesh`. Parameters ---------- @@ -1812,7 +1840,9 @@ def xml_element(self, doc): # # return the lazy AuxCoord(...), AuxCoord(...) def to_MeshCoord(self, location, axis): - """Generate a :class:`~iris.experimental.ugrid.mesh.MeshCoord` that + """Generate a :class:`~iris.experimental.ugrid.mesh.MeshCoord`. + + Generate a :class:`~iris.experimental.ugrid.mesh.MeshCoord` that references the current :class:`Mesh`, and passing through the ``location`` and ``axis`` arguments. @@ -1839,7 +1869,9 @@ def to_MeshCoord(self, location, axis): return MeshCoord(mesh=self, location=location, axis=axis) def to_MeshCoords(self, location): - r"""Generate a tuple of + r"""Generate a tuple of :class:`~iris.experimental.ugrid.mesh.MeshCoord`\\ s. + + Generate a tuple of :class:`~iris.experimental.ugrid.mesh.MeshCoord`\\ s, each referencing the current :class:`Mesh`, one for each :attr:`AXES` value, passing through the ``location`` argument. @@ -1866,7 +1898,9 @@ def to_MeshCoords(self, location): return tuple(result) def dimension_names_reset(self, node=False, edge=False, face=False): - """Reset the name used for the NetCDF variable representing the ``node``, + """Reset the name used for the NetCDF variable. 
+ + Reset the name used for the NetCDF variable representing the ``node``, ``edge`` and/or ``face`` dimension to ``None``. Parameters @@ -1885,7 +1919,9 @@ def dimension_names_reset(self, node=False, edge=False, face=False): return self._set_dimension_names(node, edge, face, reset=True) def dimension_names(self, node=None, edge=None, face=None): - """Assign the name to be used for the NetCDF variable representing + """Assign the name to be used for the NetCDF variable. + + Assign the name to be used for the NetCDF variable representing the ``node``, ``edge`` and ``face`` dimension. The default value of ``None`` will not be assigned to clear the @@ -1914,7 +1950,9 @@ def cf_role(self): @property def topology_dimension(self): - """The UGRID ``topology_dimension`` attribute represents the highest + """UGRID ``topology_dimension`` attribute. + + The UGRID ``topology_dimension`` attribute represents the highest dimensionality of all the geometric elements (node, edge, face) represented within the :class:`Mesh`. @@ -1923,7 +1961,8 @@ def topology_dimension(self): class _Mesh1DCoordinateManager: - """TBD: require clarity on coord_systems validation + """TBD: require clarity on coord_systems validation. + TBD: require clarity on __eq__ support TBD: rationalise self.coords() logic with other manager and Cube. @@ -2112,7 +2151,9 @@ def _add(self, coords): setattr(self, member_y, coords[1]) def add(self, node_x=None, node_y=None, edge_x=None, edge_y=None): - """Use self.remove(edge_x=True) to remove a coordinate e.g., using the + """Use self.remove(edge_x=True) to remove a coordinate. + + Use self.remove(edge_x=True) to remove a coordinate e.g., using the pattern self.add(edge_x=None) will not remove the edge_x coordinate. """ @@ -2822,8 +2863,9 @@ def copy(self, points=None, bounds=None): return new_coord def __deepcopy__(self, memo): - """Make this equivalent to "shallow" copy, returning a new MeshCoord based - on the same Mesh. + """Make this equivalent to "shallow" copy. 
+ + Returns a new MeshCoord based on the same Mesh. Required to prevent cube copying from copying the Mesh, which would prevent "cube.copy() == cube" : see notes for :meth:`copy`. @@ -2908,7 +2950,9 @@ def summary(self, *args, **kwargs): return result def _construct_access_arrays(self): - """Build lazy points and bounds arrays, providing dynamic access via the + """Build lazy points and bounds arrays. + + Build lazy points and bounds arrays, providing dynamic access via the Mesh, according to the location and axis. Returns diff --git a/lib/iris/experimental/ugrid/save.py b/lib/iris/experimental/ugrid/save.py index 00891b3044..c78289fe26 100644 --- a/lib/iris/experimental/ugrid/save.py +++ b/lib/iris/experimental/ugrid/save.py @@ -3,8 +3,7 @@ # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -"""Extensions to Iris' NetCDF saving to allow -:class:`~iris.experimental.ugrid.mesh.Mesh` saving in UGRID format. +"""Extension to Iris' NetCDF saving to allow :class:`~iris.experimental.ugrid.mesh.Mesh` saving in UGRID format. Eventual destination: :mod:`iris.fileformats.netcdf`. diff --git a/lib/iris/fileformats/_ff.py b/lib/iris/fileformats/_ff.py index 3edd29c410..301a45736e 100644 --- a/lib/iris/fileformats/_ff.py +++ b/lib/iris/fileformats/_ff.py @@ -125,10 +125,7 @@ class _WarnComboLoadingDefaulting(IrisDefaultingWarning, IrisLoadWarning): class Grid: - """An abstract class representing the default/file-level grid - definition for a FieldsFile. - - """ + """An abstract class representing the default/file-level grid definition for a FieldsFile.""" def __init__( self, @@ -178,8 +175,7 @@ def regular_y(self, subgrid): raise NotImplementedError() def vectors(self, subgrid): - """Return the X and Y coordinate vectors for the given sub-grid of - this grid. + """Return the X and Y coordinate vectors for the given sub-grid of this grid. 
Parameters ---------- @@ -218,8 +214,7 @@ def _x_vectors(self): return x_p, x_u def regular_x(self, subgrid): - """Return the "zeroth" value and step for the X coordinate on the - given sub-grid of this grid. + """Return the "zeroth" value & step for the X coord on the given sub-grid of this grid. Parameters ---------- @@ -238,7 +233,9 @@ def regular_x(self, subgrid): return bzx, bdx def regular_y(self, subgrid): - """Return the "zeroth" value and step for the Y coordinate on the + """Return the "zeroth" value & step for the Y coord on the given sub-grid of this grid. + + Return the "zeroth" value and step for the Y coordinate on the given sub-grid of this grid. Parameters @@ -300,7 +297,9 @@ class FFHeader: GRID_STAGGERING_CLASS = {3: NewDynamics, 6: ENDGame} def __init__(self, filename, word_depth=DEFAULT_FF_WORD_DEPTH): - """Create a FieldsFile header instance by reading the + """Create a FieldsFile header instance. + + Create a FieldsFile header instance by reading the FIXED_LENGTH_HEADER section of the FieldsFile, making the names defined in FF_HEADER available as attributes of a FFHeader instance. @@ -395,8 +394,7 @@ def _attribute_is_pointer_and_needs_addressing(self, name): return is_referenceable def shape(self, name): - """Return the dimension shape of the FieldsFile FIXED_LENGTH_HEADER - pointer attribute. + """Return the dimension shape of the FieldsFile FIXED_LENGTH_HEADER pointer attribute. Parameters ---------- @@ -438,7 +436,9 @@ class FF2PP: """A class to extract the individual PPFields from within a FieldsFile.""" def __init__(self, filename, read_data=False, word_depth=DEFAULT_FF_WORD_DEPTH): - """Create a FieldsFile to Post Process instance that returns a generator + """Create a generator of PPFields contained within the FieldsFile. + + Create a FieldsFile to Post Process instance that returns a generator of PPFields contained within the FieldsFile. 
Parameters @@ -804,7 +804,7 @@ def __iter__(self): def _parse_binary_stream(file_like, dtype=np.float64, count=-1): - """Replacement :func:`numpy.fromfile` due to python3 performance issues. + """Parse binary stream, replacement :func:`numpy.fromfile` due to python3 performance issues. Parameters ---------- @@ -835,7 +835,7 @@ def _parse_binary_stream(file_like, dtype=np.float64, count=-1): def load_cubes(filenames, callback, constraints=None): - """Loads cubes from a list of fields files filenames. + """Load cubes from a list of fields files filenames. Parameters ---------- @@ -859,7 +859,7 @@ def load_cubes(filenames, callback, constraints=None): def load_cubes_32bit_ieee(filenames, callback, constraints=None): - """Loads cubes from a list of 32bit ieee converted fieldsfiles filenames. + """Load cubes from a list of 32bit ieee converted fieldsfiles filenames. See Also -------- diff --git a/lib/iris/fileformats/_nc_load_rules/actions.py b/lib/iris/fileformats/_nc_load_rules/actions.py index 1532340d00..8f408716e6 100644 --- a/lib/iris/fileformats/_nc_load_rules/actions.py +++ b/lib/iris/fileformats/_nc_load_rules/actions.py @@ -103,7 +103,7 @@ def inner(engine, *args, **kwargs): @action_function def action_default(engine): - """Standard operations for every cube.""" + """Perform standard operations for every cube.""" hh.build_cube_metadata(engine) diff --git a/lib/iris/fileformats/_nc_load_rules/engine.py b/lib/iris/fileformats/_nc_load_rules/engine.py index e43a1c5c4b..111e8320b6 100644 --- a/lib/iris/fileformats/_nc_load_rules/engine.py +++ b/lib/iris/fileformats/_nc_load_rules/engine.py @@ -2,7 +2,9 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -"""A simple mimic of the Pyke 'knowledge_engine', for interfacing to the routines +"""A simple mimic of the Pyke 'knowledge_engine'. 
+ +A simple mimic of the Pyke 'knowledge_engine', for interfacing to the routines in 'iris.fileformats.netcdf' with minimal changes to that code. This allows us to replace the Pyke rules operation with the simpler pure-Python @@ -102,10 +104,7 @@ def get_kb(self): return self.facts def print_stats(self): - """No-op, called by - :meth:`iris.fileformats.netcdf._action_activation_stats`. - - """ + """No-op, called by :meth:`iris.fileformats.netcdf._action_activation_stats`.""" pass def add_case_specific_fact(self, fact_name, fact_arglist): diff --git a/lib/iris/fileformats/_nc_load_rules/helpers.py b/lib/iris/fileformats/_nc_load_rules/helpers.py index 6a135a211c..f656667e63 100644 --- a/lib/iris/fileformats/_nc_load_rules/helpers.py +++ b/lib/iris/fileformats/_nc_load_rules/helpers.py @@ -2,7 +2,9 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -"""All the pure-Python 'helper' functions which were previously included in the +"""Helper functions for NetCDF loading rules. + +All the pure-Python 'helper' functions which were previously included in the Pyke rules database 'fc_rules_cf.krb'. The 'action' routines now call these, as the rules used to do. @@ -258,22 +260,28 @@ class _WarnComboIgnoringCfLoad( def _split_cell_methods(nc_cell_methods: str) -> List[re.Match]: - """Split a CF cell_methods attribute string into a list of zero or more cell + """Split a CF cell_methods. + + Split a CF cell_methods attribute string into a list of zero or more cell methods, each of which is then parsed with a regex to return a list of match objects. - Args: - - * nc_cell_methods: The value of the cell methods attribute to be split. + Parameters + ---------- + nc_cell_methods : + The value of the cell methods attribute to be split. 
Returns ------- nc_cell_methods_matches: list of re.Match objects A list of re.Match objects associated with each parsed cell method + Notes + ----- Splitting is done based on words followed by colons outside of any brackets. Validation of anything other than being laid out in the expected format is left to the calling function. + """ # Find name candidates name_start_inds = [] @@ -327,24 +335,26 @@ class UnknownCellMethodWarning(iris.exceptions.IrisUnknownCellMethodWarning): pass -def parse_cell_methods(nc_cell_methods): - """Parse a CF cell_methods attribute string into a tuple of zero or - more CellMethod instances. - - Args: +def parse_cell_methods(nc_cell_methods, cf_name=None): + """Parse a CF cell_methods attribute string into a tuple of zero or more CellMethod instances. - * nc_cell_methods (str): + Parameters + ---------- + nc_cell_methods : str The value of the cell methods attribute to be parsed. Returns ------- iterable of :class:`iris.coords.CellMethod`. + Notes + ----- Multiple coordinates, intervals and comments are supported. If a method has a non-standard name a warning will be issued, but the results are not affected. 
""" + msg = None cell_methods = [] if nc_cell_methods is not None: for m in _split_cell_methods(nc_cell_methods): @@ -356,10 +366,16 @@ def parse_cell_methods(nc_cell_methods): method_words = method.split() if method_words[0].lower() not in _CM_KNOWN_METHODS: msg = "NetCDF variable contains unknown cell method {!r}" - warnings.warn( - msg.format("{}".format(method_words[0])), - category=UnknownCellMethodWarning, - ) + msg = msg.format(method_words[0]) + if cf_name: + name = "{}".format(cf_name) + msg = msg.replace("variable", "variable {!r}".format(name)) + else: + warnings.warn( + msg, + category=UnknownCellMethodWarning, + ) + msg = None d[_CM_METHOD] = method name = d[_CM_NAME] name = name.replace(" ", "") @@ -417,6 +433,9 @@ def parse_cell_methods(nc_cell_methods): comments=d[_CM_COMMENT], ) cell_methods.append(cell_method) + # only prints one warning, rather than each loop + if msg: + warnings.warn(msg, category=UnknownCellMethodWarning) return tuple(cell_methods) @@ -447,21 +466,7 @@ def build_cube_metadata(engine): # Incorporate cell methods nc_att_cell_methods = getattr(cf_var, CF_ATTR_CELL_METHODS, None) - with warnings.catch_warnings(record=True) as warning_records: - cube.cell_methods = parse_cell_methods(nc_att_cell_methods) - # Filter to get the warning we are interested in. - warning_records = [ - record - for record in warning_records - if issubclass(record.category, UnknownCellMethodWarning) - ] - if len(warning_records) > 0: - # Output an enhanced warning message. - warn_record = warning_records[0] - name = "{}".format(cf_var.cf_name) - msg = warn_record.message.args[0] - msg = msg.replace("variable", "variable {!r}".format(name)) - warnings.warn(message=msg, category=UnknownCellMethodWarning) + cube.cell_methods = parse_cell_methods(nc_att_cell_methods, cf_var.cf_name) # Set the cube global attributes. 
for attr_name, attr_value in cf_var.cf_group.global_attributes.items(): @@ -477,8 +482,11 @@ def build_cube_metadata(engine): ################################################################################ def _get_ellipsoid(cf_grid_var): - """Return a :class:`iris.coord_systems.GeogCS` using the relevant properties of + """Build a :class:`iris.coord_systems.GeogCS`. + + Return a :class:`iris.coord_systems.GeogCS` using the relevant properties of `cf_grid_var`. Returns None if no relevant properties are specified. + """ major = getattr(cf_grid_var, CF_ATTR_GRID_SEMI_MAJOR_AXIS, None) minor = getattr(cf_grid_var, CF_ATTR_GRID_SEMI_MINOR_AXIS, None) @@ -560,10 +568,7 @@ def build_rotated_coordinate_system(engine, cf_grid_var): ################################################################################ def build_transverse_mercator_coordinate_system(engine, cf_grid_var): - """Create a transverse Mercator coordinate system from the CF-netCDF - grid mapping variable. - - """ + """Create a transverse Mercator coordinate system from the CF-netCDF grid mapping variable.""" ellipsoid = _get_ellipsoid(cf_grid_var) latitude_of_projection_origin = getattr( @@ -603,10 +608,7 @@ def build_transverse_mercator_coordinate_system(engine, cf_grid_var): ################################################################################ def build_lambert_conformal_coordinate_system(engine, cf_grid_var): - """Create a Lambert conformal conic coordinate system from the CF-netCDF - grid mapping variable. 
- - """ + """Create a Lambert conformal conic coordinate system from the CF-netCDF grid mapping variable.""" ellipsoid = _get_ellipsoid(cf_grid_var) latitude_of_projection_origin = getattr( @@ -633,10 +635,7 @@ def build_lambert_conformal_coordinate_system(engine, cf_grid_var): ################################################################################ def build_stereographic_coordinate_system(engine, cf_grid_var): - """Create a stereographic coordinate system from the CF-netCDF - grid mapping variable. - - """ + """Create a stereographic coordinate system from the CF-netCDF grid mapping variable.""" ellipsoid = _get_ellipsoid(cf_grid_var) latitude_of_projection_origin = getattr( @@ -667,10 +666,7 @@ def build_stereographic_coordinate_system(engine, cf_grid_var): ################################################################################ def build_polar_stereographic_coordinate_system(engine, cf_grid_var): - """Create a polar stereographic coordinate system from the CF-netCDF - grid mapping variable. - - """ + """Create a polar stereographic coordinate system from the CF-netCDF grid mapping variable.""" ellipsoid = _get_ellipsoid(cf_grid_var) latitude_of_projection_origin = getattr( @@ -702,10 +698,7 @@ def build_polar_stereographic_coordinate_system(engine, cf_grid_var): ################################################################################ def build_mercator_coordinate_system(engine, cf_grid_var): - """Create a Mercator coordinate system from the CF-netCDF - grid mapping variable. 
- - """ + """Create a Mercator coordinate system from the CF-netCDF grid mapping variable.""" ellipsoid = _get_ellipsoid(cf_grid_var) longitude_of_projection_origin = getattr( @@ -732,10 +725,7 @@ def build_mercator_coordinate_system(engine, cf_grid_var): ################################################################################ def build_lambert_azimuthal_equal_area_coordinate_system(engine, cf_grid_var): - """Create a lambert azimuthal equal area coordinate system from the CF-netCDF - grid mapping variable. - - """ + """Create a lambert azimuthal equal area coordinate system from the CF-netCDF grid mapping variable.""" ellipsoid = _get_ellipsoid(cf_grid_var) latitude_of_projection_origin = getattr( @@ -760,10 +750,7 @@ def build_lambert_azimuthal_equal_area_coordinate_system(engine, cf_grid_var): ################################################################################ def build_albers_equal_area_coordinate_system(engine, cf_grid_var): - """Create a albers conical equal area coordinate system from the CF-netCDF - grid mapping variable. - - """ + """Create a albers conical equal area coordinate system from the CF-netCDF grid mapping variable.""" ellipsoid = _get_ellipsoid(cf_grid_var) latitude_of_projection_origin = getattr( @@ -790,10 +777,7 @@ def build_albers_equal_area_coordinate_system(engine, cf_grid_var): ################################################################################ def build_vertical_perspective_coordinate_system(engine, cf_grid_var): - """Create a vertical perspective coordinate system from the CF-netCDF - grid mapping variable. 
- - """ + """Create a vertical perspective coordinate system from the CF-netCDF grid mapping variables.""" ellipsoid = _get_ellipsoid(cf_grid_var) latitude_of_projection_origin = getattr( @@ -822,10 +806,7 @@ def build_vertical_perspective_coordinate_system(engine, cf_grid_var): ################################################################################ def build_geostationary_coordinate_system(engine, cf_grid_var): - """Create a geostationary coordinate system from the CF-netCDF - grid mapping variable. - - """ + """Create a geostationary coordinate system from the CF-netCDF grid mapping variable.""" ellipsoid = _get_ellipsoid(cf_grid_var) latitude_of_projection_origin = getattr( @@ -856,10 +837,7 @@ def build_geostationary_coordinate_system(engine, cf_grid_var): ################################################################################ def build_oblique_mercator_coordinate_system(engine, cf_grid_var): - """Create an oblique mercator coordinate system from the CF-netCDF - grid mapping variable. - - """ + """Create an oblique mercator coordinate system from the CF-netCDF grid mapping variable.""" ellipsoid = _get_ellipsoid(cf_grid_var) azimuth_of_central_line = getattr(cf_grid_var, CF_ATTR_GRID_AZIMUTH_CENT_LINE, None) @@ -985,10 +963,7 @@ def get_names(cf_coord_var, coord_name, attributes): ################################################################################ def get_cf_bounds_var(cf_coord_var): - """Return the CF variable representing the bounds of a coordinate - variable. 
- - """ + """Return the CF variable representing the bounds of a coordinate variable.""" attr_bounds = getattr(cf_coord_var, CF_ATTR_BOUNDS, None) attr_climatology = getattr(cf_coord_var, CF_ATTR_CLIMATOLOGY, None) @@ -1020,8 +995,7 @@ def get_cf_bounds_var(cf_coord_var): ################################################################################ def reorder_bounds_data(bounds_data, cf_bounds_var, cf_coord_var): - """Return a bounds_data array with the vertex dimension as the most - rapidly varying. + """Return a bounds_data array with the vertex dimension as the most rapidly varying. .. note:: @@ -1429,10 +1403,7 @@ def is_longitude(engine, cf_name): ################################################################################ def is_projection_x_coordinate(engine, cf_name): - """Determine whether the CF coordinate variable is a - projection_x_coordinate variable. - - """ + """Determine whether the CF coordinate variable is a projection_x_coordinate variable.""" cf_var = engine.cf_var.cf_group[cf_name] attr_name = getattr(cf_var, CF_ATTR_STD_NAME, None) or getattr( cf_var, CF_ATTR_LONG_NAME, None @@ -1442,10 +1413,7 @@ def is_projection_x_coordinate(engine, cf_name): ################################################################################ def is_projection_y_coordinate(engine, cf_name): - """Determine whether the CF coordinate variable is a - projection_y_coordinate variable. 
- - """ + """Determine whether the CF coordinate variable is a projection_y_coordinate variable.""" cf_var = engine.cf_var.cf_group[cf_name] attr_name = getattr(cf_var, CF_ATTR_STD_NAME, None) or getattr( cf_var, CF_ATTR_LONG_NAME, None @@ -1561,8 +1529,11 @@ def has_supported_mercator_parameters(engine, cf_name): ################################################################################ def has_supported_polar_stereographic_parameters(engine, cf_name): - """Determine whether the CF grid mapping variable has the supported + """Determine whether CF grid mapping variable supports Polar Stereographic. + + Determine whether the CF grid mapping variable has the supported values for the parameters of the Polar Stereographic projection. + """ is_valid = True cf_grid_var = engine.cf_var.cf_group[cf_name] diff --git a/lib/iris/fileformats/_structured_array_identification.py b/lib/iris/fileformats/_structured_array_identification.py index 05bd04036b..8dada77458 100644 --- a/lib/iris/fileformats/_structured_array_identification.py +++ b/lib/iris/fileformats/_structured_array_identification.py @@ -2,8 +2,11 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -r"""The purpose of this module is to provide utilities for the identification +r"""Identification of multi-dimensional structure in a flat sequence of homogeneous objects. + +The purpose of this module is to provide utilities for the identification of multi-dimensional structure in a flat sequence of homogeneous objects. + One application of this is to efficiently identify a higher dimensional structure from a sorted sequence of PPField instances; for an example, given a list of 12 PPFields, identification that there are 3 unique "time" values @@ -47,14 +50,13 @@ class _UnstructuredArrayException(Exception): - """Raised when an array has been incorrectly assumed to be - structured in a specific way. 
- - """ + """Raised when an array has been incorrectly assumed to be structured in a specific way.""" class ArrayStructure(namedtuple("ArrayStructure", ["stride", "unique_ordered_values"])): - """Represents the identified structure of an array, where stride is the + """Represent the identified structure of an array. + + Represents the identified structure of an array, where stride is the step between each unique value being seen in order in the flattened version of the array. @@ -92,7 +94,9 @@ def __new__(cls, stride, unique_ordered_values): @property def size(self): - """The ``size`` attribute is the number of the unique values in the + """Number of unique values in the original array. + + The ``size`` attribute is the number of the unique values in the original array. It is **not** the length of the original array. """ @@ -114,7 +118,9 @@ def __ne__(self, other): return not (self == other) def construct_array(self, size): - """The inverse operation of :func:`ArrayStructure.from_array`, returning + """Build 1D array. + + The inverse operation of :func:`ArrayStructure.from_array`, returning a 1D array of the given length with the appropriate repetition pattern. @@ -125,8 +131,7 @@ def construct_array(self, size): ) def nd_array_and_dims(self, original_array, target_shape, order="c"): - """Given a 1D array, and a target shape, construct an ndarray - and associated dimensions. + """Given a 1D array and a target shape, construct an ndarray and associated dimensions. Raises an _UnstructuredArrayException if no optimised shape array can be returned, in which case, simply reshaping the original_array would @@ -199,7 +204,9 @@ def nd_array_and_dims(self, original_array, target_shape, order="c"): @classmethod def from_array(cls, arr): - """Return the computed ArrayStructure for the given flat array + """Return the computed ArrayStructure for the given flat array. + + Return the computed ArrayStructure for the given flat array (if a structure exists, otherwise return None). 
""" @@ -284,7 +291,9 @@ def from_array(cls, arr): class GroupStructure: - """The GroupStructure class represents a collection of array structures along + """Represent a collection of array structures. + + The GroupStructure class represents a collection of array structures along with additional information such as the length of the arrays and the array order in which they are found (row-major or column-major). @@ -307,14 +316,17 @@ def __init__(self, length, component_structure, array_order="c"): @classmethod def from_component_arrays(cls, component_arrays, array_order="c"): - """Given a dictionary of component name to flattened numpy array, + """From component arrays. + + Given a dictionary of component name to flattened numpy array, return an :class:`GroupStructure` instance which is representative of the underlying array structures. - Args: - - * component_arrays - A dictionary mapping component name to the - full sized 1d (flattened) numpy array. + Parameters + ---------- + component_arrays : + A dictionary mapping component name to the full sized 1d (flattened) + numpy array. """ cmpt_structure = { @@ -329,7 +341,8 @@ def from_component_arrays(cls, component_arrays, array_order="c"): return cls(sizes[0], cmpt_structure, array_order=array_order) def _potentially_flattened_components(self): - """Return a generator of the components which could form non-trivial + """Return a generator of the components which could form non-trivial. + (i.e. ``length > 1``) array dimensions. """ @@ -342,8 +355,7 @@ def is_row_major(self): return self._array_order == "c" def possible_structures(self): - """Return a tuple containing the possible structures that this group - could have. + """Return a tuple containing the possible structures that this group could have. 
A structure in this case is an iterable of ``(name, ArrayStructure)`` pairs, one per dimension, of a possible @@ -443,7 +455,9 @@ def __str__(self): return "\n".join(result) def build_arrays(self, shape, elements_arrays): - """Given the target shape, and a dictionary mapping name to 1D array of + """Build Arrays. + + Given the target shape, and a dictionary mapping name to 1D array of :attr:`.length`, return a dictionary mapping element name to ``(ndarray, dims)``. diff --git a/lib/iris/fileformats/abf.py b/lib/iris/fileformats/abf.py index 13b0d8aa8d..76f3573882 100644 --- a/lib/iris/fileformats/abf.py +++ b/lib/iris/fileformats/abf.py @@ -192,7 +192,7 @@ def to_cube(self): def load_cubes(filespecs, callback=None): - """Loads cubes from a list of ABF filenames. + """Load cubes from a list of ABF filenames. Args: diff --git a/lib/iris/fileformats/cf.py b/lib/iris/fileformats/cf.py index ec0d66f8c2..d6dab22305 100644 --- a/lib/iris/fileformats/cf.py +++ b/lib/iris/fileformats/cf.py @@ -2,7 +2,9 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -"""Provides the capability to load netCDF files and interpret them +"""Provide capability to load netCDF files and interpret them. + +Provides the capability to load netCDF files and interpret them according to the 'NetCDF Climate and Forecast (CF) Metadata Conventions'. References @@ -132,7 +134,9 @@ def identify(self, variables, ignore=None, target=None, warn=True): pass def spans(self, cf_variable): - """Determine whether the dimensionality of this variable + """Determine dimensionality coverage. + + Determine whether the dimensionality of this variable is a subset of the specified target variable. 
Note that, by default scalar variables always span the @@ -243,7 +247,9 @@ def has_formula_terms(self): class CFAncillaryDataVariable(CFVariable): - """A CF-netCDF ancillary data variable is a variable that provides metadata + """CF-netCDF ancillary data variable. + + A CF-netCDF ancillary data variable is a variable that provides metadata about the individual values of another data variable. Identified by the CF-netCDF variable attribute 'ancillary_variables'. @@ -283,7 +289,9 @@ def identify(cls, variables, ignore=None, target=None, warn=True): class CFAuxiliaryCoordinateVariable(CFVariable): - """A CF-netCDF auxiliary coordinate variable is any netCDF variable that contains + """CF-netCDF auxiliary coordinate variable. + + A CF-netCDF auxiliary coordinate variable is any netCDF variable that contains coordinate data, but is not a CF-netCDF coordinate variable by definition. There is no relationship between the name of a CF-netCDF auxiliary coordinate @@ -330,7 +338,9 @@ def identify(cls, variables, ignore=None, target=None, warn=True): class CFBoundaryVariable(CFVariable): - """A CF-netCDF boundary variable is associated with a CF-netCDF variable that contains + """CF-netCDF boundary variable. + + A CF-netCDF boundary variable is associated with a CF-netCDF variable that contains coordinate data. When a data value provides information about conditions in a cell occupying a region of space/time or some other dimension, the boundary variable provides a description of cell extent. @@ -373,7 +383,9 @@ def identify(cls, variables, ignore=None, target=None, warn=True): return result def spans(self, cf_variable): - """Determine whether the dimensionality of this variable + """Determine dimensionality coverage. + + Determine whether the dimensionality of this variable is a subset of the specified target variable. 
Note that, by default scalar variables always span the @@ -402,7 +414,9 @@ def spans(self, cf_variable): class CFClimatologyVariable(CFVariable): - """A CF-netCDF climatology variable is associated with a CF-netCDF variable that contains + """CF-netCDF climatology variable. + + A CF-netCDF climatology variable is associated with a CF-netCDF variable that contains coordinate data. When a data value provides information about conditions in a cell occupying a region of space/time or some other dimension, the climatology variable provides a climatological description of cell extent. @@ -445,7 +459,9 @@ def identify(cls, variables, ignore=None, target=None, warn=True): return result def spans(self, cf_variable): - """Determine whether the dimensionality of this variable + """Determine dimensionality coverage. + + Determine whether the dimensionality of this variable is a subset of the specified target variable. Note that, by default scalar variables always span the @@ -474,7 +490,9 @@ def spans(self, cf_variable): class CFCoordinateVariable(CFVariable): - """A CF-netCDF coordinate variable is a one-dimensional variable with the same name + """A CF-netCDF coordinate variable. + + A CF-netCDF coordinate variable is a one-dimensional variable with the same name as its dimension, and it is defined as a numeric data type with values that are ordered monotonically. Missing values are not allowed in CF-netCDF coordinate variables. Also see [NUG] Section 2.3.1. @@ -528,7 +546,9 @@ def identify(cls, variables, ignore=None, target=None, warn=True): class _CFFormulaTermsVariable(CFVariable): - """A CF-netCDF formula terms variable corresponds to a term in a formula that + """CF-netCDF formula terms variable. + + A CF-netCDF formula terms variable corresponds to a term in a formula that allows dimensional vertical coordinate values to be computed from dimensionless vertical coordinate values and associated variables at specific grid points. 
@@ -596,7 +616,9 @@ def __repr__(self):
 
 
 class CFGridMappingVariable(CFVariable):
-    """A CF-netCDF grid mapping variable contains a list of specific attributes that
+    """CF-netCDF grid mapping variable.
+
+    A CF-netCDF grid mapping variable contains a list of specific attributes that
     define a particular grid mapping. A CF-netCDF grid mapping variable must contain
     the attribute 'grid_mapping_name'.
 
@@ -641,7 +663,9 @@ def identify(cls, variables, ignore=None, target=None, warn=True):
 
 
 class CFLabelVariable(CFVariable):
-    """A CF-netCDF CF label variable is any netCDF variable that contain string
+    """Any netCDF variable containing string textual information, or labels.
+
+    A CF-netCDF CF label variable is any netCDF variable that contains string
     textual information, or labels.
 
     Identified by the CF-netCDF variable attribute 'coordinates'.
@@ -777,7 +801,9 @@ def cf_label_dimensions(self, cf_data_var):
         )
 
     def spans(self, cf_variable):
-        """Determine whether the dimensionality of this variable
+        """Determine dimensionality coverage.
+
+        Determine whether the dimensionality of this variable
         is a subset of the specified target variable.
 
         Note that, by default scalar variables always span the
@@ -858,7 +884,9 @@ def identify(cls, variables, ignore=None, target=None, warn=True):
 
 
 ################################################################################
 class CFGroup(MutableMapping):
-    """Represents a collection of 'NetCDF Climate and Forecast (CF) Metadata
+    """Collection of 'NetCDF CF Metadata Conventions' variables and netCDF global attributes.
+
+    Represents a collection of 'NetCDF Climate and Forecast (CF) Metadata
     Conventions' variables and netCDF global attributes.
 
     """
@@ -935,10 +963,7 @@ def cell_measures(self):
 
     @property
     def non_data_variable_names(self):
-        """:class:`set` of the names of the CF-netCDF variables that are not
-        the data pay-load.
- - """ + """:class:`set` names of the CF-netCDF variables that are not the data pay-load.""" non_data_variables = ( self.ancillary_variables, self.auxiliary_coordinates, @@ -1303,10 +1328,7 @@ def __del__(self): def _getncattr(dataset, attr, default=None): - """Simple wrapper round `netCDF4.Dataset.getncattr` to make it behave - more like `getattr`. - - """ + """Wrap `netCDF4.Dataset.getncattr` to make it behave more like `getattr`.""" try: value = dataset.getncattr(attr) except AttributeError: diff --git a/lib/iris/fileformats/dot.py b/lib/iris/fileformats/dot.py index cd875b3e4a..daa3f2fd61 100644 --- a/lib/iris/fileformats/dot.py +++ b/lib/iris/fileformats/dot.py @@ -55,8 +55,8 @@ def _dot_path(): def save(cube, target): """Save a dot representation of the cube. - Args - ---- + Parameters + ---------- cube: :class:`iris.cube.Cube`. target A filename or open file handle. @@ -83,17 +83,18 @@ def save(cube, target): def save_png(source, target, launch=False): - """Produce a "dot" instance diagram by calling dot and optionally launching + """Produce a "dot" instance diagram by calling dot. + + Produce a "dot" instance diagram by calling dot and optionally launching the resulting image. - Args - ---- + Parameters + ---------- source: :class:`iris.cube.Cube`, or dot filename. - target + target : A filename or open file handle. If passing a file handle, take care to open it for binary output. - - **kwargs + **kwargs : * launch Display the image. Default is False. @@ -148,9 +149,9 @@ def save_png(source, target, launch=False): def cube_text(cube): """Return a DOT text representation a `iris.cube.Cube`. - Args - ---- - cube + Parameters + ---------- + cube : The cube for which to create DOT text. """ @@ -276,14 +277,13 @@ def cube_text(cube): def _coord_text(label, coord): - """Return a string containing the dot representation for a single coordinate - node. + """Return a string containing the dot representation for a single coordinate node. 
- Args - ---- - label + Parameters + ---------- + label : The dot ID of the coordinate node. - coord + coord : The coordinate to convert. """ @@ -306,14 +306,13 @@ def _coord_text(label, coord): def _coord_system_text(cs, uid): - """Return a string containing the dot representation for a single coordinate - system node. + """Return string containing dot representation for a single coordinate system node. - Args - ---- - cs + Parameters + ---------- + cs : The coordinate system to convert. - uid + uid : The uid allows/distinguishes non-identical CoordSystems of the same type. diff --git a/lib/iris/fileformats/name.py b/lib/iris/fileformats/name.py index 3d01b07dee..bc1bb690c2 100644 --- a/lib/iris/fileformats/name.py +++ b/lib/iris/fileformats/name.py @@ -6,7 +6,9 @@ def _get_NAME_loader(filename): - """Return the appropriate load function for a NAME file based + """Return a NAME load function. + + Return the appropriate load function for a NAME file based on the contents of its header. """ @@ -43,8 +45,7 @@ def _get_NAME_loader(filename): def load_cubes(filenames, callback): - """Return a generator of cubes given one or more filenames and an - optional callback. + """Return a generator of cubes given one or more filenames and an optional callback. Parameters ---------- diff --git a/lib/iris/fileformats/name_loaders.py b/lib/iris/fileformats/name_loaders.py index 545b242994..3e337383cb 100644 --- a/lib/iris/fileformats/name_loaders.py +++ b/lib/iris/fileformats/name_loaders.py @@ -98,7 +98,9 @@ def read_header(file_handle): def _read_data_arrays(file_handle, n_arrays, shape): - """Return a list of NumPy arrays containing the data extracted from + """Return a list of NumPy arrays containing the data extracted. + + Return a list of NumPy arrays containing the data extracted from the provided file object. The number and shape of the arrays must be specified. 
@@ -127,8 +129,11 @@ def _read_data_arrays(file_handle, n_arrays, shape): def _build_lat_lon_for_NAME_field( header, dimindex, x_or_y, coord_names=["longitude", "latitude"] ): - """Return regular latitude and longitude coordinates extracted from + """Return regular latitude and longitude coordinates. + + Return regular latitude and longitude coordinates extracted from the provided header dictionary. + """ if x_or_y == "X": start = header["X grid origin"] @@ -147,7 +152,9 @@ def _build_lat_lon_for_NAME_field( def _build_lat_lon_for_NAME_timeseries(column_headings): - """Return regular latitude and longitude coordinates extracted from + """Return regular latitude and longitude coordinates. + + Return regular latitude and longitude coordinates extracted from the provided column_headings dictionary. """ @@ -186,7 +193,9 @@ def _build_lat_lon_for_NAME_timeseries(column_headings): def _calc_integration_period(time_avgs): - """Return a list of datetime.timedelta objects determined from the provided + """Calculate averaging/integration time periods. + + Return a list of datetime.timedelta objects determined from the provided list of averaging/integration period column headings. """ @@ -387,7 +396,9 @@ def _cf_height_from_name(z_coord, lower_bound=None, upper_bound=None): def _generate_cubes(header, column_headings, coords, data_arrays, cell_methods=None): - """Yield :class:`iris.cube.Cube` instances given + """Generate NAME cubes. + + Yield :class:`iris.cube.Cube` instances given the headers, column headings, coords and data_arrays extracted from a NAME file. @@ -549,7 +560,9 @@ def _generate_cubes(header, column_headings, coords, data_arrays, cell_methods=N def _build_cell_methods(av_or_ints, coord): - """Return a list of :class:`iris.coords.CellMethod` instances + """Create cell-methods. + + Return a list of :class:`iris.coords.CellMethod` instances based on the provided list of column heading entries and the associated coordinate. 
If a given entry does not correspond to a cell method (e.g. "No time averaging"), a value of None is inserted. @@ -588,7 +601,9 @@ def _build_cell_methods(av_or_ints, coord): def load_NAMEIII_field(filename): - """Load a NAME III grid output file returning a + """Load NAME III cubes. + + Load a NAME III grid output file returning a generator of :class:`iris.cube.Cube` instances. Parameters diff --git a/lib/iris/fileformats/nimrod.py b/lib/iris/fileformats/nimrod.py index 778c7b3251..009535dc8f 100644 --- a/lib/iris/fileformats/nimrod.py +++ b/lib/iris/fileformats/nimrod.py @@ -290,7 +290,7 @@ def _read_data(self, infile): def load_cubes(filenames, callback=None): - """Loads cubes from a list of NIMROD filenames. + """Load cubes from a list of NIMROD filenames. Parameters ---------- diff --git a/lib/iris/fileformats/nimrod_load_rules.py b/lib/iris/fileformats/nimrod_load_rules.py index bb6d13f50a..3c4c56bc3b 100644 --- a/lib/iris/fileformats/nimrod_load_rules.py +++ b/lib/iris/fileformats/nimrod_load_rules.py @@ -45,9 +45,12 @@ def is_missing(field, value): def name(cube, field, handle_metadata_errors): """Set the cube's name from the field. + Modifies the Nimrod object title based on other meta-data in the Nimrod field and known use cases. + Adds "mean_of" or "standard_deviation_of_" to the cube name if appropriate. + """ title_from_field_code = { 12: "air_pressure", @@ -237,8 +240,7 @@ def reference_time(cube, field): def forecast_period(cube): - """Add a forecast_period coord based on existing time and - forecast_reference_time coords. + """Add forecast_period coord based on existing time and forecast_reference_time coords. Must be run after time() and reference_time() @@ -293,8 +295,7 @@ def experiment(cube, field): def proj_biaxial_ellipsoid(field, handle_metadata_errors): - """Return the correct dictionary of arguments needed to define an - iris.coord_systems.GeogCS. + """Return correct dict of arguments needed to define an iris.coord_systems.GeogCS. 
Based firstly on the value given by ellipsoid, then by grid if ellipsoid is missing, select the right pre-defined ellipsoid dictionary (Airy_1830 or @@ -339,7 +340,9 @@ def proj_biaxial_ellipsoid(field, handle_metadata_errors): def set_british_national_grid_defaults(field, handle_metadata_errors): - """Check for missing coord-system meta-data and set default values for + """Check for missing coord-system meta-data and set default values. + + Check for missing coord-system meta-data and set default values for the Ordnance Survey GB Transverse Mercator projection. Some Radarnet files are missing these. @@ -374,6 +377,7 @@ def set_british_national_grid_defaults(field, handle_metadata_errors): def coord_system(field, handle_metadata_errors): """Define the coordinate system for the field. + Handles Transverse Mercator, Universal Transverse Mercator and Plate Carree. Transverse Mercator projections will default to the British National Grid if any @@ -411,6 +415,7 @@ def coord_system(field, handle_metadata_errors): def horizontal_grid(cube, field, handle_metadata_errors): """Add X and Y coordinates to the cube. + Handles Transverse Mercator, Universal Transverse Mercator and Plate Carree. coordinate reference system is supplied by coord_system(field) @@ -455,7 +460,9 @@ def horizontal_grid(cube, field, handle_metadata_errors): def vertical_coord(cube, field): - """Add a vertical coord to the cube, with bounds, if appropriate. + """Add a vertical coord to the cube, with bounds. + + Add a vertical coord to the cube, with bounds, if appropriate. Handles special numbers for "at-sea-level" (8888) and "at-ground-level" (9999). @@ -695,7 +702,9 @@ def known_threshold_coord(field): def probability_coord(cube, field, handle_metadata_errors): - """Add a coord relating to probability meta-data from the header to the + """Add a coord relating to probability meta-data from the header to the cube. 
+ + Add a coord relating to probability meta-data from the header to the cube if appropriate. Must be run after the name method. diff --git a/lib/iris/fileformats/pp.py b/lib/iris/fileformats/pp.py index 8ac49b827d..fa95d0dab1 100644 --- a/lib/iris/fileformats/pp.py +++ b/lib/iris/fileformats/pp.py @@ -273,13 +273,15 @@ class STASH(collections.namedtuple("STASH", "model section item")): __slots__ = () def __new__(cls, model, section, item): - """Args + """Create namedtuple STASH instance. + + Args ---- - model + model : A positive integer less than 100, or None. - section + section : A non-negative integer less than 100, or None. - item + item : A positive integer less than 1000, or None. """ @@ -356,8 +358,7 @@ def __ne__(self, other): class SplittableInt: - """A class to hold integers which can easily get each decimal digit - individually. + """A class to hold integers which can easily get each decimal digit individually. >>> three_six_two = SplittableInt(362) >>> print(three_six_two) @@ -657,9 +658,10 @@ def __ne__(self, other): def _data_bytes_to_shaped_array( data_bytes, lbpack, boundary_packing, data_shape, data_type, mdi, mask=None ): - """Convert the already read binary data payload into a numpy array, unpacking - and decompressing as per the F3 specification. + """Convert binary payload into a numpy array. + Convert the already read binary data payload into a numpy array, unpacking + and decompressing as per the F3 specification. """ if lbpack.n1 in (0, 2): data = np.frombuffer(data_bytes, dtype=data_type) @@ -809,10 +811,7 @@ def _data_bytes_to_shaped_array( def _header_defn(release_number): - """Return the zero-indexed header definition for a particular release of - a PPField. 
- - """ + """Return zero-indexed header definition for a particular release of a PPField.""" um_header = UM_HEADERS[release_number] offset = UM_TO_PP_HEADER_OFFSET return [ @@ -822,7 +821,9 @@ def _header_defn(release_number): def _pp_attribute_names(header_defn): - """Return the allowed attributes of a PPField: + """Return the allowed attributes of a PPField. + + Return the allowed attributes of a PPField: all of the normal headers (i.e. not the _SPECIAL_HEADERS), the _SPECIAL_HEADERS with '_' prefixed, the possible extra data headers. @@ -844,7 +845,9 @@ def _pp_attribute_names(header_defn): class PPField(metaclass=ABCMeta): - """A generic class for PP fields - not specific to a particular + """Base class for PP fields. + + A generic class for PP fields - not specific to a particular header release number. A PPField instance can easily access the PP header "words" as attributes @@ -877,7 +880,9 @@ def __init__(self, header=None): self.raw_lbpack = header[self.HEADER_DICT["lbpack"][0]] def __getattr__(self, key): - """Method supports deferred attribute creation, which offers a + """Return the value of the key. + + Method supports deferred attribute creation, which offers a significant loading optimisation, particularly when not all attributes are referenced and therefore created on the instance. @@ -970,8 +975,9 @@ def __repr__(self): @property def stash(self): - """Stash property giving access to the associated STASH object, - now supporting __eq__. + """Stash property giving access to the associated STASH object. + + Now supporting __eq__. """ if ( @@ -1049,10 +1055,7 @@ def lbproc(self, value): @property def data(self): - """:class:`numpy.ndarray` representing the multidimensional data - of the pp file. - - """ + """:class:`numpy.ndarray` representing multidimensional data of the pp file.""" if is_lazy_data(self._data): # Replace with real data on the first access. 
self._data = as_concrete_data(self._data) @@ -1445,10 +1448,7 @@ def __ne__(self, other): class PPField2(PPField): - """A class to hold a single field from a PP file, with a - header release number of 2. - - """ + """Hold a single field from a PP file, with a header release number of 2.""" HEADER_DEFN = _header_defn(2) HEADER_DICT = dict(HEADER_DEFN) @@ -1457,7 +1457,9 @@ class PPField2(PPField): @property def t1(self): - """cftime.datetime object consisting of the lbyr, lbmon, lbdat, lbhr, + """cftime.datetime object. + + cftime.datetime object consisting of the lbyr, lbmon, lbdat, lbhr, and lbmin attributes. """ @@ -1488,7 +1490,9 @@ def t1(self, dt): @property def t2(self): - """cftime.datetime object consisting of the lbyrd, lbmond, lbdatd, + """cftime.datetime object. + + cftime.datetime object consisting of the lbyrd, lbmond, lbdatd, lbhrd, and lbmind attributes. """ @@ -1519,10 +1523,7 @@ def t2(self, dt): class PPField3(PPField): - """A class to hold a single field from a PP file, with a - header release number of 3. - - """ + """Hold a single field from a PP file, with a header release number of 3.""" HEADER_DEFN = _header_defn(3) HEADER_DICT = dict(HEADER_DEFN) @@ -1531,7 +1532,9 @@ class PPField3(PPField): @property def t1(self): - """cftime.datetime object consisting of the lbyr, lbmon, lbdat, lbhr, + """cftime.datetime object. + + cftime.datetime object consisting of the lbyr, lbmon, lbdat, lbhr, lbmin, and lbsec attributes. """ @@ -1563,7 +1566,9 @@ def t1(self, dt): @property def t2(self): - """cftime.datetime object consisting of the lbyrd, lbmond, lbdatd, + """cftime.datetime object. + + cftime.datetime object consisting of the lbyrd, lbmond, lbdatd, lbhrd, lbmind, and lbsecd attributes. 
""" @@ -1638,7 +1643,9 @@ def load(filename, read_data=False, little_ended=False): def _interpret_fields(fields): - """Turn the fields read with load and FF2PP._extract_field into usable + """Turn the fields read with load and FF2PP._extract_field into usable fields. + + Turn the fields read with load and FF2PP._extract_field into usable fields. One of the primary purposes of this function is to either convert "deferred bytes" into "deferred arrays" or "loaded bytes" into actual numpy arrays (via the _create_field_data) function. @@ -1706,9 +1713,12 @@ def _interpret_fields(fields): def _create_field_data(field, data_shape, land_mask_field=None): - """Modify a field's ``_data`` attribute either by: - * converting a 'deferred array bytes' tuple into a lazy array, - * converting LoadedArrayBytes into an actual numpy array. + """Modify a field's ``_data`` attribute. + + Modify a field's ``_data`` attribute either by: + + * converting a 'deferred array bytes' tuple into a lazy array, + * converting LoadedArrayBytes into an actual numpy array. If 'land_mask_field' is passed (not None), then it contains the associated landmask, which is also a field : Its data array is used as a template for @@ -1803,8 +1813,7 @@ def calc_array(mask, values): def _field_gen(filename, read_data_bytes, little_ended=False): - """Return a generator of "half-formed" PPField instances derived from - the given filename. + """Return generator of "half-formed" PPField instances derived from given filename. A field returned by the generator is only "half-formed" because its `_data` attribute represents a simple one-dimensional stream of @@ -1931,7 +1940,9 @@ def _field_gen(filename, read_data_bytes, little_ended=False): def _convert_constraints(constraints): - """Convert known constraints from Iris semantics to PP semantics + """Convert known constraints from Iris semantics to PP semantics. + + Convert known constraints from Iris semantics to PP semantics ignoring all unknown constraints. 
""" @@ -1940,10 +1951,7 @@ def _convert_constraints(constraints): unhandled_constraints = False def _make_func(stashobj): - """Provide unique name-space for each lambda function's stashobj - variable. - - """ + """Provide unique name-space for each lambda function's stashobj variable.""" return lambda stash: stash == stashobj for con in constraints: @@ -1974,10 +1982,7 @@ def _make_func(stashobj): unhandled_constraints = True def pp_filter(field): - """Return True if field is to be kept, - False if field does not match filter. - - """ + """Return True if field is to be kept, False if field does not match filter.""" res = True if field.stash not in _STASH_ALLOW: if pp_constraints.get("stash"): @@ -1998,11 +2003,11 @@ def pp_filter(field): def load_cubes(filenames, callback=None, constraints=None): """Load cubes from a list of pp filenames. - Args - ---- - filenames + Parameters + ---------- + filenames : list of pp filenames to load - **kwargs + **kwargs : * constraints a list of Iris constraints * callback @@ -2050,8 +2055,7 @@ def load_cubes_little_endian(filenames, callback=None, constraints=None): def load_pairs_from_fields(pp_fields): - r"""Convert an iterable of PP fields into an iterable of tuples of - (Cubes, PPField). + r"""Convert an iterable of PP fields into an iterable of tuples of (Cubes, PPField). Args ---- @@ -2182,7 +2186,9 @@ def save(cube, target, append=False, field_coords=None): def save_pairs_from_cube(cube, field_coords=None, target=None): - """Use the PP saving rules to convert a cube or + """Use the PP saving rules to convert a cube. + + Use the PP saving rules to convert a cube or iterable of cubes to an iterable of (2D cube, PP field) pairs. Args @@ -2298,14 +2304,16 @@ def save_pairs_from_cube(cube, field_coords=None, target=None): def as_fields(cube, field_coords=None, target=None): - """Use the PP saving rules (and any user rules) to convert a cube to + """Use the PP saving rules to convert a cube to an iterable of PP fields. 
+ + Use the PP saving rules (and any user rules) to convert a cube to an iterable of PP fields. Args ---- cube A :class:`iris.cube.Cube` - **kwargs + **kwargs : * field_coords: List of 2 coords or coord names which are to be used for reducing the given cube into 2d slices, which will ultimately @@ -2332,7 +2340,7 @@ def save_fields(fields, target, append=False): An iterable of PP fields. target: A filename or open file handle. - **kwargs + **kwargs : * append: Whether to start a new file afresh or add the cube(s) to the end of the file. diff --git a/lib/iris/fileformats/pp_load_rules.py b/lib/iris/fileformats/pp_load_rules.py index 7c164edd41..f93f177ba8 100644 --- a/lib/iris/fileformats/pp_load_rules.py +++ b/lib/iris/fileformats/pp_load_rules.py @@ -59,8 +59,7 @@ def _convert_vertical_coords( brlev, dim=None, ): - """Encode scalar or vector vertical level values from PP headers as CM data - components. + """Encode scalar or vector vertical level values from PP headers as CM data components. Parameters ---------- @@ -340,7 +339,9 @@ def _reshape_vector_args(values_and_dims): def _collapse_degenerate_points_and_bounds(points, bounds=None, rtol=1.0e-7): - """Collapse points (and optionally bounds) in any dimensions over which all + """Collapse points (and optionally bounds) in any dimensions. + + Collapse points (and optionally bounds) in any dimensions over which all values are the same. All dimensions are tested, and if degenerate are reduced to length 1. @@ -385,8 +386,7 @@ def _collapse_degenerate_points_and_bounds(points, bounds=None, rtol=1.0e-7): def _reduce_points_and_bounds(points, lower_and_upper_bounds=None): - """Reduce the dimensionality of arrays of coordinate points (and optionally - bounds). + """Reduce the dimensionality of arrays of coordinate points (and optionally bounds). Dimensions over which all values are the same are reduced to size 1, using :func:`_collapse_degenerate_points_and_bounds`. 
@@ -441,7 +441,9 @@ def _reduce_points_and_bounds(points, lower_and_upper_bounds=None):
 def _new_coord_and_dims(
     is_vector_operation, name, units, points, lower_and_upper_bounds=None
 ):
-    """Make a new (coordinate, cube_dims) pair with the given points, name, units
+    """Make a new (coordinate, cube_dims) pair.
+
+    Make a new (coordinate, cube_dims) pair with the given points, name, units
     and optional bounds.
 
     In 'vector' style operation, the data arrays must have same number of
@@ -855,7 +857,7 @@ def _convert_scalar_pseudo_level_coords(lbuser5):
 
 
 def convert(f):
-    """Converts a PP field into the corresponding items of Cube metadata.
+    """Convert a PP field into the corresponding items of Cube metadata.
 
     Parameters
     ----------
diff --git a/lib/iris/fileformats/rules.py b/lib/iris/fileformats/rules.py
index 4467ae1cfa..bf6ae4de34 100644
--- a/lib/iris/fileformats/rules.py
+++ b/lib/iris/fileformats/rules.py
@@ -100,7 +100,9 @@ def scalar_cell_method(cube, method, coord_name):
 
 
 def has_aux_factory(cube, aux_factory_class):
-    """Try to find an class:`~iris.aux_factory.AuxCoordFactory` instance of the
+    """Determine :class:`~iris.aux_factory.AuxCoordFactory` availability within cube.
+
+    Try to find an class:`~iris.aux_factory.AuxCoordFactory` instance of the
     specified type on the cube.
 
     """
@@ -111,7 +113,9 @@ def has_aux_factory(cube, aux_factory_class):
 
 
 def aux_factory(cube, aux_factory_class):
-    """Return the class:`~iris.aux_factory.AuxCoordFactory` instance of the
+    """Retrieve :class:`~iris.aux_factory.AuxCoordFactory` instance from cube.
+
+    Return the class:`~iris.aux_factory.AuxCoordFactory` instance of the
     specified type from a cube.
""" @@ -138,7 +142,7 @@ class _ReferenceError(Exception): def _dereference_args(factory, reference_targets, regrid_cache, cube): - """Converts all the arguments for a factory into concrete coordinates.""" + """Convert all the arguments for a factory into concrete coordinates.""" args = [] for arg in factory.args: if isinstance(arg, Reference): @@ -195,7 +199,9 @@ def _regrid_to_target(src_cube, target_coords, target_cube): def _ensure_aligned(regrid_cache, src_cube, target_cube): - """Returns a version of `src_cube` suitable for use as an AuxCoord + """Ensure dimension compatible cubes are spatially aligned. + + Returns a version of `src_cube` suitable for use as an AuxCoord on `target_cube`, or None if no version can be made. """ @@ -381,8 +387,7 @@ def _load_pairs_from_fields_and_filenames( def load_pairs_from_fields(fields, converter): - """Convert an iterable of fields into an iterable of Cubes using the - provided converter. + """Convert iterable of fields into iterable of Cubes using the provided converter. Parameters ---------- diff --git a/lib/iris/fileformats/um/_fast_load.py b/lib/iris/fileformats/um/_fast_load.py index 6940bfed9c..12441acdcc 100644 --- a/lib/iris/fileformats/um/_fast_load.py +++ b/lib/iris/fileformats/um/_fast_load.py @@ -2,7 +2,8 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -"""Support for "fast" loading of structured UM files in iris load functions, +"""Support for "fast" loading of structured UM files in iris load functions. + i.e. :meth:`iris.load` and its associates. This provides a context manager to enable structured loading via all the iris @@ -154,8 +155,7 @@ def iter_fields_decorated_with_load_indices(fields_iter): def _convert_collation(collation): - """Converts a FieldCollation into the corresponding items of Cube - metadata. + """Convert a FieldCollation into the corresponding items of Cube metadata. 
Parameters ---------- @@ -542,7 +542,9 @@ def structured_um_loading(): @contextmanager def _raw_structured_loading(): - """Private context manager called by :func:`iris.load_raw` to prevent + """Prevent structured loading from concatenating its result cubes. + + Private context manager called by :func:`iris.load_raw` to prevent structured loading from concatenating its result cubes in that case. """ diff --git a/lib/iris/fileformats/um/_fast_load_structured_fields.py b/lib/iris/fileformats/um/_fast_load_structured_fields.py index 81bec94f79..6b09227491 100644 --- a/lib/iris/fileformats/um/_fast_load_structured_fields.py +++ b/lib/iris/fileformats/um/_fast_load_structured_fields.py @@ -20,7 +20,9 @@ class BasicFieldCollation: - """An object representing a group of UM fields with array structure that can + """An object representing a group of UM fields with array structure. + + An object representing a group of UM fields with array structure that can be vectorized into a single cube. For example: @@ -234,7 +236,7 @@ def _calculate_structure(self): def _um_collation_key_function(field): - """Standard collation key definition for fast structured field loading. + """Collation key definition for fast structured field loading. The elements used here are the minimum sufficient to define the 'phenomenon', as described for :meth:`group_structured_fields`. @@ -264,7 +266,9 @@ def _um_collation_key_function(field): def group_structured_fields( field_iterator, collation_class=BasicFieldCollation, **collation_kwargs ): - """Collect structured fields into identified groups whose fields can be + """Collect structured fields into identified groups. + + Collect structured fields into identified groups whose fields can be combined to form a single cube. 
Parameters diff --git a/lib/iris/fileformats/um/_ff_replacement.py b/lib/iris/fileformats/um/_ff_replacement.py index 8ceb30bb92..ee40bacafa 100644 --- a/lib/iris/fileformats/um/_ff_replacement.py +++ b/lib/iris/fileformats/um/_ff_replacement.py @@ -51,7 +51,7 @@ def um_to_pp(filename, read_data=False, word_depth=None): def load_cubes(filenames, callback, constraints=None, _loader_kwargs=None): - """Loads cubes from filenames of UM fieldsfile-like files. + """Load cubes from filenames of UM fieldsfile-like files. Parameters ---------- @@ -81,8 +81,7 @@ def load_cubes(filenames, callback, constraints=None, _loader_kwargs=None): def load_cubes_32bit_ieee(filenames, callback, constraints=None): - """Loads cubes from filenames of 32bit ieee converted UM fieldsfile-like - files. + """Load cubes from filenames of 32bit ieee converted UM fieldsfile-like files. See Also -------- diff --git a/lib/iris/fileformats/um/_optimal_array_structuring.py b/lib/iris/fileformats/um/_optimal_array_structuring.py index 1e58a33f41..64f4e8fad2 100644 --- a/lib/iris/fileformats/um/_optimal_array_structuring.py +++ b/lib/iris/fileformats/um/_optimal_array_structuring.py @@ -9,7 +9,9 @@ def _optimal_dimensioning_structure(structure, element_priorities): - """Uses the structure options provided by the + """Determine the optimal array structure for the :class:`FieldCollation`. + + Uses the structure options provided by the :class:`~iris.fileformats._structured_array_identification.GroupStructure` to determine the optimal array structure for the :class:`FieldCollation`. diff --git a/lib/iris/iterate.py b/lib/iris/iterate.py index 1d8ae49d06..be2a436a5e 100644 --- a/lib/iris/iterate.py +++ b/lib/iris/iterate.py @@ -166,7 +166,9 @@ def izip(*cubes, **kwargs): class _ZipSlicesIterator(Iterator): - """Extension to _SlicesIterator (see cube.py) to support iteration over a + """Support iteration over a collection of cubes. 
+ + Extension to _SlicesIterator (see cube.py) to support iteration over a collection of cubes in step. """ @@ -278,7 +280,9 @@ def __next__(self): class _CoordWrapper: - """Class for creating a coordinate wrapper that allows the use of an + """Create a coordinate wrapper. + + Class for creating a coordinate wrapper that allows the use of an alternative equality function based on metadata rather than metadata + points/bounds. diff --git a/lib/iris/palette.py b/lib/iris/palette.py index 078d9b73ba..e180b649a8 100644 --- a/lib/iris/palette.py +++ b/lib/iris/palette.py @@ -136,7 +136,7 @@ def cmap_norm(cube): def auto_palette(func): - """Decorator wrapper function to control the default behaviour. + """Auto palette decorator wrapper function to control the default behaviour. Decorator wrapper function to control the default behaviour of the matplotlib cmap and norm keyword arguments. diff --git a/lib/iris/plot.py b/lib/iris/plot.py index d8e9426035..57e338c86a 100644 --- a/lib/iris/plot.py +++ b/lib/iris/plot.py @@ -2,8 +2,7 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -"""Iris-specific extensions to matplotlib, mimicking the :mod:`matplotlib.pyplot` -interface. +"""Iris-specific extensions to matplotlib, mimicking the :mod:`matplotlib.pyplot` interface. See also: :ref:`matplotlib `. @@ -133,7 +132,9 @@ def _valid_bound_dim_coord(coord): def _get_plot_defn(cube, mode, ndims=2): - """Return data and plot-axis coords given a cube & a mode of either + """Return data and plot-axis coords. + + Return data and plot-axis coords given a cube & a mode of either POINT_MODE or BOUND_MODE. """ @@ -275,7 +276,9 @@ def _invert_yaxis(v_coord, axes=None): def _check_bounds_contiguity_and_mask(coord, data, atol=None, rtol=None): - """Checks that any discontiguities in the bounds of the given coordinate only + """Check discontiguities in the bounds of the given coordinate. 
+ + Checks that any discontiguities in the bounds of the given coordinate only occur where the data is masked. Where a discontinuity occurs the grid created for plotting will not be @@ -698,7 +701,9 @@ def _get_geodesic_params(globe): def _shift_plot_sections(u_object, u, v): - """Shifts subsections of u by multiples of 360 degrees within ranges + """Shifts subsections of u by multiples of 360 degrees. + + Shifts subsections of u by multiples of 360 degrees within ranges defined by the points where the line should cross over the 0/360 degree longitude boundary. @@ -854,7 +859,9 @@ def _draw_two_1d_from_points(draw_method_name, arg_func, *args, **kwargs): def _replace_axes_with_cartopy_axes(cartopy_proj): - """Replace non-cartopy subplot/axes with a cartopy alternative + """Replace non-cartopy subplot/axes with a cartopy alternative. + + Replace non-cartopy subplot/axes with a cartopy alternative based on the provided projection. If the current axes are already an instance of :class:`cartopy.mpl.geoaxes.GeoAxes` then no action is taken. @@ -885,7 +892,9 @@ def _replace_axes_with_cartopy_axes(cartopy_proj): def _ensure_cartopy_axes_and_determine_kwargs(x_coord, y_coord, kwargs): - """Replace the current non-cartopy axes with + """Replace the current non-cartopy axes with :class:`cartopy.mpl.geoaxes.GeoAxes`. + + Replace the current non-cartopy axes with :class:`cartopy.mpl.geoaxes.GeoAxes` and return the appropriate kwargs dict based on the provided coordinates and kwargs. @@ -1220,7 +1229,7 @@ def _fill_orography(cube, coords, mode, vert_plot, horiz_plot, style_args): def orography_at_bounds(cube, facecolor="#888888", coords=None, axes=None): - """Plots orography defined at cell boundaries from the given Cube. + """Plot orography defined at cell boundaries from the given Cube. 
Notes ----- @@ -1258,7 +1267,7 @@ def horiz_plot(v_coord, orography, style_args): def orography_at_points(cube, facecolor="#888888", coords=None, axes=None): - """Plots orography defined at sample points from the given Cube. + """Plot orography defined at sample points from the given Cube. Notes ----- @@ -1435,7 +1444,7 @@ def _scatter_args(u, v, data, *args, **kwargs): def _vector_component_args(x_points, y_points, u_data, *args, **kwargs): - """Callback from _draw_2d_from_points for 'quiver' and 'streamlines'. + """Vector component callback from _draw_2d_from_points for 'quiver' and 'streamlines'. Returns arguments (x, y, u, v), to be passed to the underlying matplotlib call. @@ -1475,7 +1484,9 @@ def _vector_component_args(x_points, y_points, u_data, *args, **kwargs): def barbs(u_cube, v_cube, *args, **kwargs): - """Draws a barb plot from two vector component cubes. Triangles, full-lines + """Draw a barb plot from two vector component cubes. + + Draws a barb plot from two vector component cubes. Triangles, full-lines and half-lines represent increments of 50, 10 and 5 respectively. Args: @@ -1670,7 +1681,7 @@ def scatter(x, y, *args, **kwargs): def fill_between(x, y1, y2, *args, **kwargs): - """Plots y1 and y2 against x, and fills the space between them. + """Plot y1 and y2 against x, and fills the space between them. Args: diff --git a/lib/iris/quickplot.py b/lib/iris/quickplot.py index 1fa2edd630..1c1eaf5a56 100644 --- a/lib/iris/quickplot.py +++ b/lib/iris/quickplot.py @@ -49,7 +49,7 @@ def _title(cube_or_coord, with_units): def _label(cube, mode, result=None, ndims=2, coords=None, axes=None): - """Puts labels on the current plot using the given cube.""" + """Put labels on the current plot using the given cube.""" if axes is None: axes = plt.gca() @@ -282,8 +282,7 @@ def points(cube, *args, **kwargs): def plot(*args, **kwargs): - """Draws a labelled line plot based on the given cube(s) or - coordinate(s). 
+    """Draw a labelled line plot based on the given cube(s) or coordinate(s).
 
     See :func:`iris.plot.plot` for details of valid arguments and
     keyword arguments.
@@ -301,8 +300,7 @@ def plot(*args, **kwargs):
 
 
 def scatter(x, y, *args, **kwargs):
-    """Draws a labelled scatter plot based on the given cubes or
-    coordinates.
+    """Draw a labelled scatter plot based on the given cubes or coordinates.
 
     See :func:`iris.plot.scatter` for details of valid arguments and
     keyword arguments.
@@ -329,6 +327,7 @@ def fill_between(x, y1, y2, *args, **kwargs):
     -----
     This function does not maintain laziness when called; it realises data.
     See more at :doc:`/userguide/real_and_lazy_data`.
+
     """
     axes = kwargs.get("axes")
     result = iplt.fill_between(x, y1, y2, *args, **kwargs)
@@ -346,6 +345,7 @@ def hist(x, *args, **kwargs):
     -----
     This function does not maintain laziness when called; it realises data.
     See more at :doc:`/userguide/real_and_lazy_data`.
+
     """
     axes = kwargs.get("axes")
     result = iplt.hist(x, *args, **kwargs)
diff --git a/lib/iris/symbols.py b/lib/iris/symbols.py
index 0e53588a66..4e66daa100 100644
--- a/lib/iris/symbols.py
+++ b/lib/iris/symbols.py
@@ -193,8 +193,11 @@ def _backslash_path():
 
 
 def _wedge_fix(wedge_path):
-    """Fixes the problem with Path.wedge where it doesn't initialise the first,
+    """Fix the problem with Path.wedge.
+
+    Fixes the problem with Path.wedge where it doesn't initialise the first,
     and last two vertices.
+
     This fix should not have any side-effects once Path.wedge has been fixed,
     but will then be redundant and should be removed.
diff --git a/lib/iris/tests/integration/netcdf/test_general.py b/lib/iris/tests/integration/netcdf/test_general.py
index 8c27742185..751c160805 100644
--- a/lib/iris/tests/integration/netcdf/test_general.py
+++ b/lib/iris/tests/integration/netcdf/test_general.py
@@ -484,5 +484,40 @@ def test_path_string_save_same(self):
         self.assertCDL(tempfile_frompath)
 
 
+@tests.skip_data
+class TestWarningRepeats(tests.IrisTest):
+    def test_datum_once(self):
+        """Tests for warnings being duplicated.
+
+        Notes
+        -----
+        This test relies on `iris.load` throwing a warning. This warning might
+        be removed in the future, in which case `assert len(record) == 2`
+        should be changed to `assert len(record) == 1`.
+
+        toa_brightness_temperature.nc has an AuxCoord with lazy data, and triggers a
+        specific part of dask which contains a `catch_warnings()` call which
+        causes warnings to be repeated, and so has been removed from the
+        `fnames` list until a solution is found for such a file.
+
+        """
+        #
+        fnames = [
+            "false_east_north_merc.nc",
+            "non_unit_scale_factor_merc.nc",
+            # toa_brightness_temperature.nc,
+        ]
+        fpaths = [
+            tests.get_data_path(("NetCDF", "mercator", fname)) for fname in fnames
+        ]
+
+        with warnings.catch_warnings(record=True) as record:
+            warnings.simplefilter("default")
+            for fpath in fpaths:
+                iris.load(fpath)
+                warnings.warn("Dummy warning", category=iris.exceptions.IrisUserWarning)
+        assert len(record) == 2
+
+
 if __name__ == "__main__":
     tests.main()
diff --git a/lib/iris/tests/unit/analysis/stats/test_pearsonr.py b/lib/iris/tests/unit/analysis/stats/test_pearsonr.py
index 50387e1418..d46bcd21ba 100644
--- a/lib/iris/tests/unit/analysis/stats/test_pearsonr.py
+++ b/lib/iris/tests/unit/analysis/stats/test_pearsonr.py
@@ -8,17 +8,22 @@
 # importing anything else.
import iris.tests as tests # isort:skip +from unittest import mock + +import dask +import dask.array import numpy as np import numpy.ma as ma +import pytest import iris +import iris._lazy_data import iris.analysis.stats as stats from iris.exceptions import CoordinateNotFoundError -@tests.skip_data -class Test(tests.IrisTest): - def setUp(self): +class Mixin: + def setup_method(self): # 3D cubes: cube_temp = iris.load_cube( tests.get_data_path( @@ -33,21 +38,36 @@ def setUp(self): cube_temp.coord("longitude").guess_bounds() self.weights = iris.analysis.cartography.area_weights(cube_temp) - def test_perfect_corr(self): + +@tests.skip_data +class TestLazy(Mixin): + @pytest.fixture + def mocked_compute(self, monkeypatch): + m_compute = mock.Mock(wraps=dask.base.compute) + + # The three dask compute functions are all the same function but monkeypatch + # does not automatically know that. + # https://stackoverflow.com/questions/77820437 + monkeypatch.setattr(dask.base, dask.base.compute.__name__, m_compute) + monkeypatch.setattr(dask, dask.compute.__name__, m_compute) + monkeypatch.setattr(dask.array, dask.array.compute.__name__, m_compute) + + return m_compute + + def test_perfect_corr(self, mocked_compute): r = stats.pearsonr(self.cube_a, self.cube_a, ["latitude", "longitude"]) - self.assertArrayEqual(r.data, np.array([1.0] * 6)) + mocked_compute.assert_not_called() + np.testing.assert_array_equal(r.data, np.array([1.0] * 6)) - def test_perfect_corr_all_dims(self): + def test_perfect_corr_all_dims(self, mocked_compute): r = stats.pearsonr(self.cube_a, self.cube_a) - self.assertArrayEqual(r.data, np.array([1.0])) + mocked_compute.assert_not_called() + np.testing.assert_array_equal(r.data, np.array([1.0])) - def test_incompatible_cubes(self): - with self.assertRaises(ValueError): - stats.pearsonr(self.cube_a[:, 0, :], self.cube_b[0, :, :], "longitude") - - def test_compatible_cubes(self): + def test_compatible_cubes(self, mocked_compute): r = stats.pearsonr(self.cube_a, 
self.cube_b, ["latitude", "longitude"]) - self.assertArrayAlmostEqual( + mocked_compute.assert_not_called() + np.testing.assert_array_almost_equal( r.data, [ 0.81114936, @@ -59,13 +79,15 @@ def test_compatible_cubes(self): ], ) - def test_broadcast_cubes(self): + def test_broadcast_cubes(self, mocked_compute): r1 = stats.pearsonr( self.cube_a, self.cube_b[0, :, :], ["latitude", "longitude"] ) r2 = stats.pearsonr( self.cube_b[0, :, :], self.cube_a, ["latitude", "longitude"] ) + + mocked_compute.assert_not_called() r_by_slice = [ stats.pearsonr( self.cube_a[i, :, :], @@ -74,14 +96,16 @@ def test_broadcast_cubes(self): ).data for i in range(6) ] - self.assertArrayEqual(r1.data, np.array(r_by_slice)) - self.assertArrayEqual(r2.data, np.array(r_by_slice)) + np.testing.assert_array_equal(r1.data, np.array(r_by_slice)) + np.testing.assert_array_equal(r2.data, np.array(r_by_slice)) - def test_compatible_cubes_weighted(self): + def test_compatible_cubes_weighted(self, mocked_compute): r = stats.pearsonr( self.cube_a, self.cube_b, ["latitude", "longitude"], self.weights ) - self.assertArrayAlmostEqual( + + mocked_compute.assert_not_called() + np.testing.assert_array_almost_equal( r.data, [ 0.79105429, @@ -93,13 +117,15 @@ def test_compatible_cubes_weighted(self): ], ) - def test_broadcast_cubes_weighted(self): + def test_broadcast_cubes_weighted(self, mocked_compute): r = stats.pearsonr( self.cube_a, self.cube_b[0, :, :], ["latitude", "longitude"], weights=self.weights[0, :, :], ) + + mocked_compute.assert_not_called() r_by_slice = [ stats.pearsonr( self.cube_a[i, :, :], @@ -109,10 +135,31 @@ def test_broadcast_cubes_weighted(self): ).data for i in range(6) ] - self.assertArrayAlmostEqual(r.data, np.array(r_by_slice)) + np.testing.assert_array_almost_equal(r.data, np.array(r_by_slice)) + + def test_broadcast_transpose_cubes_weighted(self, mocked_compute): + # Reference is calculated with no transposition. 
+ r_ref = stats.pearsonr( + self.cube_a, + self.cube_b[0, :, :], + ["latitude", "longitude"], + weights=self.weights[0, :, :], + ) + + self.cube_a.transpose() + r_test = stats.pearsonr( + self.cube_a, + self.cube_b[0, :, :], + ["latitude", "longitude"], + weights=self.weights[0, :, :], + ) + + mocked_compute.assert_not_called() + # Should get the same result, but transposed. + np.testing.assert_array_almost_equal(r_test.data, r_ref.data.T) def test_weight_error(self): - with self.assertRaises(ValueError): + with pytest.raises(ValueError): stats.pearsonr( self.cube_a, self.cube_b[0, :, :], @@ -120,54 +167,74 @@ def test_weight_error(self): weights=self.weights, ) - def test_non_existent_coord(self): - with self.assertRaises(CoordinateNotFoundError): - stats.pearsonr(self.cube_a, self.cube_b, "bad_coord") - - def test_mdtol(self): + def test_mdtol(self, mocked_compute): cube_small = self.cube_a[:, 0, 0] - cube_small_masked = cube_small.copy() - cube_small_masked.data = ma.array( - cube_small.data, mask=np.array([0, 0, 0, 1, 1, 1], dtype=bool) - ) + cube_small_masked = iris.util.mask_cube(cube_small, [0, 0, 0, 1, 1, 1]) r1 = stats.pearsonr(cube_small, cube_small_masked) r2 = stats.pearsonr(cube_small, cube_small_masked, mdtol=0.49) - self.assertArrayAlmostEqual(r1.data, np.array([0.74586593])) - self.assertMaskedArrayEqual(r2.data, ma.array([0], mask=[True])) - def test_common_mask_simple(self): + mocked_compute.assert_not_called() + np.testing.assert_array_almost_equal(r1.data, np.array([0.74586593])) + tests.assert_masked_array_equal(r2.data, ma.array([0], mask=[True])) + + def test_common_mask_simple(self, mocked_compute): cube_small = self.cube_a[:, 0, 0] - cube_small_masked = cube_small.copy() - cube_small_masked.data = ma.array( - cube_small.data, mask=np.array([0, 0, 0, 1, 1, 1], dtype=bool) - ) + cube_small_masked = iris.util.mask_cube(cube_small, [0, 0, 0, 1, 1, 1]) r = stats.pearsonr(cube_small, cube_small_masked, common_mask=True) - 
self.assertArrayAlmostEqual(r.data, np.array([1.0])) - def test_common_mask_broadcast(self): - cube_small = self.cube_a[:, 0, 0] + mocked_compute.assert_not_called() + np.testing.assert_array_almost_equal(r.data, np.array([1.0])) + + def test_common_mask_broadcast(self, mocked_compute): + cube_small = iris.util.mask_cube(self.cube_a[:, 0, 0], [0, 0, 0, 0, 0, 1]) + mask_2d = np.zeros((6, 2), dtype=bool) + # 2d mask varies on unshared coord: + mask_2d[0, 1] = 1 + + # Make a (6, 2) cube. cube_small_2d = self.cube_a[:, 0:2, 0] - cube_small.data = ma.array( - cube_small.data, mask=np.array([0, 0, 0, 0, 0, 1], dtype=bool) - ) - cube_small_2d.data = ma.array( - np.tile(cube_small.data[:, np.newaxis], 2), - mask=np.zeros((6, 2), dtype=bool), + # Duplicate data along unshared coord's dimension. + new_data = iris.util.broadcast_to_shape( + cube_small.core_data(), (6, 2), dim_map=[0] ) - # 2d mask varies on unshared coord: - cube_small_2d.data.mask[0, 1] = 1 + cube_small_2d.data = iris.util._mask_array(new_data, mask_2d) + r = stats.pearsonr( cube_small, cube_small_2d, weights=self.weights[:, 0, 0], common_mask=True, ) - self.assertArrayAlmostEqual(r.data, np.array([1.0, 1.0])) + + mocked_compute.assert_not_called() + np.testing.assert_array_almost_equal(r.data, np.array([1.0, 1.0])) # 2d mask does not vary on unshared coord: cube_small_2d.data.mask[0, 0] = 1 r = stats.pearsonr(cube_small, cube_small_2d, common_mask=True) - self.assertArrayAlmostEqual(r.data, np.array([1.0, 1.0])) + np.testing.assert_array_almost_equal(r.data, np.array([1.0, 1.0])) + + +class TestReal(TestLazy): + def setup_method(self): + super().setup_method() + for cube in [self.cube_a, self.cube_b]: + _ = cube.data + +class TestCoordHandling(Mixin): + def test_lenient_handling(self): + # Smoke test that mismatched var_name does not prevent operation. 
+ self.cube_a.coord("time").var_name = "wibble" + stats.pearsonr(self.cube_a, self.cube_b) -if __name__ == "__main__": - tests.main() + def test_incompatible_cubes(self): + with pytest.raises(ValueError): + stats.pearsonr(self.cube_a[:, 0, :], self.cube_b[0, :, :], "longitude") + + def test_single_coord(self): + # Smoke test that single coord can be passed as single string. + stats.pearsonr(self.cube_a, self.cube_b, "latitude") + + def test_non_existent_coord(self): + with pytest.raises(CoordinateNotFoundError): + stats.pearsonr(self.cube_a, self.cube_b, "bad_coord") diff --git a/lib/iris/util.py b/lib/iris/util.py index 7d87492c7f..878c62e2f1 100644 --- a/lib/iris/util.py +++ b/lib/iris/util.py @@ -95,7 +95,7 @@ def broadcast_to_shape(array, shape, dim_map): def delta(ndarray, dimension, circular=False): - """Calculates the difference between values along a given dimension. + """Calculate the difference between values along a given dimension. Parameters ---------- @@ -158,7 +158,9 @@ def delta(ndarray, dimension, circular=False): def describe_diff(cube_a, cube_b, output_file=None): - """Prints the differences that prevent compatibility between two cubes, as + """Print the differences that prevent compatibility between two cubes. + + Print the differences that prevent compatibility between two cubes, as defined by :meth:`iris.cube.Cube.is_compatible()`. Parameters @@ -227,7 +229,7 @@ def describe_diff(cube_a, cube_b, output_file=None): def guess_coord_axis(coord): - """Returns a "best guess" axis name of the coordinate. + """Return a "best guess" axis name of the coordinate. Heuristic categorisation of the coordinate into either label 'T', 'Z', 'Y', 'X' or None. @@ -351,7 +353,7 @@ def rolling_window(a, window=1, step=1, axis=-1): def array_equal(array1, array2, withnans=False): - """Returns whether two arrays have the same shape and elements. + """Return whether two arrays have the same shape and elements. 
Parameters ---------- @@ -390,7 +392,9 @@ def normalise_array(array): def approx_equal(a, b, max_absolute_error=1e-10, max_relative_error=1e-10): - """Returns whether two numbers are almost equal, allowing for the finite + """Check if two numbers are almost equal. + + Returns whether two numbers are almost equal, allowing for the finite precision of floating point numbers. .. deprecated:: 3.2.0 @@ -424,8 +428,9 @@ def approx_equal(a, b, max_absolute_error=1e-10, max_relative_error=1e-10): def between(lh, rh, lh_inclusive=True, rh_inclusive=True): - """Provides a convenient way of defining a 3 element inequality such as - ``a < number < b``. + """Provide convenient way of defining a 3 element inequality. + + Such as ``a < number < b``. Parameters ---------- @@ -630,7 +635,9 @@ def monotonic(array, strict=False, return_direction=False): def column_slices_generator(full_slice, ndims): - """Given a full slice full of tuples, return a dictionary mapping old + """Return a dictionary mapping old data dimensions to new. + + Given a full slice full of tuples, return a dictionary mapping old data dimensions to new and a generator which gives the successive slices needed to index correctly (across columns). @@ -703,7 +710,9 @@ def is_tuple_style_index(key): def _build_full_slice_given_keys(keys, ndim): - """Given the keys passed to a __getitem__ call, build an equivalent + """Build an equivalent tuple of keys which span ndims. + + Given the keys passed to a __getitem__ call, build an equivalent tuple of keys which span ndims. """ @@ -811,8 +820,7 @@ def _slice_data_with_keys(data, keys): def _wrap_function_for_method(function, docstring=None): - """Returns a wrapper function modified to be suitable for use as a - method. + """Return a wrapper function modified to be suitable for use as a method. The wrapper function renames the first argument as "self" and allows an alternative docstring, thus allowing the built-in help(...) 
@@ -857,7 +865,9 @@ def _wrap_function_for_method(function, docstring=None):
 
 
 class _MetaOrderedHashable(ABCMeta):
-    """A metaclass that ensures that non-abstract subclasses of _OrderedHashable
+    """Ensures that non-abstract subclasses are given a default __init__ method.
+
+    A metaclass that ensures that non-abstract subclasses of _OrderedHashable
     without an explicit __init__ method are given a default __init__ method
     with the appropriate method signature.
 
@@ -922,7 +932,9 @@ class _OrderedHashable(Hashable, metaclass=_MetaOrderedHashable):
     @property
     @abstractmethod
     def _names(self):
-        """Override this attribute to declare the names of all the attributes
+        """Override this attribute to declare the names of all the relevant attributes.
+
+        Override this attribute to declare the names of all the attributes
         relevant to the hash/comparison semantics.
 
         """
@@ -990,7 +1002,9 @@ def create_temp_filename(suffix=""):
 
 
 def clip_string(the_str, clip_length=70, rider="..."):
-    """Returns a clipped version of the string based on the specified clip
+    """Return a clipped version of the string based on the specified clip length.
+
+    Return a clipped version of the string based on the specified clip
     length and whether or not any graceful clip points can be found.
 
     If the string to be clipped is shorter than the specified clip
@@ -1025,6 +1039,7 @@ def clip_string(the_str, clip_length=70, rider="..."):
     -----
     This function does maintain laziness when called; it doesn't realise data.
     See more at :doc:`/userguide/real_and_lazy_data`.
+
     """
     if clip_length >= len(the_str) or clip_length <= 0:
         return the_str
@@ -1046,7 +1061,9 @@ def format_array(arr):
 
 
 def format_array(arr):
-    """Returns the given array as a string, using the python builtin str
+    """Return the given array as a string.
+
+    Returns the given array as a string, using the python builtin str
     function on a piecewise basis.
 
     Useful for xml representation of arrays.
@@ -1072,7 +1089,9 @@ def format_array(arr): def new_axis(src_cube, scalar_coord=None, expand_extras=()): # maybe not lazy - """Create a new axis as the leading dimension of the cube, promoting a scalar + """Create a new axis as the leading dimension of the cube. + + Create a new axis as the leading dimension of the cube, promoting a scalar coordinate if specified. Parameters @@ -1108,6 +1127,7 @@ def new_axis(src_cube, scalar_coord=None, expand_extras=()): # maybe not lazy ----- This function does maintain laziness when called; it doesn't realise data. See more at :doc:`/userguide/real_and_lazy_data`. + """ def _reshape_data_array(data_manager): @@ -1196,7 +1216,9 @@ def _handle_dimensional_metadata(cube, dm_item, cube_add_method, expand_extras): def squeeze(cube): - """Removes any dimension of length one. If it has an associated DimCoord or + """Remove any dimension of length one. + + Remove any dimension of length one. If it has an associated DimCoord or AuxCoord, this becomes a scalar coord. Parameters @@ -1234,7 +1256,9 @@ def squeeze(cube): def file_is_newer_than(result_path, source_paths): - """Return whether the 'result' file has a later modification time than all of + """Determine if the 'result' file was modified last. + + Return whether the 'result' file has a later modification time than all of the 'source' files. If a stored result depends entirely on known 'sources', it need only be @@ -1393,7 +1417,9 @@ def points_step(points): def unify_time_units(cubes): - """Performs an in-place conversion of the time units of all time coords in the + """Perform an in-place conversion of the time units. + + Perform an in-place conversion of the time units of all time coords in the cubes in a given iterable. One common epoch is defined for each calendar found in the cubes to prevent units being defined with inconsistencies between epoch and calendar. 
During this process, all time coordinates have @@ -1429,7 +1455,9 @@ def unify_time_units(cubes): def _is_circular(points, modulus, bounds=None): - """Determine whether the provided points or bounds are circular in nature + """Determine whether the provided points or bounds are circular. + + Determine whether the provided points or bounds are circular in nature relative to the modulus value. If the bounds are provided then these are checked for circularity rather @@ -1493,10 +1521,11 @@ def _is_circular(points, modulus, bounds=None): def promote_aux_coord_to_dim_coord(cube, name_or_coord): - r"""Promotes an AuxCoord on the cube to a DimCoord. This AuxCoord must be - associated with a single cube dimension. If the AuxCoord is associated - with a dimension that already has a DimCoord, that DimCoord gets - demoted to an AuxCoord. + r"""Promote an auxiliary to a dimension coordinate on the cube. + + This AuxCoord must be associated with a single cube dimension. If the + AuxCoord is associated with a dimension that already has a DimCoord, that + DimCoord gets demoted to an AuxCoord. Parameters ---------- @@ -1671,7 +1700,6 @@ def demote_dim_coord_to_aux_coord(cube, name_or_coord): This function maintains laziness when called; it does not realise data. See more at :doc:`/userguide/real_and_lazy_data`. - """ from iris.coords import Coord @@ -1702,13 +1730,12 @@ def demote_dim_coord_to_aux_coord(cube, name_or_coord): @functools.wraps(np.meshgrid) def _meshgrid(*xi, **kwargs): - """@numpy v1.13, the dtype of each output n-D coordinate is the same as its + """Ensure consistent meshgrid behaviour across numpy versions. + + @numpy v1.13, the dtype of each output n-D coordinate is the same as its associated input 1D coordinate. This is not the case prior to numpy v1.13, where the output dtype is cast up to its highest resolution, regardlessly. - This convenience function ensures consistent meshgrid behaviour across - numpy versions. 
-
     Reference: https://github.com/numpy/numpy/pull/5302
 
     """
@@ -1720,7 +1747,9 @@ def _meshgrid(*xi, **kwargs):
 
 
 def find_discontiguities(cube, rel_tol=1e-5, abs_tol=1e-8):
-    """Searches the 'x' and 'y' coord on the cube for discontiguities in the
+    """Identify spatial discontiguities.
+
+    Searches the 'x' and 'y' coord on the cube for discontiguities in the
     bounds array, returned as a boolean array (True for all cells which are
     discontiguous with the cell immediately above them or to their right).
 
@@ -1765,7 +1794,6 @@ def find_discontiguities(cube, rel_tol=1e-5, abs_tol=1e-8):
 
     This function does not maintain laziness when called; it realises data.
     See more at :doc:`/userguide/real_and_lazy_data`.
-
     """
     lats_and_lons = [
         "latitude",
@@ -1812,10 +1840,11 @@ def find_discontiguities(cube, rel_tol=1e-5, abs_tol=1e-8):
 
 
 def _mask_array(array, points_to_mask, in_place=False):
-    """Apply masking to array where points_to_mask is True/non-zero. Designed to
-    work with iris.analysis.maths._binary_op_common so array and points_to_mask
-    will be broadcastable to each other. array and points_to_mask may be numpy
-    or dask types (or one of each).
+    """Apply masking to array where points_to_mask is True/non-zero.
+
+    Designed to work with iris.analysis.maths._binary_op_common so array
+    and points_to_mask will be broadcastable to each other.
+    array and points_to_mask may be numpy or dask types (or one of each).
 
     If array is lazy then in_place is ignored: _math_op_common will use the
     returned value regardless of in_place, so we do not need to implement it
@@ -1865,7 +1894,9 @@ def _mask_array(array, points_to_mask, in_place=False):
 
 @_lenient_client(services=SERVICES)
 def mask_cube(cube, points_to_mask, in_place=False, dim=None):
-    """Masks any cells in the cube's data array which correspond to cells marked
+    """Mask any cells in the cube's data array.
+
+    Masks any cells in the cube's data array which correspond to cells marked
     ``True`` (or non zero) in ``points_to_mask``.
``points_to_mask`` may be specified as a :class:`numpy.ndarray`, :class:`dask.array.Array`, :class:`iris.coords.Coord` or :class:`iris.cube.Cube`, following the same @@ -2035,10 +2066,13 @@ def is_masked(array): def _strip_metadata_from_dims(cube, dims): """Remove ancillary variables and cell measures that map to specific dimensions. - Returns a cube copy with (possibly) some cell-measures and ancillary variables removed. + Returns a cube copy with (possibly) some cell-measures and ancillary + variables removed. To be used by operations that modify or remove dimensions. - Note: does nothing to (aux)-coordinates. Those would be handled explicitly by the calling operation. + + Note: does nothing to (aux)-coordinates. Those would be handled explicitly + by the calling operation. """ reduced_cube = cube.copy() diff --git a/noxfile.py b/noxfile.py index 4d3bb85f98..a30b6ce784 100644 --- a/noxfile.py +++ b/noxfile.py @@ -39,7 +39,7 @@ def session_lockfile(session: nox.sessions.Session) -> Path: def session_cachefile(session: nox.sessions.Session) -> Path: - """Returns the path of the session lockfile cache.""" + """Return the path of the session lockfile cache.""" lockfile = session_lockfile(session) tmp_dir = Path(session.create_tmp()) cache = tmp_dir / lockfile.name @@ -55,7 +55,7 @@ def venv_populated(session: nox.sessions.Session) -> bool: def venv_changed(session: nox.sessions.Session) -> bool: - """Returns True if the installed session is different. + """Return True if the installed session is different. Compares to that specified in the lockfile. """ @@ -109,8 +109,9 @@ def cache_cartopy(session: nox.sessions.Session) -> None: def prepare_venv(session: nox.sessions.Session) -> None: - """Create and cache the nox session conda environment, and additionally - provide conda environment package details and info. + """Create and cache the nox session conda environment. + + Additionally provide conda environment package details and info. 
Note that, iris is installed into the environment using pip.
 
diff --git a/pyproject.toml b/pyproject.toml
index 37384a3c3e..4325de0e0e 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -105,12 +105,14 @@ known-first-party = ["iris"]
 
 [tool.ruff.lint.per-file-ignores]
 # All test scripts
-"test_*.py" = [
+
+"lib/iris/tests/*.py" = [
     # https://docs.astral.sh/ruff/rules/undocumented-public-module/
     "D100", # Missing docstring in public module
+    "D205", # 1 blank line required between summary line and description
+    "D401", # First line of docstring should be in imperative mood
 ]
 
-
 [tool.ruff.lint.pydocstyle]
 convention = "numpy"
 
diff --git a/setup.py b/setup.py
index 1027085e81..28e7a003a9 100644
--- a/setup.py
+++ b/setup.py
@@ -25,8 +25,10 @@ def run(self):
 
 
 def custom_command(cmd, help=""):
-    """Factory function to generate a custom command that adds additional
-    behaviour to build the CF standard names module.
+    """Create custom command with factory function.
+
+    Custom command will add additional behaviour to build the CF
+    standard names module.
     """