
Commit d95c9e2

Authored by tkknight, pre-commit-ci[bot] and bjlittle
ruff compliance for D205. (#5681)
* ruff complliance for D205 (wip)
* wip
* wip
* wip
* [pre-commit.ci] auto fixes from pre-commit.com hooks
  for more information, see https://pre-commit.ci
* various minor fixes.
* fix doctest.
* gallery noqa and minor fixes.
* removed comments
* Update lib/iris/_data_manager.py
  Co-authored-by: Bill Little <[email protected]>
* Update lib/iris/_lazy_data.py
  Co-authored-by: Bill Little <[email protected]>
* Update lib/iris/_merge.py
  Co-authored-by: Bill Little <[email protected]>
* Update lib/iris/_representation/cube_printout.py
  Co-authored-by: Bill Little <[email protected]>
* Update lib/iris/_representation/cube_printout.py
  Co-authored-by: Bill Little <[email protected]>
* Update lib/iris/analysis/_interpolation.py
  Co-authored-by: Bill Little <[email protected]>
* Update lib/iris/coords.py
  Co-authored-by: Bill Little <[email protected]>
* Update lib/iris/experimental/ugrid/mesh.py
  Co-authored-by: Bill Little <[email protected]>
* Apply suggestions from code review
  Co-authored-by: Bill Little <[email protected]>
* minor tweaks.

---------

Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com>
Co-authored-by: Bill Little <[email protected]>
1 parent 1e2a85a commit d95c9e2

File tree: 98 files changed (+1422 / -976 lines)


.ruff.toml

Lines changed: 0 additions & 1 deletion
@@ -29,7 +29,6 @@ lint.ignore = [
     "D102", # Missing docstring in public method
     # (D-3) Temporary, before an initial review, either fix ocurrences or move to (2).
     "D103", # Missing docstring in public function
-    "D205", # 1 blank line required between summary line and description
     "D401", # First line of docstring should be in imperative mood: ...

     # pyupgrade (UP)
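For reference, D205 is the pydocstyle rule "1 blank line required between summary line and description"; removing it from the ignore list above means ruff now enforces it, which is what drives every docstring change in this commit. A minimal sketch of the layout the rule expects (hypothetical names, not taken from the Iris codebase):

# Hypothetical example, for illustration only.
class Example:
    """Summarise the object in one short line.

    The longer description starts after exactly one blank line; that gap is
    what D205 checks for.
    """

    def do_thing(self):
        """Do one thing.

        Again, a single blank line separates the detail from the summary.
        """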

benchmarks/asv_delegated_conda.py

Lines changed: 6 additions & 3 deletions
@@ -2,8 +2,9 @@
 #
 # This file is part of Iris and is released under the BSD license.
 # See LICENSE in the root of the repository for full licensing details.
-"""ASV plug-in providing an alternative :class:`asv.plugins.conda.Conda`
-subclass that manages the Conda environment via custom user scripts.
+"""ASV plug-in providing an alternative :class:`asv.plugins.conda.Conda` subclass.
+
+Manages the Conda environment via custom user scripts.

 """

@@ -42,7 +43,9 @@ def __init__(
         requirements: dict,
         tagged_env_vars: dict,
     ) -> None:
-        """Parameters
+        """__init__.
+
+        Parameters
         ----------
         conf : Config instance

benchmarks/benchmarks/__init__.py

Lines changed: 3 additions & 1 deletion
@@ -36,7 +36,9 @@ def disable_repeat_between_setup(benchmark_object):


 class TrackAddedMemoryAllocation:
-    """Context manager which measures by how much process resident memory grew,
+    """Measures by how much process resident memory grew, during execution.
+
+    Context manager which measures by how much process resident memory grew,
     during execution of its enclosed code block.

     Obviously limited as to what it actually measures : Relies on the current

benchmarks/benchmarks/aux_factory.py

Lines changed: 3 additions & 1 deletion
@@ -16,7 +16,9 @@ class FactoryCommon:
     # * make class an ABC
     # * remove NotImplementedError
     # * combine setup_common into setup
-    """A base class running a generalised suite of benchmarks for any factory.
+    """Run a generalised suite of benchmarks for any factory.
+
+    A base class running a generalised suite of benchmarks for any factory.
     Factory to be specified in a subclass.

     ASV will run the benchmarks within this class for any subclasses.

benchmarks/benchmarks/coords.py

Lines changed: 3 additions & 1 deletion
@@ -23,7 +23,9 @@ class CoordCommon:
     # * make class an ABC
     # * remove NotImplementedError
     # * combine setup_common into setup
-    """A base class running a generalised suite of benchmarks for any coord.
+    """Run a generalised suite of benchmarks for any coord.
+
+    A base class running a generalised suite of benchmarks for any coord.
     Coord to be specified in a subclass.

     ASV will run the benchmarks within this class for any subclasses.

benchmarks/benchmarks/cperf/equality.py

Lines changed: 5 additions & 4 deletions
@@ -8,8 +8,11 @@


 class EqualityMixin(SingleDiagnosticMixin):
-    r"""Uses :class:`SingleDiagnosticMixin` as the realistic case will be comparing
+    r"""Use :class:`SingleDiagnosticMixin` as the realistic case.
+
+    Uses :class:`SingleDiagnosticMixin` as the realistic case will be comparing
     :class:`~iris.cube.Cube`\\ s that have been loaded from file.
+
     """

     # Cut down the parent parameters.
@@ -23,9 +26,7 @@ def setup(self, file_type, three_d=False, three_times=False):

 @on_demand_benchmark
 class CubeEquality(EqualityMixin):
-    r"""Benchmark time and memory costs of comparing LFRic and UM
-    :class:`~iris.cube.Cube`\\ s.
-    """
+    r"""Benchmark time & memory costs of comparing LFRic & UM :class:`~iris.cube.Cube`\\ s."""

     def _comparison(self):
         _ = self.cube == self.other_cube

benchmarks/benchmarks/cperf/load.py

Lines changed: 2 additions & 1 deletion
@@ -10,7 +10,8 @@
 @on_demand_benchmark
 class SingleDiagnosticLoad(SingleDiagnosticMixin):
     def time_load(self, _, __, ___):
-        """The 'real world comparison'
+        """The 'real world comparison'.
+
         * UM coords are always realised (DimCoords).
         * LFRic coords are not realised by default (MeshCoords).

benchmarks/benchmarks/cperf/save.py

Lines changed: 1 addition & 0 deletions
@@ -14,6 +14,7 @@
 @on_demand_benchmark
 class NetcdfSave:
     """Benchmark time and memory costs of saving ~large-ish data cubes to netcdf.
+
     Parametrised by file type.

     """

benchmarks/benchmarks/cube.py

Lines changed: 3 additions & 1 deletion
@@ -28,7 +28,9 @@ class ComponentCommon:
     # * make class an ABC
     # * remove NotImplementedError
     # * combine setup_common into setup
-    """A base class running a generalised suite of benchmarks for cubes that
+    """Run a generalised suite of benchmarks for cubes.
+
+    A base class running a generalised suite of benchmarks for cubes that
     include a specified component (e.g. Coord, CellMeasure etc.). Component to
     be specified in a subclass.

benchmarks/benchmarks/experimental/ugrid/__init__.py

Lines changed: 3 additions & 1 deletion
@@ -15,7 +15,9 @@


 class UGridCommon:
-    """A base class running a generalised suite of benchmarks for any ugrid object.
+    """Run a generalised suite of benchmarks for any ugrid object.
+
+    A base class running a generalised suite of benchmarks for any ugrid object.
     Object to be specified in a subclass.

     ASV will run the benchmarks within this class for any subclasses.

benchmarks/benchmarks/experimental/ugrid/regions_combine.py

Lines changed: 14 additions & 6 deletions
@@ -2,14 +2,18 @@
 #
 # This file is part of Iris and is released under the BSD license.
 # See LICENSE in the root of the repository for full licensing details.
-"""Benchmarks stages of operation of the function
+"""Benchmarks stages of operation.
+
+Benchmarks stages of operation of the function
 :func:`iris.experimental.ugrid.utils.recombine_submeshes`.

 Where possible benchmarks should be parameterised for two sizes of input data:
-* minimal: enables detection of regressions in parts of the run-time that do
-  NOT scale with data size.
-* large: large enough to exclusively detect regressions in parts of the
-  run-time that scale with data size.
+
+* minimal: enables detection of regressions in parts of the run-time that do
+  NOT scale with data size.
+
+* large: large enough to exclusively detect regressions in parts of the
+  run-time that scale with data size.

 """
 import os
@@ -193,10 +197,13 @@ def track_addedmem_compute_data(self, n_cubesphere):


 class CombineRegionsSaveData(MixinCombineRegions):
-    """Test saving *only*, having replaced the input cube data with 'imaginary'
+    """Test saving *only*.
+
+    Test saving *only*, having replaced the input cube data with 'imaginary'
     array data, so that input data is not loaded from disk during the save
     operation.

+
     """

     def time_save(self, n_cubesphere):
@@ -219,6 +226,7 @@ def track_filesize_saved(self, n_cubesphere):

 class CombineRegionsFileStreamedCalc(MixinCombineRegions):
     """Test the whole cost of file-to-file streaming.
+
     Uses the combined cube which is based on lazy data loading from the region
     cubes on disk.
     """

benchmarks/benchmarks/generate_data/ugrid.py

Lines changed: 9 additions & 3 deletions
@@ -14,7 +14,9 @@


 def generate_cube_like_2d_cubesphere(n_cube: int, with_mesh: bool, output_path: str):
-    """Construct and save to file an LFRIc cubesphere-like cube for a given
+    """Construct and save to file an LFRIc cubesphere-like cube.
+
+    Construct and save to file an LFRIc cubesphere-like cube for a given
     cubesphere size, *or* a simpler structured (UM-like) cube of equivalent
     size.

@@ -54,7 +56,9 @@ def generate_cube_like_2d_cubesphere(n_cube: int, with_mesh: bool, output_path:


 def make_cube_like_2d_cubesphere(n_cube: int, with_mesh: bool):
-    """Generate an LFRIc cubesphere-like cube for a given cubesphere size,
+    """Generate an LFRIc cubesphere-like cube.
+
+    Generate an LFRIc cubesphere-like cube for a given cubesphere size,
     *or* a simpler structured (UM-like) cube of equivalent size.

     All the cube data, coords and mesh content are LAZY, and produced without
@@ -155,7 +159,9 @@ def _external(xy_dims_, save_path_):


 def make_cubesphere_testfile(c_size, n_levels=0, n_times=1):
-    """Build a C<c_size> cubesphere testfile in a given directory, with a standard naming.
+    """Build a C<c_size> cubesphere testfile in a given directory.
+
+    Build a C<c_size> cubesphere testfile in a given directory, with a standard naming.
     If n_levels > 0 specified: 3d file with the specified number of levels.
     Return the file path.

benchmarks/benchmarks/import_iris.py

Lines changed: 1 addition & 2 deletions
@@ -31,8 +31,7 @@
 class Iris:
     @staticmethod
     def _import(module_name, reset_colormaps=False):
-        """Have experimented with adding sleep() commands into the imported
-        modules.
+        """Have experimented with adding sleep() commands into the imported modules.

         The results reveal:

benchmarks/benchmarks/sperf/combine_regions.py

Lines changed: 5 additions & 1 deletion
@@ -195,7 +195,9 @@ def track_addedmem_compute_data(self, n_cubesphere):

 @on_demand_benchmark
 class SaveData(Mixin):
-    """Test saving *only*, having replaced the input cube data with 'imaginary'
+    """Test saving *only*.
+
+    Test saving *only*, having replaced the input cube data with 'imaginary'
     array data, so that input data is not loaded from disk during the save
     operation.
@@ -217,8 +219,10 @@ def track_filesize_saved(self, n_cubesphere):
 @on_demand_benchmark
 class FileStreamedCalc(Mixin):
     """Test the whole cost of file-to-file streaming.
+
     Uses the combined cube which is based on lazy data loading from the region
     cubes on disk.
+
     """

     def setup(self, n_cubesphere, imaginary_data=False, create_result_cube=True):

benchmarks/benchmarks/sperf/equality.py

Lines changed: 4 additions & 2 deletions
@@ -9,8 +9,10 @@

 @on_demand_benchmark
 class CubeEquality(FileMixin):
-    r"""Benchmark time and memory costs of comparing :class:`~iris.cube.Cube`\\ s
-    with attached :class:`~iris.experimental.ugrid.mesh.Mesh`\\ es.
+    r"""Benchmark time and memory costs.
+
+    Benchmark time and memory costs of comparing :class:`~iris.cube.Cube`\\ s
+    with attached :class:`~iris.experimental.ugrid.mesh.Mesh`\\ es.

     Uses :class:`FileMixin` as the realistic case will be comparing
     :class:`~iris.cube.Cube`\\ s that have been loaded from file.

docs/gallery_code/general/plot_SOI_filtering.py

Lines changed: 3 additions & 2 deletions
@@ -1,4 +1,5 @@
-"""Applying a Filter to a Time-Series
+"""
+Applying a Filter to a Time-Series
 ==================================

 This example demonstrates low pass filtering a time-series by applying a
@@ -17,7 +18,7 @@
 Trenberth K. E. (1984) Signal Versus Noise in the Southern Oscillation.
 Monthly Weather Review, Vol 112, pp 326-332

-""" # noqa: D400
+""" # noqa: D205, D212, D400

 import matplotlib.pyplot as plt
 import numpy as np
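The gallery scripts in this commit keep a file-level noqa because Sphinx-Gallery renders the module docstring as the page title and introduction, and that title-plus-underline layout cannot satisfy D205 (blank line after the summary), D212 (summary on the first line) or D400 (summary ends with a period). A minimal sketch of that pattern, with a made-up title (not one of the real gallery pages):

"""
A Hypothetical Gallery Example
==============================

Narrative text for the rendered gallery page goes here; the heading layout
above is what the per-file noqa comment suppresses.

"""  # noqa: D205, D212, D400

# Imports follow the docstring, as in the real gallery scripts.
import matplotlib.pyplot as plt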

docs/gallery_code/general/plot_anomaly_log_colouring.py

Lines changed: 3 additions & 2 deletions
@@ -1,4 +1,5 @@
-"""Colouring Anomaly Data With Logarithmic Scaling
+"""
+Colouring Anomaly Data With Logarithmic Scaling
 ===============================================

 In this example, we need to plot anomaly data where the values have a
@@ -22,7 +23,7 @@
 and :obj:`matplotlib.pyplot.pcolormesh`).
 See also: https://en.wikipedia.org/wiki/False_color#Pseudocolor.

-""" # noqa: D400
+""" # noqa: D205, D212, D400

 import cartopy.crs as ccrs
 import matplotlib.colors as mcols

docs/gallery_code/general/plot_coriolis.py

Lines changed: 3 additions & 2 deletions
@@ -1,11 +1,12 @@
-"""Deriving the Coriolis Frequency Over the Globe
+"""
+Deriving the Coriolis Frequency Over the Globe
 ==============================================

 This code computes the Coriolis frequency and stores it in a cube with
 associated metadata. It then plots the Coriolis frequency on an orthographic
 projection.

-""" # noqa: D400
+""" # noqa: D205, D212, D400

 import cartopy.crs as ccrs
 import matplotlib.pyplot as plt

docs/gallery_code/general/plot_cross_section.py

Lines changed: 3 additions & 2 deletions
@@ -1,10 +1,11 @@
-"""Cross Section Plots
+"""
+Cross Section Plots
 ===================

 This example demonstrates contour plots of a cross-sectioned multi-dimensional
 cube which features a hybrid height vertical coordinate system.

-""" # noqa: D400
+""" # noqa: D205, D212, D400

 import matplotlib.pyplot as plt
docs/gallery_code/general/plot_custom_aggregation.py

Lines changed: 12 additions & 12 deletions
@@ -1,4 +1,5 @@
-"""Calculating a Custom Statistic
+"""
+Calculating a Custom Statistic
 ==============================

 This example shows how to define and use a custom
@@ -11,7 +12,7 @@
 over North America, and we want to calculate in how many years these exceed a
 certain temperature over a spell of 5 years or more.

-""" # noqa: D400
+""" # noqa: D205, D212, D400

 import matplotlib.pyplot as plt
 import numpy as np
@@ -27,25 +28,24 @@
 # Note: in order to meet the requirements of iris.analysis.Aggregator, it must
 # do the calculation over an arbitrary (given) data axis.
 def count_spells(data, threshold, axis, spell_length):
-    """Function to calculate the number of points in a sequence where the value
+    """Calculate the number of points in a sequence.
+
+    Function to calculate the number of points in a sequence where the value
     has exceeded a threshold value for at least a certain number of timepoints.

     Generalised to operate on multiple time sequences arranged on a specific
     axis of a multidimensional array.

-    Args:
-
-    * data (array):
+    Parameters
+    ----------
+    data : array
        raw data to be compared with value threshold.
-
-    * threshold (float):
+    threshold : float
        threshold point for 'significant' datapoints.
-
-    * axis (int):
+    axis : int
        number of the array dimension mapping the time sequences.
        (Can also be negative, e.g. '-1' means last dimension)
-
-    * spell_length (int):
+    spell_length : int
        number of consecutive times at which value > threshold to "count".

     """
