Skip to content

Commit 4570c79

Browse files
authored
Merge 6bc522b into 95b7ffe
2 parents 95b7ffe + 6bc522b commit 4570c79

File tree

3 files changed

+260
-13
lines changed

3 files changed

+260
-13
lines changed
Lines changed: 212 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,212 @@
1+
# Copyright Iris contributors
2+
#
3+
# This file is part of Iris and is released under the BSD license.
4+
# See LICENSE in the root of the repository for full licensing details.
5+
"""Benchmarks relating to :meth:`iris.cube.CubeList.merge` and ``concatenate``."""
6+
7+
import warnings
8+
9+
import numpy as np
10+
11+
from iris import analysis, coords, cube
12+
from iris.warnings import IrisVagueMetadataWarning
13+
14+
from .generate_data.stock import realistic_4d_w_everything
15+
16+
17+
class AggregationMixin:
    """Shared setup for the aggregation / collapse benchmarks.

    Builds ``self.cube`` from :func:`realistic_4d_w_everything`, trimmed to a
    small horizontal extent, with an auxiliary coordinate suitable for
    ``aggregated_by``. The single ASV parameter toggles lazy vs. realised
    source data.
    """

    params = [[False, True]]
    param_names = ["Lazy operations"]

    def setup(self, lazy_run: bool):
        """Prepare the benchmark cube.

        Parameters
        ----------
        lazy_run : bool
            If True, keep the cube's data (and the aggregation coordinate's
            points) lazy; otherwise realise them up front.
        """
        warnings.filterwarnings("ignore", message="Ignoring a datum")
        warnings.filterwarnings("ignore", category=IrisVagueMetadataWarning)
        # NOTE: named ``src_cube`` (not ``cube``) so we do not shadow the
        # ``cube`` module imported at the top of this file.
        src_cube = realistic_4d_w_everything(lazy=lazy_run)

        # Cell measures / ancillary variables are not needed for these
        # benchmarks and would otherwise restrict which aggregations work.
        for cm in src_cube.cell_measures():
            src_cube.remove_cell_measure(cm)
        for av in src_cube.ancillary_variables():
            src_cube.remove_ancillary_variable(av)

        # 7 distinct values, each repeated 10 times -> groups of 10 along
        # dimension 1 for ``aggregated_by``.
        # (Assumes dimension 1 has length 70 — TODO confirm against
        # realistic_4d_w_everything.)
        agg_mln_data = np.arange(0, 70, 10)
        agg_mln_repeat = np.repeat(agg_mln_data, 10)

        # Trim the horizontal dimensions so each benchmark stays fast.
        src_cube = src_cube[..., :10, :10]

        self.mln_aux = "aggregatable"
        self.mln = "model_level_number"
        agg_mln_coord = coords.AuxCoord(points=agg_mln_repeat, long_name=self.mln_aux)

        if lazy_run:
            agg_mln_coord.points = agg_mln_coord.lazy_points()
        src_cube.add_aux_coord(agg_mln_coord, 1)
        self.cube = src_cube
44+
45+
46+
class Aggregation(AggregationMixin):
    """Unweighted ``aggregated_by`` and ``collapsed`` benchmarks.

    Each ``time_*`` method runs one aggregator and touches ``.data`` so that
    lazy results are forced, making the timing cover the full computation.
    """

    # --- aggregated_by over the auxiliary grouping coordinate -------------

    def time_aggregated_by_MEAN(self, _):
        result = self.cube.aggregated_by(self.mln_aux, analysis.MEAN)
        _ = result.data

    def time_aggregated_by_COUNT(self, _):
        result = self.cube.aggregated_by(
            self.mln_aux, analysis.COUNT, function=lambda values: values > 280
        )
        _ = result.data

    def time_aggregated_by_GMEAN(self, _):
        result = self.cube.aggregated_by(self.mln_aux, analysis.GMEAN)
        _ = result.data

    def time_aggregated_by_HMEAN(self, _):
        result = self.cube.aggregated_by(self.mln_aux, analysis.HMEAN)
        _ = result.data

    def time_aggregated_by_MAX_RUN(self, _):
        result = self.cube.aggregated_by(
            self.mln_aux, analysis.MAX_RUN, function=lambda values: values > 280
        )
        _ = result.data

    def time_aggregated_by_MAX(self, _):
        result = self.cube.aggregated_by(self.mln_aux, analysis.MAX)
        _ = result.data

    def time_aggregated_by_MEDIAN(self, _):
        result = self.cube.aggregated_by(self.mln_aux, analysis.MEDIAN)
        _ = result.data

    def time_aggregated_by_MIN(self, _):
        result = self.cube.aggregated_by(self.mln_aux, analysis.MIN)
        _ = result.data

    def time_aggregated_by_PEAK(self, _):
        result = self.cube.aggregated_by(self.mln_aux, analysis.PEAK)
        _ = result.data

    def time_aggregated_by_PERCENTILE(self, _):
        result = self.cube.aggregated_by(
            self.mln_aux, analysis.PERCENTILE, percent=[10, 50, 90]
        )
        _ = result.data

    def time_aggregated_by_FAST_PERCENTILE(self, _):
        result = self.cube.aggregated_by(
            self.mln_aux,
            analysis.PERCENTILE,
            mdtol=0,
            percent=[10, 50, 90],
            fast_percentile_method=True,
        )
        _ = result.data

    def time_aggregated_by_PROPORTION(self, _):
        result = self.cube.aggregated_by(
            self.mln_aux,
            analysis.PROPORTION,
            function=lambda values: values > 280,
        )
        _ = result.data

    def time_aggregated_by_STD_DEV(self, _):
        result = self.cube.aggregated_by(self.mln_aux, analysis.STD_DEV)
        _ = result.data

    def time_aggregated_by_VARIANCE(self, _):
        result = self.cube.aggregated_by(self.mln_aux, analysis.VARIANCE)
        _ = result.data

    def time_aggregated_by_RMS(self, _):
        result = self.cube.aggregated_by(self.mln_aux, analysis.RMS)
        _ = result.data

    # --- collapsed over the model_level_number dimension ------------------

    def time_collapsed_by_MEAN(self, _):
        result = self.cube.collapsed(self.mln, analysis.MEAN)
        _ = result.data

    def time_collapsed_by_COUNT(self, _):
        result = self.cube.collapsed(
            self.mln, analysis.COUNT, function=lambda values: values > 280
        )
        _ = result.data

    def time_collapsed_by_GMEAN(self, _):
        result = self.cube.collapsed(self.mln, analysis.GMEAN)
        _ = result.data

    def time_collapsed_by_HMEAN(self, _):
        result = self.cube.collapsed(self.mln, analysis.HMEAN)
        _ = result.data

    def time_collapsed_by_MAX_RUN(self, _):
        result = self.cube.collapsed(
            self.mln, analysis.MAX_RUN, function=lambda values: values > 280
        )
        _ = result.data

    def time_collapsed_by_MAX(self, _):
        result = self.cube.collapsed(self.mln, analysis.MAX)
        _ = result.data

    def time_collapsed_by_MEDIAN(self, _):
        result = self.cube.collapsed(self.mln, analysis.MEDIAN)
        _ = result.data

    def time_collapsed_by_MIN(self, _):
        result = self.cube.collapsed(self.mln, analysis.MIN)
        _ = result.data

    def time_collapsed_by_PEAK(self, _):
        result = self.cube.collapsed(self.mln, analysis.PEAK)
        _ = result.data

    def time_collapsed_by_PERCENTILE(self, _):
        result = self.cube.collapsed(
            self.mln, analysis.PERCENTILE, percent=[10, 50, 90]
        )
        _ = result.data

    def time_collapsed_by_FAST_PERCENTILE(self, _):
        result = self.cube.collapsed(
            self.mln,
            analysis.PERCENTILE,
            mdtol=0,
            percent=[10, 50, 90],
            fast_percentile_method=True,
        )
        _ = result.data

    def time_collapsed_by_PROPORTION(self, _):
        result = self.cube.collapsed(
            self.mln, analysis.PROPORTION, function=lambda values: values > 280
        )
        _ = result.data

    def time_collapsed_by_STD_DEV(self, _):
        result = self.cube.collapsed(self.mln, analysis.STD_DEV)
        _ = result.data

    def time_collapsed_by_VARIANCE(self, _):
        result = self.cube.collapsed(self.mln, analysis.VARIANCE)
        _ = result.data

    def time_collapsed_by_RMS(self, _):
        result = self.cube.collapsed(self.mln, analysis.RMS)
        _ = result.data
166+
167+
168+
class WeightedAggregation(AggregationMixin):
    """Weighted ``aggregated_by`` and ``collapsed`` benchmarks."""

    def setup(self, lazy_run):
        """Extend the mixin setup with a weights array matching the cube shape."""
        super().setup(lazy_run)

        # Build weights that vary along dimension 1, then broadcast them to
        # the full cube shape (transposing twice so the leading axes line up).
        w = np.linspace(0, 1, 70)
        w = np.broadcast_to(w, self.cube.shape[:2])
        w = np.broadcast_to(w.T, self.cube.shape[::-1])
        self.weights = w.T

    ## currently has problems with indexing weights
    # def time_w_aggregated_by_WPERCENTILE(self, _):
    #     _ = self.cube.aggregated_by(
    #         self.mln_aux, analysis.WPERCENTILE, weights=self.weights, percent=[10, 50, 90]
    #     ).data

    def time_w_aggregated_by_SUM(self, _):
        result = self.cube.aggregated_by(
            self.mln_aux, analysis.SUM, weights=self.weights
        )
        _ = result.data

    def time_w_aggregated_by_RMS(self, _):
        result = self.cube.aggregated_by(
            self.mln_aux, analysis.RMS, weights=self.weights
        )
        _ = result.data

    def time_w_aggregated_by_MEAN(self, _):
        result = self.cube.aggregated_by(
            self.mln_aux, analysis.MEAN, weights=self.weights
        )
        _ = result.data

    def time_w_collapsed_by_WPERCENTILE(self, _):
        result = self.cube.collapsed(
            self.mln, analysis.WPERCENTILE, weights=self.weights, percent=[10, 50, 90]
        )
        _ = result.data

    def time_w_collapsed_by_SUM(self, _):
        result = self.cube.collapsed(self.mln, analysis.SUM, weights=self.weights)
        _ = result.data

    def time_w_collapsed_by_RMS(self, _):
        result = self.cube.collapsed(self.mln, analysis.RMS, weights=self.weights)
        _ = result.data

    def time_w_collapsed_by_MEAN(self, _):
        result = self.cube.collapsed(self.mln, analysis.MEAN, weights=self.weights)
        _ = result.data

benchmarks/benchmarks/generate_data/stock.py

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -13,6 +13,7 @@
1313
from pathlib import Path
1414

1515
import iris
16+
from iris import cube
1617
from iris.experimental.ugrid import PARSE_UGRID_ON_LOAD, load_mesh
1718

1819
from . import BENCHMARK_DATA, REUSE_DATA, load_realised, run_function_elsewhere
@@ -153,7 +154,7 @@ def _external(sample_mesh_kwargs_, save_path_):
153154
return source_mesh.to_MeshCoord(location=location, axis=axis)
154155

155156

156-
def realistic_4d_w_everything(w_mesh=False, lazy=False):
157+
def realistic_4d_w_everything(w_mesh=False, lazy=False) -> iris.cube.Cube:
157158
"""Run :func:`iris.tests.stock.realistic_4d_w_everything` in ``DATA_GEN_PYTHON``.
158159
159160
Parameters

lib/iris/cube.py

Lines changed: 46 additions & 12 deletions
Original file line numberDiff line numberDiff line change
@@ -28,6 +28,7 @@
2828
import numpy as np
2929
import numpy.ma as ma
3030

31+
from iris import cube
3132
import iris._constraints
3233
from iris._data_manager import DataManager
3334
import iris._lazy_data as _lazy
@@ -93,8 +94,8 @@ def from_cubes(cubes, constraints=None):
9394
constraints = iris._constraints.list_of_constraints(constraints)
9495
pairs = [_CubeFilter(constraint) for constraint in constraints]
9596
collection = _CubeFilterCollection(pairs)
96-
for cube in cubes:
97-
collection.add_cube(cube)
97+
for c in cubes:
98+
collection.add_cube(c)
9899
return collection
99100

100101
def __init__(self, pairs):
@@ -133,8 +134,8 @@ def __init__(self, *args, **kwargs):
133134
# Do whatever a list does, to initialise ourself "as a list"
134135
super().__init__(*args, **kwargs)
135136
# Check that all items in the list are cubes.
136-
for cube in self:
137-
self._assert_is_cube(cube)
137+
for c in self:
138+
self._assert_is_cube(c)
138139

139140
def __str__(self):
140141
"""Run short :meth:`Cube.summary` on every cube."""
@@ -309,9 +310,9 @@ def _extract_and_merge(cubes, constraints, strict=False, return_single_cube=Fals
309310
constraint_groups = dict(
310311
[(constraint, CubeList()) for constraint in constraints]
311312
)
312-
for cube in cubes:
313+
for c in cubes:
313314
for constraint, cube_list in constraint_groups.items():
314-
sub_cube = constraint.extract(cube)
315+
sub_cube = constraint.extract(c)
315316
if sub_cube is not None:
316317
cube_list.append(sub_cube)
317318

@@ -395,8 +396,8 @@ def merge_cube(self):
395396

396397
# Register each of our cubes with a single ProtoCube.
397398
proto_cube = iris._merge.ProtoCube(self[0])
398-
for cube in self[1:]:
399-
proto_cube.register(cube, error_on_mismatch=True)
399+
for c in self[1:]:
400+
proto_cube.register(c, error_on_mismatch=True)
400401

401402
# Extract the merged cube from the ProtoCube.
402403
(merged_cube,) = proto_cube.merge()
@@ -472,18 +473,18 @@ def merge(self, unique=True):
472473
"""
473474
# Register each of our cubes with its appropriate ProtoCube.
474475
proto_cubes_by_name = {}
475-
for cube in self:
476-
name = cube.standard_name
476+
for c in self:
477+
name = c.standard_name
477478
proto_cubes = proto_cubes_by_name.setdefault(name, [])
478479
proto_cube = None
479480

480481
for target_proto_cube in proto_cubes:
481-
if target_proto_cube.register(cube):
482+
if target_proto_cube.register(c):
482483
proto_cube = target_proto_cube
483484
break
484485

485486
if proto_cube is None:
486-
proto_cube = iris._merge.ProtoCube(cube)
487+
proto_cube = iris._merge.ProtoCube(c)
487488
proto_cubes.append(proto_cube)
488489

489490
# Emulate Python 2 behaviour.
@@ -3175,8 +3176,41 @@ def create_coords(src_coords, add_coord):
31753176
add_coord(result_coord, dims)
31763177
coord_mapping[id(src_coord)] = result_coord
31773178

3179+
def create_metadata(src_metadatas, add_metadata, metadata_type):
3180+
if metadata_type == "cell_measure":
3181+
metadata_search_object = cube.Cube.cell_measure
3182+
elif metadata_type == "ancillary_var":
3183+
metadata_search_object = cube.Cube.ancillary_variable
3184+
else:
3185+
raise ValueError
3186+
for src_metadata in src_metadatas:
3187+
dims = src_metadata.cube_dims(self)
3188+
if dim in dims:
3189+
dim_within_coord = dims.index(dim)
3190+
data = np.concatenate(
3191+
[
3192+
metadata_search_object(
3193+
chunk, src_metadata.name()
3194+
).core_data()
3195+
for chunk in chunks
3196+
],
3197+
dim_within_coord,
3198+
)
3199+
result_coord = src_metadata.copy(values=data)
3200+
else:
3201+
result_coord = src_metadata.copy()
3202+
add_metadata(result_coord, dims)
3203+
31783204
create_coords(self.dim_coords, result.add_dim_coord)
31793205
create_coords(self.aux_coords, result.add_aux_coord)
3206+
create_metadata(
3207+
self.cell_measures(), result.add_cell_measure, "cell_measure"
3208+
)
3209+
create_metadata(
3210+
self.ancillary_variables(),
3211+
result.add_ancillary_variable,
3212+
"ancillary_var",
3213+
)
31803214
for factory in self.aux_factories:
31813215
result.add_aux_factory(factory.updated(coord_mapping))
31823216
return result

0 commit comments

Comments
 (0)