From b0463fbd1ba1b10fe8d27fe4eb09378080cf79c4 Mon Sep 17 00:00:00 2001 From: George Bisbas Date: Thu, 23 Nov 2023 17:04:11 +0000 Subject: [PATCH 01/13] tests: enhance indirection testing --- tests/test_operator.py | 21 ++++++++++++--------- 1 file changed, 12 insertions(+), 9 deletions(-) diff --git a/tests/test_operator.py b/tests/test_operator.py index d5759c1c92..fb0aaafae8 100644 --- a/tests/test_operator.py +++ b/tests/test_operator.py @@ -1990,8 +1990,11 @@ def test_2194_v2(self, eqns, expected, exp_trees, exp_iters): class TestInternals: - def test_indirection(self): - nt = 10 + @pytest.mark.parametrize('nt, offset, epass', + ([1, 1, True], [1, 2, False], + [5, 1, True], [3, 5, False], + [4, 1, True], [5, 10, False])) + def test_indirection(self, nt, offset, epass): grid = Grid(shape=(4, 4)) time = grid.time_dim x, y = grid.dimensions @@ -1999,7 +2002,7 @@ def test_indirection(self): f = TimeFunction(name='f', grid=grid, save=nt) g = TimeFunction(name='g', grid=grid) - idx = time + 1 + idx = time + offset s = Indirection(name='ofs0', mapped=idx) eqns = [ @@ -2010,10 +2013,10 @@ def test_indirection(self): op = Operator(eqns) assert op._dspace[time].lower == 0 - assert op._dspace[time].upper == 1 - assert op.arguments()['time_M'] == nt - 2 + assert op._dspace[time].upper == offset - op() - - assert np.all(f.data[0] == 0.) - assert np.all(f.data[i] == 3. for i in range(1, 10)) + if epass: + assert op.arguments()['time_M'] == nt - offset - 1 + op() + assert np.all(f.data[0] == 0.) + assert np.all(f.data[i] == 3. for i in range(1, nt)) From 3ef1ce7737e032a4efbd0e4615a525b6e5a529bc Mon Sep 17 00:00:00 2001 From: George Bisbas Date: Fri, 13 Oct 2023 17:50:37 +0100 Subject: [PATCH 02/13] compiler: Intersect intervals from buffered TimeDimensions --- devito/ir/clusters/cluster.py | 16 ++++++++++++++++ devito/operator/operator.py | 1 + tests/test_buffering.py | 21 +++++++++++++++++++++ 3 files changed, 38 insertions(+) diff --git a/devito/ir/clusters/cluster.py b/devito/ir/clusters/cluster.py index f0b0d718ea..913ea207b1 100644 --- a/devito/ir/clusters/cluster.py +++ b/devito/ir/clusters/cluster.py @@ -387,12 +387,28 @@ def dspace(self): # Construct the `intervals` of the DataSpace, that is a global, # Dimension-centric view of the data space + intervals = IntervalGroup.generate('union', *parts.values()) # E.g., `db0 -> time`, but `xi NOT-> x` intervals = intervals.promote(lambda d: not d.is_Sub) intervals = intervals.zero(set(intervals.dimensions) - oobs) + # Intersect with intervals from buffered dimensions. 
Unions of + # buffered dimension intervals may result in shrinking time size + try: + proc = [] + for f, v in parts.items(): + if f.save: + for i in v: + if i.dim.is_Time: + proc.append(intervals[i.dim].intersection(i)) + else: + proc.append(intervals[i.dim]) + intervals = IntervalGroup(proc) + except AttributeError: + pass + return DataSpace(intervals, parts) @cached_property diff --git a/devito/operator/operator.py b/devito/operator/operator.py index 1cf99625ca..fff85007fe 100644 --- a/devito/operator/operator.py +++ b/devito/operator/operator.py @@ -244,6 +244,7 @@ def _build(cls, expressions, **kwargs): op._reads = filter_sorted(flatten(e.reads for e in irs.expressions)) op._writes = filter_sorted(flatten(e.writes for e in irs.expressions)) op._dimensions = set().union(*[e.dimensions for e in irs.expressions]) + # import pdb;pdb.set_trace() op._dtype, op._dspace = irs.clusters.meta op._profiler = profiler diff --git a/tests/test_buffering.py b/tests/test_buffering.py index c1196466e3..06f8dc8d99 100644 --- a/tests/test_buffering.py +++ b/tests/test_buffering.py @@ -723,3 +723,24 @@ def test_stencil_issue_1915_v2(subdomain): op1.apply(time_M=nt-2, u=u1) assert np.all(u.data == u1.data) + + +def test_default_timeM(): + """ + MFE for issue #2235 + """ + grid = Grid(shape=(4, 4)) + + u = TimeFunction(name='u', grid=grid) + usave = TimeFunction(name='usave', grid=grid, save=5) + + eqns = [Eq(u.forward, u + 1), + Eq(usave, u)] + + op = Operator(eqns) + + assert op.arguments()['time_M'] == 4 + + op.apply() + + assert all(np.all(usave.data[i] == i) for i in range(4)) From 4bdb490fe60e83c33ba8695d5c8dade6aebaca20 Mon Sep 17 00:00:00 2001 From: George Bisbas Date: Fri, 13 Oct 2023 18:10:14 +0100 Subject: [PATCH 03/13] tests: Edit test_indirection --- devito/ir/clusters/cluster.py | 2 -- devito/operator/operator.py | 1 - 2 files changed, 3 deletions(-) diff --git a/devito/ir/clusters/cluster.py b/devito/ir/clusters/cluster.py index 913ea207b1..a75a1292e5 100644 --- a/devito/ir/clusters/cluster.py +++ b/devito/ir/clusters/cluster.py @@ -387,9 +387,7 @@ def dspace(self): # Construct the `intervals` of the DataSpace, that is a global, # Dimension-centric view of the data space - intervals = IntervalGroup.generate('union', *parts.values()) - # E.g., `db0 -> time`, but `xi NOT-> x` intervals = intervals.promote(lambda d: not d.is_Sub) intervals = intervals.zero(set(intervals.dimensions) - oobs) diff --git a/devito/operator/operator.py b/devito/operator/operator.py index fff85007fe..1cf99625ca 100644 --- a/devito/operator/operator.py +++ b/devito/operator/operator.py @@ -244,7 +244,6 @@ def _build(cls, expressions, **kwargs): op._reads = filter_sorted(flatten(e.reads for e in irs.expressions)) op._writes = filter_sorted(flatten(e.writes for e in irs.expressions)) op._dimensions = set().union(*[e.dimensions for e in irs.expressions]) - # import pdb;pdb.set_trace() op._dtype, op._dspace = irs.clusters.meta op._profiler = profiler From c3d4bbb22a867616c9bcb9972d4cf85d9abc1cc4 Mon Sep 17 00:00:00 2001 From: George Bisbas Date: Tue, 17 Oct 2023 18:09:30 +0100 Subject: [PATCH 04/13] compiler: Only relax upper dspace in case of save --- devito/ir/clusters/cluster.py | 20 ++++++-------------- devito/ir/support/space.py | 10 ++++++++++ tests/test_buffering.py | 21 --------------------- tests/test_checkpointing.py | 2 +- tests/test_dimension.py | 21 ++++++++++++++++++++- 5 files changed, 37 insertions(+), 37 deletions(-) diff --git a/devito/ir/clusters/cluster.py b/devito/ir/clusters/cluster.py index 
a75a1292e5..5f19641afd 100644 --- a/devito/ir/clusters/cluster.py +++ b/devito/ir/clusters/cluster.py @@ -388,24 +388,16 @@ def dspace(self): # Construct the `intervals` of the DataSpace, that is a global, # Dimension-centric view of the data space intervals = IntervalGroup.generate('union', *parts.values()) + # E.g., `db0 -> time`, but `xi NOT-> x` intervals = intervals.promote(lambda d: not d.is_Sub) intervals = intervals.zero(set(intervals.dimensions) - oobs) - # Intersect with intervals from buffered dimensions. Unions of - # buffered dimension intervals may result in shrinking time size - try: - proc = [] - for f, v in parts.items(): - if f.save: - for i in v: - if i.dim.is_Time: - proc.append(intervals[i.dim].intersection(i)) - else: - proc.append(intervals[i.dim]) - intervals = IntervalGroup(proc) - except AttributeError: - pass + # Buffered TimeDimensions should not shirnk their upper time offset + for f, v in parts.items(): + if f.is_TimeFunction: + if f.save and not f.time_dim.is_Conditional: + intervals = intervals.ceil(v[f.time_dim]) return DataSpace(intervals, parts) diff --git a/devito/ir/support/space.py b/devito/ir/support/space.py index 6a11acdc28..5831b1b70a 100644 --- a/devito/ir/support/space.py +++ b/devito/ir/support/space.py @@ -259,6 +259,11 @@ def negate(self): def zero(self): return Interval(self.dim, 0, 0, self.stamp) + def ceil(self, o): + if o.is_Null: + return self._rebuild() + return Interval(self.dim, self.lower, o.upper, self.stamp) + def flip(self): return Interval(self.dim, self.upper, self.lower, self.stamp) @@ -496,6 +501,11 @@ def zero(self, d=None): return IntervalGroup(intervals, relations=self.relations, mode=self.mode) + def ceil(self, o=None): + d = self.dimensions if o is None else as_tuple(o.dim) + return IntervalGroup([i.ceil(o) if i.dim in d else i for i in self], + relations=self.relations) + def lift(self, d=None, v=None): d = set(self.dimensions if d is None else as_tuple(d)) intervals = [i.lift(v) if i.dim._defines & d else i for i in self] diff --git a/tests/test_buffering.py b/tests/test_buffering.py index 06f8dc8d99..c1196466e3 100644 --- a/tests/test_buffering.py +++ b/tests/test_buffering.py @@ -723,24 +723,3 @@ def test_stencil_issue_1915_v2(subdomain): op1.apply(time_M=nt-2, u=u1) assert np.all(u.data == u1.data) - - -def test_default_timeM(): - """ - MFE for issue #2235 - """ - grid = Grid(shape=(4, 4)) - - u = TimeFunction(name='u', grid=grid) - usave = TimeFunction(name='usave', grid=grid, save=5) - - eqns = [Eq(u.forward, u + 1), - Eq(usave, u)] - - op = Operator(eqns) - - assert op.arguments()['time_M'] == 4 - - op.apply() - - assert all(np.all(usave.data[i] == i) for i in range(4)) diff --git a/tests/test_checkpointing.py b/tests/test_checkpointing.py index 75cca861cc..0217f46d52 100644 --- a/tests/test_checkpointing.py +++ b/tests/test_checkpointing.py @@ -10,7 +10,7 @@ @switchconfig(log_level='WARNING') -def test_segmented_incremment(): +def test_segmented_increment(): """ Test for segmented operator execution of a one-sided first order function (increment). 
The corresponding set of stencil offsets in diff --git a/tests/test_dimension.py b/tests/test_dimension.py index 5ed0bd08a5..1817bec889 100644 --- a/tests/test_dimension.py +++ b/tests/test_dimension.py @@ -231,6 +231,25 @@ def test_degenerate_to_zero(self): assert np.all(u.data == 10) + def test_default_timeM(self): + """ + MFE for issue #2235 + """ + grid = Grid(shape=(4, 4)) + + u = TimeFunction(name='u', grid=grid) + usave = TimeFunction(name='usave', grid=grid, save=5) + + eqns = [Eq(u.forward, u + 1), + Eq(usave, u)] + + op = Operator(eqns) + + assert op.arguments()['time_M'] == 4 + op.apply() + + assert all(np.all(usave.data[i] == i) for i in range(4)) + class TestSubDimension: @@ -825,7 +844,7 @@ def test_basic(self): eqns = [Eq(u.forward, u + 1.), Eq(u2.forward, u2 + 1.), Eq(usave, u)] op = Operator(eqns) - op.apply() + op.apply(time_M=nt-2) assert np.all(np.allclose(u.data[(nt-1) % 3], nt-1)) assert np.all([np.allclose(u2.data[i], i) for i in range(nt)]) assert np.all([np.allclose(usave.data[i], i*factor) From dbab2e0d3ae85280c66bffaddd000cf2e8abd385 Mon Sep 17 00:00:00 2001 From: George Bisbas Date: Wed, 18 Oct 2023 12:59:22 +0100 Subject: [PATCH 05/13] compiler: add mode to IG creation, rebase on 2208 --- devito/ir/support/space.py | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/devito/ir/support/space.py b/devito/ir/support/space.py index 5831b1b70a..64f1726d97 100644 --- a/devito/ir/support/space.py +++ b/devito/ir/support/space.py @@ -260,7 +260,7 @@ def zero(self): return Interval(self.dim, 0, 0, self.stamp) def ceil(self, o): - if o.is_Null: + if not self.is_compatible(o): return self._rebuild() return Interval(self.dim, self.lower, o.upper, self.stamp) @@ -399,7 +399,9 @@ def generate(cls, op, *interval_groups, relations=None): relations.update(set().union(*[ig.relations for ig in interval_groups])) modes = set(ig.mode for ig in interval_groups) + assert len(modes) <= 1 + try: mode = modes.pop() except KeyError: @@ -504,7 +506,7 @@ def zero(self, d=None): def ceil(self, o=None): d = self.dimensions if o is None else as_tuple(o.dim) return IntervalGroup([i.ceil(o) if i.dim in d else i for i in self], - relations=self.relations) + relations=self.relations, mode=self.mode) def lift(self, d=None, v=None): d = set(self.dimensions if d is None else as_tuple(d)) From 9602ff9091095f4a38450cd73547bc9b2ae3d145 Mon Sep 17 00:00:00 2001 From: George Bisbas Date: Thu, 19 Oct 2023 17:57:06 +0100 Subject: [PATCH 06/13] compiler: simplify solution --- devito/ir/clusters/cluster.py | 9 ++++++--- 1 file changed, 6 insertions(+), 3 deletions(-) diff --git a/devito/ir/clusters/cluster.py b/devito/ir/clusters/cluster.py index 5f19641afd..ae85df3f5d 100644 --- a/devito/ir/clusters/cluster.py +++ b/devito/ir/clusters/cluster.py @@ -393,11 +393,14 @@ def dspace(self): intervals = intervals.promote(lambda d: not d.is_Sub) intervals = intervals.zero(set(intervals.dimensions) - oobs) - # Buffered TimeDimensions should not shirnk their upper time offset + # Buffered TimeDimensions should inherit the higher upper bound + # of the involved parts for f, v in parts.items(): - if f.is_TimeFunction: - if f.save and not f.time_dim.is_Conditional: + try: + if f.save: intervals = intervals.ceil(v[f.time_dim]) + except: + pass return DataSpace(intervals, parts) From 252eeae063022340bd871a117243e391e2281dfb Mon Sep 17 00:00:00 2001 From: George Bisbas Date: Fri, 3 Nov 2023 16:53:16 +0000 Subject: [PATCH 07/13] compiler: Make DataSpace intervals get the upper limit available of time_dims 
--- devito/ir/clusters/cluster.py | 7 +++---- devito/ir/support/space.py | 2 -- devito/types/dimension.py | 2 +- 3 files changed, 4 insertions(+), 7 deletions(-) diff --git a/devito/ir/clusters/cluster.py b/devito/ir/clusters/cluster.py index ae85df3f5d..67b32b3799 100644 --- a/devito/ir/clusters/cluster.py +++ b/devito/ir/clusters/cluster.py @@ -393,12 +393,11 @@ def dspace(self): intervals = intervals.promote(lambda d: not d.is_Sub) intervals = intervals.zero(set(intervals.dimensions) - oobs) - # Buffered TimeDimensions should inherit the higher upper bound - # of the involved parts + # DataSpace intervals should derive their upper bound from + # the higher upper bound available in the involved parts for f, v in parts.items(): try: - if f.save: - intervals = intervals.ceil(v[f.time_dim]) + intervals = intervals.ceil(v[f.time_dim]) except: pass diff --git a/devito/ir/support/space.py b/devito/ir/support/space.py index 64f1726d97..7e1def4536 100644 --- a/devito/ir/support/space.py +++ b/devito/ir/support/space.py @@ -399,9 +399,7 @@ def generate(cls, op, *interval_groups, relations=None): relations.update(set().union(*[ig.relations for ig in interval_groups])) modes = set(ig.mode for ig in interval_groups) - assert len(modes) <= 1 - try: mode = modes.pop() except KeyError: diff --git a/devito/types/dimension.py b/devito/types/dimension.py index 152cf4f627..6b01881c6f 100644 --- a/devito/types/dimension.py +++ b/devito/types/dimension.py @@ -346,7 +346,7 @@ def _arg_check(self, args, size, interval): # Autopadding causes non-integer upper limit from devito.symbolics import normalize_args upper = interval.upper.subs(normalize_args(args)) - if args[self.max_name] + upper >= size: + if args[self.max_name] + upper > size: raise InvalidArgument("OOB detected due to %s=%d" % (self.max_name, args[self.max_name])) From a2b5cc9ee409928bb04542c29758a241b9f33874 Mon Sep 17 00:00:00 2001 From: George Bisbas Date: Fri, 3 Nov 2023 16:57:32 +0000 Subject: [PATCH 08/13] tests: Restore redundant, fix tutorial --- examples/userapi/02_apply.ipynb | 22 +++++++++++++--------- tests/test_dimension.py | 2 +- 2 files changed, 14 insertions(+), 10 deletions(-) diff --git a/examples/userapi/02_apply.ipynb b/examples/userapi/02_apply.ipynb index 693c4eed08..80584051bd 100644 --- a/examples/userapi/02_apply.ipynb +++ b/examples/userapi/02_apply.ipynb @@ -142,7 +142,11 @@ " 'y_m': 0,\n", " 'y_size': 4,\n", " 'y_M': 3,\n", - " 'timers': }" + " 'h_x': 0.33333334,\n", + " 'h_y': 0.33333334,\n", + " 'o_x': 0.0,\n", + " 'o_y': 0.0,\n", + " 'timers': }" ] }, "execution_count": 5, @@ -246,14 +250,14 @@ "name": "stdout", "output_type": "stream", "text": [ - "OOB detected due to time_M=2\n" + "OOB detected due to time_M=3\n" ] } ], "source": [ "from devito.exceptions import InvalidArgument\n", "try:\n", - " op.apply(time_M=2)\n", + " op.apply(time_M=3)\n", "except InvalidArgument as e:\n", " print(e)" ] @@ -419,8 +423,8 @@ { "data": { "text/plain": [ - "PerformanceSummary([('section0',\n", - " PerfEntry(time=3e-06, gflopss=0.0, gpointss=0.0, oi=0.0, ops=0, itershapes=[]))])" + "PerformanceSummary([(PerfKey(name='section0', rank=None),\n", + " PerfEntry(time=1e-06, gflopss=0.0, gpointss=0.0, oi=0.0, ops=0, itershapes=[]))])" ] }, "execution_count": 14, @@ -449,14 +453,14 @@ "name": "stderr", "output_type": "stream", "text": [ - "Operator `Kernel` run in 0.00 s\n" + "Operator `Kernel` ran in 0.01 s\n" ] }, { "data": { "text/plain": [ - "PerformanceSummary([('section0',\n", - " PerfEntry(time=3e-06, gflopss=0.021333333333333333, 
gpointss=0.010666666666666666, oi=0.16666666666666666, ops=2, itershapes=[(2, 4, 4)]))])" + "PerformanceSummary([(PerfKey(name='section0', rank=None),\n", + " PerfEntry(time=1e-06, gflopss=0.064, gpointss=0.032, oi=0.16666666666666666, ops=2, itershapes=((2, 4, 4),)))])" ] }, "execution_count": 15, @@ -525,7 +529,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.6.8" + "version": "3.10.12" } }, "nbformat": 4, diff --git a/tests/test_dimension.py b/tests/test_dimension.py index 1817bec889..ecc7eb89fe 100644 --- a/tests/test_dimension.py +++ b/tests/test_dimension.py @@ -844,7 +844,7 @@ def test_basic(self): eqns = [Eq(u.forward, u + 1.), Eq(u2.forward, u2 + 1.), Eq(usave, u)] op = Operator(eqns) - op.apply(time_M=nt-2) + op.apply() assert np.all(np.allclose(u.data[(nt-1) % 3], nt-1)) assert np.all([np.allclose(u2.data[i], i) for i in range(nt)]) assert np.all([np.allclose(usave.data[i], i*factor) From 0bea7d3ad7420ba5712308ef71e2bdcb5cb1c4ad Mon Sep 17 00:00:00 2001 From: George Bisbas Date: Thu, 16 Nov 2023 15:49:12 +0000 Subject: [PATCH 09/13] compiler: Derive solution from oob set of dims --- devito/ir/clusters/cluster.py | 11 +++++------ 1 file changed, 5 insertions(+), 6 deletions(-) diff --git a/devito/ir/clusters/cluster.py b/devito/ir/clusters/cluster.py index 67b32b3799..ad92580bb7 100644 --- a/devito/ir/clusters/cluster.py +++ b/devito/ir/clusters/cluster.py @@ -393,13 +393,12 @@ def dspace(self): intervals = intervals.promote(lambda d: not d.is_Sub) intervals = intervals.zero(set(intervals.dimensions) - oobs) - # DataSpace intervals should derive their upper bound from - # the higher upper bound available in the involved parts + # Upper bound of intervals including dimensions classified for + # shifting should retain the "oobs" upper bound for f, v in parts.items(): - try: - intervals = intervals.ceil(v[f.time_dim]) - except: - pass + for i in v: + if i.dim in oobs: + intervals = intervals.ceil(v[i.dim]) return DataSpace(intervals, parts) From d53eaa2a40efcbb0a8c58927f79cccaceb07b1c4 Mon Sep 17 00:00:00 2001 From: George Bisbas Date: Thu, 23 Nov 2023 18:36:32 +0000 Subject: [PATCH 10/13] compiler: Rework correctness for indirections --- devito/ir/clusters/cluster.py | 7 ++++++- devito/types/dimension.py | 2 +- examples/userapi/02_apply.ipynb | 4 ++-- 3 files changed, 9 insertions(+), 4 deletions(-) diff --git a/devito/ir/clusters/cluster.py b/devito/ir/clusters/cluster.py index ad92580bb7..a555063c14 100644 --- a/devito/ir/clusters/cluster.py +++ b/devito/ir/clusters/cluster.py @@ -398,7 +398,12 @@ def dspace(self): for f, v in parts.items(): for i in v: if i.dim in oobs: - intervals = intervals.ceil(v[i.dim]) + try: + if intervals[i.dim].upper > v[i.dim].upper and \ + bool(i.dim in f.dimensions): + intervals = intervals.ceil(v[i.dim]) + except AttributeError: + pass return DataSpace(intervals, parts) diff --git a/devito/types/dimension.py b/devito/types/dimension.py index 6b01881c6f..152cf4f627 100644 --- a/devito/types/dimension.py +++ b/devito/types/dimension.py @@ -346,7 +346,7 @@ def _arg_check(self, args, size, interval): # Autopadding causes non-integer upper limit from devito.symbolics import normalize_args upper = interval.upper.subs(normalize_args(args)) - if args[self.max_name] + upper > size: + if args[self.max_name] + upper >= size: raise InvalidArgument("OOB detected due to %s=%d" % (self.max_name, args[self.max_name])) diff --git a/examples/userapi/02_apply.ipynb b/examples/userapi/02_apply.ipynb index 
80584051bd..f8f730dd52 100644 --- a/examples/userapi/02_apply.ipynb +++ b/examples/userapi/02_apply.ipynb @@ -250,14 +250,14 @@ "name": "stdout", "output_type": "stream", "text": [ - "OOB detected due to time_M=3\n" + "OOB detected due to time_M=2\n" ] } ], "source": [ "from devito.exceptions import InvalidArgument\n", "try:\n", - " op.apply(time_M=3)\n", + " op.apply(time_M=2)\n", "except InvalidArgument as e:\n", " print(e)" ] From 18e61d42b5a6b1824370cfc294b221e14710ae08 Mon Sep 17 00:00:00 2001 From: George Bisbas Date: Fri, 24 Nov 2023 16:31:15 +0000 Subject: [PATCH 11/13] compiler: Relax intervals with upper from not mapped dimensions --- devito/ir/clusters/cluster.py | 24 ++++++++++++------------ devito/ir/support/space.py | 12 +++++------- tests/test_operator.py | 2 +- 3 files changed, 18 insertions(+), 20 deletions(-) diff --git a/devito/ir/clusters/cluster.py b/devito/ir/clusters/cluster.py index a555063c14..55e39f69d1 100644 --- a/devito/ir/clusters/cluster.py +++ b/devito/ir/clusters/cluster.py @@ -389,22 +389,22 @@ def dspace(self): # Dimension-centric view of the data space intervals = IntervalGroup.generate('union', *parts.values()) + # 'union' may consume intervals (values) from keys that have dimensions + # not mapped to intervals e.g. issue #2235, resulting in reduced + # iteration size. Here, we relax this mapped upper interval, by + # intersecting intervals with matching only dimensions + for f, v in parts.items(): + for i in v: + # oobs check is not required but helps reduce + # interval reconstruction + if i.dim in oobs and i.dim in f.dimensions: + ii = intervals[i.dim].intersection(v[i.dim]) + intervals = intervals.set_upper(i.dim, ii.upper) + # E.g., `db0 -> time`, but `xi NOT-> x` intervals = intervals.promote(lambda d: not d.is_Sub) intervals = intervals.zero(set(intervals.dimensions) - oobs) - # Upper bound of intervals including dimensions classified for - # shifting should retain the "oobs" upper bound - for f, v in parts.items(): - for i in v: - if i.dim in oobs: - try: - if intervals[i.dim].upper > v[i.dim].upper and \ - bool(i.dim in f.dimensions): - intervals = intervals.ceil(v[i.dim]) - except AttributeError: - pass - return DataSpace(intervals, parts) @cached_property diff --git a/devito/ir/support/space.py b/devito/ir/support/space.py index 7e1def4536..3f11ec0bd2 100644 --- a/devito/ir/support/space.py +++ b/devito/ir/support/space.py @@ -259,10 +259,8 @@ def negate(self): def zero(self): return Interval(self.dim, 0, 0, self.stamp) - def ceil(self, o): - if not self.is_compatible(o): - return self._rebuild() - return Interval(self.dim, self.lower, o.upper, self.stamp) + def set_upper(self, v=0): + return Interval(self.dim, self.lower, v, self.stamp) def flip(self): return Interval(self.dim, self.upper, self.lower, self.stamp) @@ -501,9 +499,9 @@ def zero(self, d=None): return IntervalGroup(intervals, relations=self.relations, mode=self.mode) - def ceil(self, o=None): - d = self.dimensions if o is None else as_tuple(o.dim) - return IntervalGroup([i.ceil(o) if i.dim in d else i for i in self], + def set_upper(self, d, v=0): + dims = as_tuple(d) + return IntervalGroup([i.set_upper(v) if i.dim in dims else i for i in self], relations=self.relations, mode=self.mode) def lift(self, d=None, v=None): diff --git a/tests/test_operator.py b/tests/test_operator.py index fb0aaafae8..9d24566468 100644 --- a/tests/test_operator.py +++ b/tests/test_operator.py @@ -1992,7 +1992,7 @@ class TestInternals: @pytest.mark.parametrize('nt, offset, epass', ([1, 1, True], [1, 2, 
False], - [5, 1, True], [3, 5, False], + [5, 3, True], [3, 5, False], [4, 1, True], [5, 10, False])) def test_indirection(self, nt, offset, epass): grid = Grid(shape=(4, 4)) From d36f063f06dfe2f0c74bf4e60827e8f9ae04dc48 Mon Sep 17 00:00:00 2001 From: George Bisbas Date: Wed, 20 Nov 2024 18:05:54 +0200 Subject: [PATCH 12/13] compiler: Fix occurence of NullInterval after rebase --- devito/deprecations.py | 4 ++-- devito/ir/clusters/cluster.py | 9 +++++---- 2 files changed, 7 insertions(+), 6 deletions(-) diff --git a/devito/deprecations.py b/devito/deprecations.py index f13c145de5..7484d25a3a 100644 --- a/devito/deprecations.py +++ b/devito/deprecations.py @@ -6,14 +6,14 @@ class DevitoDeprecation(): @cached_property def coeff_warn(self): - warn("The Coefficient API is deprecated and will be removed, coefficients should" + warn("The Coefficient API is deprecated and will be removed, coefficients should " "be passed directly to the derivative object `u.dx(weights=...)", DeprecationWarning, stacklevel=2) return @cached_property def symbolic_warn(self): - warn("coefficients='symbolic' is deprecated, coefficients should" + warn("coefficients='symbolic' is deprecated, coefficients should " "be passed directly to the derivative object `u.dx(weights=...)", DeprecationWarning, stacklevel=2) return diff --git a/devito/ir/clusters/cluster.py b/devito/ir/clusters/cluster.py index 55e39f69d1..54a48be120 100644 --- a/devito/ir/clusters/cluster.py +++ b/devito/ir/clusters/cluster.py @@ -395,11 +395,12 @@ def dspace(self): # intersecting intervals with matching only dimensions for f, v in parts.items(): for i in v: - # oobs check is not required but helps reduce - # interval reconstruction - if i.dim in oobs and i.dim in f.dimensions: + if i.dim in self.ispace and i.dim in f.dimensions: + # oobs check is not required but helps reduce + # interval reconstruction ii = intervals[i.dim].intersection(v[i.dim]) - intervals = intervals.set_upper(i.dim, ii.upper) + if not ii.is_Null: + intervals = intervals.set_upper(i.dim, ii.upper) # E.g., `db0 -> time`, but `xi NOT-> x` intervals = intervals.promote(lambda d: not d.is_Sub) From d38e59bd5291438a5cd053f5dedb751f01935bc5 Mon Sep 17 00:00:00 2001 From: George Bisbas Date: Fri, 14 Feb 2025 15:41:24 +0200 Subject: [PATCH 13/13] docs: Pass fstrings in dense after rebase --- devito/types/dense.py | 79 +++++++++++++++++++++---------------------- 1 file changed, 39 insertions(+), 40 deletions(-) diff --git a/devito/types/dense.py b/devito/types/dense.py index 29d7d24d1d..fe1ae2a09c 100644 --- a/devito/types/dense.py +++ b/devito/types/dense.py @@ -111,8 +111,8 @@ def __init_finalize__(self, *args, function=None, **kwargs): # case `self._data is None` self.data else: - raise ValueError("`initializer` must be callable or buffer, not %s" - % type(initializer)) + raise ValueError(f"`initializer` must be callable or buffer, " + f"not {type(initializer)}") _subs = Differentiable._subs @@ -125,8 +125,8 @@ def wrapper(self): # Aliasing Functions must not allocate data return - debug("Allocating host memory for %s%s [%s]" - % (self.name, self.shape_allocated, humanbytes(self.nbytes))) + debug(f"Allocating host memory for {self.name}{self.shape_allocated} " + f"[{humanbytes(self.nbytes)}]") # Clear up both SymPy and Devito caches to drop unreachable data CacheManager.clear(force=False) @@ -176,8 +176,8 @@ def __coefficients_setup__(self, **kwargs): if coeffs == 'symbolic': deprecations.symbolic_warn else: - raise ValueError("coefficients must be one of %s" - " not %s" % 
(str(fd_weights_registry), coeffs)) + raise ValueError(f"coefficients must be one of {fd_weights_registry} " + f"not {coeffs}") return coeffs def __staggered_setup__(self, **kwargs): @@ -352,12 +352,12 @@ def _size_outhalo(self): if self._distributor.is_parallel and (any(left) > 0 or any(right)) > 0: try: - warning_msg = """A space order of {0} and a halo size of {1} has been - set but the current rank ({2}) has a domain size of - only {3}""".format(self._space_order, - max(self._size_inhalo), - self._distributor.myrank, - min(self.grid.shape_local)) + warning_msg = ( + f"A space order of {self._space_order} and a halo size of " + f"{max(self._size_inhalo)} has been set but the current rank " + f"({self._distributor.myrank}) has a domain size of only " + f"{min(self.grid.shape_local)}" + ) if not self._distributor.is_boundary_rank: warning(warning_msg) else: @@ -768,7 +768,7 @@ def _C_get_field(self, region, dim, side=None): offset = 0 size = ffp(self._C_field_size, self._C_make_index(dim)) else: - raise ValueError("Unknown region `%s`" % str(region)) + raise ValueError(f"Unknown region `{region}`") return RegionMeta(offset, size) @@ -781,8 +781,8 @@ def _halo_exchange(self): # Nothing to do return if MPI.COMM_WORLD.size > 1 and self._distributor is None: - raise RuntimeError("`%s` cannot perform a halo exchange as it has " - "no Grid attached" % self.name) + raise RuntimeError(f"`{self.name}` cannot perform a halo exchange " + f"as it has no Grid attached") neighborhood = self._distributor.neighborhood comm = self._distributor.comm @@ -874,17 +874,16 @@ def _arg_check(self, args, intervals, **kwargs): If an incompatibility is detected. """ if self.name not in args: - raise InvalidArgument("No runtime value for `%s`" % self.name) + raise InvalidArgument(f"No runtime value for `{self.name}`") data = args[self.name] if len(data.shape) != self.ndim: - raise InvalidArgument("Shape %s of runtime value `%s` does not match " - "dimensions %s" % - (data.shape, self.name, self.dimensions)) + raise InvalidArgument(f"Shape {data.shape} of runtime value `{self.name}` " + f"does not match dimensions {self.dimensions}") if data.dtype != self.dtype: - warning("Data type %s of runtime value `%s` does not match the " - "Function data type %s" % (data.dtype, self.name, self.dtype)) + warning(f"Data type {data.dtype} of runtime value `{self.name}` " + f"does not match the Function data type {self.dtype}") # Check each Dimension for potential OOB accesses for i, s in zip(self.dimensions, data.shape): @@ -894,11 +893,11 @@ def _arg_check(self, args, intervals, **kwargs): args.options['linearize'] and \ self.is_regular and \ data.size - 1 >= np.iinfo(np.int32).max: - raise InvalidArgument("`%s`, with its %d elements, is too big for " - "int32 pointer arithmetic. Consider using the " - "'index-mode=int64' option, the save=Buffer(..) " - "API (TimeFunction only), or domain " - "decomposition via MPI" % (self.name, data.size)) + raise InvalidArgument(f"`{self.name}`, with its {data.size} elements, " + "is too big for int32 pointer arithmetic." + "Consider using the 'index-mode=int64' option, " + "the save=Buffer(..) 
(TimeFunction only), or domain " + "decomposition via MPI") def _arg_finalize(self, args, alias=None): key = alias or self @@ -1134,9 +1133,8 @@ def __shape_setup__(cls, **kwargs): if d in grid.dimensions: size = grid.dimension_map[d] if size.glb != s and s is not None: - raise ValueError("Dimension `%s` is given size `%d`, " - "while `grid` says `%s` has size `%d` " - % (d, s, d, size.glb)) + raise ValueError(f"Dimension `{d}` is given size `{s}`, " + f"while `grid` says `{d}` has size `{size.glb}`") else: loc_shape.append(size.loc) else: @@ -1192,7 +1190,7 @@ def __padding_setup__(self, **kwargs): padding = tuple((0, i) if isinstance(i, int) else i for i in padding) else: - raise TypeError("`padding` must be int or %d-tuple of ints" % self.ndim) + raise TypeError(f"`padding` must be int or {self.ndim}-tuple of ints") return DimensionTuple(*padding, getters=self.dimensions) @property @@ -1438,7 +1436,7 @@ def __shape_setup__(cls, **kwargs): elif isinstance(save, int): shape.insert(cls._time_position, save) else: - raise TypeError("`save` can be None, int or Buffer, not %s" % type(save)) + raise TypeError(f"`save` can be None, int or Buffer, not {type(save)}") elif dimensions is None: raise TypeError("`dimensions` required if both `grid` and " "`shape` are provided") @@ -1502,9 +1500,10 @@ def _arg_check(self, args, intervals, **kwargs): key_time_size = args[self.name].shape[self._time_position] if self._time_buffering and self._time_size != key_time_size: - raise InvalidArgument("Expected `time_size=%d` for runtime " - "value `%s`, found `%d` instead" - % (self._time_size, self.name, key_time_size)) + raise InvalidArgument( + f"Expected `time_size={self._time_size}` for runtime value " + f"`{self.name}`, found `{key_time_size}` instead" + ) class SubFunction(Function): @@ -1533,8 +1532,8 @@ def _arg_values(self, **kwargs): if self._parent is not None and self.parent.name not in kwargs: return self._parent._arg_defaults(alias=self._parent).reduce_all() elif self.name in kwargs: - raise RuntimeError("`%s` is a SubFunction, so it can't be assigned " - "a value dynamically" % self.name) + raise RuntimeError(f"`{self.name}` is a SubFunction, so it can't " + "be assigned a value dynamically") else: return self._arg_defaults(alias=self) @@ -1678,8 +1677,8 @@ def make(self, shape=None, initializer=None, allocator=None, **kwargs): for n, i in enumerate(self.shape): v = i.subs(args) if not v.is_Integer: - raise ValueError("Couldn't resolve `shape[%d]=%s` with the given " - "kwargs (obtained: `%s`)" % (n, i, v)) + raise ValueError(f"Couldn't resolve `shape[{n}]={i}` with the given " + f"kwargs (obtained: `{v}`)") shape.append(int(v)) shape = tuple(shape) elif len(shape) != self.ndim: @@ -1706,6 +1705,6 @@ def _arg_values(self, **kwargs): # Set new values and re-derive defaults return new._arg_defaults().reduce_all() else: - raise InvalidArgument("Illegal runtime value for `%s`" % self.name) + raise InvalidArgument(f"Illegal runtime value for `{self.name}`") else: - raise InvalidArgument("TempFunction `%s` lacks override" % self.name) + raise InvalidArgument(f"TempFunction `{self.name}` lacks override")
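
For reference, a minimal sketch of the user-facing behaviour this series targets (issue #2235), adapted from the test_default_timeM MFE added in PATCH 02/04: when a save'd TimeFunction appears alongside a buffered one, the default `time_M` should be derived from the `save` size rather than shrunk by the union of intervals. The snippet below only restates that test; the expected values are those asserted in the series, not new claims.

    import numpy as np
    from devito import Grid, TimeFunction, Eq, Operator

    grid = Grid(shape=(4, 4))

    u = TimeFunction(name='u', grid=grid)                   # buffered (no save)
    usave = TimeFunction(name='usave', grid=grid, save=5)   # save=5 caps the time extent

    eqns = [Eq(u.forward, u + 1),
            Eq(usave, u)]

    op = Operator(eqns)

    # With this series applied, the default time_M is derived from `save`
    assert op.arguments()['time_M'] == 4

    op.apply()
    assert all(np.all(usave.data[i] == i) for i in range(4))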