compiler: Fix issue #2235 (PR #2237)

Open · wants to merge 12 commits into base: master
4 changes: 2 additions & 2 deletions devito/deprecations.py
@@ -6,14 +6,14 @@ class DevitoDeprecation():

     @cached_property
     def coeff_warn(self):
-        warn("The Coefficient API is deprecated and will be removed, coefficients should"
+        warn("The Coefficient API is deprecated and will be removed, coefficients should "
              "be passed directly to the derivative object `u.dx(weights=...)",
              DeprecationWarning, stacklevel=2)
         return
 
     @cached_property
     def symbolic_warn(self):
-        warn("coefficients='symbolic' is deprecated, coefficients should"
+        warn("coefficients='symbolic' is deprecated, coefficients should "
              "be passed directly to the derivative object `u.dx(weights=...)",
              DeprecationWarning, stacklevel=2)
         return
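For context, a hedged sketch of the replacement the warning points at: passing finite-difference weights straight to the derivative call rather than going through the deprecated Coefficient API. The `space_order` and the weight values below are illustrative assumptions, not taken from this PR, and the exact behaviour of the `weights` keyword may vary across Devito versions.

```python
# Illustrative only: custom 3-point weights for a space_order=2 x-derivative;
# the values are placeholders, not a validated stencil.
from devito import Grid, TimeFunction, Eq, Operator

grid = Grid(shape=(11, 11))
u = TimeFunction(name='u', grid=grid, space_order=2)

# Weights passed directly to the derivative, instead of the Coefficient API
eq = Eq(u.forward, u.dx(weights=[-0.5, 0.0, 0.5]))

Operator(eq).apply(time_M=1)
```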
13 changes: 13 additions & 0 deletions devito/ir/clusters/cluster.py
@@ -387,6 +387,19 @@ def dspace(self):
         # Dimension-centric view of the data space
         intervals = IntervalGroup.generate('union', *parts.values())
 
+        # 'union' may consume intervals (values) from keys whose dimensions are
+        # not mapped to intervals (e.g. issue #2235), resulting in a reduced
+        # iteration size. Here, we relax the upper bound of the mapped interval
+        # by intersecting only those intervals whose dimensions match
+        for f, v in parts.items():
+            for i in v:
+                if i.dim in self.ispace and i.dim in f.dimensions:
+                    # The oobs check is not required, but it helps reduce
+                    # interval reconstruction
+                    ii = intervals[i.dim].intersection(v[i.dim])
+                    if not ii.is_Null:
+                        intervals = intervals.set_upper(i.dim, ii.upper)
+
         # E.g., `db0 -> time`, but `xi NOT-> x`
         intervals = intervals.promote(lambda d: not d.is_Sub)
         intervals = intervals.zero(set(intervals.dimensions) - oobs)
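To make the mechanics of the loop above easier to follow in isolation, here is a self-contained toy mirror. `Span`, `intersect` and `relax` are stand-ins invented for this sketch; Devito's real `Interval`/`IntervalGroup` also carry stamps and relations, which are ignored here.

```python
from collections import namedtuple

# Toy stand-ins, NOT Devito classes: an interval is just (dim, lower, upper)
Span = namedtuple('Span', 'dim lower upper')

def intersect(a, b):
    """Overlap of two spans on the same dimension; None plays the role of a NullInterval."""
    lo, up = max(a.lower, b.lower), min(a.upper, b.upper)
    return Span(a.dim, lo, up) if lo <= up else None

def relax(unioned, parts):
    """`unioned`: dim -> Span after the union; `parts`: function name -> {dim -> Span}."""
    result = dict(unioned)
    for f, spans in parts.items():
        for d, s in spans.items():
            if d in result:  # mirrors `i.dim in self.ispace and i.dim in f.dimensions`
                ii = intersect(result[d], s)
                if ii is not None:
                    # mirrors `intervals.set_upper(i.dim, ii.upper)`
                    result[d] = Span(d, result[d].lower, ii.upper)
    return result

# In the spirit of issue #2235: the union carries a +1 upper offset on `time`
# (inherited from u.forward), while `usave` only touches offset 0; resetting the
# upper offset to 0 is what lets the derived default `time_M` cover the full save window.
unioned = {'time': Span('time', 0, 1)}
parts = {'usave': {'time': Span('time', 0, 0)}}
print(relax(unioned, parts))  # {'time': Span(dim='time', lower=0, upper=0)}
```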
8 changes: 8 additions & 0 deletions devito/ir/support/space.py
@@ -259,6 +259,9 @@ def negate(self):
     def zero(self):
         return Interval(self.dim, 0, 0, self.stamp)
 
+    def set_upper(self, v=0):
+        return Interval(self.dim, self.lower, v, self.stamp)
+
     def flip(self):
         return Interval(self.dim, self.upper, self.lower, self.stamp)
 

Review discussion on `set_upper`:

Contributor: this implementation doesn't really reflect the method name...

Author: better?

Contributor: it looks like a jump in the class API, given that all the other methods are more generic, see zero, flip, lift, etc.

Author: maybe use something like "ceil"?

Contributor: To add, should all of these properties be cached? They all build Interval objects, rather than simply returning a private variable.

Author: I do not know, @FabioLuporini, @mloubout?
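On the caching question left open above, a toy illustration of what memoizing these derived-interval builders could look like for an immutable object. `ToyInterval` is hypothetical, not Devito code, and the memory trade-off noted in the comment is part of why the question is non-trivial.

```python
from functools import lru_cache

class ToyInterval:
    """Hypothetical immutable interval; a stand-in, not Devito's Interval."""

    def __init__(self, dim, lower, upper):
        self.dim, self.lower, self.upper = dim, lower, upper

    def __hash__(self):
        return hash((self.dim, self.lower, self.upper))

    def __eq__(self, other):
        return (self.dim, self.lower, self.upper) == (other.dim, other.lower, other.upper)

    @lru_cache(maxsize=None)
    def zero(self):
        # Memoized: repeated calls return the same object instead of rebuilding
        # an identical interval each time. Note that lru_cache on a method keeps
        # a strong reference to `self`, so instances are never garbage-collected.
        return ToyInterval(self.dim, 0, 0)

i = ToyInterval('x', -1, 1)
assert i.zero() is i.zero()  # cached
```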

@@ -496,6 +499,11 @@ def zero(self, d=None):
 
         return IntervalGroup(intervals, relations=self.relations, mode=self.mode)
 
+    def set_upper(self, d, v=0):
+        dims = as_tuple(d)
+        return IntervalGroup([i.set_upper(v) if i.dim in dims else i for i in self],
+                             relations=self.relations, mode=self.mode)
+
     def lift(self, d=None, v=None):
         d = set(self.dimensions if d is None else as_tuple(d))
         intervals = [i.lift(v) if i.dim._defines & d else i for i in self]
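A minimal usage sketch of the two `set_upper` helpers added in this file. It assumes the internal API as shown in the diff (importable from `devito.ir.support.space`, an `Interval(dim, lower, upper)` constructor, indexing an `IntervalGroup` by dimension) and only works on this PR's branch; exact signatures may differ.

```python
from devito import Grid
from devito.ir.support.space import Interval, IntervalGroup

grid = Grid(shape=(4, 4))
x, y = grid.dimensions

ig = IntervalGroup([Interval(x, -1, 1), Interval(y, 0, 0)])

# Interval.set_upper: a copy of the interval with a new upper extreme
assert ig[x].set_upper(3).upper == 3

# IntervalGroup.set_upper: rebuild the group, touching only the given dimension(s)
relaxed = ig.set_upper(x, 3)
assert relaxed[x].upper == 3 and relaxed[y].upper == 0
```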
18 changes: 11 additions & 7 deletions examples/userapi/02_apply.ipynb
@@ -142,7 +142,11 @@
 " 'y_m': 0,\n",
 " 'y_size': 4,\n",
 " 'y_M': 3,\n",
-" 'timers': <cparam 'P' (0x7fb0d0550918)>}"
+" 'h_x': 0.33333334,\n",
+" 'h_y': 0.33333334,\n",
+" 'o_x': 0.0,\n",
+" 'o_y': 0.0,\n",
+" 'timers': <cparam 'P' (0x7f3ce4f15110)>}"
 ]
 },
 "execution_count": 5,
@@ -419,8 +423,8 @@
 {
 "data": {
 "text/plain": [
-"PerformanceSummary([('section0',\n",
-" PerfEntry(time=3e-06, gflopss=0.0, gpointss=0.0, oi=0.0, ops=0, itershapes=[]))])"
+"PerformanceSummary([(PerfKey(name='section0', rank=None),\n",
+" PerfEntry(time=1e-06, gflopss=0.0, gpointss=0.0, oi=0.0, ops=0, itershapes=[]))])"
 ]
 },
 "execution_count": 14,
@@ -449,14 +453,14 @@
"name": "stderr",
"output_type": "stream",
"text": [
"Operator `Kernel` run in 0.00 s\n"
"Operator `Kernel` ran in 0.01 s\n"
]
},
{
"data": {
"text/plain": [
"PerformanceSummary([('section0',\n",
" PerfEntry(time=3e-06, gflopss=0.021333333333333333, gpointss=0.010666666666666666, oi=0.16666666666666666, ops=2, itershapes=[(2, 4, 4)]))])"
"PerformanceSummary([(PerfKey(name='section0', rank=None),\n",
georgebisbas marked this conversation as resolved.
Show resolved Hide resolved
" PerfEntry(time=1e-06, gflopss=0.064, gpointss=0.032, oi=0.16666666666666666, ops=2, itershapes=((2, 4, 4),)))])"
]
},
"execution_count": 15,
Expand Down Expand Up @@ -525,7 +529,7 @@
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.6.8"
"version": "3.10.12"
}
},
"nbformat": 4,
Expand Down
2 changes: 1 addition & 1 deletion tests/test_checkpointing.py
@@ -10,7 +10,7 @@


 @switchconfig(log_level='WARNING')
-def test_segmented_incremment():
+def test_segmented_increment():
     """
     Test for segmented operator execution of a one-sided first order
     function (increment). The corresponding set of stencil offsets in
19 changes: 19 additions & 0 deletions tests/test_dimension.py
@@ -231,6 +231,25 @@ def test_degenerate_to_zero(self):
 
         assert np.all(u.data == 10)
 
+    def test_default_timeM(self):
+        """
+        MFE for issue #2235
+        """
+        grid = Grid(shape=(4, 4))
+
+        u = TimeFunction(name='u', grid=grid)
+        usave = TimeFunction(name='usave', grid=grid, save=5)
+
+        eqns = [Eq(u.forward, u + 1),
+                Eq(usave, u)]
+
+        op = Operator(eqns)
+
+        assert op.arguments()['time_M'] == 4
+        op.apply()
+
+        assert all(np.all(usave.data[i] == i) for i in range(4))
+
 
 class TestSubDimension:
 
21 changes: 12 additions & 9 deletions tests/test_operator.py
@@ -1990,16 +1990,19 @@ def test_2194_v2(self, eqns, expected, exp_trees, exp_iters):
 
 class TestInternals:
 
-    def test_indirection(self):
-        nt = 10
+    @pytest.mark.parametrize('nt, offset, epass',
+                             ([1, 1, True], [1, 2, False],
+                              [5, 3, True], [3, 5, False],
+                              [4, 1, True], [5, 10, False]))
+    def test_indirection(self, nt, offset, epass):
         grid = Grid(shape=(4, 4))
         time = grid.time_dim
         x, y = grid.dimensions
 
         f = TimeFunction(name='f', grid=grid, save=nt)
         g = TimeFunction(name='g', grid=grid)
 
-        idx = time + 1
+        idx = time + offset
         s = Indirection(name='ofs0', mapped=idx)
 
         eqns = [
@@ -2010,10 +2013,10 @@ def test_indirection(self):
         op = Operator(eqns)
 
         assert op._dspace[time].lower == 0
-        assert op._dspace[time].upper == 1
-        assert op.arguments()['time_M'] == nt - 2
+        assert op._dspace[time].upper == offset
 
-        op()
-
-        assert np.all(f.data[0] == 0.)
-        assert np.all(f.data[i] == 3. for i in range(1, 10))
+        if epass:
+            assert op.arguments()['time_M'] == nt - offset - 1
+            op()
+            assert np.all(f.data[0] == 0.)
+            assert np.all(f.data[i] == 3. for i in range(1, nt))