Skip to content
This repository has been archived by the owner on Nov 27, 2024. It is now read-only.

Commit

Permalink
Merge branch 'gpu' of github.com:OP2/PyOP2 into gpu
Browse files Browse the repository at this point in the history
  • Loading branch information
Clara committed May 5, 2020
2 parents 9f1f57b + 56b44f6 commit 3d2acf6
Show file tree
Hide file tree
Showing 7 changed files with 263 additions and 128 deletions.
31 changes: 31 additions & 0 deletions pyop2/backend.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,31 @@
class _not_implemented:  # noqa
    """Sentinel type marking a backend feature with no implementation."""


class AbstractComputeBackend:
    """
    Abstract class to record all the backend specific implementation of
    :mod:`pyop2`'s data structures.

    Concrete backends override the class attributes below with their own
    implementations; any attribute still holding a :class:`_not_implemented`
    sentinel raises :class:`NotImplementedError` on access.
    """
    ParLoop = _not_implemented()
    Set = _not_implemented()
    ExtrudedSet = _not_implemented()
    MixedSet = _not_implemented()
    Subset = _not_implemented()
    DataSet = _not_implemented()
    MixedDataSet = _not_implemented()
    Map = _not_implemented()
    MixedMap = _not_implemented()
    Dat = _not_implemented()
    MixedDat = _not_implemented()
    DatView = _not_implemented()
    Mat = _not_implemented()
    Global = _not_implemented()
    GlobalDataSet = _not_implemented()

    def __getattribute__(self, key):
        """Intercept every attribute access so that unimplemented backend
        features fail loudly instead of leaking sentinel objects.

        :raises NotImplementedError: if the looked-up value is still a
            :class:`_not_implemented` sentinel.
        """
        # Fixed dunder spelling: the original ``_getattr_`` is never invoked
        # implicitly by Python, so the sentinel check was dead code.
        val = super().__getattribute__(key)
        if isinstance(val, _not_implemented):
            # Report the attribute *name* (not the sentinel object) and the
            # concrete backend class for a readable error message.
            raise NotImplementedError("'{}' is not implemented for backend"
                                      " '{}'.".format(key, type(self).__name__))
        return val
48 changes: 19 additions & 29 deletions pyop2/base.py
Original file line number Diff line number Diff line change
Expand Up @@ -65,8 +65,10 @@


def _make_object(name, *args, **kwargs):
    """Instantiate the class called *name* from the active compute backend.

    :arg name: class name to look up on the backend (e.g. ``"ParLoop"``);
        the remaining arguments are forwarded to its constructor.

    TODO: all ``_make_object("Xyz", ...)`` call sites should eventually be
    replaced by ``compute_backend.Xyz(...)`` directly.
    """
    # Imported lazily to avoid a circular import with pyop2.op2.
    from pyop2.op2 import compute_backend
    return getattr(compute_backend, name)(*args, **kwargs)


# Data API
Expand Down Expand Up @@ -212,12 +214,12 @@ def __iter__(self):
def split(self):
"""Split a mixed argument into a tuple of constituent arguments."""
if self._is_mixed_dat:
return tuple(_make_object('Arg', d, m, self._access)
return tuple(Arg(d, m, self._access)
for d, m in zip(self.data, self._map))
elif self._is_mixed_mat:
s = self.data.sparsity.shape
mr, mc = self.map
return tuple(_make_object('Arg', self.data[i, j], (mr.split[i], mc.split[j]),
return tuple(Arg(self.data[i, j], (mr.split[i], mc.split[j]),
self._access)
for j in range(s[1]) for i in range(s[0]))
else:
Expand Down Expand Up @@ -1355,7 +1357,6 @@ def pack(self):
('name', str, NameTypeError))
@validate_dtype(('dtype', None, DataTypeError))
def __init__(self, dataset, data=None, dtype=None, name=None, uid=None):

if isinstance(dataset, Dat):
self.__init__(dataset.dataset, None, dtype=dataset.dtype,
name="copy_of_%s" % dataset.name)
Expand Down Expand Up @@ -1396,7 +1397,7 @@ def _wrapper_cache_key_(self):
def __call__(self, access, path=None):
    """Build an :class:`Arg` wrapping this Dat for use in a parallel loop.

    :arg access: access descriptor (e.g. READ, WRITE, INC).
    :arg path: optional :class:`Map` through which the Dat is indexed;
        when type checking is enabled its ``toset`` must match this
        Dat's set.
    :raises MapValueError: if ``path.toset`` does not match
        ``self.dataset.set``.
    """
    if configuration["type_check"] and path and path.toset != self.dataset.set:
        raise MapValueError("To Set of Map does not match Set of Dat.")
    return Arg(data=self, map=path, access=access)

def __getitem__(self, idx):
"""Return self if ``idx`` is 0, raise an error otherwise."""
Expand Down Expand Up @@ -1576,7 +1577,7 @@ def zero(self, subset=None):
data = loopy.GlobalArg("dat", dtype=self.dtype, shape=(self.cdim,))
knl = loopy.make_function([domain], [insn], [data], name="zero")

knl = _make_object('Kernel', knl, 'zero')
knl = Kernel(knl, 'zero')
loop = _make_object('ParLoop', knl,
iterset,
self(WRITE))
Expand Down Expand Up @@ -1610,7 +1611,7 @@ def _copy_parloop(self, other, subset=None):
loopy.GlobalArg("other", dtype=other.dtype, shape=(other.cdim,))]
knl = loopy.make_function([domain], [insn], data, name="copy")

self._copy_kernel = _make_object('Kernel', knl, 'copy')
self._copy_kernel = Kernel(knl, 'copy')
return _make_object('ParLoop', self._copy_kernel,
subset or self.dataset.set,
self(READ), other(WRITE))
Expand Down Expand Up @@ -1663,7 +1664,7 @@ def _op(self, other, op):
loopy.GlobalArg("other", dtype=other.dtype, shape=(other.cdim,)),
loopy.GlobalArg("ret", dtype=self.dtype, shape=(self.cdim,))]
knl = loopy.make_function([domain], [insn], data, name=name)
k = _make_object('Kernel', knl, name)
k = Kernel(knl, name)

par_loop(k, self.dataset.set, self(READ), other(READ), ret(WRITE))

Expand Down Expand Up @@ -1692,7 +1693,7 @@ def _iop(self, other, op):
data = [loopy.GlobalArg("self", dtype=self.dtype, shape=(self.cdim,)),
loopy.GlobalArg("other", dtype=other.dtype, shape=(other.cdim,))]
knl = loopy.make_function([domain], [insn], data, name=name)
k = _make_object('Kernel', knl, name)
k = Kernel(knl, name)

par_loop(k, self.dataset.set, self(INC), other(READ))

Expand All @@ -1714,7 +1715,7 @@ def _uop(self, op):
insn = loopy.Assignment(_self.index(i), _op(_self.index(i)), within_inames=frozenset(["i"]))
data = [loopy.GlobalArg("self", dtype=self.dtype, shape=(self.cdim,))]
knl = loopy.make_function([domain], [insn], data, name=name)
k = _make_object('Kernel', knl, name)
k = Kernel(knl, name)

par_loop(k, self.dataset.set, self(RW))
return self
Expand Down Expand Up @@ -1745,7 +1746,7 @@ def inner(self, other):
loopy.GlobalArg("ret", dtype=ret.dtype, shape=(1,))]
knl = loopy.make_function([domain], [insn], data, name="inner")

k = _make_object('Kernel', knl, "inner")
k = Kernel(knl, "inner")
par_loop(k, self.dataset.set, self(READ), other(READ), ret(INC))
return ret.data_ro[0]

Expand Down Expand Up @@ -2287,7 +2288,7 @@ def _wrapper_cache_key_(self):

@validate_in(('access', _modes, ModeValueError))
def __call__(self, access, path=None):
    """Build an :class:`Arg` wrapping this object with the given access
    descriptor.

    :arg access: access descriptor; validated against ``_modes``.
    :arg path: accepted for interface symmetry with :class:`Dat` but
        unused here (no map is attached to the resulting Arg).
    """
    return Arg(data=self, access=access)

def __iter__(self):
"""Yield self when iterated over."""
Expand Down Expand Up @@ -2502,16 +2503,9 @@ def __init__(self, iterset, toset, arity, values=None, name=None, offset=None):
self._toset = toset
self.comm = toset.comm
self._arity = arity
if False:
# maps indexed as `map[idof, icell]`
self._values = verify_reshape(values, IntType,
(arity, iterset.total_size),
allow_none=True)
else:
# maps indexed as `map[icell, idof]`
self._values = verify_reshape(values, IntType,
(iterset.total_size, arity),
allow_none=True)
self._values = verify_reshape(values, IntType,
(iterset.total_size, arity),
allow_none=True)
self.shape = (iterset.total_size, arity)
self._name = name or "map_%d" % Map._globalcount
if offset is None or len(offset) == 0:
Expand Down Expand Up @@ -2589,11 +2583,7 @@ def values(self):
This only returns the map values for local points, to see the
halo points too, use :meth:`values_with_halo`."""
if False:
# Transposed maps
return self._values[:, :self.iterset.size]
else:
return self._values[:self.iterset.size]
return self._values[:self.iterset.size]

@cached_property
def values_with_halo(self):
Expand Down Expand Up @@ -3133,7 +3123,7 @@ def __call__(self, access, path, lgmaps=None, unroll_map=False):
path_maps = as_tuple(path, Map, 2)
if configuration["type_check"] and tuple(path_maps) not in self.sparsity:
raise MapValueError("Path maps not in sparsity maps")
return _make_object('Arg', data=self, map=path_maps, access=access, lgmaps=lgmaps, unroll_map=unroll_map)
return Arg(data=self, map=path_maps, access=access, lgmaps=lgmaps, unroll_map=unroll_map)

@cached_property
def _wrapper_cache_key_(self):
Expand Down
5 changes: 5 additions & 0 deletions pyop2/configuration.py
Original file line number Diff line number Diff line change
Expand Up @@ -71,6 +71,10 @@ class Configuration(dict):
cdim > 1 be built as block sparsities, or dof sparsities. The
former saves memory but changes which preconditioners are
available for the resulting matrices. (Default yes)
:param only_explicit_host_device_data_transfers: Flag selecting the
host<->device data-transfer mode. If set to *True*, the user is responsible
for invoking all host<->device transfers explicitly. If set to *False* (the
default), Firedrake figures out the data transfers automatically, although
this may lead to suboptimal performance.
"""
# name, env variable, type, default, write once
DEFAULTS = {
Expand Down Expand Up @@ -112,6 +116,7 @@ class Configuration(dict):
"print_summary": ("PYOP2_PRINT_SUMMARY", bool, False),
"matnest": ("PYOP2_MATNEST", bool, True),
"block_sparsity": ("PYOP2_BLOCK_SPARSITY", bool, True),
"only_explicit_host_device_data_transfers": ("EXPLICIT_TRNSFRS", bool, False)
}
"""Default values for PyOP2 configuration parameters"""

Expand Down
Loading

0 comments on commit 3d2acf6

Please sign in to comment.