From d4b1d7567b1286959b83c7914701f272c1460bc7 Mon Sep 17 00:00:00 2001
From: Philip Müller <147368808+philip-paul-mueller@users.noreply.github.com>
Date: Mon, 4 Nov 2024 13:41:10 +0100
Subject: [PATCH 1/3] fix[DaCe]: Disable Some Transformations (#1711)

DaCe's `MapReduceFusion` and `MapWCRFusion` are interesting because they move
the initialization of the reduction accumulator away, which enables more
fusion. However, they currently have a bug: they assume that the reduction
node is in the global scope and not inside a map scope.
---
 .../runners/dace_fieldview/transformations/auto_opt.py | 9 ++++++---
 1 file changed, 6 insertions(+), 3 deletions(-)

diff --git a/src/gt4py/next/program_processors/runners/dace_fieldview/transformations/auto_opt.py b/src/gt4py/next/program_processors/runners/dace_fieldview/transformations/auto_opt.py
index 37cc89aa2b..e070cdfe4e 100644
--- a/src/gt4py/next/program_processors/runners/dace_fieldview/transformations/auto_opt.py
+++ b/src/gt4py/next/program_processors/runners/dace_fieldview/transformations/auto_opt.py
@@ -257,9 +257,12 @@ def gt_auto_optimize(
         sdfg.apply_transformations_repeated(
             [
                 dace_dataflow.TrivialMapElimination,
-                # TODO(phimuell): Investigate if these two are appropriate.
-                dace_dataflow.MapReduceFusion,
-                dace_dataflow.MapWCRFusion,
+                # TODO(phimuell): The transformations are interesting, but they have
+                # a bug as they assume that they are not working inside a map scope.
+                # Before we use them we have to fix them.
+                # https://chat.spcl.inf.ethz.ch/spcl/pl/8mtgtqjb378hfy7h9a96sy3nhc
+                # dace_dataflow.MapReduceFusion,
+                # dace_dataflow.MapWCRFusion,
             ],
             validate=validate,
             validate_all=validate_all,

From 725b6ba070f0f6eadda250aa308803a7ff30b685 Mon Sep 17 00:00:00 2001
From: Hannes Vogt
Date: Mon, 4 Nov 2024 13:46:36 +0100
Subject: [PATCH 2/3] build: update gitpod image (#1722)

---
 .gitpod.Dockerfile | 4 +---
 1 file changed, 1 insertion(+), 3 deletions(-)

diff --git a/.gitpod.Dockerfile b/.gitpod.Dockerfile
index 967ae36f2e..5d02a0f436 100644
--- a/.gitpod.Dockerfile
+++ b/.gitpod.Dockerfile
@@ -1,8 +1,6 @@
-FROM gitpod/workspace-python
+FROM gitpod/workspace-python-3.11
 USER root
 RUN apt-get update \
     && apt-get install -y libboost-dev \
     && apt-get clean && rm -rf /var/cache/apt/* && rm -rf /var/lib/apt/lists/* && rm -rf /tmp/*
 USER gitpod
-RUN pyenv install 3.10.2
-RUN pyenv global 3.10.2

From eea1fb63717beda13516137d9afb17d0e79d396a Mon Sep 17 00:00:00 2001
From: Hannes Vogt
Date: Mon, 4 Nov 2024 15:11:27 +0100
Subject: [PATCH 3/3] bug[next]: fix missing local kind in gtfn connectivity (#1715)

The second dimension of a connectivity is a local dimension. Previously, we
defaulted to making this dimension horizontal. Currently, this information is
not used.
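
A minimal illustrative sketch (not part of the patch itself) of the behavior
change described above, using only the `gt4py.next.common` names that appear
in the diff below; the offset name "E2V" is a made-up example:

    from gt4py.next import common

    # Hypothetical offset name, used only for illustration.
    offset_name = "E2V"

    # Previously the neighbor (second) dimension of a connectivity fell back
    # to the default horizontal kind; the patch below tags it explicitly as LOCAL.
    local_dim = common.Dimension(offset_name, kind=common.DimensionKind.LOCAL)
    assert local_dim.kind == common.DimensionKind.LOCAL
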
---
 .../codegens/gtfn/gtfn_module.py | 22 +++++++++++--------
 1 file changed, 13 insertions(+), 9 deletions(-)

diff --git a/src/gt4py/next/program_processors/codegens/gtfn/gtfn_module.py b/src/gt4py/next/program_processors/codegens/gtfn/gtfn_module.py
index d729a5ba2f..07eec0b64b 100644
--- a/src/gt4py/next/program_processors/codegens/gtfn/gtfn_module.py
+++ b/src/gt4py/next/program_processors/codegens/gtfn/gtfn_module.py
@@ -18,7 +18,6 @@
 from gt4py._core import definitions as core_defs
 from gt4py.eve import codegen
 from gt4py.next import common
-from gt4py.next.common import Connectivity, Dimension
 from gt4py.next.ffront import fbuiltins
 from gt4py.next.iterator import ir as itir
 from gt4py.next.iterator.transforms import LiftMode, fencil_to_program, pass_manager
@@ -84,7 +83,7 @@ def _process_regular_arguments(
         self,
         program: itir.FencilDefinition | itir.Program,
         arg_types: tuple[ts.TypeSpec, ...],
-        offset_provider: dict[str, Connectivity | Dimension],
+        offset_provider: common.OffsetProvider,
     ) -> tuple[list[interface.Parameter], list[str]]:
         parameters: list[interface.Parameter] = []
         arg_exprs: list[str] = []
@@ -107,20 +106,20 @@ def _process_regular_arguments(
                     # translate sparse dimensions to tuple dtype
                     dim_name = dim.value
                     connectivity = offset_provider[dim_name]
-                    assert isinstance(connectivity, Connectivity)
+                    assert isinstance(connectivity, common.Connectivity)
                     size = connectivity.max_neighbors
                     arg = f"gridtools::sid::dimension_to_tuple_like({arg})"
             arg_exprs.append(arg)

         return parameters, arg_exprs

     def _process_connectivity_args(
-        self, offset_provider: dict[str, Connectivity | Dimension]
+        self, offset_provider: dict[str, common.Connectivity | common.Dimension]
     ) -> tuple[list[interface.Parameter], list[str]]:
         parameters: list[interface.Parameter] = []
         arg_exprs: list[str] = []
         for name, connectivity in offset_provider.items():
-            if isinstance(connectivity, Connectivity):
+            if isinstance(connectivity, common.Connectivity):
                 if connectivity.index_type not in [np.int32, np.int64]:
                     raise ValueError(
                         "Neighbor table indices must be of type 'np.int32' or 'np.int64'."
@@ -131,7 +130,12 @@ def _process_connectivity_args(
                     interface.Parameter(
                         name=GENERATED_CONNECTIVITY_PARAM_PREFIX + name.lower(),
                         type_=ts.FieldType(
-                            dims=[connectivity.origin_axis, Dimension(name)],
+                            dims=[
+                                connectivity.origin_axis,
+                                common.Dimension(
+                                    name, kind=common.DimensionKind.LOCAL
+                                ),  # TODO(havogt): we should not use the name of the offset as the name of the local dimension
+                            ],
                             dtype=ts.ScalarType(
                                 type_translation.get_scalar_kind(connectivity.index_type)
                             ),
@@ -149,7 +153,7 @@ def _process_connectivity_args(
                 arg_exprs.append(
                     f"gridtools::hymap::keys::make_values({nbtbl})"
                 )
-            elif isinstance(connectivity, Dimension):
+            elif isinstance(connectivity, common.Dimension):
                 pass
             else:
                 raise AssertionError(
@@ -162,7 +166,7 @@ def _process_connectivity_args(
     def _preprocess_program(
         self,
         program: itir.FencilDefinition | itir.Program,
-        offset_provider: dict[str, Connectivity | Dimension],
+        offset_provider: dict[str, common.Connectivity | common.Dimension],
     ) -> itir.Program:
         if isinstance(program, itir.FencilDefinition) and not self.enable_itir_transforms:
             return fencil_to_program.FencilToProgram().apply(
@@ -196,7 +200,7 @@ def _preprocess_program(
     def generate_stencil_source(
         self,
         program: itir.FencilDefinition | itir.Program,
-        offset_provider: dict[str, Connectivity | Dimension],
+        offset_provider: dict[str, common.Connectivity | common.Dimension],
         column_axis: Optional[common.Dimension],
     ) -> str:
         new_program = self._preprocess_program(program, offset_provider)
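
A minimal sketch (not part of the patches above) of the dispatch that
`_process_connectivity_args` performs on the offset provider, as visible in
the hunks above: `Connectivity` entries become generated parameters, while
plain `Dimension` entries (Cartesian offsets) are skipped. The prefix value is
an illustrative placeholder, not the real `GENERATED_CONNECTIVITY_PARAM_PREFIX`:

    from gt4py.next import common

    CONN_PARAM_PREFIX = "gt_conn_"  # illustrative placeholder prefix

    def connectivity_param_names(offset_provider: common.OffsetProvider) -> list[str]:
        # Only Connectivity entries contribute generated connectivity parameters;
        # Dimension entries are passed over without emitting anything.
        names: list[str] = []
        for name, value in offset_provider.items():
            if isinstance(value, common.Connectivity):
                names.append(CONN_PARAM_PREFIX + name.lower())
            elif isinstance(value, common.Dimension):
                pass
            else:
                raise AssertionError(f"Unexpected offset provider entry: {value!r}")
        return names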