Merge branch 'master' into struct_access_interstate_edge_bug
phschaad authored May 16, 2024
2 parents 202070d + ee5a6df commit 8bd0f16
Showing 36 changed files with 914 additions and 392 deletions.
75 changes: 0 additions & 75 deletions .github/workflows/pace-build-ci.yml

This file was deleted.

94 changes: 94 additions & 0 deletions .github/workflows/pyFV3-ci.yml
@@ -0,0 +1,94 @@
name: NASA/NOAA pyFV3 repository build test

on:
push:
branches: [ master, ci-fix ]
pull_request:
branches: [ master, ci-fix ]

defaults:
run:
shell: bash

jobs:
build_and_validate_pyFV3:
if: "!contains(github.event.pull_request.labels.*.name, 'no-ci')"
runs-on: ubuntu-latest
strategy:
matrix:
python-version: [3.11.7]

steps:
- uses: actions/checkout@v2
with:
repository: 'NOAA-GFDL/PyFV3'
ref: 'ci/DaCe'
submodules: 'recursive'
path: 'pyFV3'
- uses: actions/checkout@v2
with:
path: 'dace'
submodules: 'recursive'
- name: Set up Python ${{ matrix.python-version }}
uses: actions/setup-python@v2
with:
python-version: ${{ matrix.python-version }}
- name: Install library dependencies
run: |
sudo apt-get install libopenmpi-dev libboost-all-dev gcc-13
sudo update-alternatives --install /usr/bin/gcc gcc /usr/bin/gcc-13 13
gcc --version
# Because GitHub does not allow us to perform this git checkout in code,
# we use a trick: check out DaCe first (without using the external submodule),
# install the full suite via requirements_dev, then re-install the correct DaCe
- name: Install Python packages
run: |
python -m pip install --upgrade pip wheel setuptools
pip install -e ./pyFV3[develop]
pip install -e ./dace
- name: Download data
run: |
cd pyFV3
mkdir -p test_data
cd test_data
wget https://portal.nccs.nasa.gov/datashare/astg/smt/pace-regression-data/8.1.3_c12_6ranks_standard.D_SW.tar.gz
tar -xzvf 8.1.3_c12_6ranks_standard.D_SW.tar.gz
wget https://portal.nccs.nasa.gov/datashare/astg/smt/pace-regression-data/8.1.3_c12_6ranks_standard.RiemSolver3.tar.gz
tar -xzvf 8.1.3_c12_6ranks_standard.RiemSolver3.tar.gz
wget https://portal.nccs.nasa.gov/datashare/astg/smt/pace-regression-data/8.1.3_c12_6ranks_standard.Remapping.tar.gz
tar -xzvf 8.1.3_c12_6ranks_standard.Remapping.tar.gz
cd ../..
# Clean up caches between runs so that stale un-expanded SDFGs do not trip the build system (NDSL-side issue)
- name: "Regression test: Riemman Solver on D-grid (RiemSolver3)"
env:
FV3_DACEMODE: BuildAndRun
PACE_CONSTANTS: GFS
PACE_LOGLEVEL: Debug
run: |
pytest -v -s --data_path=./pyFV3/test_data/8.1.3/c12_6ranks_standard/dycore \
--backend=dace:cpu --which_modules=Riem_Solver3 \
--threshold_overrides_file=./pyFV3/tests/savepoint/translate/overrides/standard.yaml \
./pyFV3/tests/savepoint
rm -r ./.gt_cache_FV3_A
- name: "Regression test: Shallow water lagrangian dynamics on D-grid (D_SW) (on rank 0 only)"
env:
FV3_DACEMODE: BuildAndRun
PACE_CONSTANTS: GFS
PACE_LOGLEVEL: Debug
run: |
pytest -v -s --data_path=./pyFV3/test_data/8.1.3/c12_6ranks_standard/dycore \
--backend=dace:cpu --which_modules=D_SW --which_rank=0 \
--threshold_overrides_file=./pyFV3/tests/savepoint/translate/overrides/standard.yaml \
./pyFV3/tests/savepoint
rm -r ./.gt_cache_FV3_A
- name: "Regression test: Remapping (on rank 0 only)"
env:
FV3_DACEMODE: BuildAndRun
PACE_CONSTANTS: GFS
PACE_LOGLEVEL: Debug
run: |
pytest -v -s --data_path=./pyFV3/test_data/8.1.3/c12_6ranks_standard/dycore \
--backend=dace:cpu --which_modules=Remapping --which_rank=0 \
--threshold_overrides_file=./pyFV3/tests/savepoint/translate/overrides/standard.yaml \
./pyFV3/tests/savepoint
rm -r ./.gt_cache_FV3_A
13 changes: 11 additions & 2 deletions dace/builtin_hooks.py
@@ -15,7 +15,12 @@


@contextmanager
def profile(repetitions: int = 100, warmup: int = 0):
def profile(
repetitions: int = 100,
warmup: int = 0,
tqdm_leave: bool = True,
print_results: bool = True,
):
"""
Context manager that enables profiling of each called DaCe program. If repetitions is greater than 1, the
program is run multiple times and the average execution time is reported.
@@ -35,6 +40,10 @@ def profile(repetitions: int = 100, warmup: int = 0):
:param repetitions: The number of times to run each DaCe program.
:param warmup: Number of additional repetitions to run the program without measuring time.
:param tqdm_leave: Sets the ``leave`` parameter of the ``tqdm`` progress bar (useful
for nested progress bars). Ignored if tqdm progress bar is not used.
:param print_results: Whether or not to print the median execution time after
all repetitions.
:note: Running functions multiple times may affect the results of the program.
"""
from dace.frontend.operations import CompiledSDFGProfiler # Avoid circular import
@@ -51,7 +60,7 @@ def profile(repetitions: int = 100, warmup: int = 0):
yield hook
return

profiler = CompiledSDFGProfiler(repetitions, warmup)
profiler = CompiledSDFGProfiler(repetitions, warmup, tqdm_leave, print_results)

with on_compiled_sdfg_call(context_manager=profiler):
yield profiler
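
For reference, a minimal usage sketch of the extended profiling hook, assuming it is re-exported as dace.profile (as the built-in hooks usually are); the decorated program and the chosen repetition counts are illustrative only:

import numpy as np
import dace

@dace.program
def scale(A: dace.float64[1000]):
    A[:] = A * 2.0

A = np.random.rand(1000)

# 5 untimed warmup runs, then 50 timed repetitions; leave no tqdm bar behind
# and skip the printed summary (all values are illustrative).
with dace.profile(repetitions=50, warmup=5, tqdm_leave=False, print_results=False) as profiler:
    scale(A)
# 'profiler' is the CompiledSDFGProfiler yielded by the context manager and
# holds the collected measurements.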
9 changes: 6 additions & 3 deletions dace/cli/sdfv.py
@@ -23,7 +23,7 @@ class NewCls(cls):
return NewCls


def view(sdfg: dace.SDFG, filename: Optional[Union[str, int]] = None):
def view(sdfg: dace.SDFG, filename: Optional[Union[str, int]] = None, verbose: bool = True):
"""
View an sdfg in the system's HTML viewer
@@ -33,6 +33,7 @@ def view(sdfg: dace.SDFG, filename: Optional[Union[str, int]] = None):
the generated HTML and related sources will be
served using a basic web server on that port,
blocking the current thread.
:param verbose: If True, print status messages (e.g., where the HTML file was saved or which port is being served).
"""
# If vscode is open, try to open it inside vscode
if filename is None:
@@ -71,7 +72,8 @@ def view(sdfg: dace.SDFG, filename: Optional[Union[str, int]] = None):
with open(html_filename, "w") as f:
f.write(html)

print("File saved at %s" % html_filename)
if verbose:
print("File saved at %s" % html_filename)

if fd is not None:
os.close(fd)
@@ -83,7 +85,8 @@ def view(sdfg: dace.SDFG, filename: Optional[Union[str, int]] = None):
# start the web server
handler = partialclass(http.server.SimpleHTTPRequestHandler, directory=dirname)
httpd = http.server.HTTPServer(('localhost', filename), handler)
print(f"Serving at localhost:{filename}, press enter to stop...")
if verbose:
print(f"Serving at localhost:{filename}, press enter to stop...")

# start the server in a different thread
def serve():
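
As a usage note, a hedged sketch of the new verbose flag: with verbose=False the HTML is written without the "File saved at ..." / "Serving at ..." messages. The small program used to build the SDFG, and the output filename, are illustrative assumptions:

import dace
from dace.cli.sdfv import view

@dace.program
def double(A: dace.float64[10]):
    A[:] = A * 2

sdfg = double.to_sdfg()
# Save the rendered HTML quietly; exact output-name handling follows sdfv's internals.
view(sdfg, filename='double_sdfg', verbose=False)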
8 changes: 5 additions & 3 deletions dace/codegen/targets/cpu.py
@@ -19,6 +19,7 @@
from dace.sdfg import (ScopeSubgraphView, SDFG, scope_contains_scope, is_array_stream_view, NodeNotExpandedError,
dynamic_map_inputs, local_transients)
from dace.sdfg.scope import is_devicelevel_gpu, is_devicelevel_fpga, is_in_scope
from dace.sdfg.validation import validate_memlet_data
from typing import Union
from dace.codegen.targets import fpga

@@ -40,7 +41,7 @@ def _visit_structure(struct: data.Structure, args: dict, prefix: str = ''):
_visit_structure(v, args, f'{prefix}->{k}')
elif isinstance(v, data.ContainerArray):
_visit_structure(v.stype, args, f'{prefix}->{k}')
elif isinstance(v, data.Data):
if isinstance(v, data.Data):
args[f'{prefix}->{k}'] = v

# Keeps track of generated connectors, so we know how to access them in nested scopes
@@ -624,6 +625,7 @@ def copy_memory(
callsite_stream,
)


def _emit_copy(
self,
sdfg,
@@ -641,9 +643,9 @@ def _emit_copy(
orig_vconn = vconn

# Determine memlet directionality
if isinstance(src_node, nodes.AccessNode) and memlet.data == src_node.data:
if isinstance(src_node, nodes.AccessNode) and validate_memlet_data(memlet.data, src_node.data):
write = True
elif isinstance(dst_node, nodes.AccessNode) and memlet.data == dst_node.data:
elif isinstance(dst_node, nodes.AccessNode) and validate_memlet_data(memlet.data, dst_node.data):
write = False
elif isinstance(src_node, nodes.CodeNode) and isinstance(dst_node, nodes.CodeNode):
# Code->Code copy (not read nor write)
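
A self-contained sketch of the relaxed comparison assumed here: validate_memlet_data is taken to also accept memlets that reference a member of the access node's container (e.g. 'A.b' against node data 'A'), which the previous plain equality rejected. The helper below is an illustrative stand-in, not the actual dace.sdfg.validation implementation:

def matches_node_data(memlet_data, node_data):
    # Equal names match, and so does a dotted member access rooted at the node's data.
    if memlet_data is None or node_data is None:
        return False
    return memlet_data == node_data or memlet_data.startswith(node_data + '.')

assert matches_node_data('A', 'A')
assert matches_node_data('A.b', 'A')       # structure member access
assert not matches_node_data('B', 'A')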
7 changes: 6 additions & 1 deletion dace/codegen/targets/framecode.py
@@ -155,6 +155,8 @@ def generate_fileheader(self, sdfg: SDFG, global_stream: CodeIOStream, backend:
if arr is not None:
datatypes.add(arr.dtype)

emitted = set()

def _emit_definitions(dtype: dtypes.typeclass, wrote_something: bool) -> bool:
if isinstance(dtype, dtypes.pointer):
wrote_something = _emit_definitions(dtype._typeclass, wrote_something)
@@ -164,7 +166,10 @@ def _emit_definitions(dtype: dtypes.typeclass, wrote_something: bool) -> bool:
if hasattr(dtype, 'emit_definition'):
if not wrote_something:
global_stream.write("", sdfg)
global_stream.write(dtype.emit_definition(), sdfg)
if dtype not in emitted:
global_stream.write(dtype.emit_definition(), sdfg)
wrote_something = True
emitted.add(dtype)
return wrote_something

# Emit unique definitions
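
A minimal sketch of the deduplication pattern introduced above: emit each definition exactly once while preserving first-seen order (the real code additionally recurses into pointer and vector element types):

def emit_unique(definitions, write):
    emitted = set()
    for d in definitions:
        if d not in emitted:
            write(d)          # emit only on first occurrence
            emitted.add(d)

lines = []
emit_unique(['struct A;', 'struct B;', 'struct A;'], lines.append)
assert lines == ['struct A;', 'struct B;']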
4 changes: 2 additions & 2 deletions dace/config_schema.yml
@@ -945,10 +945,10 @@ required:
serialize_all_fields:
type: bool
default: true
default: false
title: Serialize all unmodified fields in SDFG files
description: >
If False, saving an SDFG keeps only the modified non-default properties. If True,
If False (default), saving an SDFG keeps only the modified non-default properties. If True,
saves all fields.
#############################################
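
If the previous behavior is still desired, the option can presumably be flipped at runtime; a hedged sketch follows. The parent section name ('testing') is an assumption; only 'serialize_all_fields' itself appears in the diff above:

import dace

@dace.program
def addone(A: dace.float64[10]):
    A[:] = A + 1

sdfg = addone.to_sdfg()
dace.Config.set('testing', 'serialize_all_fields', value=True)  # key path is assumed
sdfg.save('addone_full.sdfg')  # now serializes all fields, not only non-default ones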
7 changes: 5 additions & 2 deletions dace/dtypes.py
@@ -1216,6 +1216,7 @@ def isconstant(var):
int16 = typeclass(numpy.int16)
int32 = typeclass(numpy.int32)
int64 = typeclass(numpy.int64)
uintp = typeclass(numpy.uintp)
uint8 = typeclass(numpy.uint8)
uint16 = typeclass(numpy.uint16)
uint32 = typeclass(numpy.uint32)
@@ -1449,8 +1450,10 @@ def validate_name(name):
return False
if name in {'True', 'False', 'None'}:
return False
if namere.match(name) is None:
return False
tokens = name.split('.')
for token in tokens:
if namere.match(token) is None:
return False
return True
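
A self-contained sketch of the dotted-name rule added above: every dot-separated token must independently be a valid identifier. The regular expression is an illustrative stand-in for dace.dtypes.namere:

import re

_name_re = re.compile(r'^[a-zA-Z_][a-zA-Z0-9_]*$')

def validate_dotted_name(name):
    if name in {'True', 'False', 'None'}:
        return False
    # Every token between the dots must be a valid identifier on its own.
    return all(_name_re.match(token) for token in name.split('.'))

assert validate_dotted_name('A.b.c')        # structure member access is accepted
assert not validate_dotted_name('A..b')     # empty tokens are rejected
assert not validate_dotted_name('1abc')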

