Skip to content

Commit

Permalink
Switched to UUID from time.
Browse files Browse the repository at this point in the history
Also added a helper function.
  • Loading branch information
philip-paul-mueller committed Sep 5, 2024
1 parent 5ab199d commit a0866a7
Show file tree
Hide file tree
Showing 5 changed files with 14 additions and 15 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -11,7 +11,6 @@
import copy
import numpy as np
import pytest
import time

from dace.sdfg import nodes as dace_nodes

Expand Down Expand Up @@ -43,7 +42,7 @@ def _get_trivial_gpu_promotable(
Args:
tasklet_code: The body of the Tasklet inside the trivial map.
"""
sdfg = dace.SDFG(f"test_sdfg__{int(time.time() * 1000)}")
sdfg = dace.SDFG(util.unique_name("gpu_promotable_sdfg"))
state = sdfg.add_state("state", is_start_block=True)
sdfg.add_symbol("N", dace.int32)

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -11,13 +11,13 @@
import copy
import numpy as np
import pytest
import time

from dace.sdfg import nodes as dace_nodes, propagation as dace_propagation

from gt4py.next.program_processors.runners.dace_fieldview import (
transformations as gtx_transformations,
)
from . import util

pytestmark = pytest.mark.usefixtures("set_dace_settings")

Expand All @@ -29,7 +29,7 @@ def _get_simple_sdfg() -> tuple[dace.SDFG, Callable[[np.ndarray, np.ndarray], np
can be taken out. This is because how it is constructed. However, applying
some simplistic transformations this can be done.
"""
sdfg = dace.SDFG(f"test_sdfg__{int(time.time() * 1000)}")
sdfg = dace.SDFG(util.unique_name("simple_block_sdfg"))
state = sdfg.add_state("state", is_start_block=True)
sdfg.add_symbol("N", dace.int32)
sdfg.add_symbol("M", dace.int32)
Expand All @@ -52,7 +52,7 @@ def _get_chained_sdfg() -> tuple[dace.SDFG, Callable[[np.ndarray, np.ndarray], n
The bottom Tasklet is the only dependent Tasklet.
"""
sdfg = dace.SDFG(f"test_sdfg__{int(time.time() * 1000)}")
sdfg = dace.SDFG(util.unique_name("chained_block_sdfg"))
state = sdfg.add_state("state", is_start_block=True)
sdfg.add_symbol("N", dace.int32)
sdfg.add_symbol("M", dace.int32)
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -12,7 +12,6 @@
import dace
import copy
import numpy as np
import time
from dace.sdfg import nodes as dace_nodes
from dace.transformation import dataflow as dace_dataflow

Expand All @@ -39,7 +38,7 @@ def _make_serial_sdfg_1(
N: The size of the arrays.
"""
shape = (N, N)
sdfg = dace.SDFG(f"test_sdfg__{int(time.time() * 1000)}")
sdfg = dace.SDFG(util.unique_name("serial_sdfg1"))
state = sdfg.add_state(is_start_block=True)

for name in ["a", "b", "tmp"]:
Expand Down Expand Up @@ -94,7 +93,7 @@ def _make_serial_sdfg_2(
N: The size of the arrays.
"""
shape = (N, N)
sdfg = dace.SDFG(f"test_sdfg__{int(time.time() * 1000)}")
sdfg = dace.SDFG(util.unique_name("serial_sdfg2"))
state = sdfg.add_state(is_start_block=True)

for name in ["a", "b", "c", "tmp_1", "tmp_2"]:
Expand Down Expand Up @@ -166,7 +165,7 @@ def _make_serial_sdfg_3(
input_shape = (N_input,)
output_shape = (N_output,)

sdfg = dace.SDFG(f"test_sdfg__{int(time.time() * 1000)}")
sdfg = dace.SDFG(util.unique_name("serial_sdfg3"))
state = sdfg.add_state(is_start_block=True)

for name, shape in [
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -11,7 +11,6 @@
import copy
import numpy as np
import pytest
import time

from dace.sdfg import nodes as dace_nodes

Expand All @@ -29,7 +28,7 @@ def test_serial_map_promotion():
N = 10
shape_1d = (N,)
shape_2d = (N, N)
sdfg = dace.SDFG(f"test_sdfg__{int(time.time() * 1000)}")
sdfg = dace.SDFG(util.unique_name("serial_promotable_sdfg"))
state = sdfg.add_state(is_start_block=True)

# 1D Arrays
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -7,15 +7,12 @@
# SPDX-License-Identifier: BSD-3-Clause

from typing import Union, Literal, overload
import uuid

import dace
from dace.sdfg import nodes as dace_nodes
from dace.transformation import dataflow as dace_dataflow

__all__ = [
"_count_nodes",
]


@overload
def _count_nodes(
Expand Down Expand Up @@ -57,3 +54,8 @@ def _count_nodes(
if return_nodes:
return found_nodes
return len(found_nodes)


def unique_name(name: str) -> str:
    """Return `name` with a unique suffix appended.

    Uses a random UUID (``uuid4``) instead of the time/MAC-based ``uuid1``:
    ``uuid1`` embeds the host's MAC address and a timestamp, which both leaks
    host information and makes the suffix predictable. ``uuid4().hex`` is
    already hyphen-free, so the result stays a valid Python/SDFG identifier
    without any character replacement.

    Args:
        name: Base name to which the unique suffix is appended.
    """
    return f"{name}_{uuid.uuid4().hex}"

0 comments on commit a0866a7

Please sign in to comment.