Reinstate slow tests by making mem utilization test delete data after each test to free up space (#361)
tomwhite authored Jan 26, 2024
1 parent 3276d2b commit 9d2b618
Showing 2 changed files with 35 additions and 31 deletions.
File renamed without changes.
66 changes: 35 additions & 31 deletions cubed/tests/test_mem_utilization.py
@@ -1,4 +1,5 @@
 import math
+import shutil
 
 import pytest
 
@@ -33,70 +34,70 @@ def round_up_to_multiple(x, multiple=10):
 
 
 @pytest.mark.slow
-def test_index(spec):
+def test_index(tmp_path, spec):
     a = cubed.random.random(
         (10000, 10000), chunks=(5000, 5000), spec=spec
     )  # 200MB chunks
     b = a[1:, :]
-    run_operation("index", b)
+    run_operation(tmp_path, "index", b)
 
 
 @pytest.mark.slow
-def test_index_step(spec):
+def test_index_step(tmp_path, spec):
     a = cubed.random.random(
         (10000, 10000), chunks=(5000, 5000), spec=spec
     )  # 200MB chunks
     b = a[::2, :]
-    run_operation("index_step", b)
+    run_operation(tmp_path, "index_step", b)
 
 
 # Creation Functions
 
 
 @pytest.mark.slow
-def test_eye(spec):
+def test_eye(tmp_path, spec):
     a = xp.eye(10000, 10000, chunks=(5000, 5000), spec=spec)
-    run_operation("eye", a)
+    run_operation(tmp_path, "eye", a)
 
 
 @pytest.mark.slow
-def test_tril(spec):
+def test_tril(tmp_path, spec):
     a = cubed.random.random(
         (10000, 10000), chunks=(5000, 5000), spec=spec
     )  # 200MB chunks
     b = xp.tril(a)
-    run_operation("tril", b)
+    run_operation(tmp_path, "tril", b)
 
 
 # Elementwise Functions
 
 
 @pytest.mark.slow
-def test_add(spec):
+def test_add(tmp_path, spec):
     a = cubed.random.random(
         (10000, 10000), chunks=(5000, 5000), spec=spec
     )  # 200MB chunks
     b = cubed.random.random(
         (10000, 10000), chunks=(5000, 5000), spec=spec
     )  # 200MB chunks
     c = xp.add(a, b)
-    run_operation("add", c)
+    run_operation(tmp_path, "add", c)
 
 
 @pytest.mark.slow
-def test_negative(spec):
+def test_negative(tmp_path, spec):
     a = cubed.random.random(
         (10000, 10000), chunks=(5000, 5000), spec=spec
     )  # 200MB chunks
     b = xp.negative(a)
-    run_operation("negative", b)
+    run_operation(tmp_path, "negative", b)
 
 
 # Linear Algebra Functions
 
 
 @pytest.mark.slow
-def test_matmul(spec):
+def test_matmul(tmp_path, spec):
     a = cubed.random.random(
         (10000, 10000), chunks=(5000, 5000), spec=spec
     )  # 200MB chunks
@@ -106,20 +107,20 @@ def test_matmul(spec):
     c = xp.astype(a, xp.float32)
     d = xp.astype(b, xp.float32)
     e = xp.matmul(c, d)
-    run_operation("matmul", e)
+    run_operation(tmp_path, "matmul", e)
 
 
 @pytest.mark.slow
-def test_matrix_transpose(spec):
+def test_matrix_transpose(tmp_path, spec):
     a = cubed.random.random(
         (10000, 10000), chunks=(5000, 5000), spec=spec
     )  # 200MB chunks
     b = xp.matrix_transpose(a)
-    run_operation("matrix_transpose", b)
+    run_operation(tmp_path, "matrix_transpose", b)
 
 
 @pytest.mark.slow
-def test_tensordot(spec):
+def test_tensordot(tmp_path, spec):
     a = cubed.random.random(
         (10000, 10000), chunks=(5000, 5000), spec=spec
     )  # 200MB chunks
@@ -129,14 +130,14 @@ def test_tensordot(spec):
     c = xp.astype(a, xp.float32)
     d = xp.astype(b, xp.float32)
     e = xp.tensordot(c, d, axes=1)
-    run_operation("tensordot", e)
+    run_operation(tmp_path, "tensordot", e)
 
 
 # Manipulation Functions
 
 
 @pytest.mark.slow
-def test_concat(spec):
+def test_concat(tmp_path, spec):
     # Note 'a' has one fewer element in axis=0 to force chunking to cross array boundaries
     a = cubed.random.random(
         (9999, 10000), chunks=(5000, 5000), spec=spec
@@ -145,69 +146,69 @@ def test_concat(spec):
         (10000, 10000), chunks=(5000, 5000), spec=spec
     )  # 200MB chunks
     c = xp.concat((a, b), axis=0)
-    run_operation("concat", c)
+    run_operation(tmp_path, "concat", c)
 
 
 @pytest.mark.slow
-def test_reshape(spec):
+def test_reshape(tmp_path, spec):
     a = cubed.random.random(
         (10000, 10000), chunks=(5000, 5000), spec=spec
     )  # 200MB chunks
     # need intermediate reshape due to limitations in Dask's reshape_rechunk
     b = xp.reshape(a, (5000, 2, 10000))
     c = xp.reshape(b, (5000, 20000))
-    run_operation("reshape", c)
+    run_operation(tmp_path, "reshape", c)
 
 
 @pytest.mark.slow
-def test_stack(spec):
+def test_stack(tmp_path, spec):
    a = cubed.random.random(
         (10000, 10000), chunks=(5000, 5000), spec=spec
     )  # 200MB chunks
     b = cubed.random.random(
         (10000, 10000), chunks=(5000, 5000), spec=spec
     )  # 200MB chunks
     c = xp.stack((a, b), axis=0)
-    run_operation("stack", c)
+    run_operation(tmp_path, "stack", c)
 
 
 # Searching Functions
 
 
 @pytest.mark.slow
-def test_argmax(spec):
+def test_argmax(tmp_path, spec):
     a = cubed.random.random(
         (10000, 10000), chunks=(5000, 5000), spec=spec
     )  # 200MB chunks
     b = xp.argmax(a, axis=0)
-    run_operation("argmax", b)
+    run_operation(tmp_path, "argmax", b)
 
 
 # Statistical Functions
 
 
 @pytest.mark.slow
-def test_max(spec):
+def test_max(tmp_path, spec):
     a = cubed.random.random(
         (10000, 10000), chunks=(5000, 5000), spec=spec
     )  # 200MB chunks
     b = xp.max(a, axis=0)
-    run_operation("max", b)
+    run_operation(tmp_path, "max", b)
 
 
 @pytest.mark.slow
-def test_mean(spec):
+def test_mean(tmp_path, spec):
     a = cubed.random.random(
         (10000, 10000), chunks=(5000, 5000), spec=spec
     )  # 200MB chunks
     b = xp.mean(a, axis=0)
-    run_operation("mean", b)
+    run_operation(tmp_path, "mean", b)
 
 
 # Internal functions
 
 
-def run_operation(name, result_array):
+def run_operation(tmp_path, name, result_array):
     # result_array.visualize(f"cubed-{name}-unoptimized", optimize_graph=False)
     # result_array.visualize(f"cubed-{name}")
     executor = LithopsDagExecutor(config=LITHOPS_LOCAL_CONFIG)
@@ -220,3 +221,6 @@ def run_operation(name, result_array):
 
     # check projected_mem_utilization does not exceed 1
     assert (df["projected_mem_utilization"] <= 1.0).all()
+
+    # delete temp files for this test immediately since they are so large
+    shutil.rmtree(tmp_path)
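A note on the cleanup pattern above: pytest's tmp_path fixture gives each test its own fresh temporary directory, but pytest only prunes these directories across runs (it keeps the most recent few by default), so the multi-hundred-megabyte intermediate arrays these tests write would otherwise accumulate on disk. Deleting tmp_path explicitly at the end of run_operation frees the space as soon as each test finishes. For that to work, the spec fixture (defined outside this diff) must point Cubed's working directory at tmp_path. A minimal sketch of such a fixture, assuming cubed.Spec's work_dir and allowed_mem parameters; the allowed_mem value is illustrative, not taken from this commit:

import pytest

import cubed


@pytest.fixture
def spec(tmp_path):
    # Hypothetical wiring: route each test's intermediate Zarr stores
    # under its own pytest-managed temp directory, so the
    # shutil.rmtree(tmp_path) in run_operation reclaims the space
    # immediately after the test completes.
    return cubed.Spec(work_dir=str(tmp_path), allowed_mem="2GB")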
