From b85b4976aed6b3944a4e0ccd97dca117b9f9cb87 Mon Sep 17 00:00:00 2001
From: Hendrik Makait
Date: Wed, 20 Mar 2024 10:20:10 +0100
Subject: [PATCH] Fix memory issues for Spark (#1478)

---
 ci/requirements-2tpch-non-dask.in | 3 +++
 tests/tpch/conftest.py            | 3 +--
 2 files changed, 4 insertions(+), 2 deletions(-)

diff --git a/ci/requirements-2tpch-non-dask.in b/ci/requirements-2tpch-non-dask.in
index 2246eaf6e7..b4b682eaa3 100644
--- a/ci/requirements-2tpch-non-dask.in
+++ b/ci/requirements-2tpch-non-dask.in
@@ -10,5 +10,8 @@ grpcio==1.62.1
 grpcio-status==1.62.1
 protobuf==4.25.2
 
+# First version to be able to set memory factors
+coiled>=1.13.1.dev1
+
 # Other TPCH tests
 polars==0.20.13
diff --git a/tests/tpch/conftest.py b/tests/tpch/conftest.py
index 3d01d69437..590821fd9b 100644
--- a/tests/tpch/conftest.py
+++ b/tests/tpch/conftest.py
@@ -250,8 +250,7 @@ def spark_setup(cluster, local):
         )
         spark_dashboard = parse_url("http://localhost:4040")
     else:
-        spark = cluster.get_spark()
-
+        spark = cluster.get_spark(executor_memory_factor=0.8, worker_memory_factor=0.9)
         # Available on coiled>=1.12.4
         if not hasattr(cluster, "_spark_dashboard"):
             cluster._spark_dashboard = (
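
Note (not part of the patch): below is a minimal standalone sketch of how the new memory-factor arguments might be used against a Coiled cluster. Only cluster.get_spark() with executor_memory_factor=0.8 and worker_memory_factor=0.9 comes from the diff above; the cluster name, worker count, dataset path, and the interpretation of the factors as fractions of available VM memory are illustrative assumptions.

# Illustrative sketch only -- assumes coiled>=1.13.1.dev1, the first release
# that exposes the memory-factor keywords used in the patch above.
import coiled

# Cluster name and sizing are hypothetical, not taken from the patch.
cluster = coiled.Cluster(
    name="tpch-spark-example",
    n_workers=16,
)

# Per the patch: request Spark executor memory at 80% and Spark worker memory
# at 90% of what the VM provides (assumed interpretation of the factors),
# leaving headroom to avoid the memory issues the patch title refers to.
spark = cluster.get_spark(
    executor_memory_factor=0.8,
    worker_memory_factor=0.9,
)

# Hypothetical usage; the dataset path is a placeholder.
df = spark.read.parquet("s3://example-bucket/tpch/lineitem/")
print(df.count())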