Skip to content

Commit 5d52c1b

Browse files
authored
Fix PyTriton library discovery (#505)
Detect and configure the required library paths (i.e., to Triton bundled libraries and libpython3.*.so) when starting the Triton process. This avoids the need to set executorEnv globally and works across conda/non-conda environments (e.g., Dataproc vs. Databricks). Also includes minor cleanups to the PyTriton server shutdown. --------- Signed-off-by: Rishi Chandra <[email protected]>
1 parent 14e9bbc commit 5d52c1b

15 files changed

+45
-126
lines changed

examples/ML+DL-Examples/Spark-DL/dl_inference/huggingface/conditional_generation_tf.ipynb

-13
Original file line numberDiff line numberDiff line change
@@ -275,20 +275,7 @@
275275
" conf.setMaster(f\"spark://{hostname}:7077\")\n",
276276
" conf.set(\"spark.pyspark.python\", f\"{conda_env}/bin/python\")\n",
277277
" conf.set(\"spark.pyspark.driver.python\", f\"{conda_env}/bin/python\")\n",
278-
" # Point PyTriton to correct libpython3.11.so:\n",
279-
" conf.set(\"spark.executorEnv.LD_LIBRARY_PATH\", f\"{conda_env}/lib:{conda_env}/lib/python3.11/site-packages/nvidia_pytriton.libs:$LD_LIBRARY_PATH\")\n",
280-
" source = \"/usr/lib/x86_64-linux-gnu/libstdc++.so.6\"\n",
281-
" target = f\"{conda_env}/lib/libstdc++.so.6\"\n",
282-
" try:\n",
283-
" if os.path.islink(target) or os.path.exists(target):\n",
284-
" os.remove(target)\n",
285-
" os.symlink(source, target)\n",
286-
" except OSError as e:\n",
287-
" print(f\"Error creating symlink: {e}\")\n",
288278
" elif on_dataproc:\n",
289-
" # Point PyTriton to correct libpython3.11.so:\n",
290-
" conda_lib_path=\"/opt/conda/miniconda3/lib\"\n",
291-
" conf.set(\"spark.executorEnv.LD_LIBRARY_PATH\", f\"{conda_lib_path}:$LD_LIBRARY_PATH\")\n",
292279
" conf.set(\"spark.executorEnv.TF_GPU_ALLOCATOR\", \"cuda_malloc_async\")\n",
293280
" conf.set(\"spark.executor.instances\", \"4\") # dataproc defaults to 2\n",
294281
"\n",

examples/ML+DL-Examples/Spark-DL/dl_inference/huggingface/conditional_generation_torch.ipynb

-5
Original file line numberDiff line numberDiff line change
@@ -215,12 +215,7 @@
215215
" conf.setMaster(f\"spark://{hostname}:7077\")\n",
216216
" conf.set(\"spark.pyspark.python\", f\"{conda_env}/bin/python\")\n",
217217
" conf.set(\"spark.pyspark.driver.python\", f\"{conda_env}/bin/python\")\n",
218-
" # Point PyTriton to correct libpython3.11.so:\n",
219-
" conf.set(\"spark.executorEnv.LD_LIBRARY_PATH\", f\"{conda_env}/lib:{conda_env}/lib/python3.11/site-packages/nvidia_pytriton.libs:$LD_LIBRARY_PATH\")\n",
220218
" elif on_dataproc:\n",
221-
" # Point PyTriton to correct libpython3.11.so:\n",
222-
" conda_lib_path=\"/opt/conda/miniconda3/lib\"\n",
223-
" conf.set(\"spark.executorEnv.LD_LIBRARY_PATH\", f\"{conda_lib_path}:$LD_LIBRARY_PATH\")\n",
224219
" conf.set(\"spark.executor.instances\", \"4\") # dataproc defaults to 2\n",
225220
"\n",
226221
" conf.set(\"spark.executor.cores\", \"8\")\n",

examples/ML+DL-Examples/Spark-DL/dl_inference/huggingface/deepseek-r1_torch.ipynb

-5
Original file line numberDiff line numberDiff line change
@@ -305,12 +305,7 @@
305305
" conf.setMaster(f\"spark://{hostname}:7077\")\n",
306306
" conf.set(\"spark.pyspark.python\", f\"{conda_env}/bin/python\")\n",
307307
" conf.set(\"spark.pyspark.driver.python\", f\"{conda_env}/bin/python\")\n",
308-
" # Point PyTriton to correct libpython3.11.so:\n",
309-
" conf.set(\"spark.executorEnv.LD_LIBRARY_PATH\", f\"{conda_env}/lib:{conda_env}/lib/python3.11/site-packages/nvidia_pytriton.libs:$LD_LIBRARY_PATH\")\n",
310308
" elif on_dataproc:\n",
311-
" # Point PyTriton to correct libpython3.11.so:\n",
312-
" conda_lib_path=\"/opt/conda/miniconda3/lib\"\n",
313-
" conf.set(\"spark.executorEnv.LD_LIBRARY_PATH\", f\"{conda_lib_path}:$LD_LIBRARY_PATH\")\n",
314309
" conf.set(\"spark.executor.instances\", \"4\") # dataproc defaults to 2\n",
315310
" conf.set(\"spark.executorEnv.HF_HOME\", hf_home)\n",
316311
"\n",

examples/ML+DL-Examples/Spark-DL/dl_inference/huggingface/gemma-7b_torch.ipynb

-5
Original file line numberDiff line numberDiff line change
@@ -244,12 +244,7 @@
244244
" conf.setMaster(f\"spark://{hostname}:7077\")\n",
245245
" conf.set(\"spark.pyspark.python\", f\"{conda_env}/bin/python\")\n",
246246
" conf.set(\"spark.pyspark.driver.python\", f\"{conda_env}/bin/python\")\n",
247-
" # Point PyTriton to correct libpython3.11.so:\n",
248-
" conf.set(\"spark.executorEnv.LD_LIBRARY_PATH\", f\"{conda_env}/lib:{conda_env}/lib/python3.11/site-packages/nvidia_pytriton.libs:$LD_LIBRARY_PATH\")\n",
249247
" elif on_dataproc:\n",
250-
" # Point PyTriton to correct libpython3.11.so:\n",
251-
" conda_lib_path=\"/opt/conda/miniconda3/lib\"\n",
252-
" conf.set(\"spark.executorEnv.LD_LIBRARY_PATH\", f\"{conda_lib_path}:$LD_LIBRARY_PATH\")\n",
253248
" conf.set(\"spark.executor.instances\", \"4\") # dataproc defaults to 2\n",
254249
" conf.set(\"spark.executorEnv.HF_HOME\", hf_home)\n",
255250
"\n",

examples/ML+DL-Examples/Spark-DL/dl_inference/huggingface/pipelines_tf.ipynb

-13
Original file line numberDiff line numberDiff line change
@@ -340,20 +340,7 @@
340340
" conf.setMaster(f\"spark://{hostname}:7077\")\n",
341341
" conf.set(\"spark.pyspark.python\", f\"{conda_env}/bin/python\")\n",
342342
" conf.set(\"spark.pyspark.driver.python\", f\"{conda_env}/bin/python\")\n",
343-
" # Point PyTriton to correct libpython3.11.so:\n",
344-
" conf.set(\"spark.executorEnv.LD_LIBRARY_PATH\", f\"{conda_env}/lib:{conda_env}/lib/python3.11/site-packages/nvidia_pytriton.libs:$LD_LIBRARY_PATH\")\n",
345-
" source = \"/usr/lib/x86_64-linux-gnu/libstdc++.so.6\"\n",
346-
" target = f\"{conda_env}/lib/libstdc++.so.6\"\n",
347-
" try:\n",
348-
" if os.path.islink(target) or os.path.exists(target):\n",
349-
" os.remove(target)\n",
350-
" os.symlink(source, target)\n",
351-
" except OSError as e:\n",
352-
" print(f\"Error creating symlink: {e}\")\n",
353343
" elif on_dataproc:\n",
354-
" # Point PyTriton to correct libpython3.11.so:\n",
355-
" conda_lib_path=\"/opt/conda/miniconda3/lib\"\n",
356-
" conf.set(\"spark.executorEnv.LD_LIBRARY_PATH\", f\"{conda_lib_path}:$LD_LIBRARY_PATH\")\n",
357344
" conf.set(\"spark.executorEnv.TF_GPU_ALLOCATOR\", \"cuda_malloc_async\")\n",
358345
" conf.set(\"spark.executor.instances\", \"4\") # dataproc defaults to 2\n",
359346
"\n",

examples/ML+DL-Examples/Spark-DL/dl_inference/huggingface/pipelines_torch.ipynb

-5
Original file line numberDiff line numberDiff line change
@@ -254,12 +254,7 @@
254254
" conf.setMaster(f\"spark://{hostname}:7077\")\n",
255255
" conf.set(\"spark.pyspark.python\", f\"{conda_env}/bin/python\")\n",
256256
" conf.set(\"spark.pyspark.driver.python\", f\"{conda_env}/bin/python\")\n",
257-
" # Point PyTriton to correct libpython3.11.so:\n",
258-
" conf.set(\"spark.executorEnv.LD_LIBRARY_PATH\", f\"{conda_env}/lib:{conda_env}/lib/python3.11/site-packages/nvidia_pytriton.libs:$LD_LIBRARY_PATH\")\n",
259257
" elif on_dataproc:\n",
260-
" # Point PyTriton to correct libpython3.11.so:\n",
261-
" conda_lib_path=\"/opt/conda/miniconda3/lib\"\n",
262-
" conf.set(\"spark.executorEnv.LD_LIBRARY_PATH\", f\"{conda_lib_path}:$LD_LIBRARY_PATH\")\n",
263258
" conf.set(\"spark.executor.instances\", \"4\") # dataproc defaults to 2\n",
264259
"\n",
265260
" conf.set(\"spark.executor.cores\", \"8\")\n",

examples/ML+DL-Examples/Spark-DL/dl_inference/huggingface/qwen-2.5-7b_torch.ipynb

-5
Original file line numberDiff line numberDiff line change
@@ -307,12 +307,7 @@
307307
" conf.setMaster(f\"spark://{hostname}:7077\")\n",
308308
" conf.set(\"spark.pyspark.python\", f\"{conda_env}/bin/python\")\n",
309309
" conf.set(\"spark.pyspark.driver.python\", f\"{conda_env}/bin/python\")\n",
310-
" # Point PyTriton to correct libpython3.11.so:\n",
311-
" conf.set(\"spark.executorEnv.LD_LIBRARY_PATH\", f\"{conda_env}/lib:{conda_env}/lib/python3.11/site-packages/nvidia_pytriton.libs:$LD_LIBRARY_PATH\")\n",
312310
" elif on_dataproc:\n",
313-
" # Point PyTriton to correct libpython3.11.so:\n",
314-
" conda_lib_path=\"/opt/conda/miniconda3/lib\"\n",
315-
" conf.set(\"spark.executorEnv.LD_LIBRARY_PATH\", f\"{conda_lib_path}:$LD_LIBRARY_PATH\")\n",
316311
" conf.set(\"spark.executor.instances\", \"4\") # dataproc defaults to 2\n",
317312
" conf.set(\"spark.executorEnv.HF_HOME\", hf_home)\n",
318313
"\n",

examples/ML+DL-Examples/Spark-DL/dl_inference/huggingface/sentence_transformers_torch.ipynb

-5
Original file line numberDiff line numberDiff line change
@@ -161,12 +161,7 @@
161161
" conf.setMaster(f\"spark://{hostname}:7077\")\n",
162162
" conf.set(\"spark.pyspark.python\", f\"{conda_env}/bin/python\")\n",
163163
" conf.set(\"spark.pyspark.driver.python\", f\"{conda_env}/bin/python\")\n",
164-
" # Point PyTriton to correct libpython3.11.so:\n",
165-
" conf.set(\"spark.executorEnv.LD_LIBRARY_PATH\", f\"{conda_env}/lib:{conda_env}/lib/python3.11/site-packages/nvidia_pytriton.libs:$LD_LIBRARY_PATH\")\n",
166164
" elif on_dataproc:\n",
167-
" # Point PyTriton to correct libpython3.11.so:\n",
168-
" conda_lib_path=\"/opt/conda/miniconda3/lib\"\n",
169-
" conf.set(\"spark.executorEnv.LD_LIBRARY_PATH\", f\"{conda_lib_path}:$LD_LIBRARY_PATH\")\n",
170165
" conf.set(\"spark.executor.instances\", \"4\") # dataproc defaults to 2\n",
171166
"\n",
172167
" conf.set(\"spark.executor.cores\", \"8\")\n",

examples/ML+DL-Examples/Spark-DL/dl_inference/pytorch/housing_regression_torch.ipynb

-5
Original file line numberDiff line numberDiff line change
@@ -894,12 +894,7 @@
894894
" conf.setMaster(f\"spark://{hostname}:7077\")\n",
895895
" conf.set(\"spark.pyspark.python\", f\"{conda_env}/bin/python\")\n",
896896
" conf.set(\"spark.pyspark.driver.python\", f\"{conda_env}/bin/python\")\n",
897-
" # Point PyTriton to correct libpython3.11.so:\n",
898-
" conf.set(\"spark.executorEnv.LD_LIBRARY_PATH\", f\"{conda_env}/lib:{conda_env}/lib/python3.11/site-packages/nvidia_pytriton.libs:$LD_LIBRARY_PATH\")\n",
899897
" elif on_dataproc:\n",
900-
" # Point PyTriton to correct libpython3.11.so:\n",
901-
" conda_lib_path=\"/opt/conda/miniconda3/lib\"\n",
902-
" conf.set(\"spark.executorEnv.LD_LIBRARY_PATH\", f\"{conda_lib_path}:$LD_LIBRARY_PATH\")\n",
903898
" conf.set(\"spark.executor.instances\", \"4\") # dataproc defaults to 2\n",
904899
"\n",
905900
" conf.set(\"spark.executor.cores\", \"8\")\n",

examples/ML+DL-Examples/Spark-DL/dl_inference/pytorch/image_classification_torch.ipynb

-5
Original file line numberDiff line numberDiff line change
@@ -862,12 +862,7 @@
862862
" conf.setMaster(f\"spark://{hostname}:7077\")\n",
863863
" conf.set(\"spark.pyspark.python\", f\"{conda_env}/bin/python\")\n",
864864
" conf.set(\"spark.pyspark.driver.python\", f\"{conda_env}/bin/python\")\n",
865-
" # Point PyTriton to correct libpython3.11.so:\n",
866-
" conf.set(\"spark.executorEnv.LD_LIBRARY_PATH\", f\"{conda_env}/lib:{conda_env}/lib/python3.11/site-packages/nvidia_pytriton.libs:$LD_LIBRARY_PATH\")\n",
867865
" elif on_dataproc:\n",
868-
" # Point PyTriton to correct libpython3.11.so:\n",
869-
" conda_lib_path=\"/opt/conda/miniconda3/lib\"\n",
870-
" conf.set(\"spark.executorEnv.LD_LIBRARY_PATH\", f\"{conda_lib_path}:$LD_LIBRARY_PATH\") \n",
871866
" conf.set(\"spark.executor.instances\", \"4\") # dataproc defaults to 2\n",
872867
"\n",
873868
" conf.set(\"spark.executor.cores\", \"8\")\n",

examples/ML+DL-Examples/Spark-DL/dl_inference/pytriton_utils.py

+45-8
Original file line numberDiff line numberDiff line change
@@ -18,6 +18,7 @@
1818
import os
1919
import signal
2020
import socket
21+
import sys
2122
import time
2223
from multiprocessing import Process
2324
from typing import Callable, Dict, List, Optional, Tuple
@@ -42,8 +43,31 @@ def _start_triton_server(
4243
model_path: Optional[str] = None,
4344
) -> List[tuple]:
4445
"""Task to start Triton server process on a Spark executor."""
45-
sig = inspect.signature(triton_server_fn)
46-
params = sig.parameters
46+
47+
def _prepare_pytriton_env():
48+
"""Expose PyTriton to correct libpython3.11.so and Triton bundled libraries."""
49+
ld_library_paths = []
50+
51+
# Add nvidia_pytriton.libs to LD_LIBRARY_PATH
52+
for path in sys.path:
53+
if os.path.isdir(path) and "site-packages" in path:
54+
libs_path = os.path.join(path, "nvidia_pytriton.libs")
55+
if os.path.isdir(libs_path):
56+
ld_library_paths.append(libs_path)
57+
break
58+
59+
# Add ${CONDA_PREFIX}/lib to LD_LIBRARY_PATH for conda environments
60+
if os.path.exists(os.path.join(sys.prefix, "conda-meta")):
61+
conda_lib = os.path.join(sys.prefix, "lib")
62+
if os.path.isdir(conda_lib):
63+
ld_library_paths.append(conda_lib)
64+
65+
if "LD_LIBRARY_PATH" in os.environ:
66+
ld_library_paths.append(os.environ["LD_LIBRARY_PATH"])
67+
68+
os.environ["LD_LIBRARY_PATH"] = ":".join(ld_library_paths)
69+
70+
return None
4771

4872
def _find_ports(start_port: int = 7000) -> List[int]:
4973
"""Find available ports for Triton's HTTP, gRPC, and metrics services."""
@@ -59,6 +83,8 @@ def _find_ports(start_port: int = 7000) -> List[int]:
5983
return ports
6084

6185
ports = _find_ports()
86+
sig = inspect.signature(triton_server_fn)
87+
params = sig.parameters
6288

6389
if model_path is not None:
6490
assert (
@@ -69,6 +95,7 @@ def _find_ports(start_port: int = 7000) -> List[int]:
6995
assert len(params) == 1, "Server function must accept (ports) argument"
7096
args = (ports,)
7197

98+
_prepare_pytriton_env()
7299
hostname = socket.gethostname()
73100
process = Process(target=triton_server_fn, args=args)
74101
process.start()
@@ -83,6 +110,11 @@ def _find_ports(start_port: int = 7000) -> List[int]:
83110
except Exception:
84111
pass
85112

113+
client.close()
114+
if process.is_alive():
115+
# Terminate if timeout is exceeded to avoid dangling server processes
116+
process.terminate()
117+
86118
raise TimeoutError(
87119
"Failure: server startup timeout exceeded. Check the executor logs for more info."
88120
)
@@ -98,14 +130,19 @@ def _stop_triton_server(
98130
pid, _ = server_pids_ports.get(hostname)
99131
assert pid is not None, f"No server PID found for host {hostname}"
100132

101-
for _ in range(wait_retries):
133+
try:
134+
process = psutil.Process(pid)
135+
process.terminate()
136+
process.wait(timeout=wait_timeout * wait_retries)
137+
return [True]
138+
except psutil.NoSuchProcess:
139+
return [True]
140+
except psutil.TimeoutExpired:
102141
try:
103-
os.kill(pid, signal.SIGTERM)
104-
except OSError:
142+
process.kill()
105143
return [True]
106-
time.sleep(wait_timeout)
107-
108-
return [False] # Failed to terminate or timed out
144+
except:
145+
return [False]
109146

110147

111148
class TritonServerManager:

examples/ML+DL-Examples/Spark-DL/dl_inference/tensorflow/image_classification_tf.ipynb

-13
Original file line numberDiff line numberDiff line change
@@ -815,20 +815,7 @@
815815
" conf.setMaster(f\"spark://{hostname}:7077\")\n",
816816
" conf.set(\"spark.pyspark.python\", f\"{conda_env}/bin/python\")\n",
817817
" conf.set(\"spark.pyspark.driver.python\", f\"{conda_env}/bin/python\")\n",
818-
" # Point PyTriton to correct libpython3.11.so:\n",
819-
" conf.set(\"spark.executorEnv.LD_LIBRARY_PATH\", f\"{conda_env}/lib:{conda_env}/lib/python3.11/site-packages/nvidia_pytriton.libs:$LD_LIBRARY_PATH\")\n",
820-
" source = \"/usr/lib/x86_64-linux-gnu/libstdc++.so.6\"\n",
821-
" target = f\"{conda_env}/lib/libstdc++.so.6\"\n",
822-
" try:\n",
823-
" if os.path.islink(target) or os.path.exists(target):\n",
824-
" os.remove(target)\n",
825-
" os.symlink(source, target)\n",
826-
" except OSError as e:\n",
827-
" print(f\"Error creating symlink: {e}\")\n",
828818
" elif on_dataproc:\n",
829-
" # Point PyTriton to correct libpython3.11.so:\n",
830-
" conda_lib_path=\"/opt/conda/miniconda3/lib\"\n",
831-
" conf.set(\"spark.executorEnv.LD_LIBRARY_PATH\", f\"{conda_lib_path}:$LD_LIBRARY_PATH\")\n",
832819
" conf.set(\"spark.executorEnv.TF_GPU_ALLOCATOR\", \"cuda_malloc_async\")\n",
833820
" conf.set(\"spark.executor.instances\", \"4\") # dataproc defaults to 2\n",
834821
"\n",

examples/ML+DL-Examples/Spark-DL/dl_inference/tensorflow/keras_preprocessing_tf.ipynb

-13
Original file line numberDiff line numberDiff line change
@@ -991,20 +991,7 @@
991991
" conf.setMaster(f\"spark://{hostname}:7077\")\n",
992992
" conf.set(\"spark.pyspark.python\", f\"{conda_env}/bin/python\")\n",
993993
" conf.set(\"spark.pyspark.driver.python\", f\"{conda_env}/bin/python\")\n",
994-
" # Point PyTriton to correct libpython3.11.so:\n",
995-
" conf.set(\"spark.executorEnv.LD_LIBRARY_PATH\", f\"{conda_env}/lib:{conda_env}/lib/python3.11/site-packages/nvidia_pytriton.libs:$LD_LIBRARY_PATH\")\n",
996-
" source = \"/usr/lib/x86_64-linux-gnu/libstdc++.so.6\"\n",
997-
" target = f\"{conda_env}/lib/libstdc++.so.6\"\n",
998-
" try:\n",
999-
" if os.path.islink(target) or os.path.exists(target):\n",
1000-
" os.remove(target)\n",
1001-
" os.symlink(source, target)\n",
1002-
" except OSError as e:\n",
1003-
" print(f\"Error creating symlink: {e}\")\n",
1004994
" elif on_dataproc:\n",
1005-
" # Point PyTriton to correct libpython3.11.so:\n",
1006-
" conda_lib_path=\"/opt/conda/miniconda3/lib\"\n",
1007-
" conf.set(\"spark.executorEnv.LD_LIBRARY_PATH\", f\"{conda_lib_path}:$LD_LIBRARY_PATH\")\n",
1008995
" conf.set(\"spark.executorEnv.TF_GPU_ALLOCATOR\", \"cuda_malloc_async\")\n",
1009996
" conf.set(\"spark.executor.instances\", \"4\") # dataproc defaults to 2\n",
1010997
"\n",

examples/ML+DL-Examples/Spark-DL/dl_inference/tensorflow/keras_resnet50_tf.ipynb

-13
Original file line numberDiff line numberDiff line change
@@ -189,20 +189,7 @@
189189
" conf.setMaster(f\"spark://{hostname}:7077\")\n",
190190
" conf.set(\"spark.pyspark.python\", f\"{conda_env}/bin/python\")\n",
191191
" conf.set(\"spark.pyspark.driver.python\", f\"{conda_env}/bin/python\")\n",
192-
" # Point PyTriton to correct libpython3.11.so:\n",
193-
" conf.set(\"spark.executorEnv.LD_LIBRARY_PATH\", f\"{conda_env}/lib:{conda_env}/lib/python3.11/site-packages/nvidia_pytriton.libs:$LD_LIBRARY_PATH\")\n",
194-
" source = \"/usr/lib/x86_64-linux-gnu/libstdc++.so.6\"\n",
195-
" target = f\"{conda_env}/lib/libstdc++.so.6\"\n",
196-
" try:\n",
197-
" if os.path.islink(target) or os.path.exists(target):\n",
198-
" os.remove(target)\n",
199-
" os.symlink(source, target)\n",
200-
" except OSError as e:\n",
201-
" print(f\"Error creating symlink: {e}\")\n",
202192
" elif on_dataproc:\n",
203-
" # Point PyTriton to correct libpython3.11.so:\n",
204-
" conda_lib_path=\"/opt/conda/miniconda3/lib\"\n",
205-
" conf.set(\"spark.executorEnv.LD_LIBRARY_PATH\", f\"{conda_lib_path}:$LD_LIBRARY_PATH\") \n",
206193
" conf.set(\"spark.executorEnv.TF_GPU_ALLOCATOR\", \"cuda_malloc_async\")\n",
207194
" conf.set(\"spark.executor.instances\", \"4\") # dataproc defaults to 2\n",
208195
"\n",

examples/ML+DL-Examples/Spark-DL/dl_inference/tensorflow/text_classification_tf.ipynb

-13
Original file line numberDiff line numberDiff line change
@@ -1210,20 +1210,7 @@
12101210
" conf.setMaster(f\"spark://{hostname}:7077\")\n",
12111211
" conf.set(\"spark.pyspark.python\", f\"{conda_env}/bin/python\")\n",
12121212
" conf.set(\"spark.pyspark.driver.python\", f\"{conda_env}/bin/python\")\n",
1213-
" # Point PyTriton to correct libpython3.11.so:\n",
1214-
" conf.set(\"spark.executorEnv.LD_LIBRARY_PATH\", f\"{conda_env}/lib:{conda_env}/lib/python3.11/site-packages/nvidia_pytriton.libs:$LD_LIBRARY_PATH\")\n",
1215-
" source = \"/usr/lib/x86_64-linux-gnu/libstdc++.so.6\"\n",
1216-
" target = f\"{conda_env}/lib/libstdc++.so.6\"\n",
1217-
" try:\n",
1218-
" if os.path.islink(target) or os.path.exists(target):\n",
1219-
" os.remove(target)\n",
1220-
" os.symlink(source, target)\n",
1221-
" except OSError as e:\n",
1222-
" print(f\"Error creating symlink: {e}\")\n",
12231213
" elif on_dataproc:\n",
1224-
" # Point PyTriton to correct libpython3.11.so:\n",
1225-
" conda_lib_path=\"/opt/conda/miniconda3/lib\"\n",
1226-
" conf.set(\"spark.executorEnv.LD_LIBRARY_PATH\", f\"{conda_lib_path}:$LD_LIBRARY_PATH\")\n",
12271214
" conf.set(\"spark.executorEnv.TF_GPU_ALLOCATOR\", \"cuda_malloc_async\")\n",
12281215
" conf.set(\"spark.executor.instances\", \"4\") # dataproc defaults to 2\n",
12291216
"\n",

0 commit comments

Comments
 (0)