Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

The container image of spark task should be immutable #2956

Merged
merged 8 commits into from
Nov 27, 2024
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
6 changes: 4 additions & 2 deletions plugins/flytekit-spark/flytekitplugins/spark/task.py
Original file line number Diff line number Diff line change
@@ -1,3 +1,4 @@
import dataclasses
import os
import shutil
from dataclasses import dataclass
Expand Down Expand Up @@ -134,11 +135,12 @@ def __init__(
self.sess: Optional[SparkSession] = None
self._default_executor_path: str = task_config.executor_path
self._default_applications_path: str = task_config.applications_path
self._container_image = container_image

if isinstance(container_image, ImageSpec):
if container_image.base_image is None:
img = f"cr.flyte.org/flyteorg/flytekit:spark-{DefaultImages.get_version_suffix()}"
container_image.base_image = img
self._container_image = dataclasses.replace(container_image, base_image=img)
# default executor path and applications path in apache/spark-py:3.3.1
self._default_executor_path = self._default_executor_path or "/usr/bin/python3"
self._default_applications_path = (
Expand All @@ -154,7 +156,7 @@ def __init__(
task_config=task_config,
task_type=task_type,
task_function=task_function,
container_image=container_image,
container_image=self._container_image,
**kwargs,
)

Expand Down
35 changes: 33 additions & 2 deletions plugins/flytekit-spark/tests/test_spark_task.py
Original file line number Diff line number Diff line change
Expand Up @@ -11,8 +11,8 @@
from pyspark.sql import SparkSession

import flytekit
from flytekit import StructuredDataset, StructuredDatasetTransformerEngine, task
from flytekit.configuration import Image, ImageConfig, SerializationSettings, FastSerializationSettings
from flytekit import StructuredDataset, StructuredDatasetTransformerEngine, task, ImageSpec
from flytekit.configuration import Image, ImageConfig, SerializationSettings, FastSerializationSettings, DefaultImages
from flytekit.core.context_manager import ExecutionParameters, FlyteContextManager, ExecutionState


Expand Down Expand Up @@ -157,3 +157,34 @@ def my_spark(a: int) -> int:
my_spark.pre_execute(new_ctx.user_space_params)
mock_add_pyfile.assert_called_once()
os.remove(os.path.join(os.getcwd(), "flyte_wf.zip"))


def test_spark_with_image_spec():
    """Verify that a shared ImageSpec is not mutated when reused across Spark tasks.

    Two tasks are built from the same ImageSpec instance. Constructing the
    first task must not alter the spec in place (the plugin uses an immutable
    copy when it fills in the default Spark base image), so both tasks must
    resolve to the same base image, executor path, and applications path.
    """
    expected_base = f"cr.flyte.org/flyteorg/flytekit:spark-{DefaultImages.get_version_suffix()}"

    image_spec = ImageSpec(
        registry="ghcr.io/flyteorg",
        packages=["flytekitplugins-spark"],
    )

    @task(
        task_config=Spark(spark_conf={"spark.driver.memory": "1000M"}),
        container_image=image_spec,
    )
    def spark1(partitions: int) -> float:
        print("Starting Spark with Partitions: {}".format(partitions))
        return 1.0

    @task(
        task_config=Spark(spark_conf={"spark.driver.memory": "1000M"}),
        container_image=image_spec,
    )
    def spark2(partitions: int) -> float:
        print("Starting Spark with Partitions: {}".format(partitions))
        return 1.0

    # Both tasks must see identical resolved defaults — if the first task had
    # mutated the shared ImageSpec, the second construction would diverge.
    for spark_task in (spark1, spark2):
        assert spark_task.container_image.base_image == expected_base
        assert spark_task._default_executor_path == "/usr/bin/python3"
        assert spark_task._default_applications_path == "local:///usr/local/bin/entrypoint.py"
Loading