diff --git a/.github/workflows/docker.yaml b/.github/workflows/docker.yaml
new file mode 100644
index 0000000..5d4caf3
--- /dev/null
+++ b/.github/workflows/docker.yaml
@@ -0,0 +1,47 @@
+name: Docker
+on:
+  push:
+    branches:
+      - "main"
+  pull_request:
+
+env:
+  REGISTRY: ghcr.io
+  IMAGE_NAME: example-compute-block
+
+jobs:
+  build:
+    name: Build docker image
+    runs-on: ubuntu-latest
+    # Pushing to ghcr.io with GITHUB_TOKEN requires packages:write;
+    # the default token permissions may be read-only on this repo.
+    permissions:
+      contents: read
+      packages: write
+    steps:
+      - name: Checkout Repository
+        uses: actions/checkout@v4
+
+      - name: Log in to the Container registry
+        uses: docker/login-action@v3
+        with:
+          registry: ${{ env.REGISTRY }}
+          username: ${{ github.actor }}
+          password: ${{ secrets.GITHUB_TOKEN }}
+
+      - name: Extract metadata for docker
+        id: meta
+        uses: docker/metadata-action@v5
+        with:
+          images: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}
+          tags: |
+            type=ref,event=pr
+            type=raw,value=latest,enable=${{ github.ref == format('refs/heads/{0}', 'main') }}
+
+      - name: Build and push Docker image
+        uses: docker/build-push-action@v5
+        with:
+          # PRs (notably from forks) have no registry write access: build only.
+          push: ${{ github.event_name != 'pull_request' }}
+          tags: ${{ steps.meta.outputs.tags }}
+          labels: ${{ steps.meta.outputs.labels }}
diff --git a/Dockerfile b/Dockerfile
index 947691a..5685cf5 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -1,22 +1,22 @@
 FROM python:3.12
 
+ENV PYTHONUNBUFFERED=1
+
 # Install Java for Spark
 RUN apt-get update && \
     apt-get install -y default-jdk && \
     apt-get clean
 
-COPY . ./app
-
 WORKDIR /app
 
-# Using this example-compute-block for testing, you have to copy your current version of scystream-sdk into this
-# directory. We copy it into the docker container, and install it there via the folder.
-RUN python3 -m venv .venv && \
-    .venv/bin/pip install --upgrade pip && \
-    .venv/bin/pip install ./scystream-sdk
+COPY requirements.txt /app
+
+RUN pip install --upgrade pip && pip install -r requirements.txt
+
+COPY . /app
+
 
-ENV VIRTUAL_ENV=/app/.venv
-ENV PATH="/app/.venv/bin:$PATH"
+# CMD sh -c "python -c 'import main; from scystream.sdk.scheduler import Scheduler; Scheduler.execute_function(\"test_file\")'"
 
-# Keep the container running. Just for testing, normally placeholder which will be overriden by DockerOperator
-CMD ["tail", "-f", "/dev/null"]
+# Cmd that will be overwritten by Airflow
+CMD ["sh", "-c","echo Container is ready for the Scheduler.execute_function call."]
diff --git a/main.py b/main.py
index 8bf92c2..b89e047 100644
--- a/main.py
+++ b/main.py
@@ -120,6 +120,6 @@ def test_file(settings):
     )
 
 
-if __name__ == "__main__":
-    Scheduler.execute_function("test_entrypoint")
-    Scheduler.execute_function("test_file")
+# if __name__ == "__main__":
+#     Scheduler.execute_function("test_entrypoint")
+#     Scheduler.execute_function("test_file")
diff --git a/requirements.txt b/requirements.txt
new file mode 100644
index 0000000..eae9cf1
--- /dev/null
+++ b/requirements.txt
@@ -0,0 +1 @@
+scystream-sdk==1.0