Skip to content

Commit

Permalink
Add job to run unit tests
Browse files — browse the repository at this point in the history
  • Loading branch information
fjakobs committed Nov 30, 2022
1 parent 75218eb commit f99035e
Show file tree
Hide file tree
Showing 4 changed files with 47 additions and 1 deletion.
2 changes: 1 addition & 1 deletion .gitignore
Original file line number Diff line number Diff line change
@@ -1,8 +1,8 @@
.databricks/
.venv/
.pytest_cache/
*.pyc
__pycache__/
.pytest_cache/
dist/
build/
covid_analysis.egg-info/
16 changes: 16 additions & 0 deletions .vscode/launch.json
Original file line number Diff line number Diff line change
@@ -0,0 +1,16 @@
{
    // Use IntelliSense to learn about possible attributes.
    // Hover to view descriptions of existing attributes.
    // For more information, visit: https://go.microsoft.com/fwlink/?linkid=830387
    "version": "0.2.0",
    "configurations": [
        {
            // Launches the pytest driver script through the "databricks"
            // debug type (presumably the Databricks VS Code extension —
            // confirm it is installed). "args" are forwarded to the program,
            // i.e. pytest is told to collect tests from ./tests.
            "type": "databricks",
            "request": "launch",
            "name": "Unit Tests (on Databricks)",
            "program": "${workspaceFolder}/jobs/pytest_databricks.py",
            "args": ["./tests"],
            "env": {}
        }
    ]
}
12 changes: 12 additions & 0 deletions jobs/pytest_databricks.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,12 @@
"""Driver script that runs the repository's pytest suite on a Databricks cluster.

Any command-line arguments (e.g. a test directory) are passed straight
through to pytest.
"""
import pytest
import os
import sys

# Run all tests from the repository root.
# NOTE(review): assumes the current working directory is a direct
# subdirectory of the repo root (e.g. jobs/) — confirm against how the
# job invokes this script.
repo_root = os.path.dirname(os.getcwd())
os.chdir(repo_root)

# Skip writing pyc files on a readonly filesystem.
sys.dont_write_bytecode = True

# Propagate pytest's exit status. The original discarded the return value
# (`_ = pytest.main(...)`), so the job exited 0 and was reported as
# successful even when tests failed. pytest.main returns an ExitCode
# (0 == all tests passed); raising SystemExit with it makes a failing
# test suite fail the job run.
raise SystemExit(pytest.main(sys.argv[1:]))
18 changes: 18 additions & 0 deletions tests/spark_test.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,18 @@
from pyspark.sql import SparkSession
import pytest


@pytest.fixture
def spark() -> SparkSession:
    """Provide a SparkSession for unit tests.

    Unit tests don't get the `spark` global that notebooks do, so the
    session is created (or an existing one reused) explicitly here.
    """
    builder = SparkSession.builder
    return builder.getOrCreate()


def test_spark(spark):
    """Sanity-check that SQL execution works; needs a live cluster session."""
    rows = spark.sql("select 1").collect()
    first_row = rows[0]
    assert first_row[0] == 1

0 comments on commit f99035e

Please sign in to comment.