Skip to content

Commit

Permalink
Merge pull request #81 from os-climate/update-devops-tooling
Browse files Browse the repository at this point in the history
Chore: Update DevOps tooling from central repository [skip ci]
  • Loading branch information
ModeSevenIndustrialSolutions authored May 13, 2024
2 parents d006edc + de4cd34 commit d9842d2
Show file tree
Hide file tree
Showing 3 changed files with 60 additions and 26 deletions.
2 changes: 1 addition & 1 deletion .github/workflows/release.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -91,7 +91,7 @@ jobs:
uses: softprops/action-gh-release@v2
with:
token: ${{ secrets.GITHUB_TOKEN }}
prerelease: true
prerelease: false
tag_name: ${{ github.ref_name }}
name: "Test/Development Build \
${{ github.ref_name }}"
Expand Down
36 changes: 20 additions & 16 deletions .pre-commit-config.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -65,16 +65,16 @@ repos:
- id: markdownlint
args: ["--fix"]

- repo: https://github.com/psf/black-pre-commit-mirror
rev: 24.4.2
hooks:
- id: black
- id: black-jupyter
# - repo: https://github.com/psf/black-pre-commit-mirror
# rev: 24.4.2
# hooks:
# - id: black
# - id: black-jupyter

- repo: https://github.com/tomcatling/black-nb
rev: '0.7'
hooks:
- id: black-nb
# - repo: https://github.com/tomcatling/black-nb
# rev: '0.7'
# hooks:
# - id: black-nb

- repo: https://github.com/jorisroovers/gitlint
rev: v0.19.1
Expand Down Expand Up @@ -112,13 +112,8 @@ repos:
- repo: https://github.com/pycqa/flake8
rev: "7.0.0"
hooks:
- id: flake8
# Ignore all format-related checks as Black takes care of those.
args:
- --ignore=E2, W5, F401, E401
- --select=E, W, F, N
- --max-line-length=120
additional_dependencies:
- id: flake8
additional_dependencies:
- pep8-naming

- repo: https://github.com/adrienverge/yamllint.git
Expand All @@ -135,6 +130,15 @@ repos:
args: [--show-error-codes]
additional_dependencies: ["pytest", "types-requests"]

- repo: https://github.com/astral-sh/ruff-pre-commit
rev: v0.4.3
hooks:
- id: ruff
files: ^(scripts|tests|custom_components)/.+\.py$
args: [--fix, --exit-non-zero-on-fix]
- id: ruff-format
files: ^(scripts|tests|custom_components)/.+\.py$

# Check for misspellings in documentation files
# - repo: https://github.com/codespell-project/codespell
# rev: v2.2.2
Expand Down
48 changes: 39 additions & 9 deletions tests/test_trino_utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -49,10 +49,13 @@ def test_attach_trino_engine(mock_engine, mock_trino_auth, monkeypatch):
mock_engine.return_value = fake_engine
mock_trino_auth.return_value = "yep"

attach_trino_engine(env_var_prefix="TEST", catalog="ex_catalog", schema="ex_schema", verbose=True)
attach_trino_engine(
env_var_prefix="TEST", catalog="ex_catalog", schema="ex_schema", verbose=True
)

mock_engine.assert_called_with(
"trino://tester@example:8000/ex_catalog/ex_schema", connect_args={"auth": "yep", "http_scheme": "https"}
"trino://tester@example:8000/ex_catalog/ex_schema",
connect_args={"auth": "yep", "http_scheme": "https"},
)


Expand All @@ -67,15 +70,30 @@ def test_trino_batch_insert():
cxn = mock.MagicMock()
# tuple data, in form supplied to __call__ as specified in 'method' param docs:
# https://pandas.pydata.org/docs/reference/api/pandas.DataFrame.to_sql.html
rows = [("a", 4.5), ("b'c", math.nan), (None, math.inf), ("d", -math.inf), ("e", datetime(2022, 1, 1)), (":f", 1.0)]
rows = [
("a", 4.5),
("b'c", math.nan),
(None, math.inf),
("d", -math.inf),
("e", datetime(2022, 1, 1)),
(":f", 1.0),
]
# invoke the __call__ method, simulating df.to_sql call
tbi = TrinoBatchInsert(catalog="test", schema="test", batch_size=2, verbose=True, optimize=True)
tbi = TrinoBatchInsert(
catalog="test", schema="test", batch_size=2, verbose=True, optimize=True
)
tbi(tbl, cxn, [], rows)

assert cxn.execute.call_count == 4
xcalls = cxn.execute.call_args_list
assert xcalls[0].args[0].text == "insert into test.test.test values\n('a', 4.5),\n('b''c', nan())"
assert xcalls[1].args[0].text == "insert into test.test.test values\n(NULL, infinity()),\n('d', -infinity())"
assert (
xcalls[0].args[0].text
== "insert into test.test.test values\n('a', 4.5),\n('b''c', nan())"
)
assert (
xcalls[1].args[0].text
== "insert into test.test.test values\n(NULL, infinity()),\n('d', -infinity())"
)
assert (
xcalls[2].args[0].text
== "insert into test.test.test values\n('e', TIMESTAMP '2022-01-01 00:00:00'),\n('\\:f', 1.0)"
Expand All @@ -92,9 +110,19 @@ def test_trino_pandas_insert():
# mock up an sqlalchemy Connection
cxn = mock.MagicMock()
df = pd.DataFrame(
{"A": [4.5], "B'C": [math.nan], None: [math.inf], "D": [-math.inf], "E": [datetime(2022, 1, 1)], ":F": [1.0]}
{
"A": [4.5],
"B'C": [math.nan],
None: [math.inf],
"D": [-math.inf],
"E": [datetime(2022, 1, 1)],
":F": [1.0],
}
).convert_dtypes()
assert (df.dtypes == ["Float64", "Int64", "Float64", "Float64", "datetime64[ns]", "Int64"]).all()
assert (
df.dtypes
== ["Float64", "Int64", "Float64", "Float64", "datetime64[ns]", "Int64"]
).all()
# This passes Mock test, but fails when used in Trino/Iceberg environment
df.to_sql(
tbl.name,
Expand All @@ -117,5 +145,7 @@ def test_unmanaged_parquet_tabledef():
bucket = conn.Bucket("mybucket")
bucket.create()

tabledef = unmanaged_parquet_tabledef(df, "catalog", "schema", "table", bucket, partition_columns=["a", "b"])
tabledef = unmanaged_parquet_tabledef(
df, "catalog", "schema", "table", bucket, partition_columns=["a", "b"]
)
print(tabledef)

0 comments on commit d9842d2

Please sign in to comment.