Skip to content

Commit

Permalink
Merge pull request #342 from VariantEffect/tests/bencap/106/utf-encoding-for-csvs
Browse files Browse the repository at this point in the history

Add Tests for UTF-8 Encoded Score and Count Files
  • Loading branch information
bencap authored Oct 22, 2024
2 parents 829f2af + 57d151f commit c86d39f
Show file tree
Hide file tree
Showing 3 changed files with 61 additions and 0 deletions.
4 changes: 4 additions & 0 deletions tests/routers/data/counts_utf8_encoded.csv
Original file line number Diff line number Diff line change
@@ -0,0 +1,4 @@
hgvs_nt,hgvs_pro,c_0,c_1
c.1A>T,p.Thr1Ser,10,20
c.2C>T,p.Thr1Met,8,8
c.6T>A,p.Phe2Leu,90,2
4 changes: 4 additions & 0 deletions tests/routers/data/scores_utf8_encoded.csv
Original file line number Diff line number Diff line change
@@ -0,0 +1,4 @@
hgvs_nt,hgvs_pro,score
c.1A>T,p.Thr1Ser,0.3
c.2C>T,p.Thr1Met,0
c.6T>A,p.Phe2Leu,-1.65
53 changes: 53 additions & 0 deletions tests/routers/test_score_set.py
Original file line number Diff line number Diff line change
Expand Up @@ -338,6 +338,59 @@ def test_add_score_set_variants_scores_and_counts_endpoint(session, client, setu
assert score_set == response_data


def test_add_score_set_variants_scores_only_endpoint_utf8_encoded(client, setup_router_db, data_files):
    """Posting a UTF-8 encoded scores CSV (no counts) enqueues one job and returns the score set as 'processing'."""
    experiment = create_experiment(client)
    score_set = create_seq_score_set(client, experiment["urn"])
    scores_path = data_files / "scores_utf8_encoded.csv"

    # Stub out the Arq queue so no real worker job is enqueued during the request.
    with patch.object(ArqRedis, "enqueue_job", return_value=None) as queue:
        with open(scores_path, "rb") as scores_file:
            response = client.post(
                f"/api/v1/score-sets/{score_set['urn']}/variants/data",
                files={"scores_file": (scores_path.name, scores_file, "text/csv")},
            )
        queue.assert_called_once()

    assert response.status_code == 200
    response_data = response.json()
    jsonschema.validate(instance=response_data, schema=ScoreSet.schema())

    # Variant ingestion itself is exercised by the worker tests; here we take it as
    # fact that it would have succeeded.
    score_set.update({"processingState": "processing"})
    assert score_set == response_data


def test_add_score_set_variants_scores_and_counts_endpoint_utf8_encoded(session, client, setup_router_db, data_files):
    """Posting UTF-8 encoded scores and counts CSVs together enqueues one job and returns the score set as 'processing'."""
    experiment = create_experiment(client)
    score_set = create_seq_score_set(client, experiment["urn"])
    scores_path = data_files / "scores_utf8_encoded.csv"
    counts_path = data_files / "counts_utf8_encoded.csv"

    # Stub out the Arq queue so no real worker job is enqueued during the request.
    with patch.object(ArqRedis, "enqueue_job", return_value=None) as queue:
        with open(scores_path, "rb") as scores_file, open(counts_path, "rb") as counts_file:
            upload = {
                "scores_file": (scores_path.name, scores_file, "text/csv"),
                "counts_file": (counts_path.name, counts_file, "text/csv"),
            }
            response = client.post(
                f"/api/v1/score-sets/{score_set['urn']}/variants/data",
                files=upload,
            )
        queue.assert_called_once()

    assert response.status_code == 200
    response_data = response.json()
    jsonschema.validate(instance=response_data, schema=ScoreSet.schema())

    # Variant ingestion itself is exercised by the worker tests; here we take it as
    # fact that it would have succeeded.
    score_set.update({"processingState": "processing"})
    assert score_set == response_data


def test_cannot_add_scores_to_score_set_without_email(session, client, setup_router_db, data_files):
experiment = create_experiment(client)
score_set = create_seq_score_set(client, experiment["urn"])
Expand Down

0 comments on commit c86d39f

Please sign in to comment.