Deal with aborted orders which were once in processing, also add codecov to see if it works #31

Merged: 4 commits, Jan 17, 2024
6 changes: 5 additions & 1 deletion .github/workflows/run_tests.yml
@@ -22,4 +22,8 @@ jobs:
pip install pytest pytest-cov
- name: Test with pytest
run: |
python -m pytest --cov=daily_read tests
python -m pytest --cov=daily_read --cov-report=xml tests
- name: Upload coverage reports to Codecov
uses: codecov/codecov-action@v3
env:
CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}
14 changes: 8 additions & 6 deletions README.md
@@ -1,8 +1,10 @@
# The NGI Daily Read

[![codecov](https://codecov.io/gh/NationalGenomicsInfrastructure/DailyRead/graph/badge.svg?token=P3M4Y1N4SU)](https://codecov.io/gh/NationalGenomicsInfrastructure/DailyRead)

A utility to generate and upload automatic progress reports for NGI Sweden.

## Suggested logic
## How it works

- The script first fetches data from the appropriate NGI source, i.e. statusdb for Stockholm.
- The data corresponding to each project will then be saved in a small data file (json, yaml or csv perhaps) on disk.
@@ -21,21 +23,21 @@ Also see diagram below:
![alt text](doc/figures/overview_dark.png#gh-dark-mode-only)
![alt text](doc/figures/overview_light.png#gh-light-mode-only)
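
As a concrete illustration of the per-project data file mentioned under "How it works", here is a minimal sketch of what one such JSON record could hold. The keys mirror what `ProjectDataRecord.data_for_file` writes in this pull request; the values, the date and the output path are invented for the example.

```python
import json

# Hypothetical per-project record; keys follow ProjectDataRecord.data_for_file,
# values are made up for illustration.
record = {
    "orderer": "[email protected]",
    "project_dates": {"2023-06-15": ["Library QC finished"]},
    "internal_id": "P123456",
    "internal_name": "D.Dummysson_23_01",
    "internal_proj_status": "Ongoing",  # field introduced by this PR
}

# The repo stores these under paths like "NGIS/2023/NGI0002313.json"
with open("NGI0002313.json", "w") as fh:
    json.dump(record, fh, indent=2)
```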

## Planned Usage (yet to be implemented)
## Usage

```bash
# Generate reports and save them in a local git repository (location is given by a configuration variable) and commit changes with a timestamp message
daily_read generate all

# Generate report for single orderer, need location specified, will not create git commit
daily_read generate single <ordererID> <location>
# Generate report for a single orderer
daily_read generate single --project <OrderID>

# Generate and upload
daily_read generate all --upload
daily_read generate single <orderer>

```

To generate and upload reports for a single user (or a list of users), their name(s) can be entered in a text file whose location is provided via the environment variable `DAILY_READ_USERS_LIST_LOCATION`.
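
For illustration only, a minimal sketch of how such a users file might be consumed; the one-address-per-line format and the handling below are assumptions, not behaviour documented in this diff.

```python
import os

# Assumption: the file referenced by DAILY_READ_USERS_LIST_LOCATION lists one
# orderer per line; blank lines are ignored.
users_file = os.environ["DAILY_READ_USERS_LIST_LOCATION"]

with open(users_file) as fh:
    users = [line.strip() for line in fh if line.strip()]

print(users)  # e.g. ["[email protected]"]
```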

## Configuration variables

Configuration is handled via environment variables. The simplest way to set it up is to create a `.env` file based on the `.env.example` provided in the repo. Environment variables that are not set fall back to default values in `daily_read/config.py`.
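
As a rough sketch of that precedence (a value set in the environment wins, otherwise an in-code default applies), assuming the `.env` file is loaded with `python-dotenv`, which this diff does not confirm; the default path below is invented:

```python
import os

from dotenv import load_dotenv  # assumption: python-dotenv is available

load_dotenv()  # pull key=value pairs from a local .env file into the environment

# Unset variables fall back to an in-code default, mirroring the behaviour
# described for daily_read/config.py; the default path here is illustrative only.
USERS_LIST_LOCATION = os.environ.get("DAILY_READ_USERS_LIST_LOCATION", "/data/users.txt")
```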
46 changes: 41 additions & 5 deletions daily_read/ngi_data.py
@@ -190,9 +196,16 @@ def get_modified_or_new_projects(self):
project_dates,
internal_id,
internal_name,
internal_proj_status,
) = ProjectDataRecord.data_from_file(project_path)
project_record = ProjectDataRecord(
project_path, orderer, project_dates, internal_id, internal_name, self.config.STATUS_PRIORITY_REV
project_path,
orderer,
project_dates,
internal_id,
internal_name,
self.config.STATUS_PRIORITY_REV,
internal_proj_status,
)
projects_list.append(project_record)

Expand Down Expand Up @@ -223,7 +230,16 @@ class ProjectDataRecord(object):
Raises ValueError if orderer is not present in data, if data is given
"""

def __init__(self, relative_path, orderer, project_dates, internal_id=None, internal_name=None, dates_prio=None):
def __init__(
self,
relative_path,
orderer,
project_dates,
internal_id=None,
internal_name=None,
dates_prio=None,
internal_proj_status=None,
):
"""relative_path: e.g. "NGIS/2023/NGI0002313.json" """
node_year, file_name = os.path.split(relative_path)
node, year = os.path.split(node_year)
Expand All @@ -245,6 +261,7 @@ def __init__(self, relative_path, orderer, project_dates, internal_id=None, inte
self.internal_name = internal_name
self.events = [] # List of tuples (date_value, (date_status, <ProjectDataRecord>))
self.status = None
self.internal_proj_status = internal_proj_status

for date_value, date_statuses in project_dates.items():
for date_status in date_statuses:
@@ -284,6 +301,7 @@ def data_for_file(self):
"project_dates": self.project_dates,
"internal_id": self.internal_id,
"internal_name": self.internal_name,
"internal_proj_status": self.internal_proj_status,
}

def data_from_file(relative_path):
Expand All @@ -299,7 +317,11 @@ def data_from_file(relative_path):
if "internal_name" in data:
internal_name = data["internal_name"]

return data["orderer"], data["project_dates"], internal_id, internal_name
internal_proj_status = None
if "internal_proj_status" in data:
internal_proj_status = data["internal_proj_status"]

return data["orderer"], data["project_dates"], internal_id, internal_name, internal_proj_status

def portal_id_from_path(path):
"""Class method to parse out project portal id (e.g. filename without extension) from given path"""
@@ -342,9 +364,16 @@ def get_data(self, project_id=None, close_date=None):
orderer = row.value["orderer"]
internal_id = row.value["project_id"]
internal_name = row.value["project_name"]
internal_proj_status = row.value["status"]

self.data[portal_id] = ProjectDataRecord(
relative_path, orderer, project_dates, internal_id, internal_name, self.dates_prio
relative_path,
orderer,
project_dates,
internal_id,
internal_name,
self.dates_prio,
internal_proj_status,
)

return self.data
@@ -367,9 +396,16 @@ def get_entry(self, project_id):
orderer = row.value["orderer"]
internal_id = row.value["project_id"]
internal_name = row.value["project_name"]
internal_proj_status = row.value["status"]

self.data[portal_id] = ProjectDataRecord(
relative_path, orderer, project_dates, internal_id, internal_name, self.dates_prio
relative_path,
orderer,
project_dates,
internal_id,
internal_name,
self.dates_prio,
internal_proj_status,
)
return
raise ValueError(f"Project {project_id} not found in statusdb")
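
Taken together, these `ngi_data.py` changes thread the new `internal_proj_status` field through both writing and reading of the on-disk records. The following self-contained sketch, with invented values, shows the resulting round trip and the backward-compatibility behaviour: files written before this change simply yield `None`.

```python
import json

# Sketch of the round trip added in this PR: data_for_file now writes
# "internal_proj_status", and data_from_file tolerates older files that lack
# the key by falling back to None. Values are illustrative.
new_record = {
    "orderer": "[email protected]",
    "project_dates": {"2023-06-15": ["Library QC finished"]},
    "internal_id": "P123456",
    "internal_name": "D.Dummysson_23_01",
    "internal_proj_status": "Aborted",
}
old_record = {k: v for k, v in new_record.items() if k != "internal_proj_status"}

for record in (new_record, old_record):
    data = json.loads(json.dumps(record))  # stands in for reading the JSON file
    status = data["internal_proj_status"] if "internal_proj_status" in data else None
    print(status)  # "Aborted", then None
```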
4 changes: 4 additions & 0 deletions daily_read/order_portal.py
@@ -90,6 +90,10 @@ def process_orders(self, priority, closed_before_in_days=30):
delete_report = True

proj_info = self.projects_data.data[order["identifier"]]

if proj_info.internal_proj_status in ["Aborted"]:
delete_report = True

if order["reports"]:
prog_reports = [item for item in order["reports"] if item["name"] == "Project Progress"]
if prog_reports:
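
The hunk above only shows part of `process_orders`, so here is a simplified, standalone sketch of the report-deletion decision it extends; the close-date check is an assumption based on the `closed_before_in_days` parameter, not code visible in this diff.

```python
from datetime import date, timedelta

def should_delete_report(internal_proj_status, close_date=None, closed_before_in_days=30):
    """Simplified stand-in for the branch added in process_orders."""
    # New in this PR: aborted projects always have their progress report removed.
    if internal_proj_status in ["Aborted"]:
        return True
    # Assumed pre-existing behaviour: orders closed long enough ago are cleaned up.
    if close_date is not None and close_date < date.today() - timedelta(days=closed_before_in_days):
        return True
    return False

assert should_delete_report("Aborted") is True
assert should_delete_report("Ongoing") is False
```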
39 changes: 38 additions & 1 deletion tests/conftest.py
@@ -17,6 +17,7 @@
},
"internal_id": "P123456",
"internal_name": "D.Dummysson_23_01",
"internal_proj_status": "Ongoing",
}

dummy_order_closed = {
@@ -29,6 +30,7 @@
},
"internal_id": "P123455",
"internal_name": "D.Dummysson_23_02",
"internal_proj_status": "Ongoing",
}

order_portal_resp_order_processing = {
@@ -120,6 +122,26 @@
},
]

order_portal_resp_order_processing_to_aborted = copy.deepcopy(order_portal_resp_order_processing)
order_portal_resp_order_processing_to_aborted["identifier"] = "NGI123461"
order_portal_resp_order_processing_to_aborted["reports"] = [
{
"iuid": "c5ee943",
"name": "Project Progress",
"filename": "project_progress.html",
"status": "published",
"modified": "2024-01-15T15:09:18.732Z",
"links": {
"api": {"href": "https://orderportal.example.com/orders/api/v1/report/c5ee943"},
"file": {"href": "https://orderportal.example.com/orders/report/c5ee943"},
},
},
]
order_portal_resp_order_processing_to_aborted["status"] = "Rejected"
order_portal_resp_order_processing_to_aborted["history"]["rejected"] = "2024-01-16"
order_portal_resp_order_processing_to_aborted["fields"]["project_ngi_identifier"] = "P123461"
order_portal_resp_order_processing_to_aborted["fields"]["project_ngi_name"] = "D.Dummysson_23_07"

order_portal_resp_order_closed = {
"identifier": "NGI123455",
"title": "Test run with closed",
@@ -221,6 +243,7 @@ def data_repo_new_staged(data_repo):
"NGIS/2023/staged_file2.json",
"NGIS/2023/P123456.json",
"NGIS/2023/P123453.json",
"NGIS/2023/P123461.json",
]
_create_all_files(staged_files, data_repo.working_dir)
data_repo.index.add(staged_files)
@@ -309,6 +332,7 @@ def _method(status):
dummy_order_open["internal_id"],
dummy_order_open["internal_name"],
config_values.STATUS_PRIORITY_REV,
dummy_order_open["internal_proj_status"],
)
if status == "closed":
mock_record = ngi_data.ProjectDataRecord(
@@ -318,6 +342,7 @@
dummy_order_closed["internal_id"],
dummy_order_closed["internal_name"],
config_values.STATUS_PRIORITY_REV,
dummy_order_closed["internal_proj_status"],
)
if status == "open_with_report":
mock_record = ngi_data.ProjectDataRecord(
@@ -327,6 +352,17 @@
dummy_order_open["internal_id"],
dummy_order_open["internal_name"],
config_values.STATUS_PRIORITY_REV,
dummy_order_open["internal_proj_status"],
)
if status == "open_to_aborted_with_report":
mock_record = ngi_data.ProjectDataRecord(
"NGIS/2023/NGI123461.json",
dummy_order_open["orderer"],
dummy_order_open["project_dates"],
"P123461",
"D.Dummysson_23_07",
config_values.STATUS_PRIORITY_REV,
"Aborted",
)
return mock_record

@@ -357,6 +393,7 @@ def json(self):
order_portal_resp_order_closed,
order_portal_resp_order_processing_mult_reports,
order_portal_resp_order_processing_single_report,
order_portal_resp_order_processing_to_aborted,
]
},
200,
@@ -412,7 +449,7 @@ def mocked_statusdb_conn_rows():
"project_id": "P123460",
"project_name": "D.Dummysson_23_06",
"proj_dates": {},
"status": "Pending",
"status": "Reception control",
},
)
return [row1, row2, row3]
4 changes: 2 additions & 2 deletions tests/test_ngi_data.py
@@ -58,7 +58,7 @@ def test_modified_or_new(data_repo_full):

modified_or_new = data_master.get_modified_or_new_projects()
file_names = [project.relative_path for project in modified_or_new]
assert len(set(file_names)) == 11
assert len(set(file_names)) == 12


def test_modified_or_new_untracked(data_repo_untracked):
Expand All @@ -84,7 +84,7 @@ def test_modified_or_new_staged(data_repo_new_staged):

modified_or_new = data_master.get_modified_or_new_projects()
file_names = [project.relative_path for project in modified_or_new]
assert len(set(file_names)) == 5
assert len(set(file_names)) == 6
assert any("staged_file" in s for s in file_names)


34 changes: 34 additions & 0 deletions tests/test_order_portal.py
@@ -83,6 +83,40 @@ def test_get_and_process_orders_open_with_report_and_upload(data_repo_full, mock
)


def test_get_and_process_orders_open_to_aborted_with_report_and_upload(data_repo_full, mock_project_data_record):
"""Test getting, processing an open order with an existing Project progress report and uploading the report to the Order portal"""
orderer = "[email protected]"
order_id = "NGI123461"
config_values = config.Config()
with mock.patch("daily_read.statusdb.StatusDBSession"):
data_master = ngi_data.ProjectDataMaster(config_values)

data_master.data = {order_id: mock_project_data_record("open_to_aborted_with_report")}

op = order_portal.OrderPortal(config_values, data_master)
with mock.patch("daily_read.order_portal.OrderPortal._get", side_effect=mocked_requests_get):
op.get_orders(orderer=orderer)

assert op.all_orders[4]["identifier"] == order_id
modified_orders = op.process_orders(config_values.STATUS_PRIORITY_REV)

assert modified_orders[orderer]["delete_report_for"]["Library QC finished"][0] == data_master.data[order_id]
with mock.patch("daily_read.order_portal.requests.post") as mock_post:
mock_post.return_value.status_code = 200
op.upload_report_to_order_portal(
"", modified_orders[orderer]["delete_report_for"]["Library QC finished"][0], "review"
)
url = f"{config_values.ORDER_PORTAL_URL}/api/v1/report/{op.all_orders[4]['reports'][0]['iuid']}"
indata = dict(
order=order_id,
name="Project Progress",
status="review",
)
mock_post.assert_called_once_with(
url, headers={"X-OrderPortal-API-key": config_values.ORDER_PORTAL_API_KEY}, json=indata
)


def test_get_and_process_orders_closed(data_repo_full, mock_project_data_record):
"""Test getting and processing an order closed within the timeframe of Project progress report deletion"""
orderer = "[email protected]"