From c225e36a6d0b1b11d9f0e39f9a7a2462b2b3f353 Mon Sep 17 00:00:00 2001 From: valyo <582646+valyo@users.noreply.github.com> Date: Tue, 26 Sep 2023 15:18:36 +0200 Subject: [PATCH 001/114] Add UpdateFailedFiles endpoint --- dds_web/api/__init__.py | 1 + dds_web/api/files.py | 13 +++++++++++++ 2 files changed, 14 insertions(+) diff --git a/dds_web/api/__init__.py b/dds_web/api/__init__.py index 3a7da23ff..55b3853b7 100644 --- a/dds_web/api/__init__.py +++ b/dds_web/api/__init__.py @@ -50,6 +50,7 @@ def output_json(data, code, headers=None): api.add_resource(files.FileInfo, "/file/info", endpoint="file_info") api.add_resource(files.FileInfoAll, "/file/all/info", endpoint="all_file_info") api.add_resource(files.UpdateFile, "/file/update", endpoint="update_file") +api.add_resource(files.UpdateFailedFiles, "/file/failed/update", endpoint="update_failed_file") # Projects ############################################################################## Projects # api.add_resource(project.UserProjects, "/proj/list", endpoint="list_projects") diff --git a/dds_web/api/files.py b/dds_web/api/files.py index 415977924..457535635 100644 --- a/dds_web/api/files.py +++ b/dds_web/api/files.py @@ -740,3 +740,16 @@ def put(self): db.session.commit() return {"message": "File info updated."} + + +class UpdateFailedFiles(flask_restful.Resource): + """Add files from failed_delivery_log to DB using the "update_uploaded_file_with_log" command""" + + @auth.login_required(role=["Unit Admin", "Unit Personnel", "Project Owner", "Researcher"]) + @json_required + @handle_validation_errors + def put(self): + """Run flask command with failed_delivery_log.""" + + flask.current_app.logger.debug("API called") + return {"message": "File(s) info updated."} From 5e91e42c7891a712c075b5576ba9f41462519f9d Mon Sep 17 00:00:00 2001 From: valyo <582646+valyo@users.noreply.github.com> Date: Tue, 26 Sep 2023 15:20:15 +0200 Subject: [PATCH 002/114] black --- dds_web/api/files.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/dds_web/api/files.py b/dds_web/api/files.py index 457535635..386994f90 100644 --- a/dds_web/api/files.py +++ b/dds_web/api/files.py @@ -750,6 +750,6 @@ class UpdateFailedFiles(flask_restful.Resource): @handle_validation_errors def put(self): """Run flask command with failed_delivery_log.""" - + flask.current_app.logger.debug("API called") return {"message": "File(s) info updated."} From 833a87459d3584616e99cb55ff200fee63c5b160 Mon Sep 17 00:00:00 2001 From: valyo <582646+valyo@users.noreply.github.com> Date: Thu, 5 Oct 2023 11:15:23 +0200 Subject: [PATCH 003/114] created utils function which can be called by the command and the new endpoint --- dds_web/api/files.py | 10 ++++++-- dds_web/commands.py | 53 ++--------------------------------------- dds_web/utils.py | 56 ++++++++++++++++++++++++++++++++++++++++++++ 3 files changed, 66 insertions(+), 53 deletions(-) diff --git a/dds_web/api/files.py b/dds_web/api/files.py index 386994f90..56e0e568e 100644 --- a/dds_web/api/files.py +++ b/dds_web/api/files.py @@ -751,5 +751,11 @@ class UpdateFailedFiles(flask_restful.Resource): def put(self): """Run flask command with failed_delivery_log.""" - flask.current_app.logger.debug("API called") - return {"message": "File(s) info updated."} + # Verify project ID and access + project = project_schemas.ProjectRequiredSchema().load(flask.request.args) + + # Get the request json and pass it to add_uploaded_files_to_db + request_json = flask.request.get_json(silent=True) + dds_web.utils.add_uploaded_files_to_db(project, 
request_json) + + return {"message": "File(s) added to database."} diff --git a/dds_web/commands.py b/dds_web/commands.py index c9d925cb3..343cbe6f2 100644 --- a/dds_web/commands.py +++ b/dds_web/commands.py @@ -211,8 +211,7 @@ def update_uploaded_file_with_log(project, path_to_log_file): """Update file details that weren't properly uploaded to db from cli log""" import botocore from dds_web.database import models - from dds_web import db - from dds_web.api.api_s3_connector import ApiS3Connector + from dds_web import utils import json proj_in_db = models.Project.query.filter_by(public_id=project).one_or_none() @@ -226,56 +225,8 @@ def update_uploaded_file_with_log(project, path_to_log_file): with open(path_to_log_file, "r") as f: log = json.load(f) - errors = {} - files_added = [] - for file, vals in log.items(): - status = vals.get("status") - if not status or not status.get("failed_op") == "add_file_db": - continue - - with ApiS3Connector(project=proj_in_db) as s3conn: - try: - _ = s3conn.resource.meta.client.head_object( - Bucket=s3conn.project.bucket, Key=vals["path_remote"] - ) - except botocore.client.ClientError as err: - if err.response["Error"]["Code"] == "404": - errors[file] = {"error": "File not found in S3", "traceback": err.__traceback__} - else: - file_object = models.File.query.filter( - sqlalchemy.and_( - models.File.name == sqlalchemy.func.binary(file), - models.File.project_id == proj_in_db.id, - ) - ).first() - if file_object: - errors[file] = {"error": "File already in database."} - else: - new_file = models.File( - name=file, - name_in_bucket=vals["path_remote"], - subpath=vals["subpath"], - project_id=proj_in_db.id, - size_original=vals["size_raw"], - size_stored=vals["size_processed"], - compressed=not vals["compressed"], - public_key=vals["public_key"], - salt=vals["salt"], - checksum=vals["checksum"], - ) - new_version = models.Version( - size_stored=new_file.size_stored, time_uploaded=datetime.datetime.utcnow() - ) - proj_in_db.file_versions.append(new_version) - proj_in_db.files.append(new_file) - new_file.versions.append(new_version) - - db.session.add(new_file) - files_added.append(new_file) - db.session.commit() - flask.current_app.logger.info(f"Files added: {files_added}") - flask.current_app.logger.info(f"Errors while adding files: {errors}") + utils.add_uploaded_files_to_db(proj_in_db, log) @click.group(name="lost-files") diff --git a/dds_web/utils.py b/dds_web/utils.py index eb34ef5a4..0057edef2 100644 --- a/dds_web/utils.py +++ b/dds_web/utils.py @@ -749,3 +749,59 @@ def use_sto4(unit_object, project_object) -> bool: flask.current_app.logger.info(f"{project_id_logging}sto2") return False + + +def add_uploaded_files_to_db(proj_in_db, log): + from dds_web import db + from dds_web.api.api_s3_connector import ApiS3Connector + + errors = {} + files_added = [] + for file, vals in log.items(): + status = vals.get("status") + if not status or not status.get("failed_op") == "add_file_db": + continue + + with ApiS3Connector(project=proj_in_db) as s3conn: + try: + _ = s3conn.resource.meta.client.head_object( + Bucket=s3conn.project.bucket, Key=vals["path_remote"] + ) + except botocore.client.ClientError as err: + if err.response["Error"]["Code"] == "404": + errors[file] = {"error": "File not found in S3", "traceback": err.__traceback__} + else: + file_object = models.File.query.filter( + sqlalchemy.and_( + models.File.name == sqlalchemy.func.binary(file), + models.File.project_id == proj_in_db.id, + ) + ).first() + if file_object: + errors[file] = {"error": "File 
already in database."} + else: + new_file = models.File( + name=file, + name_in_bucket=vals["path_remote"], + subpath=vals["subpath"], + project_id=proj_in_db.id, + size_original=vals["size_raw"], + size_stored=vals["size_processed"], + compressed=not vals["compressed"], + public_key=vals["public_key"], + salt=vals["salt"], + checksum=vals["checksum"], + ) + new_version = models.Version( + size_stored=new_file.size_stored, time_uploaded=datetime.datetime.utcnow() + ) + proj_in_db.file_versions.append(new_version) + proj_in_db.files.append(new_file) + new_file.versions.append(new_version) + + db.session.add(new_file) + files_added.append(new_file) + db.session.commit() + + flask.current_app.logger.info(f"Files added: {files_added}") + flask.current_app.logger.info(f"Errors while adding files: {errors}") From 9364613c8d835eff828eb8ef29e2ea74fe3f0120 Mon Sep 17 00:00:00 2001 From: valyo <582646+valyo@users.noreply.github.com> Date: Thu, 5 Oct 2023 15:35:09 +0200 Subject: [PATCH 004/114] add test for the new utils function --- tests/test_utils.py | 49 ++++++++++++++++++++++++++++++++++++++++++++- 1 file changed, 48 insertions(+), 1 deletion(-) diff --git a/tests/test_utils.py b/tests/test_utils.py index bc6864db7..51dca7c83 100644 --- a/tests/test_utils.py +++ b/tests/test_utils.py @@ -1,7 +1,7 @@ import marshmallow from dds_web import utils import pytest -from unittest.mock import patch +from unittest.mock import patch, MagicMock from unittest.mock import PropertyMock from dds_web import db @@ -1510,3 +1510,50 @@ def test_use_sto4_return_true(client: flask.testing.FlaskClient): # Run function result: bool = use_sto4(unit_object=unit, project_object=project) assert result is True + + def test_add_uploaded_files_to_db(self, mock_session, mock_version, mock_query, mock_logger): + # Mock input data + proj_in_db = MagicMock() + log = { + "file1.txt": { + "status": {"failed_op": "add_file_db"}, + "path_remote": "path/to/file1.txt", + "subpath": "subpath", + "size_raw": 100, + "size_processed": 200, + "compressed": False, + "public_key": "public_key", + "salt": "salt", + "checksum": "checksum", + } + } + + # Mock S3 connection + mock_s3conn = MagicMock() + mock_resource = MagicMock() + mock_client = MagicMock() + mock_s3conn.__enter__.return_value = mock_resource + mock_resource.meta.client.head_object.return_value = None + ApiS3Connector.return_value = mock_s3conn + + # Mock database query + mock_file = MagicMock() + mock_file = mock_query.filter.return_value.first.return_value + + # Call the function + add_uploaded_files_to_db(proj_in_db, log) + + # Assert database operations + mock_query.filter.assert_called_once_with( + File.name == sqlalchemy.func.binary("file1.txt"), File.project_id == proj_in_db.id + ) + mock_file.versions.append.assert_called_once_with(mock_version.return_value) + proj_in_db.file_versions.append.assert_called_once_with(mock_version.return_value) + proj_in_db.files.append.assert_called_once_with(mock_file) + mock_session.add.assert_called_once_with(mock_file) + mock_session.commit.assert_called_once() + + # Assert logs + mock_logger.assert_any_call("vals: {'status': {'failed_op': 'add_file_db'}}") + mock_logger.assert_any_call("Files added: []") + mock_logger.assert_any_call("Errors while adding files: {}") From 5d6e1b3ee5cdb58be589f1f57d56fffefdeffe03 Mon Sep 17 00:00:00 2001 From: valyo <582646+valyo@users.noreply.github.com> Date: Fri, 6 Oct 2023 11:49:53 +0200 Subject: [PATCH 005/114] improve the test --- tests/test_utils.py | 45 
+++++++++++++++++---------------------------- 1 file changed, 17 insertions(+), 28 deletions(-) diff --git a/tests/test_utils.py b/tests/test_utils.py index 51dca7c83..0e043d0a5 100644 --- a/tests/test_utils.py +++ b/tests/test_utils.py @@ -1511,9 +1511,9 @@ def test_use_sto4_return_true(client: flask.testing.FlaskClient): result: bool = use_sto4(unit_object=unit, project_object=project) assert result is True - def test_add_uploaded_files_to_db(self, mock_session, mock_version, mock_query, mock_logger): + def test_add_uploaded_files_to_db(client: flask.testing.FlaskClient): # Mock input data - proj_in_db = MagicMock() + proj_in_db = models.Project.query.first() log = { "file1.txt": { "status": {"failed_op": "add_file_db"}, @@ -1528,32 +1528,21 @@ def test_add_uploaded_files_to_db(self, mock_session, mock_version, mock_query, } } - # Mock S3 connection - mock_s3conn = MagicMock() - mock_resource = MagicMock() - mock_client = MagicMock() - mock_s3conn.__enter__.return_value = mock_resource - mock_resource.meta.client.head_object.return_value = None - ApiS3Connector.return_value = mock_s3conn - - # Mock database query - mock_file = MagicMock() - mock_file = mock_query.filter.return_value.first.return_value - # Call the function add_uploaded_files_to_db(proj_in_db, log) - # Assert database operations - mock_query.filter.assert_called_once_with( - File.name == sqlalchemy.func.binary("file1.txt"), File.project_id == proj_in_db.id - ) - mock_file.versions.append.assert_called_once_with(mock_version.return_value) - proj_in_db.file_versions.append.assert_called_once_with(mock_version.return_value) - proj_in_db.files.append.assert_called_once_with(mock_file) - mock_session.add.assert_called_once_with(mock_file) - mock_session.commit.assert_called_once() - - # Assert logs - mock_logger.assert_any_call("vals: {'status': {'failed_op': 'add_file_db'}}") - mock_logger.assert_any_call("Files added: []") - mock_logger.assert_any_call("Errors while adding files: {}") + # check that the file is added to the database + file = models.File.query.filter_by(name="file1.txt").first() + assert file + assert file.name == "file1.txt" + assert file.name_in_bucket == "file1.txt" + + # check that the file is added to the project + assert file in proj_in_db.files + + # check that the version is added to the database + version = models.Version.query.filter_by(file_id=file.id).first() + assert version + + # check that the version is added to the project + assert version in proj_in_db.versions From c9d5a39a9db07a9962b879c3ff5a9706778f2067 Mon Sep 17 00:00:00 2001 From: valyo <582646+valyo@users.noreply.github.com> Date: Mon, 9 Oct 2023 13:37:56 +0200 Subject: [PATCH 006/114] fix test indentation --- tests/test_utils.py | 59 +++++++++++++++++++++++---------------------- 1 file changed, 30 insertions(+), 29 deletions(-) diff --git a/tests/test_utils.py b/tests/test_utils.py index 0e043d0a5..ea2a11426 100644 --- a/tests/test_utils.py +++ b/tests/test_utils.py @@ -1511,38 +1511,39 @@ def test_use_sto4_return_true(client: flask.testing.FlaskClient): result: bool = use_sto4(unit_object=unit, project_object=project) assert result is True - def test_add_uploaded_files_to_db(client: flask.testing.FlaskClient): - # Mock input data - proj_in_db = models.Project.query.first() - log = { - "file1.txt": { - "status": {"failed_op": "add_file_db"}, - "path_remote": "path/to/file1.txt", - "subpath": "subpath", - "size_raw": 100, - "size_processed": 200, - "compressed": False, - "public_key": "public_key", - "salt": "salt", - "checksum": 
"checksum", - } + +def test_add_uploaded_files_to_db(client: flask.testing.FlaskClient): + # Mock input data + proj_in_db = models.Project.query.first() + log = { + "file1.txt": { + "status": {"failed_op": "add_file_db"}, + "path_remote": "path/to/file1.txt", + "subpath": "subpath", + "size_raw": 100, + "size_processed": 200, + "compressed": False, + "public_key": "public_key", + "salt": "salt", + "checksum": "checksum", } + } - # Call the function - add_uploaded_files_to_db(proj_in_db, log) + # Call the function + add_uploaded_files_to_db(proj_in_db, log) - # check that the file is added to the database - file = models.File.query.filter_by(name="file1.txt").first() - assert file - assert file.name == "file1.txt" - assert file.name_in_bucket == "file1.txt" + # check that the file is added to the database + file = models.File.query.filter_by(name="file1.txt").first() + assert file + assert file.name == "file1.txt" + assert file.name_in_bucket == "file1.txt" - # check that the file is added to the project - assert file in proj_in_db.files + # check that the file is added to the project + assert file in proj_in_db.files - # check that the version is added to the database - version = models.Version.query.filter_by(file_id=file.id).first() - assert version + # check that the version is added to the database + version = models.Version.query.filter_by(file_id=file.id).first() + assert version - # check that the version is added to the project - assert version in proj_in_db.versions + # check that the version is added to the project + assert version in proj_in_db.versions From 4a8e28dbfc1d5594d1a0ec33aa6f1e426e7d52a2 Mon Sep 17 00:00:00 2001 From: valyo <582646+valyo@users.noreply.github.com> Date: Mon, 9 Oct 2023 13:50:51 +0200 Subject: [PATCH 007/114] fix test --- tests/test_utils.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/test_utils.py b/tests/test_utils.py index ea2a11426..578efdcec 100644 --- a/tests/test_utils.py +++ b/tests/test_utils.py @@ -1530,7 +1530,7 @@ def test_add_uploaded_files_to_db(client: flask.testing.FlaskClient): } # Call the function - add_uploaded_files_to_db(proj_in_db, log) + utils.add_uploaded_files_to_db(proj_in_db, log) # check that the file is added to the database file = models.File.query.filter_by(name="file1.txt").first() From 8151f0c624d59a08d2f6ce8bf2329b1fc1280065 Mon Sep 17 00:00:00 2001 From: valyo <582646+valyo@users.noreply.github.com> Date: Mon, 9 Oct 2023 14:13:01 +0200 Subject: [PATCH 008/114] fix the test and add one more --- tests/test_utils.py | 44 ++++++++++++++++++++++++++++++++++++++------ 1 file changed, 38 insertions(+), 6 deletions(-) diff --git a/tests/test_utils.py b/tests/test_utils.py index 578efdcec..ef3d5b6a9 100644 --- a/tests/test_utils.py +++ b/tests/test_utils.py @@ -1512,7 +1512,7 @@ def test_use_sto4_return_true(client: flask.testing.FlaskClient): assert result is True -def test_add_uploaded_files_to_db(client: flask.testing.FlaskClient): +def test_add_uploaded_files_to_db_correct_failed_op(client: flask.testing.FlaskClient): # Mock input data proj_in_db = models.Project.query.first() log = { @@ -1529,21 +1529,53 @@ def test_add_uploaded_files_to_db(client: flask.testing.FlaskClient): } } + # Mock the S3 connector and head_object method + mock_s3conn = MagicMock() + mock_s3conn.resource.meta.client.head_object.return_value = None + # Call the function - utils.add_uploaded_files_to_db(proj_in_db, log) + with patch("dds_web.api.api_s3_connector.ApiS3Connector", return_value=mock_s3conn): + 
utils.add_uploaded_files_to_db(proj_in_db, log) # check that the file is added to the database file = models.File.query.filter_by(name="file1.txt").first() assert file assert file.name == "file1.txt" - assert file.name_in_bucket == "file1.txt" + assert file.name_in_bucket == "path/to/file1.txt" # check that the file is added to the project assert file in proj_in_db.files # check that the version is added to the database - version = models.Version.query.filter_by(file_id=file.id).first() + version = models.Version.query.filter_by(active_file=file.id).first() assert version - # check that the version is added to the project - assert version in proj_in_db.versions + +def test_add_uploaded_files_to_db_other_failed_op(client: flask.testing.FlaskClient): + # Mock input data + proj_in_db = models.Project.query.first() + log = { + "file1.txt": { + "status": {"failed_op": "some_other_failed_op"}, + "path_remote": "path/to/file1.txt", + "subpath": "subpath", + "size_raw": 100, + "size_processed": 200, + "compressed": False, + "public_key": "public_key", + "salt": "salt", + "checksum": "checksum", + } + } + + # Mock the S3 connector and head_object method + mock_s3conn = MagicMock() + mock_s3conn.resource.meta.client.head_object.return_value = None + + # Call the function + with patch("dds_web.api.api_s3_connector.ApiS3Connector", return_value=mock_s3conn): + utils.add_uploaded_files_to_db(proj_in_db, log) + + # check that the file is added to the database + file = models.File.query.filter_by(name="file1.txt").first() + assert not file From c3a3b65e86fccd22035233ad66afe2852a396f93 Mon Sep 17 00:00:00 2001 From: valyo <582646+valyo@users.noreply.github.com> Date: Tue, 10 Oct 2023 17:09:16 +0200 Subject: [PATCH 009/114] correct docstring --- dds_web/api/files.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/dds_web/api/files.py b/dds_web/api/files.py index 56e0e568e..6f254ea7e 100644 --- a/dds_web/api/files.py +++ b/dds_web/api/files.py @@ -743,7 +743,7 @@ def put(self): class UpdateFailedFiles(flask_restful.Resource): - """Add files from failed_delivery_log to DB using the "update_uploaded_file_with_log" command""" + """Add files from failed_delivery_log to DB using the "add_uploaded_files_to_db" utils function""" @auth.login_required(role=["Unit Admin", "Unit Personnel", "Project Owner", "Researcher"]) @json_required From 01c6fe57bacb949f395c0a1d76229cb996d4d1d0 Mon Sep 17 00:00:00 2001 From: valyo <582646+valyo@users.noreply.github.com> Date: Tue, 10 Oct 2023 17:11:47 +0200 Subject: [PATCH 010/114] add test for the new endpoint --- tests/__init__.py | 1 + tests/test_files_new.py | 55 +++++++++++++++++++++++++++++++++++++++++ 2 files changed, 56 insertions(+) diff --git a/tests/__init__.py b/tests/__init__.py index 9f45f2447..70f545f8e 100644 --- a/tests/__init__.py +++ b/tests/__init__.py @@ -195,6 +195,7 @@ class DDSEndpoint: FILE_INFO = BASE_ENDPOINT + "/file/info" FILE_INFO_ALL = BASE_ENDPOINT + "/file/all/info" FILE_UPDATE = BASE_ENDPOINT + "/file/update" + FILE_UPDATE_FAILED = BASE_ENDPOINT + "/file/failed/update" # Project specific urls PROJECT_CREATE = BASE_ENDPOINT + "/proj/create" diff --git a/tests/test_files_new.py b/tests/test_files_new.py index 336637f05..719bb597e 100644 --- a/tests/test_files_new.py +++ b/tests/test_files_new.py @@ -909,3 +909,58 @@ def test_delete_contents_and_upload_again(client, boto3_session): .first() ) assert file_in_db + + +# Test UpdateFailedFiles endpoint + +def test_update_failed_files_success(client, boto3_session): + """Update failed 
files endpoint with valid data.""" + + # get project and verify in progress + project_1 = project_row(project_id="file_testing_project") + assert project_1 + assert project_1.current_status == "In Progress" + + # mock a dict object with failed files + failed_files = { + "file1.txt": { + "status": {"failed_op": "add_file_db"}, + "path_remote": "path/to/file1.txt", + "subpath": "subpath", + "size_raw": 100, + "size_processed": 200, + "compressed": False, + "public_key": "public_key", + "salt": "salt", + "checksum": "checksum", + }, + "file2.txt": { + "status": {"failed_op": "add_file_db"}, + "path_remote": "path/to/file2.txt", + "subpath": "subpath", + "size_raw": 100, + "size_processed": 200, + "compressed": False, + "public_key": "public_key", + "salt": "salt", + "checksum": "checksum", + }, +} + + + response = client.put( + tests.DDSEndpoint.FILE_UPDATE_FAILED, + headers=tests.UserAuth(tests.USER_CREDENTIALS["unitadmin"]).token(client), + query_string={"project": "file_testing_project", "status": "Failed"}, + json=failed_files, + ) + + assert response.status_code == http.HTTPStatus.OK + assert response.json["message"] == "File(s) added to database." + for file in failed_files: + # query the database for file + assert ( + db.session.query(models.File) + .filter(models.File.name == file) + .first() + ) \ No newline at end of file From b57b5d6ce0a6c6ac94f89b99b115c08f8a9eb262 Mon Sep 17 00:00:00 2001 From: valyo <582646+valyo@users.noreply.github.com> Date: Tue, 10 Oct 2023 17:14:29 +0200 Subject: [PATCH 011/114] black --- tests/test_files_new.py | 56 +++++++++++++++++++---------------------- 1 file changed, 26 insertions(+), 30 deletions(-) diff --git a/tests/test_files_new.py b/tests/test_files_new.py index 719bb597e..1d925aa3d 100644 --- a/tests/test_files_new.py +++ b/tests/test_files_new.py @@ -913,40 +913,40 @@ def test_delete_contents_and_upload_again(client, boto3_session): # Test UpdateFailedFiles endpoint + def test_update_failed_files_success(client, boto3_session): """Update failed files endpoint with valid data.""" - # get project and verify in progress + # get project and verify in progress project_1 = project_row(project_id="file_testing_project") assert project_1 assert project_1.current_status == "In Progress" # mock a dict object with failed files failed_files = { - "file1.txt": { - "status": {"failed_op": "add_file_db"}, - "path_remote": "path/to/file1.txt", - "subpath": "subpath", - "size_raw": 100, - "size_processed": 200, - "compressed": False, - "public_key": "public_key", - "salt": "salt", - "checksum": "checksum", - }, - "file2.txt": { - "status": {"failed_op": "add_file_db"}, - "path_remote": "path/to/file2.txt", - "subpath": "subpath", - "size_raw": 100, - "size_processed": 200, - "compressed": False, - "public_key": "public_key", - "salt": "salt", - "checksum": "checksum", - }, -} - + "file1.txt": { + "status": {"failed_op": "add_file_db"}, + "path_remote": "path/to/file1.txt", + "subpath": "subpath", + "size_raw": 100, + "size_processed": 200, + "compressed": False, + "public_key": "public_key", + "salt": "salt", + "checksum": "checksum", + }, + "file2.txt": { + "status": {"failed_op": "add_file_db"}, + "path_remote": "path/to/file2.txt", + "subpath": "subpath", + "size_raw": 100, + "size_processed": 200, + "compressed": False, + "public_key": "public_key", + "salt": "salt", + "checksum": "checksum", + }, + } response = client.put( tests.DDSEndpoint.FILE_UPDATE_FAILED, @@ -959,8 +959,4 @@ def test_update_failed_files_success(client, boto3_session): assert 
response.json["message"] == "File(s) added to database." for file in failed_files: # query the database for file - assert ( - db.session.query(models.File) - .filter(models.File.name == file) - .first() - ) \ No newline at end of file + assert db.session.query(models.File).filter(models.File.name == file).first() From 0f05b2fc15ec75e2de7fb2faaae90746d9b1206f Mon Sep 17 00:00:00 2001 From: valyo <582646+valyo@users.noreply.github.com> Date: Wed, 11 Oct 2023 10:26:53 +0200 Subject: [PATCH 012/114] refactor and add more tests --- tests/test_files_new.py | 97 ++++++++++++++++++++++++++++------------- 1 file changed, 66 insertions(+), 31 deletions(-) diff --git a/tests/test_files_new.py b/tests/test_files_new.py index 1d925aa3d..0899b6f16 100644 --- a/tests/test_files_new.py +++ b/tests/test_files_new.py @@ -17,6 +17,31 @@ "checksum": "c" * 64, } +FAILED_FILES = { + "file1.txt": { + "status": {"failed_op": "add_file_db"}, + "path_remote": "path/to/file1.txt", + "subpath": "subpath", + "size_raw": 100, + "size_processed": 200, + "compressed": False, + "public_key": "public_key", + "salt": "salt", + "checksum": "checksum", + }, + "file2.txt": { + "status": {"failed_op": "add_file_db"}, + "path_remote": "path/to/file2.txt", + "subpath": "subpath", + "size_raw": 100, + "size_processed": 200, + "compressed": False, + "public_key": "public_key", + "salt": "salt", + "checksum": "checksum", + }, +} + # TOOLS #################################################################################### TOOLS # @@ -915,48 +940,58 @@ def test_delete_contents_and_upload_again(client, boto3_session): def test_update_failed_files_success(client, boto3_session): - """Update failed files endpoint with valid data.""" + """Update failed files with valid data.""" # get project and verify in progress project_1 = project_row(project_id="file_testing_project") assert project_1 assert project_1.current_status == "In Progress" - # mock a dict object with failed files - failed_files = { - "file1.txt": { - "status": {"failed_op": "add_file_db"}, - "path_remote": "path/to/file1.txt", - "subpath": "subpath", - "size_raw": 100, - "size_processed": 200, - "compressed": False, - "public_key": "public_key", - "salt": "salt", - "checksum": "checksum", - }, - "file2.txt": { - "status": {"failed_op": "add_file_db"}, - "path_remote": "path/to/file2.txt", - "subpath": "subpath", - "size_raw": 100, - "size_processed": 200, - "compressed": False, - "public_key": "public_key", - "salt": "salt", - "checksum": "checksum", - }, - } - response = client.put( tests.DDSEndpoint.FILE_UPDATE_FAILED, headers=tests.UserAuth(tests.USER_CREDENTIALS["unitadmin"]).token(client), - query_string={"project": "file_testing_project", "status": "Failed"}, - json=failed_files, + query_string={"project": "file_testing_project"}, + json=FAILED_FILES, ) assert response.status_code == http.HTTPStatus.OK assert response.json["message"] == "File(s) added to database." 
- for file in failed_files: - # query the database for file + for file in FAILED_FILES: assert db.session.query(models.File).filter(models.File.name == file).first() + + +def test_update_failed_files_no_json(client, boto3_session): + """Update failed files without log json.""" + + # get project and verify in progress + project_1 = project_row(project_id="file_testing_project") + assert project_1 + assert project_1.current_status == "In Progress" + + response = client.put( + tests.DDSEndpoint.FILE_UPDATE_FAILED, + headers=tests.UserAuth(tests.USER_CREDENTIALS["unitadmin"]).token(client), + query_string={"project": "file_testing_project"}, + ) + + assert response.status_code == http.HTTPStatus.BAD_REQUEST + assert response.json["message"] == "Required data missing from request!" + # check that none of the files in the list exist in the database. + for file in FAILED_FILES: + assert not db.session.query(models.File).filter(models.File.name == file).first() + + +def test_update_failed_files_no_project(client, boto3_session): + """Update failed files without project.""" + + response = client.put( + tests.DDSEndpoint.FILE_UPDATE_FAILED, + headers=tests.UserAuth(tests.USER_CREDENTIALS["unitadmin"]).token(client), + json=FAILED_FILES, + ) + + assert response.status_code == http.HTTPStatus.BAD_REQUEST + assert response.json["project"]["message"] == "Project ID required." + # check that none of the files in the list exist in the database. + for file in FAILED_FILES: + assert not db.session.query(models.File).filter(models.File.name == file).first() From c23221c075f7b4ea3c52f9f8bb697ff3ddbaebba Mon Sep 17 00:00:00 2001 From: valyo <582646+valyo@users.noreply.github.com> Date: Wed, 11 Oct 2023 11:43:33 +0200 Subject: [PATCH 013/114] a commented out test that doesn't work atm --- tests/test_utils.py | 33 +++++++++++++++++++++++++++++++++ 1 file changed, 33 insertions(+) diff --git a/tests/test_utils.py b/tests/test_utils.py index ef3d5b6a9..4f4d160bd 100644 --- a/tests/test_utils.py +++ b/tests/test_utils.py @@ -1579,3 +1579,36 @@ def test_add_uploaded_files_to_db_other_failed_op(client: flask.testing.FlaskCli # check that the file is added to the database file = models.File.query.filter_by(name="file1.txt").first() assert not file + + +# def test_add_uploaded_files_to_db_file_not_found(client: flask.testing.FlaskClient): + +# from botocore.exceptions import ClientError +# # create mock project and log +# proj_in_db = models.Project.query.first() +# log = { +# "file1.txt": { +# "status": {"failed_op": "add_file_db"}, +# "path_remote": "path/to/file1.txt", +# "subpath": "subpath", +# "size_raw": 100, +# "size_processed": 200, +# "compressed": False, +# "public_key": "public_key", +# "salt": "salt", +# "checksum": "checksum", +# } +# } + +# # mock ApiS3Connector to raise exception when get_object is called +# mock_s3conn = MagicMock() +# # mock_s3conn.get_object.side_effect = ClientError({"Error": {"Code": "404"}}, "operation_name") +# mock_s3conn.resource.meta.client.head_object.side_effect = ClientError({"Error": {"Code": "404"}}, "operation_name") +# mock_api_s3_conn = MagicMock(return_value=mock_s3conn) + +# # call add_uploaded_files_to_db and check for expected errors +# with patch("dds_web.api.api_s3_connector.ApiS3Connector", mock_api_s3_conn): +# utils.add_uploaded_files_to_db(proj_in_db, log) + +# file = models.File.query.filter_by(name="file1.txt").first() +# assert not file From bbc7734620e492cd3f46a7bdca47097571bcba9a Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ina=20Od=C3=A9n=20=C3=96sterbo?= 
Date: Thu, 28 Sep 2023 14:58:44 +0200 Subject: [PATCH 014/114] comment about whitelisted IPs --- dds_web/database/models.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/dds_web/database/models.py b/dds_web/database/models.py index 104fa932d..8d27ea779 100644 --- a/dds_web/database/models.py +++ b/dds_web/database/models.py @@ -195,7 +195,7 @@ class Unit(db.Model): sto2_access = db.Column(db.String(255), unique=False, nullable=True) # unique=True later sto2_secret = db.Column(db.String(255), unique=False, nullable=True) # unique=True later - # New safespring storage + # New safespring storage - NOTE: MAKE SURE IPS ARE WHITELISTED ON UPPMAX AND OTHER SERVERS sto4_start_time = db.Column(db.DateTime(), nullable=True) sto4_endpoint = db.Column(db.String(255), unique=False, nullable=True) # unique=True later sto4_name = db.Column(db.String(255), unique=False, nullable=True) # unique=True later From 5c78404f642cd713af91d3ad432a9ef46b4ddf04 Mon Sep 17 00:00:00 2001 From: rv0lt Date: Thu, 5 Oct 2023 11:13:27 +0200 Subject: [PATCH 015/114] files --- dds_web/api/superadmin_only.py | 2 +- tests/api/test_superadmin_only.py | 14 ++++++++++---- 2 files changed, 11 insertions(+), 5 deletions(-) diff --git a/dds_web/api/superadmin_only.py b/dds_web/api/superadmin_only.py index 44fae0fa0..3c73305f1 100644 --- a/dds_web/api/superadmin_only.py +++ b/dds_web/api/superadmin_only.py @@ -164,7 +164,7 @@ def post(self): # Create email content # put motd_obj.message etc in there etc - subject: str = "DDS Important Information" + subject: str = "Important Information: Data Delivery System" body: str = flask.render_template(f"mail/motd.txt", motd=motd_obj.message) html = flask.render_template(f"mail/motd.html", motd=motd_obj.message) diff --git a/tests/api/test_superadmin_only.py b/tests/api/test_superadmin_only.py index 022c537a7..54d600058 100644 --- a/tests/api/test_superadmin_only.py +++ b/tests/api/test_superadmin_only.py @@ -20,7 +20,7 @@ import click # Own -from dds_web import db +from dds_web import db, mail from dds_web.database import models import tests from dds_web.commands import collect_stats @@ -155,9 +155,15 @@ def test_create_motd_as_superadmin_empty_message(client: flask.testing.FlaskClie def test_create_motd_as_superadmin_success(client: flask.testing.FlaskClient) -> None: """Create a new message of the day, using a Super Admin account.""" token: typing.Dict = get_token(username=users["Super Admin"], client=client) - response: werkzeug.test.WrapperTestResponse = client.post( - tests.DDSEndpoint.MOTD, headers=token, json={"message": "test"} - ) + + with mail.record_messages() as outbox: + response: werkzeug.test.WrapperTestResponse = client.post( + tests.DDSEndpoint.MOTD, headers=token, json={"message": "test"} + ) + + assert len(outbox) == 1 + assert "Important Information: Data Delivery System" in outbox[-1].subject + assert response.status_code == http.HTTPStatus.OK assert "The MOTD was successfully added to the database." 
in response.json.get("message") From d8f7dffc9fa2a7d3502b12718a75e938d84beb6c Mon Sep 17 00:00:00 2001 From: rv0lt Date: Thu, 5 Oct 2023 11:54:59 +0200 Subject: [PATCH 016/114] sprintlog and move test --- SPRINTLOG.md | 4 ++++ tests/api/test_superadmin_only.py | 17 ++++++----------- 2 files changed, 10 insertions(+), 11 deletions(-) diff --git a/SPRINTLOG.md b/SPRINTLOG.md index d965cd8b5..bdf8ee865 100644 --- a/SPRINTLOG.md +++ b/SPRINTLOG.md @@ -307,3 +307,7 @@ _Nothing merged in CLI during this sprint_ - Column `sto4_start_time` is automatically set when the create-unit command is run ([#1668])(https://scilifelab.atlassian.net/jira/software/projects/DDS/boards/13?selectedIssue=DDS-1668) - Replace expired invites when there's a new invitation attempt ([#1466](https://github.com/ScilifelabDataCentre/dds_web/pull/1466)) - New version: 2.5.1 ([#1471](https://github.com/ScilifelabDataCentre/dds_web/pull/1471)) + +# 2023-10-02 - 2023-10-13 + +- Changed email subject when motd is release to send the whole DDS name and not just the acronynm ([#1422])(https://scilifelab.atlassian.net/jira/software/projects/DDS/boards/13?selectedIssue=DDS-1422) diff --git a/tests/api/test_superadmin_only.py b/tests/api/test_superadmin_only.py index 54d600058..d06df07b0 100644 --- a/tests/api/test_superadmin_only.py +++ b/tests/api/test_superadmin_only.py @@ -155,15 +155,9 @@ def test_create_motd_as_superadmin_empty_message(client: flask.testing.FlaskClie def test_create_motd_as_superadmin_success(client: flask.testing.FlaskClient) -> None: """Create a new message of the day, using a Super Admin account.""" token: typing.Dict = get_token(username=users["Super Admin"], client=client) - - with mail.record_messages() as outbox: - response: werkzeug.test.WrapperTestResponse = client.post( - tests.DDSEndpoint.MOTD, headers=token, json={"message": "test"} - ) - - assert len(outbox) == 1 - assert "Important Information: Data Delivery System" in outbox[-1].subject - + response: werkzeug.test.WrapperTestResponse = client.post( + tests.DDSEndpoint.MOTD, headers=token, json={"message": "test"} + ) assert response.status_code == http.HTTPStatus.OK assert "The MOTD was successfully added to the database." 
in response.json.get("message") @@ -611,12 +605,13 @@ def test_send_motd_ok(client: flask.testing.FlaskClient) -> None: num_users = models.User.query.count() # Attempt request - with unittest.mock.patch.object(flask_mail.Connection, "send") as mock_mail_send: + with mail.record_messages() as outbox: response: werkzeug.test.WrapperTestResponse = client.post( tests.DDSEndpoint.MOTD_SEND, headers=token, json={"motd_id": created_motd.id} ) assert response.status_code == http.HTTPStatus.OK - assert mock_mail_send.call_count == num_users + assert len(outbox) == num_users + assert "Important Information: Data Delivery System" in outbox[-1].subject # Maintenance ###################################################################################### From 127b15fd1a8f319796befad1afc6ba32148d27ce Mon Sep 17 00:00:00 2001 From: rv0lt Date: Thu, 5 Oct 2023 12:48:12 +0200 Subject: [PATCH 017/114] last test --- tests/api/test_superadmin_only.py | 9 +++++---- 1 file changed, 5 insertions(+), 4 deletions(-) diff --git a/tests/api/test_superadmin_only.py b/tests/api/test_superadmin_only.py index d06df07b0..c3e4e44b8 100644 --- a/tests/api/test_superadmin_only.py +++ b/tests/api/test_superadmin_only.py @@ -577,13 +577,14 @@ def test_send_motd_no_primary_email(client: flask.testing.FlaskClient) -> None: assert not models.Email.query.filter_by(email=email).one_or_none() assert not models.User.query.filter_by(username=username).one().primary_email - # Attempt request - with unittest.mock.patch.object(flask_mail.Connection, "send") as mock_mail_send: + # Attempt request and catch email + with mail.record_messages() as outbox: response: werkzeug.test.WrapperTestResponse = client.post( tests.DDSEndpoint.MOTD_SEND, headers=token, json={"motd_id": created_motd.id} ) assert response.status_code == http.HTTPStatus.OK - assert mock_mail_send.call_count == num_users - 1 + assert len(outbox) == num_users - 1 + assert "Important Information: Data Delivery System" in outbox[-1].subject def test_send_motd_ok(client: flask.testing.FlaskClient) -> None: @@ -604,7 +605,7 @@ def test_send_motd_ok(client: flask.testing.FlaskClient) -> None: # Get number of users num_users = models.User.query.count() - # Attempt request + # Attempt request and catch email with mail.record_messages() as outbox: response: werkzeug.test.WrapperTestResponse = client.post( tests.DDSEndpoint.MOTD_SEND, headers=token, json={"motd_id": created_motd.id} From 8714949287c9b991992d12935c8d8db0b61f8a11 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=C3=81lvaro=20Revuelta?= <46089290+rv0lt@users.noreply.github.com> Date: Thu, 5 Oct 2023 17:42:02 +0200 Subject: [PATCH 018/114] Update SPRINTLOG.md MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Co-authored-by: Ina Odén Österbo <35953392+i-oden@users.noreply.github.com> --- SPRINTLOG.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/SPRINTLOG.md b/SPRINTLOG.md index bdf8ee865..5e2242f7b 100644 --- a/SPRINTLOG.md +++ b/SPRINTLOG.md @@ -310,4 +310,4 @@ _Nothing merged in CLI during this sprint_ # 2023-10-02 - 2023-10-13 -- Changed email subject when motd is release to send the whole DDS name and not just the acronynm ([#1422])(https://scilifelab.atlassian.net/jira/software/projects/DDS/boards/13?selectedIssue=DDS-1422) +- Use full DDS name in MOTD email subject ([#1422])(https://scilifelab.atlassian.net/jira/software/projects/DDS/boards/13?selectedIssue=DDS-1422) From 0e82606bd4d7a5c16e5af197b73f5ec2ca23c377 Mon Sep 17 00:00:00 2001 From: 
=?UTF-8?q?=C3=81lvaro=20Revuelta?= <46089290+rv0lt@users.noreply.github.com> Date: Thu, 5 Oct 2023 17:42:10 +0200 Subject: [PATCH 019/114] Update tests/api/test_superadmin_only.py MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Co-authored-by: Ina Odén Österbo <35953392+i-oden@users.noreply.github.com> --- tests/api/test_superadmin_only.py | 1 + 1 file changed, 1 insertion(+) diff --git a/tests/api/test_superadmin_only.py b/tests/api/test_superadmin_only.py index c3e4e44b8..ca1d5d428 100644 --- a/tests/api/test_superadmin_only.py +++ b/tests/api/test_superadmin_only.py @@ -585,6 +585,7 @@ def test_send_motd_no_primary_email(client: flask.testing.FlaskClient) -> None: assert response.status_code == http.HTTPStatus.OK assert len(outbox) == num_users - 1 assert "Important Information: Data Delivery System" in outbox[-1].subject + assert "incorrect subject" not in outbox[-1].subject def test_send_motd_ok(client: flask.testing.FlaskClient) -> None: From 8e29749f1683e0a523f4bd88aae5441ca5732dc6 Mon Sep 17 00:00:00 2001 From: rv0lt Date: Tue, 3 Oct 2023 10:43:44 +0200 Subject: [PATCH 020/114] new info --- dds_web/api/user.py | 4 ++++ dds_web/templates/mail/project_release.html | 16 +++++++++++----- 2 files changed, 15 insertions(+), 5 deletions(-) diff --git a/dds_web/api/user.py b/dds_web/api/user.py index d8fb964aa..35ec90405 100644 --- a/dds_web/api/user.py +++ b/dds_web/api/user.py @@ -423,6 +423,7 @@ def compose_and_send_email_to_user(userobj, mail_type, link=None, project=None): unit_email = None project_id = None + project_title = None deadline = None # Don't display unit admins or personnels name @@ -440,6 +441,7 @@ def compose_and_send_email_to_user(userobj, mail_type, link=None, project=None): elif mail_type == "project_release": subject = f"Project made available by {displayed_sender} in the SciLifeLab Data Delivery System" project_id = project.public_id + project_title = project.title deadline = project.current_deadline.astimezone(datetime.timezone.utc).strftime( "%Y-%m-%d %H:%M:%S %Z" ) @@ -470,6 +472,7 @@ def compose_and_send_email_to_user(userobj, mail_type, link=None, project=None): displayed_sender=displayed_sender, unit_email=unit_email, project_id=project_id, + project_title=project_title, deadline=deadline, ) msg.html = flask.render_template( @@ -478,6 +481,7 @@ def compose_and_send_email_to_user(userobj, mail_type, link=None, project=None): displayed_sender=displayed_sender, unit_email=unit_email, project_id=project_id, + project_title=project_title, deadline=deadline, ) diff --git a/dds_web/templates/mail/project_release.html b/dds_web/templates/mail/project_release.html index 24262a413..1a8ef6272 100644 --- a/dds_web/templates/mail/project_release.html +++ b/dds_web/templates/mail/project_release.html @@ -10,11 +10,13 @@ -

-            The project {{project_id}} is now available for your access in the SciLifeLab Data Delivery System (DDS).
-            The DDS is a system for SciLifeLab infrastructures to deliver data to researchers in a fast, secure and simple way.
+            The following project is now available for your access in the SciLifeLab Data Delivery System (DDS) and you can now download your data.
+                • Project Title: {{project_title}}
+                • DDS project ID: {{project_id}}
             {% if unit_email %} You were added to this project on behalf of {{displayed_sender}} ({{unit_email}}).
@@ -28,6 +30,10 @@
             The DDS CLI command dds data get -p {{project_id}} -a can be used to download all the files in this project to your current directory.
             Your access to this project will expire on {{deadline}}
+            What is the DDS? The DDS is a system for SciLifeLab infrastructures to deliver data to researchers in a fast, secure and simple way.

+ From 6cebad3a270173d9a7e21d28bdd4bcedda6f2ce2 Mon Sep 17 00:00:00 2001 From: rv0lt Date: Tue, 3 Oct 2023 11:31:08 +0200 Subject: [PATCH 021/114] txt templatw --- dds_web/templates/mail/project_release.txt | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/dds_web/templates/mail/project_release.txt b/dds_web/templates/mail/project_release.txt index 121d5ea6f..a7cf4774c 100644 --- a/dds_web/templates/mail/project_release.txt +++ b/dds_web/templates/mail/project_release.txt @@ -1,5 +1,6 @@ -The project {{project_id}} is now available for your access in the SciLifeLab Data Delivery System (DDS). -The DDS is a system for SciLifeLab infrastructures to deliver data to researchers in a fast, secure and simple way. +The following project is now available for your access in the SciLifeLab Data Delivery System (DDS) and you can now download your data. + - Project Title: {{project_title}} + - DDS project ID: {{project_id}} {% if unit_email %} You were added to this project on behalf of {{displayed_sender}} ({{unit_email}}). @@ -12,3 +13,6 @@ The DDS CLI command 'dds ls -p {{project_id}}' can be used to list the files in The DDS CLI command 'dds data get -p {{project_id}} -a' can be used to download all the files in this project to your current directory. Your access to this project will expire on {{deadline}} + +What is the DDS? The DDS is a system for SciLifeLab infrastructures to deliver data to researchers in a fast, secure and simple way. + From 350cd32bf8514007a6d80ca50920f40e91a8896d Mon Sep 17 00:00:00 2001 From: rv0lt Date: Wed, 4 Oct 2023 12:57:19 +0200 Subject: [PATCH 022/114] test email --- tests/api/test_project.py | 43 +++++++++++++++++++++++++++++++++++++++ 1 file changed, 43 insertions(+) diff --git a/tests/api/test_project.py b/tests/api/test_project.py index cd1d7f740..81a3a7b9a 100644 --- a/tests/api/test_project.py +++ b/tests/api/test_project.py @@ -1388,3 +1388,46 @@ def test_project_usage(module_client): # Call project_usage() for the project and check if cost is calculated correctly proj_bhours, proj_cost = UserProjects.project_usage(project=project_0) assert (proj_bhours / 1e9) * cost_gbhour == proj_cost + + +def test_email_project_release(module_client): + """Test that the email to the researches is sent when the project has been released + Function is compose_and_send_email_to_user used at project.py + """ + + # Create unit admins to allow project creation + current_unit_admins = models.UnitUser.query.filter_by(unit_id=1, is_admin=True).count() + if current_unit_admins < 3: + create_unit_admins(num_admins=2) + current_unit_admins = models.UnitUser.query.filter_by(unit_id=1, is_admin=True).count() + assert current_unit_admins >= 3 + + # Create project + response = module_client.post( + tests.DDSEndpoint.PROJECT_CREATE, + headers=tests.UserAuth(tests.USER_CREDENTIALS["unituser"]).token(module_client), + json=proj_data, + ) + assert response.status_code == http.HTTPStatus.OK + + project_id = response.json.get("project_id") + + # Release project and check email + with unittest.mock.patch.object(flask_mail.Mail) as mock_mail: + with mock_mail.record_messages() as outbox: + response = module_client.post( + tests.DDSEndpoint.PROJECT_STATUS, + headers=tests.UserAuth(tests.USER_CREDENTIALS["unitadmin"]).token(module_client), + query_string={"project": project_id}, + json={"new_status": "Available"}, + ) + assert len(outbox) == 1 + assert "Project made available by" in outbox[0].subject + + assert response.status_code == http.HTTPStatus.OK + + +# msg = 
outbox[-1] +# assert msg.subject == const.RESET_EMAIL_SUBJECT +# assert 'Reset Password' in msg.html +# assert 'Reset Password' in msg.body From f27f650957fb836e3d18932069eca2cf0a9576c3 Mon Sep 17 00:00:00 2001 From: rv0lt Date: Wed, 4 Oct 2023 13:30:48 +0200 Subject: [PATCH 023/114] changed client --- tests/api/test_project.py | 22 ++++++++++------------ 1 file changed, 10 insertions(+), 12 deletions(-) diff --git a/tests/api/test_project.py b/tests/api/test_project.py index 81a3a7b9a..fd909f3c1 100644 --- a/tests/api/test_project.py +++ b/tests/api/test_project.py @@ -1390,37 +1390,35 @@ def test_project_usage(module_client): assert (proj_bhours / 1e9) * cost_gbhour == proj_cost -def test_email_project_release(module_client): +def test_email_project_release(client): """Test that the email to the researches is sent when the project has been released Function is compose_and_send_email_to_user used at project.py """ - - # Create unit admins to allow project creation + create_unit_admins(num_admins=2) current_unit_admins = models.UnitUser.query.filter_by(unit_id=1, is_admin=True).count() - if current_unit_admins < 3: - create_unit_admins(num_admins=2) - current_unit_admins = models.UnitUser.query.filter_by(unit_id=1, is_admin=True).count() - assert current_unit_admins >= 3 + assert current_unit_admins == 3 - # Create project - response = module_client.post( + response = client.post( tests.DDSEndpoint.PROJECT_CREATE, - headers=tests.UserAuth(tests.USER_CREDENTIALS["unituser"]).token(module_client), + headers=tests.UserAuth(tests.USER_CREDENTIALS["unituser"]).token(client), json=proj_data, ) assert response.status_code == http.HTTPStatus.OK project_id = response.json.get("project_id") + assert response.status_code == http.HTTPStatus.OK + # Release project and check email with unittest.mock.patch.object(flask_mail.Mail) as mock_mail: with mock_mail.record_messages() as outbox: - response = module_client.post( + response = client.post( tests.DDSEndpoint.PROJECT_STATUS, - headers=tests.UserAuth(tests.USER_CREDENTIALS["unitadmin"]).token(module_client), + headers=tests.UserAuth(tests.USER_CREDENTIALS["unituser"]).token(client), query_string={"project": project_id}, json={"new_status": "Available"}, ) + assert len(outbox) == 1 assert "Project made available by" in outbox[0].subject From dc2c2fba34ae4e35fd2048cc0b3d5403cbfca0f7 Mon Sep 17 00:00:00 2001 From: rv0lt Date: Wed, 4 Oct 2023 15:32:52 +0200 Subject: [PATCH 024/114] added tests --- tests/api/test_project.py | 39 ++++++++++++++++++--------------------- 1 file changed, 18 insertions(+), 21 deletions(-) diff --git a/tests/api/test_project.py b/tests/api/test_project.py index fd909f3c1..27966c903 100644 --- a/tests/api/test_project.py +++ b/tests/api/test_project.py @@ -16,7 +16,7 @@ # Own import dds_web -from dds_web import db +from dds_web import auth, mail, db, basic_auth, limiter from dds_web.errors import BucketNotFoundError, DatabaseError, DeletionError import tests from tests.test_files_new import project_row, file_in_db, FIRST_NEW_FILE @@ -1390,7 +1390,7 @@ def test_project_usage(module_client): assert (proj_bhours / 1e9) * cost_gbhour == proj_cost -def test_email_project_release(client): +def test_email_project_release(client,boto3_session): """Test that the email to the researches is sent when the project has been released Function is compose_and_send_email_to_user used at project.py """ @@ -1401,31 +1401,28 @@ def test_email_project_release(client): response = client.post( tests.DDSEndpoint.PROJECT_CREATE, 
headers=tests.UserAuth(tests.USER_CREDENTIALS["unituser"]).token(client), - json=proj_data, + json=proj_data_with_existing_users, ) assert response.status_code == http.HTTPStatus.OK - project_id = response.json.get("project_id") - - assert response.status_code == http.HTTPStatus.OK + public_project_id = response.json.get("project_id") # Release project and check email - with unittest.mock.patch.object(flask_mail.Mail) as mock_mail: - with mock_mail.record_messages() as outbox: - response = client.post( - tests.DDSEndpoint.PROJECT_STATUS, - headers=tests.UserAuth(tests.USER_CREDENTIALS["unituser"]).token(client), - query_string={"project": project_id}, - json={"new_status": "Available"}, - ) - - assert len(outbox) == 1 - assert "Project made available by" in outbox[0].subject + with mail.record_messages() as outbox: + response = client.post( + tests.DDSEndpoint.PROJECT_STATUS, + headers=tests.UserAuth(tests.USER_CREDENTIALS["unituser"]).token(client), + query_string={"project": public_project_id}, + json={"new_status": "Available", "deadline": 10, "send_email": True}, + ) + assert len(outbox) == 3 + assert "Project made available by" in outbox[-1].subject + # TODO check the body of the email + # msg = outbox[-1] +# assert msg.subject == const.RESET_EMAIL_SUBJECT +# assert 'Reset Password' in msg.html +# assert 'Reset Password' in msg.body assert response.status_code == http.HTTPStatus.OK -# msg = outbox[-1] -# assert msg.subject == const.RESET_EMAIL_SUBJECT -# assert 'Reset Password' in msg.html -# assert 'Reset Password' in msg.body From d9ea65cd220d1038b98255f910d47731218a8cb3 Mon Sep 17 00:00:00 2001 From: rv0lt Date: Thu, 5 Oct 2023 09:50:27 +0200 Subject: [PATCH 025/114] tryit --- tests/api/test_project.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/tests/api/test_project.py b/tests/api/test_project.py index 27966c903..23375d5dd 100644 --- a/tests/api/test_project.py +++ b/tests/api/test_project.py @@ -16,7 +16,7 @@ # Own import dds_web -from dds_web import auth, mail, db, basic_auth, limiter +from dds_web import mail, db from dds_web.errors import BucketNotFoundError, DatabaseError, DeletionError import tests from tests.test_files_new import project_row, file_in_db, FIRST_NEW_FILE @@ -1421,7 +1421,7 @@ def test_email_project_release(client,boto3_session): # msg = outbox[-1] # assert msg.subject == const.RESET_EMAIL_SUBJECT # assert 'Reset Password' in msg.html -# assert 'Reset Password' in msg.body +# assert 'Reset Password' in msg.body -> plain text assert response.status_code == http.HTTPStatus.OK From 78d26a5235b18da4b35c33cae91ca13cb0e29620 Mon Sep 17 00:00:00 2001 From: rv0lt Date: Thu, 5 Oct 2023 09:55:43 +0200 Subject: [PATCH 026/114] black --- tests/api/test_project.py | 10 ++++------ 1 file changed, 4 insertions(+), 6 deletions(-) diff --git a/tests/api/test_project.py b/tests/api/test_project.py index 23375d5dd..1fc557fa5 100644 --- a/tests/api/test_project.py +++ b/tests/api/test_project.py @@ -1390,7 +1390,7 @@ def test_project_usage(module_client): assert (proj_bhours / 1e9) * cost_gbhour == proj_cost -def test_email_project_release(client,boto3_session): +def test_email_project_release(client, boto3_session): """Test that the email to the researches is sent when the project has been released Function is compose_and_send_email_to_user used at project.py """ @@ -1419,10 +1419,8 @@ def test_email_project_release(client,boto3_session): assert "Project made available by" in outbox[-1].subject # TODO check the body of the email # msg = outbox[-1] -# 
assert msg.subject == const.RESET_EMAIL_SUBJECT -# assert 'Reset Password' in msg.html -# assert 'Reset Password' in msg.body -> plain text + # assert msg.subject == const.RESET_EMAIL_SUBJECT + # assert 'Reset Password' in msg.html + # assert 'Reset Password' in msg.body -> plain text assert response.status_code == http.HTTPStatus.OK - - From 7b7d824cdfc0c456f8cab1699f6dbb55cad21746 Mon Sep 17 00:00:00 2001 From: rv0lt Date: Thu, 5 Oct 2023 10:23:12 +0200 Subject: [PATCH 027/114] changes to module client --- tests/api/test_project.py | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/tests/api/test_project.py b/tests/api/test_project.py index 1fc557fa5..854373c81 100644 --- a/tests/api/test_project.py +++ b/tests/api/test_project.py @@ -1390,7 +1390,7 @@ def test_project_usage(module_client): assert (proj_bhours / 1e9) * cost_gbhour == proj_cost -def test_email_project_release(client, boto3_session): +def test_email_project_release(module_client, boto3_session): """Test that the email to the researches is sent when the project has been released Function is compose_and_send_email_to_user used at project.py """ @@ -1398,9 +1398,9 @@ def test_email_project_release(client, boto3_session): current_unit_admins = models.UnitUser.query.filter_by(unit_id=1, is_admin=True).count() assert current_unit_admins == 3 - response = client.post( + response = module_client.post( tests.DDSEndpoint.PROJECT_CREATE, - headers=tests.UserAuth(tests.USER_CREDENTIALS["unituser"]).token(client), + headers=tests.UserAuth(tests.USER_CREDENTIALS["unituser"]).token(module_client), json=proj_data_with_existing_users, ) assert response.status_code == http.HTTPStatus.OK @@ -1409,9 +1409,9 @@ def test_email_project_release(client, boto3_session): # Release project and check email with mail.record_messages() as outbox: - response = client.post( + response = module_client.post( tests.DDSEndpoint.PROJECT_STATUS, - headers=tests.UserAuth(tests.USER_CREDENTIALS["unituser"]).token(client), + headers=tests.UserAuth(tests.USER_CREDENTIALS["unituser"]).token(module_client), query_string={"project": public_project_id}, json={"new_status": "Available", "deadline": 10, "send_email": True}, ) From 968c8e061988742fdb7481ed7ea4774c9e1ec0e4 Mon Sep 17 00:00:00 2001 From: rv0lt Date: Thu, 5 Oct 2023 10:57:51 +0200 Subject: [PATCH 028/114] completed test --- tests/api/test_project.py | 27 +++++++++++++++++++-------- 1 file changed, 19 insertions(+), 8 deletions(-) diff --git a/tests/api/test_project.py b/tests/api/test_project.py index 854373c81..8ef851f1d 100644 --- a/tests/api/test_project.py +++ b/tests/api/test_project.py @@ -1396,11 +1396,13 @@ def test_email_project_release(module_client, boto3_session): """ create_unit_admins(num_admins=2) current_unit_admins = models.UnitUser.query.filter_by(unit_id=1, is_admin=True).count() - assert current_unit_admins == 3 + assert current_unit_admins >= 3 + + token = tests.UserAuth(tests.USER_CREDENTIALS["unituser"]).token(module_client) response = module_client.post( tests.DDSEndpoint.PROJECT_CREATE, - headers=tests.UserAuth(tests.USER_CREDENTIALS["unituser"]).token(module_client), + headers=token, json=proj_data_with_existing_users, ) assert response.status_code == http.HTTPStatus.OK @@ -1411,16 +1413,25 @@ def test_email_project_release(module_client, boto3_session): with mail.record_messages() as outbox: response = module_client.post( tests.DDSEndpoint.PROJECT_STATUS, - headers=tests.UserAuth(tests.USER_CREDENTIALS["unituser"]).token(module_client), + headers=token, 
query_string={"project": public_project_id}, json={"new_status": "Available", "deadline": 10, "send_email": True}, ) assert len(outbox) == 3 assert "Project made available by" in outbox[-1].subject - # TODO check the body of the email - # msg = outbox[-1] - # assert msg.subject == const.RESET_EMAIL_SUBJECT - # assert 'Reset Password' in msg.html - # assert 'Reset Password' in msg.body -> plain text + + body = outbox[-1].body #plain text + html = outbox[-1].html + + project_title = proj_data_with_existing_users["title"] + + ## check plain text message + assert f"- Project Title: {project_title}" in outbox[-1].body + assert f"- DDS project ID: {public_project_id}" in outbox[-1].body + + ## check html + + assert f"
  • Project Title: {project_title}
  • " + assert f"
  • DDS project ID: {public_project_id}
  • " assert response.status_code == http.HTTPStatus.OK From 116da0b7598793fcb79318c2b51862f11ecd3302 Mon Sep 17 00:00:00 2001 From: rv0lt Date: Thu, 5 Oct 2023 11:00:16 +0200 Subject: [PATCH 029/114] black --- tests/api/test_project.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/tests/api/test_project.py b/tests/api/test_project.py index 8ef851f1d..dbb6812a1 100644 --- a/tests/api/test_project.py +++ b/tests/api/test_project.py @@ -1397,7 +1397,7 @@ def test_email_project_release(module_client, boto3_session): create_unit_admins(num_admins=2) current_unit_admins = models.UnitUser.query.filter_by(unit_id=1, is_admin=True).count() assert current_unit_admins >= 3 - + token = tests.UserAuth(tests.USER_CREDENTIALS["unituser"]).token(module_client) response = module_client.post( @@ -1419,8 +1419,8 @@ def test_email_project_release(module_client, boto3_session): ) assert len(outbox) == 3 assert "Project made available by" in outbox[-1].subject - - body = outbox[-1].body #plain text + + body = outbox[-1].body # plain text html = outbox[-1].html project_title = proj_data_with_existing_users["title"] From 4ef499428ccbd8e94fe63431af98355307bd8658 Mon Sep 17 00:00:00 2001 From: rv0lt Date: Thu, 5 Oct 2023 11:23:13 +0200 Subject: [PATCH 030/114] sprintlog and change int --- SPRINTLOG.md | 1 + tests/api/test_project.py | 10 +++++----- 2 files changed, 6 insertions(+), 5 deletions(-) diff --git a/SPRINTLOG.md b/SPRINTLOG.md index 5e2242f7b..dd116ef4e 100644 --- a/SPRINTLOG.md +++ b/SPRINTLOG.md @@ -311,3 +311,4 @@ _Nothing merged in CLI during this sprint_ # 2023-10-02 - 2023-10-13 - Use full DDS name in MOTD email subject ([#1422])(https://scilifelab.atlassian.net/jira/software/projects/DDS/boards/13?selectedIssue=DDS-1422) +- Added the project title aling with the internal project ID in the email sent when a project is released ([#1537])(https://scilifelab.atlassian.net/jira/software/projects/DDS/boards/13?selectedIssue=DDS-1537) diff --git a/tests/api/test_project.py b/tests/api/test_project.py index dbb6812a1..e798960f8 100644 --- a/tests/api/test_project.py +++ b/tests/api/test_project.py @@ -1417,7 +1417,7 @@ def test_email_project_release(module_client, boto3_session): query_string={"project": public_project_id}, json={"new_status": "Available", "deadline": 10, "send_email": True}, ) - assert len(outbox) == 3 + assert len(outbox) == 2 # Emails informing researchers assert "Project made available by" in outbox[-1].subject body = outbox[-1].body # plain text @@ -1426,12 +1426,12 @@ def test_email_project_release(module_client, boto3_session): project_title = proj_data_with_existing_users["title"] ## check plain text message - assert f"- Project Title: {project_title}" in outbox[-1].body - assert f"- DDS project ID: {public_project_id}" in outbox[-1].body + assert f"- Project Title: {project_title}" in body + assert f"- DDS project ID: {public_project_id}" in body ## check html - assert f"
  • Project Title: {project_title}
  • " - assert f"
  • DDS project ID: {public_project_id}
  • " + assert f"
  • Project Title: {project_title}
  • " in html + assert f"
  • DDS project ID: {public_project_id}
  • " in html assert response.status_code == http.HTTPStatus.OK From 8d56ad36dab8f03839d5d2ee4e91b0667629df75 Mon Sep 17 00:00:00 2001 From: rv0lt Date: Thu, 5 Oct 2023 11:24:18 +0200 Subject: [PATCH 031/114] black --- tests/api/test_project.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/api/test_project.py b/tests/api/test_project.py index e798960f8..a73cda918 100644 --- a/tests/api/test_project.py +++ b/tests/api/test_project.py @@ -1417,7 +1417,7 @@ def test_email_project_release(module_client, boto3_session): query_string={"project": public_project_id}, json={"new_status": "Available", "deadline": 10, "send_email": True}, ) - assert len(outbox) == 2 # Emails informing researchers + assert len(outbox) == 2 # Emails informing researchers assert "Project made available by" in outbox[-1].subject body = outbox[-1].body # plain text From e0d48d0559cbd221268bb1a75c84b8fce40484fc Mon Sep 17 00:00:00 2001 From: rv0lt Date: Thu, 5 Oct 2023 13:08:17 +0200 Subject: [PATCH 032/114] refactoring --- tests/api/test_project.py | 24 ++++++++++-------------- 1 file changed, 10 insertions(+), 14 deletions(-) diff --git a/tests/api/test_project.py b/tests/api/test_project.py index a73cda918..867da5ef3 100644 --- a/tests/api/test_project.py +++ b/tests/api/test_project.py @@ -1391,23 +1391,19 @@ def test_project_usage(module_client): def test_email_project_release(module_client, boto3_session): - """Test that the email to the researches is sent when the project has been released - Function is compose_and_send_email_to_user used at project.py - """ + """Test that check that the email sent to the researchers when project is released is correct""" + public_project_id = "public_project_id" + create_unit_admins(num_admins=2) current_unit_admins = models.UnitUser.query.filter_by(unit_id=1, is_admin=True).count() assert current_unit_admins >= 3 + # user to perfrom the operation token = tests.UserAuth(tests.USER_CREDENTIALS["unituser"]).token(module_client) - - response = module_client.post( - tests.DDSEndpoint.PROJECT_CREATE, - headers=token, - json=proj_data_with_existing_users, - ) - assert response.status_code == http.HTTPStatus.OK - - public_project_id = response.json.get("project_id") + # project to be released + project = models.Project.query.filter_by(public_id=public_project_id).first() + # num of researchers that will receive email + num_users = models.ProjectUsers.query.filter_by(project_id=project.id).count() # Release project and check email with mail.record_messages() as outbox: @@ -1417,13 +1413,13 @@ def test_email_project_release(module_client, boto3_session): query_string={"project": public_project_id}, json={"new_status": "Available", "deadline": 10, "send_email": True}, ) - assert len(outbox) == 2 # Emails informing researchers + assert len(outbox) == num_users # nº of Emails informing researchers assert "Project made available by" in outbox[-1].subject body = outbox[-1].body # plain text html = outbox[-1].html - project_title = proj_data_with_existing_users["title"] + project_title = project.title ## check plain text message assert f"- Project Title: {project_title}" in body From 3d25be01c8271d86b5249afb7f746e46f1553947 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=C3=81lvaro=20Revuelta?= <46089290+rv0lt@users.noreply.github.com> Date: Thu, 5 Oct 2023 17:39:28 +0200 Subject: [PATCH 033/114] Update SPRINTLOG.md MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Co-authored-by: Ina Odén Österbo 
<35953392+i-oden@users.noreply.github.com>
---
 SPRINTLOG.md | 1 +
 1 file changed, 1 insertion(+)

diff --git a/SPRINTLOG.md b/SPRINTLOG.md
index dd116ef4e..2fa6fffef 100644
--- a/SPRINTLOG.md
+++ b/SPRINTLOG.md
@@ -312,3 +312,4 @@ _Nothing merged in CLI during this sprint_
 
 - Use full DDS name in MOTD email subject ([#1422])(https://scilifelab.atlassian.net/jira/software/projects/DDS/boards/13?selectedIssue=DDS-1422)
 - Added the project title aling with the internal project ID in the email sent when a project is released ([#1537])(https://scilifelab.atlassian.net/jira/software/projects/DDS/boards/13?selectedIssue=DDS-1537)
+- Project title displayed along with the internal project ID email sent when a project is released ([#1537])(https://scilifelab.atlassian.net/jira/software/projects/DDS/boards/13?selectedIssue=DDS-1537)

From 8fc7e0d96696d54cfb81235f1fe8553f17b55a92 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?=C3=81lvaro=20Revuelta?= <46089290+rv0lt@users.noreply.github.com>
Date: Thu, 5 Oct 2023 17:39:34 +0200
Subject: [PATCH 034/114] Update dds_web/templates/mail/project_release.html
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

Co-authored-by: Ina Odén Österbo <35953392+i-oden@users.noreply.github.com>
---
 dds_web/templates/mail/project_release.html | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/dds_web/templates/mail/project_release.html b/dds_web/templates/mail/project_release.html
index 1a8ef6272..45f6ad4cd 100644
--- a/dds_web/templates/mail/project_release.html
+++ b/dds_web/templates/mail/project_release.html
@@ -11,7 +11,7 @@

    - The following project is now available for your access in the SciLifeLab Data Delivery System (DDS) and you can now download your data. + The following project is now available for your access in the SciLifeLab Data Delivery System (DDS) and you can download your data.

    <li>Project Title: {{project_title}}</li>
    <li>DDS project ID: {{project_id}}</li>
    • From 9e520233cb57fc9200e4060b2fed55ee418ae028 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=C3=81lvaro=20Revuelta?= <46089290+rv0lt@users.noreply.github.com> Date: Thu, 5 Oct 2023 17:39:46 +0200 Subject: [PATCH 035/114] Update dds_web/templates/mail/project_release.html MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Co-authored-by: Ina Odén Österbo <35953392+i-oden@users.noreply.github.com> --- dds_web/templates/mail/project_release.html | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/dds_web/templates/mail/project_release.html b/dds_web/templates/mail/project_release.html index 45f6ad4cd..1e96f384c 100644 --- a/dds_web/templates/mail/project_release.html +++ b/dds_web/templates/mail/project_release.html @@ -31,7 +31,7 @@

      Your access to this project will expire on {{deadline}}

      - What is the DDS? The DDS is a system for SciLifeLab infrastructures to deliver data to researchers in a fast, secure and simple + What is the DDS? The DDS is a system for SciLifeLab infrastructures to deliver data to researchers in a fast, secure and simple way.

From 3584f6bfc9afc34922259e4b923f3b349d61d6a8 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?=C3=81lvaro=20Revuelta?= <46089290+rv0lt@users.noreply.github.com>
Date: Fri, 6 Oct 2023 11:01:10 +0200
Subject: [PATCH 036/114] Update SPRINTLOG.md
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

Co-authored-by: Ina Odén Österbo <35953392+i-oden@users.noreply.github.com>
---
 SPRINTLOG.md | 5 ++---
 1 file changed, 2 insertions(+), 3 deletions(-)

diff --git a/SPRINTLOG.md b/SPRINTLOG.md
index 2fa6fffef..09dc78670 100644
--- a/SPRINTLOG.md
+++ b/SPRINTLOG.md
@@ -310,6 +310,5 @@ _Nothing merged in CLI during this sprint_
 
 # 2023-10-02 - 2023-10-13
 
-- Use full DDS name in MOTD email subject ([#1422])(https://scilifelab.atlassian.net/jira/software/projects/DDS/boards/13?selectedIssue=DDS-1422)
-- Added the project title aling with the internal project ID in the email sent when a project is released ([#1537])(https://scilifelab.atlassian.net/jira/software/projects/DDS/boards/13?selectedIssue=DDS-1537)
-- Project title displayed along with the internal project ID email sent when a project is released ([#1537])(https://scilifelab.atlassian.net/jira/software/projects/DDS/boards/13?selectedIssue=DDS-1537)
+- Project title displayed along with the internal project ID email sent when a project is released ([#1475](https://github.com/ScilifelabDataCentre/dds_web/pull/1475))
+- Use full DDS name in MOTD email subject ([#1477](https://github.com/ScilifelabDataCentre/dds_web/pull/1477))

From 458bc1fe788ac838afd99c1942eba41db3fe2f71 Mon Sep 17 00:00:00 2001
From: rv0lt
Date: Fri, 6 Oct 2023 12:02:37 +0200
Subject: [PATCH 037/114] new templates

---
 dds_web/templates/mail/project_release.html | 2 +-
 dds_web/templates/mail/project_release.txt  | 2 +-
 2 files changed, 2 insertions(+), 2 deletions(-)

diff --git a/dds_web/templates/mail/project_release.html b/dds_web/templates/mail/project_release.html
index 1e96f384c..4183de5fa 100644
--- a/dds_web/templates/mail/project_release.html
+++ b/dds_web/templates/mail/project_release.html
@@ -27,7 +27,7 @@

      The DDS CLI command dds ls -p {{project_id}} can be used to list the files in this project.

      - The DDS CLI command dds data get -p {{project_id}} -a can be used to download all the files in this project to your current directory.

      + The DDS CLI command dds data get -p {{project_id}} -a --verify-checksum can be used to download all the files in this project to your current directory.

      Your access to this project will expire on {{deadline}}

      diff --git a/dds_web/templates/mail/project_release.txt b/dds_web/templates/mail/project_release.txt index a7cf4774c..162f0a803 100644 --- a/dds_web/templates/mail/project_release.txt +++ b/dds_web/templates/mail/project_release.txt @@ -10,7 +10,7 @@ You were added to this project by {{displayed_sender}}. The DDS CLI command 'dds ls -p {{project_id}}' can be used to list the files in this project. -The DDS CLI command 'dds data get -p {{project_id}} -a' can be used to download all the files in this project to your current directory. +The DDS CLI command 'dds data get -p {{project_id}} -a --verify-checksum' can be used to download all the files in this project to your current directory. Your access to this project will expire on {{deadline}} From 01cbbeaed0e0090e97e8e64a1092d67258c1b2fe Mon Sep 17 00:00:00 2001 From: rv0lt Date: Fri, 6 Oct 2023 12:42:27 +0200 Subject: [PATCH 038/114] springlog --- SPRINTLOG.md | 1 + 1 file changed, 1 insertion(+) diff --git a/SPRINTLOG.md b/SPRINTLOG.md index 09dc78670..c96aac575 100644 --- a/SPRINTLOG.md +++ b/SPRINTLOG.md @@ -310,5 +310,6 @@ _Nothing merged in CLI during this sprint_ # 2023-10-02 - 2023-10-13 +- Add flag --verify-checksum to the comand in email template ([#1409])(https://scilifelab.atlassian.net/jira/software/projects/DDS/boards/13?selectedIssue=DDS-1409) - Project title displayed along with the internal project ID email sent when a project is released ([#1475](https://github.com/ScilifelabDataCentre/dds_web/pull/1475)) - Use full DDS name in MOTD email subject ([#1477](https://github.com/ScilifelabDataCentre/dds_web/pull/1477)) From 7aacc153f26e447f6e341331ca98c143eb7f2e69 Mon Sep 17 00:00:00 2001 From: rv0lt Date: Fri, 6 Oct 2023 15:27:05 +0200 Subject: [PATCH 039/114] correct sprintlog --- SPRINTLOG.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/SPRINTLOG.md b/SPRINTLOG.md index c96aac575..ee717956f 100644 --- a/SPRINTLOG.md +++ b/SPRINTLOG.md @@ -310,6 +310,6 @@ _Nothing merged in CLI during this sprint_ # 2023-10-02 - 2023-10-13 -- Add flag --verify-checksum to the comand in email template ([#1409])(https://scilifelab.atlassian.net/jira/software/projects/DDS/boards/13?selectedIssue=DDS-1409) - Project title displayed along with the internal project ID email sent when a project is released ([#1475](https://github.com/ScilifelabDataCentre/dds_web/pull/1475)) - Use full DDS name in MOTD email subject ([#1477](https://github.com/ScilifelabDataCentre/dds_web/pull/1477)) +- Add flag --verify-checksum to the comand in email template ([#1478])(https://github.com/ScilifelabDataCentre/dds_web/pull/1478) From 9fb3c129f93aedf8a7b1a6f35075d6e233f12600 Mon Sep 17 00:00:00 2001 From: rv0lt Date: Mon, 9 Oct 2023 10:59:30 +0200 Subject: [PATCH 040/114] modified files for functionality --- dds_web/templates/mail/mail_base.html | 23 ++++++++++++++++++- dds_web/templates/mail/project_release.html | 25 +++++++++++++-------- dds_web/templates/mail/project_release.txt | 18 ++++++++++----- 3 files changed, 50 insertions(+), 16 deletions(-) diff --git a/dds_web/templates/mail/mail_base.html b/dds_web/templates/mail/mail_base.html index d0dd52e7f..5fbafcd8a 100644 --- a/dds_web/templates/mail/mail_base.html +++ b/dds_web/templates/mail/mail_base.html @@ -1,6 +1,23 @@ - + + @@ -122,6 +139,10 @@ border-color: #a00202 !important; } } + mark { + background-color: pink; + color: black; +} diff --git a/dds_web/templates/mail/project_release.html b/dds_web/templates/mail/project_release.html index 4183de5fa..ae49f4b70 100644 --- 
a/dds_web/templates/mail/project_release.html +++ b/dds_web/templates/mail/project_release.html @@ -18,22 +18,29 @@

    + You were added to this project on behalf of {{displayed_sender}}. +

    +

    + To list the files in this project, run:
    + dds ls -p {{project_id}}.

    +

    + To download all the files in this project to your current directory, run:
    + dds data get -p {{project_id}} -a --verify-checksum.

    +

    + For more information (including an installation guide), see the DDS CLI documentation here: scilifelabdatacentre.github.io/dds_cli.

    +

    + {% if unit_email %} - You were added to this project on behalf of {{displayed_sender}} ({{unit_email}}). + if you experience issues, please contact the SciLifeLab unit {{displayed_sender}} at ({{unit_email}}). {% else %} - You were added to this project by {{displayed_sender}}. + if you experience issues, please contact the SciLifeLab unit {{displayed_sender}}. {% endif %}

    -

    - The DDS CLI command dds ls -p {{project_id}} can be used to list the files in this project.

    -

    - The DDS CLI command dds data get -p {{project_id}} -a --verify-checksum can be used to download all the files in this project to your current directory.

    -

    - Your access to this project will expire on {{deadline}}

    +

    + Your access to this project will expire on: {{deadline}}

    What is the DDS? The DDS is a system for SciLifeLab infrastructures to deliver data to researchers in a fast, secure and simple way.

    - diff --git a/dds_web/templates/mail/project_release.txt b/dds_web/templates/mail/project_release.txt index 162f0a803..e8d88dda9 100644 --- a/dds_web/templates/mail/project_release.txt +++ b/dds_web/templates/mail/project_release.txt @@ -2,16 +2,22 @@ The following project is now available for your access in the SciLifeLab Data De - Project Title: {{project_title}} - DDS project ID: {{project_id}} +You were added to this project on behalf of {{displayed_sender}}. + +To list the files in this project, run: + dds ls -p {{project_id}} + +To download all the files in this project to your current directory, run: + dds data get -p {{project_id}} -a --verify-checksum. + +For more information (including an installation guide), see the DDS CLI documentation here: https://scilifelabdatacentre.github.io/dds_cli/ + {% if unit_email %} -You were added to this project on behalf of {{displayed_sender}} ({{unit_email}}). +if you experience issues, please contact the SciLifeLab unit {{displayed_sender}}at ({{unit_email}}). {% else %} -You were added to this project by {{displayed_sender}}. +if you experience issues, please contact the SciLifeLab unit {{displayed_sender}}. {% endif %} -The DDS CLI command 'dds ls -p {{project_id}}' can be used to list the files in this project. - -The DDS CLI command 'dds data get -p {{project_id}} -a --verify-checksum' can be used to download all the files in this project to your current directory. - Your access to this project will expire on {{deadline}} What is the DDS? The DDS is a system for SciLifeLab infrastructures to deliver data to researchers in a fast, secure and simple way. From b143738068c93d316e195641d8d49f9ec7416831 Mon Sep 17 00:00:00 2001 From: rv0lt Date: Mon, 9 Oct 2023 11:14:54 +0200 Subject: [PATCH 041/114] new tests --- dds_web/templates/mail/project_release.html | 3 +-- tests/api/test_project.py | 9 ++++++++- 2 files changed, 9 insertions(+), 3 deletions(-) diff --git a/dds_web/templates/mail/project_release.html b/dds_web/templates/mail/project_release.html index ae49f4b70..c5ed54bf3 100644 --- a/dds_web/templates/mail/project_release.html +++ b/dds_web/templates/mail/project_release.html @@ -39,8 +39,7 @@

    Your access to this project will expire on: {{deadline}}

    - What is the DDS? The DDS is a system for SciLifeLab infrastructures to deliver data to researchers in a fast, secure and simple - way.

    + What is the DDS? The DDS is a system for SciLifeLab infrastructures to deliver data to researchers in a fast, secure and simple way.

    diff --git a/tests/api/test_project.py b/tests/api/test_project.py index 867da5ef3..1032d31e8 100644 --- a/tests/api/test_project.py +++ b/tests/api/test_project.py @@ -1424,10 +1424,17 @@ def test_email_project_release(module_client, boto3_session): ## check plain text message assert f"- Project Title: {project_title}" in body assert f"- DDS project ID: {public_project_id}" in body + assert f"dds ls -p {public_project_id}" in body + assert f"dds data get -p {public_project_id} -a --verify-checksum" in body + assert "if you experience issues, please contact the SciLifeLab unit" in body + assert "What is the DDS? The DDS is a system for SciLifeLab infrastructures to deliver data to researchers in a fast, secure and simple way" in body ## check html - assert f"
  • Project Title: {project_title}
  • " in html assert f"
  • DDS project ID: {public_project_id}
  • " in html + assert f"dds ls -p {public_project_id}" in html + assert f"dds data get -p {public_project_id} -a --verify-checksum" in html + assert "if you experience issues, please contact the SciLifeLab unit" in html + assert "What is the DDS? The DDS is a system for SciLifeLab infrastructures to deliver data to researchers in a fast, secure and simple way." in html assert response.status_code == http.HTTPStatus.OK From 150c2ffe697514cef8eb4c357f0667394e649748 Mon Sep 17 00:00:00 2001 From: rv0lt Date: Mon, 9 Oct 2023 11:17:38 +0200 Subject: [PATCH 042/114] test --- tests/api/test_project.py | 10 ++++++++-- 1 file changed, 8 insertions(+), 2 deletions(-) diff --git a/tests/api/test_project.py b/tests/api/test_project.py index 1032d31e8..89d5e1c2a 100644 --- a/tests/api/test_project.py +++ b/tests/api/test_project.py @@ -1427,7 +1427,10 @@ def test_email_project_release(module_client, boto3_session): assert f"dds ls -p {public_project_id}" in body assert f"dds data get -p {public_project_id} -a --verify-checksum" in body assert "if you experience issues, please contact the SciLifeLab unit" in body - assert "What is the DDS? The DDS is a system for SciLifeLab infrastructures to deliver data to researchers in a fast, secure and simple way" in body + assert ( + "What is the DDS? The DDS is a system for SciLifeLab infrastructures to deliver data to researchers in a fast, secure and simple way" + in body + ) ## check html assert f"
  • Project Title: {project_title}
  • " in html @@ -1435,6 +1438,9 @@ def test_email_project_release(module_client, boto3_session): assert f"dds ls -p {public_project_id}" in html assert f"dds data get -p {public_project_id} -a --verify-checksum" in html assert "if you experience issues, please contact the SciLifeLab unit" in html - assert "What is the DDS? The DDS is a system for SciLifeLab infrastructures to deliver data to researchers in a fast, secure and simple way." in html + assert ( + "What is the DDS? The DDS is a system for SciLifeLab infrastructures to deliver data to researchers in a fast, secure and simple way." + in html + ) assert response.status_code == http.HTTPStatus.OK From c4efca6346fd03e7de2b6b4fe8a28686d3b3c6d6 Mon Sep 17 00:00:00 2001 From: rv0lt Date: Mon, 9 Oct 2023 11:18:59 +0200 Subject: [PATCH 043/114] springlog --- SPRINTLOG.md | 1 + 1 file changed, 1 insertion(+) diff --git a/SPRINTLOG.md b/SPRINTLOG.md index ee717956f..32341599a 100644 --- a/SPRINTLOG.md +++ b/SPRINTLOG.md @@ -313,3 +313,4 @@ _Nothing merged in CLI during this sprint_ - Project title displayed along with the internal project ID email sent when a project is released ([#1475](https://github.com/ScilifelabDataCentre/dds_web/pull/1475)) - Use full DDS name in MOTD email subject ([#1477](https://github.com/ScilifelabDataCentre/dds_web/pull/1477)) - Add flag --verify-checksum to the comand in email template ([#1478])(https://github.com/ScilifelabDataCentre/dds_web/pull/1478) +- Improved on email layout when project release ([#1479])(https://github.com/ScilifelabDataCentre/dds_web/pull/1479) From 6f471ff65baea188d0a653b5ab11f5f370c982cf Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=C3=81lvaro=20Revuelta?= <46089290+rv0lt@users.noreply.github.com> Date: Tue, 10 Oct 2023 11:49:28 +0200 Subject: [PATCH 044/114] Update SPRINTLOG.md MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Co-authored-by: Ina Odén Österbo <35953392+i-oden@users.noreply.github.com> --- SPRINTLOG.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/SPRINTLOG.md b/SPRINTLOG.md index 32341599a..8001f5ca5 100644 --- a/SPRINTLOG.md +++ b/SPRINTLOG.md @@ -313,4 +313,4 @@ _Nothing merged in CLI during this sprint_ - Project title displayed along with the internal project ID email sent when a project is released ([#1475](https://github.com/ScilifelabDataCentre/dds_web/pull/1475)) - Use full DDS name in MOTD email subject ([#1477](https://github.com/ScilifelabDataCentre/dds_web/pull/1477)) - Add flag --verify-checksum to the comand in email template ([#1478])(https://github.com/ScilifelabDataCentre/dds_web/pull/1478) -- Improved on email layout when project release ([#1479])(https://github.com/ScilifelabDataCentre/dds_web/pull/1479) +- Improved email layout; Highlighted information and commands when project is released ([#1479])(https://github.com/ScilifelabDataCentre/dds_web/pull/1479) From d5143191ab5f7e75cef324041d79e146058de3e0 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=C3=81lvaro=20Revuelta?= <46089290+rv0lt@users.noreply.github.com> Date: Tue, 10 Oct 2023 11:49:46 +0200 Subject: [PATCH 045/114] Update dds_web/templates/mail/project_release.html MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Co-authored-by: Ina Odén Österbo <35953392+i-oden@users.noreply.github.com> --- dds_web/templates/mail/project_release.html | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/dds_web/templates/mail/project_release.html b/dds_web/templates/mail/project_release.html index 
c5ed54bf3..1c3e50346 100644 --- a/dds_web/templates/mail/project_release.html +++ b/dds_web/templates/mail/project_release.html @@ -31,7 +31,7 @@

    {% if unit_email %} - if you experience issues, please contact the SciLifeLab unit {{displayed_sender}} at ({{unit_email}}). + if you experience issues, please contact the SciLifeLab unit {{displayed_sender}} at {{unit_email}}. {% else %} if you experience issues, please contact the SciLifeLab unit {{displayed_sender}}. {% endif %} From 4cefdabd133e0f2d2a126182ebfdf8d65bc87a1f Mon Sep 17 00:00:00 2001 From: rv0lt Date: Tue, 10 Oct 2023 12:04:27 +0200 Subject: [PATCH 046/114] added feedback --- dds_web/templates/mail/mail_base.html | 2 +- dds_web/templates/mail/project_release.html | 7 ++++--- 2 files changed, 5 insertions(+), 4 deletions(-) diff --git a/dds_web/templates/mail/mail_base.html b/dds_web/templates/mail/mail_base.html index 5fbafcd8a..a9531bcb7 100644 --- a/dds_web/templates/mail/mail_base.html +++ b/dds_web/templates/mail/mail_base.html @@ -139,7 +139,7 @@ border-color: #a00202 !important; } } - mark { + code { background-color: pink; color: black; } diff --git a/dds_web/templates/mail/project_release.html b/dds_web/templates/mail/project_release.html index 1c3e50346..da99d68e9 100644 --- a/dds_web/templates/mail/project_release.html +++ b/dds_web/templates/mail/project_release.html @@ -22,10 +22,11 @@

    To list the files in this project, run:
    - dds ls -p {{project_id}}.

    + dds ls -p {{project_id}}.

    +

    To download all the files in this project to your current directory, run:
    - dds data get -p {{project_id}} -a --verify-checksum.

    + dds data get -p {{project_id}} -a --verify-checksum.

    For more information (including an installation guide), see the DDS CLI documentation here: scilifelabdatacentre.github.io/dds_cli.

    @@ -37,7 +38,7 @@ {% endif %}

    - Your access to this project will expire on: {{deadline}}

    + Your access to this project will expire on:
    {{deadline}}

    What is the DDS? The DDS is a system for SciLifeLab infrastructures to deliver data to researchers in a fast, secure and simple way.

    From 86829582a3d152da8548500fed4a84fde7cf688f Mon Sep 17 00:00:00 2001 From: rv0lt Date: Tue, 10 Oct 2023 12:07:05 +0200 Subject: [PATCH 047/114] feedback --- dds_web/templates/mail/project_release.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/dds_web/templates/mail/project_release.txt b/dds_web/templates/mail/project_release.txt index e8d88dda9..1813881e1 100644 --- a/dds_web/templates/mail/project_release.txt +++ b/dds_web/templates/mail/project_release.txt @@ -13,7 +13,7 @@ To download all the files in this project to your current directory, run: For more information (including an installation guide), see the DDS CLI documentation here: https://scilifelabdatacentre.github.io/dds_cli/ {% if unit_email %} -if you experience issues, please contact the SciLifeLab unit {{displayed_sender}}at ({{unit_email}}). +if you experience issues, please contact the SciLifeLab unit {{displayed_sender}} at ({{unit_email}}). {% else %} if you experience issues, please contact the SciLifeLab unit {{displayed_sender}}. {% endif %} From 4b555b04bfbcc6c7ca827ccb72f66e21c5a01348 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=C3=81lvaro=20Revuelta?= <46089290+rv0lt@users.noreply.github.com> Date: Tue, 10 Oct 2023 14:07:11 +0200 Subject: [PATCH 048/114] Update dds_web/templates/mail/project_release.html MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Co-authored-by: Ina Odén Österbo <35953392+i-oden@users.noreply.github.com> --- dds_web/templates/mail/project_release.html | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/dds_web/templates/mail/project_release.html b/dds_web/templates/mail/project_release.html index da99d68e9..588b4732b 100644 --- a/dds_web/templates/mail/project_release.html +++ b/dds_web/templates/mail/project_release.html @@ -18,7 +18,7 @@

    - You were added to this project on behalf of {{displayed_sender}}. + You were added to this project {% if unit_email %} on behalf of {% else %} by {% endif %} {{displayed_sender}}.

    To list the files in this project, run:
    From bf5f8dc0860c119f5bc751b89c7e2c443b09fbfe Mon Sep 17 00:00:00 2001 From: rv0lt Date: Tue, 10 Oct 2023 14:13:25 +0200 Subject: [PATCH 049/114] feedback --- dds_web/templates/mail/mail_base.html | 2 ++ dds_web/templates/mail/project_release.html | 4 ++-- 2 files changed, 4 insertions(+), 2 deletions(-) diff --git a/dds_web/templates/mail/mail_base.html b/dds_web/templates/mail/mail_base.html index a9531bcb7..816e6fbf3 100644 --- a/dds_web/templates/mail/mail_base.html +++ b/dds_web/templates/mail/mail_base.html @@ -142,6 +142,8 @@ code { background-color: pink; color: black; + font-weight: normal; + font-style: normal; } diff --git a/dds_web/templates/mail/project_release.html b/dds_web/templates/mail/project_release.html index 588b4732b..25dbfe448 100644 --- a/dds_web/templates/mail/project_release.html +++ b/dds_web/templates/mail/project_release.html @@ -22,11 +22,11 @@

    To list the files in this project, run:
    - dds ls -p {{project_id}}.

    + dds ls -p {{project_id}}


    To download all the files in this project to your current directory, run:
    - dds data get -p {{project_id}} -a --verify-checksum.

    + dds data get -p {{project_id}} -a --verify-checksum

    For more information (including an installation guide), see the DDS CLI documentation here: scilifelabdatacentre.github.io/dds_cli.

    From c49e7d6219dd35c5fa2c210baf05501f1029d048 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=C3=81lvaro=20Revuelta?= <46089290+rv0lt@users.noreply.github.com> Date: Tue, 10 Oct 2023 14:20:40 +0200 Subject: [PATCH 050/114] Update dds_web/templates/mail/project_release.html MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Co-authored-by: Ina Odén Österbo <35953392+i-oden@users.noreply.github.com> --- dds_web/templates/mail/project_release.html | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/dds_web/templates/mail/project_release.html b/dds_web/templates/mail/project_release.html index 25dbfe448..af605b7ad 100644 --- a/dds_web/templates/mail/project_release.html +++ b/dds_web/templates/mail/project_release.html @@ -22,7 +22,7 @@

    To list the files in this project, run:
    - dds ls -p {{project_id}}

    + dds ls -p {{project_id}}


    To download all the files in this project to your current directory, run:
    From e1761ec048ee89f02c84bc8ad4558b6c9a80dc49 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=C3=81lvaro=20Revuelta?= <46089290+rv0lt@users.noreply.github.com> Date: Tue, 10 Oct 2023 14:21:08 +0200 Subject: [PATCH 051/114] Update dds_web/templates/mail/project_release.html MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Co-authored-by: Ina Odén Österbo <35953392+i-oden@users.noreply.github.com> --- dds_web/templates/mail/project_release.html | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/dds_web/templates/mail/project_release.html b/dds_web/templates/mail/project_release.html index af605b7ad..307be8dd7 100644 --- a/dds_web/templates/mail/project_release.html +++ b/dds_web/templates/mail/project_release.html @@ -26,7 +26,7 @@

    To download all the files in this project to your current directory, run:
    - dds data get -p {{project_id}} -a --verify-checksum

    + dds data get -p {{project_id}} -a --verify-checksum

    For more information (including an installation guide), see the DDS CLI documentation here: scilifelabdatacentre.github.io/dds_cli.

    From 7a36d53e0f5b5c8bca7d09bdf54291b4b8e569d3 Mon Sep 17 00:00:00 2001 From: rv0lt Date: Tue, 10 Oct 2023 14:23:40 +0200 Subject: [PATCH 052/114] feedback --- dds_web/templates/mail/mail_base.html | 2 -- 1 file changed, 2 deletions(-) diff --git a/dds_web/templates/mail/mail_base.html b/dds_web/templates/mail/mail_base.html index 816e6fbf3..a9531bcb7 100644 --- a/dds_web/templates/mail/mail_base.html +++ b/dds_web/templates/mail/mail_base.html @@ -142,8 +142,6 @@ code { background-color: pink; color: black; - font-weight: normal; - font-style: normal; } From 1e0ab3f3619db58755fb045aa1b264ab62949f82 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=C3=81lvaro=20Revuelta?= <46089290+rv0lt@users.noreply.github.com> Date: Tue, 10 Oct 2023 14:34:15 +0200 Subject: [PATCH 053/114] Update dds_web/templates/mail/project_release.html MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Co-authored-by: Ina Odén Österbo <35953392+i-oden@users.noreply.github.com> --- dds_web/templates/mail/project_release.html | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/dds_web/templates/mail/project_release.html b/dds_web/templates/mail/project_release.html index 307be8dd7..701ad8b21 100644 --- a/dds_web/templates/mail/project_release.html +++ b/dds_web/templates/mail/project_release.html @@ -32,7 +32,7 @@

    {% if unit_email %} - if you experience issues, please contact the SciLifeLab unit {{displayed_sender}} at {{unit_email}}. + If you experience issues, please contact the SciLifeLab unit {{displayed_sender}} at {{unit_email}}. {% else %} if you experience issues, please contact the SciLifeLab unit {{displayed_sender}}. {% endif %} From 7724d7509b4133ca499fe2492da116599b4df065 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=C3=81lvaro=20Revuelta?= <46089290+rv0lt@users.noreply.github.com> Date: Tue, 10 Oct 2023 14:34:25 +0200 Subject: [PATCH 054/114] Update dds_web/templates/mail/project_release.html MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Co-authored-by: Ina Odén Österbo <35953392+i-oden@users.noreply.github.com> --- dds_web/templates/mail/project_release.html | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/dds_web/templates/mail/project_release.html b/dds_web/templates/mail/project_release.html index 701ad8b21..06d536664 100644 --- a/dds_web/templates/mail/project_release.html +++ b/dds_web/templates/mail/project_release.html @@ -34,7 +34,7 @@ {% if unit_email %} If you experience issues, please contact the SciLifeLab unit {{displayed_sender}} at {{unit_email}}. {% else %} - if you experience issues, please contact the SciLifeLab unit {{displayed_sender}}. + If you experience issues, please contact the SciLifeLab unit {{displayed_sender}}. {% endif %}

    From 5404755eb0d9d6b2121e5c0bb617bdf1f5cb1cf2 Mon Sep 17 00:00:00 2001 From: rv0lt Date: Tue, 10 Oct 2023 14:36:48 +0200 Subject: [PATCH 055/114] updated txt --- dds_web/templates/mail/project_release.txt | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/dds_web/templates/mail/project_release.txt b/dds_web/templates/mail/project_release.txt index 1813881e1..9b6dd9220 100644 --- a/dds_web/templates/mail/project_release.txt +++ b/dds_web/templates/mail/project_release.txt @@ -2,7 +2,7 @@ The following project is now available for your access in the SciLifeLab Data De - Project Title: {{project_title}} - DDS project ID: {{project_id}} -You were added to this project on behalf of {{displayed_sender}}. +You were added to this project {% if unit_email %} on behalf of {% else %} by {% endif %} {{displayed_sender}}. To list the files in this project, run: dds ls -p {{project_id}} @@ -12,11 +12,11 @@ To download all the files in this project to your current directory, run: For more information (including an installation guide), see the DDS CLI documentation here: https://scilifelabdatacentre.github.io/dds_cli/ -{% if unit_email %} -if you experience issues, please contact the SciLifeLab unit {{displayed_sender}} at ({{unit_email}}). -{% else %} -if you experience issues, please contact the SciLifeLab unit {{displayed_sender}}. -{% endif %} +{% if unit_email %} +If you experience issues, please contact the SciLifeLab unit {{displayed_sender}} at {{unit_email}}. +{% else %} +If you experience issues, please contact the SciLifeLab unit {{displayed_sender}}. +{% endif %} Your access to this project will expire on {{deadline}} From 8fb8f0ff7ad32194172b204b9c066a54d08780b5 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=C3=81lvaro=20Revuelta?= <46089290+rv0lt@users.noreply.github.com> Date: Tue, 10 Oct 2023 15:00:33 +0200 Subject: [PATCH 056/114] Update test_project.py --- tests/api/test_project.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/tests/api/test_project.py b/tests/api/test_project.py index 89d5e1c2a..7ac6ce020 100644 --- a/tests/api/test_project.py +++ b/tests/api/test_project.py @@ -1426,7 +1426,7 @@ def test_email_project_release(module_client, boto3_session): assert f"- DDS project ID: {public_project_id}" in body assert f"dds ls -p {public_project_id}" in body assert f"dds data get -p {public_project_id} -a --verify-checksum" in body - assert "if you experience issues, please contact the SciLifeLab unit" in body + assert "If you experience issues, please contact the SciLifeLab unit" in body assert ( "What is the DDS? The DDS is a system for SciLifeLab infrastructures to deliver data to researchers in a fast, secure and simple way" in body @@ -1437,7 +1437,7 @@ def test_email_project_release(module_client, boto3_session): assert f"

  • DDS project ID: {public_project_id}
  • " in html assert f"dds ls -p {public_project_id}" in html assert f"dds data get -p {public_project_id} -a --verify-checksum" in html - assert "if you experience issues, please contact the SciLifeLab unit" in html + assert "If you experience issues, please contact the SciLifeLab unit" in html assert ( "What is the DDS? The DDS is a system for SciLifeLab infrastructures to deliver data to researchers in a fast, secure and simple way." in html From ef0bf21dcedd113474d7086948afd23decb7b715 Mon Sep 17 00:00:00 2001 From: valyo <582646+valyo@users.noreply.github.com> Date: Wed, 11 Oct 2023 13:01:01 +0200 Subject: [PATCH 057/114] add sprintlog --- SPRINTLOG.md | 1 + 1 file changed, 1 insertion(+) diff --git a/SPRINTLOG.md b/SPRINTLOG.md index 8001f5ca5..489cd7893 100644 --- a/SPRINTLOG.md +++ b/SPRINTLOG.md @@ -314,3 +314,4 @@ _Nothing merged in CLI during this sprint_ - Use full DDS name in MOTD email subject ([#1477](https://github.com/ScilifelabDataCentre/dds_web/pull/1477)) - Add flag --verify-checksum to the comand in email template ([#1478])(https://github.com/ScilifelabDataCentre/dds_web/pull/1478) - Improved email layout; Highlighted information and commands when project is released ([#1479])(https://github.com/ScilifelabDataCentre/dds_web/pull/1479) +- Add endpoint for updating failed files in database ([#1472])(https://github.com/ScilifelabDataCentre/dds_web/pull/1472) From f7e994f9421a45f76c250e72b40c97dcc389ce0e Mon Sep 17 00:00:00 2001 From: Valentin Georgiev Date: Fri, 13 Oct 2023 09:11:35 +0200 Subject: [PATCH 058/114] Update SPRINTLOG.md MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Co-authored-by: Ina Odén Österbo <35953392+i-oden@users.noreply.github.com> --- SPRINTLOG.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/SPRINTLOG.md b/SPRINTLOG.md index 489cd7893..a52a110af 100644 --- a/SPRINTLOG.md +++ b/SPRINTLOG.md @@ -314,4 +314,4 @@ _Nothing merged in CLI during this sprint_ - Use full DDS name in MOTD email subject ([#1477](https://github.com/ScilifelabDataCentre/dds_web/pull/1477)) - Add flag --verify-checksum to the comand in email template ([#1478])(https://github.com/ScilifelabDataCentre/dds_web/pull/1478) - Improved email layout; Highlighted information and commands when project is released ([#1479])(https://github.com/ScilifelabDataCentre/dds_web/pull/1479) -- Add endpoint for updating failed files in database ([#1472])(https://github.com/ScilifelabDataCentre/dds_web/pull/1472) +- New endpoint `AddFailedFiles` for adding failed files to database ([#1472])(https://github.com/ScilifelabDataCentre/dds_web/pull/1472) From ee45c2e592528390abf3439b312b1c92e4cd6119 Mon Sep 17 00:00:00 2001 From: Valentin Georgiev Date: Fri, 13 Oct 2023 09:15:20 +0200 Subject: [PATCH 059/114] Update dds_web/api/__init__.py MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Co-authored-by: Ina Odén Österbo <35953392+i-oden@users.noreply.github.com> --- dds_web/api/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/dds_web/api/__init__.py b/dds_web/api/__init__.py index 55b3853b7..afdd0a467 100644 --- a/dds_web/api/__init__.py +++ b/dds_web/api/__init__.py @@ -50,7 +50,7 @@ def output_json(data, code, headers=None): api.add_resource(files.FileInfo, "/file/info", endpoint="file_info") api.add_resource(files.FileInfoAll, "/file/all/info", endpoint="all_file_info") api.add_resource(files.UpdateFile, "/file/update", endpoint="update_file") 
-api.add_resource(files.UpdateFailedFiles, "/file/failed/update", endpoint="update_failed_file") +api.add_resource(files.AddFailedFiles, "/file/failed/add", endpoint="add_failed_files") # Projects ############################################################################## Projects # api.add_resource(project.UserProjects, "/proj/list", endpoint="list_projects") From a19827affe8ba046787171795c51d9122a035358 Mon Sep 17 00:00:00 2001 From: Valentin Georgiev Date: Fri, 13 Oct 2023 09:15:37 +0200 Subject: [PATCH 060/114] Update dds_web/api/files.py MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Co-authored-by: Ina Odén Österbo <35953392+i-oden@users.noreply.github.com> --- dds_web/api/files.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/dds_web/api/files.py b/dds_web/api/files.py index 6f254ea7e..282f3db3f 100644 --- a/dds_web/api/files.py +++ b/dds_web/api/files.py @@ -742,7 +742,7 @@ def put(self): return {"message": "File info updated."} -class UpdateFailedFiles(flask_restful.Resource): +class AddFailedFiles(flask_restful.Resource): """Add files from failed_delivery_log to DB using the "add_uploaded_files_to_db" utils function""" @auth.login_required(role=["Unit Admin", "Unit Personnel", "Project Owner", "Researcher"]) From 2dba4911cba225a1bd16ea987d16c6699654ed09 Mon Sep 17 00:00:00 2001 From: Valentin Georgiev Date: Fri, 13 Oct 2023 09:17:05 +0200 Subject: [PATCH 061/114] Update dds_web/api/files.py MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Co-authored-by: Ina Odén Österbo <35953392+i-oden@users.noreply.github.com> --- dds_web/api/files.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/dds_web/api/files.py b/dds_web/api/files.py index 282f3db3f..4290aa46f 100644 --- a/dds_web/api/files.py +++ b/dds_web/api/files.py @@ -743,7 +743,7 @@ def put(self): class AddFailedFiles(flask_restful.Resource): - """Add files from failed_delivery_log to DB using the "add_uploaded_files_to_db" utils function""" + """Get files from log file and save to database.""" @auth.login_required(role=["Unit Admin", "Unit Personnel", "Project Owner", "Researcher"]) @json_required From 3cecb5274b7d6270d6116129b217ae24ef015959 Mon Sep 17 00:00:00 2001 From: Valentin Georgiev Date: Fri, 13 Oct 2023 09:17:30 +0200 Subject: [PATCH 062/114] Update dds_web/api/files.py MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Co-authored-by: Ina Odén Österbo <35953392+i-oden@users.noreply.github.com> --- dds_web/api/files.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/dds_web/api/files.py b/dds_web/api/files.py index 4290aa46f..c2b12d738 100644 --- a/dds_web/api/files.py +++ b/dds_web/api/files.py @@ -745,7 +745,7 @@ def put(self): class AddFailedFiles(flask_restful.Resource): """Get files from log file and save to database.""" - @auth.login_required(role=["Unit Admin", "Unit Personnel", "Project Owner", "Researcher"]) + @auth.login_required(role=["Unit Admin", "Unit Personnel"]) @json_required @handle_validation_errors def put(self): From 4a15e97af3761a0d6995f9c4a742e950056fcce8 Mon Sep 17 00:00:00 2001 From: valyo <582646+valyo@users.noreply.github.com> Date: Fri, 13 Oct 2023 10:46:47 +0200 Subject: [PATCH 063/114] adapt test for new endpoint --- tests/__init__.py | 2 +- tests/test_files_new.py | 6 +++--- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/tests/__init__.py b/tests/__init__.py index 70f545f8e..94887177b 100644 
--- a/tests/__init__.py +++ b/tests/__init__.py @@ -195,7 +195,7 @@ class DDSEndpoint: FILE_INFO = BASE_ENDPOINT + "/file/info" FILE_INFO_ALL = BASE_ENDPOINT + "/file/all/info" FILE_UPDATE = BASE_ENDPOINT + "/file/update" - FILE_UPDATE_FAILED = BASE_ENDPOINT + "/file/failed/update" + FILE_ADD_FAILED = BASE_ENDPOINT + "/file/failed/add" # Project specific urls PROJECT_CREATE = BASE_ENDPOINT + "/proj/create" diff --git a/tests/test_files_new.py b/tests/test_files_new.py index 0899b6f16..3c04fc909 100644 --- a/tests/test_files_new.py +++ b/tests/test_files_new.py @@ -948,7 +948,7 @@ def test_update_failed_files_success(client, boto3_session): assert project_1.current_status == "In Progress" response = client.put( - tests.DDSEndpoint.FILE_UPDATE_FAILED, + tests.DDSEndpoint.FILE_ADD_FAILED, headers=tests.UserAuth(tests.USER_CREDENTIALS["unitadmin"]).token(client), query_string={"project": "file_testing_project"}, json=FAILED_FILES, @@ -969,7 +969,7 @@ def test_update_failed_files_no_json(client, boto3_session): assert project_1.current_status == "In Progress" response = client.put( - tests.DDSEndpoint.FILE_UPDATE_FAILED, + tests.DDSEndpoint.FILE_ADD_FAILED, headers=tests.UserAuth(tests.USER_CREDENTIALS["unitadmin"]).token(client), query_string={"project": "file_testing_project"}, ) @@ -985,7 +985,7 @@ def test_update_failed_files_no_project(client, boto3_session): """Update failed files without project.""" response = client.put( - tests.DDSEndpoint.FILE_UPDATE_FAILED, + tests.DDSEndpoint.FILE_ADD_FAILED, headers=tests.UserAuth(tests.USER_CREDENTIALS["unitadmin"]).token(client), json=FAILED_FILES, ) From 5e89ea02918e2f68c36f37d76953f8ea1d93eaaf Mon Sep 17 00:00:00 2001 From: valyo <582646+valyo@users.noreply.github.com> Date: Fri, 13 Oct 2023 14:29:45 +0200 Subject: [PATCH 064/114] review suggestions --- dds_web/api/files.py | 2 +- dds_web/commands.py | 5 ++++- dds_web/utils.py | 10 ++++++---- tests/test_files_new.py | 2 +- 4 files changed, 12 insertions(+), 7 deletions(-) diff --git a/dds_web/api/files.py b/dds_web/api/files.py index c2b12d738..eb050a4a7 100644 --- a/dds_web/api/files.py +++ b/dds_web/api/files.py @@ -756,6 +756,6 @@ def put(self): # Get the request json and pass it to add_uploaded_files_to_db request_json = flask.request.get_json(silent=True) - dds_web.utils.add_uploaded_files_to_db(project, request_json) + files_added, errors = dds_web.utils.add_uploaded_files_to_db(project, request_json) return {"message": "File(s) added to database."} diff --git a/dds_web/commands.py b/dds_web/commands.py index 343cbe6f2..2669034e8 100644 --- a/dds_web/commands.py +++ b/dds_web/commands.py @@ -226,7 +226,10 @@ def update_uploaded_file_with_log(project, path_to_log_file): with open(path_to_log_file, "r") as f: log = json.load(f) - utils.add_uploaded_files_to_db(proj_in_db, log) + files_added, errors = utils.add_uploaded_files_to_db(proj_in_db, log) + + flask.current_app.logger.info(f"Files added: {files_added}") + flask.current_app.logger.info(f"Errors while adding files: {errors}") @click.group(name="lost-files") diff --git a/dds_web/utils.py b/dds_web/utils.py index 0057edef2..ce8f647f5 100644 --- a/dds_web/utils.py +++ b/dds_web/utils.py @@ -800,8 +800,10 @@ def add_uploaded_files_to_db(proj_in_db, log): new_file.versions.append(new_version) db.session.add(new_file) - files_added.append(new_file) - db.session.commit() + try: + db.session.commit() + files_added.append(new_file) + except (sqlalchemy.exc.IntegrityError, sqlalchemy.exc.OperationalError) as err: + errors[file] = {"error": 
str(err)} - flask.current_app.logger.info(f"Files added: {files_added}") - flask.current_app.logger.info(f"Errors while adding files: {errors}") + return files_added, errors diff --git a/tests/test_files_new.py b/tests/test_files_new.py index 3c04fc909..67c681260 100644 --- a/tests/test_files_new.py +++ b/tests/test_files_new.py @@ -936,7 +936,7 @@ def test_delete_contents_and_upload_again(client, boto3_session): assert file_in_db -# Test UpdateFailedFiles endpoint +# Test AddFailedFiles endpoint def test_update_failed_files_success(client, boto3_session): From e20b73550e8f216a728e21e877666ba466ca77b6 Mon Sep 17 00:00:00 2001 From: valyo <582646+valyo@users.noreply.github.com> Date: Fri, 13 Oct 2023 16:03:52 +0200 Subject: [PATCH 065/114] add test from review suggestion --- tests/test_utils.py | 66 ++++++++++++++++++++++++--------------------- 1 file changed, 35 insertions(+), 31 deletions(-) diff --git a/tests/test_utils.py b/tests/test_utils.py index 4f4d160bd..d984d6d48 100644 --- a/tests/test_utils.py +++ b/tests/test_utils.py @@ -1581,34 +1581,38 @@ def test_add_uploaded_files_to_db_other_failed_op(client: flask.testing.FlaskCli assert not file -# def test_add_uploaded_files_to_db_file_not_found(client: flask.testing.FlaskClient): - -# from botocore.exceptions import ClientError -# # create mock project and log -# proj_in_db = models.Project.query.first() -# log = { -# "file1.txt": { -# "status": {"failed_op": "add_file_db"}, -# "path_remote": "path/to/file1.txt", -# "subpath": "subpath", -# "size_raw": 100, -# "size_processed": 200, -# "compressed": False, -# "public_key": "public_key", -# "salt": "salt", -# "checksum": "checksum", -# } -# } - -# # mock ApiS3Connector to raise exception when get_object is called -# mock_s3conn = MagicMock() -# # mock_s3conn.get_object.side_effect = ClientError({"Error": {"Code": "404"}}, "operation_name") -# mock_s3conn.resource.meta.client.head_object.side_effect = ClientError({"Error": {"Code": "404"}}, "operation_name") -# mock_api_s3_conn = MagicMock(return_value=mock_s3conn) - -# # call add_uploaded_files_to_db and check for expected errors -# with patch("dds_web.api.api_s3_connector.ApiS3Connector", mock_api_s3_conn): -# utils.add_uploaded_files_to_db(proj_in_db, log) - -# file = models.File.query.filter_by(name="file1.txt").first() -# assert not file +def test_add_uploaded_files_to_db_file_not_found(client: flask.testing.FlaskClient, capfd): + from botocore.exceptions import ClientError + + # create mock project and log + proj_in_db = models.Project.query.first() + log = { + "file1.txt": { + "status": {"failed_op": "add_file_db"}, + "path_remote": "path/to/file1.txt", + "subpath": "subpath", + "size_raw": 100, + "size_processed": 200, + "compressed": False, + "public_key": "public_key", + "salt": "salt", + "checksum": "checksum", + } + } + + # mock ApiS3Connector + mock_api_s3_conn = MagicMock() + mock_s3conn = mock_api_s3_conn.return_value.__enter__.return_value + + # call add_uploaded_files_to_db and check for expected errors + with patch("dds_web.api.api_s3_connector.ApiS3Connector", mock_api_s3_conn): + mock_s3conn.resource.meta.client.head_object.side_effect = ClientError( + {"Error": {"Code": "404"}}, "operation_name" + ) + utils.add_uploaded_files_to_db(proj_in_db, log) + + file = models.File.query.filter_by(name="file1.txt").first() + assert not file + + # _,err = capfd.readouterr() + # assert "Errors while adding files: {'file1.txt': {'error': 'File not found in S3" in err From a988b32f08e157a1f9659d7a493bfd7184f146a8 Mon Sep 17 
00:00:00 2001 From: =?UTF-8?q?Ina=20Od=C3=A9n=20=C3=96sterbo?= Date: Mon, 16 Oct 2023 09:12:13 +0200 Subject: [PATCH 066/114] adr tool new decision --- ...lan-for-2024-objectives-and-key-results.md | 92 +++++++++++++++++++ 1 file changed, 92 insertions(+) create mode 100644 doc/architecture/decisions/0021-plan-for-2024-objectives-and-key-results.md diff --git a/doc/architecture/decisions/0021-plan-for-2024-objectives-and-key-results.md b/doc/architecture/decisions/0021-plan-for-2024-objectives-and-key-results.md new file mode 100644 index 000000000..d5e8bcb24 --- /dev/null +++ b/doc/architecture/decisions/0021-plan-for-2024-objectives-and-key-results.md @@ -0,0 +1,92 @@ +# 21. Plan for 2024: Objectives and Key Results + +Date: 2023-10-16 + +## Status + +Accepted + +## Context + +The Product Owner (PO) / Project Leader (PL) of Team Hermes - responsible for the development and maintenance of the Data Delivery System (DDS) - is going on parental leave from November 17th 2023. Due do this, and that the substitute(s) / replacement(s) has not had enough time to learn the system in order to fully take over the PO / PL responsibilities, there needs to be a plan for what the team should work on during the coming year. Starting a more formal plans for the coming year (now and in the future) is also a general improvement to the team and stakeholders, since it will allow for more tranparancy outward and help guide the team's focus. + +In order to plan for the coming year (2024, and December 2023), the team is using the tool _OKRs: Objects and Key Results_. + +> OKR [is] a collaborative goal-setting methodology used by teams and individuals to set challenging, ambitious goals with measurable results. +> +> -- [What Matters](https://www.whatmatters.com/faqs/okr-meaning-definition-example) + +> An **Objective** is what you want to accomplish. +> [Should be]: +> * Significant +> * Concrete +> * Action-oriented +> * Inspirational +> +> Can be set annually or over [a] [...] longer term. +> +> **Key Results** are how you will accomplish the _Objectives_. +> [Should be]: +> * Specific +> * Timebound +> * Aggressive yet realistic +> * Measurable +> * Verifiable +> +> Can be set quarterly and evolve as work progresses. +> +> -- [What Matters](https://www.whatmatters.com/faqs/okr-meaning-definition-example) +> +> Initiatives [are] tasks [that are] required to drive [the] progress of key results. +> +> -- [Intuit Mint](https://mint.intuit.com/blog/planning-2/okr-framework/) + +The issue motivating this decision, and any context that influences or constrains the decision. + +### Discussions regarding possible objectives + +#### "Improve user experience via UI" + +##### Alternative 1: Implement the `dds-cli` functionality in the web interface. + +* The web interface will not be a good way for uploads and downloads of huge amounts of data. The units are also saying this. +* Implementing the functionality in the web interface would require us to have a front-end developer that also has experience in cryptography. We (the PO mainly) have been asking- and waiting for this person for years, so we cannot expect that that's going to happen any time soon. The last time it was mentioned was both before and after summer 2023; since then we haven't heard or said anything regarding this. Therefore, creating the web interface that is envisioned - a complete reset of the web using some JS framework - is not possible. 
+ * Even if a front-end developer was to get hired at some point during 2024, doing a complete reset of the frontend (which houses functionality required to register, reset password, setup 2FA, etc) and building the web from scratch, while the person who has been involved in developing the majority of the code, is away, is **not** a good idea. +* If we were to work on implementing the functionality into the web interface as it is now, without having a front-end developer, we would have to continue using pure flask in the web, and that would mean that we would need to make a duplicate of practically all of the API code that exists, because: + * Calling the API endpoints from the Flask routes does not work since those endpoints require authentication tokens - the ones that the CLI uses. + * Moving the API code, creating helper functions, and changing everything in order to use the new helper functions in both the API and web should not be done when the PO is away; It's too much work and it risks the functionality in the web. We should be adding functionality to the system during 2024, **not** refactoring and risking working functionality to break. +* Duplicating the code for listing projects, users, files, adding users to projects (etc, etc) in the web means that we later on will have to redo it all and the team will have spent time on something that will not be used anyway since the upload and download by pure flask and html is not a good idea. Also, upload and download of potentially huge amounts of data via browser is as mentioned above not a good solution. + +**Because of these things, implementing the functionality in the web interface is not an option; we won't plan for working on the web interface during the coming year.** + +##### Alternative 2: Creating a Graphical User Interface (GUI) + +* The unit's _end-users_ (the users downloading the data) would benefit from this. +* The NGI units that do not need the download functionality for their end users in the GUI also do not need it in the web, they just have bioinformaticians that help the users and the bioinformaticians are familiar with terminal tools +* Other smaller units have less experienced end users and are more likely to want to download locally and to want to click buttons instead of using the terminal +* This GUI would be very simple to begin with, it could (for example) be created by `tkinter` or another simple package (google "simple GUI python" or similar, there are several). The main thing here is that we should not need to write new code for the actual DDS functionality; The idea is that the GUI would run the CLI commands when buttons are clicked etc. Buttons would run the commands, and the same things that are displayed in the CLI would be displayed in the GUI. + * We could start with the authentication, listing of the projects, their deadlines etc, users, project users, inviting users etc. + * The GUI would automatically use the authentication token. + * We could technically implement download, but we could start with displaying the commands and allow them to copy paste the commands to the terminal + * The very simple GUI can be compiled with the pyinstaller package via GitHub actions, in a similar way that the CLI is currently. The user would then download the GUI, open it and then do what they want in a simple way. +* This would therefore mean that we wouldn't duplicate code, we would just use the code that already exist. +* The GUI would not be able to use on HPC clusters etc _but neither would the browser_. 
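(Editorial illustration, not part of any commit in this series: to make the "buttons wrap CLI commands" idea above concrete, a thin prototype could look roughly like the sketch below. The `dds ls` / `dds --version` invocations, widget layout, and helper names are placeholder assumptions for illustration only, not the team's agreed design.)

```python
# Hypothetical sketch of a GUI that only shells out to the existing dds-cli,
# so no DDS functionality is re-implemented. Command names are placeholders.
import subprocess
import tkinter as tk
from tkinter import scrolledtext


def run_cli(args, output_widget):
    """Run a dds-cli command and show its stdout (or stderr) in the text widget."""
    result = subprocess.run(["dds"] + list(args), capture_output=True, text=True)
    output_widget.delete("1.0", tk.END)
    output_widget.insert(tk.END, result.stdout or result.stderr)


def main():
    root = tk.Tk()
    root.title("DDS GUI prototype (sketch)")

    output = scrolledtext.ScrolledText(root, width=100, height=30)
    output.pack(padx=10, pady=10)

    # Each button only delegates to an existing CLI command; nothing is duplicated here.
    tk.Button(root, text="List projects", command=lambda: run_cli(["ls"], output)).pack(pady=2)
    tk.Button(root, text="Show CLI version", command=lambda: run_cli(["--version"], output)).pack(pady=2)

    root.mainloop()


if __name__ == "__main__":
    main()
```

The only point of the sketch is the design choice discussed above: every button delegates to the already-tested CLI via `subprocess`, and the same output the CLI prints is what the GUI displays.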
+* Both options would currently download to the local computer. +* Both the GUI and the web interface would later maybe be able to pipe the data to a long term storage location instead of downloading locally, but the plans for how that would work exactly are not made yet, and GUI or web interface shouldn't make a difference since the functionality will anyway likely be managed and executed via the API. +* Making the GUI is still not a simple task, we would still need to make reasonable plans and not go overboard. We would start small, let the users try it out or have demos, and if this is not something that would be used, you would scrap the GUI plan and move on to a new idea or a new objective. + +**So the choices we have are:** +1. Start making a GUI **OR...** +2. Come up with a new objective. + +## Decision + +The change that we're proposing or have agreed to implement. + +## Consequences + +What becomes easier or more difficult to do and any risks introduced by the change that will need to be mitigated. + +## Footnotes + +[^fn1]: https://www.whatmatters.com/faqs/okr-meaning-definition-example From c22322e76e9131c054e4993d8dd6ee0b24ba3d10 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ina=20Od=C3=A9n=20=C3=96sterbo?= Date: Mon, 16 Oct 2023 09:28:31 +0200 Subject: [PATCH 067/114] prettier --- ...lan-for-2024-objectives-and-key-results.md | 106 +++++++++++------- 1 file changed, 67 insertions(+), 39 deletions(-) diff --git a/doc/architecture/decisions/0021-plan-for-2024-objectives-and-key-results.md b/doc/architecture/decisions/0021-plan-for-2024-objectives-and-key-results.md index d5e8bcb24..7bb2314b7 100644 --- a/doc/architecture/decisions/0021-plan-for-2024-objectives-and-key-results.md +++ b/doc/architecture/decisions/0021-plan-for-2024-objectives-and-key-results.md @@ -8,9 +8,9 @@ Accepted ## Context -The Product Owner (PO) / Project Leader (PL) of Team Hermes - responsible for the development and maintenance of the Data Delivery System (DDS) - is going on parental leave from November 17th 2023. Due do this, and that the substitute(s) / replacement(s) has not had enough time to learn the system in order to fully take over the PO / PL responsibilities, there needs to be a plan for what the team should work on during the coming year. Starting a more formal plans for the coming year (now and in the future) is also a general improvement to the team and stakeholders, since it will allow for more tranparancy outward and help guide the team's focus. +The Product Owner (PO) / Project Leader (PL) of Team Hermes - responsible for the development and maintenance of the Data Delivery System (DDS) - is going on parental leave from November 17th 2023. Due do this, and that the substitute(s) / replacement(s) has not had enough time to learn the system in order to fully take over the PO / PL responsibilities, there needs to be a plan for what the team should work on during the coming year. Starting a more formal plans for the coming year (now and in the future) is also a general improvement to the team and stakeholders, since it will allow for more tranparancy outward and help guide the team's focus. -In order to plan for the coming year (2024, and December 2023), the team is using the tool _OKRs: Objects and Key Results_. +In order to plan for the coming year (2024, and December 2023), the team is using the tool _OKRs: Objects and Key Results_. > OKR [is] a collaborative goal-setting methodology used by teams and individuals to set challenging, ambitious goals with measurable results. 
> @@ -18,27 +18,29 @@ In order to plan for the coming year (2024, and December 2023), the team is usin > An **Objective** is what you want to accomplish. > [Should be]: -> * Significant -> * Concrete -> * Action-oriented -> * Inspirational -> +> +> - Significant +> - Concrete +> - Action-oriented +> - Inspirational +> > Can be set annually or over [a] [...] longer term. -> +> > **Key Results** are how you will accomplish the _Objectives_. > [Should be]: -> * Specific -> * Timebound -> * Aggressive yet realistic -> * Measurable -> * Verifiable -> -> Can be set quarterly and evolve as work progresses. -> +> +> - Specific +> - Timebound +> - Aggressive yet realistic +> - Measurable +> - Verifiable +> +> Can be set quarterly and evolve as work progresses. +> > -- [What Matters](https://www.whatmatters.com/faqs/okr-meaning-definition-example) > -> Initiatives [are] tasks [that are] required to drive [the] progress of key results. -> +> Initiatives [are] tasks [that are] required to drive [the] progress of key results. +> > -- [Intuit Mint](https://mint.intuit.com/blog/planning-2/okr-framework/) The issue motivating this decision, and any context that influences or constrains the decision. @@ -49,39 +51,65 @@ The issue motivating this decision, and any context that influences or constrain ##### Alternative 1: Implement the `dds-cli` functionality in the web interface. -* The web interface will not be a good way for uploads and downloads of huge amounts of data. The units are also saying this. -* Implementing the functionality in the web interface would require us to have a front-end developer that also has experience in cryptography. We (the PO mainly) have been asking- and waiting for this person for years, so we cannot expect that that's going to happen any time soon. The last time it was mentioned was both before and after summer 2023; since then we haven't heard or said anything regarding this. Therefore, creating the web interface that is envisioned - a complete reset of the web using some JS framework - is not possible. - * Even if a front-end developer was to get hired at some point during 2024, doing a complete reset of the frontend (which houses functionality required to register, reset password, setup 2FA, etc) and building the web from scratch, while the person who has been involved in developing the majority of the code, is away, is **not** a good idea. -* If we were to work on implementing the functionality into the web interface as it is now, without having a front-end developer, we would have to continue using pure flask in the web, and that would mean that we would need to make a duplicate of practically all of the API code that exists, because: - * Calling the API endpoints from the Flask routes does not work since those endpoints require authentication tokens - the ones that the CLI uses. - * Moving the API code, creating helper functions, and changing everything in order to use the new helper functions in both the API and web should not be done when the PO is away; It's too much work and it risks the functionality in the web. We should be adding functionality to the system during 2024, **not** refactoring and risking working functionality to break. -* Duplicating the code for listing projects, users, files, adding users to projects (etc, etc) in the web means that we later on will have to redo it all and the team will have spent time on something that will not be used anyway since the upload and download by pure flask and html is not a good idea. 
Also, upload and download of potentially huge amounts of data via browser is as mentioned above not a good solution. +- The web interface will not be a good way for uploads and downloads of huge amounts of data. The units are also saying this. +- Implementing the functionality in the web interface would require us to have a front-end developer that also has experience in cryptography. We (the PO mainly) have been asking- and waiting for this person for years, so we cannot expect that that's going to happen any time soon. The last time it was mentioned was both before and after summer 2023; since then we haven't heard or said anything regarding this. Therefore, creating the web interface that is envisioned - a complete reset of the web using some JS framework - is not possible. + - Even if a front-end developer was to get hired at some point during 2024, doing a complete reset of the frontend (which houses functionality required to register, reset password, setup 2FA, etc) and building the web from scratch, while the person who has been involved in developing the majority of the code, is away, is **not** a good idea. +- If we were to work on implementing the functionality into the web interface as it is now, without having a front-end developer, we would have to continue using pure flask in the web, and that would mean that we would need to make a duplicate of practically all of the API code that exists, because: + - Calling the API endpoints from the Flask routes does not work since those endpoints require authentication tokens - the ones that the CLI uses. + - Moving the API code, creating helper functions, and changing everything in order to use the new helper functions in both the API and web should not be done when the PO is away; It's too much work and it risks the functionality in the web. We should be adding functionality to the system during 2024, **not** refactoring and risking working functionality to break. +- Duplicating the code for listing projects, users, files, adding users to projects (etc, etc) in the web means that we later on will have to redo it all and the team will have spent time on something that will not be used anyway since the upload and download by pure flask and html is not a good idea. Also, upload and download of potentially huge amounts of data via browser is as mentioned above not a good solution. **Because of these things, implementing the functionality in the web interface is not an option; we won't plan for working on the web interface during the coming year.** ##### Alternative 2: Creating a Graphical User Interface (GUI) -* The unit's _end-users_ (the users downloading the data) would benefit from this. -* The NGI units that do not need the download functionality for their end users in the GUI also do not need it in the web, they just have bioinformaticians that help the users and the bioinformaticians are familiar with terminal tools -* Other smaller units have less experienced end users and are more likely to want to download locally and to want to click buttons instead of using the terminal -* This GUI would be very simple to begin with, it could (for example) be created by `tkinter` or another simple package (google "simple GUI python" or similar, there are several). The main thing here is that we should not need to write new code for the actual DDS functionality; The idea is that the GUI would run the CLI commands when buttons are clicked etc. 
Buttons would run the commands, and the same things that are displayed in the CLI would be displayed in the GUI. - * We could start with the authentication, listing of the projects, their deadlines etc, users, project users, inviting users etc. - * The GUI would automatically use the authentication token. - * We could technically implement download, but we could start with displaying the commands and allow them to copy paste the commands to the terminal - * The very simple GUI can be compiled with the pyinstaller package via GitHub actions, in a similar way that the CLI is currently. The user would then download the GUI, open it and then do what they want in a simple way. -* This would therefore mean that we wouldn't duplicate code, we would just use the code that already exist. -* The GUI would not be able to use on HPC clusters etc _but neither would the browser_. -* Both options would currently download to the local computer. -* Both the GUI and the web interface would later maybe be able to pipe the data to a long term storage location instead of downloading locally, but the plans for how that would work exactly are not made yet, and GUI or web interface shouldn't make a difference since the functionality will anyway likely be managed and executed via the API. -* Making the GUI is still not a simple task, we would still need to make reasonable plans and not go overboard. We would start small, let the users try it out or have demos, and if this is not something that would be used, you would scrap the GUI plan and move on to a new idea or a new objective. +- The unit's _end-users_ (the users downloading the data) would benefit from this. +- The NGI units that do not need the download functionality for their end users in the GUI also do not need it in the web, they just have bioinformaticians that help the users and the bioinformaticians are familiar with terminal tools +- Other smaller units have less experienced end users and are more likely to want to download locally and to want to click buttons instead of using the terminal +- This GUI would be very simple to begin with, it could (for example) be created by `tkinter` or another simple package (google "simple GUI python" or similar, there are several). The main thing here is that we should not need to write new code for the actual DDS functionality; The idea is that the GUI would run the CLI commands when buttons are clicked etc. Buttons would run the commands, and the same things that are displayed in the CLI would be displayed in the GUI. + - We could start with the authentication, listing of the projects, their deadlines etc, users, project users, inviting users etc. + - The GUI would automatically use the authentication token. + - We could technically implement download, but we could start with displaying the commands and allow them to copy paste the commands to the terminal + - The very simple GUI can be compiled with the pyinstaller package via GitHub actions, in a similar way that the CLI is currently. The user would then download the GUI, open it and then do what they want in a simple way. +- This would therefore mean that we wouldn't duplicate code, we would just use the code that already exist. +- The GUI would not be able to use on HPC clusters etc _but neither would the browser_. +- Both options would currently download to the local computer. 
+- Both the GUI and the web interface would later maybe be able to pipe the data to a long term storage location instead of downloading locally, but the plans for how that would work exactly are not made yet, and GUI or web interface shouldn't make a difference since the functionality will anyway likely be managed and executed via the API. +- Making the GUI is still not a simple task, we would still need to make reasonable plans and not go overboard. We would start small, let the users try it out or have demos, and if this is not something that would be used, you would scrap the GUI plan and move on to a new idea or a new objective. **So the choices we have are:** + 1. Start making a GUI **OR...** 2. Come up with a new objective. ## Decision -The change that we're proposing or have agreed to implement. +The sections below have the following structure: + +> **Objective X: [short goal] -- [long goal]** +> +> - **Key Result 1 [COMMITTED / ASPIRATIONAL / LEARNING]:** [Description / goal] +> - **Key Result 2 [COMMITTED / ASPIRATIONAL / LEARNING]:** [Description / goal] +> - **Key Result X [COMMITTED / ASPIRATIONAL / LEARNING]:** [Description / goal] + +### Objective 1: GUI -- "Improve user experience via UI" + +- **Reduced learning curve:** Users can perform common tasks in the new GUI in less time and effort compared to the CLI (ASPIRATIONAL/LEARNING) +- **Feature adoption:** Increase the adoption rate of the GUI features by XX % within xx months of launch, by measuring the number of asset downloads (ASPIRATIONAL/LEARNING) +- **Cross-Platform Consistency:** Ensure the GUI functions consistency and seamlessly on the OSs macOS, Linux and Windows (different distributions. (ASPIRATIONAL/LEARNING) + +### Objective 2: Support -- "Optimize support" + +- **Workshop for users:** Plan for a workshop for DDS users; The workshop should be run at least once a year. (COMMITTED) +- **Support documentation:** Create or update support documentation for the top 5 most common support inquiries to facilitate self-service (COMMITTED) +- **Ticket Volume:** Reduce the number of "irrelevant" support tickets submitted by 50% percent within the next 6 months months, by implementing a chatbot in the web interface. "Irrelevant": questions that should go to units or that units should already know. (ASPIRATIONAL) + Initiatives: Create chatbot, provide answers for x, ask "was this helpful" and measure + +### Objective 3: Quality -- "Improve software quality" + +- **Resolve security alerts:** Resolve all solvable (with known fixes) critical security alerts within 7 days, high-severity alerts within 14 days and medium-severity alerts within a month. (ASPIRATIONAL) +- **Test coverage:** Increase test coverage for dds_web to 100% and dds_cli to 70% (ASPIRATIONAL) +- **API documentation:** Create / Generate documentation for the dds_web/api, which covers all endpoints. 
(COMMITTED) ## Consequences From 2e920615042da13b7434196494b1576529f58d56 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ina=20Od=C3=A9n=20=C3=96sterbo?= Date: Mon, 16 Oct 2023 10:44:32 +0200 Subject: [PATCH 068/114] typo --- .../0021-plan-for-2024-objectives-and-key-results.md | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/doc/architecture/decisions/0021-plan-for-2024-objectives-and-key-results.md b/doc/architecture/decisions/0021-plan-for-2024-objectives-and-key-results.md index 7bb2314b7..4a73f0830 100644 --- a/doc/architecture/decisions/0021-plan-for-2024-objectives-and-key-results.md +++ b/doc/architecture/decisions/0021-plan-for-2024-objectives-and-key-results.md @@ -8,9 +8,9 @@ Accepted ## Context -The Product Owner (PO) / Project Leader (PL) of Team Hermes - responsible for the development and maintenance of the Data Delivery System (DDS) - is going on parental leave from November 17th 2023. Due do this, and that the substitute(s) / replacement(s) has not had enough time to learn the system in order to fully take over the PO / PL responsibilities, there needs to be a plan for what the team should work on during the coming year. Starting a more formal plans for the coming year (now and in the future) is also a general improvement to the team and stakeholders, since it will allow for more tranparancy outward and help guide the team's focus. +The Product Owner (PO) / Project Leader (PL) of Team Hermes - responsible for the development and maintenance of the Data Delivery System (DDS) - is going on parental leave from November 17th 2023. Due do this, and that the substitute(s) / replacement(s) has not had enough time to learn the system in order to fully take over the PO / PL responsibilities, there needs to be a plan for what the team should work on during the coming year. Starting a more formal plans for the coming year (now and in the future) is also a general improvement to the team and stakeholders, since it will allow for more tranparency outward and help guide the team's focus. -In order to plan for the coming year (2024, and December 2023), the team is using the tool _OKRs: Objects and Key Results_. +In order to plan for the coming year (2024, and December 2023), the team is using the tool / method _OKRs: Objects and Key Results_. > OKR [is] a collaborative goal-setting methodology used by teams and individuals to set challenging, ambitious goals with measurable results. 
> From d6fc10d3d50a2813cd56ad15f3b40077990b9adb Mon Sep 17 00:00:00 2001 From: valyo <582646+valyo@users.noreply.github.com> Date: Mon, 16 Oct 2023 13:52:45 +0200 Subject: [PATCH 069/114] fix the test --- tests/test_utils.py | 17 +++++++++++------ 1 file changed, 11 insertions(+), 6 deletions(-) diff --git a/tests/test_utils.py b/tests/test_utils.py index d984d6d48..da56a8ea9 100644 --- a/tests/test_utils.py +++ b/tests/test_utils.py @@ -1513,6 +1513,7 @@ def test_use_sto4_return_true(client: flask.testing.FlaskClient): def test_add_uploaded_files_to_db_correct_failed_op(client: flask.testing.FlaskClient): + """Test calling the function with correct "failed_op".""" # Mock input data proj_in_db = models.Project.query.first() log = { @@ -1552,7 +1553,8 @@ def test_add_uploaded_files_to_db_correct_failed_op(client: flask.testing.FlaskC def test_add_uploaded_files_to_db_other_failed_op(client: flask.testing.FlaskClient): - # Mock input data + """Test calling the function with "failed_op" other than "add_file_db".""" + # Prepare input data proj_in_db = models.Project.query.first() log = { "file1.txt": { @@ -1582,9 +1584,10 @@ def test_add_uploaded_files_to_db_other_failed_op(client: flask.testing.FlaskCli def test_add_uploaded_files_to_db_file_not_found(client: flask.testing.FlaskClient, capfd): + """Test the return values of the function when file is not found on S3.""" from botocore.exceptions import ClientError - # create mock project and log + # Prepare input data proj_in_db = models.Project.query.first() log = { "file1.txt": { @@ -1604,15 +1607,17 @@ def test_add_uploaded_files_to_db_file_not_found(client: flask.testing.FlaskClie mock_api_s3_conn = MagicMock() mock_s3conn = mock_api_s3_conn.return_value.__enter__.return_value - # call add_uploaded_files_to_db and check for expected errors + # call add_uploaded_files_to_db with patch("dds_web.api.api_s3_connector.ApiS3Connector", mock_api_s3_conn): mock_s3conn.resource.meta.client.head_object.side_effect = ClientError( {"Error": {"Code": "404"}}, "operation_name" ) - utils.add_uploaded_files_to_db(proj_in_db, log) + files_added, errors = utils.add_uploaded_files_to_db(proj_in_db, log) + # check that the file is not added to the database file = models.File.query.filter_by(name="file1.txt").first() assert not file - # _,err = capfd.readouterr() - # assert "Errors while adding files: {'file1.txt': {'error': 'File not found in S3" in err + # check that the error is returned and files_added is empty + assert files_added == [] + assert "File not found in S3" in errors["file1.txt"]["error"] From 9432f5e058b2ec130930c90498b6e5c4039a361e Mon Sep 17 00:00:00 2001 From: valyo <582646+valyo@users.noreply.github.com> Date: Mon, 16 Oct 2023 14:49:19 +0200 Subject: [PATCH 070/114] more tests fixes --- dds_web/utils.py | 1 + tests/test_utils.py | 15 +++++++++++++-- 2 files changed, 14 insertions(+), 2 deletions(-) diff --git a/dds_web/utils.py b/dds_web/utils.py index ce8f647f5..2515e29cc 100644 --- a/dds_web/utils.py +++ b/dds_web/utils.py @@ -760,6 +760,7 @@ def add_uploaded_files_to_db(proj_in_db, log): for file, vals in log.items(): status = vals.get("status") if not status or not status.get("failed_op") == "add_file_db": + errors[file] = {"error": "Incorrect 'failed_op'."} continue with ApiS3Connector(project=proj_in_db) as s3conn: diff --git a/tests/test_utils.py b/tests/test_utils.py index da56a8ea9..3a19bbf38 100644 --- a/tests/test_utils.py +++ b/tests/test_utils.py @@ -1536,7 +1536,7 @@ def test_add_uploaded_files_to_db_correct_failed_op(client: 
flask.testing.FlaskC # Call the function with patch("dds_web.api.api_s3_connector.ApiS3Connector", return_value=mock_s3conn): - utils.add_uploaded_files_to_db(proj_in_db, log) + files_added, errors = utils.add_uploaded_files_to_db(proj_in_db, log) # check that the file is added to the database file = models.File.query.filter_by(name="file1.txt").first() @@ -1551,6 +1551,10 @@ def test_add_uploaded_files_to_db_correct_failed_op(client: flask.testing.FlaskC version = models.Version.query.filter_by(active_file=file.id).first() assert version + # check the return values + assert file in files_added + assert errors == {} + def test_add_uploaded_files_to_db_other_failed_op(client: flask.testing.FlaskClient): """Test calling the function with "failed_op" other than "add_file_db".""" @@ -1576,12 +1580,18 @@ def test_add_uploaded_files_to_db_other_failed_op(client: flask.testing.FlaskCli # Call the function with patch("dds_web.api.api_s3_connector.ApiS3Connector", return_value=mock_s3conn): - utils.add_uploaded_files_to_db(proj_in_db, log) + files_added, errors = utils.add_uploaded_files_to_db(proj_in_db, log) # check that the file is added to the database file = models.File.query.filter_by(name="file1.txt").first() assert not file + # check that the error is returned and files_added is empty + assert file not in files_added + assert files_added == [] + print(errors) + assert "Incorrect 'failed_op'." in errors["file1.txt"]["error"] + def test_add_uploaded_files_to_db_file_not_found(client: flask.testing.FlaskClient, capfd): """Test the return values of the function when file is not found on S3.""" @@ -1619,5 +1629,6 @@ def test_add_uploaded_files_to_db_file_not_found(client: flask.testing.FlaskClie assert not file # check that the error is returned and files_added is empty + assert file not in files_added assert files_added == [] assert "File not found in S3" in errors["file1.txt"]["error"] From 4c2f2ab8e926af5517c2dac89ce052a9da37b65b Mon Sep 17 00:00:00 2001 From: valyo <582646+valyo@users.noreply.github.com> Date: Tue, 17 Oct 2023 16:10:08 +0200 Subject: [PATCH 071/114] review suggestions --- dds_web/api/files.py | 2 +- dds_web/utils.py | 42 ++++++++++++++++++++++-------------------- 2 files changed, 23 insertions(+), 21 deletions(-) diff --git a/dds_web/api/files.py b/dds_web/api/files.py index eb050a4a7..2a5dddb68 100644 --- a/dds_web/api/files.py +++ b/dds_web/api/files.py @@ -758,4 +758,4 @@ def put(self): request_json = flask.request.get_json(silent=True) files_added, errors = dds_web.utils.add_uploaded_files_to_db(project, request_json) - return {"message": "File(s) added to database."} + return {"files_added": [file.name for file in files_added], "message": errors} diff --git a/dds_web/utils.py b/dds_web/utils.py index 2515e29cc..c8a838925 100644 --- a/dds_web/utils.py +++ b/dds_web/utils.py @@ -781,30 +781,32 @@ def add_uploaded_files_to_db(proj_in_db, log): if file_object: errors[file] = {"error": "File already in database."} else: - new_file = models.File( - name=file, - name_in_bucket=vals["path_remote"], - subpath=vals["subpath"], - project_id=proj_in_db.id, - size_original=vals["size_raw"], - size_stored=vals["size_processed"], - compressed=not vals["compressed"], - public_key=vals["public_key"], - salt=vals["salt"], - checksum=vals["checksum"], - ) - new_version = models.Version( - size_stored=new_file.size_stored, time_uploaded=datetime.datetime.utcnow() - ) - proj_in_db.file_versions.append(new_version) - proj_in_db.files.append(new_file) - new_file.versions.append(new_version) - 
- db.session.add(new_file) try: + new_file = models.File( + name=file, + name_in_bucket=vals["path_remote"], + subpath=vals["subpath"], + project_id=proj_in_db.id, + size_original=vals["size_raw"], + size_stored=vals["size_processed"], + compressed=not vals["compressed"], + public_key=vals["public_key"], + salt=vals["salt"], + checksum=vals["checksum"], + ) + new_version = models.Version( + size_stored=new_file.size_stored, + time_uploaded=datetime.datetime.utcnow(), + ) + proj_in_db.file_versions.append(new_version) + proj_in_db.files.append(new_file) + new_file.versions.append(new_version) + + db.session.add(new_file) db.session.commit() files_added.append(new_file) except (sqlalchemy.exc.IntegrityError, sqlalchemy.exc.OperationalError) as err: errors[file] = {"error": str(err)} + db.session.rollback() return files_added, errors From 9365f4412c42bbf2ce9756121f60a97daffa08ec Mon Sep 17 00:00:00 2001 From: valyo <582646+valyo@users.noreply.github.com> Date: Tue, 17 Oct 2023 16:14:43 +0200 Subject: [PATCH 072/114] change try block --- dds_web/utils.py | 55 ++++++++++++++++++++++++------------------------ 1 file changed, 27 insertions(+), 28 deletions(-) diff --git a/dds_web/utils.py b/dds_web/utils.py index c8a838925..10b4f8d3d 100644 --- a/dds_web/utils.py +++ b/dds_web/utils.py @@ -771,7 +771,7 @@ def add_uploaded_files_to_db(proj_in_db, log): except botocore.client.ClientError as err: if err.response["Error"]["Code"] == "404": errors[file] = {"error": "File not found in S3", "traceback": err.__traceback__} - else: + try: file_object = models.File.query.filter( sqlalchemy.and_( models.File.name == sqlalchemy.func.binary(file), @@ -781,32 +781,31 @@ def add_uploaded_files_to_db(proj_in_db, log): if file_object: errors[file] = {"error": "File already in database."} else: - try: - new_file = models.File( - name=file, - name_in_bucket=vals["path_remote"], - subpath=vals["subpath"], - project_id=proj_in_db.id, - size_original=vals["size_raw"], - size_stored=vals["size_processed"], - compressed=not vals["compressed"], - public_key=vals["public_key"], - salt=vals["salt"], - checksum=vals["checksum"], - ) - new_version = models.Version( - size_stored=new_file.size_stored, - time_uploaded=datetime.datetime.utcnow(), - ) - proj_in_db.file_versions.append(new_version) - proj_in_db.files.append(new_file) - new_file.versions.append(new_version) - - db.session.add(new_file) - db.session.commit() - files_added.append(new_file) - except (sqlalchemy.exc.IntegrityError, sqlalchemy.exc.OperationalError) as err: - errors[file] = {"error": str(err)} - db.session.rollback() + new_file = models.File( + name=file, + name_in_bucket=vals["path_remote"], + subpath=vals["subpath"], + project_id=proj_in_db.id, + size_original=vals["size_raw"], + size_stored=vals["size_processed"], + compressed=not vals["compressed"], + public_key=vals["public_key"], + salt=vals["salt"], + checksum=vals["checksum"], + ) + new_version = models.Version( + size_stored=new_file.size_stored, + time_uploaded=datetime.datetime.utcnow(), + ) + proj_in_db.file_versions.append(new_version) + proj_in_db.files.append(new_file) + new_file.versions.append(new_version) + + db.session.add(new_file) + db.session.commit() + files_added.append(new_file) + except (sqlalchemy.exc.IntegrityError, sqlalchemy.exc.OperationalError) as err: + errors[file] = {"error": str(err)} + db.session.rollback() return files_added, errors From 123a566743c603d0cfc62846e98df2837ca6c6ba Mon Sep 17 00:00:00 2001 From: valyo <582646+valyo@users.noreply.github.com> Date: Wed, 
18 Oct 2023 11:00:20 +0200 Subject: [PATCH 073/114] change try block and fix test --- dds_web/utils.py | 73 +++++++++++++++++++++-------------------- tests/test_files_new.py | 3 +- 2 files changed, 39 insertions(+), 37 deletions(-) diff --git a/dds_web/utils.py b/dds_web/utils.py index 10b4f8d3d..30503e149 100644 --- a/dds_web/utils.py +++ b/dds_web/utils.py @@ -771,41 +771,42 @@ def add_uploaded_files_to_db(proj_in_db, log): except botocore.client.ClientError as err: if err.response["Error"]["Code"] == "404": errors[file] = {"error": "File not found in S3", "traceback": err.__traceback__} - try: - file_object = models.File.query.filter( - sqlalchemy.and_( - models.File.name == sqlalchemy.func.binary(file), - models.File.project_id == proj_in_db.id, - ) - ).first() - if file_object: - errors[file] = {"error": "File already in database."} - else: - new_file = models.File( - name=file, - name_in_bucket=vals["path_remote"], - subpath=vals["subpath"], - project_id=proj_in_db.id, - size_original=vals["size_raw"], - size_stored=vals["size_processed"], - compressed=not vals["compressed"], - public_key=vals["public_key"], - salt=vals["salt"], - checksum=vals["checksum"], - ) - new_version = models.Version( - size_stored=new_file.size_stored, - time_uploaded=datetime.datetime.utcnow(), - ) - proj_in_db.file_versions.append(new_version) - proj_in_db.files.append(new_file) - new_file.versions.append(new_version) - - db.session.add(new_file) - db.session.commit() - files_added.append(new_file) - except (sqlalchemy.exc.IntegrityError, sqlalchemy.exc.OperationalError) as err: - errors[file] = {"error": str(err)} - db.session.rollback() + else: + try: + file_object = models.File.query.filter( + sqlalchemy.and_( + models.File.name == sqlalchemy.func.binary(file), + models.File.project_id == proj_in_db.id, + ) + ).first() + if file_object: + errors[file] = {"error": "File already in database."} + else: + new_file = models.File( + name=file, + name_in_bucket=vals["path_remote"], + subpath=vals["subpath"], + project_id=proj_in_db.id, + size_original=vals["size_raw"], + size_stored=vals["size_processed"], + compressed=not vals["compressed"], + public_key=vals["public_key"], + salt=vals["salt"], + checksum=vals["checksum"], + ) + new_version = models.Version( + size_stored=new_file.size_stored, + time_uploaded=datetime.datetime.utcnow(), + ) + proj_in_db.file_versions.append(new_version) + proj_in_db.files.append(new_file) + new_file.versions.append(new_version) + + db.session.add(new_file) + db.session.commit() + files_added.append(new_file) + except (sqlalchemy.exc.IntegrityError, sqlalchemy.exc.OperationalError) as err: + errors[file] = {"error": str(err)} + db.session.rollback() return files_added, errors diff --git a/tests/test_files_new.py b/tests/test_files_new.py index 67c681260..591ec3492 100644 --- a/tests/test_files_new.py +++ b/tests/test_files_new.py @@ -955,9 +955,10 @@ def test_update_failed_files_success(client, boto3_session): ) assert response.status_code == http.HTTPStatus.OK - assert response.json["message"] == "File(s) added to database." 
+ assert response.json["message"] == {} for file in FAILED_FILES: assert db.session.query(models.File).filter(models.File.name == file).first() + assert file in response.json["files_added"] def test_update_failed_files_no_json(client, boto3_session): From 60d328a639d4502293d0540039fbf5225745cc36 Mon Sep 17 00:00:00 2001 From: valyo <582646+valyo@users.noreply.github.com> Date: Wed, 18 Oct 2023 14:55:09 +0200 Subject: [PATCH 074/114] more tests --- tests/test_utils.py | 82 ++++++++++++++++++++++++++++++++++++++++++++- 1 file changed, 81 insertions(+), 1 deletion(-) diff --git a/tests/test_utils.py b/tests/test_utils.py index 3a19bbf38..f9f2005de 100644 --- a/tests/test_utils.py +++ b/tests/test_utils.py @@ -1593,7 +1593,7 @@ def test_add_uploaded_files_to_db_other_failed_op(client: flask.testing.FlaskCli assert "Incorrect 'failed_op'." in errors["file1.txt"]["error"] -def test_add_uploaded_files_to_db_file_not_found(client: flask.testing.FlaskClient, capfd): +def test_add_uploaded_files_to_db_file_not_found(client: flask.testing.FlaskClient): """Test the return values of the function when file is not found on S3.""" from botocore.exceptions import ClientError @@ -1632,3 +1632,83 @@ def test_add_uploaded_files_to_db_file_not_found(client: flask.testing.FlaskClie assert file not in files_added assert files_added == [] assert "File not found in S3" in errors["file1.txt"]["error"] + + +def test_add_uploaded_files_to_db_file_already_in_db(client: flask.testing.FlaskClient): + """Function should return error if file is already in the database.""" + + # get a project and an existing file from this project + proj_in_db = models.Project.query.first() + file_in_db = proj_in_db.files[0] + assert file_in_db + + log = { + file_in_db.name: { + "status": {"failed_op": "add_file_db"}, + "path_remote": "path/to/file1.txt", + "subpath": "subpath", + "size_raw": 100, + "size_processed": 200, + "compressed": False, + "public_key": "public_key", + "salt": "salt", + "checksum": "checksum", + } + } + + # Mock the S3 connector and head_object method + mock_s3conn = MagicMock() + mock_s3conn.resource.meta.client.head_object.return_value = None + + # Call the function + with patch("dds_web.api.api_s3_connector.ApiS3Connector", return_value=mock_s3conn): + files_added, errors = utils.add_uploaded_files_to_db(proj_in_db, log) + + assert files_added == [] + assert file_in_db.name in errors + assert "File already in database." 
in errors[file_in_db.name]["error"] + + +def test_add_uploaded_files_to_db_sql_error(client: flask.testing.FlaskClient): + """Test the return values of the function when sqlalchemy error occurs.""" + import sqlalchemy.exc + from dds_web import db + + # Prepare input data + proj_in_db = models.Project.query.first() + log = { + "file1.txt": { + "status": {"failed_op": "add_file_db"}, + "path_remote": "path/to/file1.txt", + "subpath": "subpath", + "size_raw": 100, + "size_processed": 200, + "compressed": False, + "public_key": "public_key", + "salt": "salt", + "checksum": "checksum", + } + } + + # mock ApiS3Connector + mock_s3conn = MagicMock() + mock_s3conn.resource.meta.client.head_object.return_value = None + + # mock db.session.commit + db_session_commit_mock = MagicMock( + side_effect=sqlalchemy.exc.OperationalError("OperationalError", "test", "sqlalchemy") + ) + + # call add_uploaded_files_to_db + with patch("dds_web.api.api_s3_connector.ApiS3Connector", return_value=mock_s3conn): + with patch("dds_web.db.session.commit", db_session_commit_mock): + files_added, errors = utils.add_uploaded_files_to_db(proj_in_db, log) + + # check that the file is not added to the database + file = models.File.query.filter_by(name="file1.txt").first() + assert not file + + # check that the error is returned and files_added is empty + assert file not in files_added + assert files_added == [] + assert "OperationalError" in errors["file1.txt"]["error"] From 1ae4107c8bc641542cb56ef6def7c5d72b6a503b Mon Sep 17 00:00:00 2001 From: valyo <582646+valyo@users.noreply.github.com> Date: Wed, 18 Oct 2023 16:39:23 +0200 Subject: [PATCH 075/114] one more test --- tests/test_utils.py | 39 ++++++++++++++++++++++++++++++++++++++- 1 file changed, 38 insertions(+), 1 deletion(-) diff --git a/tests/test_utils.py b/tests/test_utils.py index f9f2005de..f0a132485 100644 --- a/tests/test_utils.py +++ b/tests/test_utils.py @@ -1435,6 +1435,43 @@ def test_list_lost_files_in_project_overlap( ) +def test_list_lost_files_in_project_sql_error( + client: flask.testing.FlaskClient, boto3_session, capfd +): + """Verify proper behaviour when sql OperationalError occurs.""" + # Imports + from dds_web.utils import list_lost_files_in_project + from sqlalchemy.exc import OperationalError + + # Get project + project = models.Project.query.first() + assert project + + # Mock files in s3 + boto3_session.Bucket(project.bucket).objects.all = mock_items_in_bucket + # Get created testfiles + fake_files_in_bucket = mock_items_in_bucket() + + # mock db.session.commit + files_name_in_bucket_mock = PropertyMock( + side_effect=sqlalchemy.exc.OperationalError("OperationalError", "test", "sqlalchemy") + ) + + # Run listing + with patch("dds_web.database.models.Project.files", files_name_in_bucket_mock): + try: + in_db_but_not_in_s3, in_s3_but_not_in_db = list_lost_files_in_project( + project=project, s3_resource=boto3_session + ) + except OperationalError as e: + print(f"OperationalError occurred: {e}") + + # Get logging output + out, err = capfd.readouterr() + assert "OperationalError occurred" in out + assert "Unable to connect to db" in err + + # use_sto4 @@ -1589,7 +1626,7 @@ def test_add_uploaded_files_to_db_other_failed_op(client: flask.testing.FlaskCli # check that the error is returned and files_added is empty assert file not in files_added assert files_added == [] - print(errors) + assert "Incorrect 'failed_op'." 
in errors["file1.txt"]["error"] From ec3ac7ff3d643c4edb26d95bce7999e0d4b0b828 Mon Sep 17 00:00:00 2001 From: valyo <582646+valyo@users.noreply.github.com> Date: Tue, 24 Oct 2023 16:06:30 +0200 Subject: [PATCH 076/114] handling of --overwrite --- dds_web/utils.py | 119 ++++++++++++++++++++++++++++++++++++++++++-- tests/test_utils.py | 66 ++++++++++++------------ 2 files changed, 149 insertions(+), 36 deletions(-) diff --git a/dds_web/utils.py b/dds_web/utils.py index 30503e149..ce1576a31 100644 --- a/dds_web/utils.py +++ b/dds_web/utils.py @@ -751,18 +751,37 @@ def use_sto4(unit_object, project_object) -> bool: return False -def add_uploaded_files_to_db(proj_in_db, log): +def add_uploaded_files_to_db(proj_in_db, log: typing.Dict): + """ + Adds uploaded files to the database. + + Args: + proj_in_db (dds_web.models.Project): The project to add the files to. + log (typing.Dict): A dictionary containing information about the uploaded files. + + Returns: + A tuple containing a list of files that were successfully added to the database, + and a dictionary containing any errors that occurred while + adding the files. + """ + # Import necessary modules and initialize variables from dds_web import db from dds_web.api.api_s3_connector import ApiS3Connector errors = {} files_added = [] + + # Loop through each file in the log for file, vals in log.items(): status = vals.get("status") + overwrite = vals.get("overwrite") + + # Check if the file was successfully uploaded if not status or not status.get("failed_op") == "add_file_db": errors[file] = {"error": "Incorrect 'failed_op'."} continue + # Connect to S3 and check if the file exists with ApiS3Connector(project=proj_in_db) as s3conn: try: _ = s3conn.resource.meta.client.head_object( @@ -773,14 +792,25 @@ def add_uploaded_files_to_db(proj_in_db, log): errors[file] = {"error": "File not found in S3", "traceback": err.__traceback__} else: try: + # Check if the file already exists in the database file_object = models.File.query.filter( sqlalchemy.and_( models.File.name == sqlalchemy.func.binary(file), models.File.project_id == proj_in_db.id, ) ).first() + + # If the file already exists, create a new version of it if "--overwrite" was specified if file_object: - errors[file] = {"error": "File already in database."} + if vals.get("overwrite", True): + try: + new_file_version(existing_file=file_object, new_info=vals) + files_added.append(file_object) + except KeyError as err: + errors[file] = {"error": f"Missing key: {err}"} + else: + errors[file] = {"error": "File already in database."} + # If the file does not exist, create a new file and version else: new_file = models.File( name=file, @@ -805,8 +835,91 @@ def add_uploaded_files_to_db(proj_in_db, log): db.session.add(new_file) db.session.commit() files_added.append(new_file) - except (sqlalchemy.exc.IntegrityError, sqlalchemy.exc.OperationalError) as err: + except ( + sqlalchemy.exc.IntegrityError, + sqlalchemy.exc.OperationalError, + sqlalchemy.exc.SQLAlchemyError, + ) as err: errors[file] = {"error": str(err)} db.session.rollback() + if errors: + flask.current_app.logger.error(f"Error in new_file_version: {errors}") return files_added, errors + + +def new_file_version(existing_file, new_info): + """ + Create new version of a file. + + Args: + existing_file (dds_web.models.File): The existing file to create a new version of. + new_info (dict): A dictionary containing information about the new version of the file. 
+ + Returns: + None + """ + from dds_web import db + import dds_web.utils + + # Get project + project = existing_file.project + + # Get versions + current_file_version = models.Version.query.filter( + sqlalchemy.and_( + models.Version.active_file == sqlalchemy.func.binary(existing_file.id), + models.Version.time_deleted.is_(None), + ) + ).all() + + # If there is more than one version of the file which does not yet have a deletion timestamp, log a warning + if len(current_file_version) > 1: + flask.current_app.logger.warning( + "There is more than one version of the file " + "which does not yet have a deletion timestamp." + ) + + # Same timestamp for deleted and created new version + new_timestamp = dds_web.utils.current_time() + + # Set the deletion timestamp for the latests version of the file + for version in current_file_version: + if version.time_deleted is None: + version.time_deleted = new_timestamp + + # Get information about the new version of the file + subpath = new_info["subpath"] + size_original = new_info["size_raw"] + size_stored = new_info["size_processed"] + compressed = new_info["compressed"] + salt = new_info["salt"] + public_key = new_info["public_key"] + time_uploaded = new_timestamp + checksum = new_info["checksum"] + + # Update file info + existing_file.subpath = subpath + existing_file.size_original = size_original + existing_file.size_stored = size_stored + existing_file.compressed = compressed + existing_file.salt = salt + existing_file.public_key = public_key + existing_file.time_uploaded = time_uploaded + existing_file.checksum = checksum + + # Create a new version of the file + new_version = models.Version( + size_stored=new_info.get("size_processed"), + time_uploaded=new_timestamp, + active_file=existing_file.id, + project_id=project, + ) + + # Update foreign keys and relationships + project.file_versions.append(new_version) + existing_file.versions.append(new_version) + + # Add the new version to the database and commit the changes + db.session.add(new_version) + db.session.commit() diff --git a/tests/test_utils.py b/tests/test_utils.py index f0a132485..c616b9a09 100644 --- a/tests/test_utils.py +++ b/tests/test_utils.py @@ -1671,39 +1671,39 @@ def test_add_uploaded_files_to_db_file_not_found(client: flask.testing.FlaskClie assert "File not found in S3" in errors["file1.txt"]["error"] -def test_add_uploaded_files_to_db_file_already_in_db(client: flask.testing.FlaskClient): - """Function should return error if file is already in the database.""" - - # get a project and an existing file from this project - proj_in_db = models.Project.query.first() - file_in_db = proj_in_db.files[0] - assert file_in_db - - log = { - file_in_db.name: { - "status": {"failed_op": "add_file_db"}, - "path_remote": "path/to/file1.txt", - "subpath": "subpath", - "size_raw": 100, - "size_processed": 200, - "compressed": False, - "public_key": "public_key", - "salt": "salt", - "checksum": "checksum", - } - } - - # Mock the S3 connector and head_object method - mock_s3conn = MagicMock() - mock_s3conn.resource.meta.client.head_object.return_value = None - - # Call the function - with patch("dds_web.api.api_s3_connector.ApiS3Connector", return_value=mock_s3conn): - files_added, errors = utils.add_uploaded_files_to_db(proj_in_db, log) - - assert files_added == [] - assert file_in_db.name in errors - assert "File already in database." 
in errors[file_in_db.name]["error"] +# def test_add_uploaded_files_to_db_file_already_in_db(client: flask.testing.FlaskClient): +# """Function should return error if file is already in the database.""" + +# # get a project and an existing file from this project +# proj_in_db = models.Project.query.first() +# file_in_db = proj_in_db.files[0] +# assert file_in_db + +# log = { +# file_in_db.name: { +# "status": {"failed_op": "add_file_db"}, +# "path_remote": "path/to/file1.txt", +# "subpath": "subpath", +# "size_raw": 100, +# "size_processed": 200, +# "compressed": False, +# "public_key": "public_key", +# "salt": "salt", +# "checksum": "checksum", +# } +# } + +# # Mock the S3 connector and head_object method +# mock_s3conn = MagicMock() +# mock_s3conn.resource.meta.client.head_object.return_value = None + +# # Call the function +# with patch("dds_web.api.api_s3_connector.ApiS3Connector", return_value=mock_s3conn): +# files_added, errors = utils.add_uploaded_files_to_db(proj_in_db, log) + +# assert files_added == [] +# assert file_in_db.name in errors +# assert "File already in database." in errors[file_in_db.name]["error"] def test_add_uploaded_files_to_db_sql_error(client: flask.testing.FlaskClient): From 24596fc7310bf568126a9bd8156f77d4d41abdfd Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ina=20Od=C3=A9n=20=C3=96sterbo?= Date: Thu, 26 Oct 2023 09:48:43 +0200 Subject: [PATCH 077/114] Updated adr --- ...lan-for-2024-objectives-and-key-results.md | 147 ++++++++++++++---- 1 file changed, 115 insertions(+), 32 deletions(-) diff --git a/doc/architecture/decisions/0021-plan-for-2024-objectives-and-key-results.md b/doc/architecture/decisions/0021-plan-for-2024-objectives-and-key-results.md index 4a73f0830..f1bac49f3 100644 --- a/doc/architecture/decisions/0021-plan-for-2024-objectives-and-key-results.md +++ b/doc/architecture/decisions/0021-plan-for-2024-objectives-and-key-results.md @@ -8,7 +8,7 @@ Accepted ## Context -The Product Owner (PO) / Project Leader (PL) of Team Hermes - responsible for the development and maintenance of the Data Delivery System (DDS) - is going on parental leave from November 17th 2023. Due do this, and that the substitute(s) / replacement(s) has not had enough time to learn the system in order to fully take over the PO / PL responsibilities, there needs to be a plan for what the team should work on during the coming year. Starting a more formal plans for the coming year (now and in the future) is also a general improvement to the team and stakeholders, since it will allow for more tranparency outward and help guide the team's focus. +The Product Owner (PO) / Project Leader (PL) of Team Hermes - responsible for the development and maintenance of the Data Delivery System (DDS) - is going on parental leave from November 17th 2023. Due do this, and that the substitute(s) / replacement(s) has not had enough time to learn the system in order to fully take over the PO / PL responsibilities, there needs to be a plan for what the team should work on during the coming year. Starting a more formal plan for the coming year (now and in the future) is also a general improvement to the team and stakeholders, since it will allow for more tranparency outward and help guide the team's focus. In order to plan for the coming year (2024, and December 2023), the team is using the tool / method _OKRs: Objects and Key Results_. 
@@ -43,30 +43,28 @@ In order to plan for the coming year (2024, and December 2023), the team is usin > > -- [Intuit Mint](https://mint.intuit.com/blog/planning-2/okr-framework/) -The issue motivating this decision, and any context that influences or constrains the decision. - ### Discussions regarding possible objectives -#### "Improve user experience via UI" +#### Possible objective: "Improve user experience via UI" ##### Alternative 1: Implement the `dds-cli` functionality in the web interface. - The web interface will not be a good way for uploads and downloads of huge amounts of data. The units are also saying this. -- Implementing the functionality in the web interface would require us to have a front-end developer that also has experience in cryptography. We (the PO mainly) have been asking- and waiting for this person for years, so we cannot expect that that's going to happen any time soon. The last time it was mentioned was both before and after summer 2023; since then we haven't heard or said anything regarding this. Therefore, creating the web interface that is envisioned - a complete reset of the web using some JS framework - is not possible. +- Implementing the functionality in the web interface would require us to have a front-end developer that also has experience in cryptography. We (the PO mainly) have been asking- and waiting for this person for a long time, so we cannot expect that that's going to happen any time soon. The last time it was mentioned was both before and after summer 2023; since then we haven't heard or said anything regarding this. Therefore, creating the web interface that is envisioned - a complete reset of the web using some JS framework - is not possible. - Even if a front-end developer was to get hired at some point during 2024, doing a complete reset of the frontend (which houses functionality required to register, reset password, setup 2FA, etc) and building the web from scratch, while the person who has been involved in developing the majority of the code, is away, is **not** a good idea. - If we were to work on implementing the functionality into the web interface as it is now, without having a front-end developer, we would have to continue using pure flask in the web, and that would mean that we would need to make a duplicate of practically all of the API code that exists, because: - Calling the API endpoints from the Flask routes does not work since those endpoints require authentication tokens - the ones that the CLI uses. - Moving the API code, creating helper functions, and changing everything in order to use the new helper functions in both the API and web should not be done when the PO is away; It's too much work and it risks the functionality in the web. We should be adding functionality to the system during 2024, **not** refactoring and risking working functionality to break. -- Duplicating the code for listing projects, users, files, adding users to projects (etc, etc) in the web means that we later on will have to redo it all and the team will have spent time on something that will not be used anyway since the upload and download by pure flask and html is not a good idea. Also, upload and download of potentially huge amounts of data via browser is as mentioned above not a good solution. 
+- Duplicating the code for listing projects, users, files, adding users to projects (etc, etc) in the web means that we later on will have to redo it all and the team will have spent time on something that will not be used anyway since the upload and download by pure flask and html is not a good idea (and not possible with huge files due to load on cluster). Also, upload and download of potentially huge amounts of data via browser is as mentioned above not a good solution. -**Because of these things, implementing the functionality in the web interface is not an option; we won't plan for working on the web interface during the coming year.** +**Because of these items, implementing the functionality in the web interface is not an option; we won't plan for working on the web interface during the coming year.** ##### Alternative 2: Creating a Graphical User Interface (GUI) - The unit's _end-users_ (the users downloading the data) would benefit from this. - The NGI units that do not need the download functionality for their end users in the GUI also do not need it in the web, they just have bioinformaticians that help the users and the bioinformaticians are familiar with terminal tools - Other smaller units have less experienced end users and are more likely to want to download locally and to want to click buttons instead of using the terminal -- This GUI would be very simple to begin with, it could (for example) be created by `tkinter` or another simple package (google "simple GUI python" or similar, there are several). The main thing here is that we should not need to write new code for the actual DDS functionality; The idea is that the GUI would run the CLI commands when buttons are clicked etc. Buttons would run the commands, and the same things that are displayed in the CLI would be displayed in the GUI. +- This GUI would be very simple to begin with, it could (for example) be created by `tkinter` or another simple package (google "simple GUI python" or similar, there are several). The main thing here is that we **should not need to write new code for the actual DDS functionality**; The idea is that the GUI would run the CLI commands when buttons are clicked etc. Buttons would run the commands, and the same things that are displayed in the CLI would be displayed in the GUI. - We could start with the authentication, listing of the projects, their deadlines etc, users, project users, inviting users etc. - The GUI would automatically use the authentication token. 
- We could technically implement download, but we could start with displaying the commands and allow them to copy paste the commands to the terminal @@ -86,34 +84,119 @@ The issue motivating this decision, and any context that influences or constrain The sections below have the following structure: -> **Objective X: [short goal] -- [long goal]** +> **Objective X: [short title] -- [long description]** > -> - **Key Result 1 [COMMITTED / ASPIRATIONAL / LEARNING]:** [Description / goal] -> - **Key Result 2 [COMMITTED / ASPIRATIONAL / LEARNING]:** [Description / goal] -> - **Key Result X [COMMITTED / ASPIRATIONAL / LEARNING]:** [Description / goal] - -### Objective 1: GUI -- "Improve user experience via UI" - -- **Reduced learning curve:** Users can perform common tasks in the new GUI in less time and effort compared to the CLI (ASPIRATIONAL/LEARNING) -- **Feature adoption:** Increase the adoption rate of the GUI features by XX % within xx months of launch, by measuring the number of asset downloads (ASPIRATIONAL/LEARNING) -- **Cross-Platform Consistency:** Ensure the GUI functions consistency and seamlessly on the OSs macOS, Linux and Windows (different distributions. (ASPIRATIONAL/LEARNING) - -### Objective 2: Support -- "Optimize support" - -- **Workshop for users:** Plan for a workshop for DDS users; The workshop should be run at least once a year. (COMMITTED) -- **Support documentation:** Create or update support documentation for the top 5 most common support inquiries to facilitate self-service (COMMITTED) -- **Ticket Volume:** Reduce the number of "irrelevant" support tickets submitted by 50% percent within the next 6 months months, by implementing a chatbot in the web interface. "Irrelevant": questions that should go to units or that units should already know. (ASPIRATIONAL) - Initiatives: Create chatbot, provide answers for x, ask "was this helpful" and measure - -### Objective 3: Quality -- "Improve software quality" - -- **Resolve security alerts:** Resolve all solvable (with known fixes) critical security alerts within 7 days, high-severity alerts within 14 days and medium-severity alerts within a month. (ASPIRATIONAL) -- **Test coverage:** Increase test coverage for dds_web to 100% and dds_cli to 70% (ASPIRATIONAL) -- **API documentation:** Create / Generate documentation for the dds_web/api, which covers all endpoints. (COMMITTED) +> - **Key Result Y for Objective X [COMMITTED / ASPIRATIONAL / LEARNING]:** [Description / goal] +> - Initiative 1 +> - _Notes or possible task example_ +> - Initiative 2 +> - _etc..._ + +> **The objectives, key results and initiatives are also available for the team on Confluence: https://scilifelab.atlassian.net/wiki/spaces/deliveryportal/pages/2615705604/Plan+for+2024** + +> The initiatives have been added to the [Jira Board](https://scilifelab.atlassian.net/jira/software/projects/DDS/boards/13/backlog?epics=visible) as _epics_. Tasks that fall under the initiatives should therefore be marked as a part of those epics. This can be changed if the team wants to try another structure. + +### Objective 1: Support -- "Optimize support" + +- **Workshop for users [COMMITTED]:** Plan for a workshop for DDS users, intended to present the system, help users get started and answer common questions. The workshop should be run at least once a year. + - Schedule the workshop for autumn 2024, before summer 2024. 
+ - _Talk to training hub to plan for event._ + - Create workshop material before the workshop (autumn 2024) + - _Decide on target audience_ + - _List parts to include in workshop, depending on audience_ + - _Create workshop content_ + - Improve workshop material based on audience feedback by the end of 2024 + - _Collect feedback within 2 weeks after the workshop_ +- **Support documentation [COMMITTED]:** Create or update support documentation for the top 5 most common support inquiries to facilitate self-service + - Identify the top 5 most common support inquiries by [??] + - _List top 5 most common support inquiries send to Data Centre_ + - _Ask Units (production) what the 5 most common support inquiries they get from their users_ + - _Ask Units (production) about what 5 things they think should be clarified_ + - _Ask Units (testing) about what 5 things they think should be clarified_ + - Analyse the support inquiries and feedback by ?? + - _Group the support inquiries into "themes"_ + - _Choose the 5 most common inquiries_ + - _Investigate which of the inquiries need to have their documentation updated / created_ + - Create / update the documentation for the 5 most common support inquiries by ?? + - _Inquiry 1_ + - _Inquiry 2…_ + - Get feedback/review of documentation from ?? (outside team) +- **Ticket Volume [ASPIRATIONAL]:** Reduce the number of "unnecessary" support tickets submitted by 50% percent within the next 6 months months, by implementing a chatbot in the web interface. +"Unnecessary": questions that should go to units or that units should already know. + - Identify number of unnecessary support tickets in the last x months + - _Find 3 possible tools for creating the chatbot_ + - _List pros and cons for the possible tools_ + - _Make decision_ + - Decide on design and architecture of Chatbot by ?? + - Create a Chatbot prototype by ?? that can answer questions regarding where the documentation and technical overview is located and who they should contact in the case of a question that the bot cannot answer + - Implement "was this helpful?" in the chatbot by ?? + - Implement responses to top 10 most common irrelevant questions by ?? + - Find a way to evaluate the chatbot + +### Objective 2: Quality -- "Improve software quality" + +- **Resolve security alerts [ASPIRATIONAL]:** Resolve all solvable (with known fixes) critical security alerts within 7 days, high-severity alerts within 14 days and medium-severity alerts within a month. + - Find a way to implement a routine on the scrum cycle + - Evaluate if different notification/alerts systems outside GitHub could also be helpful + - Decide a new release procedure for critical alerts when there is no schedule release soon + - _Study how feasible is to redeploy more often_ + - _Study if there is a way, and if so, how to redeploy backend (rolling updates) in Kubernetes_ +- **Test coverage [ASPIRATIONAL]:** Increase test coverage for dds_web to 100% and dds_cli to 70%. + - Increase the coverage for the 10 files within dds_web with the least amount of coverage to above 90% by ?? + - _List 10 files with the least amount of coverage_ + - _Increase coverage for file 1_ + - _Increase coverage for file 2... etc_ + - Increase the coverage for the 10 files within dds_cli with the least amount of coverage to above 40% by ?? + - _List 10 files with the least amount of coverage_ + - _Increase coverage for file 1_ + - _Increase coverage for file 2... etc_ + - Add tests for all changes in the dds_cli code, aiming at at least 50% coverage. 
The tests should be added prior to merging the code. + - For all changes in the dds_web code, add tests so that the coverage is 100%. The tests should be added prior to merging the code. +- **API documentation [COMMITTED]:** Create / Generate documentation for the dds_web/api, which covers all endpoints. + - Decide on a tool for generating the API documentation + - _Find 3 possible solutions / tools_ + - _List pros and cons for the possible tools/solutions_ + - _Make decision_ + - Research if it is possible to automate the documentation generation when a new endpoint is added + - _Maybe GitHub Actions_ + - _Maybe the tool used allows for that_ + - Make API documentation accessible first for the team members, then discuss if we should publish for everyone + +### Objective 3: GUI -- "Improve user experience via a GUI prototype" + +- **Reduced learning curve [ASPIRATIONAL]:** Users can perform common tasks in the new GUI in less time and effort compared to the CLI. + - Make a list of the 5 most used features that could be simplified via the GUI + - Design the layout of those features + - Implement the GUI so it is easily accessible for users + - _Decide a framework/technology that can be easily ported to different OS architectures_ + - _Find 3 possible tools/solutions for creating the GUI_ + - _List pros and cons for the possible tools/solutions_ + - _Make decision_ + - Ask for feedback from someone that is using the CLI atm +- **Cross-Platform Consistency [COMMITTED]:** Ensure the GUI functions consistently and seamlessly on macOS (latest), Linux (ubuntu) and Windows (latest). + - Research best way to publish the binaries to the users + - Create a GitHub action for generating the GUI binary for macOS lastest + - Create a GitHub action for generating the GUI binary for ubuntu latest + - Create a GitHub action for generating the GUI binary for windows latest + - Continuously test all implemented features for cross-platform consistency. + - _Discuss if we should focus more on some OS over others because of:_ + 1. macOS (team develops on mac) + 2. Windows (most users that need the GUI) + 3. Linux (would usually be more comfortable with command line) +- **Feedback [LEARNING]:** Get feedback on GUI prototype from 5 users on each OS. + - Identify users: Make list of 20 people to ask for feedback from + - Create testing protocol, covering the testing of all features implemented in the GUI prototype + - Gather, analyze and prioritize the feedback for implementation + - _Gather feedback from both unit users and researchers_ + - _Prioritize feedback to add new task to the backlog according to their importance_ ## Consequences -What becomes easier or more difficult to do and any risks introduced by the change that will need to be mitigated. +- The GUI prototype should not be prioritized over the other two - this is why it's listed as objective #3. + - If, during the key results and initiatives within objective #3 (GUI), the team finds that this is not an appropriate objective, or that there's a new, better, objective, the team can switch direction and work on the new one. In this case, the team must inform the users of the new plan. + - For more consequences regarding the GUI, see the **Context** section above. +- The task examples listed under some of the initiatives are just that; _examples_. The team will decide on the appropriate tasks during the sprints, depending on which objective and key result they will be working on. 
+- The key results and initiatives _may change_ as time passes since, for example, they may depend on another initiative or information that has been gathered previous to starting the initiative. ## Footnotes From f7d7ccd16eea0cdf9c4641f374fa4a315b936a0e Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ina=20Od=C3=A9n=20=C3=96sterbo?= Date: Thu, 26 Oct 2023 10:12:35 +0200 Subject: [PATCH 078/114] sprintlog --- SPRINTLOG.md | 1 + 1 file changed, 1 insertion(+) diff --git a/SPRINTLOG.md b/SPRINTLOG.md index e56b41d8d..8db2e2834 100644 --- a/SPRINTLOG.md +++ b/SPRINTLOG.md @@ -319,3 +319,4 @@ _Nothing merged in CLI during this sprint_ - Added new API endpoint ProjectStatus.patch to extend the deadline ([#1480])(https://github.com/ScilifelabDataCentre/dds_web/pull/1480) - New version: 2.5.2 ([#1482](https://github.com/ScilifelabDataCentre/dds_web/pull/1482)) +- New ADR record regarding OKR 2024 ([#1483](https://github.com/ScilifelabDataCentre/dds_web/pull/1483)) From 98877321e97081a1de9a120a365677eb306f2581 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ina=20Od=C3=A9n=20=C3=96sterbo?= Date: Thu, 26 Oct 2023 10:12:59 +0200 Subject: [PATCH 079/114] prettier --- .../0021-plan-for-2024-objectives-and-key-results.md | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/doc/architecture/decisions/0021-plan-for-2024-objectives-and-key-results.md b/doc/architecture/decisions/0021-plan-for-2024-objectives-and-key-results.md index f1bac49f3..658aff18d 100644 --- a/doc/architecture/decisions/0021-plan-for-2024-objectives-and-key-results.md +++ b/doc/architecture/decisions/0021-plan-for-2024-objectives-and-key-results.md @@ -121,8 +121,8 @@ The sections below have the following structure: - _Inquiry 1_ - _Inquiry 2…_ - Get feedback/review of documentation from ?? (outside team) -- **Ticket Volume [ASPIRATIONAL]:** Reduce the number of "unnecessary" support tickets submitted by 50% percent within the next 6 months months, by implementing a chatbot in the web interface. -"Unnecessary": questions that should go to units or that units should already know. +- **Ticket Volume [ASPIRATIONAL]:** Reduce the number of "unnecessary" support tickets submitted by 50% percent within the next 6 months months, by implementing a chatbot in the web interface. + "Unnecessary": questions that should go to units or that units should already know. - Identify number of unnecessary support tickets in the last x months - _Find 3 possible tools for creating the chatbot_ - _List pros and cons for the possible tools_ @@ -192,9 +192,9 @@ The sections below have the following structure: ## Consequences -- The GUI prototype should not be prioritized over the other two - this is why it's listed as objective #3. +- The GUI prototype should not be prioritized over the other two - this is why it's listed as objective #3. - If, during the key results and initiatives within objective #3 (GUI), the team finds that this is not an appropriate objective, or that there's a new, better, objective, the team can switch direction and work on the new one. In this case, the team must inform the users of the new plan. - - For more consequences regarding the GUI, see the **Context** section above. + - For more consequences regarding the GUI, see the **Context** section above. - The task examples listed under some of the initiatives are just that; _examples_. The team will decide on the appropriate tasks during the sprints, depending on which objective and key result they will be working on. 
- The key results and initiatives _may change_ as time passes since, for example, they may depend on another initiative or information that has been gathered previous to starting the initiative. From d757ae3f2b3ff8a6bf0551714b6e6812caaed648 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ina=20Od=C3=A9n=20=C3=96sterbo?= Date: Mon, 30 Oct 2023 10:27:48 +0100 Subject: [PATCH 080/114] overwrite False as default --- dds_web/utils.py | 9 ++++----- 1 file changed, 4 insertions(+), 5 deletions(-) diff --git a/dds_web/utils.py b/dds_web/utils.py index ce1576a31..b5fab2707 100644 --- a/dds_web/utils.py +++ b/dds_web/utils.py @@ -752,8 +752,7 @@ def use_sto4(unit_object, project_object) -> bool: def add_uploaded_files_to_db(proj_in_db, log: typing.Dict): - """ - Adds uploaded files to the database. + """Adds uploaded files to the database. Args: proj_in_db (dds_web.models.Project): The project to add the files to. @@ -774,9 +773,9 @@ def add_uploaded_files_to_db(proj_in_db, log: typing.Dict): # Loop through each file in the log for file, vals in log.items(): status = vals.get("status") - overwrite = vals.get("overwrite") + overwrite = vals.get("overwrite", False) - # Check if the file was successfully uploaded + # Check if the file was successfully uploaded but database not updated if not status or not status.get("failed_op") == "add_file_db": errors[file] = {"error": "Incorrect 'failed_op'."} continue @@ -802,7 +801,7 @@ def add_uploaded_files_to_db(proj_in_db, log: typing.Dict): # If the file already exists, create a new version of it if "--overwrite" was specified if file_object: - if vals.get("overwrite", True): + if overwrite: try: new_file_version(existing_file=file_object, new_info=vals) files_added.append(file_object) From 324ad5cedc8427df92ae53f6cd31df0e92f193d4 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ina=20Od=C3=A9n=20=C3=96sterbo?= Date: Mon, 30 Oct 2023 11:08:29 +0100 Subject: [PATCH 081/114] use new_info instead of defining new variables --- dds_web/utils.py | 35 +++++++++++++++-------------------- 1 file changed, 15 insertions(+), 20 deletions(-) diff --git a/dds_web/utils.py b/dds_web/utils.py index b5fab2707..b2f8e82e1 100644 --- a/dds_web/utils.py +++ b/dds_web/utils.py @@ -13,6 +13,7 @@ import time import smtplib from dateutil.relativedelta import relativedelta +import gc # Installed import botocore @@ -804,11 +805,11 @@ def add_uploaded_files_to_db(proj_in_db, log: typing.Dict): if overwrite: try: new_file_version(existing_file=file_object, new_info=vals) - files_added.append(file_object) except KeyError as err: errors[file] = {"error": f"Missing key: {err}"} else: errors[file] = {"error": "File already in database."} + # If the file does not exist, create a new file and version else: new_file = models.File( @@ -887,29 +888,19 @@ def new_file_version(existing_file, new_info): if version.time_deleted is None: version.time_deleted = new_timestamp - # Get information about the new version of the file - subpath = new_info["subpath"] - size_original = new_info["size_raw"] - size_stored = new_info["size_processed"] - compressed = new_info["compressed"] - salt = new_info["salt"] - public_key = new_info["public_key"] - time_uploaded = new_timestamp - checksum = new_info["checksum"] - # Update file info - existing_file.subpath = subpath - existing_file.size_original = size_original - existing_file.size_stored = size_stored - existing_file.compressed = compressed - existing_file.salt = salt - existing_file.public_key = public_key - existing_file.time_uploaded = time_uploaded - 
existing_file.checksum = checksum + existing_file.subpath = new_info["subpath"] + existing_file.size_original = new_info["size_raw"] + existing_file.size_stored = new_info["size_processed"] + existing_file.compressed = new_info["compressed"] + existing_file.salt = new_info["salt"] + existing_file.public_key = new_info["public_key"] + existing_file.time_uploaded = new_timestamp + existing_file.checksum = new_info["checksum"] # Create a new version of the file new_version = models.Version( - size_stored=new_info.get("size_processed"), + size_stored=new_info["size_processed"], time_uploaded=new_timestamp, active_file=existing_file.id, project_id=project, @@ -922,3 +913,7 @@ def new_file_version(existing_file, new_info): # Add the new version to the database and commit the changes db.session.add(new_version) db.session.commit() + + # Clean up information + del new_info + gc.collect() From 0407b2a17a593442788af07f9dc0e6675c1abe30 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ina=20Od=C3=A9n=20=C3=96sterbo?= Date: Mon, 30 Oct 2023 11:44:17 +0100 Subject: [PATCH 082/114] change order of tests and test file --- tests/test_utils.py | 118 +++++++++++++++++++++++++++----------------- 1 file changed, 74 insertions(+), 44 deletions(-) diff --git a/tests/test_utils.py b/tests/test_utils.py index c616b9a09..791f124c0 100644 --- a/tests/test_utils.py +++ b/tests/test_utils.py @@ -1549,48 +1549,7 @@ def test_use_sto4_return_true(client: flask.testing.FlaskClient): assert result is True -def test_add_uploaded_files_to_db_correct_failed_op(client: flask.testing.FlaskClient): - """Test calling the function with correct "failed_op".""" - # Mock input data - proj_in_db = models.Project.query.first() - log = { - "file1.txt": { - "status": {"failed_op": "add_file_db"}, - "path_remote": "path/to/file1.txt", - "subpath": "subpath", - "size_raw": 100, - "size_processed": 200, - "compressed": False, - "public_key": "public_key", - "salt": "salt", - "checksum": "checksum", - } - } - - # Mock the S3 connector and head_object method - mock_s3conn = MagicMock() - mock_s3conn.resource.meta.client.head_object.return_value = None - - # Call the function - with patch("dds_web.api.api_s3_connector.ApiS3Connector", return_value=mock_s3conn): - files_added, errors = utils.add_uploaded_files_to_db(proj_in_db, log) - - # check that the file is added to the database - file = models.File.query.filter_by(name="file1.txt").first() - assert file - assert file.name == "file1.txt" - assert file.name_in_bucket == "path/to/file1.txt" - - # check that the file is added to the project - assert file in proj_in_db.files - - # check that the version is added to the database - version = models.Version.query.filter_by(active_file=file.id).first() - assert version - - # check the return values - assert file in files_added - assert errors == {} +# add_uploaded_files_to_db def test_add_uploaded_files_to_db_other_failed_op(client: flask.testing.FlaskClient): @@ -1629,8 +1588,7 @@ def test_add_uploaded_files_to_db_other_failed_op(client: flask.testing.FlaskCli assert "Incorrect 'failed_op'." 
in errors["file1.txt"]["error"] - -def test_add_uploaded_files_to_db_file_not_found(client: flask.testing.FlaskClient): +def test_add_uploaded_files_to_db_correct_failed_op_file_not_found_in_s3(client: flask.testing.FlaskClient): """Test the return values of the function when file is not found on S3.""" from botocore.exceptions import ClientError @@ -1671,6 +1629,78 @@ def test_add_uploaded_files_to_db_file_not_found(client: flask.testing.FlaskClie assert "File not found in S3" in errors["file1.txt"]["error"] +def test_add_uploaded_files_to_db_correct_failed_op_file_not_found_in_db(client: flask.testing.FlaskClient): + """Test calling the function with correct "failed_op" and file isn't found in database.""" + # Mock input data + proj_in_db = models.Project.query.first() + file_name = "file1.txt" + log = { + file_name: { + "status": {"failed_op": "add_file_db"}, + "path_remote": f"path/to/{file_name}", + "subpath": "subpath", + "size_raw": 100, + "size_processed": 200, + "compressed": False, + "public_key": "public_key", + "salt": "salt", + "checksum": "checksum", + } + } + + # Verify that file does not exist + file_object = models.File.query.filter( + sqlalchemy.and_( + models.File.name == sqlalchemy.func.binary(file_name), + models.File.project_id == proj_in_db.id, + ) + ).first() + assert not file_object + + # Mock the S3 connector and head_object method + mock_s3conn = MagicMock() + mock_s3conn.resource.meta.client.head_object.return_value = None + + # Call the function + with patch("dds_web.api.api_s3_connector.ApiS3Connector", return_value=mock_s3conn): + files_added, errors = utils.add_uploaded_files_to_db(proj_in_db, log) + + # check that the file is added to the database + file = models.File.query.filter( + sqlalchemy.and_( + models.File.name == sqlalchemy.func.binary(file), + models.File.project_id == proj_in_db.id, + ) + ).first() + assert file + assert file.name == file_name + assert file.name_in_bucket == log[file_name]["path_remote"] + assert file.subpath == log[file_name]["subpath"] + assert file.size_original == log[file_name]["size_raw"] + assert file.size_stored == log[file_name]["size_processed"] + assert file.compressed == log[file_name]["compressed"] + assert file.public_key == log[file_name]["public_key"] + assert file.salt == log[file_name]["salt"] + assert file.checksum == log[file_name]["checksum"] + + # Check that the file is added to the project + assert file in proj_in_db.files + + # Check that the version is added to the database + version = models.Version.query.filter_by(active_file=file.id).first() + assert version + assert version.size_stored == log[file_name]["size_processed"] + + # Check the return values + assert file in files_added + assert errors == {} + + + + + + + # def test_add_uploaded_files_to_db_file_already_in_db(client: flask.testing.FlaskClient): # """Function should return error if file is already in the database.""" From 109175a3942eb10efdf01893e09b26b2aed0e7ce Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ina=20Od=C3=A9n=20=C3=96sterbo?= Date: Mon, 30 Oct 2023 12:02:25 +0100 Subject: [PATCH 083/114] test for file already in database --- tests/test_utils.py | 84 ++++++++++++++++++++++++++------------------- 1 file changed, 48 insertions(+), 36 deletions(-) diff --git a/tests/test_utils.py b/tests/test_utils.py index 791f124c0..2396cfb0f 100644 --- a/tests/test_utils.py +++ b/tests/test_utils.py @@ -1668,7 +1668,7 @@ def test_add_uploaded_files_to_db_correct_failed_op_file_not_found_in_db(client: # check that the file is added to the database file 
= models.File.query.filter( sqlalchemy.and_( - models.File.name == sqlalchemy.func.binary(file), + models.File.name == sqlalchemy.func.binary(file_name), models.File.project_id == proj_in_db.id, ) ).first() @@ -1678,7 +1678,7 @@ def test_add_uploaded_files_to_db_correct_failed_op_file_not_found_in_db(client: assert file.subpath == log[file_name]["subpath"] assert file.size_original == log[file_name]["size_raw"] assert file.size_stored == log[file_name]["size_processed"] - assert file.compressed == log[file_name]["compressed"] + assert file.compressed != log[file_name]["compressed"] assert file.public_key == log[file_name]["public_key"] assert file.salt == log[file_name]["salt"] assert file.checksum == log[file_name]["checksum"] @@ -1696,44 +1696,56 @@ def test_add_uploaded_files_to_db_correct_failed_op_file_not_found_in_db(client: assert errors == {} +def test_add_uploaded_files_to_db_correct_failed_op_file_is_found_in_db_no_overwrite(client: flask.testing.FlaskClient): + """Test calling the function with correct "failed_op" and file IS found in database.""" + # Mock input data + proj_in_db = models.Project.query.first() + file_name = "file1.txt" + log = { + file_name: { + "status": {"failed_op": "add_file_db"}, + "path_remote": f"path/to/{file_name}", + "subpath": "subpath", + "size_raw": 100, + "size_processed": 200, + "compressed": False, + "public_key": "public_key", + "salt": "salt", + "checksum": "checksum", + } + } + + # Create new file + new_file = models.File( + name=file_name, + name_in_bucket=log[file_name]["path_remote"], + subpath =log[file_name]["subpath"], + size_original =log[file_name]["size_raw"], + size_stored =log[file_name]["size_processed"], + compressed=not log[file_name]["compressed"], + public_key =log[file_name]["public_key"], + salt =log[file_name]["salt"], + checksum =log[file_name]["checksum"], + ) + proj_in_db.files.append(new_file) + db.session.add(new_file) + db.session.commit() + # Mock the S3 connector and head_object method + mock_s3conn = MagicMock() + mock_s3conn.resource.meta.client.head_object.return_value = None + # Call the function + with patch("dds_web.api.api_s3_connector.ApiS3Connector", return_value=mock_s3conn): + files_added, errors = utils.add_uploaded_files_to_db(proj_in_db, log) + # check that the error is returned and files_added is empty + assert files_added == [] + assert "File already in database" in errors[file_name]["error"] - -# def test_add_uploaded_files_to_db_file_already_in_db(client: flask.testing.FlaskClient): -# """Function should return error if file is already in the database.""" - -# # get a project and an existing file from this project -# proj_in_db = models.Project.query.first() -# file_in_db = proj_in_db.files[0] -# assert file_in_db - -# log = { -# file_in_db.name: { -# "status": {"failed_op": "add_file_db"}, -# "path_remote": "path/to/file1.txt", -# "subpath": "subpath", -# "size_raw": 100, -# "size_processed": 200, -# "compressed": False, -# "public_key": "public_key", -# "salt": "salt", -# "checksum": "checksum", -# } -# } - -# # Mock the S3 connector and head_object method -# mock_s3conn = MagicMock() -# mock_s3conn.resource.meta.client.head_object.return_value = None - -# # Call the function -# with patch("dds_web.api.api_s3_connector.ApiS3Connector", return_value=mock_s3conn): -# files_added, errors = utils.add_uploaded_files_to_db(proj_in_db, log) - -# assert files_added == [] -# assert file_in_db.name in errors -# assert "File already in database." 
in errors[file_in_db.name]["error"] + # Check that the version is added to the database + version = models.Version.query.filter_by(active_file=new_file.id).first() + assert not version def test_add_uploaded_files_to_db_sql_error(client: flask.testing.FlaskClient): From b59be808f6ccb1c540d2cbc9a2a179d95d1966a0 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ina=20Od=C3=A9n=20=C3=96sterbo?= Date: Mon, 30 Oct 2023 12:18:22 +0100 Subject: [PATCH 084/114] test overwrite --- tests/test_utils.py | 136 ++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 136 insertions(+) diff --git a/tests/test_utils.py b/tests/test_utils.py index 2396cfb0f..18153e0e0 100644 --- a/tests/test_utils.py +++ b/tests/test_utils.py @@ -1748,6 +1748,142 @@ def test_add_uploaded_files_to_db_correct_failed_op_file_is_found_in_db_no_overw assert not version +def test_add_uploaded_files_to_db_correct_failed_op_file_is_found_in_db_overwrite_missing_key(client: flask.testing.FlaskClient): + """Test calling the function with correct "failed_op" and file IS found in database but there's at least one key missing.""" + # Mock input data + proj_in_db = models.Project.query.first() + file_name = "file1.txt" + log = { + file_name: { + "status": {"failed_op": "add_file_db"}, + "path_remote": f"path/to/{file_name}", + "subpath": "subpath", + "size_raw": 100, + "size_processed": 200, + "compressed": False, + "public_key": "public_key", + "salt": "salt", + "checksum": "checksum", + "overwrite": True + } + } + + # Create new file + new_file = models.File( + name=file_name, + name_in_bucket=log[file_name]["path_remote"], + subpath =log[file_name]["subpath"], + size_original =log[file_name]["size_raw"], + size_stored =log[file_name]["size_processed"], + compressed=not log[file_name]["compressed"], + public_key =log[file_name]["public_key"], + salt =log[file_name]["salt"], + checksum =log[file_name]["checksum"], + ) + proj_in_db.files.append(new_file) + db.session.add(new_file) + db.session.commit() + + # Mock the S3 connector and head_object method + mock_s3conn = MagicMock() + mock_s3conn.resource.meta.client.head_object.return_value = None + + # Remove key + log[file_name].pop("checksum") + + # Call the function + with patch("dds_web.api.api_s3_connector.ApiS3Connector", return_value=mock_s3conn): + files_added, errors = utils.add_uploaded_files_to_db(proj_in_db, log) + + # check that the error is returned and files_added is empty + assert files_added == [] + assert "Missing key: 'checksum'" in errors[file_name]["error"] + + # Check that the version is added to the database + version = models.Version.query.filter_by(active_file=new_file.id).first() + assert not version + +def test_add_uploaded_files_to_db_correct_failed_op_file_is_found_in_db_overwrite_ok(client: flask.testing.FlaskClient): + """Test calling the function with correct "failed_op" and file IS found in database but overwrite is specified.""" + # Mock input data + proj_in_db = models.Project.query.first() + file_name = "file1.txt" + original_file_info = { + "name": file_name, + "path_remote": f"path/to/{file_name}", + "subpath": "subpath", + "size_raw": 100, + "size_processed": 200, + "compressed": False, + "public_key": "public_key", + "salt": "salt", + "checksum": "checksum", + } + log = { + file_name: { + "status": {"failed_op": "add_file_db"}, + "path_remote": f"path/to/{file_name}", + "subpath": "subpath2", + "size_raw": 1001, + "size_processed": 201, + "compressed": True, + "public_key": "public_key2", + "salt": "salt2", + "checksum": "checksum2", + "overwrite": True 
+ } + } + + # Create new file + new_file = models.File( + name=file_name, + name_in_bucket=original_file_info["path_remote"], + subpath =original_file_info["subpath"], + size_original =original_file_info["size_raw"], + size_stored =original_file_info["size_processed"], + compressed=original_file_info["compressed"], + public_key =original_file_info["public_key"], + salt =original_file_info["salt"], + checksum =original_file_info["checksum"], + ) + proj_in_db.files.append(new_file) + db.session.add(new_file) + db.session.commit() + + # Mock the S3 connector and head_object method + mock_s3conn = MagicMock() + mock_s3conn.resource.meta.client.head_object.return_value = None + + # Call the function + with patch("dds_web.api.api_s3_connector.ApiS3Connector", return_value=mock_s3conn): + files_added, errors = utils.add_uploaded_files_to_db(proj_in_db, log) + + # check that no error is returned and that there's a file and version added + file = models.File.query.filter( + sqlalchemy.and_( + models.File.name == sqlalchemy.func.binary(file_name), + models.File.project_id == proj_in_db.id, + ) + ).first() + assert file + assert file.name == file_name + assert file.name_in_bucket == log[file_name]["path_remote"] + assert file.subpath == log[file_name]["subpath"] + assert file.size_original == log[file_name]["size_raw"] + assert file.size_stored == log[file_name]["size_processed"] + assert file.compressed == log[file_name]["compressed"] + assert file.public_key == log[file_name]["public_key"] + assert file.salt == log[file_name]["salt"] + assert file.checksum == log[file_name]["checksum"] + assert files_added + + assert errors == {} + + # Check that the version is added to the database + version = models.Version.query.filter_by(active_file=new_file.id).first() + assert version + assert version.size_stored == file.size_stored + def test_add_uploaded_files_to_db_sql_error(client: flask.testing.FlaskClient): """Test the return values of the function when sqlalchemy error occurs.""" import sqlalchemy.exc From 83e17fc12167b52672e2fcfed13125ec0ac52299 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ina=20Od=C3=A9n=20=C3=96sterbo?= Date: Mon, 30 Oct 2023 12:20:56 +0100 Subject: [PATCH 085/114] additional assert --- dds_web/utils.py | 1 + tests/test_utils.py | 2 +- 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/dds_web/utils.py b/dds_web/utils.py index b2f8e82e1..80a40ff1a 100644 --- a/dds_web/utils.py +++ b/dds_web/utils.py @@ -805,6 +805,7 @@ def add_uploaded_files_to_db(proj_in_db, log: typing.Dict): if overwrite: try: new_file_version(existing_file=file_object, new_info=vals) + files_added.append(file_object) except KeyError as err: errors[file] = {"error": f"Missing key: {err}"} else: diff --git a/tests/test_utils.py b/tests/test_utils.py index 18153e0e0..233704e2a 100644 --- a/tests/test_utils.py +++ b/tests/test_utils.py @@ -1875,7 +1875,7 @@ def test_add_uploaded_files_to_db_correct_failed_op_file_is_found_in_db_overwrit assert file.public_key == log[file_name]["public_key"] assert file.salt == log[file_name]["salt"] assert file.checksum == log[file_name]["checksum"] - assert files_added + assert files_added and file in files_added assert errors == {} From bc6e11a375ad825d12e4064ddb187fefe0adf06e Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ina=20Od=C3=A9n=20=C3=96sterbo?= Date: Mon, 30 Oct 2023 12:21:42 +0100 Subject: [PATCH 086/114] black --- dds_web/utils.py | 4 +- tests/test_utils.py | 99 +++++++++++++++++++++++++-------------------- 2 files changed, 58 insertions(+), 45 deletions(-) diff --git 
a/dds_web/utils.py b/dds_web/utils.py index 80a40ff1a..8408c9a8f 100644 --- a/dds_web/utils.py +++ b/dds_web/utils.py @@ -13,7 +13,7 @@ import time import smtplib from dateutil.relativedelta import relativedelta -import gc +import gc # Installed import botocore @@ -810,7 +810,7 @@ def add_uploaded_files_to_db(proj_in_db, log: typing.Dict): errors[file] = {"error": f"Missing key: {err}"} else: errors[file] = {"error": "File already in database."} - + # If the file does not exist, create a new file and version else: new_file = models.File( diff --git a/tests/test_utils.py b/tests/test_utils.py index 233704e2a..d05eec184 100644 --- a/tests/test_utils.py +++ b/tests/test_utils.py @@ -1588,7 +1588,10 @@ def test_add_uploaded_files_to_db_other_failed_op(client: flask.testing.FlaskCli assert "Incorrect 'failed_op'." in errors["file1.txt"]["error"] -def test_add_uploaded_files_to_db_correct_failed_op_file_not_found_in_s3(client: flask.testing.FlaskClient): + +def test_add_uploaded_files_to_db_correct_failed_op_file_not_found_in_s3( + client: flask.testing.FlaskClient, +): """Test the return values of the function when file is not found on S3.""" from botocore.exceptions import ClientError @@ -1629,7 +1632,9 @@ def test_add_uploaded_files_to_db_correct_failed_op_file_not_found_in_s3(client: assert "File not found in S3" in errors["file1.txt"]["error"] -def test_add_uploaded_files_to_db_correct_failed_op_file_not_found_in_db(client: flask.testing.FlaskClient): +def test_add_uploaded_files_to_db_correct_failed_op_file_not_found_in_db( + client: flask.testing.FlaskClient, +): """Test calling the function with correct "failed_op" and file isn't found in database.""" # Mock input data proj_in_db = models.Project.query.first() @@ -1647,7 +1652,7 @@ def test_add_uploaded_files_to_db_correct_failed_op_file_not_found_in_db(client: "checksum": "checksum", } } - + # Verify that file does not exist file_object = models.File.query.filter( sqlalchemy.and_( @@ -1696,7 +1701,9 @@ def test_add_uploaded_files_to_db_correct_failed_op_file_not_found_in_db(client: assert errors == {} -def test_add_uploaded_files_to_db_correct_failed_op_file_is_found_in_db_no_overwrite(client: flask.testing.FlaskClient): +def test_add_uploaded_files_to_db_correct_failed_op_file_is_found_in_db_no_overwrite( + client: flask.testing.FlaskClient, +): """Test calling the function with correct "failed_op" and file IS found in database.""" # Mock input data proj_in_db = models.Project.query.first() @@ -1714,18 +1721,18 @@ def test_add_uploaded_files_to_db_correct_failed_op_file_is_found_in_db_no_overw "checksum": "checksum", } } - + # Create new file new_file = models.File( - name=file_name, - name_in_bucket=log[file_name]["path_remote"], - subpath =log[file_name]["subpath"], - size_original =log[file_name]["size_raw"], - size_stored =log[file_name]["size_processed"], - compressed=not log[file_name]["compressed"], - public_key =log[file_name]["public_key"], - salt =log[file_name]["salt"], - checksum =log[file_name]["checksum"], + name=file_name, + name_in_bucket=log[file_name]["path_remote"], + subpath=log[file_name]["subpath"], + size_original=log[file_name]["size_raw"], + size_stored=log[file_name]["size_processed"], + compressed=not log[file_name]["compressed"], + public_key=log[file_name]["public_key"], + salt=log[file_name]["salt"], + checksum=log[file_name]["checksum"], ) proj_in_db.files.append(new_file) db.session.add(new_file) @@ -1748,7 +1755,9 @@ def test_add_uploaded_files_to_db_correct_failed_op_file_is_found_in_db_no_overw assert 
not version -def test_add_uploaded_files_to_db_correct_failed_op_file_is_found_in_db_overwrite_missing_key(client: flask.testing.FlaskClient): +def test_add_uploaded_files_to_db_correct_failed_op_file_is_found_in_db_overwrite_missing_key( + client: flask.testing.FlaskClient, +): """Test calling the function with correct "failed_op" and file IS found in database but there's at least one key missing.""" # Mock input data proj_in_db = models.Project.query.first() @@ -1764,21 +1773,21 @@ def test_add_uploaded_files_to_db_correct_failed_op_file_is_found_in_db_overwrit "public_key": "public_key", "salt": "salt", "checksum": "checksum", - "overwrite": True + "overwrite": True, } } - + # Create new file new_file = models.File( - name=file_name, - name_in_bucket=log[file_name]["path_remote"], - subpath =log[file_name]["subpath"], - size_original =log[file_name]["size_raw"], - size_stored =log[file_name]["size_processed"], - compressed=not log[file_name]["compressed"], - public_key =log[file_name]["public_key"], - salt =log[file_name]["salt"], - checksum =log[file_name]["checksum"], + name=file_name, + name_in_bucket=log[file_name]["path_remote"], + subpath=log[file_name]["subpath"], + size_original=log[file_name]["size_raw"], + size_stored=log[file_name]["size_processed"], + compressed=not log[file_name]["compressed"], + public_key=log[file_name]["public_key"], + salt=log[file_name]["salt"], + checksum=log[file_name]["checksum"], ) proj_in_db.files.append(new_file) db.session.add(new_file) @@ -1787,10 +1796,10 @@ def test_add_uploaded_files_to_db_correct_failed_op_file_is_found_in_db_overwrit # Mock the S3 connector and head_object method mock_s3conn = MagicMock() mock_s3conn.resource.meta.client.head_object.return_value = None - - # Remove key + + # Remove key log[file_name].pop("checksum") - + # Call the function with patch("dds_web.api.api_s3_connector.ApiS3Connector", return_value=mock_s3conn): files_added, errors = utils.add_uploaded_files_to_db(proj_in_db, log) @@ -1803,7 +1812,10 @@ def test_add_uploaded_files_to_db_correct_failed_op_file_is_found_in_db_overwrit version = models.Version.query.filter_by(active_file=new_file.id).first() assert not version -def test_add_uploaded_files_to_db_correct_failed_op_file_is_found_in_db_overwrite_ok(client: flask.testing.FlaskClient): + +def test_add_uploaded_files_to_db_correct_failed_op_file_is_found_in_db_overwrite_ok( + client: flask.testing.FlaskClient, +): """Test calling the function with correct "failed_op" and file IS found in database but overwrite is specified.""" # Mock input data proj_in_db = models.Project.query.first() @@ -1830,21 +1842,21 @@ def test_add_uploaded_files_to_db_correct_failed_op_file_is_found_in_db_overwrit "public_key": "public_key2", "salt": "salt2", "checksum": "checksum2", - "overwrite": True + "overwrite": True, } } - + # Create new file new_file = models.File( - name=file_name, - name_in_bucket=original_file_info["path_remote"], - subpath =original_file_info["subpath"], - size_original =original_file_info["size_raw"], - size_stored =original_file_info["size_processed"], - compressed=original_file_info["compressed"], - public_key =original_file_info["public_key"], - salt =original_file_info["salt"], - checksum =original_file_info["checksum"], + name=file_name, + name_in_bucket=original_file_info["path_remote"], + subpath=original_file_info["subpath"], + size_original=original_file_info["size_raw"], + size_stored=original_file_info["size_processed"], + compressed=original_file_info["compressed"], + 
public_key=original_file_info["public_key"], + salt=original_file_info["salt"], + checksum=original_file_info["checksum"], ) proj_in_db.files.append(new_file) db.session.add(new_file) @@ -1853,7 +1865,7 @@ def test_add_uploaded_files_to_db_correct_failed_op_file_is_found_in_db_overwrit # Mock the S3 connector and head_object method mock_s3conn = MagicMock() mock_s3conn.resource.meta.client.head_object.return_value = None - + # Call the function with patch("dds_web.api.api_s3_connector.ApiS3Connector", return_value=mock_s3conn): files_added, errors = utils.add_uploaded_files_to_db(proj_in_db, log) @@ -1876,7 +1888,7 @@ def test_add_uploaded_files_to_db_correct_failed_op_file_is_found_in_db_overwrit assert file.salt == log[file_name]["salt"] assert file.checksum == log[file_name]["checksum"] assert files_added and file in files_added - + assert errors == {} # Check that the version is added to the database @@ -1884,6 +1896,7 @@ def test_add_uploaded_files_to_db_correct_failed_op_file_is_found_in_db_overwrit assert version assert version.size_stored == file.size_stored + def test_add_uploaded_files_to_db_sql_error(client: flask.testing.FlaskClient): """Test the return values of the function when sqlalchemy error occurs.""" import sqlalchemy.exc From b150b8de3a304af98603af888a75a7625a1c04df Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ina=20Od=C3=A9n=20=C3=96sterbo?= Date: Mon, 30 Oct 2023 13:33:50 +0100 Subject: [PATCH 087/114] test multiple versions --- dds_web/utils.py | 2 +- tests/test_utils.py | 92 ++++++++++++++++++++++++++++++++++++++++++++- 2 files changed, 92 insertions(+), 2 deletions(-) diff --git a/dds_web/utils.py b/dds_web/utils.py index 8408c9a8f..a86848ddf 100644 --- a/dds_web/utils.py +++ b/dds_web/utils.py @@ -893,7 +893,7 @@ def new_file_version(existing_file, new_info): existing_file.subpath = new_info["subpath"] existing_file.size_original = new_info["size_raw"] existing_file.size_stored = new_info["size_processed"] - existing_file.compressed = new_info["compressed"] + existing_file.compressed = not new_info["compressed"] existing_file.salt = new_info["salt"] existing_file.public_key = new_info["public_key"] existing_file.time_uploaded = new_timestamp diff --git a/tests/test_utils.py b/tests/test_utils.py index d05eec184..c5422a71a 100644 --- a/tests/test_utils.py +++ b/tests/test_utils.py @@ -25,6 +25,7 @@ import boto3 import botocore import sqlalchemy +from _pytest.logging import LogCaptureFixture # Variables @@ -1883,7 +1884,7 @@ def test_add_uploaded_files_to_db_correct_failed_op_file_is_found_in_db_overwrit assert file.subpath == log[file_name]["subpath"] assert file.size_original == log[file_name]["size_raw"] assert file.size_stored == log[file_name]["size_processed"] - assert file.compressed == log[file_name]["compressed"] + assert file.compressed != log[file_name]["compressed"] assert file.public_key == log[file_name]["public_key"] assert file.salt == log[file_name]["salt"] assert file.checksum == log[file_name]["checksum"] @@ -1940,3 +1941,92 @@ def test_add_uploaded_files_to_db_sql_error(client: flask.testing.FlaskClient): assert file not in files_added assert files_added == [] assert "OperationalError" in errors["file1.txt"]["error"] + +# new_file_version + +def test_new_file_version_multiple_versions(client: flask.testing.FlaskClient, capfd: LogCaptureFixture): + """If there are multiple versions for the same file then they should be updated identically.""" + # Get any project + project = models.Project.query.first() + + # Define file info + file_name = "file1.txt" 
+ original_file_info = { + "name": file_name, + "path_remote": f"path/to/{file_name}", + "subpath": "subpath", + "size_raw": 100, + "size_processed": 200, + "compressed": False, + "public_key": "public_key", + "salt": "salt", + "checksum": "checksum", + } + + # Create new file + new_file = models.File( + name=file_name, + name_in_bucket=original_file_info["path_remote"], + subpath=original_file_info["subpath"], + size_original=original_file_info["size_raw"], + size_stored=original_file_info["size_processed"], + compressed=original_file_info["compressed"], + public_key=original_file_info["public_key"], + salt=original_file_info["salt"], + checksum=original_file_info["checksum"], + ) + + # Create new versions (multiple) of the file + new_version_1 = models.Version( + size_stored=original_file_info["size_processed"], + time_uploaded=utils.current_time(), + active_file=new_file.id, + project_id=project, + ) + new_version_2 = models.Version( + size_stored=original_file_info["size_processed"] + 10, + time_uploaded=utils.current_time(), + active_file=new_file.id, + project_id=project, + ) + + # Append to relationships + project.files.append(new_file) + project.file_versions.extend([new_version_1, new_version_2]) + new_file.versions.extend([new_version_1, new_version_2]) + + db.session.add(new_file) + db.session.commit() + + # Define new file info + new_file_info = { + "name": file_name, + "path_remote": f"path/to/{file_name}", + "subpath": "subpath", + "size_raw": 1001, + "size_processed": 2001, + "compressed": True, + "public_key": "public_key2", + "salt": "salt2", + "checksum": "checksum2", + } + + # Run function + utils.new_file_version(existing_file=new_file, new_info=new_file_info) + + # Verify that logging printed + _, err = capfd.readouterr() + assert "There is more than one version of the file which does not yet have a deletion timestamp." 
in err + + # Verify that there's a new version + assert len(new_file.versions) == 3 + + # Verify that the file info has been updated + assert new_file.subpath == new_file_info["subpath"] == original_file_info["subpath"] + assert new_file.size_original == new_file_info["size_raw"] != original_file_info["size_raw"] + assert new_file.size_stored == new_file_info["size_processed"] != original_file_info["size_processed"] + assert new_file.compressed == (not new_file_info["compressed"]) != (not original_file_info["compressed"]) + assert new_file.salt == new_file_info["salt"] != original_file_info["salt"] + assert new_file.public_key == new_file_info["public_key"] != original_file_info["public_key"] + assert new_file.time_uploaded != new_version_1.time_deleted == new_version_2.time_deleted + assert new_file.checksum == new_file_info["checksum"] != original_file_info["checksum"] \ No newline at end of file From 4dd37d48938c4c5944d6085a4c2ae5401a28eccc Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ina=20Od=C3=A9n=20=C3=96sterbo?= Date: Mon, 30 Oct 2023 13:34:29 +0100 Subject: [PATCH 088/114] black --- tests/test_utils.py | 27 +++++++++++++++++++++------ 1 file changed, 21 insertions(+), 6 deletions(-) diff --git a/tests/test_utils.py b/tests/test_utils.py index c5422a71a..4006ae1e4 100644 --- a/tests/test_utils.py +++ b/tests/test_utils.py @@ -1942,9 +1942,13 @@ def test_add_uploaded_files_to_db_sql_error(client: flask.testing.FlaskClient): assert files_added == [] assert "OperationalError" in errors["file1.txt"]["error"] + # new_file_version -def test_new_file_version_multiple_versions(client: flask.testing.FlaskClient, capfd: LogCaptureFixture): + +def test_new_file_version_multiple_versions( + client: flask.testing.FlaskClient, capfd: LogCaptureFixture +): """If there are multiple versions for the same file then they should be updated identically.""" # Get any project project = models.Project.query.first() @@ -1991,7 +1995,7 @@ def test_new_file_version_multiple_versions(client: flask.testing.FlaskClient, c ) # Append to relationships - project.files.append(new_file) + project.files.append(new_file) project.file_versions.extend([new_version_1, new_version_2]) new_file.versions.extend([new_version_1, new_version_2]) @@ -2016,7 +2020,10 @@ def test_new_file_version_multiple_versions(client: flask.testing.FlaskClient, c # Verify that logging printed _, err = capfd.readouterr() - assert "There is more than one version of the file which does not yet have a deletion timestamp." in err + assert ( + "There is more than one version of the file which does not yet have a deletion timestamp." 
+ in err + ) # Verify that there's a new version assert len(new_file.versions) == 3 @@ -2024,9 +2031,17 @@ def test_new_file_version_multiple_versions(client: flask.testing.FlaskClient, c # Verify that the file info has been updated assert new_file.subpath == new_file_info["subpath"] == original_file_info["subpath"] assert new_file.size_original == new_file_info["size_raw"] != original_file_info["size_raw"] - assert new_file.size_stored == new_file_info["size_processed"] != original_file_info["size_processed"] - assert new_file.compressed == (not new_file_info["compressed"]) != (not original_file_info["compressed"]) + assert ( + new_file.size_stored + == new_file_info["size_processed"] + != original_file_info["size_processed"] + ) + assert ( + new_file.compressed + == (not new_file_info["compressed"]) + != (not original_file_info["compressed"]) + ) assert new_file.salt == new_file_info["salt"] != original_file_info["salt"] assert new_file.public_key == new_file_info["public_key"] != original_file_info["public_key"] assert new_file.time_uploaded != new_version_1.time_deleted == new_version_2.time_deleted - assert new_file.checksum == new_file_info["checksum"] != original_file_info["checksum"] \ No newline at end of file + assert new_file.checksum == new_file_info["checksum"] != original_file_info["checksum"] From d9aeb6cc78b0d7fdb6aead8729b8181751974051 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ina=20Od=C3=A9n=20=C3=96sterbo?= Date: Mon, 30 Oct 2023 15:57:29 +0100 Subject: [PATCH 089/114] failing test --- dds_web/commands.py | 2 +- tests/test_commands.py | 65 ++++++++++++++++++++++++++++++++++++------ 2 files changed, 58 insertions(+), 9 deletions(-) diff --git a/dds_web/commands.py b/dds_web/commands.py index 2669034e8..df23256d7 100644 --- a/dds_web/commands.py +++ b/dds_web/commands.py @@ -226,7 +226,7 @@ def update_uploaded_file_with_log(project, path_to_log_file): with open(path_to_log_file, "r") as f: log = json.load(f) - files_added, errors = utils.add_uploaded_files_to_db(proj_in_db, log) + files_added, errors = utils.add_uploaded_files_to_db(proj_in_db=proj_in_db, log=log) flask.current_app.logger.info(f"Files added: {files_added}") flask.current_app.logger.info(f"Errors while adding files: {errors}") diff --git a/tests/test_commands.py b/tests/test_commands.py index 4f82dd869..50fb78583 100644 --- a/tests/test_commands.py +++ b/tests/test_commands.py @@ -3,7 +3,7 @@ # Standard import typing from unittest import mock -from unittest.mock import patch +from unittest.mock import patch, mock_open from unittest.mock import PropertyMock from unittest.mock import MagicMock import os @@ -14,6 +14,7 @@ import pathlib import csv from dateutil.relativedelta import relativedelta +import json # Installed import click @@ -458,13 +459,20 @@ def test_update_uploaded_file_with_log_nonexisting_project( # Run command assert db.session.query(models.Project).all() with patch("dds_web.database.models.Project.query.filter_by", mock_no_project): - result: click.testing.Result = runner.invoke(update_uploaded_file_with_log, command_options) + _: click.testing.Result = runner.invoke(update_uploaded_file_with_log, command_options) _, err = capfd.readouterr() assert "The project 'projectdoesntexist' doesn't exist." 
in err + # Verify that things are not printed out + assert "Files added:" not in err + assert "Errors while adding files:" not in err -def test_update_uploaded_file_with_log_nonexisting_file(client, runner, fs: FakeFilesystem) -> None: + +def test_update_uploaded_file_with_log_nonexisting_file(client, runner, capfd: LogCaptureFixture) -> None: """Attempt to read file which does not exist.""" + # Get project + project = models.Project.query.first() + # Verify that fake file does not exist non_existent_log_file: str = "this_is_not_a_file.json" assert not os.path.exists(non_existent_log_file) @@ -472,17 +480,58 @@ def test_update_uploaded_file_with_log_nonexisting_file(client, runner, fs: Fake # Create command options command_options: typing.List = [ "--project", - "projectdoesntexist", + project.public_id, "--path-to-log-file", non_existent_log_file, ] # Run command - result: click.testing.Result = runner.invoke(update_uploaded_file_with_log, command_options) - # TODO: Add check for logging or change command to return or raise error. capfd does not work together with fs - # _, err = capfd.readouterr() - # assert "The project 'projectdoesntexist' doesn't exist." in result.stderr + _: click.testing.Result = runner.invoke(update_uploaded_file_with_log, command_options) + + # Check logging + _, err = capfd.readouterr() + assert f"The log file '{non_existent_log_file}' doesn't exist." in err + + # Verify that things are not printed out + assert "Files added:" not in err + assert "Errors while adding files:" not in err + + +def test_update_uploaded_file(client, runner, capfd: LogCaptureFixture) -> None: + """Attempt to read file which does not exist.""" + # Get project + project = models.Project.query.first() + # # Verify that fake file exists + log_file: str = "this_is_a_file.json" + + # Get file from db + file_object: models.File = models.File.query.first() + file_dict = { + "name": file_object.name, + "path_remote": file_object.name_in_bucket, + "subpath": file_object.subpath, + "size_raw": file_object.size_original, + "size_processed": file_object.size_stored, + "compressed": not file_object.compressed, + "public_key": file_object.public_key, + "salt": file_object.salt, + "checksum": file_object.checksum, + } + + # Create command options + command_options: typing.List = [ + "--project", + project.public_id, + "--path-to-log-file", + log_file, + ] + _: click.testing.Result = runner.invoke(update_uploaded_file_with_log, command_options) + + # Check logging + _, err = capfd.readouterr() + assert "Files added:" in err + assert "Errors while adding files:" in err # lost_files_s3_db From eed6cf92526b1b6dbce8da7cda9d36ca335e87ec Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ina=20Od=C3=A9n=20=C3=96sterbo?= Date: Tue, 31 Oct 2023 09:19:00 +0100 Subject: [PATCH 090/114] test for open and load --- dds_web/commands.py | 3 +++ dds_web/utils.py | 1 + tests/test_commands.py | 34 ++++++++++++++++++++++++++-------- 3 files changed, 30 insertions(+), 8 deletions(-) diff --git a/dds_web/commands.py b/dds_web/commands.py index df23256d7..8c8145df1 100644 --- a/dds_web/commands.py +++ b/dds_web/commands.py @@ -218,13 +218,16 @@ def update_uploaded_file_with_log(project, path_to_log_file): if not proj_in_db: flask.current_app.logger.error(f"The project '{project}' doesn't exist.") return + flask.current_app.logger.debug(f"Updating file in project '{project}'...") if not os.path.exists(path_to_log_file): flask.current_app.logger.error(f"The log file '{path_to_log_file}' doesn't exist.") return + 
flask.current_app.logger.debug(f"Reading file info from path '{path_to_log_file}'...") with open(path_to_log_file, "r") as f: log = json.load(f) + flask.current_app.logger.debug("File contents were loaded...") files_added, errors = utils.add_uploaded_files_to_db(proj_in_db=proj_in_db, log=log) diff --git a/dds_web/utils.py b/dds_web/utils.py index a86848ddf..b99954e27 100644 --- a/dds_web/utils.py +++ b/dds_web/utils.py @@ -771,6 +771,7 @@ def add_uploaded_files_to_db(proj_in_db, log: typing.Dict): errors = {} files_added = [] + flask.current_app.logger.info(type(log)) # Loop through each file in the log for file, vals in log.items(): status = vals.get("status") diff --git a/tests/test_commands.py b/tests/test_commands.py index 50fb78583..acf659d59 100644 --- a/tests/test_commands.py +++ b/tests/test_commands.py @@ -468,9 +468,11 @@ def test_update_uploaded_file_with_log_nonexisting_project( assert "Errors while adding files:" not in err -def test_update_uploaded_file_with_log_nonexisting_file(client, runner, capfd: LogCaptureFixture) -> None: +def test_update_uploaded_file_with_log_nonexisting_file( + client, runner, capfd: LogCaptureFixture +) -> None: """Attempt to read file which does not exist.""" - # Get project + # Get project project = models.Project.query.first() # Verify that fake file does not exist @@ -497,9 +499,9 @@ def test_update_uploaded_file_with_log_nonexisting_file(client, runner, capfd: L assert "Errors while adding files:" not in err -def test_update_uploaded_file(client, runner, capfd: LogCaptureFixture) -> None: +def test_update_uploaded_file(client, runner, capfd: LogCaptureFixture, boto3_session) -> None: """Attempt to read file which does not exist.""" - # Get project + # Get project project = models.Project.query.first() # # Verify that fake file exists @@ -508,7 +510,8 @@ def test_update_uploaded_file(client, runner, capfd: LogCaptureFixture) -> None: # Get file from db file_object: models.File = models.File.query.first() file_dict = { - "name": file_object.name, + file_object.name: { + "status": {"failed_op": "add_file_db"}, "path_remote": file_object.name_in_bucket, "subpath": file_object.subpath, "size_raw": file_object.size_original, @@ -518,7 +521,8 @@ def test_update_uploaded_file(client, runner, capfd: LogCaptureFixture) -> None: "salt": file_object.salt, "checksum": file_object.checksum, } - + } + # Create command options command_options: typing.List = [ "--project", @@ -526,12 +530,26 @@ def test_update_uploaded_file(client, runner, capfd: LogCaptureFixture) -> None: "--path-to-log-file", log_file, ] - _: click.testing.Result = runner.invoke(update_uploaded_file_with_log, command_options) + with patch("os.path.exists") as mock_exists: + mock_exists.return_value = True + with patch("dds_web.commands.open"): + with patch("json.load") as mock_json_load: + mock_json_load.return_value = file_dict + _: click.testing.Result = runner.invoke( + update_uploaded_file_with_log, command_options + ) # Check logging _, err = capfd.readouterr() - assert "Files added:" in err + assert f"The project '{project.public_id}' doesn't exist." not in err + assert f"Updating file in project '{project.public_id}'..." in err + assert f"The log file '{log_file}' doesn't exist." not in err + assert f"Reading file info from path '{log_file}'..." in err + assert "File contents were loaded..." 
in err + assert "Files added: []" in err assert "Errors while adding files:" in err + assert "File already in database" in err + # lost_files_s3_db From 5017a32d8b9e45b6aa79d87568e4b64cea7782f6 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ina=20Od=C3=A9n=20=C3=96sterbo?= <35953392+i-oden@users.noreply.github.com> Date: Tue, 31 Oct 2023 14:19:46 +0100 Subject: [PATCH 091/114] Apply suggestions from code review --- tests/test_utils.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/tests/test_utils.py b/tests/test_utils.py index 4006ae1e4..7d70681b5 100644 --- a/tests/test_utils.py +++ b/tests/test_utils.py @@ -1751,7 +1751,7 @@ def test_add_uploaded_files_to_db_correct_failed_op_file_is_found_in_db_no_overw assert files_added == [] assert "File already in database" in errors[file_name]["error"] - # Check that the version is added to the database + # Check that the version is not added to the database version = models.Version.query.filter_by(active_file=new_file.id).first() assert not version @@ -1809,7 +1809,7 @@ def test_add_uploaded_files_to_db_correct_failed_op_file_is_found_in_db_overwrit assert files_added == [] assert "Missing key: 'checksum'" in errors[file_name]["error"] - # Check that the version is added to the database + # Check that the version is not added to the database version = models.Version.query.filter_by(active_file=new_file.id).first() assert not version From 9f67d662646e627cde68b48d9bc951b0c47fad87 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ina=20Od=C3=A9n=20=C3=96sterbo?= <35953392+i-oden@users.noreply.github.com> Date: Thu, 2 Nov 2023 08:49:44 +0100 Subject: [PATCH 092/114] Update dds_web/utils.py --- dds_web/utils.py | 1 - 1 file changed, 1 deletion(-) diff --git a/dds_web/utils.py b/dds_web/utils.py index b99954e27..f815df4bf 100644 --- a/dds_web/utils.py +++ b/dds_web/utils.py @@ -918,4 +918,3 @@ def new_file_version(existing_file, new_info): # Clean up information del new_info - gc.collect() From e11e58f4ba917ddbef0eb62355b1569b4f70754f Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ina=20Od=C3=A9n=20=C3=96sterbo?= Date: Fri, 3 Nov 2023 09:28:45 +0100 Subject: [PATCH 093/114] sprintlog --- SPRINTLOG.md | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/SPRINTLOG.md b/SPRINTLOG.md index c5eaa32d4..72938a2d6 100644 --- a/SPRINTLOG.md +++ b/SPRINTLOG.md @@ -319,5 +319,6 @@ _Nothing merged in CLI during this sprint_ - Added new API endpoint ProjectStatus.patch to extend the deadline ([#1480])(https://github.com/ScilifelabDataCentre/dds_web/pull/1480) - New version: 2.5.2 ([#1482](https://github.com/ScilifelabDataCentre/dds_web/pull/1482)) -- New endpoint `AddFailedFiles` for adding failed files to database ([#1472])(https://github.com/ScilifelabDataCentre/dds_web/pull/1472) +- New endpoint `AddFailedFiles` for adding failed files to database ([#1472](https://github.com/ScilifelabDataCentre/dds_web/pull/1472)) +- Change the generate usage command to monthly instead of quartely, and add the command to send a usage report specifying the number of months ([#1476](https://github.com/ScilifelabDataCentre/dds_web/pull/1476)) - New ADR record regarding OKR 2024 ([#1483](https://github.com/ScilifelabDataCentre/dds_web/pull/1483)) From 442e8516004b3976953687d34166bf92f59fb6b9 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ina=20Od=C3=A9n=20=C3=96sterbo?= Date: Fri, 3 Nov 2023 09:29:17 +0100 Subject: [PATCH 094/114] init --- dds_web/__init__.py | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/dds_web/__init__.py b/dds_web/__init__.py 
index 318ff4c2f..2ba8d8ff8 100644 --- a/dds_web/__init__.py +++ b/dds_web/__init__.py @@ -274,7 +274,8 @@ def load_user(user_id): set_available_to_expired, set_expired_to_archived, delete_invites, - quarterly_usage, + monthly_usage, + send_usage, collect_stats, monitor_usage, update_unit, @@ -291,7 +292,8 @@ def load_user(user_id): app.cli.add_command(set_available_to_expired) app.cli.add_command(set_expired_to_archived) app.cli.add_command(delete_invites) - app.cli.add_command(quarterly_usage) + app.cli.add_command(monthly_usage) + app.cli.add_command(send_usage) app.cli.add_command(collect_stats) app.cli.add_command(monitor_usage) From a8e1d318403c4bbc6a1f24a84d2b8162d98cffd7 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ina=20Od=C3=A9n=20=C3=96sterbo?= Date: Fri, 3 Nov 2023 09:29:42 +0100 Subject: [PATCH 095/114] commands --- dds_web/commands.py | 277 ++++++++++++++++++++++++++++++++++++++------ 1 file changed, 239 insertions(+), 38 deletions(-) diff --git a/dds_web/commands.py b/dds_web/commands.py index 8c8145df1..071042bdd 100644 --- a/dds_web/commands.py +++ b/dds_web/commands.py @@ -9,6 +9,8 @@ import datetime from dateutil.relativedelta import relativedelta import gc +import pathlib +import csv # Installed import click @@ -755,15 +757,21 @@ def delete_invites(): flask.current_app.logger.error(f"{invite} not deleted: {error}") -@click.command("quartely-usage") +@click.command("monthly-usage") @flask.cli.with_appcontext -def quarterly_usage(): - """ - Get the monthly usage for the units - Should be run on the 1st of Jan,Apr,Jul,Oct at around 00:01. +def monthly_usage(): + """Get the monthly usage for the units. + + Should be run on the 1st of every month at around 00:01. + + 1. Mark projects as done (all files have been included in an invoice) + 2. Calculate project usage for all non-done projects + 3. Send success- or failure email """ - flask.current_app.logger.debug("Task: Collecting usage information from database.") + flask.current_app.logger.debug( + "Starting `monthly_usage`; Collecting usage information from database." + ) # Imports # Installed @@ -775,31 +783,43 @@ def quarterly_usage(): from dds_web.utils import ( current_time, page_query, - # calculate_period_usage, calculate_version_period_usage, + send_email_with_retry, + ) + + # Email settings + email_recipient: str = flask.current_app.config.get("MAIL_DDS") + # -- Success + email_subject: str = "[INVOICING CRONJOB]" + email_body: str = ( + "The calculation of the monthly usage succeeded; The byte hours " + "for all active projects have been saved to the database." + ) + # -- Failure + error_subject: str = f"{email_subject} Error in monthly-usage cronjob" + error_body: str = ( + "There was an error in the cronjob 'monthly-usage', used for calculating the" + " byte hours for every active project in the last month.\n\n" + "What to do:\n" + "1. Check the logs on OpenSearch.\n" + "2. The DDS team should enter the backend container and run the command `flask monthly-usage`.\n" + "3. Check that you receive a new email indicating that the command was successful.\n" ) + # 1. Mark projects as done (all files have been included in an invoice) + # .. a. Get projects where is_active = False + # .. b. Check if the versions are all time_deleted == time_invoiced + # .. c. Yes --> Set new column to True ("done") try: - # 1. Get projects where is_active = False - # .. a. Check if the versions are all time_deleted == time_invoiced - # .. b. 
Yes --> Set new column to True ("done") - flask.current_app.logger.info("Marking projects as 'done'....") - for unit, project in page_query( - db.session.query(models.Unit, models.Project) - .join(models.Project) - .filter(models.Project.is_active == False) + flask.current_app.logger.info("Marking projects as 'done'...") + + # Iterate through non-active projects + for project in page_query( + models.Project.query.filter_by(is_active=False).with_for_update() ): # Get number of versions in project that have been fully included in usage calcs - num_done = ( - db.session.query(models.Project, models.Version) - .join(models.Version) - .filter( - sqlalchemy.and_( - models.Project.id == project.id, - models.Version.time_deleted == models.Version.time_invoiced, - ) - ) - .count() + num_done = len( + list(v for v in project.file_versions if v.time_deleted == v.time_invoiced) ) # Check if there are any versions that are not fully included @@ -807,17 +827,39 @@ def quarterly_usage(): if num_done == len(project.file_versions): project.done = True + # Save any projects marked as done db.session.commit() - # 2. Get project where done = False - for unit, project in page_query( - db.session.query(models.Unit, models.Project) - .join(models.Project) - .filter(models.Project.done == False) - ): + except (sqlalchemy.exc.OperationalError, sqlalchemy.exc.SQLAlchemyError) as err: + db.session.rollback() + flask.current_app.logger.error( + "Usage collection during step 1: marking projects as done. Sending email..." + ) + + # Send email about error + email_message: flask_mail.Message = flask_mail.Message( + subject=error_subject, + recipients=[email_recipient], + body=error_body, + ) + send_email_with_retry(msg=email_message) + raise + + # 2. Calculate project usage for all non-done projects + # .. a. Get projects where done = False + # .. b. Calculate usage + # .. c. Save usage + try: + flask.current_app.logger.info("Calculating usage...") + + # Save all new rows at once + all_new_rows = [] + + # Iterate through non-done projects + for project in page_query(models.Project.query.filter_by(done=False).with_for_update()): project_byte_hours: int = 0 for version in project.file_versions: - # Skipp deleted and already invoiced versions + # Skip deleted and already invoiced versions if version.time_deleted == version.time_invoiced and [ version.time_deleted, version.time_invoiced, @@ -825,25 +867,184 @@ def quarterly_usage(): continue version_bhours = calculate_version_period_usage(version=version) project_byte_hours += version_bhours - flask.current_app.logger.info( + flask.current_app.logger.debug( f"Project {project.public_id} byte hours: {project_byte_hours}" ) # Create a record in usage table - new_record = models.Usage( + new_usage_row = models.Usage( project_id=project.id, usage=project_byte_hours, - cost=0, time_collected=current_time(), ) - db.session.add(new_record) - db.session.commit() + all_new_rows.append(new_usage_row) + + # Save new rows + db.session.add_all(all_new_rows) + db.session.commit() except (sqlalchemy.exc.OperationalError, sqlalchemy.exc.SQLAlchemyError) as err: - flask.current_app.logger.exception(err) db.session.rollback() + flask.current_app.logger.error( + "Usage collection during step 2: calculating and saving usage. Sending email..." + ) + + # Send email about error + email_message: flask_mail.Message = flask_mail.Message( + subject=error_subject, + recipients=[email_recipient], + body=error_body, + ) + send_email_with_retry(msg=email_message) raise + # 3. 
Send success email + flask.current_app.logger.info("Usage collection successful; Sending email.") + email_subject += " Usage records available for collection" + email_message: flask_mail.Message = flask_mail.Message( + subject=email_subject, + recipients=[email_recipient], + body=email_body, + ) + send_email_with_retry(msg=email_message) + + +@click.command("send-usage") +@click.option("--months", type=click.IntRange(min=1, max=12), required=True) +@flask.cli.with_appcontext +def send_usage(months): + """Get unit storage usage for the last x months and send in email.""" + # Imports + from dds_web.database import models + from dds_web.utils import current_time, page_query, send_email_with_retry + + # Email settings + email_recipient: str = flask.current_app.config.get("MAIL_DDS") + # -- Success + email_subject: str = "[SEND-USAGE CRONJOB]" + email_body: str = f"Here is the usage for the last {months} months.\n" + # -- Failure + error_subject: str = f"{email_subject} Error in send-usage cronjob" + error_body: str = ( + "There was an error in the cronjob 'send-usage', used for sending" + " information about the storage usage for each SciLifeLab unit. \n\n" + "What to do:\n" + "1. Check the logs on OpenSearch.\n" + "2. The DDS team should enter the backend container and run the command `flask send-usage`.\n" + "3. Check that you receive a new email indicating that the command was successful.\n" + ) + + end = current_time() + flask.current_app.logger.debug(f"Month now: {end.month}") + + start = end - relativedelta(months=months) + flask.current_app.logger.debug(f"Month {months} months ago: {start.month}") + + flask.current_app.logger.debug(f"Start: {start}") + flask.current_app.logger.debug(f"End: {end}") + + # CSV files to send + csv_file_names = [] + + have_failed = False # Flag to check if any csv files failed to be generated + + # Iterate through units + for unit in models.Unit.query: + # Generate CSV file name + csv_file_name = pathlib.Path( + f"{unit.public_id}_Usage_Months-{start.month}-to-{end.month}.csv" + ) + flask.current_app.logger.debug(f"CSV file name: {csv_file_name}") + + # Total usage for unit + total_usage = 0 + + # Open the csv file + try: + with csv_file_name.open(mode="w+", newline="") as file: + csv_writer = csv.writer(file) + csv_writer.writerow( + [ + "Project ID", + "Project Title", + "Project Created", + "Time Collected", + "Byte Hours", + ] + ) + + # Get usage rows connected to unit, that have been collected between X months ago and now + for usage_row, project_row in page_query( + db.session.query(models.Usage, models.Project) + .join(models.Project) + .filter( + models.Project.responsible_unit == unit, + models.Usage.time_collected.between(start, end), + ) + ): + # Increase total unit usage + total_usage += usage_row.usage + + # Save usage row info to csv file + csv_writer.writerow( + [ + project_row.public_id, + project_row.title, + project_row.date_created, + usage_row.time_collected, + usage_row.usage, + ] + ) + + # Save total + csv_writer.writerow(["--", "--", "--", "--", total_usage]) + except Exception as e: + # Catch exception, dont raise it. 
So it can continue to next unit + flask.current_app.logger.error(f"Error writing to CSV file: {e}") + + # Set flag to True, so we know at least 1 file have failed + have_failed = True + + csv_file_name.unlink(missing_ok=True) # Delete the csv file if it was created + + # Update email body with files with errors + error_body += "File(s) with errors: \n" + error_body += f"{csv_file_name}\n" + else: + # Add correctly created csv to list of files to send + csv_file_names.append(csv_file_name) + + # IF any csv files failed to be generated, send email about error + if have_failed: + email_message: flask_mail.Message = flask_mail.Message( + subject=error_subject, + recipients=[email_recipient], + body=error_body, + ) + send_email_with_retry(msg=email_message) + + # IF no csv files were generated, log error and return + if not csv_file_names: + flask.current_app.logger.error("No CSV files generated.") + return + + # Send email with the csv + flask.current_app.logger.info("Sending email with the CSV.") + email_subject += " Usage records attached in the present mail" + email_message: flask_mail.Message = flask_mail.Message( + subject=email_subject, + recipients=[email_recipient], + body=email_body, + ) + # add atachments + for csv_file in csv_file_names: + with csv_file.open("r") as file: + email_message.attach(filename=str(csv_file), content_type="text/csv", data=file.read()) + send_email_with_retry(msg=email_message) + + # delete the csv after sending the email + [csv_file.unlink() for csv_file in csv_file_names] + @click.command("stats") @flask.cli.with_appcontext From 293ff97b02bebfebfdf914b64261f0ed12da8d9d Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ina=20Od=C3=A9n=20=C3=96sterbo?= Date: Fri, 3 Nov 2023 09:30:26 +0100 Subject: [PATCH 096/114] test_user --- tests/api/test_user.py | 1 - 1 file changed, 1 deletion(-) diff --git a/tests/api/test_user.py b/tests/api/test_user.py index 615590e5a..e256e07c2 100644 --- a/tests/api/test_user.py +++ b/tests/api/test_user.py @@ -1,6 +1,5 @@ from datetime import datetime from datetime import timedelta -from tracemalloc import start import typing from unittest import mock import dds_web From 3b88d6f33ef6aed662d4e05784042661b42ed15e Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ina=20Od=C3=A9n=20=C3=96sterbo?= Date: Fri, 3 Nov 2023 09:32:31 +0100 Subject: [PATCH 097/114] test commands --- tests/test_commands.py | 373 ++++++++++++++++++++++++++++++++++++++++- 1 file changed, 367 insertions(+), 6 deletions(-) diff --git a/tests/test_commands.py b/tests/test_commands.py index acf659d59..2d5ea05ea 100644 --- a/tests/test_commands.py +++ b/tests/test_commands.py @@ -22,6 +22,7 @@ import flask_mail import freezegun import rich.prompt +import sqlalchemy # Own from dds_web.commands import ( @@ -32,13 +33,14 @@ set_available_to_expired, set_expired_to_archived, delete_invites, - quarterly_usage, + monthly_usage, collect_stats, lost_files_s3_db, update_unit, + send_usage, ) from dds_web.database import models -from dds_web import db +from dds_web import db, mail from dds_web.utils import current_time # Tools @@ -1413,12 +1415,176 @@ def test_delete_invite_timestamp_issue(client, cli_runner): assert len(db.session.query(models.Invite).all()) == 0 -# quarterly usage +# monthly usage -def test_quarterly_usage(client, cli_runner): - """Test the quarterly_usage cron job.""" - cli_runner.invoke(quarterly_usage) +def test_monthly_usage_mark_as_done(client, cli_runner, capfd: LogCaptureFixture): + """Projects should be marked as done.""" + # Imports + from tests.api.test_project 
import mock_sqlalchemyerror + + # Helper function - can be moved out if we need to use in other places later + def create_file_versions(project: models.Project): + """Create file versions for project.""" + # Create new file in project + new_file = models.File( + name=f"filename_{project.public_id}", + name_in_bucket=f"name_in_bucket_{project.public_id}", + subpath=f"filename/subpath", + size_original=15000, + size_stored=10000, + compressed=True, + salt="A" * 32, + public_key="B" * 64, + checksum="C" * 64, + ) + project.files.append(new_file) + + # Create new versions + for x in range(3): + new_version = models.Version( + size_stored=10000 * x, + time_uploaded=current_time() - timedelta(days=1), + time_deleted=current_time(), + ) + project.file_versions.append(new_version) + new_file.versions.append(new_version) + db.session.add(new_file) + + db.session.commit() + + # Check if there's a non active project + non_active_projects = models.Project.query.filter_by(is_active=False).all() + project: models.Project = None + if not non_active_projects: + # Make at least one project not active + project = models.Project.query.first() + project.is_active = False + db.session.commit() + else: + project = non_active_projects[0] + + # There needs to be file versions in non active project + if not project.file_versions: + create_file_versions(project=project) + assert project.file_versions + + # Get active project - to verify that nothing happens with it + project_active: models.Project = models.Project.query.filter_by(is_active=True).first() + + # There needs to be file versions in active project + if not project_active.file_versions: + create_file_versions(project=project_active) + assert project_active.file_versions + + # Set file versions as invoiced + for version in project.file_versions: + time_now = current_time() + version.time_deleted = time_now + version.time_invoiced = time_now + db.session.commit() + + # 1. Marking projects as done + # Run command - commit should result in sqlalchemy query + with mail.record_messages() as outbox1: + with patch("dds_web.db.session.commit", mock_sqlalchemyerror): + cli_runner.invoke(monthly_usage) + + # Check that non-active project is not marked as done + assert not project.done + assert not project_active.done + + # Verify correct logging + _, logs = capfd.readouterr() + assert ( + "Usage collection during step 1: marking projects as done. Sending email..." + in logs + ) + assert "Calculating usage..." not in logs + + # Error email should be sent + assert len(outbox1) == 1 + assert "[INVOICING CRONJOB] Error in monthly-usage cronjob" in outbox1[-1].subject + assert "What to do:" in outbox1[-1].body + + # No usage rows should have been saved + num_usage_rows = models.Usage.query.count() + assert num_usage_rows == 0 + + # 2. Calculating project usage + # Run command again - part 1 should be successful + with mail.record_messages() as outbox2: + with patch("dds_web.db.session.add_all", mock_sqlalchemyerror): + cli_runner.invoke(monthly_usage) + + # The non-active project should have been marked as done + assert project.done + assert not project_active.done + + # Verify correct logging + _, logs = capfd.readouterr() + assert ( + "Usage collection during step 1: marking projects as done. Sending email..." + not in logs + ) + assert "Calculating usage..." 
in logs
+        assert f"Project {project.public_id} byte hours:" not in logs
+        assert f"Project {project_active.public_id} byte hours:" in logs
+        assert (
+            "Usage collection during step 2: calculating and saving usage. Sending email..."
+            in logs
+        )
+
+        # Error email should have been sent
+        assert len(outbox2) == 1
+        assert "[INVOICING CRONJOB] Error in monthly-usage cronjob" in outbox2[-1].subject
+        assert "What to do:" in outbox2[-1].body
+
+        # Project versions should not be altered
+        assert project_active.file_versions
+        for version in project_active.file_versions:
+            assert version.time_deleted != version.time_invoiced
+
+        # No usage rows should've been saved
+        usage_row_1 = models.Usage.query.filter_by(project_id=project.id).one_or_none()
+        usage_row_2 = models.Usage.query.filter_by(project_id=project_active.id).one_or_none()
+        assert not usage_row_1
+        assert not usage_row_2
+
+    # 3. Send success email
+    # Run command a third time - part 1 and 2 should be successful
+    with mail.record_messages() as outbox3:
+        cli_runner.invoke(monthly_usage)
+
+        # Verify correct logging
+        _, logs = capfd.readouterr()
+        assert (
+            "Usage collection during step 1: marking projects as done. Sending email..."
+            not in logs
+        )
+        assert (
+            "Usage collection during step 2: calculating and saving usage. Sending email..."
+            not in logs
+        )
+        assert "Usage collection successful; Sending email." in logs
+
+        # Email should be sent
+        assert len(outbox3) == 1
+        assert "[INVOICING CRONJOB] Usage records available for collection" in outbox3[-1].subject
+        assert (
+            "The calculation of the monthly usage succeeded; The byte hours for all active projects have been saved to the database."
+            in outbox3[-1].body
+        )
+
+        # Project versions should have been altered
+        for version in project_active.file_versions:
+            assert version.time_deleted == version.time_invoiced
+
+        # Usage rows should have been saved for active project
+        usage_row_1 = models.Usage.query.filter_by(project_id=project.id).one_or_none()
+        usage_row_2 = models.Usage.query.filter_by(project_id=project_active.id).one_or_none()
+        assert not usage_row_1
+        assert usage_row_2
 
 
 # reporting units and users
@@ -1539,3 +1705,198 @@ def verify_reporting_row(row, time_date):
     reporting_rows = Reporting.query.all()
     for row in reporting_rows:
         verify_reporting_row(row=row, time_date=first_time if row.id == 1 else second_time)
+
+
+def test_send_usage(client, cli_runner, capfd: LogCaptureFixture):
+    """Test that the email with the usage report is sent."""
+    # Imports
+    from dds_web.database.models import Usage
+
+    # Get projects
+    projects = models.Project.query.filter(
+        models.Project.public_id.in_(
+            ["public_project_id", "second_public_project_id", "unit2testing"]
+        )
+    ).all()
+    project_1_unit_1 = next(p for p in projects if p.public_id == "public_project_id")
+    project_2_unit_1 = next(p for p in projects if p.public_id == "second_public_project_id")
+    project_1_unit_2 = next(p for p in projects if p.public_id == "unit2testing")
+
+    # Loop to populate usage table with fake entries across two years
+    january_2021 = datetime(2021, 1, 1)  # Start at Jan 2021
+    usage_list = []
+    for i in range(25):
+        time = january_2021 + relativedelta(months=i)
+        usage_1 = Usage(
+            project_id=project_1_unit_1.id,
+            usage=100,
+            time_collected=time,
+        )
+        usage_2 = Usage(
+            project_id=project_2_unit_1.id,
+            usage=100,
+            time_collected=time,
+        )
+        usage_3 = Usage(
+            project_id=project_1_unit_2.id,
+            usage=100,
+            time_collected=time,
+        )
+        usage_list.extend([usage_1, usage_2, usage_3])
+
+    
db.session.add_all(usage_list)
+    db.session.commit()
+    # Fake data included from Jan 2021 to Jan 2023
+
+    def run_command_and_check_output(months_to_test, start_time):
+        """
+        This function tests the output of the `send_usage` command by running the command with given arguments and checking the output.
+        It mocks the current time and checks that the email sent contains the correct subject and body.
+        It also checks that the csv files attached to the email have the correct names and content.
+
+        Return the csv files attached to the email.
+        """
+
+        with mail.record_messages() as outbox:
+            with patch("dds_web.utils.current_time") as current_time_func:  # Mock current time
+                current_time_func.return_value = start_time
+                cli_runner.invoke(send_usage, ["--months", months_to_test])
+
+        # Verify output and sent email
+        assert len(outbox) == 1
+        assert (
+            "[SEND-USAGE CRONJOB] Usage records attached in the present mail"
+            in outbox[-1].subject
+        )
+        assert f"Here is the usage for the last {months_to_test} months." in outbox[-1].body
+
+        end_time = start_time - relativedelta(months=months_to_test)
+        start_month = start_time.month
+        end_month = end_time.month
+        unit_1_id = project_1_unit_1.responsible_unit.public_id
+        unit_2_id = project_1_unit_2.responsible_unit.public_id
+        csv_1_name = f"{unit_1_id}_Usage_Months-{end_month}-to-{start_month}.csv"
+        csv_2_name = f"{unit_2_id}_Usage_Months-{end_month}-to-{start_month}.csv"
+
+        # check that the files no longer exist in the filesystem
+        assert not os.path.exists(csv_1_name)
+        assert not os.path.exists(csv_2_name)
+
+        _, logs = capfd.readouterr()
+        assert f"Month now: {start_month}" in logs
+        assert f"Month {months_to_test} months ago: {end_month}" in logs
+        assert f"CSV file name: {csv_1_name}" in logs
+        assert f"CSV file name: {csv_2_name}" in logs
+        assert "Sending email with the CSV." 
in logs
+
+        # Verify that the csv files are attached - two files, one for each unit
+        assert len(outbox[-1].attachments) == 2
+        for attachment, file_name in zip(outbox[-1].attachments, [csv_1_name, csv_2_name]):
+            assert attachment.filename == file_name
+            assert attachment.content_type == "text/csv"
+
+        # Check csv content
+        # retrieve the files from the email
+        csv_1 = outbox[-1].attachments[0].data
+        csv_2 = outbox[-1].attachments[1].data
+
+        # check that the header and the summary row at the end are correct
+        assert "Project ID,Project Title,Project Created,Time Collected,Byte Hours" in csv_1
+        assert "Project ID,Project Title,Project Created,Time Collected,Byte Hours" in csv_2
+        usage = 100.0 * months_to_test * 2  # 2 projects
+        assert f"--,--,--,--,{str(usage)}" in csv_1
+        usage = 100.0 * months_to_test
+        assert f"--,--,--,--,{str(usage)}" in csv_2
+
+        # check that the content is correct
+        import re
+
+        csv_1 = re.split(",|\n", csv_1)  # split by comma or newline
+        csv_2 = re.split(",|\n", csv_2)
+
+        # Projects and data are correct
+        assert csv_1.count("public_project_id") == months_to_test
+        assert csv_1.count("second_public_project_id") == months_to_test
+        assert csv_1.count("unit2testing") == 0  # this project is not in the unit
+        assert csv_1.count("100.0") == months_to_test * 2
+
+        assert csv_2.count("public_project_id") == 0  # this project is not in the unit
+        assert csv_2.count("second_public_project_id") == 0  # this project is not in the unit
+        assert csv_2.count("unit2testing") == months_to_test
+        assert csv_2.count("100.0") == months_to_test
+
+        # Check that the months included in the report are the correct ones
+        # move start time to the first day of the month
+        start_collected_time = start_time.replace(
+            day=1, hour=0, minute=0, second=0, microsecond=0
+        )
+        for i in range(months_to_test):
+            check_time_collected = start_collected_time - relativedelta(
+                months=i
+            )  # every month is included
+            assert f"{check_time_collected}" in csv_1
+            assert f"{check_time_collected}" in csv_2
+        return csv_1, csv_2
+
+    # Test that the command works for 4 months from Jan 2022
+    start_time = datetime(2022, 1, 15)  # Mid Jan 2022
+    csv_1, csv_2 = run_command_and_check_output(months_to_test=4, start_time=start_time)
+    # Hardcode the expected csv content to double check
+    # October, November, December, January (4 months)
+    assert "2021-10-01 00:00:00" in csv_1
+    assert "2021-11-01 00:00:00" in csv_1
+    assert "2021-12-01 00:00:00" in csv_1
+    assert "2022-01-01 00:00:00" in csv_1
+    assert "2021-10-01 00:00:00" in csv_2
+    assert "2021-11-01 00:00:00" in csv_2
+    assert "2021-12-01 00:00:00" in csv_2
+    assert "2022-01-01 00:00:00" in csv_2
+
+    # Test that the command works for 4 months from May 2022
+    start_time = datetime(2022, 5, 15)  # Mid May 2022
+    csv_1, csv_2 = run_command_and_check_output(months_to_test=4, start_time=start_time)
+    # Hardcode the expected csv content to double check
+    # February, March, April, May (4 months)
+    assert "2022-02-01 00:00:00" in csv_1
+    assert "2022-03-01 00:00:00" in csv_1
+    assert "2022-04-01 00:00:00" in csv_1
+    assert "2022-05-01 00:00:00" in csv_1
+    assert "2022-02-01 00:00:00" in csv_2
+    assert "2022-03-01 00:00:00" in csv_2
+    assert "2022-04-01 00:00:00" in csv_2
+    assert "2022-05-01 00:00:00" in csv_2
+
+    # Test that the command works for 4 months from Sept 2022
+    start_time = datetime(2022, 9, 15)  # Mid Sep 2022
+    csv_1, csv_2 = run_command_and_check_output(months_to_test=4, start_time=start_time)
+    # Hardcode the expected csv content to double check
+    # June, 
July, August, September (4 months)
+    assert "2022-06-01 00:00:00" in csv_1
+    assert "2022-07-01 00:00:00" in csv_1
+    assert "2022-08-01 00:00:00" in csv_1
+    assert "2022-09-01 00:00:00" in csv_1
+    assert "2022-06-01 00:00:00" in csv_2
+    assert "2022-07-01 00:00:00" in csv_2
+    assert "2022-08-01 00:00:00" in csv_2
+    assert "2022-09-01 00:00:00" in csv_2
+
+
+def test_send_usage_error_csv(client, cli_runner, capfd: LogCaptureFixture):
+    """Test that errors in the CSV handling are caught and reported."""
+
+    with mail.record_messages() as outbox:
+        with patch("csv.writer") as mock_writing_file:
+            mock_writing_file.side_effect = IOError()
+            cli_runner.invoke(send_usage, ["--months", 3])
+
+    _, logs = capfd.readouterr()
+    assert "Error writing to CSV file:" in logs  # error in writing the csv file
+    assert "No CSV files generated." in logs  # no csv files generated
+
+    # Check that no files were generated in the fs
+    assert not os.path.exists("*.csv")
+
+    # Verify error email: at least one email was sent
+    assert len(outbox) == 1
+    assert "[SEND-USAGE CRONJOB] Error in send-usage cronjob" in outbox[-1].subject
+    assert "There was an error in the cronjob 'send-usage'" in outbox[-1].body

From 64949a212d9f300889852f1930663d095c4d8029 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Ina=20Od=C3=A9n=20=C3=96sterbo?=
Date: Fri, 3 Nov 2023 09:34:08 +0100
Subject: [PATCH 098/114] models

---
 dds_web/database/models.py | 1 -
 1 file changed, 1 deletion(-)

diff --git a/dds_web/database/models.py b/dds_web/database/models.py
index 8d27ea779..104b413af 100644
--- a/dds_web/database/models.py
+++ b/dds_web/database/models.py
@@ -1053,7 +1053,6 @@ class Usage(db.Model):
 
     # Additional columns
     usage = db.Column(db.Float, nullable=False)
-    cost = db.Column(db.Float, nullable=False)
     time_collected = db.Column(
         db.DateTime(), unique=False, nullable=False, default=dds_web.utils.current_time
     )

From 88ced090e0e49bd807f399dfc412952b92359cee Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Ina=20Od=C3=A9n=20=C3=96sterbo?=
Date: Fri, 3 Nov 2023 09:38:37 +0100
Subject: [PATCH 099/114] migration

---
 ...610b382383_remove_cost_from_usage_table.py | 28 +++++++++++++++++++
 1 file changed, 28 insertions(+)
 create mode 100644 migrations/versions/3d610b382383_remove_cost_from_usage_table.py

diff --git a/migrations/versions/3d610b382383_remove_cost_from_usage_table.py b/migrations/versions/3d610b382383_remove_cost_from_usage_table.py
new file mode 100644
index 000000000..9f82034a2
--- /dev/null
+++ b/migrations/versions/3d610b382383_remove_cost_from_usage_table.py
@@ -0,0 +1,28 @@
+"""remove-cost-from-usage-table
+
+Revision ID: 3d610b382383
+Revises: f27c5988d640
+Create Date: 2023-11-03 08:36:35.425045
+
+"""
+from alembic import op
+import sqlalchemy as sa
+from sqlalchemy.dialects import mysql
+
+# revision identifiers, used by Alembic.
+revision = '3d610b382383'
+down_revision = 'f27c5988d640'
+branch_labels = None
+depends_on = None
+
+
+def upgrade():
+    # ### commands auto generated by Alembic - please adjust! ###
+    op.drop_column('usage', 'cost')
+    # ### end Alembic commands ###
+
+
+def downgrade():
+    # ### commands auto generated by Alembic - please adjust! 
### + op.add_column('usage', sa.Column('cost', mysql.FLOAT(), nullable=False)) + # ### end Alembic commands ### From cc371b4a5df5123166b73dd7d10baf2938d5097a Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ina=20Od=C3=A9n=20=C3=96sterbo?= Date: Fri, 3 Nov 2023 09:41:50 +0100 Subject: [PATCH 100/114] black --- .../versions/3d610b382383_remove_cost_from_usage_table.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/migrations/versions/3d610b382383_remove_cost_from_usage_table.py b/migrations/versions/3d610b382383_remove_cost_from_usage_table.py index 9f82034a2..6cc5db230 100644 --- a/migrations/versions/3d610b382383_remove_cost_from_usage_table.py +++ b/migrations/versions/3d610b382383_remove_cost_from_usage_table.py @@ -10,19 +10,19 @@ from sqlalchemy.dialects import mysql # revision identifiers, used by Alembic. -revision = '3d610b382383' -down_revision = 'f27c5988d640' +revision = "3d610b382383" +down_revision = "f27c5988d640" branch_labels = None depends_on = None def upgrade(): # ### commands auto generated by Alembic - please adjust! ### - op.drop_column('usage', 'cost') + op.drop_column("usage", "cost") # ### end Alembic commands ### def downgrade(): # ### commands auto generated by Alembic - please adjust! ### - op.add_column('usage', sa.Column('cost', mysql.FLOAT(), nullable=False)) + op.add_column("usage", sa.Column("cost", mysql.FLOAT(), nullable=False)) # ### end Alembic commands ### From d5dc85a20906b0bb8e0ad0912f32fd5ef2154112 Mon Sep 17 00:00:00 2001 From: rv0lt Date: Mon, 6 Nov 2023 11:23:24 +0100 Subject: [PATCH 101/114] Updated Pillow to the latest stable version --- requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements.txt b/requirements.txt index 984882c5f..1b85e6504 100644 --- a/requirements.txt +++ b/requirements.txt @@ -42,7 +42,7 @@ MarkupSafe==2.1.1 marshmallow==3.14.1 marshmallow-sqlalchemy==0.27.0 packaging==21.3 -Pillow==9.3.0 # required by qrcode +Pillow==10.1.0 # required by qrcode pycparser==2.21 PyMySQL==1.0.2 PyNaCl==1.5.0 From 51c42c330b7b858c234ba74a54e98bf5762f1542 Mon Sep 17 00:00:00 2001 From: rv0lt Date: Mon, 6 Nov 2023 14:12:42 +0100 Subject: [PATCH 102/114] sprintlog --- SPRINTLOG.md | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/SPRINTLOG.md b/SPRINTLOG.md index 72938a2d6..2fdf07d9b 100644 --- a/SPRINTLOG.md +++ b/SPRINTLOG.md @@ -322,3 +322,7 @@ _Nothing merged in CLI during this sprint_ - New endpoint `AddFailedFiles` for adding failed files to database ([#1472](https://github.com/ScilifelabDataCentre/dds_web/pull/1472)) - Change the generate usage command to monthly instead of quartely, and add the command to send a usage report specifying the number of months ([#1476](https://github.com/ScilifelabDataCentre/dds_web/pull/1476)) - New ADR record regarding OKR 2024 ([#1483](https://github.com/ScilifelabDataCentre/dds_web/pull/1483)) + +# 2023-11-6 - 2023-11-17 + +- Updated Pillow package version to address vulnerabities ([#1486](https://github.com/ScilifelabDataCentre/dds_web/pull/1486)) From fca2fc8a33b75faa6148f025a9fdc192312fe835 Mon Sep 17 00:00:00 2001 From: rv0lt Date: Tue, 7 Nov 2023 10:48:31 +0100 Subject: [PATCH 103/114] update package --- requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements.txt b/requirements.txt index 984882c5f..ee9253fb5 100644 --- a/requirements.txt +++ b/requirements.txt @@ -62,7 +62,7 @@ SQLAlchemy==1.4.31 structlog==21.5.0 tzdata==2021.5 tzlocal==4.1 -urllib3==1.26.8 +urllib3==1.26.18 visitor==0.1.3 
Werkzeug==2.2.3 wrapt==1.13.3 From 5e1037944650e90229b554a254d065f335483c36 Mon Sep 17 00:00:00 2001 From: rv0lt Date: Tue, 7 Nov 2023 10:57:40 +0100 Subject: [PATCH 104/114] sprintlog --- SPRINTLOG.md | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/SPRINTLOG.md b/SPRINTLOG.md index 72938a2d6..f534c7c97 100644 --- a/SPRINTLOG.md +++ b/SPRINTLOG.md @@ -322,3 +322,7 @@ _Nothing merged in CLI during this sprint_ - New endpoint `AddFailedFiles` for adding failed files to database ([#1472](https://github.com/ScilifelabDataCentre/dds_web/pull/1472)) - Change the generate usage command to monthly instead of quartely, and add the command to send a usage report specifying the number of months ([#1476](https://github.com/ScilifelabDataCentre/dds_web/pull/1476)) - New ADR record regarding OKR 2024 ([#1483](https://github.com/ScilifelabDataCentre/dds_web/pull/1483)) + +# 2023-11-6 - 2023-11-17 + +- Updated urllib3 package version to address vulnerabities ([#1487](https://github.com/ScilifelabDataCentre/dds_web/pull/1487)) From 889360d69a60f5cfff47a35cb69e7aeff25d7b46 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ina=20Od=C3=A9n=20=C3=96sterbo?= Date: Tue, 7 Nov 2023 16:20:59 +0100 Subject: [PATCH 105/114] package update --- .github/pull_request_template.md | 2 ++ 1 file changed, 2 insertions(+) diff --git a/.github/pull_request_template.md b/.github/pull_request_template.md index 27db39cc9..6b652dacf 100644 --- a/.github/pull_request_template.md +++ b/.github/pull_request_template.md @@ -28,6 +28,8 @@ What _type of change(s)_ does the PR contain? - [ ] Database change: _Remember the to include a new migration version, **or** explain here why it's not needed._ - [ ] Bug fix - [ ] Security Alert fix + - [ ] Package update + - [ ] Major version update - [ ] Documentation - [ ] Workflow - [ ] Tests **only** From ce068d2ff905b5319de9a8d91e34747f32f44667 Mon Sep 17 00:00:00 2001 From: rv0lt Date: Wed, 8 Nov 2023 10:56:00 +0100 Subject: [PATCH 106/114] patch update --- dds_web/static/package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/dds_web/static/package.json b/dds_web/static/package.json index d27e864bd..a5db2d8f5 100644 --- a/dds_web/static/package.json +++ b/dds_web/static/package.json @@ -37,7 +37,7 @@ "node-sass": "^7.0.3", "nodemon": "^2.0.22", "npm-run-all": "^4.1.5", - "postcss": "^8.4.28", + "postcss": "^8.4.31", "postcss-cli": "^9.1.0", "purgecss": "^4.1.3", "serve": "^13.0.4", From 01312512f57e59b389b118e1cbe91d80c56402c6 Mon Sep 17 00:00:00 2001 From: rv0lt Date: Wed, 8 Nov 2023 11:00:38 +0100 Subject: [PATCH 107/114] sprintlog --- SPRINTLOG.md | 1 + 1 file changed, 1 insertion(+) diff --git a/SPRINTLOG.md b/SPRINTLOG.md index 2fdf07d9b..2f2b06da2 100644 --- a/SPRINTLOG.md +++ b/SPRINTLOG.md @@ -326,3 +326,4 @@ _Nothing merged in CLI during this sprint_ # 2023-11-6 - 2023-11-17 - Updated Pillow package version to address vulnerabities ([#1486](https://github.com/ScilifelabDataCentre/dds_web/pull/1486)) +- Updated PostCss Node package to address vulnerabities ([#1489](https://github.com/ScilifelabDataCentre/dds_web/pull/1489)) From 60378f017f03c140f383cd13ba4fd9150681b6a6 Mon Sep 17 00:00:00 2001 From: rv0lt Date: Fri, 10 Nov 2023 10:23:23 +0100 Subject: [PATCH 108/114] fixing update --- dds_web/static/package-lock.json | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/dds_web/static/package-lock.json b/dds_web/static/package-lock.json index 9485590b5..168949c7b 100644 --- a/dds_web/static/package-lock.json +++ 
b/dds_web/static/package-lock.json @@ -21,7 +21,7 @@ "node-sass": "^7.0.3", "nodemon": "^2.0.22", "npm-run-all": "^4.1.5", - "postcss": "^8.4.28", + "postcss": "^8.4.31", "postcss-cli": "^9.1.0", "purgecss": "^4.1.3", "serve": "^13.0.4", @@ -3949,9 +3949,9 @@ } }, "node_modules/postcss": { - "version": "8.4.28", - "resolved": "https://registry.npmjs.org/postcss/-/postcss-8.4.28.tgz", - "integrity": "sha512-Z7V5j0cq8oEKyejIKfpD8b4eBy9cwW2JWPk0+fB1HOAMsfHbnAXLLS+PfVWlzMSLQaWttKDt607I0XHmpE67Vw==", + "version": "8.4.31", + "resolved": "https://registry.npmjs.org/postcss/-/postcss-8.4.31.tgz", + "integrity": "sha512-PS08Iboia9mts/2ygV3eLpY5ghnUcfLV/EXTOW1E2qYxJKGGBUtNjN76FYHnMs36RmARn41bC0AZmn+rR0OVpQ==", "dev": true, "funding": [ { @@ -9095,9 +9095,9 @@ "dev": true }, "postcss": { - "version": "8.4.28", - "resolved": "https://registry.npmjs.org/postcss/-/postcss-8.4.28.tgz", - "integrity": "sha512-Z7V5j0cq8oEKyejIKfpD8b4eBy9cwW2JWPk0+fB1HOAMsfHbnAXLLS+PfVWlzMSLQaWttKDt607I0XHmpE67Vw==", + "version": "8.4.31", + "resolved": "https://registry.npmjs.org/postcss/-/postcss-8.4.31.tgz", + "integrity": "sha512-PS08Iboia9mts/2ygV3eLpY5ghnUcfLV/EXTOW1E2qYxJKGGBUtNjN76FYHnMs36RmARn41bC0AZmn+rR0OVpQ==", "dev": true, "requires": { "nanoid": "^3.3.6", From ba8cacb6a210b48bc56a3c969d179be4c48203e6 Mon Sep 17 00:00:00 2001 From: rv0lt Date: Mon, 13 Nov 2023 13:02:57 +0100 Subject: [PATCH 109/114] packages updated --- dds_web/static/package-lock.json | 2530 +++++++++++++----------------- dds_web/static/package.json | 6 +- 2 files changed, 1078 insertions(+), 1458 deletions(-) diff --git a/dds_web/static/package-lock.json b/dds_web/static/package-lock.json index 168949c7b..482625544 100644 --- a/dds_web/static/package-lock.json +++ b/dds_web/static/package-lock.json @@ -18,13 +18,13 @@ }, "devDependencies": { "autoprefixer": "^10.4.15", - "node-sass": "^7.0.3", - "nodemon": "^2.0.22", + "node-sass": "^9.0.0", + "nodemon": "^3.0.1", "npm-run-all": "^4.1.5", "postcss": "^8.4.31", "postcss-cli": "^9.1.0", "purgecss": "^4.1.3", - "serve": "^13.0.4", + "serve": "^14.2.1", "stylelint": "^14.16.1", "stylelint-config-twbs-bootstrap": "^3.2.1" } @@ -308,9 +308,9 @@ "dev": true }, "node_modules/@zeit/schemas": { - "version": "2.6.0", - "resolved": "https://registry.npmjs.org/@zeit/schemas/-/schemas-2.6.0.tgz", - "integrity": "sha512-uUrgZ8AxS+Lio0fZKAipJjAh415JyrOZowliZAzmnJSsf7piVL5w+G0+gFJ0KSu3QRhvui/7zuvpLz03YjXAhg==", + "version": "2.29.0", + "resolved": "https://registry.npmjs.org/@zeit/schemas/-/schemas-2.29.0.tgz", + "integrity": "sha512-g5QiLIfbg3pLuYUJPlisNKY+epQJTcMDsOnVNkscrDP1oi7vmJnzOANYJI/1pZcVJ6umUkBv3aFtlg1UvUHGzA==", "dev": true }, "node_modules/abbrev": { @@ -370,14 +370,14 @@ } }, "node_modules/ajv": { - "version": "6.12.6", - "resolved": "https://registry.npmjs.org/ajv/-/ajv-6.12.6.tgz", - "integrity": "sha512-j3fVLgvTo527anyYyJOGTYJbG+vnnQYvE0m5mmkc1TK+nxAppkCLMIL0aZ4dblVCNoGShhm+kzE4ZUykBoMg4g==", + "version": "8.11.0", + "resolved": "https://registry.npmjs.org/ajv/-/ajv-8.11.0.tgz", + "integrity": "sha512-wGgprdCvMalC0BztXvitD2hC04YffAvtsUn93JbGXYLAtCUO4xd17mCCZQxUOItiBwZvJScWo8NIvQMQ71rdpg==", "dev": true, "dependencies": { "fast-deep-equal": "^3.1.1", - "fast-json-stable-stringify": "^2.0.0", - "json-schema-traverse": "^0.4.1", + "json-schema-traverse": "^1.0.0", + "require-from-string": "^2.0.2", "uri-js": "^4.2.2" }, "funding": { @@ -457,23 +457,10 @@ } ] }, - "node_modules/are-we-there-yet": { - "version": "2.0.0", - "resolved": 
"https://registry.npmjs.org/are-we-there-yet/-/are-we-there-yet-2.0.0.tgz", - "integrity": "sha512-Ci/qENmwHnsYo9xKIcUJN5LeDKdJ6R1Z1j9V/J5wyq8nh/mYPEpIKJbBZXtZjG04HiK7zV/p6Vs9952MrMeUIw==", - "dev": true, - "dependencies": { - "delegates": "^1.0.0", - "readable-stream": "^3.6.0" - }, - "engines": { - "node": ">=10" - } - }, "node_modules/arg": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/arg/-/arg-2.0.0.tgz", - "integrity": "sha512-XxNTUzKnz1ctK3ZIcI2XUPlD96wbHP2nGqkPKpvk/HNRlPveYrXIVSTk9m3LcqOgDPg3B1nMvdV/K8wZd7PG4w==", + "version": "5.0.2", + "resolved": "https://registry.npmjs.org/arg/-/arg-5.0.2.tgz", + "integrity": "sha512-PYjyFOLKQ9y57JvQ6QLo8dAgNqswh8M1RMJYdQduT6xbWSgK36P/Z/v+p888pM69jMMfS8Xd8F6I1kQ/I9HUGg==", "dev": true }, "node_modules/array-buffer-byte-length": { @@ -530,24 +517,6 @@ "node": ">=0.10.0" } }, - "node_modules/asn1": { - "version": "0.2.6", - "resolved": "https://registry.npmjs.org/asn1/-/asn1-0.2.6.tgz", - "integrity": "sha512-ix/FxPn0MDjeyJ7i/yoHGFt/EX6LyNbxSEhPPXODPL+KB0VPk86UYfL0lMdy+KCnv+fmvIzySwaK5COwqVbWTQ==", - "dev": true, - "dependencies": { - "safer-buffer": "~2.1.0" - } - }, - "node_modules/assert-plus": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/assert-plus/-/assert-plus-1.0.0.tgz", - "integrity": "sha512-NfJ4UzBCcQGLDlQq7nHxH+tv3kyZ0hHQqF5BO6J7tNJeP5do1llPr8dZ8zHonfhAu0PHAdMkSo+8o0wxg9lZWw==", - "dev": true, - "engines": { - "node": ">=0.8" - } - }, "node_modules/astral-regex": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/astral-regex/-/astral-regex-2.0.0.tgz", @@ -566,12 +535,6 @@ "node": "*" } }, - "node_modules/asynckit": { - "version": "0.4.0", - "resolved": "https://registry.npmjs.org/asynckit/-/asynckit-0.4.0.tgz", - "integrity": "sha512-Oei9OH4tRh0YqU3GxhX79dM/mwVgvbZJaSNaRk+bshkj0S5cfHcgYakreBjrHwatXKbz+IoIdYLxrKim2MjW0Q==", - "dev": true - }, "node_modules/autoprefixer": { "version": "10.4.15", "resolved": "https://registry.npmjs.org/autoprefixer/-/autoprefixer-10.4.15.tgz", @@ -621,36 +584,12 @@ "url": "https://github.com/sponsors/ljharb" } }, - "node_modules/aws-sign2": { - "version": "0.7.0", - "resolved": "https://registry.npmjs.org/aws-sign2/-/aws-sign2-0.7.0.tgz", - "integrity": "sha512-08kcGqnYf/YmjoRhfxyu+CLxBjUtHLXLXX/vUfx9l2LYzG3c1m61nrpyFUZI6zeS+Li/wWMMidD9KgrqtGq3mA==", - "dev": true, - "engines": { - "node": "*" - } - }, - "node_modules/aws4": { - "version": "1.12.0", - "resolved": "https://registry.npmjs.org/aws4/-/aws4-1.12.0.tgz", - "integrity": "sha512-NmWvPnx0F1SfrQbYwOi7OeaNGokp9XhzNioJ/CSBs8Qa4vxug81mhJEAVZwxXuBmYB5KDRfMq/F3RR0BIU7sWg==", - "dev": true - }, "node_modules/balanced-match": { "version": "1.0.2", "resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.2.tgz", "integrity": "sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw==", "dev": true }, - "node_modules/bcrypt-pbkdf": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/bcrypt-pbkdf/-/bcrypt-pbkdf-1.0.2.tgz", - "integrity": "sha512-qeFIXtP4MSoi6NLqO12WfqARWWuCKi2Rn/9hJLEmtB5yTNr9DqFWkJRCf2qShWzPeAMRnOgCrq0sg/KLv5ES9w==", - "dev": true, - "dependencies": { - "tweetnacl": "^0.14.3" - } - }, "node_modules/binary-extensions": { "version": "2.2.0", "resolved": "https://registry.npmjs.org/binary-extensions/-/binary-extensions-2.2.0.tgz", @@ -684,51 +623,142 @@ } }, "node_modules/boxen": { - "version": "5.1.2", - "resolved": "https://registry.npmjs.org/boxen/-/boxen-5.1.2.tgz", - "integrity": 
"sha512-9gYgQKXx+1nP8mP7CzFyaUARhg7D3n1dF/FnErWmu9l6JvGpNUN278h0aSb+QjoiKSWG+iZ3uHrcqk0qrY9RQQ==", + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/boxen/-/boxen-7.0.0.tgz", + "integrity": "sha512-j//dBVuyacJbvW+tvZ9HuH03fZ46QcaKvvhZickZqtB271DxJ7SNRSNxrV/dZX0085m7hISRZWbzWlJvx/rHSg==", "dev": true, "dependencies": { - "ansi-align": "^3.0.0", - "camelcase": "^6.2.0", - "chalk": "^4.1.0", - "cli-boxes": "^2.2.1", - "string-width": "^4.2.2", - "type-fest": "^0.20.2", - "widest-line": "^3.1.0", - "wrap-ansi": "^7.0.0" + "ansi-align": "^3.0.1", + "camelcase": "^7.0.0", + "chalk": "^5.0.1", + "cli-boxes": "^3.0.0", + "string-width": "^5.1.2", + "type-fest": "^2.13.0", + "widest-line": "^4.0.1", + "wrap-ansi": "^8.0.1" }, "engines": { - "node": ">=10" + "node": ">=14.16" }, "funding": { "url": "https://github.com/sponsors/sindresorhus" } }, + "node_modules/boxen/node_modules/ansi-regex": { + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-6.0.1.tgz", + "integrity": "sha512-n5M855fKb2SsfMIiFFoVrABHJC8QtHwVx+mHWP3QcEqBHYienj5dHSgjbxtC0WEZXYt4wcD6zrQElDPhFuZgfA==", + "dev": true, + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/chalk/ansi-regex?sponsor=1" + } + }, + "node_modules/boxen/node_modules/ansi-styles": { + "version": "6.2.1", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-6.2.1.tgz", + "integrity": "sha512-bN798gFfQX+viw3R7yrGWRqnrN2oRkEkUjjl4JNn4E8GxxbjtG3FbrEIIY3l8/hrwUwIeCZvi4QuOTP4MErVug==", + "dev": true, + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/chalk/ansi-styles?sponsor=1" + } + }, "node_modules/boxen/node_modules/camelcase": { - "version": "6.3.0", - "resolved": "https://registry.npmjs.org/camelcase/-/camelcase-6.3.0.tgz", - "integrity": "sha512-Gmy6FhYlCY7uOElZUSbxo2UCDH8owEk996gkbrpsgGtrJLM3J7jGxl9Ic7Qwwj4ivOE5AWZWRMecDdF7hqGjFA==", + "version": "7.0.1", + "resolved": "https://registry.npmjs.org/camelcase/-/camelcase-7.0.1.tgz", + "integrity": "sha512-xlx1yCK2Oc1APsPXDL2LdlNP6+uu8OCDdhOBSVT279M/S+y75O30C2VuD8T2ogdePBBl7PfPF4504tnLgX3zfw==", "dev": true, "engines": { - "node": ">=10" + "node": ">=14.16" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/boxen/node_modules/chalk": { + "version": "5.3.0", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-5.3.0.tgz", + "integrity": "sha512-dLitG79d+GV1Nb/VYcCDFivJeK1hiukt9QjRNVOsUtTy1rR1YJsmpGGTZ3qJos+uw7WmWF4wUwBd9jxjocFC2w==", + "dev": true, + "engines": { + "node": "^12.17.0 || ^14.13 || >=16.0.0" + }, + "funding": { + "url": "https://github.com/chalk/chalk?sponsor=1" + } + }, + "node_modules/boxen/node_modules/emoji-regex": { + "version": "9.2.2", + "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-9.2.2.tgz", + "integrity": "sha512-L18DaJsXSUk2+42pv8mLs5jJT2hqFkFE4j21wOmgbUqsZ2hL72NsUU785g9RXgo3s0ZNgVl42TiHp3ZtOv/Vyg==", + "dev": true + }, + "node_modules/boxen/node_modules/string-width": { + "version": "5.1.2", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-5.1.2.tgz", + "integrity": "sha512-HnLOCR3vjcY8beoNLtcjZ5/nxn2afmME6lhrDrebokqMap+XbeW8n9TXpPDOqdGK5qcI3oT0GKTW6wC7EMiVqA==", + "dev": true, + "dependencies": { + "eastasianwidth": "^0.2.0", + "emoji-regex": "^9.2.2", + "strip-ansi": "^7.0.1" + }, + "engines": { + "node": ">=12" }, "funding": { "url": "https://github.com/sponsors/sindresorhus" } }, + "node_modules/boxen/node_modules/strip-ansi": { + "version": "7.1.0", + "resolved": 
"https://registry.npmjs.org/strip-ansi/-/strip-ansi-7.1.0.tgz", + "integrity": "sha512-iq6eVVI64nQQTRYq2KtEg2d2uU7LElhTJwsH4YzIHZshxlgZms/wIc4VoDQTlG/IvVIrBKG06CrZnp0qv7hkcQ==", + "dev": true, + "dependencies": { + "ansi-regex": "^6.0.1" + }, + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/chalk/strip-ansi?sponsor=1" + } + }, "node_modules/boxen/node_modules/type-fest": { - "version": "0.20.2", - "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-0.20.2.tgz", - "integrity": "sha512-Ne+eE4r0/iWnpAxD852z3A+N0Bt5RN//NjJwRd2VFHEmrywxf5vsZlh4R6lixl6B+wz/8d+maTSAkN1FIkI3LQ==", + "version": "2.19.0", + "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-2.19.0.tgz", + "integrity": "sha512-RAH822pAdBgcNMAfWnCBU3CFZcfZ/i1eZjwFU/dsLKumyuuP3niueg2UAukXYF0E2AAoc82ZSSf9J0WQBinzHA==", "dev": true, "engines": { - "node": ">=10" + "node": ">=12.20" }, "funding": { "url": "https://github.com/sponsors/sindresorhus" } }, + "node_modules/boxen/node_modules/wrap-ansi": { + "version": "8.1.0", + "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-8.1.0.tgz", + "integrity": "sha512-si7QWI6zUMq56bESFvagtmzMdGOtoxfR+Sez11Mobfc7tm+VkUckk9bW2UeffTGVUbOksxmSw0AA2gs8g71NCQ==", + "dev": true, + "dependencies": { + "ansi-styles": "^6.1.0", + "string-width": "^5.0.1", + "strip-ansi": "^7.0.1" + }, + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/chalk/wrap-ansi?sponsor=1" + } + }, "node_modules/brace-expansion": { "version": "1.1.11", "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz", @@ -889,12 +919,6 @@ } ] }, - "node_modules/caseless": { - "version": "0.12.0", - "resolved": "https://registry.npmjs.org/caseless/-/caseless-0.12.0.tgz", - "integrity": "sha512-4tYFyifaFfGacoiObjJegolkwSU4xQNGbVgUiNYVUxbQ2x2lUsFvY4hVgVzGiIe6WLOPqycWXA40l+PWsxthUw==", - "dev": true - }, "node_modules/chalk": { "version": "4.1.2", "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", @@ -911,6 +935,21 @@ "url": "https://github.com/chalk/chalk?sponsor=1" } }, + "node_modules/chalk-template": { + "version": "0.4.0", + "resolved": "https://registry.npmjs.org/chalk-template/-/chalk-template-0.4.0.tgz", + "integrity": "sha512-/ghrgmhfY8RaSdeo43hNXxpoHAtxdbskUHjPpfqUWGttFgycUhYPGx3YZBCnUCvOa7Doivn1IZec3DEGFoMgLg==", + "dev": true, + "dependencies": { + "chalk": "^4.1.2" + }, + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/chalk/chalk-template?sponsor=1" + } + }, "node_modules/chokidar": { "version": "3.5.3", "resolved": "https://registry.npmjs.org/chokidar/-/chokidar-3.5.3.tgz", @@ -957,29 +996,32 @@ } }, "node_modules/cli-boxes": { - "version": "2.2.1", - "resolved": "https://registry.npmjs.org/cli-boxes/-/cli-boxes-2.2.1.tgz", - "integrity": "sha512-y4coMcylgSCdVinjiDBuR8PCC2bLjyGTwEmPb9NHR/QaNU6EUOXcTY/s6VjGMD6ENSEaeQYHCY0GNGS5jfMwPw==", + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/cli-boxes/-/cli-boxes-3.0.0.tgz", + "integrity": "sha512-/lzGpEWL/8PfI0BmBOPRwp0c/wFNX1RdUML3jK/RcSBA9T8mZDdQpqYBKtCFTOfQbwPqWEOpjqW+Fnayc0969g==", "dev": true, "engines": { - "node": ">=6" + "node": ">=10" }, "funding": { "url": "https://github.com/sponsors/sindresorhus" } }, "node_modules/clipboardy": { - "version": "2.3.0", - "resolved": "https://registry.npmjs.org/clipboardy/-/clipboardy-2.3.0.tgz", - "integrity": "sha512-mKhiIL2DrQIsuXMgBgnfEHOZOryC7kY7YO//TN6c63wlEm3NG5tz+YgY5rVi29KCmq/QQjKYvM7a19+MDOTHOQ==", + "version": "3.0.0", + "resolved": 
"https://registry.npmjs.org/clipboardy/-/clipboardy-3.0.0.tgz", + "integrity": "sha512-Su+uU5sr1jkUy1sGRpLKjKrvEOVXgSgiSInwa/qeID6aJ07yh+5NWc3h2QfjHjBnfX4LhtFcuAWKUsJ3r+fjbg==", "dev": true, "dependencies": { - "arch": "^2.1.1", - "execa": "^1.0.0", - "is-wsl": "^2.1.1" + "arch": "^2.2.0", + "execa": "^5.1.1", + "is-wsl": "^2.2.0" }, "engines": { - "node": ">=8" + "node": "^12.20.0 || ^14.13.1 || >=16.0.0" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" } }, "node_modules/cliui": { @@ -1029,18 +1071,6 @@ "integrity": "sha512-jeC1axXpnb0/2nn/Y1LPuLdgXBLH7aDcHu4KEKfqw3CUhX7ZpfBSlPKyqXE6btIgEzfWtrX3/tyBCaCvXvMkOw==", "dev": true }, - "node_modules/combined-stream": { - "version": "1.0.8", - "resolved": "https://registry.npmjs.org/combined-stream/-/combined-stream-1.0.8.tgz", - "integrity": "sha512-FQN4MRfuJeHf7cBbBMJFXhKSDq+2kAArBlmRBvcvFE5BB1HZKXtSFASDhdlz9zOYwxh8lDdnvmMOe/+5cdoEdg==", - "dev": true, - "dependencies": { - "delayed-stream": "~1.0.0" - }, - "engines": { - "node": ">= 0.8" - } - }, "node_modules/commander": { "version": "8.3.0", "resolved": "https://registry.npmjs.org/commander/-/commander-8.3.0.tgz", @@ -1063,16 +1093,16 @@ } }, "node_modules/compression": { - "version": "1.7.3", - "resolved": "https://registry.npmjs.org/compression/-/compression-1.7.3.tgz", - "integrity": "sha512-HSjyBG5N1Nnz7tF2+O7A9XUhyjru71/fwgNb7oIsEVHR0WShfs2tIS/EySLgiTe98aOK18YDlMXpzjCXY/n9mg==", + "version": "1.7.4", + "resolved": "https://registry.npmjs.org/compression/-/compression-1.7.4.tgz", + "integrity": "sha512-jaSIDzP9pZVS4ZfQ+TzvtiWhdpFhE2RDHz8QJkpX9SIpLq88VueF5jJw6t+6CUQcAoA6t+x89MLrWAqpfDE8iQ==", "dev": true, "dependencies": { "accepts": "~1.3.5", "bytes": "3.0.0", - "compressible": "~2.0.14", + "compressible": "~2.0.16", "debug": "2.6.9", - "on-headers": "~1.0.1", + "on-headers": "~1.0.2", "safe-buffer": "5.1.2", "vary": "~1.1.2" }, @@ -1197,18 +1227,6 @@ "node": ">=4" } }, - "node_modules/dashdash": { - "version": "1.14.1", - "resolved": "https://registry.npmjs.org/dashdash/-/dashdash-1.14.1.tgz", - "integrity": "sha512-jRFi8UDGo6j+odZiEpjazZaWqEal3w/basFjQHQEwVtZJGDpxbH1MeYluwCS8Xq5wmLJooDlMgvVarmWfGM44g==", - "dev": true, - "dependencies": { - "assert-plus": "^1.0.0" - }, - "engines": { - "node": ">=0.10" - } - }, "node_modules/datatables.net": { "version": "1.13.6", "resolved": "https://registry.npmjs.org/datatables.net/-/datatables.net-1.13.6.tgz", @@ -1302,15 +1320,6 @@ "url": "https://github.com/sponsors/ljharb" } }, - "node_modules/delayed-stream": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/delayed-stream/-/delayed-stream-1.0.0.tgz", - "integrity": "sha512-ZySD7Nf91aLB0RxL4KGrKHBXl7Eds1DAmEdcoVawXnLD7SDhpNgtuII2aAkg7a7QS41jxPSZ17p4VdGnMHk3MQ==", - "dev": true, - "engines": { - "node": ">=0.4.0" - } - }, "node_modules/delegates": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/delegates/-/delegates-1.0.0.tgz", @@ -1338,15 +1347,11 @@ "node": ">=8" } }, - "node_modules/ecc-jsbn": { - "version": "0.1.2", - "resolved": "https://registry.npmjs.org/ecc-jsbn/-/ecc-jsbn-0.1.2.tgz", - "integrity": "sha512-eh9O+hwRHNbG4BLTjEl3nw044CkGm5X6LoaCf7LPp7UU8Qrt47JYNi6nPX8xjW97TKGKm1ouctg0QSpZe9qrnw==", - "dev": true, - "dependencies": { - "jsbn": "~0.1.0", - "safer-buffer": "^2.1.0" - } + "node_modules/eastasianwidth": { + "version": "0.2.0", + "resolved": "https://registry.npmjs.org/eastasianwidth/-/eastasianwidth-0.2.0.tgz", + "integrity": "sha512-I88TYZWc9XiYHRQ4/3c5rjjfgkjhLyW2luGIheGERbNQ6OY7yTybanSpDXZa8y7VUP9YmDcYa+eyq4ca7iLqWA==", 
+ "dev": true }, "node_modules/electron-to-chromium": { "version": "1.4.504", @@ -1370,15 +1375,6 @@ "iconv-lite": "^0.6.2" } }, - "node_modules/end-of-stream": { - "version": "1.4.4", - "resolved": "https://registry.npmjs.org/end-of-stream/-/end-of-stream-1.4.4.tgz", - "integrity": "sha512-+uw1inIHVPQoaVuHzRyXd21icM+cnt4CzD5rW+NC1wjOUSTOs+Te7FOv7AhN7vS9x/oIyhLP5PR1H+phQAHu5Q==", - "dev": true, - "dependencies": { - "once": "^1.4.0" - } - }, "node_modules/env-paths": { "version": "2.2.1", "resolved": "https://registry.npmjs.org/env-paths/-/env-paths-2.2.1.tgz", @@ -1506,105 +1502,28 @@ } }, "node_modules/execa": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/execa/-/execa-1.0.0.tgz", - "integrity": "sha512-adbxcyWV46qiHyvSp50TKt05tB4tK3HcmF7/nxfAdhnox83seTDbwnaqKO4sXRy7roHAIFqJP/Rw/AuEbX61LA==", - "dev": true, - "dependencies": { - "cross-spawn": "^6.0.0", - "get-stream": "^4.0.0", - "is-stream": "^1.1.0", - "npm-run-path": "^2.0.0", - "p-finally": "^1.0.0", - "signal-exit": "^3.0.0", - "strip-eof": "^1.0.0" - }, - "engines": { - "node": ">=6" - } - }, - "node_modules/execa/node_modules/cross-spawn": { - "version": "6.0.5", - "resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-6.0.5.tgz", - "integrity": "sha512-eTVLrBSt7fjbDygz805pMnstIs2VTBNkRm0qxZd+M7A5XDdxVRWO5MxGBXZhjY4cqLYLdtrGqRf8mBPmzwSpWQ==", - "dev": true, - "dependencies": { - "nice-try": "^1.0.4", - "path-key": "^2.0.1", - "semver": "^5.5.0", - "shebang-command": "^1.2.0", - "which": "^1.2.9" - }, - "engines": { - "node": ">=4.8" - } - }, - "node_modules/execa/node_modules/path-key": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/path-key/-/path-key-2.0.1.tgz", - "integrity": "sha512-fEHGKCSmUSDPv4uoj8AlD+joPlq3peND+HRYyxFz4KPw4z926S/b8rIuFs2FYJg3BwsxJf6A9/3eIdLaYC+9Dw==", - "dev": true, - "engines": { - "node": ">=4" - } - }, - "node_modules/execa/node_modules/semver": { - "version": "5.7.2", - "resolved": "https://registry.npmjs.org/semver/-/semver-5.7.2.tgz", - "integrity": "sha512-cBznnQ9KjJqU67B52RMC65CMarK2600WFnbkcaiwWq3xy/5haFJlshgnpjovMVJ+Hff49d8GEn0b87C5pDQ10g==", - "dev": true, - "bin": { - "semver": "bin/semver" - } - }, - "node_modules/execa/node_modules/shebang-command": { - "version": "1.2.0", - "resolved": "https://registry.npmjs.org/shebang-command/-/shebang-command-1.2.0.tgz", - "integrity": "sha512-EV3L1+UQWGor21OmnvojK36mhg+TyIKDh3iFBKBohr5xeXIhNBcx8oWdgkTEEQ+BEFFYdLRuqMfd5L84N1V5Vg==", + "version": "5.1.1", + "resolved": "https://registry.npmjs.org/execa/-/execa-5.1.1.tgz", + "integrity": "sha512-8uSpZZocAZRBAPIEINJj3Lo9HyGitllczc27Eh5YYojjMFMn8yHMDMaUHE2Jqfq05D/wucwI4JGURyXt1vchyg==", "dev": true, "dependencies": { - "shebang-regex": "^1.0.0" + "cross-spawn": "^7.0.3", + "get-stream": "^6.0.0", + "human-signals": "^2.1.0", + "is-stream": "^2.0.0", + "merge-stream": "^2.0.0", + "npm-run-path": "^4.0.1", + "onetime": "^5.1.2", + "signal-exit": "^3.0.3", + "strip-final-newline": "^2.0.0" }, "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/execa/node_modules/shebang-regex": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/shebang-regex/-/shebang-regex-1.0.0.tgz", - "integrity": "sha512-wpoSFAxys6b2a2wHZ1XpDSgD7N9iVjg29Ph9uV/uaP9Ex/KXlkTZTeddxDPSYQpgvzKLGJke2UU0AzoGCjNIvQ==", - "dev": true, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/execa/node_modules/which": { - "version": "1.3.1", - "resolved": "https://registry.npmjs.org/which/-/which-1.3.1.tgz", - "integrity": 
"sha512-HxJdYWq1MTIQbJ3nw0cqssHoTNU267KlrDuGZ1WYlxDStUtKUhOaJmh112/TZmHxxUfuJqPXSOm7tDyas0OSIQ==", - "dev": true, - "dependencies": { - "isexe": "^2.0.0" + "node": ">=10" }, - "bin": { - "which": "bin/which" + "funding": { + "url": "https://github.com/sindresorhus/execa?sponsor=1" } }, - "node_modules/extend": { - "version": "3.0.2", - "resolved": "https://registry.npmjs.org/extend/-/extend-3.0.2.tgz", - "integrity": "sha512-fjquC59cD7CyW6urNXK0FBufkZcoiGG80wTuPujX590cB5Ttln20E2UB4S/WARVqhXffZl2LNgS+gQdPIIim/g==", - "dev": true - }, - "node_modules/extsprintf": { - "version": "1.3.0", - "resolved": "https://registry.npmjs.org/extsprintf/-/extsprintf-1.3.0.tgz", - "integrity": "sha512-11Ndz7Nv+mvAC1j0ktTa7fAb0vLyGGX+rMHNBYQviQDGU0Hw7lhctJANqbPhu9nV9/izT/IntTgZ7Im/9LJs9g==", - "dev": true, - "engines": [ - "node >=0.6.0" - ] - }, "node_modules/fast-deep-equal": { "version": "3.1.3", "resolved": "https://registry.npmjs.org/fast-deep-equal/-/fast-deep-equal-3.1.3.tgz", @@ -1627,12 +1546,6 @@ "node": ">=8.6.0" } }, - "node_modules/fast-json-stable-stringify": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/fast-json-stable-stringify/-/fast-json-stable-stringify-2.1.0.tgz", - "integrity": "sha512-lhd/wF+Lk98HZoTCtlVraHtfh5XYijIjalXck7saUtuanSDyLMxnHhSXEDJqHxD7msR8D0uCmqlkwjCV8xvwHw==", - "dev": true - }, "node_modules/fast-url-parser": { "version": "1.1.3", "resolved": "https://registry.npmjs.org/fast-url-parser/-/fast-url-parser-1.1.3.tgz", @@ -1726,29 +1639,6 @@ "is-callable": "^1.1.3" } }, - "node_modules/forever-agent": { - "version": "0.6.1", - "resolved": "https://registry.npmjs.org/forever-agent/-/forever-agent-0.6.1.tgz", - "integrity": "sha512-j0KLYPhm6zeac4lz3oJ3o65qvgQCcPubiyotZrXqEaG4hNagNYO8qdlUrX5vwqv9ohqeT/Z3j6+yW067yWWdUw==", - "dev": true, - "engines": { - "node": "*" - } - }, - "node_modules/form-data": { - "version": "2.3.3", - "resolved": "https://registry.npmjs.org/form-data/-/form-data-2.3.3.tgz", - "integrity": "sha512-1lLKB2Mu3aGP1Q/2eCOx0fNbRMe7XdwktwOruhfqqd0rIJWwN4Dh+E3hrPSlDCXnSR7UtZ1N38rVXm+6+MEhJQ==", - "dev": true, - "dependencies": { - "asynckit": "^0.4.0", - "combined-stream": "^1.0.6", - "mime-types": "^2.1.12" - }, - "engines": { - "node": ">= 0.12" - } - }, "node_modules/fraction.js": { "version": "4.2.1", "resolved": "https://registry.npmjs.org/fraction.js/-/fraction.js-4.2.1.tgz", @@ -1841,26 +1731,6 @@ "url": "https://github.com/sponsors/ljharb" } }, - "node_modules/gauge": { - "version": "3.0.2", - "resolved": "https://registry.npmjs.org/gauge/-/gauge-3.0.2.tgz", - "integrity": "sha512-+5J6MS/5XksCuXq++uFRsnUd7Ovu1XenbeuIuNRJxYWjgQbPuFhT14lAvsWfqfAmnwluf1OwMjz39HjfLPci0Q==", - "dev": true, - "dependencies": { - "aproba": "^1.0.3 || ^2.0.0", - "color-support": "^1.1.2", - "console-control-strings": "^1.0.0", - "has-unicode": "^2.0.1", - "object-assign": "^4.1.1", - "signal-exit": "^3.0.0", - "string-width": "^4.2.3", - "strip-ansi": "^6.0.1", - "wide-align": "^1.1.2" - }, - "engines": { - "node": ">=10" - } - }, "node_modules/gaze": { "version": "1.1.3", "resolved": "https://registry.npmjs.org/gaze/-/gaze-1.1.3.tgz", @@ -1907,15 +1777,15 @@ } }, "node_modules/get-stream": { - "version": "4.1.0", - "resolved": "https://registry.npmjs.org/get-stream/-/get-stream-4.1.0.tgz", - "integrity": "sha512-GMat4EJ5161kIy2HevLlr4luNjBgvmj413KaQA7jt4V8B4RDsfpHk7WQ9GVqfYyyx8OS/L66Kox+rJRNklLK7w==", + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/get-stream/-/get-stream-6.0.1.tgz", + "integrity": 
"sha512-ts6Wi+2j3jQjqi70w5AlN8DFnkSwC+MqmxEzdEALB2qXZYV3X/b1CTfgPLGJNMeAWxdPfU8FO1ms3NUfaHCPYg==", "dev": true, - "dependencies": { - "pump": "^3.0.0" - }, "engines": { - "node": ">=6" + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" } }, "node_modules/get-symbol-description": { @@ -1934,15 +1804,6 @@ "url": "https://github.com/sponsors/ljharb" } }, - "node_modules/getpass": { - "version": "0.1.7", - "resolved": "https://registry.npmjs.org/getpass/-/getpass-0.1.7.tgz", - "integrity": "sha512-0fzj9JxOLfJ+XGLhR8ze3unN0KZCgZwiSSDz168VERjK8Wl8kVSdcu2kspd4s4wtAa1y/qrVRiAA0WclVsu0ng==", - "dev": true, - "dependencies": { - "assert-plus": "^1.0.0" - } - }, "node_modules/glob": { "version": "7.2.3", "resolved": "https://registry.npmjs.org/glob/-/glob-7.2.3.tgz", @@ -2118,29 +1979,6 @@ "integrity": "sha512-RbJ5/jmFcNNCcDV5o9eTnBLJ/HszWV0P73bc+Ff4nS/rJj+YaS6IGyiOL0VoBYX+l1Wrl3k63h/KrH+nhJ0XvQ==", "dev": true }, - "node_modules/har-schema": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/har-schema/-/har-schema-2.0.0.tgz", - "integrity": "sha512-Oqluz6zhGX8cyRaTQlFMPw80bSJVG2x/cFb8ZPhUILGgHka9SsokCCOQgpveePerqidZOrT14ipqfJb7ILcW5Q==", - "dev": true, - "engines": { - "node": ">=4" - } - }, - "node_modules/har-validator": { - "version": "5.1.5", - "resolved": "https://registry.npmjs.org/har-validator/-/har-validator-5.1.5.tgz", - "integrity": "sha512-nmT2T0lljbxdQZfspsno9hgrG3Uir6Ks5afism62poxqBM6sDnMEuPmzTq8XN0OEwqKLLdh1jQI3qyE66Nzb3w==", - "deprecated": "this library is no longer supported", - "dev": true, - "dependencies": { - "ajv": "^6.12.3", - "har-schema": "^2.0.0" - }, - "engines": { - "node": ">=6" - } - }, "node_modules/hard-rejection": { "version": "2.1.0", "resolved": "https://registry.npmjs.org/hard-rejection/-/hard-rejection-2.1.0.tgz", @@ -2281,21 +2119,6 @@ "node": ">= 6" } }, - "node_modules/http-signature": { - "version": "1.2.0", - "resolved": "https://registry.npmjs.org/http-signature/-/http-signature-1.2.0.tgz", - "integrity": "sha512-CAbnr6Rz4CYQkLYUtSNXxQPUH2gK8f3iWexVlsnMeD+GjlsQ0Xsy1cOX+mN3dtxYomRy21CiOzU8Uhw6OwncEQ==", - "dev": true, - "dependencies": { - "assert-plus": "^1.0.0", - "jsprim": "^1.2.2", - "sshpk": "^1.7.0" - }, - "engines": { - "node": ">=0.8", - "npm": ">=1.3.7" - } - }, "node_modules/https-proxy-agent": { "version": "5.0.1", "resolved": "https://registry.npmjs.org/https-proxy-agent/-/https-proxy-agent-5.0.1.tgz", @@ -2309,6 +2132,15 @@ "node": ">= 6" } }, + "node_modules/human-signals": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/human-signals/-/human-signals-2.1.0.tgz", + "integrity": "sha512-B4FFZ6q/T2jhhksgkbEW3HBvWIfDW85snkQgawt07S7J5QXTk6BkNV+0yAeZrM5QpMAdYlocGoljn0sJ/WQkFw==", + "dev": true, + "engines": { + "node": ">=10.17.0" + } + }, "node_modules/humanize-ms": { "version": "1.2.1", "resolved": "https://registry.npmjs.org/humanize-ms/-/humanize-ms-1.2.1.tgz", @@ -2650,6 +2482,18 @@ "node": ">=0.10.0" } }, + "node_modules/is-port-reachable": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/is-port-reachable/-/is-port-reachable-4.0.0.tgz", + "integrity": "sha512-9UoipoxYmSk6Xy7QFgRv2HDyaysmgSG75TFQs6S+3pDM7ZhKTF/bskZV+0UlABHzKjNVhPjYCLfeZUEg1wXxig==", + "dev": true, + "engines": { + "node": "^12.20.0 || ^14.13.1 || >=16.0.0" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, "node_modules/is-regex": { "version": "1.1.4", "resolved": "https://registry.npmjs.org/is-regex/-/is-regex-1.1.4.tgz", @@ -2679,12 +2523,15 @@ } }, 
"node_modules/is-stream": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/is-stream/-/is-stream-1.1.0.tgz", - "integrity": "sha512-uQPm8kcs47jx38atAcWTVxyltQYoPT68y9aWYdV6yWXSyW8mzSat0TL6CiWdZeCdF3KrAvpVtnHbTv4RN+rqdQ==", + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/is-stream/-/is-stream-2.0.1.tgz", + "integrity": "sha512-hFoiJiTl63nn+kstHGBtewWSKnQLpyb155KHheA1l39uvtO9nWIop1p3udqPcUd/xbF1VLMO4n7OI6p7RbngDg==", "dev": true, "engines": { - "node": ">=0.10.0" + "node": ">=8" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" } }, "node_modules/is-string": { @@ -2732,12 +2579,6 @@ "url": "https://github.com/sponsors/ljharb" } }, - "node_modules/is-typedarray": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/is-typedarray/-/is-typedarray-1.0.0.tgz", - "integrity": "sha512-cyA56iCMHAh5CdzjJIa4aohJyeO1YbwLi3Jc35MmRU6poroFjIGZzUzupGiRPOjgHg9TLu43xbpwXk523fMxKA==", - "dev": true - }, "node_modules/is-weakref": { "version": "1.0.2", "resolved": "https://registry.npmjs.org/is-weakref/-/is-weakref-1.0.2.tgz", @@ -2774,12 +2615,6 @@ "integrity": "sha512-RHxMLp9lnKHGHRng9QFhRCMbYAcVpn69smSGcq3f36xjgVVWThj4qqLbTLlq7Ssj8B+fIQ1EuCEGI2lKsyQeIw==", "dev": true }, - "node_modules/isstream": { - "version": "0.1.2", - "resolved": "https://registry.npmjs.org/isstream/-/isstream-0.1.2.tgz", - "integrity": "sha512-Yljz7ffyPbrLpLngrMtZ7NduUgVvi6wG9RJ9IUcyCd59YQ911PBJphODUcbOVbqYfxe1wuYf/LJ8PauMRwsM/g==", - "dev": true - }, "node_modules/jquery": { "version": "3.7.1", "resolved": "https://registry.npmjs.org/jquery/-/jquery-3.7.1.tgz", @@ -2797,12 +2632,6 @@ "integrity": "sha512-RdJUflcE3cUzKiMqQgsCu06FPu9UdIJO0beYbPhHN4k6apgJtifcoCtT9bcxOpYBtpD2kCM6Sbzg4CausW/PKQ==", "dev": true }, - "node_modules/jsbn": { - "version": "0.1.1", - "resolved": "https://registry.npmjs.org/jsbn/-/jsbn-0.1.1.tgz", - "integrity": "sha512-UVU9dibq2JcFWxQPA6KCqj5O42VOmAY3zQUfEKxU0KpTGXwNoCjkX1e13eHNvw/xPynt6pU0rZ1htjWTNTSXsg==", - "dev": true - }, "node_modules/json-buffer": { "version": "3.0.1", "resolved": "https://registry.npmjs.org/json-buffer/-/json-buffer-3.0.1.tgz", @@ -2821,22 +2650,10 @@ "integrity": "sha512-xyFwyhro/JEof6Ghe2iz2NcXoj2sloNsWr/XsERDK/oiPCfaNhl5ONfp+jQdAZRQQ0IJWNzH9zIZF7li91kh2w==", "dev": true }, - "node_modules/json-schema": { - "version": "0.4.0", - "resolved": "https://registry.npmjs.org/json-schema/-/json-schema-0.4.0.tgz", - "integrity": "sha512-es94M3nTIfsEPisRafak+HDLfHXnKBhV3vU5eqPcS3flIWqcxJWgXHXiey3YrpaNsanY5ei1VoYEbOzijuq9BA==", - "dev": true - }, "node_modules/json-schema-traverse": { - "version": "0.4.1", - "resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-0.4.1.tgz", - "integrity": "sha512-xbbCH5dCYU5T8LcEhhuh7HJ88HXuW3qsI3Y0zOZFKfZEHcpWiHU/Jxzk629Brsab/mMiHQti9wMP+845RPe3Vg==", - "dev": true - }, - "node_modules/json-stringify-safe": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/json-stringify-safe/-/json-stringify-safe-5.0.1.tgz", - "integrity": "sha512-ZClg6AaYvamvYEE82d3Iyd3vSSIjQ+odgjaTzRuO3s7toCdFKczob2i0zCh7JE8kWn17yvAWhUVxvqGwUalsRA==", + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-1.0.0.tgz", + "integrity": "sha512-NM8/P9n3XjXhIZn1lLhkFaACTOURQXjWhV4BA/RnOv8xvgqtqpAX9IO4mRQxSx1Rlo4tqzeqb0sOlruaOy3dug==", "dev": true }, "node_modules/jsonfile": { @@ -2851,21 +2668,6 @@ "graceful-fs": "^4.1.6" } }, - "node_modules/jsprim": { - "version": "1.4.2", - "resolved": 
"https://registry.npmjs.org/jsprim/-/jsprim-1.4.2.tgz", - "integrity": "sha512-P2bSOMAc/ciLz6DzgjVlGJP9+BrJWu5UDGK70C2iweC5QBIeFf0ZXRvGjEj2uYgrY2MkAAhsSWHDWlFtEroZWw==", - "dev": true, - "dependencies": { - "assert-plus": "1.0.0", - "extsprintf": "1.3.0", - "json-schema": "0.4.0", - "verror": "1.10.0" - }, - "engines": { - "node": ">=0.6.0" - } - }, "node_modules/keyv": { "version": "4.5.3", "resolved": "https://registry.npmjs.org/keyv/-/keyv-4.5.3.tgz", @@ -3049,6 +2851,12 @@ "url": "https://github.com/sponsors/sindresorhus" } }, + "node_modules/merge-stream": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/merge-stream/-/merge-stream-2.0.0.tgz", + "integrity": "sha512-abv/qOcuPfk3URPfDzmZU1LKmuw8kT+0nIHvKrKgFrwifol/doWcdA4ZqsWQ8ENrFKkd67Mfpo/LovbIUsbt3w==", + "dev": true + }, "node_modules/merge2": { "version": "1.4.1", "resolved": "https://registry.npmjs.org/merge2/-/merge2-1.4.1.tgz", @@ -3092,6 +2900,15 @@ "node": ">= 0.6" } }, + "node_modules/mimic-fn": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/mimic-fn/-/mimic-fn-2.1.0.tgz", + "integrity": "sha512-OqbOk5oEQeAZ8WXWydlu9HJjz9WVdEIvamMCcXmuqUYjTknH/sqsWvhQ3vgwKFRR1HpjvNBKQ37nbJgYzGqGcg==", + "dev": true, + "engines": { + "node": ">=6" + } + }, "node_modules/min-indent": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/min-indent/-/min-indent-1.0.1.tgz", @@ -3303,97 +3120,318 @@ "bin": { "node-gyp": "bin/node-gyp.js" }, - "engines": { - "node": ">= 10.12.0" + "engines": { + "node": ">= 10.12.0" + } + }, + "node_modules/node-gyp/node_modules/are-we-there-yet": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/are-we-there-yet/-/are-we-there-yet-3.0.1.tgz", + "integrity": "sha512-QZW4EDmGwlYur0Yyf/b2uGucHQMa8aFUP7eu9ddR73vvhFyt4V0Vl3QHPcTNJ8l6qYOBdxgXdnBXQrHilfRQBg==", + "dev": true, + "dependencies": { + "delegates": "^1.0.0", + "readable-stream": "^3.6.0" + }, + "engines": { + "node": "^12.13.0 || ^14.15.0 || >=16.0.0" + } + }, + "node_modules/node-gyp/node_modules/gauge": { + "version": "4.0.4", + "resolved": "https://registry.npmjs.org/gauge/-/gauge-4.0.4.tgz", + "integrity": "sha512-f9m+BEN5jkg6a0fZjleidjN51VE1X+mPFQ2DJ0uv1V39oCLCbsGe6yjbBnp7eK7z/+GAon99a3nHuqbuuthyPg==", + "dev": true, + "dependencies": { + "aproba": "^1.0.3 || ^2.0.0", + "color-support": "^1.1.3", + "console-control-strings": "^1.1.0", + "has-unicode": "^2.0.1", + "signal-exit": "^3.0.7", + "string-width": "^4.2.3", + "strip-ansi": "^6.0.1", + "wide-align": "^1.1.5" + }, + "engines": { + "node": "^12.13.0 || ^14.15.0 || >=16.0.0" + } + }, + "node_modules/node-gyp/node_modules/npmlog": { + "version": "6.0.2", + "resolved": "https://registry.npmjs.org/npmlog/-/npmlog-6.0.2.tgz", + "integrity": "sha512-/vBvz5Jfr9dT/aFWd0FIRf+T/Q2WBsLENygUaFUqstqsycmZAP/t5BvFJTK0viFmSUxiUKTUplWy5vt+rvKIxg==", + "dev": true, + "dependencies": { + "are-we-there-yet": "^3.0.0", + "console-control-strings": "^1.1.0", + "gauge": "^4.0.3", + "set-blocking": "^2.0.0" + }, + "engines": { + "node": "^12.13.0 || ^14.15.0 || >=16.0.0" + } + }, + "node_modules/node-releases": { + "version": "2.0.13", + "resolved": "https://registry.npmjs.org/node-releases/-/node-releases-2.0.13.tgz", + "integrity": "sha512-uYr7J37ae/ORWdZeQ1xxMJe3NtdmqMC/JZK+geofDrkLUApKRHPd18/TxtBOJ4A0/+uUIliorNrfYV6s1b02eQ==", + "dev": true + }, + "node_modules/node-sass": { + "version": "9.0.0", + "resolved": "https://registry.npmjs.org/node-sass/-/node-sass-9.0.0.tgz", + "integrity": 
"sha512-yltEuuLrfH6M7Pq2gAj5B6Zm7m+gdZoG66wTqG6mIZV/zijq3M2OO2HswtT6oBspPyFhHDcaxWpsBm0fRNDHPg==", + "dev": true, + "hasInstallScript": true, + "dependencies": { + "async-foreach": "^0.1.3", + "chalk": "^4.1.2", + "cross-spawn": "^7.0.3", + "gaze": "^1.0.0", + "get-stdin": "^4.0.1", + "glob": "^7.0.3", + "lodash": "^4.17.15", + "make-fetch-happen": "^10.0.4", + "meow": "^9.0.0", + "nan": "^2.17.0", + "node-gyp": "^8.4.1", + "sass-graph": "^4.0.1", + "stdout-stream": "^1.4.0", + "true-case-path": "^2.2.1" + }, + "bin": { + "node-sass": "bin/node-sass" + }, + "engines": { + "node": ">=16" + } + }, + "node_modules/node-sass/node_modules/@npmcli/fs": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/@npmcli/fs/-/fs-2.1.2.tgz", + "integrity": "sha512-yOJKRvohFOaLqipNtwYB9WugyZKhC/DZC4VYPmpaCzDBrA8YpK3qHZ8/HGscMnE4GqbkLNuVcCnxkeQEdGt6LQ==", + "dev": true, + "dependencies": { + "@gar/promisify": "^1.1.3", + "semver": "^7.3.5" + }, + "engines": { + "node": "^12.13.0 || ^14.15.0 || >=16.0.0" + } + }, + "node_modules/node-sass/node_modules/@npmcli/move-file": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/@npmcli/move-file/-/move-file-2.0.1.tgz", + "integrity": "sha512-mJd2Z5TjYWq/ttPLLGqArdtnC74J6bOzg4rMDnN+p1xTacZ2yPRCk2y0oSWQtygLR9YVQXgOcONrwtnk3JupxQ==", + "deprecated": "This functionality has been moved to @npmcli/fs", + "dev": true, + "dependencies": { + "mkdirp": "^1.0.4", + "rimraf": "^3.0.2" + }, + "engines": { + "node": "^12.13.0 || ^14.15.0 || >=16.0.0" + } + }, + "node_modules/node-sass/node_modules/@tootallnate/once": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/@tootallnate/once/-/once-2.0.0.tgz", + "integrity": "sha512-XCuKFP5PS55gnMVu3dty8KPatLqUoy/ZYzDzAGCQ8JNFCkLXzmI7vNHCR+XpbZaMWQK/vQubr7PkYq8g470J/A==", + "dev": true, + "engines": { + "node": ">= 10" + } + }, + "node_modules/node-sass/node_modules/brace-expansion": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.0.1.tgz", + "integrity": "sha512-XnAIvQ8eM+kC6aULx6wuQiwVsnzsi9d3WxzV3FpWTGA19F621kwdbsAcFKXgKUHZWsy+mY6iL1sHTxWEFCytDA==", + "dev": true, + "dependencies": { + "balanced-match": "^1.0.0" + } + }, + "node_modules/node-sass/node_modules/cacache": { + "version": "16.1.3", + "resolved": "https://registry.npmjs.org/cacache/-/cacache-16.1.3.tgz", + "integrity": "sha512-/+Emcj9DAXxX4cwlLmRI9c166RuL3w30zp4R7Joiv2cQTtTtA+jeuCAjH3ZlGnYS3tKENSrKhAzVVP9GVyzeYQ==", + "dev": true, + "dependencies": { + "@npmcli/fs": "^2.1.0", + "@npmcli/move-file": "^2.0.0", + "chownr": "^2.0.0", + "fs-minipass": "^2.1.0", + "glob": "^8.0.1", + "infer-owner": "^1.0.4", + "lru-cache": "^7.7.1", + "minipass": "^3.1.6", + "minipass-collect": "^1.0.2", + "minipass-flush": "^1.0.5", + "minipass-pipeline": "^1.2.4", + "mkdirp": "^1.0.4", + "p-map": "^4.0.0", + "promise-inflight": "^1.0.1", + "rimraf": "^3.0.2", + "ssri": "^9.0.0", + "tar": "^6.1.11", + "unique-filename": "^2.0.0" + }, + "engines": { + "node": "^12.13.0 || ^14.15.0 || >=16.0.0" + } + }, + "node_modules/node-sass/node_modules/cacache/node_modules/glob": { + "version": "8.1.0", + "resolved": "https://registry.npmjs.org/glob/-/glob-8.1.0.tgz", + "integrity": "sha512-r8hpEjiQEYlF2QU0df3dS+nxxSIreXQS1qRhMJM0Q5NDdR386C7jb7Hwwod8Fgiuex+k0GFjgft18yvxm5XoCQ==", + "dev": true, + "dependencies": { + "fs.realpath": "^1.0.0", + "inflight": "^1.0.4", + "inherits": "2", + "minimatch": "^5.0.1", + "once": "^1.3.0" + }, + "engines": { + "node": ">=12" + }, + "funding": { + "url": 
"https://github.com/sponsors/isaacs" + } + }, + "node_modules/node-sass/node_modules/http-proxy-agent": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/http-proxy-agent/-/http-proxy-agent-5.0.0.tgz", + "integrity": "sha512-n2hY8YdoRE1i7r6M0w9DIw5GgZN0G25P8zLCRQ8rjXtTU3vsNFBI/vWK/UIeE6g5MUUz6avwAPXmL6Fy9D/90w==", + "dev": true, + "dependencies": { + "@tootallnate/once": "2", + "agent-base": "6", + "debug": "4" + }, + "engines": { + "node": ">= 6" + } + }, + "node_modules/node-sass/node_modules/lru-cache": { + "version": "7.18.3", + "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-7.18.3.tgz", + "integrity": "sha512-jumlc0BIUrS3qJGgIkWZsyfAM7NCWiBcCDhnd+3NNM5KbBmLTgHVfWBcg6W+rLUsIpzpERPsvwUP7CckAQSOoA==", + "dev": true, + "engines": { + "node": ">=12" + } + }, + "node_modules/node-sass/node_modules/make-fetch-happen": { + "version": "10.2.1", + "resolved": "https://registry.npmjs.org/make-fetch-happen/-/make-fetch-happen-10.2.1.tgz", + "integrity": "sha512-NgOPbRiaQM10DYXvN3/hhGVI2M5MtITFryzBGxHM5p4wnFxsVCbxkrBrDsk+EZ5OB4jEOT7AjDxtdF+KVEFT7w==", + "dev": true, + "dependencies": { + "agentkeepalive": "^4.2.1", + "cacache": "^16.1.0", + "http-cache-semantics": "^4.1.0", + "http-proxy-agent": "^5.0.0", + "https-proxy-agent": "^5.0.0", + "is-lambda": "^1.0.1", + "lru-cache": "^7.7.1", + "minipass": "^3.1.6", + "minipass-collect": "^1.0.2", + "minipass-fetch": "^2.0.3", + "minipass-flush": "^1.0.5", + "minipass-pipeline": "^1.2.4", + "negotiator": "^0.6.3", + "promise-retry": "^2.0.1", + "socks-proxy-agent": "^7.0.0", + "ssri": "^9.0.0" + }, + "engines": { + "node": "^12.13.0 || ^14.15.0 || >=16.0.0" + } + }, + "node_modules/node-sass/node_modules/minimatch": { + "version": "5.1.6", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-5.1.6.tgz", + "integrity": "sha512-lKwV/1brpG6mBUFHtb7NUmtABCb2WZZmm2wNiOA5hAb8VdCS4B3dtMWyvcoViccwAW/COERjXLt0zP1zXUN26g==", + "dev": true, + "dependencies": { + "brace-expansion": "^2.0.1" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/node-sass/node_modules/minipass-fetch": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/minipass-fetch/-/minipass-fetch-2.1.2.tgz", + "integrity": "sha512-LT49Zi2/WMROHYoqGgdlQIZh8mLPZmOrN2NdJjMXxYe4nkN6FUyuPuOAOedNJDrx0IRGg9+4guZewtp8hE6TxA==", + "dev": true, + "dependencies": { + "minipass": "^3.1.6", + "minipass-sized": "^1.0.3", + "minizlib": "^2.1.2" + }, + "engines": { + "node": "^12.13.0 || ^14.15.0 || >=16.0.0" + }, + "optionalDependencies": { + "encoding": "^0.1.13" } }, - "node_modules/node-gyp/node_modules/are-we-there-yet": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/are-we-there-yet/-/are-we-there-yet-3.0.1.tgz", - "integrity": "sha512-QZW4EDmGwlYur0Yyf/b2uGucHQMa8aFUP7eu9ddR73vvhFyt4V0Vl3QHPcTNJ8l6qYOBdxgXdnBXQrHilfRQBg==", + "node_modules/node-sass/node_modules/socks-proxy-agent": { + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/socks-proxy-agent/-/socks-proxy-agent-7.0.0.tgz", + "integrity": "sha512-Fgl0YPZ902wEsAyiQ+idGd1A7rSFx/ayC1CQVMw5P+EQx2V0SgpGtf6OKFhVjPflPUl9YMmEOnmfjCdMUsygww==", "dev": true, "dependencies": { - "delegates": "^1.0.0", - "readable-stream": "^3.6.0" + "agent-base": "^6.0.2", + "debug": "^4.3.3", + "socks": "^2.6.2" }, "engines": { - "node": "^12.13.0 || ^14.15.0 || >=16.0.0" + "node": ">= 10" } }, - "node_modules/node-gyp/node_modules/gauge": { - "version": "4.0.4", - "resolved": "https://registry.npmjs.org/gauge/-/gauge-4.0.4.tgz", - "integrity": 
"sha512-f9m+BEN5jkg6a0fZjleidjN51VE1X+mPFQ2DJ0uv1V39oCLCbsGe6yjbBnp7eK7z/+GAon99a3nHuqbuuthyPg==", + "node_modules/node-sass/node_modules/ssri": { + "version": "9.0.1", + "resolved": "https://registry.npmjs.org/ssri/-/ssri-9.0.1.tgz", + "integrity": "sha512-o57Wcn66jMQvfHG1FlYbWeZWW/dHZhJXjpIcTfXldXEk5nz5lStPo3mK0OJQfGR3RbZUlbISexbljkJzuEj/8Q==", "dev": true, "dependencies": { - "aproba": "^1.0.3 || ^2.0.0", - "color-support": "^1.1.3", - "console-control-strings": "^1.1.0", - "has-unicode": "^2.0.1", - "signal-exit": "^3.0.7", - "string-width": "^4.2.3", - "strip-ansi": "^6.0.1", - "wide-align": "^1.1.5" + "minipass": "^3.1.1" }, "engines": { "node": "^12.13.0 || ^14.15.0 || >=16.0.0" } }, - "node_modules/node-gyp/node_modules/npmlog": { - "version": "6.0.2", - "resolved": "https://registry.npmjs.org/npmlog/-/npmlog-6.0.2.tgz", - "integrity": "sha512-/vBvz5Jfr9dT/aFWd0FIRf+T/Q2WBsLENygUaFUqstqsycmZAP/t5BvFJTK0viFmSUxiUKTUplWy5vt+rvKIxg==", + "node_modules/node-sass/node_modules/unique-filename": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/unique-filename/-/unique-filename-2.0.1.tgz", + "integrity": "sha512-ODWHtkkdx3IAR+veKxFV+VBkUMcN+FaqzUUd7IZzt+0zhDZFPFxhlqwPF3YQvMHx1TD0tdgYl+kuPnJ8E6ql7A==", "dev": true, "dependencies": { - "are-we-there-yet": "^3.0.0", - "console-control-strings": "^1.1.0", - "gauge": "^4.0.3", - "set-blocking": "^2.0.0" + "unique-slug": "^3.0.0" }, "engines": { "node": "^12.13.0 || ^14.15.0 || >=16.0.0" } }, - "node_modules/node-releases": { - "version": "2.0.13", - "resolved": "https://registry.npmjs.org/node-releases/-/node-releases-2.0.13.tgz", - "integrity": "sha512-uYr7J37ae/ORWdZeQ1xxMJe3NtdmqMC/JZK+geofDrkLUApKRHPd18/TxtBOJ4A0/+uUIliorNrfYV6s1b02eQ==", - "dev": true - }, - "node_modules/node-sass": { - "version": "7.0.3", - "resolved": "https://registry.npmjs.org/node-sass/-/node-sass-7.0.3.tgz", - "integrity": "sha512-8MIlsY/4dXUkJDYht9pIWBhMil3uHmE8b/AdJPjmFn1nBx9X9BASzfzmsCy0uCCb8eqI3SYYzVPDswWqSx7gjw==", + "node_modules/node-sass/node_modules/unique-slug": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/unique-slug/-/unique-slug-3.0.0.tgz", + "integrity": "sha512-8EyMynh679x/0gqE9fT9oilG+qEt+ibFyqjuVTsZn1+CMxH+XLlpvr2UZx4nVcCwTpx81nICr2JQFkM+HPLq4w==", "dev": true, - "hasInstallScript": true, "dependencies": { - "async-foreach": "^0.1.3", - "chalk": "^4.1.2", - "cross-spawn": "^7.0.3", - "gaze": "^1.0.0", - "get-stdin": "^4.0.1", - "glob": "^7.0.3", - "lodash": "^4.17.15", - "meow": "^9.0.0", - "nan": "^2.13.2", - "node-gyp": "^8.4.1", - "npmlog": "^5.0.0", - "request": "^2.88.0", - "sass-graph": "^4.0.1", - "stdout-stream": "^1.4.0", - "true-case-path": "^1.0.2" - }, - "bin": { - "node-sass": "bin/node-sass" + "imurmurhash": "^0.1.4" }, "engines": { - "node": ">=12" + "node": "^12.13.0 || ^14.15.0 || >=16.0.0" } }, "node_modules/nodemon": { - "version": "2.0.22", - "resolved": "https://registry.npmjs.org/nodemon/-/nodemon-2.0.22.tgz", - "integrity": "sha512-B8YqaKMmyuCO7BowF1Z1/mkPqLk6cs/l63Ojtd6otKjMx47Dq1utxfRxcavH1I7VSaL8n5BUaoutadnsX3AAVQ==", + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/nodemon/-/nodemon-3.0.1.tgz", + "integrity": "sha512-g9AZ7HmkhQkqXkRc20w+ZfQ73cHLbE8hnPbtaFbFtCumZsjyMhKk9LajQ07U5Ux28lvFjZ5X7HvWR1xzU8jHVw==", "dev": true, "dependencies": { "chokidar": "^3.5.2", @@ -3401,8 +3439,8 @@ "ignore-by-default": "^1.0.1", "minimatch": "^3.1.2", "pstree.remy": "^1.1.8", - "semver": "^5.7.1", - "simple-update-notifier": "^1.0.7", + "semver": "^7.5.3", + "simple-update-notifier": 
"^2.0.0", "supports-color": "^5.5.0", "touch": "^3.1.0", "undefsafe": "^2.0.5" @@ -3411,7 +3449,7 @@ "nodemon": "bin/nodemon.js" }, "engines": { - "node": ">=8.10.0" + "node": ">=10" }, "funding": { "type": "opencollective", @@ -3436,15 +3474,6 @@ "node": ">=4" } }, - "node_modules/nodemon/node_modules/semver": { - "version": "5.7.2", - "resolved": "https://registry.npmjs.org/semver/-/semver-5.7.2.tgz", - "integrity": "sha512-cBznnQ9KjJqU67B52RMC65CMarK2600WFnbkcaiwWq3xy/5haFJlshgnpjovMVJ+Hff49d8GEn0b87C5pDQ10g==", - "dev": true, - "bin": { - "semver": "bin/semver" - } - }, "node_modules/nodemon/node_modules/supports-color": { "version": "5.5.0", "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-5.5.0.tgz", @@ -3660,54 +3689,15 @@ } }, "node_modules/npm-run-path": { - "version": "2.0.2", - "resolved": "https://registry.npmjs.org/npm-run-path/-/npm-run-path-2.0.2.tgz", - "integrity": "sha512-lJxZYlT4DW/bRUtFh1MQIWqmLwQfAxnqWG4HhEdjMlkrJYnJn0Jrr2u3mgxqaWsdiBc76TYkTG/mhrnYTuzfHw==", + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/npm-run-path/-/npm-run-path-4.0.1.tgz", + "integrity": "sha512-S48WzZW777zhNIrn7gxOlISNAqi9ZC/uQFnRdbeIHhZhCA6UqpkOT8T1G7BvfdgP4Er8gF4sUbaS0i7QvIfCWw==", "dev": true, "dependencies": { - "path-key": "^2.0.0" + "path-key": "^3.0.0" }, "engines": { - "node": ">=4" - } - }, - "node_modules/npm-run-path/node_modules/path-key": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/path-key/-/path-key-2.0.1.tgz", - "integrity": "sha512-fEHGKCSmUSDPv4uoj8AlD+joPlq3peND+HRYyxFz4KPw4z926S/b8rIuFs2FYJg3BwsxJf6A9/3eIdLaYC+9Dw==", - "dev": true, - "engines": { - "node": ">=4" - } - }, - "node_modules/npmlog": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/npmlog/-/npmlog-5.0.1.tgz", - "integrity": "sha512-AqZtDUWOMKs1G/8lwylVjrdYgqA4d9nu8hc+0gzRxlDb1I10+FHBGMXs6aiQHFdCUUlqH99MUMuLfzWDNDtfxw==", - "dev": true, - "dependencies": { - "are-we-there-yet": "^2.0.0", - "console-control-strings": "^1.1.0", - "gauge": "^3.0.0", - "set-blocking": "^2.0.0" - } - }, - "node_modules/oauth-sign": { - "version": "0.9.0", - "resolved": "https://registry.npmjs.org/oauth-sign/-/oauth-sign-0.9.0.tgz", - "integrity": "sha512-fexhUFFPTGV8ybAtSIGbV6gOkSv8UtRbDBnAyLQw4QPKkgNlsH2ByPGtMUqdWkos6YCRmAqViwgZrJc/mRDzZQ==", - "dev": true, - "engines": { - "node": "*" - } - }, - "node_modules/object-assign": { - "version": "4.1.1", - "resolved": "https://registry.npmjs.org/object-assign/-/object-assign-4.1.1.tgz", - "integrity": "sha512-rJgTQnkUnH1sFw8yT6VSU3zD3sWmu6sZhIseY8VX+GRu3P6F7Fu+JNDoXfklElbLJSnc3FUQHVe4cU5hj+BcUg==", - "dev": true, - "engines": { - "node": ">=0.10.0" + "node": ">=8" } }, "node_modules/object-inspect": { @@ -3764,13 +3754,19 @@ "wrappy": "1" } }, - "node_modules/p-finally": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/p-finally/-/p-finally-1.0.0.tgz", - "integrity": "sha512-LICb2p9CB7FS+0eR1oqWnHhp0FljGLZCWBE9aix0Uye9W8LTQPwMTYVGWQWIw9RdQiDg4+epXQODwIYJtSJaow==", + "node_modules/onetime": { + "version": "5.1.2", + "resolved": "https://registry.npmjs.org/onetime/-/onetime-5.1.2.tgz", + "integrity": "sha512-kbpaSSGJTWdAY5KPVeMOKXSrPtr8C8C7wodJbcsd51jRnmD+GZu8Y0VoU6Dm5Z4vWr0Ig/1NKuWRKf7j5aaYSg==", "dev": true, + "dependencies": { + "mimic-fn": "^2.1.0" + }, "engines": { - "node": ">=4" + "node": ">=6" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" } }, "node_modules/p-limit": { @@ -3903,12 +3899,6 @@ "node": ">=8" } }, - "node_modules/performance-now": { - "version": 
"2.1.0", - "resolved": "https://registry.npmjs.org/performance-now/-/performance-now-2.1.0.tgz", - "integrity": "sha512-7EAHlyLHI56VEIdK57uwHdHKIaAGbnXPiw0yWbarQZOKaKpvUIgW0jWRVLiatnM+XXlSwsanIBH/hzGMJulMow==", - "dev": true - }, "node_modules/picocolors": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/picocolors/-/picocolors-1.0.0.tgz", @@ -4182,28 +4172,12 @@ "node": ">=10" } }, - "node_modules/psl": { - "version": "1.9.0", - "resolved": "https://registry.npmjs.org/psl/-/psl-1.9.0.tgz", - "integrity": "sha512-E/ZsdU4HLs/68gYzgGTkMicWTLPdAftJLfJFlLUAAKZGkStNU72sZjT66SnMDVOfOWY/YAoiD7Jxa9iHvngcag==", - "dev": true - }, "node_modules/pstree.remy": { "version": "1.1.8", "resolved": "https://registry.npmjs.org/pstree.remy/-/pstree.remy-1.1.8.tgz", "integrity": "sha512-77DZwxQmxKnu3aR542U+X8FypNzbfJ+C5XQDk3uWjWxn6151aIMGthWYRXTqT1E5oJvg+ljaa2OJi+VfvCOQ8w==", "dev": true }, - "node_modules/pump": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/pump/-/pump-3.0.0.tgz", - "integrity": "sha512-LwZy+p3SFs1Pytd/jYct4wpv49HiYCqd9Rlc5ZVdk0V+8Yzv6jR5Blk3TRmPL1ft69TxP0IMZGJ+WPFU2BFhww==", - "dev": true, - "dependencies": { - "end-of-stream": "^1.1.0", - "once": "^1.3.1" - } - }, "node_modules/punycode": { "version": "1.4.1", "resolved": "https://registry.npmjs.org/punycode/-/punycode-1.4.1.tgz", @@ -4225,15 +4199,6 @@ "purgecss": "bin/purgecss.js" } }, - "node_modules/qs": { - "version": "6.5.3", - "resolved": "https://registry.npmjs.org/qs/-/qs-6.5.3.tgz", - "integrity": "sha512-qxXIEh4pCGfHICj1mAJQ2/2XVZkjCDTcEgfoSQxc/fYivUZxTkk7L3bDBJSoNrEzXI17oUO5Dp07ktqE5KzczA==", - "dev": true, - "engines": { - "node": ">=0.6" - } - }, "node_modules/queue-microtask": { "version": "1.2.3", "resolved": "https://registry.npmjs.org/queue-microtask/-/queue-microtask-1.2.3.tgz", @@ -4531,38 +4496,6 @@ "node": ">=0.10.0" } }, - "node_modules/request": { - "version": "2.88.2", - "resolved": "https://registry.npmjs.org/request/-/request-2.88.2.tgz", - "integrity": "sha512-MsvtOrfG9ZcrOwAW+Qi+F6HbD0CWXEh9ou77uOb7FM2WPhwT7smM833PzanhJLsgXjN89Ir6V2PczXNnMpwKhw==", - "deprecated": "request has been deprecated, see https://github.com/request/request/issues/3142", - "dev": true, - "dependencies": { - "aws-sign2": "~0.7.0", - "aws4": "^1.8.0", - "caseless": "~0.12.0", - "combined-stream": "~1.0.6", - "extend": "~3.0.2", - "forever-agent": "~0.6.1", - "form-data": "~2.3.2", - "har-validator": "~5.1.3", - "http-signature": "~1.2.0", - "is-typedarray": "~1.0.0", - "isstream": "~0.1.2", - "json-stringify-safe": "~5.0.1", - "mime-types": "~2.1.19", - "oauth-sign": "~0.9.0", - "performance-now": "^2.1.0", - "qs": "~6.5.2", - "safe-buffer": "^5.1.2", - "tough-cookie": "~2.5.0", - "tunnel-agent": "^0.6.0", - "uuid": "^3.3.2" - }, - "engines": { - "node": ">= 6" - } - }, "node_modules/require-directory": { "version": "2.1.1", "resolved": "https://registry.npmjs.org/require-directory/-/require-directory-2.1.1.tgz", @@ -4720,7 +4653,8 @@ "version": "2.1.2", "resolved": "https://registry.npmjs.org/safer-buffer/-/safer-buffer-2.1.2.tgz", "integrity": "sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg==", - "dev": true + "dev": true, + "optional": true }, "node_modules/sass-graph": { "version": "4.0.1", @@ -4766,36 +4700,41 @@ } }, "node_modules/serve": { - "version": "13.0.4", - "resolved": "https://registry.npmjs.org/serve/-/serve-13.0.4.tgz", - "integrity": "sha512-Lj8rhXmphJCRQVv5qwu0NQZ2h+0MrRyRJxDZu5y3qLH2i/XY6a0FPj/VmjMUdkJb672MBfE8hJ274PU6JzBd0Q==", - "dev": 
true, - "dependencies": { - "@zeit/schemas": "2.6.0", - "ajv": "6.12.6", - "arg": "2.0.0", - "boxen": "5.1.2", - "chalk": "2.4.1", - "clipboardy": "2.3.0", - "compression": "1.7.3", - "serve-handler": "6.1.3", - "update-check": "1.5.2" + "version": "14.2.1", + "resolved": "https://registry.npmjs.org/serve/-/serve-14.2.1.tgz", + "integrity": "sha512-48er5fzHh7GCShLnNyPBRPEjs2I6QBozeGr02gaacROiyS/8ARADlj595j39iZXAqBbJHH/ivJJyPRWY9sQWZA==", + "dev": true, + "dependencies": { + "@zeit/schemas": "2.29.0", + "ajv": "8.11.0", + "arg": "5.0.2", + "boxen": "7.0.0", + "chalk": "5.0.1", + "chalk-template": "0.4.0", + "clipboardy": "3.0.0", + "compression": "1.7.4", + "is-port-reachable": "4.0.0", + "serve-handler": "6.1.5", + "update-check": "1.5.4" }, "bin": { - "serve": "bin/serve.js" + "serve": "build/main.js" + }, + "engines": { + "node": ">= 14" } }, "node_modules/serve-handler": { - "version": "6.1.3", - "resolved": "https://registry.npmjs.org/serve-handler/-/serve-handler-6.1.3.tgz", - "integrity": "sha512-FosMqFBNrLyeiIDvP1zgO6YoTzFYHxLDEIavhlmQ+knB2Z7l1t+kGLHkZIDN7UVWqQAmKI3D20A6F6jo3nDd4w==", + "version": "6.1.5", + "resolved": "https://registry.npmjs.org/serve-handler/-/serve-handler-6.1.5.tgz", + "integrity": "sha512-ijPFle6Hwe8zfmBxJdE+5fta53fdIY0lHISJvuikXB3VYFafRjMRpOffSPvCYsbKyBA7pvy9oYr/BT1O3EArlg==", "dev": true, "dependencies": { "bytes": "3.0.0", "content-disposition": "0.5.2", "fast-url-parser": "1.1.3", "mime-types": "2.1.18", - "minimatch": "3.0.4", + "minimatch": "3.1.2", "path-is-inside": "1.0.2", "path-to-regexp": "2.2.1", "range-parser": "1.2.0" @@ -4822,78 +4761,16 @@ "node": ">= 0.6" } }, - "node_modules/serve-handler/node_modules/minimatch": { - "version": "3.0.4", - "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.0.4.tgz", - "integrity": "sha512-yJHVQEhyqPLUTgt9B83PXu6W3rx4MvvHvSUvToogpwoGDOUQ+yDrR0HRot+yOCdCO7u4hX3pWft6kWBBcqh0UA==", - "dev": true, - "dependencies": { - "brace-expansion": "^1.1.7" - }, - "engines": { - "node": "*" - } - }, - "node_modules/serve/node_modules/ansi-styles": { - "version": "3.2.1", - "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-3.2.1.tgz", - "integrity": "sha512-VT0ZI6kZRdTh8YyJw3SMbYm/u+NqfsAxEpWO0Pf9sq8/e94WxxOpPKx9FR1FlyCtOVDNOQ+8ntlqFxiRc+r5qA==", - "dev": true, - "dependencies": { - "color-convert": "^1.9.0" - }, - "engines": { - "node": ">=4" - } - }, "node_modules/serve/node_modules/chalk": { - "version": "2.4.1", - "resolved": "https://registry.npmjs.org/chalk/-/chalk-2.4.1.tgz", - "integrity": "sha512-ObN6h1v2fTJSmUXoS3nMQ92LbDK9be4TV+6G+omQlGJFdcUX5heKi1LZ1YnRMIgwTLEj3E24bT6tYni50rlCfQ==", - "dev": true, - "dependencies": { - "ansi-styles": "^3.2.1", - "escape-string-regexp": "^1.0.5", - "supports-color": "^5.3.0" - }, - "engines": { - "node": ">=4" - } - }, - "node_modules/serve/node_modules/color-convert": { - "version": "1.9.3", - "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-1.9.3.tgz", - "integrity": "sha512-QfAUtd+vFdAtFQcC8CCyYt1fYWxSqAiK2cSD6zDB8N3cpsEBAvRxp9zOGg6G/SHHJYAT88/az/IuDGALsNVbGg==", - "dev": true, - "dependencies": { - "color-name": "1.1.3" - } - }, - "node_modules/serve/node_modules/color-name": { - "version": "1.1.3", - "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.3.tgz", - "integrity": "sha512-72fSenhMw2HZMTVHeCA9KCmpEIbzWiQsjN+BHcBbS9vr1mtt+vJjPdksIBNUmKAW8TFUDPJK5SUU3QhE9NEXDw==", - "dev": true - }, - "node_modules/serve/node_modules/has-flag": { - "version": "3.0.0", - "resolved": 
"https://registry.npmjs.org/has-flag/-/has-flag-3.0.0.tgz", - "integrity": "sha512-sKJf1+ceQBr4SMkvQnBDNDtf4TXpVhVGateu0t918bl30FnbE2m4vNLX+VWe/dpjlb+HugGYzW7uQXH98HPEYw==", + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-5.0.1.tgz", + "integrity": "sha512-Fo07WOYGqMfCWHOzSXOt2CxDbC6skS/jO9ynEcmpANMoPrD+W1r1K6Vx7iNm+AQmETU1Xr2t+n8nzkV9t6xh3w==", "dev": true, "engines": { - "node": ">=4" - } - }, - "node_modules/serve/node_modules/supports-color": { - "version": "5.5.0", - "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-5.5.0.tgz", - "integrity": "sha512-QjVjwdXIt408MIiAqCX4oUKsgU2EqAGzs2Ppkm4aQYbjm+ZEWEcW4SfFNTr4uMNZma0ey4f5lgLrkB0aX0QMow==", - "dev": true, - "dependencies": { - "has-flag": "^3.0.0" + "node": "^12.17.0 || ^14.13 || >=16.0.0" }, - "engines": { - "node": ">=4" + "funding": { + "url": "https://github.com/chalk/chalk?sponsor=1" } }, "node_modules/set-blocking": { @@ -4953,24 +4830,15 @@ "dev": true }, "node_modules/simple-update-notifier": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/simple-update-notifier/-/simple-update-notifier-1.1.0.tgz", - "integrity": "sha512-VpsrsJSUcJEseSbMHkrsrAVSdvVS5I96Qo1QAQ4FxQ9wXFcB+pjj7FB7/us9+GcgfW4ziHtYMc1J0PLczb55mg==", + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/simple-update-notifier/-/simple-update-notifier-2.0.0.tgz", + "integrity": "sha512-a2B9Y0KlNXl9u/vsW6sTIu9vGEpfKu2wRV6l1H3XEas/0gUIzGzBoP/IouTcUQbm9JWZLH3COxyn03TYlFax6w==", "dev": true, "dependencies": { - "semver": "~7.0.0" + "semver": "^7.5.3" }, "engines": { - "node": ">=8.10.0" - } - }, - "node_modules/simple-update-notifier/node_modules/semver": { - "version": "7.0.0", - "resolved": "https://registry.npmjs.org/semver/-/semver-7.0.0.tgz", - "integrity": "sha512-+GB6zVA9LWh6zovYQLALHwv5rb2PHGlJi3lfiqIHxR0uuwCgefcOJc59v9fv1w8GbStwxuuqqAjI9NMAOOgq1A==", - "dev": true, - "bin": { - "semver": "bin/semver.js" + "node": ">=10" } }, "node_modules/slash": { @@ -5090,31 +4958,6 @@ "integrity": "sha512-XkD+zwiqXHikFZm4AX/7JSCXA98U5Db4AFd5XUg/+9UNtnH75+Z9KxtpYiJZx36mUDVOwH83pl7yvCer6ewM3w==", "dev": true }, - "node_modules/sshpk": { - "version": "1.17.0", - "resolved": "https://registry.npmjs.org/sshpk/-/sshpk-1.17.0.tgz", - "integrity": "sha512-/9HIEs1ZXGhSPE8X6Ccm7Nam1z8KcoCqPdI7ecm1N33EzAetWahvQWVqLZtaZQ+IDKX4IyA2o0gBzqIMkAagHQ==", - "dev": true, - "dependencies": { - "asn1": "~0.2.3", - "assert-plus": "^1.0.0", - "bcrypt-pbkdf": "^1.0.0", - "dashdash": "^1.12.0", - "ecc-jsbn": "~0.1.1", - "getpass": "^0.1.1", - "jsbn": "~0.1.0", - "safer-buffer": "^2.0.2", - "tweetnacl": "~0.14.0" - }, - "bin": { - "sshpk-conv": "bin/sshpk-conv", - "sshpk-sign": "bin/sshpk-sign", - "sshpk-verify": "bin/sshpk-verify" - }, - "engines": { - "node": ">=0.10.0" - } - }, "node_modules/ssri": { "version": "8.0.1", "resolved": "https://registry.npmjs.org/ssri/-/ssri-8.0.1.tgz", @@ -5278,13 +5121,13 @@ "node": ">=4" } }, - "node_modules/strip-eof": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/strip-eof/-/strip-eof-1.0.0.tgz", - "integrity": "sha512-7FCwGGmx8mD5xQd3RPUvnSpUXHM3BWuzjtpD4TXsfcZ9EL4azvVVUscFYwD9nx8Kh+uCBC00XBtAykoMHwTh8Q==", + "node_modules/strip-final-newline": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/strip-final-newline/-/strip-final-newline-2.0.0.tgz", + "integrity": "sha512-BrpvfNAE3dcvq7ll3xVumzjKjZQ5tI1sEUIKr3Uoks0XUl45St3FlatVqef9prk4jRDzhW6WZg+3bk93y6pLjA==", "dev": true, "engines": { - "node": ">=0.10.0" + "node": ">=6" } }, 
"node_modules/strip-indent": { @@ -5625,12 +5468,6 @@ "url": "https://github.com/sponsors/epoberezkin" } }, - "node_modules/table/node_modules/json-schema-traverse": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-1.0.0.tgz", - "integrity": "sha512-NM8/P9n3XjXhIZn1lLhkFaACTOURQXjWhV4BA/RnOv8xvgqtqpAX9IO4mRQxSx1Rlo4tqzeqb0sOlruaOy3dug==", - "dev": true - }, "node_modules/tar": { "version": "6.1.15", "resolved": "https://registry.npmjs.org/tar/-/tar-6.1.15.tgz", @@ -5702,62 +5539,19 @@ "node": "*" } }, - "node_modules/tough-cookie": { - "version": "2.5.0", - "resolved": "https://registry.npmjs.org/tough-cookie/-/tough-cookie-2.5.0.tgz", - "integrity": "sha512-nlLsUzgm1kfLXSXfRZMc1KLAugd4hqJHDTvc2hDIwS3mZAfMEuMbc03SujMF+GEcpaX/qboeycw6iO8JwVv2+g==", - "dev": true, - "dependencies": { - "psl": "^1.1.28", - "punycode": "^2.1.1" - }, - "engines": { - "node": ">=0.8" - } - }, - "node_modules/tough-cookie/node_modules/punycode": { - "version": "2.3.0", - "resolved": "https://registry.npmjs.org/punycode/-/punycode-2.3.0.tgz", - "integrity": "sha512-rRV+zQD8tVFys26lAGR9WUuS4iUAngJScM+ZRSKtvl5tKeZ2t5bvdNFdNHBW9FWR4guGHlgmsZ1G7BSm2wTbuA==", - "dev": true, - "engines": { - "node": ">=6" - } - }, "node_modules/trim-newlines": { "version": "3.0.1", "resolved": "https://registry.npmjs.org/trim-newlines/-/trim-newlines-3.0.1.tgz", "integrity": "sha512-c1PTsA3tYrIsLGkJkzHF+w9F2EyxfXGo4UyJc4pFL++FMjnq0HJS69T3M7d//gKrFKwy429bouPescbjecU+Zw==", "dev": true, "engines": { - "node": ">=8" - } - }, - "node_modules/true-case-path": { - "version": "1.0.3", - "resolved": "https://registry.npmjs.org/true-case-path/-/true-case-path-1.0.3.tgz", - "integrity": "sha512-m6s2OdQe5wgpFMC+pAJ+q9djG82O2jcHPOI6RNg1yy9rCYR+WD6Nbpl32fDpfC56nirdRy+opFa/Vk7HYhqaew==", - "dev": true, - "dependencies": { - "glob": "^7.1.2" - } - }, - "node_modules/tunnel-agent": { - "version": "0.6.0", - "resolved": "https://registry.npmjs.org/tunnel-agent/-/tunnel-agent-0.6.0.tgz", - "integrity": "sha512-McnNiV1l8RYeY8tBgEpuodCC1mLUdbSN+CYBL7kJsJNInOP8UjDDEwdk6Mw60vdLLrr5NHKZhMAOSrR2NZuQ+w==", - "dev": true, - "dependencies": { - "safe-buffer": "^5.0.1" - }, - "engines": { - "node": "*" + "node": ">=8" } }, - "node_modules/tweetnacl": { - "version": "0.14.5", - "resolved": "https://registry.npmjs.org/tweetnacl/-/tweetnacl-0.14.5.tgz", - "integrity": "sha512-KXXFFdAbFXY4geFIwoyNK+f5Z1b7swfXABfL7HXCmoIWMKU3dmS26672A4EeQtDzLKy7SXmfBu51JolvEKwtGA==", + "node_modules/true-case-path": { + "version": "2.2.1", + "resolved": "https://registry.npmjs.org/true-case-path/-/true-case-path-2.2.1.tgz", + "integrity": "sha512-0z3j8R7MCjy10kc/g+qg7Ln3alJTodw9aDuVWZa3uiWqfuBMKeAeP2ocWcxoyM3D73yz3Jt/Pu4qPr4wHSdB/Q==", "dev": true }, "node_modules/type-fest": { @@ -5916,9 +5710,9 @@ } }, "node_modules/update-check": { - "version": "1.5.2", - "resolved": "https://registry.npmjs.org/update-check/-/update-check-1.5.2.tgz", - "integrity": "sha512-1TrmYLuLj/5ZovwUS7fFd1jMH3NnFDN1y1A8dboedIDt7zs/zJMo6TwwlhYKkSeEwzleeiSBV5/3c9ufAQWDaQ==", + "version": "1.5.4", + "resolved": "https://registry.npmjs.org/update-check/-/update-check-1.5.4.tgz", + "integrity": "sha512-5YHsflzHP4t1G+8WGPlvKbJEbAJGCgw+Em+dGR1KmBUbr1J36SJBqlHLjR7oob7sco5hWHGQVcr9B2poIVDDTQ==", "dev": true, "dependencies": { "registry-auth-token": "3.3.2", @@ -5949,16 +5743,6 @@ "integrity": "sha512-EPD5q1uXyFxJpCrLnCc1nHnq3gOa6DZBocAIiI2TaSCA7VCJ1UJDMagCzIkXNsUYfD1daK//LTEQ8xiIbrHtcw==", "dev": true }, - "node_modules/uuid": { - "version": "3.4.0", - 
"resolved": "https://registry.npmjs.org/uuid/-/uuid-3.4.0.tgz", - "integrity": "sha512-HjSDRw6gZE5JMggctHBcjVak08+KEVhSIiDzFnT9S9aegmp85S/bReBVTb4QTFaRNptJ9kuYaNhnbNEOkbKb/A==", - "deprecated": "Please upgrade to version 7 or higher. Older versions may use Math.random() in certain circumstances, which is known to be problematic. See https://v8.dev/blog/math-random for details.", - "dev": true, - "bin": { - "uuid": "bin/uuid" - } - }, "node_modules/v8-compile-cache": { "version": "2.4.0", "resolved": "https://registry.npmjs.org/v8-compile-cache/-/v8-compile-cache-2.4.0.tgz", @@ -5984,20 +5768,6 @@ "node": ">= 0.8" } }, - "node_modules/verror": { - "version": "1.10.0", - "resolved": "https://registry.npmjs.org/verror/-/verror-1.10.0.tgz", - "integrity": "sha512-ZZKSmDAEFOijERBLkmYfJ+vmk3w+7hOLYDNkRCuRuMJGEmqYNCNLyBBFwWKVMhfwaEF3WOd0Zlw86U/WC/+nYw==", - "dev": true, - "engines": [ - "node >=0.6.0" - ], - "dependencies": { - "assert-plus": "^1.0.0", - "core-util-is": "1.0.2", - "extsprintf": "^1.2.0" - } - }, "node_modules/which": { "version": "2.0.2", "resolved": "https://registry.npmjs.org/which/-/which-2.0.2.tgz", @@ -6058,15 +5828,68 @@ } }, "node_modules/widest-line": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/widest-line/-/widest-line-3.1.0.tgz", - "integrity": "sha512-NsmoXalsWVDMGupxZ5R08ka9flZjjiLvHVAWYOKtiKM8ujtZWr9cRffak+uSE48+Ob8ObalXpwyeUiyDD6QFgg==", + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/widest-line/-/widest-line-4.0.1.tgz", + "integrity": "sha512-o0cyEG0e8GPzT4iGHphIOh0cJOV8fivsXxddQasHPHfoZf1ZexrfeA21w2NaEN1RHE+fXlfISmOE8R9N3u3Qig==", "dev": true, "dependencies": { - "string-width": "^4.0.0" + "string-width": "^5.0.1" }, "engines": { - "node": ">=8" + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/widest-line/node_modules/ansi-regex": { + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-6.0.1.tgz", + "integrity": "sha512-n5M855fKb2SsfMIiFFoVrABHJC8QtHwVx+mHWP3QcEqBHYienj5dHSgjbxtC0WEZXYt4wcD6zrQElDPhFuZgfA==", + "dev": true, + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/chalk/ansi-regex?sponsor=1" + } + }, + "node_modules/widest-line/node_modules/emoji-regex": { + "version": "9.2.2", + "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-9.2.2.tgz", + "integrity": "sha512-L18DaJsXSUk2+42pv8mLs5jJT2hqFkFE4j21wOmgbUqsZ2hL72NsUU785g9RXgo3s0ZNgVl42TiHp3ZtOv/Vyg==", + "dev": true + }, + "node_modules/widest-line/node_modules/string-width": { + "version": "5.1.2", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-5.1.2.tgz", + "integrity": "sha512-HnLOCR3vjcY8beoNLtcjZ5/nxn2afmME6lhrDrebokqMap+XbeW8n9TXpPDOqdGK5qcI3oT0GKTW6wC7EMiVqA==", + "dev": true, + "dependencies": { + "eastasianwidth": "^0.2.0", + "emoji-regex": "^9.2.2", + "strip-ansi": "^7.0.1" + }, + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/widest-line/node_modules/strip-ansi": { + "version": "7.1.0", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-7.1.0.tgz", + "integrity": "sha512-iq6eVVI64nQQTRYq2KtEg2d2uU7LElhTJwsH4YzIHZshxlgZms/wIc4VoDQTlG/IvVIrBKG06CrZnp0qv7hkcQ==", + "dev": true, + "dependencies": { + "ansi-regex": "^6.0.1" + }, + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/chalk/strip-ansi?sponsor=1" } }, "node_modules/wrap-ansi": { @@ -6387,9 +6210,9 @@ "dev": true 
}, "@zeit/schemas": { - "version": "2.6.0", - "resolved": "https://registry.npmjs.org/@zeit/schemas/-/schemas-2.6.0.tgz", - "integrity": "sha512-uUrgZ8AxS+Lio0fZKAipJjAh415JyrOZowliZAzmnJSsf7piVL5w+G0+gFJ0KSu3QRhvui/7zuvpLz03YjXAhg==", + "version": "2.29.0", + "resolved": "https://registry.npmjs.org/@zeit/schemas/-/schemas-2.29.0.tgz", + "integrity": "sha512-g5QiLIfbg3pLuYUJPlisNKY+epQJTcMDsOnVNkscrDP1oi7vmJnzOANYJI/1pZcVJ6umUkBv3aFtlg1UvUHGzA==", "dev": true }, "abbrev": { @@ -6437,14 +6260,14 @@ } }, "ajv": { - "version": "6.12.6", - "resolved": "https://registry.npmjs.org/ajv/-/ajv-6.12.6.tgz", - "integrity": "sha512-j3fVLgvTo527anyYyJOGTYJbG+vnnQYvE0m5mmkc1TK+nxAppkCLMIL0aZ4dblVCNoGShhm+kzE4ZUykBoMg4g==", + "version": "8.11.0", + "resolved": "https://registry.npmjs.org/ajv/-/ajv-8.11.0.tgz", + "integrity": "sha512-wGgprdCvMalC0BztXvitD2hC04YffAvtsUn93JbGXYLAtCUO4xd17mCCZQxUOItiBwZvJScWo8NIvQMQ71rdpg==", "dev": true, "requires": { "fast-deep-equal": "^3.1.1", - "fast-json-stable-stringify": "^2.0.0", - "json-schema-traverse": "^0.4.1", + "json-schema-traverse": "^1.0.0", + "require-from-string": "^2.0.2", "uri-js": "^4.2.2" } }, @@ -6494,20 +6317,10 @@ "integrity": "sha512-Of/R0wqp83cgHozfIYLbBMnej79U/SVGOOyuB3VVFv1NRM/PSFMK12x9KVtiYzJqmnU5WR2qp0Z5rHb7sWGnFQ==", "dev": true }, - "are-we-there-yet": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/are-we-there-yet/-/are-we-there-yet-2.0.0.tgz", - "integrity": "sha512-Ci/qENmwHnsYo9xKIcUJN5LeDKdJ6R1Z1j9V/J5wyq8nh/mYPEpIKJbBZXtZjG04HiK7zV/p6Vs9952MrMeUIw==", - "dev": true, - "requires": { - "delegates": "^1.0.0", - "readable-stream": "^3.6.0" - } - }, "arg": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/arg/-/arg-2.0.0.tgz", - "integrity": "sha512-XxNTUzKnz1ctK3ZIcI2XUPlD96wbHP2nGqkPKpvk/HNRlPveYrXIVSTk9m3LcqOgDPg3B1nMvdV/K8wZd7PG4w==", + "version": "5.0.2", + "resolved": "https://registry.npmjs.org/arg/-/arg-5.0.2.tgz", + "integrity": "sha512-PYjyFOLKQ9y57JvQ6QLo8dAgNqswh8M1RMJYdQduT6xbWSgK36P/Z/v+p888pM69jMMfS8Xd8F6I1kQ/I9HUGg==", "dev": true }, "array-buffer-byte-length": { @@ -6546,21 +6359,6 @@ "integrity": "sha512-3CYzex9M9FGQjCGMGyi6/31c8GJbgb0qGyrx5HWxPd0aCwh4cB2YjMb2Xf9UuoogrMrlO9cTqnB5rI5GHZTcUA==", "dev": true }, - "asn1": { - "version": "0.2.6", - "resolved": "https://registry.npmjs.org/asn1/-/asn1-0.2.6.tgz", - "integrity": "sha512-ix/FxPn0MDjeyJ7i/yoHGFt/EX6LyNbxSEhPPXODPL+KB0VPk86UYfL0lMdy+KCnv+fmvIzySwaK5COwqVbWTQ==", - "dev": true, - "requires": { - "safer-buffer": "~2.1.0" - } - }, - "assert-plus": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/assert-plus/-/assert-plus-1.0.0.tgz", - "integrity": "sha512-NfJ4UzBCcQGLDlQq7nHxH+tv3kyZ0hHQqF5BO6J7tNJeP5do1llPr8dZ8zHonfhAu0PHAdMkSo+8o0wxg9lZWw==", - "dev": true - }, "astral-regex": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/astral-regex/-/astral-regex-2.0.0.tgz", @@ -6573,12 +6371,6 @@ "integrity": "sha512-VUeSMD8nEGBWaZK4lizI1sf3yEC7pnAQ/mrI7pC2fBz2s/tq5jWWEngTwaf0Gruu/OoXRGLGg1XFqpYBiGTYJA==", "dev": true }, - "asynckit": { - "version": "0.4.0", - "resolved": "https://registry.npmjs.org/asynckit/-/asynckit-0.4.0.tgz", - "integrity": "sha512-Oei9OH4tRh0YqU3GxhX79dM/mwVgvbZJaSNaRk+bshkj0S5cfHcgYakreBjrHwatXKbz+IoIdYLxrKim2MjW0Q==", - "dev": true - }, "autoprefixer": { "version": "10.4.15", "resolved": "https://registry.npmjs.org/autoprefixer/-/autoprefixer-10.4.15.tgz", @@ -6599,33 +6391,12 @@ "integrity": 
"sha512-DMD0KiN46eipeziST1LPP/STfDU0sufISXmjSgvVsoU2tqxctQeASejWcfNtxYKqETM1UxQ8sp2OrSBWpHY6sw==", "dev": true }, - "aws-sign2": { - "version": "0.7.0", - "resolved": "https://registry.npmjs.org/aws-sign2/-/aws-sign2-0.7.0.tgz", - "integrity": "sha512-08kcGqnYf/YmjoRhfxyu+CLxBjUtHLXLXX/vUfx9l2LYzG3c1m61nrpyFUZI6zeS+Li/wWMMidD9KgrqtGq3mA==", - "dev": true - }, - "aws4": { - "version": "1.12.0", - "resolved": "https://registry.npmjs.org/aws4/-/aws4-1.12.0.tgz", - "integrity": "sha512-NmWvPnx0F1SfrQbYwOi7OeaNGokp9XhzNioJ/CSBs8Qa4vxug81mhJEAVZwxXuBmYB5KDRfMq/F3RR0BIU7sWg==", - "dev": true - }, "balanced-match": { "version": "1.0.2", "resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.2.tgz", "integrity": "sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw==", "dev": true }, - "bcrypt-pbkdf": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/bcrypt-pbkdf/-/bcrypt-pbkdf-1.0.2.tgz", - "integrity": "sha512-qeFIXtP4MSoi6NLqO12WfqARWWuCKi2Rn/9hJLEmtB5yTNr9DqFWkJRCf2qShWzPeAMRnOgCrq0sg/KLv5ES9w==", - "dev": true, - "requires": { - "tweetnacl": "^0.14.3" - } - }, "binary-extensions": { "version": "2.2.0", "resolved": "https://registry.npmjs.org/binary-extensions/-/binary-extensions-2.2.0.tgz", @@ -6647,32 +6418,87 @@ } }, "boxen": { - "version": "5.1.2", - "resolved": "https://registry.npmjs.org/boxen/-/boxen-5.1.2.tgz", - "integrity": "sha512-9gYgQKXx+1nP8mP7CzFyaUARhg7D3n1dF/FnErWmu9l6JvGpNUN278h0aSb+QjoiKSWG+iZ3uHrcqk0qrY9RQQ==", + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/boxen/-/boxen-7.0.0.tgz", + "integrity": "sha512-j//dBVuyacJbvW+tvZ9HuH03fZ46QcaKvvhZickZqtB271DxJ7SNRSNxrV/dZX0085m7hISRZWbzWlJvx/rHSg==", "dev": true, "requires": { - "ansi-align": "^3.0.0", - "camelcase": "^6.2.0", - "chalk": "^4.1.0", - "cli-boxes": "^2.2.1", - "string-width": "^4.2.2", - "type-fest": "^0.20.2", - "widest-line": "^3.1.0", - "wrap-ansi": "^7.0.0" + "ansi-align": "^3.0.1", + "camelcase": "^7.0.0", + "chalk": "^5.0.1", + "cli-boxes": "^3.0.0", + "string-width": "^5.1.2", + "type-fest": "^2.13.0", + "widest-line": "^4.0.1", + "wrap-ansi": "^8.0.1" }, "dependencies": { + "ansi-regex": { + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-6.0.1.tgz", + "integrity": "sha512-n5M855fKb2SsfMIiFFoVrABHJC8QtHwVx+mHWP3QcEqBHYienj5dHSgjbxtC0WEZXYt4wcD6zrQElDPhFuZgfA==", + "dev": true + }, + "ansi-styles": { + "version": "6.2.1", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-6.2.1.tgz", + "integrity": "sha512-bN798gFfQX+viw3R7yrGWRqnrN2oRkEkUjjl4JNn4E8GxxbjtG3FbrEIIY3l8/hrwUwIeCZvi4QuOTP4MErVug==", + "dev": true + }, "camelcase": { - "version": "6.3.0", - "resolved": "https://registry.npmjs.org/camelcase/-/camelcase-6.3.0.tgz", - "integrity": "sha512-Gmy6FhYlCY7uOElZUSbxo2UCDH8owEk996gkbrpsgGtrJLM3J7jGxl9Ic7Qwwj4ivOE5AWZWRMecDdF7hqGjFA==", + "version": "7.0.1", + "resolved": "https://registry.npmjs.org/camelcase/-/camelcase-7.0.1.tgz", + "integrity": "sha512-xlx1yCK2Oc1APsPXDL2LdlNP6+uu8OCDdhOBSVT279M/S+y75O30C2VuD8T2ogdePBBl7PfPF4504tnLgX3zfw==", + "dev": true + }, + "chalk": { + "version": "5.3.0", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-5.3.0.tgz", + "integrity": "sha512-dLitG79d+GV1Nb/VYcCDFivJeK1hiukt9QjRNVOsUtTy1rR1YJsmpGGTZ3qJos+uw7WmWF4wUwBd9jxjocFC2w==", + "dev": true + }, + "emoji-regex": { + "version": "9.2.2", + "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-9.2.2.tgz", + "integrity": 
"sha512-L18DaJsXSUk2+42pv8mLs5jJT2hqFkFE4j21wOmgbUqsZ2hL72NsUU785g9RXgo3s0ZNgVl42TiHp3ZtOv/Vyg==", "dev": true }, + "string-width": { + "version": "5.1.2", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-5.1.2.tgz", + "integrity": "sha512-HnLOCR3vjcY8beoNLtcjZ5/nxn2afmME6lhrDrebokqMap+XbeW8n9TXpPDOqdGK5qcI3oT0GKTW6wC7EMiVqA==", + "dev": true, + "requires": { + "eastasianwidth": "^0.2.0", + "emoji-regex": "^9.2.2", + "strip-ansi": "^7.0.1" + } + }, + "strip-ansi": { + "version": "7.1.0", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-7.1.0.tgz", + "integrity": "sha512-iq6eVVI64nQQTRYq2KtEg2d2uU7LElhTJwsH4YzIHZshxlgZms/wIc4VoDQTlG/IvVIrBKG06CrZnp0qv7hkcQ==", + "dev": true, + "requires": { + "ansi-regex": "^6.0.1" + } + }, "type-fest": { - "version": "0.20.2", - "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-0.20.2.tgz", - "integrity": "sha512-Ne+eE4r0/iWnpAxD852z3A+N0Bt5RN//NjJwRd2VFHEmrywxf5vsZlh4R6lixl6B+wz/8d+maTSAkN1FIkI3LQ==", + "version": "2.19.0", + "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-2.19.0.tgz", + "integrity": "sha512-RAH822pAdBgcNMAfWnCBU3CFZcfZ/i1eZjwFU/dsLKumyuuP3niueg2UAukXYF0E2AAoc82ZSSf9J0WQBinzHA==", "dev": true + }, + "wrap-ansi": { + "version": "8.1.0", + "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-8.1.0.tgz", + "integrity": "sha512-si7QWI6zUMq56bESFvagtmzMdGOtoxfR+Sez11Mobfc7tm+VkUckk9bW2UeffTGVUbOksxmSw0AA2gs8g71NCQ==", + "dev": true, + "requires": { + "ansi-styles": "^6.1.0", + "string-width": "^5.0.1", + "strip-ansi": "^7.0.1" + } } } }, @@ -6778,12 +6604,6 @@ "integrity": "sha512-Jj917pJtYg9HSJBF95HVX3Cdr89JUyLT4IZ8SvM5aDRni95swKgYi3TgYLH5hnGfPE/U1dg6IfZ50UsIlLkwSA==", "dev": true }, - "caseless": { - "version": "0.12.0", - "resolved": "https://registry.npmjs.org/caseless/-/caseless-0.12.0.tgz", - "integrity": "sha512-4tYFyifaFfGacoiObjJegolkwSU4xQNGbVgUiNYVUxbQ2x2lUsFvY4hVgVzGiIe6WLOPqycWXA40l+PWsxthUw==", - "dev": true - }, "chalk": { "version": "4.1.2", "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", @@ -6794,6 +6614,15 @@ "supports-color": "^7.1.0" } }, + "chalk-template": { + "version": "0.4.0", + "resolved": "https://registry.npmjs.org/chalk-template/-/chalk-template-0.4.0.tgz", + "integrity": "sha512-/ghrgmhfY8RaSdeo43hNXxpoHAtxdbskUHjPpfqUWGttFgycUhYPGx3YZBCnUCvOa7Doivn1IZec3DEGFoMgLg==", + "dev": true, + "requires": { + "chalk": "^4.1.2" + } + }, "chokidar": { "version": "3.5.3", "resolved": "https://registry.npmjs.org/chokidar/-/chokidar-3.5.3.tgz", @@ -6823,20 +6652,20 @@ "dev": true }, "cli-boxes": { - "version": "2.2.1", - "resolved": "https://registry.npmjs.org/cli-boxes/-/cli-boxes-2.2.1.tgz", - "integrity": "sha512-y4coMcylgSCdVinjiDBuR8PCC2bLjyGTwEmPb9NHR/QaNU6EUOXcTY/s6VjGMD6ENSEaeQYHCY0GNGS5jfMwPw==", + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/cli-boxes/-/cli-boxes-3.0.0.tgz", + "integrity": "sha512-/lzGpEWL/8PfI0BmBOPRwp0c/wFNX1RdUML3jK/RcSBA9T8mZDdQpqYBKtCFTOfQbwPqWEOpjqW+Fnayc0969g==", "dev": true }, "clipboardy": { - "version": "2.3.0", - "resolved": "https://registry.npmjs.org/clipboardy/-/clipboardy-2.3.0.tgz", - "integrity": "sha512-mKhiIL2DrQIsuXMgBgnfEHOZOryC7kY7YO//TN6c63wlEm3NG5tz+YgY5rVi29KCmq/QQjKYvM7a19+MDOTHOQ==", + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/clipboardy/-/clipboardy-3.0.0.tgz", + "integrity": "sha512-Su+uU5sr1jkUy1sGRpLKjKrvEOVXgSgiSInwa/qeID6aJ07yh+5NWc3h2QfjHjBnfX4LhtFcuAWKUsJ3r+fjbg==", "dev": true, "requires": { - "arch": "^2.1.1", - "execa": "^1.0.0", - 
"is-wsl": "^2.1.1" + "arch": "^2.2.0", + "execa": "^5.1.1", + "is-wsl": "^2.2.0" } }, "cliui": { @@ -6877,15 +6706,6 @@ "integrity": "sha512-jeC1axXpnb0/2nn/Y1LPuLdgXBLH7aDcHu4KEKfqw3CUhX7ZpfBSlPKyqXE6btIgEzfWtrX3/tyBCaCvXvMkOw==", "dev": true }, - "combined-stream": { - "version": "1.0.8", - "resolved": "https://registry.npmjs.org/combined-stream/-/combined-stream-1.0.8.tgz", - "integrity": "sha512-FQN4MRfuJeHf7cBbBMJFXhKSDq+2kAArBlmRBvcvFE5BB1HZKXtSFASDhdlz9zOYwxh8lDdnvmMOe/+5cdoEdg==", - "dev": true, - "requires": { - "delayed-stream": "~1.0.0" - } - }, "commander": { "version": "8.3.0", "resolved": "https://registry.npmjs.org/commander/-/commander-8.3.0.tgz", @@ -6902,16 +6722,16 @@ } }, "compression": { - "version": "1.7.3", - "resolved": "https://registry.npmjs.org/compression/-/compression-1.7.3.tgz", - "integrity": "sha512-HSjyBG5N1Nnz7tF2+O7A9XUhyjru71/fwgNb7oIsEVHR0WShfs2tIS/EySLgiTe98aOK18YDlMXpzjCXY/n9mg==", + "version": "1.7.4", + "resolved": "https://registry.npmjs.org/compression/-/compression-1.7.4.tgz", + "integrity": "sha512-jaSIDzP9pZVS4ZfQ+TzvtiWhdpFhE2RDHz8QJkpX9SIpLq88VueF5jJw6t+6CUQcAoA6t+x89MLrWAqpfDE8iQ==", "dev": true, "requires": { "accepts": "~1.3.5", "bytes": "3.0.0", - "compressible": "~2.0.14", + "compressible": "~2.0.16", "debug": "2.6.9", - "on-headers": "~1.0.1", + "on-headers": "~1.0.2", "safe-buffer": "5.1.2", "vary": "~1.1.2" }, @@ -7013,15 +6833,6 @@ "integrity": "sha512-/Tb/JcjK111nNScGob5MNtsntNM1aCNUDipB/TkwZFhyDrrE47SOx/18wF2bbjgc3ZzCSKW1T5nt5EbFoAz/Vg==", "dev": true }, - "dashdash": { - "version": "1.14.1", - "resolved": "https://registry.npmjs.org/dashdash/-/dashdash-1.14.1.tgz", - "integrity": "sha512-jRFi8UDGo6j+odZiEpjazZaWqEal3w/basFjQHQEwVtZJGDpxbH1MeYluwCS8Xq5wmLJooDlMgvVarmWfGM44g==", - "dev": true, - "requires": { - "assert-plus": "^1.0.0" - } - }, "datatables.net": { "version": "1.13.6", "resolved": "https://registry.npmjs.org/datatables.net/-/datatables.net-1.13.6.tgz", @@ -7088,12 +6899,6 @@ "object-keys": "^1.1.1" } }, - "delayed-stream": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/delayed-stream/-/delayed-stream-1.0.0.tgz", - "integrity": "sha512-ZySD7Nf91aLB0RxL4KGrKHBXl7Eds1DAmEdcoVawXnLD7SDhpNgtuII2aAkg7a7QS41jxPSZ17p4VdGnMHk3MQ==", - "dev": true - }, "delegates": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/delegates/-/delegates-1.0.0.tgz", @@ -7115,15 +6920,11 @@ "path-type": "^4.0.0" } }, - "ecc-jsbn": { - "version": "0.1.2", - "resolved": "https://registry.npmjs.org/ecc-jsbn/-/ecc-jsbn-0.1.2.tgz", - "integrity": "sha512-eh9O+hwRHNbG4BLTjEl3nw044CkGm5X6LoaCf7LPp7UU8Qrt47JYNi6nPX8xjW97TKGKm1ouctg0QSpZe9qrnw==", - "dev": true, - "requires": { - "jsbn": "~0.1.0", - "safer-buffer": "^2.1.0" - } + "eastasianwidth": { + "version": "0.2.0", + "resolved": "https://registry.npmjs.org/eastasianwidth/-/eastasianwidth-0.2.0.tgz", + "integrity": "sha512-I88TYZWc9XiYHRQ4/3c5rjjfgkjhLyW2luGIheGERbNQ6OY7yTybanSpDXZa8y7VUP9YmDcYa+eyq4ca7iLqWA==", + "dev": true }, "electron-to-chromium": { "version": "1.4.504", @@ -7147,15 +6948,6 @@ "iconv-lite": "^0.6.2" } }, - "end-of-stream": { - "version": "1.4.4", - "resolved": "https://registry.npmjs.org/end-of-stream/-/end-of-stream-1.4.4.tgz", - "integrity": "sha512-+uw1inIHVPQoaVuHzRyXd21icM+cnt4CzD5rW+NC1wjOUSTOs+Te7FOv7AhN7vS9x/oIyhLP5PR1H+phQAHu5Q==", - "dev": true, - "requires": { - "once": "^1.4.0" - } - }, "env-paths": { "version": "2.2.1", "resolved": "https://registry.npmjs.org/env-paths/-/env-paths-2.2.1.tgz", @@ -7259,83 +7051,22 @@ "dev": true }, 
"execa": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/execa/-/execa-1.0.0.tgz", - "integrity": "sha512-adbxcyWV46qiHyvSp50TKt05tB4tK3HcmF7/nxfAdhnox83seTDbwnaqKO4sXRy7roHAIFqJP/Rw/AuEbX61LA==", + "version": "5.1.1", + "resolved": "https://registry.npmjs.org/execa/-/execa-5.1.1.tgz", + "integrity": "sha512-8uSpZZocAZRBAPIEINJj3Lo9HyGitllczc27Eh5YYojjMFMn8yHMDMaUHE2Jqfq05D/wucwI4JGURyXt1vchyg==", "dev": true, "requires": { - "cross-spawn": "^6.0.0", - "get-stream": "^4.0.0", - "is-stream": "^1.1.0", - "npm-run-path": "^2.0.0", - "p-finally": "^1.0.0", - "signal-exit": "^3.0.0", - "strip-eof": "^1.0.0" - }, - "dependencies": { - "cross-spawn": { - "version": "6.0.5", - "resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-6.0.5.tgz", - "integrity": "sha512-eTVLrBSt7fjbDygz805pMnstIs2VTBNkRm0qxZd+M7A5XDdxVRWO5MxGBXZhjY4cqLYLdtrGqRf8mBPmzwSpWQ==", - "dev": true, - "requires": { - "nice-try": "^1.0.4", - "path-key": "^2.0.1", - "semver": "^5.5.0", - "shebang-command": "^1.2.0", - "which": "^1.2.9" - } - }, - "path-key": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/path-key/-/path-key-2.0.1.tgz", - "integrity": "sha512-fEHGKCSmUSDPv4uoj8AlD+joPlq3peND+HRYyxFz4KPw4z926S/b8rIuFs2FYJg3BwsxJf6A9/3eIdLaYC+9Dw==", - "dev": true - }, - "semver": { - "version": "5.7.2", - "resolved": "https://registry.npmjs.org/semver/-/semver-5.7.2.tgz", - "integrity": "sha512-cBznnQ9KjJqU67B52RMC65CMarK2600WFnbkcaiwWq3xy/5haFJlshgnpjovMVJ+Hff49d8GEn0b87C5pDQ10g==", - "dev": true - }, - "shebang-command": { - "version": "1.2.0", - "resolved": "https://registry.npmjs.org/shebang-command/-/shebang-command-1.2.0.tgz", - "integrity": "sha512-EV3L1+UQWGor21OmnvojK36mhg+TyIKDh3iFBKBohr5xeXIhNBcx8oWdgkTEEQ+BEFFYdLRuqMfd5L84N1V5Vg==", - "dev": true, - "requires": { - "shebang-regex": "^1.0.0" - } - }, - "shebang-regex": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/shebang-regex/-/shebang-regex-1.0.0.tgz", - "integrity": "sha512-wpoSFAxys6b2a2wHZ1XpDSgD7N9iVjg29Ph9uV/uaP9Ex/KXlkTZTeddxDPSYQpgvzKLGJke2UU0AzoGCjNIvQ==", - "dev": true - }, - "which": { - "version": "1.3.1", - "resolved": "https://registry.npmjs.org/which/-/which-1.3.1.tgz", - "integrity": "sha512-HxJdYWq1MTIQbJ3nw0cqssHoTNU267KlrDuGZ1WYlxDStUtKUhOaJmh112/TZmHxxUfuJqPXSOm7tDyas0OSIQ==", - "dev": true, - "requires": { - "isexe": "^2.0.0" - } - } + "cross-spawn": "^7.0.3", + "get-stream": "^6.0.0", + "human-signals": "^2.1.0", + "is-stream": "^2.0.0", + "merge-stream": "^2.0.0", + "npm-run-path": "^4.0.1", + "onetime": "^5.1.2", + "signal-exit": "^3.0.3", + "strip-final-newline": "^2.0.0" } }, - "extend": { - "version": "3.0.2", - "resolved": "https://registry.npmjs.org/extend/-/extend-3.0.2.tgz", - "integrity": "sha512-fjquC59cD7CyW6urNXK0FBufkZcoiGG80wTuPujX590cB5Ttln20E2UB4S/WARVqhXffZl2LNgS+gQdPIIim/g==", - "dev": true - }, - "extsprintf": { - "version": "1.3.0", - "resolved": "https://registry.npmjs.org/extsprintf/-/extsprintf-1.3.0.tgz", - "integrity": "sha512-11Ndz7Nv+mvAC1j0ktTa7fAb0vLyGGX+rMHNBYQviQDGU0Hw7lhctJANqbPhu9nV9/izT/IntTgZ7Im/9LJs9g==", - "dev": true - }, "fast-deep-equal": { "version": "3.1.3", "resolved": "https://registry.npmjs.org/fast-deep-equal/-/fast-deep-equal-3.1.3.tgz", @@ -7355,12 +7086,6 @@ "micromatch": "^4.0.4" } }, - "fast-json-stable-stringify": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/fast-json-stable-stringify/-/fast-json-stable-stringify-2.1.0.tgz", - "integrity": 
"sha512-lhd/wF+Lk98HZoTCtlVraHtfh5XYijIjalXck7saUtuanSDyLMxnHhSXEDJqHxD7msR8D0uCmqlkwjCV8xvwHw==", - "dev": true - }, "fast-url-parser": { "version": "1.1.3", "resolved": "https://registry.npmjs.org/fast-url-parser/-/fast-url-parser-1.1.3.tgz", @@ -7439,23 +7164,6 @@ "is-callable": "^1.1.3" } }, - "forever-agent": { - "version": "0.6.1", - "resolved": "https://registry.npmjs.org/forever-agent/-/forever-agent-0.6.1.tgz", - "integrity": "sha512-j0KLYPhm6zeac4lz3oJ3o65qvgQCcPubiyotZrXqEaG4hNagNYO8qdlUrX5vwqv9ohqeT/Z3j6+yW067yWWdUw==", - "dev": true - }, - "form-data": { - "version": "2.3.3", - "resolved": "https://registry.npmjs.org/form-data/-/form-data-2.3.3.tgz", - "integrity": "sha512-1lLKB2Mu3aGP1Q/2eCOx0fNbRMe7XdwktwOruhfqqd0rIJWwN4Dh+E3hrPSlDCXnSR7UtZ1N38rVXm+6+MEhJQ==", - "dev": true, - "requires": { - "asynckit": "^0.4.0", - "combined-stream": "^1.0.6", - "mime-types": "^2.1.12" - } - }, "fraction.js": { "version": "4.2.1", "resolved": "https://registry.npmjs.org/fraction.js/-/fraction.js-4.2.1.tgz", @@ -7519,23 +7227,6 @@ "integrity": "sha512-xckBUXyTIqT97tq2x2AMb+g163b5JFysYk0x4qxNFwbfQkmNZoiRHb6sPzI9/QV33WeuvVYBUIiD4NzNIyqaRQ==", "dev": true }, - "gauge": { - "version": "3.0.2", - "resolved": "https://registry.npmjs.org/gauge/-/gauge-3.0.2.tgz", - "integrity": "sha512-+5J6MS/5XksCuXq++uFRsnUd7Ovu1XenbeuIuNRJxYWjgQbPuFhT14lAvsWfqfAmnwluf1OwMjz39HjfLPci0Q==", - "dev": true, - "requires": { - "aproba": "^1.0.3 || ^2.0.0", - "color-support": "^1.1.2", - "console-control-strings": "^1.0.0", - "has-unicode": "^2.0.1", - "object-assign": "^4.1.1", - "signal-exit": "^3.0.0", - "string-width": "^4.2.3", - "strip-ansi": "^6.0.1", - "wide-align": "^1.1.2" - } - }, "gaze": { "version": "1.1.3", "resolved": "https://registry.npmjs.org/gaze/-/gaze-1.1.3.tgz", @@ -7570,13 +7261,10 @@ "dev": true }, "get-stream": { - "version": "4.1.0", - "resolved": "https://registry.npmjs.org/get-stream/-/get-stream-4.1.0.tgz", - "integrity": "sha512-GMat4EJ5161kIy2HevLlr4luNjBgvmj413KaQA7jt4V8B4RDsfpHk7WQ9GVqfYyyx8OS/L66Kox+rJRNklLK7w==", - "dev": true, - "requires": { - "pump": "^3.0.0" - } + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/get-stream/-/get-stream-6.0.1.tgz", + "integrity": "sha512-ts6Wi+2j3jQjqi70w5AlN8DFnkSwC+MqmxEzdEALB2qXZYV3X/b1CTfgPLGJNMeAWxdPfU8FO1ms3NUfaHCPYg==", + "dev": true }, "get-symbol-description": { "version": "1.0.0", @@ -7588,15 +7276,6 @@ "get-intrinsic": "^1.1.1" } }, - "getpass": { - "version": "0.1.7", - "resolved": "https://registry.npmjs.org/getpass/-/getpass-0.1.7.tgz", - "integrity": "sha512-0fzj9JxOLfJ+XGLhR8ze3unN0KZCgZwiSSDz168VERjK8Wl8kVSdcu2kspd4s4wtAa1y/qrVRiAA0WclVsu0ng==", - "dev": true, - "requires": { - "assert-plus": "^1.0.0" - } - }, "glob": { "version": "7.2.3", "resolved": "https://registry.npmjs.org/glob/-/glob-7.2.3.tgz", @@ -7731,22 +7410,6 @@ "integrity": "sha512-RbJ5/jmFcNNCcDV5o9eTnBLJ/HszWV0P73bc+Ff4nS/rJj+YaS6IGyiOL0VoBYX+l1Wrl3k63h/KrH+nhJ0XvQ==", "dev": true }, - "har-schema": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/har-schema/-/har-schema-2.0.0.tgz", - "integrity": "sha512-Oqluz6zhGX8cyRaTQlFMPw80bSJVG2x/cFb8ZPhUILGgHka9SsokCCOQgpveePerqidZOrT14ipqfJb7ILcW5Q==", - "dev": true - }, - "har-validator": { - "version": "5.1.5", - "resolved": "https://registry.npmjs.org/har-validator/-/har-validator-5.1.5.tgz", - "integrity": "sha512-nmT2T0lljbxdQZfspsno9hgrG3Uir6Ks5afism62poxqBM6sDnMEuPmzTq8XN0OEwqKLLdh1jQI3qyE66Nzb3w==", - "dev": true, - "requires": { - "ajv": "^6.12.3", - "har-schema": "^2.0.0" - } - }, 
"hard-rejection": { "version": "2.1.0", "resolved": "https://registry.npmjs.org/hard-rejection/-/hard-rejection-2.1.0.tgz", @@ -7842,17 +7505,6 @@ "debug": "4" } }, - "http-signature": { - "version": "1.2.0", - "resolved": "https://registry.npmjs.org/http-signature/-/http-signature-1.2.0.tgz", - "integrity": "sha512-CAbnr6Rz4CYQkLYUtSNXxQPUH2gK8f3iWexVlsnMeD+GjlsQ0Xsy1cOX+mN3dtxYomRy21CiOzU8Uhw6OwncEQ==", - "dev": true, - "requires": { - "assert-plus": "^1.0.0", - "jsprim": "^1.2.2", - "sshpk": "^1.7.0" - } - }, "https-proxy-agent": { "version": "5.0.1", "resolved": "https://registry.npmjs.org/https-proxy-agent/-/https-proxy-agent-5.0.1.tgz", @@ -7863,6 +7515,12 @@ "debug": "4" } }, + "human-signals": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/human-signals/-/human-signals-2.1.0.tgz", + "integrity": "sha512-B4FFZ6q/T2jhhksgkbEW3HBvWIfDW85snkQgawt07S7J5QXTk6BkNV+0yAeZrM5QpMAdYlocGoljn0sJ/WQkFw==", + "dev": true + }, "humanize-ms": { "version": "1.2.1", "resolved": "https://registry.npmjs.org/humanize-ms/-/humanize-ms-1.2.1.tgz", @@ -8110,6 +7768,12 @@ "integrity": "sha512-VRSzKkbMm5jMDoKLbltAkFQ5Qr7VDiTFGXxYFXXowVj387GeGNOCsOH6Msy00SGZ3Fp84b1Naa1psqgcCIEP5Q==", "dev": true }, + "is-port-reachable": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/is-port-reachable/-/is-port-reachable-4.0.0.tgz", + "integrity": "sha512-9UoipoxYmSk6Xy7QFgRv2HDyaysmgSG75TFQs6S+3pDM7ZhKTF/bskZV+0UlABHzKjNVhPjYCLfeZUEg1wXxig==", + "dev": true + }, "is-regex": { "version": "1.1.4", "resolved": "https://registry.npmjs.org/is-regex/-/is-regex-1.1.4.tgz", @@ -8130,9 +7794,9 @@ } }, "is-stream": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/is-stream/-/is-stream-1.1.0.tgz", - "integrity": "sha512-uQPm8kcs47jx38atAcWTVxyltQYoPT68y9aWYdV6yWXSyW8mzSat0TL6CiWdZeCdF3KrAvpVtnHbTv4RN+rqdQ==", + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/is-stream/-/is-stream-2.0.1.tgz", + "integrity": "sha512-hFoiJiTl63nn+kstHGBtewWSKnQLpyb155KHheA1l39uvtO9nWIop1p3udqPcUd/xbF1VLMO4n7OI6p7RbngDg==", "dev": true }, "is-string": { @@ -8162,12 +7826,6 @@ "which-typed-array": "^1.1.11" } }, - "is-typedarray": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/is-typedarray/-/is-typedarray-1.0.0.tgz", - "integrity": "sha512-cyA56iCMHAh5CdzjJIa4aohJyeO1YbwLi3Jc35MmRU6poroFjIGZzUzupGiRPOjgHg9TLu43xbpwXk523fMxKA==", - "dev": true - }, "is-weakref": { "version": "1.0.2", "resolved": "https://registry.npmjs.org/is-weakref/-/is-weakref-1.0.2.tgz", @@ -8198,12 +7856,6 @@ "integrity": "sha512-RHxMLp9lnKHGHRng9QFhRCMbYAcVpn69smSGcq3f36xjgVVWThj4qqLbTLlq7Ssj8B+fIQ1EuCEGI2lKsyQeIw==", "dev": true }, - "isstream": { - "version": "0.1.2", - "resolved": "https://registry.npmjs.org/isstream/-/isstream-0.1.2.tgz", - "integrity": "sha512-Yljz7ffyPbrLpLngrMtZ7NduUgVvi6wG9RJ9IUcyCd59YQ911PBJphODUcbOVbqYfxe1wuYf/LJ8PauMRwsM/g==", - "dev": true - }, "jquery": { "version": "3.7.1", "resolved": "https://registry.npmjs.org/jquery/-/jquery-3.7.1.tgz", @@ -8221,12 +7873,6 @@ "integrity": "sha512-RdJUflcE3cUzKiMqQgsCu06FPu9UdIJO0beYbPhHN4k6apgJtifcoCtT9bcxOpYBtpD2kCM6Sbzg4CausW/PKQ==", "dev": true }, - "jsbn": { - "version": "0.1.1", - "resolved": "https://registry.npmjs.org/jsbn/-/jsbn-0.1.1.tgz", - "integrity": "sha512-UVU9dibq2JcFWxQPA6KCqj5O42VOmAY3zQUfEKxU0KpTGXwNoCjkX1e13eHNvw/xPynt6pU0rZ1htjWTNTSXsg==", - "dev": true - }, "json-buffer": { "version": "3.0.1", "resolved": "https://registry.npmjs.org/json-buffer/-/json-buffer-3.0.1.tgz", @@ -8245,22 +7891,10 @@ "integrity": 
"sha512-xyFwyhro/JEof6Ghe2iz2NcXoj2sloNsWr/XsERDK/oiPCfaNhl5ONfp+jQdAZRQQ0IJWNzH9zIZF7li91kh2w==", "dev": true }, - "json-schema": { - "version": "0.4.0", - "resolved": "https://registry.npmjs.org/json-schema/-/json-schema-0.4.0.tgz", - "integrity": "sha512-es94M3nTIfsEPisRafak+HDLfHXnKBhV3vU5eqPcS3flIWqcxJWgXHXiey3YrpaNsanY5ei1VoYEbOzijuq9BA==", - "dev": true - }, "json-schema-traverse": { - "version": "0.4.1", - "resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-0.4.1.tgz", - "integrity": "sha512-xbbCH5dCYU5T8LcEhhuh7HJ88HXuW3qsI3Y0zOZFKfZEHcpWiHU/Jxzk629Brsab/mMiHQti9wMP+845RPe3Vg==", - "dev": true - }, - "json-stringify-safe": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/json-stringify-safe/-/json-stringify-safe-5.0.1.tgz", - "integrity": "sha512-ZClg6AaYvamvYEE82d3Iyd3vSSIjQ+odgjaTzRuO3s7toCdFKczob2i0zCh7JE8kWn17yvAWhUVxvqGwUalsRA==", + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-1.0.0.tgz", + "integrity": "sha512-NM8/P9n3XjXhIZn1lLhkFaACTOURQXjWhV4BA/RnOv8xvgqtqpAX9IO4mRQxSx1Rlo4tqzeqb0sOlruaOy3dug==", "dev": true }, "jsonfile": { @@ -8273,18 +7907,6 @@ "universalify": "^2.0.0" } }, - "jsprim": { - "version": "1.4.2", - "resolved": "https://registry.npmjs.org/jsprim/-/jsprim-1.4.2.tgz", - "integrity": "sha512-P2bSOMAc/ciLz6DzgjVlGJP9+BrJWu5UDGK70C2iweC5QBIeFf0ZXRvGjEj2uYgrY2MkAAhsSWHDWlFtEroZWw==", - "dev": true, - "requires": { - "assert-plus": "1.0.0", - "extsprintf": "1.3.0", - "json-schema": "0.4.0", - "verror": "1.10.0" - } - }, "keyv": { "version": "4.5.3", "resolved": "https://registry.npmjs.org/keyv/-/keyv-4.5.3.tgz", @@ -8430,6 +8052,12 @@ "yargs-parser": "^20.2.3" } }, + "merge-stream": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/merge-stream/-/merge-stream-2.0.0.tgz", + "integrity": "sha512-abv/qOcuPfk3URPfDzmZU1LKmuw8kT+0nIHvKrKgFrwifol/doWcdA4ZqsWQ8ENrFKkd67Mfpo/LovbIUsbt3w==", + "dev": true + }, "merge2": { "version": "1.4.1", "resolved": "https://registry.npmjs.org/merge2/-/merge2-1.4.1.tgz", @@ -8461,6 +8089,12 @@ "mime-db": "1.52.0" } }, + "mimic-fn": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/mimic-fn/-/mimic-fn-2.1.0.tgz", + "integrity": "sha512-OqbOk5oEQeAZ8WXWydlu9HJjz9WVdEIvamMCcXmuqUYjTknH/sqsWvhQ3vgwKFRR1HpjvNBKQ37nbJgYzGqGcg==", + "dev": true + }, "min-indent": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/min-indent/-/min-indent-1.0.1.tgz", @@ -8661,9 +8295,9 @@ "dev": true }, "node-sass": { - "version": "7.0.3", - "resolved": "https://registry.npmjs.org/node-sass/-/node-sass-7.0.3.tgz", - "integrity": "sha512-8MIlsY/4dXUkJDYht9pIWBhMil3uHmE8b/AdJPjmFn1nBx9X9BASzfzmsCy0uCCb8eqI3SYYzVPDswWqSx7gjw==", + "version": "9.0.0", + "resolved": "https://registry.npmjs.org/node-sass/-/node-sass-9.0.0.tgz", + "integrity": "sha512-yltEuuLrfH6M7Pq2gAj5B6Zm7m+gdZoG66wTqG6mIZV/zijq3M2OO2HswtT6oBspPyFhHDcaxWpsBm0fRNDHPg==", "dev": true, "requires": { "async-foreach": "^0.1.3", @@ -8673,20 +8307,197 @@ "get-stdin": "^4.0.1", "glob": "^7.0.3", "lodash": "^4.17.15", + "make-fetch-happen": "^10.0.4", "meow": "^9.0.0", - "nan": "^2.13.2", + "nan": "^2.17.0", "node-gyp": "^8.4.1", - "npmlog": "^5.0.0", - "request": "^2.88.0", "sass-graph": "^4.0.1", "stdout-stream": "^1.4.0", - "true-case-path": "^1.0.2" + "true-case-path": "^2.2.1" + }, + "dependencies": { + "@npmcli/fs": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/@npmcli/fs/-/fs-2.1.2.tgz", + "integrity": 
"sha512-yOJKRvohFOaLqipNtwYB9WugyZKhC/DZC4VYPmpaCzDBrA8YpK3qHZ8/HGscMnE4GqbkLNuVcCnxkeQEdGt6LQ==", + "dev": true, + "requires": { + "@gar/promisify": "^1.1.3", + "semver": "^7.3.5" + } + }, + "@npmcli/move-file": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/@npmcli/move-file/-/move-file-2.0.1.tgz", + "integrity": "sha512-mJd2Z5TjYWq/ttPLLGqArdtnC74J6bOzg4rMDnN+p1xTacZ2yPRCk2y0oSWQtygLR9YVQXgOcONrwtnk3JupxQ==", + "dev": true, + "requires": { + "mkdirp": "^1.0.4", + "rimraf": "^3.0.2" + } + }, + "@tootallnate/once": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/@tootallnate/once/-/once-2.0.0.tgz", + "integrity": "sha512-XCuKFP5PS55gnMVu3dty8KPatLqUoy/ZYzDzAGCQ8JNFCkLXzmI7vNHCR+XpbZaMWQK/vQubr7PkYq8g470J/A==", + "dev": true + }, + "brace-expansion": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.0.1.tgz", + "integrity": "sha512-XnAIvQ8eM+kC6aULx6wuQiwVsnzsi9d3WxzV3FpWTGA19F621kwdbsAcFKXgKUHZWsy+mY6iL1sHTxWEFCytDA==", + "dev": true, + "requires": { + "balanced-match": "^1.0.0" + } + }, + "cacache": { + "version": "16.1.3", + "resolved": "https://registry.npmjs.org/cacache/-/cacache-16.1.3.tgz", + "integrity": "sha512-/+Emcj9DAXxX4cwlLmRI9c166RuL3w30zp4R7Joiv2cQTtTtA+jeuCAjH3ZlGnYS3tKENSrKhAzVVP9GVyzeYQ==", + "dev": true, + "requires": { + "@npmcli/fs": "^2.1.0", + "@npmcli/move-file": "^2.0.0", + "chownr": "^2.0.0", + "fs-minipass": "^2.1.0", + "glob": "^8.0.1", + "infer-owner": "^1.0.4", + "lru-cache": "^7.7.1", + "minipass": "^3.1.6", + "minipass-collect": "^1.0.2", + "minipass-flush": "^1.0.5", + "minipass-pipeline": "^1.2.4", + "mkdirp": "^1.0.4", + "p-map": "^4.0.0", + "promise-inflight": "^1.0.1", + "rimraf": "^3.0.2", + "ssri": "^9.0.0", + "tar": "^6.1.11", + "unique-filename": "^2.0.0" + }, + "dependencies": { + "glob": { + "version": "8.1.0", + "resolved": "https://registry.npmjs.org/glob/-/glob-8.1.0.tgz", + "integrity": "sha512-r8hpEjiQEYlF2QU0df3dS+nxxSIreXQS1qRhMJM0Q5NDdR386C7jb7Hwwod8Fgiuex+k0GFjgft18yvxm5XoCQ==", + "dev": true, + "requires": { + "fs.realpath": "^1.0.0", + "inflight": "^1.0.4", + "inherits": "2", + "minimatch": "^5.0.1", + "once": "^1.3.0" + } + } + } + }, + "http-proxy-agent": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/http-proxy-agent/-/http-proxy-agent-5.0.0.tgz", + "integrity": "sha512-n2hY8YdoRE1i7r6M0w9DIw5GgZN0G25P8zLCRQ8rjXtTU3vsNFBI/vWK/UIeE6g5MUUz6avwAPXmL6Fy9D/90w==", + "dev": true, + "requires": { + "@tootallnate/once": "2", + "agent-base": "6", + "debug": "4" + } + }, + "lru-cache": { + "version": "7.18.3", + "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-7.18.3.tgz", + "integrity": "sha512-jumlc0BIUrS3qJGgIkWZsyfAM7NCWiBcCDhnd+3NNM5KbBmLTgHVfWBcg6W+rLUsIpzpERPsvwUP7CckAQSOoA==", + "dev": true + }, + "make-fetch-happen": { + "version": "10.2.1", + "resolved": "https://registry.npmjs.org/make-fetch-happen/-/make-fetch-happen-10.2.1.tgz", + "integrity": "sha512-NgOPbRiaQM10DYXvN3/hhGVI2M5MtITFryzBGxHM5p4wnFxsVCbxkrBrDsk+EZ5OB4jEOT7AjDxtdF+KVEFT7w==", + "dev": true, + "requires": { + "agentkeepalive": "^4.2.1", + "cacache": "^16.1.0", + "http-cache-semantics": "^4.1.0", + "http-proxy-agent": "^5.0.0", + "https-proxy-agent": "^5.0.0", + "is-lambda": "^1.0.1", + "lru-cache": "^7.7.1", + "minipass": "^3.1.6", + "minipass-collect": "^1.0.2", + "minipass-fetch": "^2.0.3", + "minipass-flush": "^1.0.5", + "minipass-pipeline": "^1.2.4", + "negotiator": "^0.6.3", + "promise-retry": "^2.0.1", + "socks-proxy-agent": "^7.0.0", + 
"ssri": "^9.0.0" + } + }, + "minimatch": { + "version": "5.1.6", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-5.1.6.tgz", + "integrity": "sha512-lKwV/1brpG6mBUFHtb7NUmtABCb2WZZmm2wNiOA5hAb8VdCS4B3dtMWyvcoViccwAW/COERjXLt0zP1zXUN26g==", + "dev": true, + "requires": { + "brace-expansion": "^2.0.1" + } + }, + "minipass-fetch": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/minipass-fetch/-/minipass-fetch-2.1.2.tgz", + "integrity": "sha512-LT49Zi2/WMROHYoqGgdlQIZh8mLPZmOrN2NdJjMXxYe4nkN6FUyuPuOAOedNJDrx0IRGg9+4guZewtp8hE6TxA==", + "dev": true, + "requires": { + "encoding": "^0.1.13", + "minipass": "^3.1.6", + "minipass-sized": "^1.0.3", + "minizlib": "^2.1.2" + } + }, + "socks-proxy-agent": { + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/socks-proxy-agent/-/socks-proxy-agent-7.0.0.tgz", + "integrity": "sha512-Fgl0YPZ902wEsAyiQ+idGd1A7rSFx/ayC1CQVMw5P+EQx2V0SgpGtf6OKFhVjPflPUl9YMmEOnmfjCdMUsygww==", + "dev": true, + "requires": { + "agent-base": "^6.0.2", + "debug": "^4.3.3", + "socks": "^2.6.2" + } + }, + "ssri": { + "version": "9.0.1", + "resolved": "https://registry.npmjs.org/ssri/-/ssri-9.0.1.tgz", + "integrity": "sha512-o57Wcn66jMQvfHG1FlYbWeZWW/dHZhJXjpIcTfXldXEk5nz5lStPo3mK0OJQfGR3RbZUlbISexbljkJzuEj/8Q==", + "dev": true, + "requires": { + "minipass": "^3.1.1" + } + }, + "unique-filename": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/unique-filename/-/unique-filename-2.0.1.tgz", + "integrity": "sha512-ODWHtkkdx3IAR+veKxFV+VBkUMcN+FaqzUUd7IZzt+0zhDZFPFxhlqwPF3YQvMHx1TD0tdgYl+kuPnJ8E6ql7A==", + "dev": true, + "requires": { + "unique-slug": "^3.0.0" + } + }, + "unique-slug": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/unique-slug/-/unique-slug-3.0.0.tgz", + "integrity": "sha512-8EyMynh679x/0gqE9fT9oilG+qEt+ibFyqjuVTsZn1+CMxH+XLlpvr2UZx4nVcCwTpx81nICr2JQFkM+HPLq4w==", + "dev": true, + "requires": { + "imurmurhash": "^0.1.4" + } + } } }, "nodemon": { - "version": "2.0.22", - "resolved": "https://registry.npmjs.org/nodemon/-/nodemon-2.0.22.tgz", - "integrity": "sha512-B8YqaKMmyuCO7BowF1Z1/mkPqLk6cs/l63Ojtd6otKjMx47Dq1utxfRxcavH1I7VSaL8n5BUaoutadnsX3AAVQ==", + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/nodemon/-/nodemon-3.0.1.tgz", + "integrity": "sha512-g9AZ7HmkhQkqXkRc20w+ZfQ73cHLbE8hnPbtaFbFtCumZsjyMhKk9LajQ07U5Ux28lvFjZ5X7HvWR1xzU8jHVw==", "dev": true, "requires": { "chokidar": "^3.5.2", @@ -8694,8 +8505,8 @@ "ignore-by-default": "^1.0.1", "minimatch": "^3.1.2", "pstree.remy": "^1.1.8", - "semver": "^5.7.1", - "simple-update-notifier": "^1.0.7", + "semver": "^7.5.3", + "simple-update-notifier": "^2.0.0", "supports-color": "^5.5.0", "touch": "^3.1.0", "undefsafe": "^2.0.5" @@ -8716,12 +8527,6 @@ "integrity": "sha512-sKJf1+ceQBr4SMkvQnBDNDtf4TXpVhVGateu0t918bl30FnbE2m4vNLX+VWe/dpjlb+HugGYzW7uQXH98HPEYw==", "dev": true }, - "semver": { - "version": "5.7.2", - "resolved": "https://registry.npmjs.org/semver/-/semver-5.7.2.tgz", - "integrity": "sha512-cBznnQ9KjJqU67B52RMC65CMarK2600WFnbkcaiwWq3xy/5haFJlshgnpjovMVJ+Hff49d8GEn0b87C5pDQ10g==", - "dev": true - }, "supports-color": { "version": "5.5.0", "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-5.5.0.tgz", @@ -8885,46 +8690,14 @@ } }, "npm-run-path": { - "version": "2.0.2", - "resolved": "https://registry.npmjs.org/npm-run-path/-/npm-run-path-2.0.2.tgz", - "integrity": "sha512-lJxZYlT4DW/bRUtFh1MQIWqmLwQfAxnqWG4HhEdjMlkrJYnJn0Jrr2u3mgxqaWsdiBc76TYkTG/mhrnYTuzfHw==", - "dev": true, - "requires": { - 
"path-key": "^2.0.0" - }, - "dependencies": { - "path-key": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/path-key/-/path-key-2.0.1.tgz", - "integrity": "sha512-fEHGKCSmUSDPv4uoj8AlD+joPlq3peND+HRYyxFz4KPw4z926S/b8rIuFs2FYJg3BwsxJf6A9/3eIdLaYC+9Dw==", - "dev": true - } - } - }, - "npmlog": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/npmlog/-/npmlog-5.0.1.tgz", - "integrity": "sha512-AqZtDUWOMKs1G/8lwylVjrdYgqA4d9nu8hc+0gzRxlDb1I10+FHBGMXs6aiQHFdCUUlqH99MUMuLfzWDNDtfxw==", + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/npm-run-path/-/npm-run-path-4.0.1.tgz", + "integrity": "sha512-S48WzZW777zhNIrn7gxOlISNAqi9ZC/uQFnRdbeIHhZhCA6UqpkOT8T1G7BvfdgP4Er8gF4sUbaS0i7QvIfCWw==", "dev": true, "requires": { - "are-we-there-yet": "^2.0.0", - "console-control-strings": "^1.1.0", - "gauge": "^3.0.0", - "set-blocking": "^2.0.0" + "path-key": "^3.0.0" } }, - "oauth-sign": { - "version": "0.9.0", - "resolved": "https://registry.npmjs.org/oauth-sign/-/oauth-sign-0.9.0.tgz", - "integrity": "sha512-fexhUFFPTGV8ybAtSIGbV6gOkSv8UtRbDBnAyLQw4QPKkgNlsH2ByPGtMUqdWkos6YCRmAqViwgZrJc/mRDzZQ==", - "dev": true - }, - "object-assign": { - "version": "4.1.1", - "resolved": "https://registry.npmjs.org/object-assign/-/object-assign-4.1.1.tgz", - "integrity": "sha512-rJgTQnkUnH1sFw8yT6VSU3zD3sWmu6sZhIseY8VX+GRu3P6F7Fu+JNDoXfklElbLJSnc3FUQHVe4cU5hj+BcUg==", - "dev": true - }, "object-inspect": { "version": "1.12.3", "resolved": "https://registry.npmjs.org/object-inspect/-/object-inspect-1.12.3.tgz", @@ -8964,11 +8737,14 @@ "wrappy": "1" } }, - "p-finally": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/p-finally/-/p-finally-1.0.0.tgz", - "integrity": "sha512-LICb2p9CB7FS+0eR1oqWnHhp0FljGLZCWBE9aix0Uye9W8LTQPwMTYVGWQWIw9RdQiDg4+epXQODwIYJtSJaow==", - "dev": true + "onetime": { + "version": "5.1.2", + "resolved": "https://registry.npmjs.org/onetime/-/onetime-5.1.2.tgz", + "integrity": "sha512-kbpaSSGJTWdAY5KPVeMOKXSrPtr8C8C7wodJbcsd51jRnmD+GZu8Y0VoU6Dm5Z4vWr0Ig/1NKuWRKf7j5aaYSg==", + "dev": true, + "requires": { + "mimic-fn": "^2.1.0" + } }, "p-limit": { "version": "2.3.0", @@ -9064,12 +8840,6 @@ "integrity": "sha512-gDKb8aZMDeD/tZWs9P6+q0J9Mwkdl6xMV8TjnGP3qJVJ06bdMgkbBlLU8IdfOsIsFz2BW1rNVT3XuNEl8zPAvw==", "dev": true }, - "performance-now": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/performance-now/-/performance-now-2.1.0.tgz", - "integrity": "sha512-7EAHlyLHI56VEIdK57uwHdHKIaAGbnXPiw0yWbarQZOKaKpvUIgW0jWRVLiatnM+XXlSwsanIBH/hzGMJulMow==", - "dev": true - }, "picocolors": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/picocolors/-/picocolors-1.0.0.tgz", @@ -9230,28 +9000,12 @@ "retry": "^0.12.0" } }, - "psl": { - "version": "1.9.0", - "resolved": "https://registry.npmjs.org/psl/-/psl-1.9.0.tgz", - "integrity": "sha512-E/ZsdU4HLs/68gYzgGTkMicWTLPdAftJLfJFlLUAAKZGkStNU72sZjT66SnMDVOfOWY/YAoiD7Jxa9iHvngcag==", - "dev": true - }, "pstree.remy": { "version": "1.1.8", "resolved": "https://registry.npmjs.org/pstree.remy/-/pstree.remy-1.1.8.tgz", "integrity": "sha512-77DZwxQmxKnu3aR542U+X8FypNzbfJ+C5XQDk3uWjWxn6151aIMGthWYRXTqT1E5oJvg+ljaa2OJi+VfvCOQ8w==", "dev": true }, - "pump": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/pump/-/pump-3.0.0.tgz", - "integrity": "sha512-LwZy+p3SFs1Pytd/jYct4wpv49HiYCqd9Rlc5ZVdk0V+8Yzv6jR5Blk3TRmPL1ft69TxP0IMZGJ+WPFU2BFhww==", - "dev": true, - "requires": { - "end-of-stream": "^1.1.0", - "once": "^1.3.1" - } - }, "punycode": { "version": "1.4.1", "resolved": 
"https://registry.npmjs.org/punycode/-/punycode-1.4.1.tgz", @@ -9270,12 +9024,6 @@ "postcss-selector-parser": "^6.0.6" } }, - "qs": { - "version": "6.5.3", - "resolved": "https://registry.npmjs.org/qs/-/qs-6.5.3.tgz", - "integrity": "sha512-qxXIEh4pCGfHICj1mAJQ2/2XVZkjCDTcEgfoSQxc/fYivUZxTkk7L3bDBJSoNrEzXI17oUO5Dp07ktqE5KzczA==", - "dev": true - }, "queue-microtask": { "version": "1.2.3", "resolved": "https://registry.npmjs.org/queue-microtask/-/queue-microtask-1.2.3.tgz", @@ -9502,34 +9250,6 @@ "rc": "^1.0.1" } }, - "request": { - "version": "2.88.2", - "resolved": "https://registry.npmjs.org/request/-/request-2.88.2.tgz", - "integrity": "sha512-MsvtOrfG9ZcrOwAW+Qi+F6HbD0CWXEh9ou77uOb7FM2WPhwT7smM833PzanhJLsgXjN89Ir6V2PczXNnMpwKhw==", - "dev": true, - "requires": { - "aws-sign2": "~0.7.0", - "aws4": "^1.8.0", - "caseless": "~0.12.0", - "combined-stream": "~1.0.6", - "extend": "~3.0.2", - "forever-agent": "~0.6.1", - "form-data": "~2.3.2", - "har-validator": "~5.1.3", - "http-signature": "~1.2.0", - "is-typedarray": "~1.0.0", - "isstream": "~0.1.2", - "json-stringify-safe": "~5.0.1", - "mime-types": "~2.1.19", - "oauth-sign": "~0.9.0", - "performance-now": "^2.1.0", - "qs": "~6.5.2", - "safe-buffer": "^5.1.2", - "tough-cookie": "~2.5.0", - "tunnel-agent": "^0.6.0", - "uuid": "^3.3.2" - } - }, "require-directory": { "version": "2.1.1", "resolved": "https://registry.npmjs.org/require-directory/-/require-directory-2.1.1.tgz", @@ -9622,7 +9342,8 @@ "version": "2.1.2", "resolved": "https://registry.npmjs.org/safer-buffer/-/safer-buffer-2.1.2.tgz", "integrity": "sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg==", - "dev": true + "dev": true, + "optional": true }, "sass-graph": { "version": "4.0.1", @@ -9656,85 +9377,43 @@ } }, "serve": { - "version": "13.0.4", - "resolved": "https://registry.npmjs.org/serve/-/serve-13.0.4.tgz", - "integrity": "sha512-Lj8rhXmphJCRQVv5qwu0NQZ2h+0MrRyRJxDZu5y3qLH2i/XY6a0FPj/VmjMUdkJb672MBfE8hJ274PU6JzBd0Q==", + "version": "14.2.1", + "resolved": "https://registry.npmjs.org/serve/-/serve-14.2.1.tgz", + "integrity": "sha512-48er5fzHh7GCShLnNyPBRPEjs2I6QBozeGr02gaacROiyS/8ARADlj595j39iZXAqBbJHH/ivJJyPRWY9sQWZA==", "dev": true, "requires": { - "@zeit/schemas": "2.6.0", - "ajv": "6.12.6", - "arg": "2.0.0", - "boxen": "5.1.2", - "chalk": "2.4.1", - "clipboardy": "2.3.0", - "compression": "1.7.3", - "serve-handler": "6.1.3", - "update-check": "1.5.2" + "@zeit/schemas": "2.29.0", + "ajv": "8.11.0", + "arg": "5.0.2", + "boxen": "7.0.0", + "chalk": "5.0.1", + "chalk-template": "0.4.0", + "clipboardy": "3.0.0", + "compression": "1.7.4", + "is-port-reachable": "4.0.0", + "serve-handler": "6.1.5", + "update-check": "1.5.4" }, "dependencies": { - "ansi-styles": { - "version": "3.2.1", - "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-3.2.1.tgz", - "integrity": "sha512-VT0ZI6kZRdTh8YyJw3SMbYm/u+NqfsAxEpWO0Pf9sq8/e94WxxOpPKx9FR1FlyCtOVDNOQ+8ntlqFxiRc+r5qA==", - "dev": true, - "requires": { - "color-convert": "^1.9.0" - } - }, "chalk": { - "version": "2.4.1", - "resolved": "https://registry.npmjs.org/chalk/-/chalk-2.4.1.tgz", - "integrity": "sha512-ObN6h1v2fTJSmUXoS3nMQ92LbDK9be4TV+6G+omQlGJFdcUX5heKi1LZ1YnRMIgwTLEj3E24bT6tYni50rlCfQ==", - "dev": true, - "requires": { - "ansi-styles": "^3.2.1", - "escape-string-regexp": "^1.0.5", - "supports-color": "^5.3.0" - } - }, - "color-convert": { - "version": "1.9.3", - "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-1.9.3.tgz", - "integrity": 
"sha512-QfAUtd+vFdAtFQcC8CCyYt1fYWxSqAiK2cSD6zDB8N3cpsEBAvRxp9zOGg6G/SHHJYAT88/az/IuDGALsNVbGg==", - "dev": true, - "requires": { - "color-name": "1.1.3" - } - }, - "color-name": { - "version": "1.1.3", - "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.3.tgz", - "integrity": "sha512-72fSenhMw2HZMTVHeCA9KCmpEIbzWiQsjN+BHcBbS9vr1mtt+vJjPdksIBNUmKAW8TFUDPJK5SUU3QhE9NEXDw==", - "dev": true - }, - "has-flag": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-3.0.0.tgz", - "integrity": "sha512-sKJf1+ceQBr4SMkvQnBDNDtf4TXpVhVGateu0t918bl30FnbE2m4vNLX+VWe/dpjlb+HugGYzW7uQXH98HPEYw==", + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-5.0.1.tgz", + "integrity": "sha512-Fo07WOYGqMfCWHOzSXOt2CxDbC6skS/jO9ynEcmpANMoPrD+W1r1K6Vx7iNm+AQmETU1Xr2t+n8nzkV9t6xh3w==", "dev": true - }, - "supports-color": { - "version": "5.5.0", - "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-5.5.0.tgz", - "integrity": "sha512-QjVjwdXIt408MIiAqCX4oUKsgU2EqAGzs2Ppkm4aQYbjm+ZEWEcW4SfFNTr4uMNZma0ey4f5lgLrkB0aX0QMow==", - "dev": true, - "requires": { - "has-flag": "^3.0.0" - } } } }, "serve-handler": { - "version": "6.1.3", - "resolved": "https://registry.npmjs.org/serve-handler/-/serve-handler-6.1.3.tgz", - "integrity": "sha512-FosMqFBNrLyeiIDvP1zgO6YoTzFYHxLDEIavhlmQ+knB2Z7l1t+kGLHkZIDN7UVWqQAmKI3D20A6F6jo3nDd4w==", + "version": "6.1.5", + "resolved": "https://registry.npmjs.org/serve-handler/-/serve-handler-6.1.5.tgz", + "integrity": "sha512-ijPFle6Hwe8zfmBxJdE+5fta53fdIY0lHISJvuikXB3VYFafRjMRpOffSPvCYsbKyBA7pvy9oYr/BT1O3EArlg==", "dev": true, "requires": { "bytes": "3.0.0", "content-disposition": "0.5.2", "fast-url-parser": "1.1.3", "mime-types": "2.1.18", - "minimatch": "3.0.4", + "minimatch": "3.1.2", "path-is-inside": "1.0.2", "path-to-regexp": "2.2.1", "range-parser": "1.2.0" @@ -9754,15 +9433,6 @@ "requires": { "mime-db": "~1.33.0" } - }, - "minimatch": { - "version": "3.0.4", - "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.0.4.tgz", - "integrity": "sha512-yJHVQEhyqPLUTgt9B83PXu6W3rx4MvvHvSUvToogpwoGDOUQ+yDrR0HRot+yOCdCO7u4hX3pWft6kWBBcqh0UA==", - "dev": true, - "requires": { - "brace-expansion": "^1.1.7" - } } } }, @@ -9811,20 +9481,12 @@ "dev": true }, "simple-update-notifier": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/simple-update-notifier/-/simple-update-notifier-1.1.0.tgz", - "integrity": "sha512-VpsrsJSUcJEseSbMHkrsrAVSdvVS5I96Qo1QAQ4FxQ9wXFcB+pjj7FB7/us9+GcgfW4ziHtYMc1J0PLczb55mg==", + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/simple-update-notifier/-/simple-update-notifier-2.0.0.tgz", + "integrity": "sha512-a2B9Y0KlNXl9u/vsW6sTIu9vGEpfKu2wRV6l1H3XEas/0gUIzGzBoP/IouTcUQbm9JWZLH3COxyn03TYlFax6w==", "dev": true, "requires": { - "semver": "~7.0.0" - }, - "dependencies": { - "semver": { - "version": "7.0.0", - "resolved": "https://registry.npmjs.org/semver/-/semver-7.0.0.tgz", - "integrity": "sha512-+GB6zVA9LWh6zovYQLALHwv5rb2PHGlJi3lfiqIHxR0uuwCgefcOJc59v9fv1w8GbStwxuuqqAjI9NMAOOgq1A==", - "dev": true - } + "semver": "^7.5.3" } }, "slash": { @@ -9915,23 +9577,6 @@ "integrity": "sha512-XkD+zwiqXHikFZm4AX/7JSCXA98U5Db4AFd5XUg/+9UNtnH75+Z9KxtpYiJZx36mUDVOwH83pl7yvCer6ewM3w==", "dev": true }, - "sshpk": { - "version": "1.17.0", - "resolved": "https://registry.npmjs.org/sshpk/-/sshpk-1.17.0.tgz", - "integrity": "sha512-/9HIEs1ZXGhSPE8X6Ccm7Nam1z8KcoCqPdI7ecm1N33EzAetWahvQWVqLZtaZQ+IDKX4IyA2o0gBzqIMkAagHQ==", - "dev": true, - "requires": { - "asn1": 
"~0.2.3", - "assert-plus": "^1.0.0", - "bcrypt-pbkdf": "^1.0.0", - "dashdash": "^1.12.0", - "ecc-jsbn": "~0.1.1", - "getpass": "^0.1.1", - "jsbn": "~0.1.0", - "safer-buffer": "^2.0.2", - "tweetnacl": "~0.14.0" - } - }, "ssri": { "version": "8.0.1", "resolved": "https://registry.npmjs.org/ssri/-/ssri-8.0.1.tgz", @@ -10067,10 +9712,10 @@ "integrity": "sha512-vavAMRXOgBVNF6nyEEmL3DBK19iRpDcoIwW+swQ+CbGiu7lju6t+JklA1MHweoWtadgt4ISVUsXLyDq34ddcwA==", "dev": true }, - "strip-eof": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/strip-eof/-/strip-eof-1.0.0.tgz", - "integrity": "sha512-7FCwGGmx8mD5xQd3RPUvnSpUXHM3BWuzjtpD4TXsfcZ9EL4azvVVUscFYwD9nx8Kh+uCBC00XBtAykoMHwTh8Q==", + "strip-final-newline": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/strip-final-newline/-/strip-final-newline-2.0.0.tgz", + "integrity": "sha512-BrpvfNAE3dcvq7ll3xVumzjKjZQ5tI1sEUIKr3Uoks0XUl45St3FlatVqef9prk4jRDzhW6WZg+3bk93y6pLjA==", "dev": true }, "strip-indent": { @@ -10336,12 +9981,6 @@ "require-from-string": "^2.0.2", "uri-js": "^4.2.2" } - }, - "json-schema-traverse": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-1.0.0.tgz", - "integrity": "sha512-NM8/P9n3XjXhIZn1lLhkFaACTOURQXjWhV4BA/RnOv8xvgqtqpAX9IO4mRQxSx1Rlo4tqzeqb0sOlruaOy3dug==", - "dev": true } } }, @@ -10402,24 +10041,6 @@ } } }, - "tough-cookie": { - "version": "2.5.0", - "resolved": "https://registry.npmjs.org/tough-cookie/-/tough-cookie-2.5.0.tgz", - "integrity": "sha512-nlLsUzgm1kfLXSXfRZMc1KLAugd4hqJHDTvc2hDIwS3mZAfMEuMbc03SujMF+GEcpaX/qboeycw6iO8JwVv2+g==", - "dev": true, - "requires": { - "psl": "^1.1.28", - "punycode": "^2.1.1" - }, - "dependencies": { - "punycode": { - "version": "2.3.0", - "resolved": "https://registry.npmjs.org/punycode/-/punycode-2.3.0.tgz", - "integrity": "sha512-rRV+zQD8tVFys26lAGR9WUuS4iUAngJScM+ZRSKtvl5tKeZ2t5bvdNFdNHBW9FWR4guGHlgmsZ1G7BSm2wTbuA==", - "dev": true - } - } - }, "trim-newlines": { "version": "3.0.1", "resolved": "https://registry.npmjs.org/trim-newlines/-/trim-newlines-3.0.1.tgz", @@ -10427,27 +10048,9 @@ "dev": true }, "true-case-path": { - "version": "1.0.3", - "resolved": "https://registry.npmjs.org/true-case-path/-/true-case-path-1.0.3.tgz", - "integrity": "sha512-m6s2OdQe5wgpFMC+pAJ+q9djG82O2jcHPOI6RNg1yy9rCYR+WD6Nbpl32fDpfC56nirdRy+opFa/Vk7HYhqaew==", - "dev": true, - "requires": { - "glob": "^7.1.2" - } - }, - "tunnel-agent": { - "version": "0.6.0", - "resolved": "https://registry.npmjs.org/tunnel-agent/-/tunnel-agent-0.6.0.tgz", - "integrity": "sha512-McnNiV1l8RYeY8tBgEpuodCC1mLUdbSN+CYBL7kJsJNInOP8UjDDEwdk6Mw60vdLLrr5NHKZhMAOSrR2NZuQ+w==", - "dev": true, - "requires": { - "safe-buffer": "^5.0.1" - } - }, - "tweetnacl": { - "version": "0.14.5", - "resolved": "https://registry.npmjs.org/tweetnacl/-/tweetnacl-0.14.5.tgz", - "integrity": "sha512-KXXFFdAbFXY4geFIwoyNK+f5Z1b7swfXABfL7HXCmoIWMKU3dmS26672A4EeQtDzLKy7SXmfBu51JolvEKwtGA==", + "version": "2.2.1", + "resolved": "https://registry.npmjs.org/true-case-path/-/true-case-path-2.2.1.tgz", + "integrity": "sha512-0z3j8R7MCjy10kc/g+qg7Ln3alJTodw9aDuVWZa3uiWqfuBMKeAeP2ocWcxoyM3D73yz3Jt/Pu4qPr4wHSdB/Q==", "dev": true }, "type-fest": { @@ -10556,9 +10159,9 @@ } }, "update-check": { - "version": "1.5.2", - "resolved": "https://registry.npmjs.org/update-check/-/update-check-1.5.2.tgz", - "integrity": "sha512-1TrmYLuLj/5ZovwUS7fFd1jMH3NnFDN1y1A8dboedIDt7zs/zJMo6TwwlhYKkSeEwzleeiSBV5/3c9ufAQWDaQ==", + "version": "1.5.4", + "resolved": 
"https://registry.npmjs.org/update-check/-/update-check-1.5.4.tgz", + "integrity": "sha512-5YHsflzHP4t1G+8WGPlvKbJEbAJGCgw+Em+dGR1KmBUbr1J36SJBqlHLjR7oob7sco5hWHGQVcr9B2poIVDDTQ==", "dev": true, "requires": { "registry-auth-token": "3.3.2", @@ -10588,12 +10191,6 @@ "integrity": "sha512-EPD5q1uXyFxJpCrLnCc1nHnq3gOa6DZBocAIiI2TaSCA7VCJ1UJDMagCzIkXNsUYfD1daK//LTEQ8xiIbrHtcw==", "dev": true }, - "uuid": { - "version": "3.4.0", - "resolved": "https://registry.npmjs.org/uuid/-/uuid-3.4.0.tgz", - "integrity": "sha512-HjSDRw6gZE5JMggctHBcjVak08+KEVhSIiDzFnT9S9aegmp85S/bReBVTb4QTFaRNptJ9kuYaNhnbNEOkbKb/A==", - "dev": true - }, "v8-compile-cache": { "version": "2.4.0", "resolved": "https://registry.npmjs.org/v8-compile-cache/-/v8-compile-cache-2.4.0.tgz", @@ -10616,17 +10213,6 @@ "integrity": "sha512-BNGbWLfd0eUPabhkXUVm0j8uuvREyTh5ovRa/dyow/BqAbZJyC+5fU+IzQOzmAKzYqYRAISoRhdQr3eIZ/PXqg==", "dev": true }, - "verror": { - "version": "1.10.0", - "resolved": "https://registry.npmjs.org/verror/-/verror-1.10.0.tgz", - "integrity": "sha512-ZZKSmDAEFOijERBLkmYfJ+vmk3w+7hOLYDNkRCuRuMJGEmqYNCNLyBBFwWKVMhfwaEF3WOd0Zlw86U/WC/+nYw==", - "dev": true, - "requires": { - "assert-plus": "^1.0.0", - "core-util-is": "1.0.2", - "extsprintf": "^1.2.0" - } - }, "which": { "version": "2.0.2", "resolved": "https://registry.npmjs.org/which/-/which-2.0.2.tgz", @@ -10672,12 +10258,46 @@ } }, "widest-line": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/widest-line/-/widest-line-3.1.0.tgz", - "integrity": "sha512-NsmoXalsWVDMGupxZ5R08ka9flZjjiLvHVAWYOKtiKM8ujtZWr9cRffak+uSE48+Ob8ObalXpwyeUiyDD6QFgg==", + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/widest-line/-/widest-line-4.0.1.tgz", + "integrity": "sha512-o0cyEG0e8GPzT4iGHphIOh0cJOV8fivsXxddQasHPHfoZf1ZexrfeA21w2NaEN1RHE+fXlfISmOE8R9N3u3Qig==", "dev": true, "requires": { - "string-width": "^4.0.0" + "string-width": "^5.0.1" + }, + "dependencies": { + "ansi-regex": { + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-6.0.1.tgz", + "integrity": "sha512-n5M855fKb2SsfMIiFFoVrABHJC8QtHwVx+mHWP3QcEqBHYienj5dHSgjbxtC0WEZXYt4wcD6zrQElDPhFuZgfA==", + "dev": true + }, + "emoji-regex": { + "version": "9.2.2", + "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-9.2.2.tgz", + "integrity": "sha512-L18DaJsXSUk2+42pv8mLs5jJT2hqFkFE4j21wOmgbUqsZ2hL72NsUU785g9RXgo3s0ZNgVl42TiHp3ZtOv/Vyg==", + "dev": true + }, + "string-width": { + "version": "5.1.2", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-5.1.2.tgz", + "integrity": "sha512-HnLOCR3vjcY8beoNLtcjZ5/nxn2afmME6lhrDrebokqMap+XbeW8n9TXpPDOqdGK5qcI3oT0GKTW6wC7EMiVqA==", + "dev": true, + "requires": { + "eastasianwidth": "^0.2.0", + "emoji-regex": "^9.2.2", + "strip-ansi": "^7.0.1" + } + }, + "strip-ansi": { + "version": "7.1.0", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-7.1.0.tgz", + "integrity": "sha512-iq6eVVI64nQQTRYq2KtEg2d2uU7LElhTJwsH4YzIHZshxlgZms/wIc4VoDQTlG/IvVIrBKG06CrZnp0qv7hkcQ==", + "dev": true, + "requires": { + "ansi-regex": "^6.0.1" + } + } } }, "wrap-ansi": { diff --git a/dds_web/static/package.json b/dds_web/static/package.json index a5db2d8f5..8207cfcb1 100644 --- a/dds_web/static/package.json +++ b/dds_web/static/package.json @@ -34,13 +34,13 @@ }, "devDependencies": { "autoprefixer": "^10.4.15", - "node-sass": "^7.0.3", - "nodemon": "^2.0.22", + "node-sass": "^9.0.0", + "nodemon": "^3.0.1", "npm-run-all": "^4.1.5", "postcss": "^8.4.31", "postcss-cli": "^9.1.0", "purgecss": "^4.1.3", - 
"serve": "^13.0.4", + "serve": "^14.2.1", "stylelint": "^14.16.1", "stylelint-config-twbs-bootstrap": "^3.2.1" } From 273ba69a524687bdbee21473c97d31aa6b0672f8 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ina=20Od=C3=A9n=20=C3=96sterbo?= Date: Mon, 13 Nov 2023 15:04:11 +0100 Subject: [PATCH 110/114] changelog --- CHANGELOG.rst | 14 ++++++++++++++ 1 file changed, 14 insertions(+) diff --git a/CHANGELOG.rst b/CHANGELOG.rst index 30c19b0dc..54e9b6293 100644 --- a/CHANGELOG.rst +++ b/CHANGELOG.rst @@ -1,6 +1,20 @@ Changelog ========== +.. : + + - 2023-11-22 +~~~~~~~~~~~~~~~~~~~~~~~ + +- New endpoint `AddFailedFiles` to allow a retry of saving files to the database after issues during upload. +- Cronjobs: + - Updated command: `quarterly-usage` changed to `monthly-usage` and refactored to catch errors and send emails. + - New command `send-usage` to collect usage rows from the `Usage` table and send csv files to support email. +- Dependencies: + - `Pillow` from `9.3.0` to `10.1.0` + - `urllib3` from `1.26.8` to `1.26.18` + - `postcss` (npm) from `8.4.28` to `8.4.31` + .. _2.5.2: 2.5.2 - 2023-10-25 From 78c84601e2df73af0d464b3cb92a89c7ab921eaf Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ina=20Od=C3=A9n=20=C3=96sterbo?= Date: Mon, 13 Nov 2023 15:07:07 +0100 Subject: [PATCH 111/114] version --- dds_web/version.py | 4 +++- tests/test_version.py | 2 +- 2 files changed, 4 insertions(+), 2 deletions(-) diff --git a/dds_web/version.py b/dds_web/version.py index 667b52f95..1d61112a0 100644 --- a/dds_web/version.py +++ b/dds_web/version.py @@ -1 +1,3 @@ -__version__ = "2.5.2" +# Do not do major version upgrade during 2024. +# If mid or minor version reaches 9, continue with 10, 11 etc etc. +__version__ = "2.6.0" diff --git a/tests/test_version.py b/tests/test_version.py index 534eab711..aaf84a488 100644 --- a/tests/test_version.py +++ b/tests/test_version.py @@ -2,4 +2,4 @@ def test_version(): - assert version.__version__ == "2.5.2" + assert version.__version__ == "2.6.0" From 0f8f0f7254960e77b4c9a3e033bed76b41dbf6df Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ina=20Od=C3=A9n=20=C3=96sterbo?= Date: Mon, 13 Nov 2023 15:10:03 +0100 Subject: [PATCH 112/114] sprintlog --- SPRINTLOG.md | 1 + 1 file changed, 1 insertion(+) diff --git a/SPRINTLOG.md b/SPRINTLOG.md index 3b28549a6..1f2b5e5b2 100644 --- a/SPRINTLOG.md +++ b/SPRINTLOG.md @@ -328,3 +328,4 @@ _Nothing merged in CLI during this sprint_ - Updated Pillow package version to address vulnerabities ([#1486](https://github.com/ScilifelabDataCentre/dds_web/pull/1486)) - Updated urllib3 package version to address vulnerabities ([#1487](https://github.com/ScilifelabDataCentre/dds_web/pull/1487)) - Updated PostCss Node package to address vulnerabities ([#1489](https://github.com/ScilifelabDataCentre/dds_web/pull/1489)) +- New version: 2.6.0 ([#1494](https://github.com/ScilifelabDataCentre/dds_web/pull/1494)) From c5b65cd3217678a812543fe29fed57f1b0d7858b Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ina=20Od=C3=A9n=20=C3=96sterbo?= Date: Mon, 13 Nov 2023 15:22:20 +0100 Subject: [PATCH 113/114] version in changelog --- CHANGELOG.rst | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/CHANGELOG.rst b/CHANGELOG.rst index 54e9b6293..1c52999f3 100644 --- a/CHANGELOG.rst +++ b/CHANGELOG.rst @@ -1,9 +1,9 @@ Changelog ========== -.. : +.. _2.6.0: - - 2023-11-22 +2.6.0 - 2023-11-22 ~~~~~~~~~~~~~~~~~~~~~~~ - New endpoint `AddFailedFiles` to allow a retry of saving files to the database after issues during upload. 
From be6d35fdda2d76a15725bcad2c7edd30bc433210 Mon Sep 17 00:00:00 2001
From: rv0lt
Date: Tue, 14 Nov 2023 09:28:47 +0100
Subject: [PATCH 114/114] sprintlog

---
 SPRINTLOG.md | 1 +
 1 file changed, 1 insertion(+)

diff --git a/SPRINTLOG.md b/SPRINTLOG.md
index 3b28549a6..8ba6cac02 100644
--- a/SPRINTLOG.md
+++ b/SPRINTLOG.md
@@ -328,3 +328,4 @@ _Nothing merged in CLI during this sprint_
 - Updated Pillow package version to address vulnerabities ([#1486](https://github.com/ScilifelabDataCentre/dds_web/pull/1486))
 - Updated urllib3 package version to address vulnerabities ([#1487](https://github.com/ScilifelabDataCentre/dds_web/pull/1487))
 - Updated PostCss Node package to address vulnerabities ([#1489](https://github.com/ScilifelabDataCentre/dds_web/pull/1489))
+- Updated several Node libraries to address vulnerabilities ([#1492](https://github.com/ScilifelabDataCentre/dds_web/pull/1492))
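
The changelog added in this series describes a new endpoint for retrying database registration of files that failed during upload, fed by the CLI's failed-delivery log. A minimal client-side sketch of such a retry, assuming a JSON failed-delivery log, bearer-token authentication, and a PUT route named `/file/failed/update`; the base URL, route name, query parameter, and response shape are assumptions for illustration, not taken from these patches:

```python
# Hypothetical sketch: resend a failed-delivery log so the files it lists
# can be registered in the database. Route, auth, and payload layout are
# assumptions for illustration only.
import json

import requests

API_BASE = "https://dds.example.org/api/v1"  # assumed base URL


def retry_failed_files(project_id: str, log_path: str, token: str) -> dict:
    """Send the failed-delivery log for one project to the assumed retry endpoint."""
    # The CLI writes the failed-delivery log as JSON; load it as-is.
    with open(log_path, "r", encoding="utf-8") as handle:
        failed_log = json.load(handle)

    response = requests.put(
        f"{API_BASE}/file/failed/update",      # assumed route name
        params={"project": project_id},        # assumed query parameter
        json=failed_log,
        headers={"Authorization": f"Bearer {token}"},
    )
    response.raise_for_status()
    return response.json()
```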