From 7e8ecb1b8759817e60ca7519570388eaa3e21ffc Mon Sep 17 00:00:00 2001 From: Jiri Kozel Date: Wed, 22 Nov 2023 13:50:33 +0100 Subject: [PATCH] process_client optionally raises if status is not complete --- src/layman/error_list.py | 1 + src/layman/layer/filesystem/thumbnail_test.py | 1 + src/layman/layer/rest_workspace_test.py | 3 +- src/layman/requests_concurrency_test.py | 3 +- src/layman/rest_publication_test.py | 6 ++-- src/layman/upgrade/upgrade_v1_21_test.py | 2 ++ test_tools/process_client.py | 34 ++++++++++++++++--- tests/dynamic_data/publications/celery.py | 1 + .../publications/updating_layer_test.py | 1 + .../wrong_input/wrong_input_test.py | 1 + .../x_forwarded_prefix/map_file_test.py | 1 + tests/static_data/data.py | 6 ++-- 12 files changed, 50 insertions(+), 10 deletions(-) diff --git a/src/layman/error_list.py b/src/layman/error_list.py index b0eccf9bb..53d08faea 100644 --- a/src/layman/error_list.py +++ b/src/layman/error_list.py @@ -55,4 +55,5 @@ 52: (400, 'GeoServer HTTP or connection error'), 53: (500, 'Error when publishing on GeoServer. 
It happens for example for raster files with wrong explicit CRS.'), 54: (400, 'Wrong header value'), + 55: (400, 'Publication is not complete'), # raised by process_client only } diff --git a/src/layman/layer/filesystem/thumbnail_test.py b/src/layman/layer/filesystem/thumbnail_test.py index b82d7bbb4..2c6606a71 100644 --- a/src/layman/layer/filesystem/thumbnail_test.py +++ b/src/layman/layer/filesystem/thumbnail_test.py @@ -31,6 +31,7 @@ def wait_for_thumbnail_error(response): file_paths=geojson_file, style_file=style_file, check_response_fn=wait_for_thumbnail_error, + raise_if_not_complete=False, ) layer_info = process_client.get_workspace_layer(workspace, layer) diff --git a/src/layman/layer/rest_workspace_test.py b/src/layman/layer/rest_workspace_test.py index 9faac9429..af695ef13 100644 --- a/src/layman/layer/rest_workspace_test.py +++ b/src/layman/layer/rest_workspace_test.py @@ -958,7 +958,8 @@ def wait_for_db_finish(response): info = response.json() return info.get('db', {}).get('status', '') == 'FAILURE' - process_client.publish_workspace_layer(workspace, layername, file_paths=file_paths, check_response_fn=wait_for_db_finish) + process_client.publish_workspace_layer(workspace, layername, file_paths=file_paths, + check_response_fn=wait_for_db_finish, raise_if_not_complete=False) layer_info = util.get_layer_info(workspace, layername) assert layer_info['db']['status'] == 'FAILURE', f'layer_info={layer_info}' diff --git a/src/layman/requests_concurrency_test.py b/src/layman/requests_concurrency_test.py index ccce69914..f783fa1ec 100644 --- a/src/layman/requests_concurrency_test.py +++ b/src/layman/requests_concurrency_test.py @@ -25,7 +25,8 @@ def test_patch_after_feature_change_concurrency(publication_type): assert lock == common_const.PUBLICATION_LOCK_FEATURE_CHANGE process_client.patch_workspace_publication(publication_type, workspace, publication, title='New title', - check_response_fn=empty_method_returns_true) + 
check_response_fn=empty_method_returns_true, + raise_if_not_complete=False) queue = celery.get_run_after_chain_queue(workspace, publication_type, publication) assert len(queue) == 0, queue lock = redis.get_publication_lock(workspace, publication_type, publication) diff --git a/src/layman/rest_publication_test.py b/src/layman/rest_publication_test.py index 1f51258c4..e007b1645 100644 --- a/src/layman/rest_publication_test.py +++ b/src/layman/rest_publication_test.py @@ -119,7 +119,8 @@ def test_get_publication_layman_status(publ_type, error_params): workspace = 'test_get_publication_layman_status_workspace' publication = 'test_get_publication_layman_status_publication' - process_client.publish_workspace_publication(publ_type, workspace, publication, check_response_fn=common.empty_method_returns_true,) + process_client.publish_workspace_publication(publ_type, workspace, publication, check_response_fn=common.empty_method_returns_true, + raise_if_not_complete=False) info = process_client.get_workspace_publication(publ_type, workspace, publication,) assert 'layman_metadata' in info, f'info={info}' @@ -134,7 +135,8 @@ def test_get_publication_layman_status(publ_type, error_params): assert info['layman_metadata']['publication_status'] == 'COMPLETE', f'info={info}' if error_params: - process_client.patch_workspace_publication(publ_type, workspace, publication, **error_params, ) + process_client.patch_workspace_publication(publ_type, workspace, publication, **error_params, + raise_if_not_complete=False) info = process_client.get_workspace_publication(publ_type, workspace, publication, ) assert 'layman_metadata' in info, f'info={info}' assert 'publication_status' in info['layman_metadata'], f'info={info}' diff --git a/src/layman/upgrade/upgrade_v1_21_test.py b/src/layman/upgrade/upgrade_v1_21_test.py index 9a965f68d..e1fd8fd63 100644 --- a/src/layman/upgrade/upgrade_v1_21_test.py +++ b/src/layman/upgrade/upgrade_v1_21_test.py @@ -31,6 +31,7 @@ 'compress': True, 
'with_chunks': True, 'do_not_upload_chunks': True, + 'raise_if_not_complete': False, }, settings.EnumWfsWmsStatus.NOT_AVAILABLE, id='layer_updating', @@ -47,6 +48,7 @@ ], 'compress': True, 'with_chunks': True, + 'raise_if_not_complete': False, }, settings.EnumWfsWmsStatus.NOT_AVAILABLE, id='layer_not_available', diff --git a/test_tools/process_client.py b/test_tools/process_client.py index 91fbf643f..3cdfffb22 100644 --- a/test_tools/process_client.py +++ b/test_tools/process_client.py @@ -114,6 +114,7 @@ def wait_for_rest(url, max_attempts, sleeping_time, check_response, headers=None if attempts > max_attempts: logger.error(f"r.status_code={response.status_code}\nrltest={response.text}") raise Exception('Max attempts reached!') + return response def raise_layman_error(response, status_codes_to_skip=None): @@ -132,6 +133,20 @@ def raise_layman_error(response, status_codes_to_skip=None): assert 'Deprecation' not in response.headers, f'This is deprecated URL! Use new one. headers={response.headers}' +def raise_if_not_complete_status(response): + resp_json = response.json() + status = resp_json.get('layman_metadata', {}).get('publication_status') + if status != 'COMPLETE': + failed_source_key = next((k for k, v in resp_json.items() if isinstance(v, dict) and v.get('status') == 'FAILURE'), None) + if failed_source_key and resp_json[failed_source_key].get('error', {}).get('code'): + failed_source = resp_json[failed_source_key] + error_desc = failed_source['error'] + raise LaymanError(error_desc['code'], + error_desc.get('detail'), + sub_code=error_desc.get('sub_code')) + raise LaymanError(55, data=resp_json) + + def upload_file_chunks(publication_type, workspace, name, @@ -177,6 +192,7 @@ def patch_workspace_publication(publication_type, title=None, style_file=None, check_response_fn=None, + raise_if_not_complete=True, compress=False, compress_settings=None, with_chunks=False, @@ -201,6 +217,8 @@ def patch_workspace_publication(publication_type, assert not (not with_chunks
and do_not_upload_chunks) assert not (check_response_fn and do_not_upload_chunks) # because check_response_fn is not called when do_not_upload_chunks + assert not (raise_if_not_complete and do_not_upload_chunks) + assert not (check_response_fn and raise_if_not_complete) assert not (time_regex and publication_type == MAP_TYPE) assert not (publication_type == LAYER_TYPE and crs and not file_paths) @@ -281,7 +299,8 @@ def patch_workspace_publication(publication_type, file_paths, ) if not do_not_upload_chunks: - wait_for_publication_status(workspace, publication_type, name, check_response_fn=check_response_fn, headers=headers) + wait_for_publication_status(workspace, publication_type, name, check_response_fn=check_response_fn, + headers=headers, raise_if_not_complete=raise_if_not_complete) wfs.clear_cache(workspace) wms.clear_cache(workspace) if temp_dir: @@ -337,6 +356,7 @@ def publish_workspace_publication(publication_type, style_file=None, description=None, check_response_fn=None, + raise_if_not_complete=True, with_chunks=False, compress=False, compress_settings=None, @@ -360,6 +380,8 @@ def publish_workspace_publication(publication_type, assert not (not with_chunks and do_not_upload_chunks) assert not (check_response_fn and do_not_upload_chunks) # because check_response_fn is not called when do_not_upload_chunks + assert not (raise_if_not_complete and do_not_upload_chunks) + assert not (check_response_fn and raise_if_not_complete) file_paths = [publication_type_def.source_path] if file_paths is None and external_table_uri is None and not map_layers else file_paths @@ -440,7 +462,8 @@ def publish_workspace_publication(publication_type, file_paths, ) if not do_not_upload_chunks: - wait_for_publication_status(workspace, publication_type, name, check_response_fn=check_response_fn, headers=headers) + wait_for_publication_status(workspace, publication_type, name, check_response_fn=check_response_fn, + headers=headers, raise_if_not_complete=raise_if_not_complete) if 
temp_dir: shutil.rmtree(temp_dir) return response.json()[0] @@ -661,14 +684,17 @@ def check_publication_status(response): return current_status in {'COMPLETE', 'INCOMPLETE'} -def wait_for_publication_status(workspace, publication_type, publication, *, check_response_fn=None, headers=None,): +def wait_for_publication_status(workspace, publication_type, publication, *, check_response_fn=None, headers=None, + raise_if_not_complete=True): publication_type_def = PUBLICATION_TYPES_DEF[publication_type] with app.app_context(): url = url_for(publication_type_def.get_workspace_publication_url, workspace=workspace, **{publication_type_def.url_param_name: publication}) check_response_fn = check_response_fn or check_publication_status - wait_for_rest(url, 60, 0.5, check_response=check_response_fn, headers=headers) + response = wait_for_rest(url, 60, 0.5, check_response=check_response_fn, headers=headers) + if raise_if_not_complete: + raise_if_not_complete_status(response) def patch_after_feature_change(workspace, publ_type, name): diff --git a/tests/dynamic_data/publications/celery.py b/tests/dynamic_data/publications/celery.py index 601e1a9c8..a64aa36c1 100644 --- a/tests/dynamic_data/publications/celery.py +++ b/tests/dynamic_data/publications/celery.py @@ -15,6 +15,7 @@ def generate(workspace): 'tmp/naturalearth/10m/cultural/ne_10m_admin_0_countries.geojson', ], 'check_response_fn': empty_method_returns_true, + 'raise_if_not_complete': False, }), consts.KEY_RESPONSE_ASSERTS: [ diff --git a/tests/dynamic_data/publications/updating_layer_test.py b/tests/dynamic_data/publications/updating_layer_test.py index 547bafbc6..d36760d3d 100644 --- a/tests/dynamic_data/publications/updating_layer_test.py +++ b/tests/dynamic_data/publications/updating_layer_test.py @@ -27,6 +27,7 @@ class TestUpdatingLayer(base_test.TestSingleRestPublication): params={'compress': True, 'with_chunks': True, 'do_not_upload_chunks': True, + 'raise_if_not_complete': False, } )] diff --git 
a/tests/dynamic_data/publications/wrong_input/wrong_input_test.py b/tests/dynamic_data/publications/wrong_input/wrong_input_test.py index 63f072013..5333703f4 100644 --- a/tests/dynamic_data/publications/wrong_input/wrong_input_test.py +++ b/tests/dynamic_data/publications/wrong_input/wrong_input_test.py @@ -1399,6 +1399,7 @@ def generate_test_cases(): for key, test_case_params in TESTCASES.items(): all_params = deepcopy(test_case_params) rest_args = all_params.pop(Key.REST_ARGS) + rest_args['raise_if_not_complete'] = False mandatory_cases = all_params.pop(Key.MANDATORY_CASES) specific_types = {mandatory_cases: EnumTestTypes.MANDATORY} if mandatory_cases else {} diff --git a/tests/dynamic_data/publications/x_forwarded_prefix/map_file_test.py b/tests/dynamic_data/publications/x_forwarded_prefix/map_file_test.py index b59dae55e..f4b5f6ace 100644 --- a/tests/dynamic_data/publications/x_forwarded_prefix/map_file_test.py +++ b/tests/dynamic_data/publications/x_forwarded_prefix/map_file_test.py @@ -36,6 +36,7 @@ class TestPublication(base_test.TestSingleRestPublication): def before_class(self): self.post_publication(MAP, args={ 'file_paths': [MAP_FILE_PATH], + 'raise_if_not_complete': False, # timgen fails, because one URL points to non-existent service }, scope='class') @pytest.mark.parametrize('headers, exp_url_prefix', [ diff --git a/tests/static_data/data.py b/tests/static_data/data.py index 1608288a5..93eee27d9 100644 --- a/tests/static_data/data.py +++ b/tests/static_data/data.py @@ -91,14 +91,16 @@ def publish_publications_step(publications_set, step_num): for workspace, publ_type, publication in publications_set: data_def = data.PUBLICATIONS[(workspace, publ_type, publication)][data.DEFINITION] params = data_def[step_num] - write_method(publ_type, workspace, publication, **params, check_response_fn=empty_method_returns_true) + write_method(publ_type, workspace, publication, **params, check_response_fn=empty_method_returns_true, + raise_if_not_complete=False) if 
len(data_def) == step_num + 1: done_publications.add((workspace, publ_type, publication)) for workspace, publ_type, publication in publications_set: params = data.PUBLICATIONS[(workspace, publ_type, publication)][data.DEFINITION][step_num] headers = params.get('headers') try: - process_client.wait_for_publication_status(workspace, publ_type, publication, headers=headers, check_response_fn=check_publication_status) + process_client.wait_for_publication_status(workspace, publ_type, publication, headers=headers, + check_response_fn=check_publication_status) except AssertionError as ex: print(f"AssertionError in publication {workspace, publ_type, publication}, step_num={step_num}.") raise ex