From 864b2d2489fd0abd7d470d9b9b575577dc856258 Mon Sep 17 00:00:00 2001
From: Rohan Weeden
Date: Tue, 1 Feb 2022 10:01:23 -0900
Subject: [PATCH] Fix flake8 errors

---
 rain_api_core/aws_util.py    | 23 ++++++++++++++++++-----
 rain_api_core/egress_util.py | 19 +++++++++++++------
 rain_api_core/logging.py     |  2 +-
 rain_api_core/urs_util.py    | 27 +++++++++++++++++++++------
 rain_api_core/view_util.py   | 15 ++++++++++++---
 5 files changed, 65 insertions(+), 21 deletions(-)

diff --git a/rain_api_core/aws_util.py b/rain_api_core/aws_util.py
index 4a8e1ef..ea4c475 100644
--- a/rain_api_core/aws_util.py
+++ b/rain_api_core/aws_util.py
@@ -66,7 +66,11 @@ def retrieve_secret(secret_name: str) -> dict:
         get_secret_value_response = client.get_secret_value(
             SecretId=secret_name
         )
-        log.info(return_timing_object(service="secretsmanager", endpoint=f"client().get_secret_value({secret_name})", duration=duration(timer)))
+        log.info(return_timing_object(
+            service="secretsmanager",
+            endpoint=f"client().get_secret_value({secret_name})",
+            duration=duration(timer)
+        ))
     except ClientError as e:
         log.error("Encountered fatal error trying to reading URS Secret: {0}".format(e))
         raise e
@@ -75,7 +79,7 @@
     # Depending on whether the secret is a string or binary, one of these fields will be populated.
     if 'SecretString' in get_secret_value_response:
         secret = json.loads(get_secret_value_response['SecretString'])
-        log.debug('ET for retrieving secret {} from secret store: {} sec'.format(secret_name, round(time() - t0, 4)))
+        log.debug(f'ET for retrieving secret {secret_name} from secret store: {time() - t0:.4f} sec')
         return secret
 
     return {}
@@ -119,7 +123,11 @@ def read_s3(bucket: str, key: str, s3: ServiceResource = None) -> str:
     log.debug('ET for reading {} from S3: {} sec'.format(key, round(time() - t0, 4)))
     timer = time()
     body = obj.get()['Body'].read().decode('utf-8')
-    log.info(return_timing_object(service="s3", endpoint=f"resource().Object(s3://{bucket}/{key}).get()", duration=duration(timer)))
+    log.info(return_timing_object(
+        service="s3",
+        endpoint=f"resource().Object(s3://{bucket}/{key}).get()",
+        duration=duration(timer)
+    ))
     return body
 
 
@@ -151,7 +159,8 @@ def get_role_creds(user_id: str = None, in_region: bool = False):
     """
     :param user_id: string with URS username
     :param in_region: boolean If True a download role that works only in region will be returned
-    :return: Returns a set of temporary security credentials (consisting of an access key ID, a secret access key, and a security token)
+    :return: Returns a set of temporary security credentials (consisting of an access key ID, a secret access key, and
+        a security token)
     :return: Offset, in seconds for how long the STS session has been active
     """
     global sts  # pylint: disable=global-statement
@@ -176,7 +185,11 @@
 
     if user_id not in role_creds_cache[download_role_arn]:
         fresh_session = sts.assume_role(**session_params)
-        log.info(return_timing_object(service="sts", endpoint=f"client().assume_role({dl_arn_name}/{user_id})", duration=duration(now)))
+        log.info(return_timing_object(
+            service="sts",
+            endpoint=f"client().assume_role({dl_arn_name}/{user_id})",
+            duration=duration(now)
+        ))
         role_creds_cache[download_role_arn][user_id] = {"session": fresh_session, "timestamp": now}
     elif now - role_creds_cache[download_role_arn][user_id]["timestamp"] > 600:
         # If the session has been active for more than 10 minutes, grab a new one.
diff --git a/rain_api_core/egress_util.py b/rain_api_core/egress_util.py
index b98cb20..3ff4ca0 100644
--- a/rain_api_core/egress_util.py
+++ b/rain_api_core/egress_util.py
@@ -28,9 +28,9 @@ def get_presigned_url(session, bucket_name, object_name, region_name, expire_sec
     datez = timez[:8]
     hostname = "{0}.s3{1}.amazonaws.com".format(bucket_name, "." + region_name if region_name != "us-east-1" else "")
 
-    cred   = session['Credentials']['AccessKeyId']
+    cred = session['Credentials']['AccessKeyId']
     secret = session['Credentials']['SecretAccessKey']
-    token  = session['Credentials']['SessionToken']
+    token = session['Credentials']['SessionToken']
 
     aws4_request = "/".join([datez, region_name, "s3", "aws4_request"])
     cred_string = "{0}/{1}".format(cred, aws4_request)
@@ -47,16 +47,23 @@ def get_presigned_url(session, bucket_name, object_name, region_name, expire_sec
     can_query_string = "&".join(parts)
 
     # Canonical Requst
-    can_req = method + "\n/" + object_name + "\n" + can_query_string + "\nhost:" + hostname + "\n\nhost\nUNSIGNED-PAYLOAD"
+    can_req = (
+        f"{method}\n"
+        f"/{object_name}\n"
+        f"{can_query_string}\n"
+        f"host:{hostname}\n\n"
+        "host\n"
+        "UNSIGNED-PAYLOAD"
+    )
     can_req_hash = sha256(can_req.encode('utf-8')).hexdigest()
 
     # String to Sign
     stringtosign = "\n".join(["AWS4-HMAC-SHA256", timez, aws4_request, can_req_hash])
 
     # Signing Key
-    StepOne    = hmacsha256("AWS4{0}".format(secret).encode('utf-8'), datez).digest()
-    StepTwo    = hmacsha256(StepOne, region_name).digest()
-    StepThree  = hmacsha256(StepTwo, "s3").digest()
+    StepOne = hmacsha256("AWS4{0}".format(secret).encode('utf-8'), datez).digest()
+    StepTwo = hmacsha256(StepOne, region_name).digest()
+    StepThree = hmacsha256(StepTwo, "s3").digest()
     SigningKey = hmacsha256(StepThree, "aws4_request").digest()
 
     # Final Signature
diff --git a/rain_api_core/logging.py b/rain_api_core/logging.py
index 82952ee..4247ca2 100644
--- a/rain_api_core/logging.py
+++ b/rain_api_core/logging.py
@@ -191,7 +191,7 @@ def format(self, record: logging.LogRecord) -> str:
 
 class TaggingFilter(logging.Filter):
     """Add extra attributes to each log record"""
-    def __init__(self, *args, **kwargs): 
+    def __init__(self, *args, **kwargs):
         super().__init__(*args, **kwargs)
         self.params = {
             "build_vers": os.getenv("BUILD_VERSION", "NOBUILD"),
diff --git a/rain_api_core/urs_util.py b/rain_api_core/urs_util.py
index 051bc5b..93cfa98 100644
--- a/rain_api_core/urs_util.py
+++ b/rain_api_core/urs_util.py
@@ -69,9 +69,13 @@ def get_urs_url(ctxt: dict, to: str = None) -> str:
     client_id = get_urs_creds()['UrsId']
 
     log.debug('domain name: {0}'.format(os.getenv('DOMAIN_NAME', 'no domainname set')))
-    log.debug('if no domain name set: {}.execute-api.{}.amazonaws.com/{}'.format(ctxt['apiId'], os.getenv('AWS_DEFAULT_REGION', ''), ctxt['stage']))
+    log.debug('if no domain name set: {}.execute-api.{}.amazonaws.com/{}'.format(
+        ctxt['apiId'],
+        os.getenv('AWS_DEFAULT_REGION', ''),
+        ctxt['stage']
+    ))
 
-    urs_url = '{0}?client_id={1}&response_type=code&redirect_uri={2}'.format(base_url, client_id, get_redirect_url(ctxt))
+    urs_url = f'{base_url}?client_id={client_id}&response_type=code&redirect_uri={get_redirect_url(ctxt)}'
     if to:
         urs_url += f"&state={to}"
 
@@ -121,7 +125,10 @@ def get_profile(user_id: str, token: str, temptoken: str = None, aux_headers: di
             log.debug('because error above, going to get_new_token_and_profile()')
             return get_new_token_and_profile(user_id, token, aux_headers)
 
-        log.debug('We got that 401 above and we\'re using a temptoken ({}), so giving up and not getting a profile.'.format(temptoken))
+        log.debug(
+            f"We got that 401 above and we're using a temptoken ({temptoken}), "
+            "so giving up and not getting a profile."
+        )
         return {}
 
 
@@ -184,14 +191,18 @@ def user_in_group_urs(private_groups, user_id, token, user_profile=None, refresh
         user_profile = get_profile(user_id, token, aux_headers=aux_headers)
         new_profile = user_profile
 
-    if isinstance(user_profile, dict) and 'user_groups' in user_profile and user_in_group_list(private_groups, user_profile['user_groups']):
+    if (
+        isinstance(user_profile, dict)
+        and 'user_groups' in user_profile
+        and user_in_group_list(private_groups, user_profile['user_groups'])
+    ):
         log.info("User {0} belongs to private group".format(user_id))
         return True, new_profile
 
     # Couldn't find user in provided groups, but we may as well look at a fresh group list:
     if not refresh_first:
         # we have a maybe not so fresh user_profile and we could try again to see if someone added a group to this user:
-        log.debug("Could not validate user {0} belonging to groups {1}, attempting profile refresh".format(user_id, private_groups))
+        log.debug(f"Could not validate user {user_id} belonging to groups {private_groups}, attempting profile refresh")
         return user_in_group_urs(private_groups, user_id, {}, refresh_first=True, aux_headers=aux_headers)
 
     log.debug("Even after profile refresh, user {0} does not belong to groups {1}".format(user_id, private_groups))
@@ -223,7 +234,11 @@ def user_in_group(private_groups, cookievars, refresh_first=False, aux_headers=N
 
     if not in_group and not refresh_first:
         # one last ditch effort to see if they were so very recently added to group:
-        jwt_payload['urs-groups'] = get_profile(jwt_payload['urs-user-id'], jwt_payload['urs-access-token'], aux_headers=aux_headers)['user_groups']
+        jwt_payload['urs-groups'] = get_profile(
+            jwt_payload['urs-user-id'],
+            jwt_payload['urs-access-token'],
+            aux_headers=aux_headers
+        )['user_groups']
         return user_in_group(private_groups, cookievars, refresh_first=True, aux_headers=aux_headers)
 
     return False, new_profile
diff --git a/rain_api_core/view_util.py b/rain_api_core/view_util.py
index b1750db..cc545af 100644
--- a/rain_api_core/view_util.py
+++ b/rain_api_core/view_util.py
@@ -64,7 +64,11 @@ def cache_html_templates() -> str:
     client = botoclient('s3')
     try:
         result = client.list_objects(Bucket=bucket, Prefix=templatedir, Delimiter='/')
-        log.info(return_timing_object(service="s3", endpoint=f"client().list_objects(s3://{bucket}/{templatedir}/)", duration=duration(timer)))
+        log.info(return_timing_object(
+            service="s3",
+            endpoint=f"client().list_objects(s3://{bucket}/{templatedir}/)",
+            duration=duration(timer)
+        ))
 
         for o in result.get('Contents'):
             filename = os.path.basename(o['Key'])
@@ -72,7 +76,11 @@
             log.debug('attempting to save {}'.format(os.path.join(HTML_TEMPLATE_LOCAL_CACHEDIR, filename)))
             timer = time()
             client.download_file(bucket, o['Key'], os.path.join(HTML_TEMPLATE_LOCAL_CACHEDIR, filename))
-            log.info(return_timing_object(service="s3", endpoint=f"client().download_file(s3://{bucket}/{o['Key']})", duration=duration(timer)))
+            log.info(return_timing_object(
+                service="s3",
+                endpoint=f"client().download_file(s3://{bucket}/{o['Key']})",
+                duration=duration(timer)
+            ))
         return 'CACHED'
     except (TypeError, KeyError) as e:
         log.error(e)
@@ -108,7 +116,8 @@ def get_cookie_vars(headers: dict) -> dict:
     """
     Extracts and decodes and returns relevant cookies from http headers
     :param headers: dict of http headers
-    :return: on success dict with keys env value of 'JWT_COOKIENAME' containing decoded jwt, 'urs-user-id', 'urs-access-token' on failure empty dict.
+    :return: on success dict with keys env value of 'JWT_COOKIENAME' containing decoded jwt, 'urs-user-id',
+        'urs-access-token' on failure empty dict.
     :type: dict
     """
     cooks = get_cookies(headers)
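
Reviewer note, not part of the patch: the egress_util.py hunks above only re-wrap the SigV4 signing code without changing behavior. For reference, a minimal standalone sketch of that signing-key derivation follows; here `hmacsha256(key, msg)` is assumed to be a thin wrapper around `hmac.new(key, msg.encode('utf-8'), hashlib.sha256)`, matching how the diff context calls it, and the function names below are illustrative only.

import hashlib
import hmac


def hmacsha256(key: bytes, msg: str) -> hmac.HMAC:
    # Assumed equivalent of the helper used in egress_util.py.
    return hmac.new(key, msg.encode('utf-8'), hashlib.sha256)


def derive_signing_key(secret: str, datez: str, region_name: str) -> bytes:
    # Same chain as StepOne..SigningKey in the hunk: date -> region -> "s3" -> "aws4_request".
    step_one = hmacsha256("AWS4{0}".format(secret).encode('utf-8'), datez).digest()
    step_two = hmacsha256(step_one, region_name).digest()
    step_three = hmacsha256(step_two, "s3").digest()
    return hmacsha256(step_three, "aws4_request").digest()


def sign(signing_key: bytes, stringtosign: str) -> str:
    # Final signature: hex HMAC of the string-to-sign under the derived key.
    return hmacsha256(signing_key, stringtosign).hexdigest()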