From 9c8a0921880d5b4d72afeaac29d8d92b029230da Mon Sep 17 00:00:00 2001
From: Rafael Carvalho Pinheiro <74972217+pixuimpou@users.noreply.github.com>
Date: Thu, 19 Oct 2023 16:36:01 -0300
Subject: [PATCH] Create Recapture Logic / Bilhetagem Site-to-Site (#530)
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

* remove unused partition task
* unify date and hour partition tasks
* [pre-commit.ci] auto fixes from pre-commit.com hooks; for more information, see https://pre-commit.ci
* fix conditional
* change capture flow
* change generic capture flow
* update default flow schema
* change default capture flow structure
* change generic capture flow
* adjust constant structure
* change bilhetagem to new capture flow structure
* fix get_storage_blob function
* fix get_storage_blob call
* organize constants order
* fix get_raw_from_sources function call
* change transform_raw_to_json to read_raw_data
* transform transform_raw_data_to_json to read_raw_data
* fix nout task parameter
* fix timedelta instantiation
* set upstream tasks
* declare raw_filepath
* update docstrings
* adjust get_raw_from_sources return
* fix errors
* change agent label to dev
* refactor source values
* update constants
* update agent
* update schedule params
* update interval
* fix get_datetime_range interval
* remove order by from queries
* fix get_raw_data_api
* change json read function
* update read_raw_data
* update save_raw_local_func
* log error
* change raw api extraction to json
* change read json function
* print log traceback
* skip pre-treatment if empty df
* skip save staging if dataframe is empty / save raw
* remove skip upload if empty dataframe
* update docstring and returned values
* reorganize task order
* fix tuple
* change zip logic
* remove skip
* create gtfs zip constant
* add gtfs zip file name
* add csv to save raw / change filetype logic
* remove comments
* fix csv_args default value
* change docstring get raw api
* change raw data gcs docstring
* remove commented task
* change quadro primary key to list
* update GTFS constants
* change upload folder structure
* undo silencing of notification failure
* remove test parameters (gtfs)
* Update pipelines/rj_smtr/constants.py

Co-authored-by: Fernanda Scovino

* fix error chaining in the flow
* [pre-commit.ci] auto fixes from pre-commit.com hooks; for more information, see https://pre-commit.ci
* remove header treatment
* change dev agent to prd
* change agent from dev to prd
* adjust function returns
* Update documentation
* add return to get_upload_storage_blob
* Update documentation
* Update string
* add recapture to the generic flow
* change labels to dev
* add recapture logic
* create database connection
* create database connection
* create function to map multiple returns
* remove unmapped from filepaths
* log for debugging
* remove unmapped from partitions
* add unmapped to the recapture parameter
* add psycopg2
* add parameter comments
* add postgresql connection
* change bilhetagem to extract from the db
* standardize argument naming
* change schedule label to dev
* fix bilhetagem postgresql db constant
* change naming for recapture runs
* adjust connector
* change IP to DNS
* Serialize datetime objects / read sql with pandas
* change run name logic
* create bilhetagem recapture
* change host to IP / add interval_minutes
* add interval_minutes parameter
* remove commented line
* remove bilhetagem schedules file
* generalize query logs function
* adjust removal of custom schedule
* unmap interval_minutes
* change upload folder for testing
* test removing timezone
* change timezone
* fix recapture logic
* add option to recapture more days
* adjust recapture_window_days default
* add recapture_window to the query_logs task
* merge previous_errors
* remove test log
* adjust recapture log
* add auxiliary recapture
* create recapture parameters for auxiliary tables
* comment out materialization
* test log
* change bilhetagem recapture logic
* unmapped upstream tasks
* change upstream approach
* remove test changes
* change agent to prd
* fix project_name
* stop passing query_logs_func
* fix project_name
* remove comments
* remove query_logs_func
* increase max_recaptures

---------

Co-authored-by: fernandascovino
Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com>
Co-authored-by: mergify[bot] <37929162+mergify[bot]@users.noreply.github.com>
Co-authored-by: eng-rodrigocunha
Co-authored-by: Carolina Gomes
Co-authored-by: Rodrigo Cunha <66736583+eng-rodrigocunha@users.noreply.github.com>
---
 .../br_rj_riodejaneiro_bilhetagem/flows.py |  83 ++++-
 .../schedules.py                           |  33 --
 pipelines/rj_smtr/constants.py             |  40 +--
 pipelines/rj_smtr/flows.py                 | 200 ++++++++---
 pipelines/rj_smtr/tasks.py                 | 333 ++++++++++--------
 pipelines/rj_smtr/utils.py                 |  90 ++++-
 poetry.lock                                | 121 ++++++-
 pyproject.toml                             |   1 +
 8 files changed, 619 insertions(+), 282 deletions(-)
 delete mode 100644 pipelines/rj_smtr/br_rj_riodejaneiro_bilhetagem/schedules.py
diff --git a/pipelines/rj_smtr/br_rj_riodejaneiro_bilhetagem/flows.py b/pipelines/rj_smtr/br_rj_riodejaneiro_bilhetagem/flows.py index 568f96154..096e5d3e3 100644 --- a/pipelines/rj_smtr/br_rj_riodejaneiro_bilhetagem/flows.py +++ b/pipelines/rj_smtr/br_rj_riodejaneiro_bilhetagem/flows.py @@ -10,6 +10,7 @@ from prefect.tasks.prefect import create_flow_run, wait_for_flow_run from prefect.utilities.edges import unmapped + # EMD Imports # from pipelines.constants import constants as emd_constants @@ -29,17 +30,18 @@ default_materialization_flow, ) -from pipelines.rj_smtr.tasks import ( - get_current_timestamp, -) - -from pipelines.rj_smtr.br_rj_riodejaneiro_bilhetagem.schedules import ( - bilhetagem_transacao_schedule, -) +from pipelines.rj_smtr.tasks import get_current_timestamp from pipelines.rj_smtr.constants import constants -from pipelines.rj_smtr.schedules import every_hour +from pipelines.rj_smtr.schedules import every_hour, every_minute + + +GENERAL_CAPTURE_DEFAULT_PARAMS = { + "dataset_id": constants.BILHETAGEM_DATASET_ID.value, + "secret_path": constants.BILHETAGEM_SECRET_PATH.value, + "source_type": constants.BILHETAGEM_GENERAL_CAPTURE_PARAMS.value["source_type"], +} # Flows # @@ -52,7 +54,15 @@ image=emd_constants.DOCKER_IMAGE.value, labels=[emd_constants.RJ_SMTR_AGENT_LABEL.value], ) -bilhetagem_transacao_captura.schedule = bilhetagem_transacao_schedule + +bilhetagem_transacao_captura = set_default_parameters( flow=bilhetagem_transacao_captura, default_parameters=GENERAL_CAPTURE_DEFAULT_PARAMS | constants.BILHETAGEM_TRANSACAO_CAPTURE_PARAMS.value, ) + +bilhetagem_transacao_captura.schedule = every_minute + # BILHETAGEM AUXILIAR - SUBFLOW PARA RODAR ANTES DE CADA MATERIALIZAÇÃO # @@ -66,11 +76,7 @@ bilhetagem_auxiliar_captura = set_default_parameters( flow=bilhetagem_auxiliar_captura, -
default_parameters={ - "dataset_id": constants.BILHETAGEM_DATASET_ID.value, - "secret_path": constants.BILHETAGEM_SECRET_PATH.value, - "source_type": constants.BILHETAGEM_GENERAL_CAPTURE_PARAMS.value["source_type"], - }, + default_parameters=GENERAL_CAPTURE_DEFAULT_PARAMS, ) # MATERIALIZAÇÃO - SUBFLOW DE MATERIALIZAÇÃO @@ -91,11 +97,23 @@ default_parameters=bilhetagem_materializacao_parameters, ) -# TRATAMENTO - RODA DE HORA EM HORA, CAPTURA AUXILIAR + MATERIALIZAÇÃO +# RECAPTURA + +bilhetagem_recaptura = deepcopy(default_capture_flow) +bilhetagem_recaptura.name = "SMTR: Bilhetagem - Recaptura (subflow)" +bilhetagem_recaptura.storage = GCS(emd_constants.GCS_FLOWS_BUCKET.value) +bilhetagem_recaptura = set_default_parameters( + flow=bilhetagem_recaptura, + default_parameters=GENERAL_CAPTURE_DEFAULT_PARAMS | {"recapture": True}, +) + +# TRATAMENTO - RODA DE HORA EM HORA, RECAPTURAS + CAPTURA AUXILIAR + MATERIALIZAÇÃO with Flow( "SMTR: Bilhetagem Transação - Tratamento", code_owners=["caio", "fernanda", "boris", "rodrigo"], ) as bilhetagem_transacao_tratamento: + # Configuração # + timestamp = get_current_timestamp() rename_flow_run = rename_current_flow_run_now_time( @@ -105,6 +123,38 @@ LABELS = get_current_flow_labels() + # Recapturas + + run_recaptura_trasacao = create_flow_run( + flow_name=bilhetagem_recaptura.name, + project_name=emd_constants.PREFECT_DEFAULT_PROJECT.value, + labels=LABELS, + parameters=constants.BILHETAGEM_TRANSACAO_CAPTURE_PARAMS.value, + ) + + wait_recaptura_trasacao = wait_for_flow_run( + run_recaptura_trasacao, + stream_states=True, + stream_logs=True, + raise_final_state=True, + ) + + runs_recaptura_auxiliar = create_flow_run.map( + flow_name=unmapped(bilhetagem_recaptura.name), + project_name=unmapped(emd_constants.PREFECT_DEFAULT_PROJECT.value), + parameters=constants.BILHETAGEM_CAPTURE_PARAMS.value, + labels=unmapped(LABELS), + ) + + runs_recaptura_auxiliar.set_upstream(wait_recaptura_trasacao) + + wait_recaptura_auxiliar = wait_for_flow_run.map( + runs_recaptura_auxiliar, + stream_states=unmapped(True), + stream_logs=unmapped(True), + raise_final_state=unmapped(True), + ) + # Captura runs_captura = create_flow_run.map( flow_name=unmapped(bilhetagem_auxiliar_captura.name), @@ -113,6 +163,8 @@ labels=unmapped(LABELS), ) + runs_captura.set_upstream(wait_recaptura_auxiliar) + wait_captura = wait_for_flow_run.map( runs_captura, stream_states=unmapped(True), @@ -141,4 +193,3 @@ labels=[emd_constants.RJ_SMTR_AGENT_LABEL.value], ) bilhetagem_transacao_tratamento.schedule = every_hour -# bilhetagem_materializacao.schedule = bilhetagem_materializacao_schedule diff --git a/pipelines/rj_smtr/br_rj_riodejaneiro_bilhetagem/schedules.py b/pipelines/rj_smtr/br_rj_riodejaneiro_bilhetagem/schedules.py deleted file mode 100644 index 21e13f05b..000000000 --- a/pipelines/rj_smtr/br_rj_riodejaneiro_bilhetagem/schedules.py +++ /dev/null @@ -1,33 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Schedules for br_rj_riodejaneiro_bilhetagem -""" - -from datetime import timedelta - -from prefect.schedules import Schedule - -from pipelines.constants import constants as emd_constants -from pipelines.utils.utils import untuple_clocks as untuple - -from pipelines.rj_smtr.constants import constants -from pipelines.rj_smtr.utils import ( - generate_execute_schedules, -) - -BILHETAGEM_TRANSACAO_INTERVAL = timedelta(minutes=1) -bilhetagem_transacao_clocks = generate_execute_schedules( - clock_interval=timedelta( - **constants.BILHETAGEM_CAPTURE_RUN_INTERVAL.value["transacao_run_interval"] - ), - labels=[ - 
emd_constants.RJ_SMTR_AGENT_LABEL.value, - ], - table_parameters=constants.BILHETAGEM_TRANSACAO_CAPTURE_PARAMS.value, - dataset_id=constants.BILHETAGEM_DATASET_ID.value, - secret_path=constants.BILHETAGEM_SECRET_PATH.value, - source_type=constants.BILHETAGEM_GENERAL_CAPTURE_PARAMS.value["source_type"], - runs_interval_minutes=0, -) - -bilhetagem_transacao_schedule = Schedule(clocks=untuple(bilhetagem_transacao_clocks)) diff --git a/pipelines/rj_smtr/constants.py b/pipelines/rj_smtr/constants.py index 0037c6989..ccf1c6c44 100644 --- a/pipelines/rj_smtr/constants.py +++ b/pipelines/rj_smtr/constants.py @@ -170,24 +170,18 @@ class constants(Enum): # pylint: disable=c0103 "databases": { "principal_db": { "engine": "mysql", - "host": "principal-database-replica.internal", + "host": "10.5.114.121", }, "tarifa_db": { - "engine": "postgres", - "host": "tarifa-database-replica.internal", + "engine": "postgresql", + "host": "10.5.113.254", }, "transacao_db": { - "engine": "postgres", - "host": "transacao-database-replica.internal", + "engine": "postgresql", + "host": "10.5.115.1", }, }, - "vpn_url": "http://vpn-jae.mobilidade.rio/", - "source_type": "api-json", - } - - BILHETAGEM_CAPTURE_RUN_INTERVAL = { - "transacao_run_interval": {"minutes": 1}, - "principal_run_interval": {"hours": 1}, + "source_type": "db", } BILHETAGEM_TRANSACAO_CAPTURE_PARAMS = { @@ -204,9 +198,9 @@ class constants(Enum): # pylint: disable=c0103 data_processamento BETWEEN '{start}' AND '{end}' """, - "run_interval": BILHETAGEM_CAPTURE_RUN_INTERVAL["transacao_run_interval"], }, - "primary_key": ["id"], # id column to nest data on + "primary_key": ["id"], + "interval_minutes": 1, } BILHETAGEM_SECRET_PATH = "smtr_jae_access_data" @@ -225,11 +219,9 @@ class constants(Enum): # pylint: disable=c0103 WHERE DT_INCLUSAO >= '{start}' """, - "run_interval": BILHETAGEM_CAPTURE_RUN_INTERVAL[ - "principal_run_interval" - ], }, "primary_key": ["CD_LINHA"], # id column to nest data on + "interval_minutes": 60, }, { "table_id": "grupo", @@ -244,11 +236,9 @@ class constants(Enum): # pylint: disable=c0103 WHERE DT_INCLUSAO >= '{start}' """, - "run_interval": BILHETAGEM_CAPTURE_RUN_INTERVAL[ - "principal_run_interval" - ], }, "primary_key": ["CD_GRUPO"], # id column to nest data on + "interval_minutes": 60, }, { "table_id": "grupo_linha", @@ -263,11 +253,9 @@ class constants(Enum): # pylint: disable=c0103 WHERE DT_INCLUSAO >= '{start}' """, - "run_interval": BILHETAGEM_CAPTURE_RUN_INTERVAL[ - "principal_run_interval" - ], }, - "primary_key": ["CD_GRUPO", "CD_LINHA"], # id column to nest data on + "primary_key": ["CD_GRUPO", "CD_LINHA"], + "interval_minutes": 60, }, { "table_id": "matriz_integracao", @@ -282,14 +270,12 @@ class constants(Enum): # pylint: disable=c0103 WHERE dt_inclusao >= '{start}' """, - "run_interval": BILHETAGEM_CAPTURE_RUN_INTERVAL[ - "principal_run_interval" - ], }, "primary_key": [ "cd_versao_matriz", "cd_integracao", ], # id column to nest data on + "interval_minutes": 60, }, ] diff --git a/pipelines/rj_smtr/flows.py b/pipelines/rj_smtr/flows.py index d4292129c..18a7fb1a3 100644 --- a/pipelines/rj_smtr/flows.py +++ b/pipelines/rj_smtr/flows.py @@ -5,8 +5,9 @@ from prefect.run_configs import KubernetesRun from prefect.storage import GCS -from prefect import case, Parameter +from prefect import case, Parameter, task from prefect.utilities.edges import unmapped +from prefect.tasks.control_flow import merge # EMD Imports # @@ -35,6 +36,8 @@ upload_staging_data_to_gcs, get_raw_from_sources, create_request_params, + query_logs, + 
unpack_mapped_results_nout2, ) from pipelines.utils.execute_dbt_model.tasks import run_dbt_model @@ -45,77 +48,121 @@ ) as default_capture_flow: # Configuração # + # Parâmetros Gerais # table_id = Parameter("table_id", default=None) + dataset_id = Parameter("dataset_id", default=None) partition_date_only = Parameter("partition_date_only", default=None) + + # Parâmetros Captura # extract_params = Parameter("extract_params", default=None) - dataset_id = Parameter("dataset_id", default=None) secret_path = Parameter("secret_path", default=None) - primary_key = Parameter("primary_key", default=None) source_type = Parameter("source_type", default=None) + interval_minutes = Parameter("interval_minutes", default=None) + recapture = Parameter("recapture", default=False) + recapture_window_days = Parameter("recapture_window_days", default=1) + + # Parâmetros Pré-tratamento # + primary_key = Parameter("primary_key", default=None) + + get_run_name_prefix = task( + lambda recap: "Recaptura" if recap else "Captura", + name="get_run_name_prefix", + checkpoint=False, + ) - timestamp = get_current_timestamp() + with case(recapture, True): + _, recapture_timestamps, recapture_previous_errors = query_logs( + dataset_id=dataset_id, + table_id=table_id, + interval_minutes=interval_minutes, + recapture_window_days=recapture_window_days, + ) + + with case(recapture, False): + capture_timestamp = [get_current_timestamp()] + capture_previous_errors = task( + lambda: [None], checkpoint=False, name="assign_none_to_previous_errors" + )() + + timestamps = merge(recapture_timestamps, capture_timestamp) + previous_errors = merge(recapture_previous_errors, capture_previous_errors) rename_flow_run = rename_current_flow_run_now_time( - prefix=default_capture_flow.name + " " + table_id + ": ", - now_time=timestamp, + prefix="SMTR: " + get_run_name_prefix(recap=recapture) + " " + table_id + ": ", + now_time=get_now_time(), ) - partitions = create_date_hour_partition( - timestamp, partition_date_only=partition_date_only + partitions = create_date_hour_partition.map( + timestamps, partition_date_only=unmapped(partition_date_only) ) - filename = parse_timestamp_to_string(timestamp) + filenames = parse_timestamp_to_string.map(timestamps) - filepath = create_local_partition_path( - dataset_id=dataset_id, - table_id=table_id, - filename=filename, + filepaths = create_local_partition_path.map( + dataset_id=unmapped(dataset_id), + table_id=unmapped(table_id), + filename=filenames, partitions=partitions, ) # Extração # - request_params, request_path = create_request_params( - dataset_id=dataset_id, - extract_params=extract_params, - table_id=table_id, - timestamp=timestamp, + create_request_params_returns = create_request_params.map( + dataset_id=unmapped(dataset_id), + extract_params=unmapped(extract_params), + table_id=unmapped(table_id), + timestamp=timestamps, + interval_minutes=unmapped(interval_minutes), ) - error, raw_filepath = get_raw_from_sources( - source_type=source_type, - local_filepath=filepath, - source_path=request_path, - dataset_id=dataset_id, - table_id=table_id, - secret_path=secret_path, + request_params, request_paths = unpack_mapped_results_nout2( + mapped_results=create_request_params_returns + ) + + get_raw_from_sources_returns = get_raw_from_sources.map( + source_type=unmapped(source_type), + local_filepath=filepaths, + source_path=request_paths, + dataset_id=unmapped(dataset_id), + table_id=unmapped(table_id), + secret_path=unmapped(secret_path), request_params=request_params, ) - error = 
upload_raw_data_to_gcs( - error=error, - raw_filepath=raw_filepath, - table_id=table_id, - dataset_id=dataset_id, + errors, raw_filepaths = unpack_mapped_results_nout2( + mapped_results=get_raw_from_sources_returns + ) + + errors = upload_raw_data_to_gcs.map( + error=errors, + raw_filepath=raw_filepaths, + table_id=unmapped(table_id), + dataset_id=unmapped(dataset_id), partitions=partitions, ) # Pré-tratamento # - error, staging_filepath = transform_raw_to_nested_structure( - raw_filepath=raw_filepath, - filepath=filepath, - error=error, - timestamp=timestamp, - primary_key=primary_key, + nested_structure_returns = transform_raw_to_nested_structure.map( + raw_filepath=raw_filepaths, + filepath=filepaths, + error=errors, + timestamp=timestamps, + primary_key=unmapped(primary_key), ) - STAGING_UPLOADED = upload_staging_data_to_gcs( - error=error, - staging_filepath=staging_filepath, - timestamp=timestamp, - table_id=table_id, - dataset_id=dataset_id, + errors, staging_filepaths = unpack_mapped_results_nout2( + mapped_results=nested_structure_returns + ) + + STAGING_UPLOADED = upload_staging_data_to_gcs.map( + error=errors, + staging_filepath=staging_filepaths, + timestamp=timestamps, + table_id=unmapped(table_id), + dataset_id=unmapped(dataset_id), partitions=partitions, + previous_error=previous_errors, + recapture=unmapped(recapture), ) default_capture_flow.storage = GCS(emd_constants.GCS_FLOWS_BUCKET.value) @@ -192,3 +239,72 @@ image=emd_constants.DOCKER_IMAGE.value, labels=[emd_constants.RJ_SMTR_AGENT_LABEL.value], ) + +with Flow( + "SMTR: Materialização", + code_owners=["caio", "fernanda", "boris", "rodrigo"], +) as default_materialization_flow: + # SETUP # + + dataset_id = Parameter("dataset_id", default=None) + table_id = Parameter("table_id", default=None) + raw_table_id = Parameter("raw_table_id", default=None) + dbt_alias = Parameter("dbt_alias", default=False) + upstream = Parameter("upstream", default=None) + downstream = Parameter("downstream", default=None) + exclude = Parameter("exclude", default=None) + flags = Parameter("flags", default=None) + dbt_vars = Parameter("dbt_vars", default=dict()) + + LABELS = get_current_flow_labels() + MODE = get_current_flow_mode(LABELS) + + _vars, date_var, flag_date_range = create_dbt_run_vars( + dataset_id=dataset_id, + dbt_vars=dbt_vars, + table_id=table_id, + raw_dataset_id=dataset_id, + raw_table_id=raw_table_id, + mode=MODE, + ) + + # Rename flow run + + flow_name_prefix = coalesce_task([table_id, dataset_id]) + + flow_name_now_time = coalesce_task([date_var, get_now_time()]) + + rename_flow_run = rename_current_flow_run_now_time( + prefix=default_materialization_flow.name + " " + flow_name_prefix + ": ", + now_time=flow_name_now_time, + ) + + dbt_client = get_k8s_dbt_client(mode=MODE, wait=rename_flow_run) + + RUNS = run_dbt_model.map( + dbt_client=unmapped(dbt_client), + dataset_id=unmapped(dataset_id), + table_id=unmapped(table_id), + _vars=_vars, + dbt_alias=unmapped(dbt_alias), + upstream=unmapped(upstream), + downstream=unmapped(downstream), + exclude=unmapped(exclude), + flags=unmapped(flags), + ) + + with case(flag_date_range, True): + set_last_run_timestamp( + dataset_id=dataset_id, + table_id=table_id, + timestamp=date_var["date_range_end"], + wait=RUNS, + mode=MODE, + ) + + +default_materialization_flow.storage = GCS(emd_constants.GCS_FLOWS_BUCKET.value) +default_materialization_flow.run_config = KubernetesRun( + image=emd_constants.DOCKER_IMAGE.value, + labels=[emd_constants.RJ_SMTR_AGENT_LABEL.value], +) diff --git 
a/pipelines/rj_smtr/tasks.py b/pipelines/rj_smtr/tasks.py index f7d687dea..79cd84751 100644 --- a/pipelines/rj_smtr/tasks.py +++ b/pipelines/rj_smtr/tasks.py @@ -8,7 +8,7 @@ import os from pathlib import Path import traceback -from typing import Dict, List, Union, Iterable +from typing import Dict, List, Union, Iterable, Any import io from basedosdados import Storage, Table @@ -26,16 +26,17 @@ bq_project, get_table_min_max_value, get_last_run_timestamp, - log_critical, data_info_str, dict_contains_keys, get_raw_data_api, get_raw_data_gcs, + get_raw_data_db, upload_run_logs_to_bq, get_datetime_range, read_raw_data, save_treated_local_func, save_raw_local_func, + log_critical, ) from pipelines.utils.execute_dbt_model.utils import get_dbt_client from pipelines.utils.utils import log, get_redis_client, get_vault_secret @@ -137,6 +138,103 @@ def build_incremental_model( # pylint: disable=too-many-arguments return False +@task(checkpoint=False, nout=3) +def create_dbt_run_vars( + dataset_id: str, + dbt_vars: dict, + table_id: str, + raw_dataset_id: str, + raw_table_id: str, + mode: str, +) -> tuple[list[dict], Union[list[dict], dict, None], bool]: + """ + Create the variables to be used in dbt materialization based on a dict + + Args: + dataset_id (str): the dataset_id to get the variables + dbt_vars (dict): dict containing the parameters + table_id (str): the table_id get the date_range variable + raw_dataset_id (str): the raw_dataset_id get the date_range variable + raw_table_id (str): the raw_table_id get the date_range variable + mode (str): the mode to get the date_range variable + + Returns: + list[dict]: the variables to be used in DBT + Union[list[dict], dict, None]: the date variable (date_range or run_date) + bool: a flag that indicates if the date_range variable came from Redis + """ + + log(f"Creating DBT variables. Parameter received: {dbt_vars}") + + if (not dbt_vars) or (not table_id): + log("dbt_vars or table_id are blank. 
Skiping task") + return [None], None, False + + final_vars = [] + date_var = None + flag_date_range = False + + if "date_range" in dbt_vars.keys(): + log("Creating date_range variable") + + # Set date_range variable manually + if dict_contains_keys( + dbt_vars["date_range"], ["date_range_start", "date_range_end"] + ): + date_var = { + "date_range_start": dbt_vars["date_range"]["date_range_start"], + "date_range_end": dbt_vars["date_range"]["date_range_end"], + } + # Create date_range using Redis + else: + raw_table_id = raw_table_id or table_id + + date_var = get_materialization_date_range.run( + dataset_id=dataset_id, + table_id=table_id, + raw_dataset_id=raw_dataset_id, + raw_table_id=raw_table_id, + table_run_datetime_column_name=dbt_vars["date_range"].get( + "table_run_datetime_column_name" + ), + mode=mode, + delay_hours=dbt_vars["date_range"].get("delay_hours", 0), + ) + + flag_date_range = True + + final_vars.append(date_var.copy()) + + log(f"date_range created: {date_var}") + + elif "run_date" in dbt_vars.keys(): + log("Creating run_date variable") + + date_var = get_run_dates.run( + dbt_vars["run_date"].get("date_range_start"), + dbt_vars["run_date"].get("date_range_end"), + ) + final_vars.append([d.copy() for d in date_var]) + + log(f"run_date created: {date_var}") + + if "version" in dbt_vars.keys(): + log("Creating version variable") + dataset_sha = fetch_dataset_sha.run(dataset_id=dataset_id) + + # if there are other variables inside the list, update each item adding the version variable + if final_vars: + final_vars = get_join_dict.run(dict_list=final_vars, new_dict=dataset_sha) + else: + final_vars.append(dataset_sha) + + log(f"version created: {dataset_sha}") + + log(f"All variables was created, final value is: {final_vars}") + + return final_vars, date_var, flag_date_range + + ############### # # Local file managment @@ -271,7 +369,9 @@ def query_logs( dataset_id: str, table_id: str, datetime_filter=None, - max_recaptures: int = 60, + max_recaptures: int = 360, + interval_minutes: int = 1, + recapture_window_days: int = 1, ): """ Queries capture logs to check for errors @@ -281,10 +381,15 @@ def query_logs( table_id (str): table_id on BigQuery datetime_filter (pendulum.datetime.DateTime, optional): filter passed to query. 
This task will query the logs table - for the last 1 day before datetime_filter + for the last n (n = recapture_window_days) days before datetime_filter + max_recaptures (int, optional): maximum number of recaptures to be done + interval_minutes (int, optional): interval in minutes between each recapture + recapture_window_days (int, optional): Number of days to query for erros Returns: - list: containing timestamps for which the capture failed + lists: errors (bool), + timestamps (list of pendulum.datetime.DateTime), + previous_errors (list of previous errors) """ if not datetime_filter: @@ -296,50 +401,64 @@ def query_logs( second=0, microsecond=0 ) + datetime_filter = datetime_filter.strftime("%Y-%m-%d %H:%M:%S") + query = f""" - with t as ( - select - datetime(timestamp_array) as timestamp_array - from - unnest(GENERATE_TIMESTAMP_ARRAY( - timestamp_sub('{datetime_filter.strftime('%Y-%m-%d %H:%M:%S')}', interval 1 day), - timestamp('{datetime_filter.strftime('%Y-%m-%d %H:%M:%S')}'), - interval 1 minute) - ) as timestamp_array - where timestamp_array < '{datetime_filter.strftime('%Y-%m-%d %H:%M:%S')}' + WITH + t AS ( + SELECT + DATETIME(timestamp_array) AS timestamp_array + FROM + UNNEST( + GENERATE_TIMESTAMP_ARRAY( + TIMESTAMP_SUB('{datetime_filter}', INTERVAL {recapture_window_days} day), + TIMESTAMP('{datetime_filter}'), + INTERVAL {interval_minutes} minute) ) + AS timestamp_array + WHERE + timestamp_array < '{datetime_filter}' ), + logs_table AS ( + SELECT + SAFE_CAST(DATETIME(TIMESTAMP(timestamp_captura), + "America/Sao_Paulo") AS DATETIME) timestamp_captura, + SAFE_CAST(sucesso AS BOOLEAN) sucesso, + SAFE_CAST(erro AS STRING) erro, + SAFE_CAST(DATA AS DATE) DATA + FROM + rj-smtr-staging.{dataset_id}_staging.{table_id}_logs AS t ), - logs as ( - select + logs AS ( + SELECT *, - timestamp_trunc(timestamp_captura, minute) as timestamp_array - from - rj-smtr.{dataset_id}.{table_id}_logs - where - data between - date(datetime_sub('{datetime_filter.strftime('%Y-%m-%d %H:%M:%S')}', - interval 1 day)) - and date('{datetime_filter.strftime('%Y-%m-%d %H:%M:%S')}') - and - timestamp_captura between - datetime_sub('{datetime_filter.strftime('%Y-%m-%d %H:%M:%S')}', interval 1 day) - and '{datetime_filter.strftime('%Y-%m-%d %H:%M:%S')}' - order by timestamp_captura - ) - select - case - when logs.timestamp_captura is not null then logs.timestamp_captura - else t.timestamp_array - end as timestamp_captura, - logs.erro - from + TIMESTAMP_TRUNC(timestamp_captura, minute) AS timestamp_array + FROM + logs_table + WHERE + DATA BETWEEN DATE(DATETIME_SUB('{datetime_filter}', + INTERVAL {recapture_window_days} day)) + AND DATE('{datetime_filter}') + AND timestamp_captura BETWEEN + DATETIME_SUB('{datetime_filter}', INTERVAL {recapture_window_days} day) + AND '{datetime_filter}' + ORDER BY + timestamp_captura ) + SELECT + CASE + WHEN logs.timestamp_captura IS NOT NULL THEN logs.timestamp_captura + ELSE + t.timestamp_array + END + AS timestamp_captura, + logs.erro + FROM t - left join + LEFT JOIN logs - on + ON logs.timestamp_array = t.timestamp_array - where - logs.sucesso is not True - order by + WHERE + logs.sucesso IS NOT TRUE + ORDER BY timestamp_captura """ log(f"Run query to check logs:\n{query}") @@ -445,6 +564,7 @@ def create_request_params( table_id: str, dataset_id: str, timestamp: datetime, + interval_minutes: int, ) -> tuple[str, str]: """ Task to create request params @@ -454,6 +574,7 @@ def create_request_params( table_id (str): table_id on BigQuery dataset_id (str): dataset_id on BigQuery 
timestamp (datetime): timestamp for flow run + interval_minutes (int): interval in minutes between each capture Returns: request_params: host, database and query to request data @@ -466,18 +587,15 @@ def create_request_params( database = constants.BILHETAGEM_GENERAL_CAPTURE_PARAMS.value["databases"][ extract_params["database"] ] - request_url = ( - constants.BILHETAGEM_GENERAL_CAPTURE_PARAMS.value["vpn_url"] - + database["engine"] - ) + request_url = database["host"] datetime_range = get_datetime_range( - timestamp=timestamp, interval=timedelta(**extract_params["run_interval"]) + timestamp=timestamp, interval=timedelta(minutes=interval_minutes) ) request_params = { - "host": database["host"], # TODO: exibir no log em ambiente fechado "database": extract_params["database"], + "engine": database["engine"], "query": extract_params["query"].format(**datetime_range), } @@ -534,6 +652,10 @@ def get_raw_from_sources( error, data, filetype = get_raw_data_gcs( dataset_id=dataset_id, table_id=table_id, zip_filename=request_params ) + elif source_type == "db": + error, data, filetype = get_raw_data_db( + host=source_path, secret_path=secret_path, **request_params + ) else: raise NotImplementedError(f"{source_type} not supported") @@ -771,6 +893,8 @@ def upload_staging_data_to_gcs( table_id: str, dataset_id: str, partitions: list, + previous_error: str = None, + recapture: bool = False, ) -> Union[str, None]: """ Upload staging data to GCS. @@ -805,6 +929,8 @@ def upload_staging_data_to_gcs( error=error, timestamp=timestamp, mode="staging", + previous_error=previous_error, + recapture=recapture, ) return error @@ -1098,6 +1224,13 @@ def transform_raw_to_nested_structure( return error, filepath +############### +# +# Utilitary tasks +# +############### + + @task(checkpoint=False) def coalesce_task(value_list: Iterable): """ @@ -1112,101 +1245,23 @@ def coalesce_task(value_list: Iterable): try: return next(value for value in value_list if value is not None) except StopIteration: - return + return None -@task(checkpoint=False, nout=3) -def create_dbt_run_vars( - dataset_id: str, - dbt_vars: dict, - table_id: str, - raw_dataset_id: str, - raw_table_id: str, - mode: str, -) -> tuple[list[dict], Union[list[dict], dict, None], bool]: +@task(checkpoint=False, nout=2) +def unpack_mapped_results_nout2( + mapped_results: Iterable, +) -> tuple[list[Any], list[Any]]: """ - Create the variables to be used in dbt materialization based on a dict + Task to unpack the results from an nout=2 tasks in 2 lists when it is mapped Args: - dataset_id (str): the dataset_id to get the variables - dbt_vars (dict): dict containing the parameters - table_id (str): the table_id get the date_range variable - raw_dataset_id (str): the raw_dataset_id get the date_range variable - raw_table_id (str): the raw_table_id get the date_range variable - mode (str): the mode to get the date_range variable + mapped_results (Iterable): The mapped task return Returns: - tuple[list[dict]: the variables to be used in DBT - Union[list[dict], dict, None]: the date variable (date_range or run_date) - bool: a flag that indicates if the date_range variable came from Redis - """ - - log(f"Creating DBT variables. Parameter received: {dbt_vars}") - - if (not dbt_vars) or (not table_id): - log("dbt_vars or table_id are blank. 
Skiping task") - return [None], None, False - - final_vars = [] - date_var = None - flag_date_range = False - - if "date_range" in dbt_vars.keys(): - log("Creating date_range variable") - - # Set date_range variable manually - if dict_contains_keys( - dbt_vars["date_range"], ["date_range_start", "date_range_end"] - ): - date_var = { - "date_range_start": dbt_vars["date_range"]["date_range_start"], - "date_range_end": dbt_vars["date_range"]["date_range_end"], - } - # Create date_range using Redis - else: - raw_table_id = raw_table_id or table_id - - date_var = get_materialization_date_range.run( - dataset_id=dataset_id, - table_id=table_id, - raw_dataset_id=raw_dataset_id, - raw_table_id=raw_table_id, - table_run_datetime_column_name=dbt_vars["date_range"].get( - "table_run_datetime_column_name" - ), - mode=mode, - delay_hours=dbt_vars["date_range"].get("delay_hours", 0), - ) - - flag_date_range = True - - final_vars.append(date_var.copy()) - - log(f"date_range created: {date_var}") - - elif "run_date" in dbt_vars.keys(): - log("Creating run_date variable") - - date_var = get_run_dates.run( - dbt_vars["run_date"].get("date_range_start"), - dbt_vars["run_date"].get("date_range_end"), - ) - final_vars.append([d.copy() for d in date_var]) - - log(f"run_date created: {date_var}") - - if "version" in dbt_vars.keys(): - log("Creating version variable") - dataset_sha = fetch_dataset_sha.run(dataset_id=dataset_id) + tuple[list[Any], list[Any]]: The task original return splited in 2 lists: + - 1st list being all the first return + - 2nd list being all the second return - # if there are other variables inside the list, update each item adding the version variable - if final_vars: - final_vars = get_join_dict.run(dict_list=final_vars, new_dict=dataset_sha) - else: - final_vars.append(dataset_sha) - - log(f"version created: {dataset_sha}") - - log(f"All variables was created, final value is: {final_vars}") - - return final_vars, date_var, flag_date_range + """ + return [r[0] for r in mapped_results], [r[1] for r in mapped_results] diff --git a/pipelines/rj_smtr/utils.py b/pipelines/rj_smtr/utils.py index f9b98afab..0d05ffb09 100644 --- a/pipelines/rj_smtr/utils.py +++ b/pipelines/rj_smtr/utils.py @@ -8,17 +8,21 @@ from pathlib import Path from datetime import timedelta, datetime -from typing import List, Union +from typing import List, Union, Any import traceback import io import json import zipfile +import pendulum import pytz import requests import basedosdados as bd from basedosdados import Table import pandas as pd from google.cloud.storage.blob import Blob +import pymysql +import psycopg2 +import psycopg2.extras from prefect.schedules.clocks import IntervalClock @@ -434,7 +438,6 @@ def generate_execute_schedules( # pylint: disable=too-many-arguments,too-many-l clocks = [] for count, parameters in enumerate(table_parameters): parameter_defaults = parameters | general_flow_params - log(f"parameter_defaults: {parameter_defaults}") clocks.append( IntervalClock( interval=clock_interval, @@ -460,17 +463,40 @@ def dict_contains_keys(input_dict: dict, keys: list[str]) -> bool: return all(x in input_dict.keys() for x in keys) +def custom_serialization(obj: Any) -> Any: + """ + Function to serialize not JSON serializable objects + + Args: + obj (Any): Object to serialize + + Returns: + Any: Serialized object + """ + if isinstance(obj, pd.Timestamp): + if obj.tzinfo is None: + obj = obj.tz_localize("UTC").tz_convert( + emd_constants.DEFAULT_TIMEZONE.value + ) + + return obj.isoformat() + + raise 
TypeError(f"Object of type {type(obj)} is not JSON serializable") + + def save_raw_local_func( - data: Union[dict, str], filepath: str, mode: str = "raw", filetype: str = "json" + data: Union[dict, str], + filepath: str, + mode: str = "raw", + filetype: str = "json", ) -> str: """ Saves json response from API to .json file. Args: + data (Union[dict, str]): Raw data to save filepath (str): Path which to save raw file - status (dict): Must contain keys - * data: json returned from API - * error: error catched from API request mode (str, optional): Folder to save locally, later folder which to upload to GCS. + filetype (str, optional): The file format Returns: str: Path to the saved file """ @@ -480,12 +506,11 @@ def save_raw_local_func( Path(_filepath).parent.mkdir(parents=True, exist_ok=True) if filetype == "json": - if isinstance(data, dict): + if isinstance(data, str): data = json.loads(data) - json.dump(data, Path(_filepath).open("w", encoding="utf-8")) + with Path(_filepath).open("w", encoding="utf-8") as fi: + json.dump(data, fi, default=custom_serialization) - # if filetype == "csv": - # pass if filetype in ("txt", "csv"): with open(_filepath, "w", encoding="utf-8") as file: file.write(data) @@ -611,6 +636,49 @@ def get_raw_data_gcs( return error, data, filetype +def get_raw_data_db( + query: str, engine: str, host: str, secret_path: str, database: str +) -> tuple[str, str, str]: + """ + Get data from Databases + + Args: + query (str): the SQL Query to execute + engine (str): The datase management system + host (str): The database host + secret_path (str): Secret path to get credentials + database (str): The database to connect + + Returns: + tuple[str, str, str]: Error, data and filetype + """ + connector_mapping = { + "postgresql": psycopg2.connect, + "mysql": pymysql.connect, + } + + data = None + error = None + filetype = "json" + + try: + credentials = get_vault_secret(secret_path)["data"] + + with connector_mapping[engine]( + host=host, + user=credentials["user"], + password=credentials["password"], + database=database, + ) as connection: + data = pd.read_sql(sql=query, con=connection).to_dict(orient="records") + + except Exception: + error = traceback.format_exc() + log(f"[CATCHED] Task failed with error: \n{error}", level="error") + + return error, data, filetype + + def save_treated_local_func( filepath: str, data: pd.DataFrame, error: str, mode: str = "staging" ) -> str: @@ -678,7 +746,7 @@ def upload_run_logs_to_bq( # pylint: disable=R0913 "erro": [f"[recapturado]{previous_error}"], } ) - log(f"Recapturing {timestamp} with previous error:\n{error}") + log(f"Recapturing {timestamp} with previous error:\n{previous_error}") else: # not recapturing or error during flow execution dataframe = pd.DataFrame( diff --git a/poetry.lock b/poetry.lock index f106de89b..330ce7b4b 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,4 +1,4 @@ -# This file is automatically @generated by Poetry 1.5.1 and should not be changed by hand. +# This file is automatically @generated by Poetry 1.6.1 and should not be changed by hand. 
[[package]] name = "adal" @@ -2483,6 +2483,7 @@ files = [ {file = "greenlet-2.0.2-cp27-cp27m-win32.whl", hash = "sha256:6c3acb79b0bfd4fe733dff8bc62695283b57949ebcca05ae5c129eb606ff2d74"}, {file = "greenlet-2.0.2-cp27-cp27m-win_amd64.whl", hash = "sha256:283737e0da3f08bd637b5ad058507e578dd462db259f7f6e4c5c365ba4ee9343"}, {file = "greenlet-2.0.2-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:d27ec7509b9c18b6d73f2f5ede2622441de812e7b1a80bbd446cb0633bd3d5ae"}, + {file = "greenlet-2.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:d967650d3f56af314b72df7089d96cda1083a7fc2da05b375d2bc48c82ab3f3c"}, {file = "greenlet-2.0.2-cp310-cp310-macosx_11_0_x86_64.whl", hash = "sha256:30bcf80dda7f15ac77ba5af2b961bdd9dbc77fd4ac6105cee85b0d0a5fcf74df"}, {file = "greenlet-2.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:26fbfce90728d82bc9e6c38ea4d038cba20b7faf8a0ca53a9c07b67318d46088"}, {file = "greenlet-2.0.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9190f09060ea4debddd24665d6804b995a9c122ef5917ab26e1566dcc712ceeb"}, @@ -2491,6 +2492,7 @@ files = [ {file = "greenlet-2.0.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:76ae285c8104046b3a7f06b42f29c7b73f77683df18c49ab5af7983994c2dd91"}, {file = "greenlet-2.0.2-cp310-cp310-win_amd64.whl", hash = "sha256:2d4686f195e32d36b4d7cf2d166857dbd0ee9f3d20ae349b6bf8afc8485b3645"}, {file = "greenlet-2.0.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:c4302695ad8027363e96311df24ee28978162cdcdd2006476c43970b384a244c"}, + {file = "greenlet-2.0.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:d4606a527e30548153be1a9f155f4e283d109ffba663a15856089fb55f933e47"}, {file = "greenlet-2.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c48f54ef8e05f04d6eff74b8233f6063cb1ed960243eacc474ee73a2ea8573ca"}, {file = "greenlet-2.0.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a1846f1b999e78e13837c93c778dcfc3365902cfb8d1bdb7dd73ead37059f0d0"}, {file = "greenlet-2.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3a06ad5312349fec0ab944664b01d26f8d1f05009566339ac6f63f56589bc1a2"}, @@ -2520,6 +2522,7 @@ files = [ {file = "greenlet-2.0.2-cp37-cp37m-win32.whl", hash = "sha256:3f6ea9bd35eb450837a3d80e77b517ea5bc56b4647f5502cd28de13675ee12f7"}, {file = "greenlet-2.0.2-cp37-cp37m-win_amd64.whl", hash = "sha256:7492e2b7bd7c9b9916388d9df23fa49d9b88ac0640db0a5b4ecc2b653bf451e3"}, {file = "greenlet-2.0.2-cp38-cp38-macosx_10_15_x86_64.whl", hash = "sha256:b864ba53912b6c3ab6bcb2beb19f19edd01a6bfcbdfe1f37ddd1778abfe75a30"}, + {file = "greenlet-2.0.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:1087300cf9700bbf455b1b97e24db18f2f77b55302a68272c56209d5587c12d1"}, {file = "greenlet-2.0.2-cp38-cp38-manylinux2010_x86_64.whl", hash = "sha256:ba2956617f1c42598a308a84c6cf021a90ff3862eddafd20c3333d50f0edb45b"}, {file = "greenlet-2.0.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fc3a569657468b6f3fb60587e48356fe512c1754ca05a564f11366ac9e306526"}, {file = "greenlet-2.0.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8eab883b3b2a38cc1e050819ef06a7e6344d4a990d24d45bc6f2cf959045a45b"}, @@ -2528,6 +2531,7 @@ files = [ {file = "greenlet-2.0.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:b0ef99cdbe2b682b9ccbb964743a6aca37905fda5e0452e5ee239b1654d37f2a"}, {file = "greenlet-2.0.2-cp38-cp38-win32.whl", hash = 
"sha256:b80f600eddddce72320dbbc8e3784d16bd3fb7b517e82476d8da921f27d4b249"}, {file = "greenlet-2.0.2-cp38-cp38-win_amd64.whl", hash = "sha256:4d2e11331fc0c02b6e84b0d28ece3a36e0548ee1a1ce9ddde03752d9b79bba40"}, + {file = "greenlet-2.0.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:8512a0c38cfd4e66a858ddd1b17705587900dd760c6003998e9472b77b56d417"}, {file = "greenlet-2.0.2-cp39-cp39-macosx_11_0_x86_64.whl", hash = "sha256:88d9ab96491d38a5ab7c56dd7a3cc37d83336ecc564e4e8816dbed12e5aaefc8"}, {file = "greenlet-2.0.2-cp39-cp39-manylinux2010_x86_64.whl", hash = "sha256:561091a7be172ab497a3527602d467e2b3fbe75f9e783d8b8ce403fa414f71a6"}, {file = "greenlet-2.0.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:971ce5e14dc5e73715755d0ca2975ac88cfdaefcaab078a284fea6cfabf866df"}, @@ -3533,10 +3537,7 @@ packaging = "<24" pandas = "<3" prometheus-flask-exporter = {version = "*", optional = true, markers = "extra == \"extras\""} protobuf = ">=3.12.0,<5" -pyarrow = [ - {version = ">=4.0.0,<13"}, - {version = "*", optional = true, markers = "extra == \"extras\""}, -] +pyarrow = ">=4.0.0,<13" pysftp = {version = "*", optional = true, markers = "extra == \"extras\""} pytz = "<2024" pyyaml = ">=5.1,<7" @@ -3945,12 +3946,11 @@ files = [ [package.dependencies] numpy = [ - {version = ">=1.21.0", markers = "python_version <= \"3.9\" and platform_system == \"Darwin\" and platform_machine == \"arm64\""}, - {version = ">=1.19.3", markers = "python_version >= \"3.6\" and platform_system == \"Linux\" and platform_machine == \"aarch64\" or python_version >= \"3.9\""}, - {version = ">=1.17.0", markers = "python_version >= \"3.7\""}, - {version = ">=1.17.3", markers = "python_version >= \"3.8\""}, - {version = ">=1.21.2", markers = "python_version >= \"3.10\""}, + {version = ">=1.21.0", markers = "python_version <= \"3.9\" and platform_system == \"Darwin\" and platform_machine == \"arm64\" and python_version >= \"3.8\""}, + {version = ">=1.19.3", markers = "platform_system == \"Linux\" and platform_machine == \"aarch64\" and python_version >= \"3.8\" and python_version < \"3.10\" or python_version > \"3.9\" and python_version < \"3.10\" or python_version >= \"3.9\" and platform_system != \"Darwin\" and python_version < \"3.10\" or python_version >= \"3.9\" and platform_machine != \"arm64\" and python_version < \"3.10\""}, + {version = ">=1.17.3", markers = "(platform_system != \"Darwin\" and platform_system != \"Linux\") and python_version >= \"3.8\" and python_version < \"3.9\" or platform_system != \"Darwin\" and python_version >= \"3.8\" and python_version < \"3.9\" and platform_machine != \"aarch64\" or platform_machine != \"arm64\" and python_version >= \"3.8\" and python_version < \"3.9\" and platform_system != \"Linux\" or (platform_machine != \"arm64\" and platform_machine != \"aarch64\") and python_version >= \"3.8\" and python_version < \"3.9\""}, {version = ">=1.21.4", markers = "python_version >= \"3.10\" and platform_system == \"Darwin\""}, + {version = ">=1.21.2", markers = "platform_system != \"Darwin\" and python_version >= \"3.10\""}, ] [[package]] @@ -3968,6 +3968,7 @@ files = [ {file = "orjson-3.9.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4a39c2529d75373b7167bf84c814ef9b8f3737a339c225ed6c0df40736df8748"}, {file = "orjson-3.9.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:84ebd6fdf138eb0eb4280045442331ee71c0aab5e16397ba6645f32f911bfb37"}, {file = "orjson-3.9.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = 
"sha256:5a60a1cfcfe310547a1946506dd4f1ed0a7d5bd5b02c8697d9d5dcd8d2e9245e"}, + {file = "orjson-3.9.2-cp310-none-win32.whl", hash = "sha256:2ae61f5d544030a6379dbc23405df66fea0777c48a0216d2d83d3e08b69eb676"}, {file = "orjson-3.9.2-cp310-none-win_amd64.whl", hash = "sha256:c290c4f81e8fd0c1683638802c11610b2f722b540f8e5e858b6914b495cf90c8"}, {file = "orjson-3.9.2-cp311-cp311-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:02ef014f9a605e84b675060785e37ec9c0d2347a04f1307a9d6840ab8ecd6f55"}, {file = "orjson-3.9.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:992af54265ada1c1579500d6594ed73fe333e726de70d64919cf37f93defdd06"}, @@ -3977,6 +3978,7 @@ files = [ {file = "orjson-3.9.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:275b5a18fd9ed60b2720543d3ddac170051c43d680e47d04ff5203d2c6d8ebf1"}, {file = "orjson-3.9.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:b9aea6dcb99fcbc9f6d1dd84fca92322fda261da7fb014514bb4689c7c2097a8"}, {file = "orjson-3.9.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:7d74ae0e101d17c22ef67b741ba356ab896fc0fa64b301c2bf2bb0a4d874b190"}, + {file = "orjson-3.9.2-cp311-none-win32.whl", hash = "sha256:a9a7d618f99b2d67365f2b3a588686195cb6e16666cd5471da603a01315c17cc"}, {file = "orjson-3.9.2-cp311-none-win_amd64.whl", hash = "sha256:6320b28e7bdb58c3a3a5efffe04b9edad3318d82409e84670a9b24e8035a249d"}, {file = "orjson-3.9.2-cp37-cp37m-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:368e9cc91ecb7ac21f2aa475e1901204110cf3e714e98649c2502227d248f947"}, {file = "orjson-3.9.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:58e9e70f0dcd6a802c35887f306b555ff7a214840aad7de24901fc8bd9cf5dde"}, @@ -3986,6 +3988,7 @@ files = [ {file = "orjson-3.9.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e46e9c5b404bb9e41d5555762fd410d5466b7eb1ec170ad1b1609cbebe71df21"}, {file = "orjson-3.9.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:8170157288714678ffd64f5de33039e1164a73fd8b6be40a8a273f80093f5c4f"}, {file = "orjson-3.9.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:e3e2f087161947dafe8319ea2cfcb9cea4bb9d2172ecc60ac3c9738f72ef2909"}, + {file = "orjson-3.9.2-cp37-none-win32.whl", hash = "sha256:373b7b2ad11975d143556fdbd2c27e1150b535d2c07e0b48dc434211ce557fe6"}, {file = "orjson-3.9.2-cp37-none-win_amd64.whl", hash = "sha256:d7de3dbbe74109ae598692113cec327fd30c5a30ebca819b21dfa4052f7b08ef"}, {file = "orjson-3.9.2-cp38-cp38-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:8cd4385c59bbc1433cad4a80aca65d2d9039646a9c57f8084897549b55913b17"}, {file = "orjson-3.9.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a74036aab1a80c361039290cdbc51aa7adc7ea13f56e5ef94e9be536abd227bd"}, @@ -3995,6 +3998,7 @@ files = [ {file = "orjson-3.9.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1882a70bb69595b9ec5aac0040a819e94d2833fe54901e2b32f5e734bc259a8b"}, {file = "orjson-3.9.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:fc05e060d452145ab3c0b5420769e7356050ea311fc03cb9d79c481982917cca"}, {file = "orjson-3.9.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:f8bc2c40d9bb26efefb10949d261a47ca196772c308babc538dd9f4b73e8d386"}, + {file = "orjson-3.9.2-cp38-none-win32.whl", hash = "sha256:302d80198d8d5b658065627da3a356cbe5efa082b89b303f162f030c622e0a17"}, {file = "orjson-3.9.2-cp38-none-win_amd64.whl", hash = 
"sha256:3164fc20a585ec30a9aff33ad5de3b20ce85702b2b2a456852c413e3f0d7ab09"}, {file = "orjson-3.9.2-cp39-cp39-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:7a6ccadf788531595ed4728aa746bc271955448d2460ff0ef8e21eb3f2a281ba"}, {file = "orjson-3.9.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3245d230370f571c945f69aab823c279a868dc877352817e22e551de155cb06c"}, @@ -4004,6 +4008,7 @@ files = [ {file = "orjson-3.9.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:03fb36f187a0c19ff38f6289418863df8b9b7880cdbe279e920bef3a09d8dab1"}, {file = "orjson-3.9.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:20925d07a97c49c6305bff1635318d9fc1804aa4ccacb5fb0deb8a910e57d97a"}, {file = "orjson-3.9.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:eebfed53bec5674e981ebe8ed2cf00b3f7bcda62d634733ff779c264307ea505"}, + {file = "orjson-3.9.2-cp39-none-win32.whl", hash = "sha256:ba60f09d735f16593950c6adf033fbb526faa94d776925579a87b777db7d0838"}, {file = "orjson-3.9.2-cp39-none-win_amd64.whl", hash = "sha256:869b961df5fcedf6c79f4096119b35679b63272362e9b745e668f0391a892d39"}, {file = "orjson-3.9.2.tar.gz", hash = "sha256:24257c8f641979bf25ecd3e27251b5cc194cdd3a6e96004aac8446f5e63d9664"}, ] @@ -4594,6 +4599,84 @@ files = [ [package.extras] test = ["enum34", "ipaddress", "mock", "pywin32", "wmi"] +[[package]] +name = "psycopg2-binary" +version = "2.9.9" +description = "psycopg2 - Python-PostgreSQL Database Adapter" +optional = false +python-versions = ">=3.7" +files = [ + {file = "psycopg2-binary-2.9.9.tar.gz", hash = "sha256:7f01846810177d829c7692f1f5ada8096762d9172af1b1a28d4ab5b77c923c1c"}, + {file = "psycopg2_binary-2.9.9-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:c2470da5418b76232f02a2fcd2229537bb2d5a7096674ce61859c3229f2eb202"}, + {file = "psycopg2_binary-2.9.9-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c6af2a6d4b7ee9615cbb162b0738f6e1fd1f5c3eda7e5da17861eacf4c717ea7"}, + {file = "psycopg2_binary-2.9.9-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:75723c3c0fbbf34350b46a3199eb50638ab22a0228f93fb472ef4d9becc2382b"}, + {file = "psycopg2_binary-2.9.9-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:83791a65b51ad6ee6cf0845634859d69a038ea9b03d7b26e703f94c7e93dbcf9"}, + {file = "psycopg2_binary-2.9.9-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0ef4854e82c09e84cc63084a9e4ccd6d9b154f1dbdd283efb92ecd0b5e2b8c84"}, + {file = "psycopg2_binary-2.9.9-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ed1184ab8f113e8d660ce49a56390ca181f2981066acc27cf637d5c1e10ce46e"}, + {file = "psycopg2_binary-2.9.9-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:d2997c458c690ec2bc6b0b7ecbafd02b029b7b4283078d3b32a852a7ce3ddd98"}, + {file = "psycopg2_binary-2.9.9-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:b58b4710c7f4161b5e9dcbe73bb7c62d65670a87df7bcce9e1faaad43e715245"}, + {file = "psycopg2_binary-2.9.9-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:0c009475ee389757e6e34611d75f6e4f05f0cf5ebb76c6037508318e1a1e0d7e"}, + {file = "psycopg2_binary-2.9.9-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:8dbf6d1bc73f1d04ec1734bae3b4fb0ee3cb2a493d35ede9badbeb901fb40f6f"}, + {file = "psycopg2_binary-2.9.9-cp310-cp310-win32.whl", hash = "sha256:3f78fd71c4f43a13d342be74ebbc0666fe1f555b8837eb113cb7416856c79682"}, + {file = "psycopg2_binary-2.9.9-cp310-cp310-win_amd64.whl", hash = 
"sha256:876801744b0dee379e4e3c38b76fc89f88834bb15bf92ee07d94acd06ec890a0"}, + {file = "psycopg2_binary-2.9.9-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:ee825e70b1a209475622f7f7b776785bd68f34af6e7a46e2e42f27b659b5bc26"}, + {file = "psycopg2_binary-2.9.9-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1ea665f8ce695bcc37a90ee52de7a7980be5161375d42a0b6c6abedbf0d81f0f"}, + {file = "psycopg2_binary-2.9.9-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:143072318f793f53819048fdfe30c321890af0c3ec7cb1dfc9cc87aa88241de2"}, + {file = "psycopg2_binary-2.9.9-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c332c8d69fb64979ebf76613c66b985414927a40f8defa16cf1bc028b7b0a7b0"}, + {file = "psycopg2_binary-2.9.9-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f7fc5a5acafb7d6ccca13bfa8c90f8c51f13d8fb87d95656d3950f0158d3ce53"}, + {file = "psycopg2_binary-2.9.9-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:977646e05232579d2e7b9c59e21dbe5261f403a88417f6a6512e70d3f8a046be"}, + {file = "psycopg2_binary-2.9.9-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:b6356793b84728d9d50ead16ab43c187673831e9d4019013f1402c41b1db9b27"}, + {file = "psycopg2_binary-2.9.9-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:bc7bb56d04601d443f24094e9e31ae6deec9ccb23581f75343feebaf30423359"}, + {file = "psycopg2_binary-2.9.9-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:77853062a2c45be16fd6b8d6de2a99278ee1d985a7bd8b103e97e41c034006d2"}, + {file = "psycopg2_binary-2.9.9-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:78151aa3ec21dccd5cdef6c74c3e73386dcdfaf19bced944169697d7ac7482fc"}, + {file = "psycopg2_binary-2.9.9-cp311-cp311-win32.whl", hash = "sha256:dc4926288b2a3e9fd7b50dc6a1909a13bbdadfc67d93f3374d984e56f885579d"}, + {file = "psycopg2_binary-2.9.9-cp311-cp311-win_amd64.whl", hash = "sha256:b76bedd166805480ab069612119ea636f5ab8f8771e640ae103e05a4aae3e417"}, + {file = "psycopg2_binary-2.9.9-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:8532fd6e6e2dc57bcb3bc90b079c60de896d2128c5d9d6f24a63875a95a088cf"}, + {file = "psycopg2_binary-2.9.9-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8f8544b092a29a6ddd72f3556a9fcf249ec412e10ad28be6a0c0d948924f2212"}, + {file = "psycopg2_binary-2.9.9-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2d423c8d8a3c82d08fe8af900ad5b613ce3632a1249fd6a223941d0735fce493"}, + {file = "psycopg2_binary-2.9.9-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2e5afae772c00980525f6d6ecf7cbca55676296b580c0e6abb407f15f3706996"}, + {file = "psycopg2_binary-2.9.9-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6e6f98446430fdf41bd36d4faa6cb409f5140c1c2cf58ce0bbdaf16af7d3f119"}, + {file = "psycopg2_binary-2.9.9-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:c77e3d1862452565875eb31bdb45ac62502feabbd53429fdc39a1cc341d681ba"}, + {file = "psycopg2_binary-2.9.9-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:cb16c65dcb648d0a43a2521f2f0a2300f40639f6f8c1ecbc662141e4e3e1ee07"}, + {file = "psycopg2_binary-2.9.9-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:911dda9c487075abd54e644ccdf5e5c16773470a6a5d3826fda76699410066fb"}, + {file = "psycopg2_binary-2.9.9-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:57fede879f08d23c85140a360c6a77709113efd1c993923c59fde17aa27599fe"}, + {file = "psycopg2_binary-2.9.9-cp37-cp37m-macosx_10_9_x86_64.whl", hash = 
"sha256:2293b001e319ab0d869d660a704942c9e2cce19745262a8aba2115ef41a0a42a"}, + {file = "psycopg2_binary-2.9.9-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:03ef7df18daf2c4c07e2695e8cfd5ee7f748a1d54d802330985a78d2a5a6dca9"}, + {file = "psycopg2_binary-2.9.9-cp37-cp37m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0a602ea5aff39bb9fac6308e9c9d82b9a35c2bf288e184a816002c9fae930b77"}, + {file = "psycopg2_binary-2.9.9-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8359bf4791968c5a78c56103702000105501adb557f3cf772b2c207284273984"}, + {file = "psycopg2_binary-2.9.9-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:275ff571376626195ab95a746e6a04c7df8ea34638b99fc11160de91f2fef503"}, + {file = "psycopg2_binary-2.9.9-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:f9b5571d33660d5009a8b3c25dc1db560206e2d2f89d3df1cb32d72c0d117d52"}, + {file = "psycopg2_binary-2.9.9-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:420f9bbf47a02616e8554e825208cb947969451978dceb77f95ad09c37791dae"}, + {file = "psycopg2_binary-2.9.9-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:4154ad09dac630a0f13f37b583eae260c6aa885d67dfbccb5b02c33f31a6d420"}, + {file = "psycopg2_binary-2.9.9-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:a148c5d507bb9b4f2030a2025c545fccb0e1ef317393eaba42e7eabd28eb6041"}, + {file = "psycopg2_binary-2.9.9-cp37-cp37m-win32.whl", hash = "sha256:68fc1f1ba168724771e38bee37d940d2865cb0f562380a1fb1ffb428b75cb692"}, + {file = "psycopg2_binary-2.9.9-cp37-cp37m-win_amd64.whl", hash = "sha256:281309265596e388ef483250db3640e5f414168c5a67e9c665cafce9492eda2f"}, + {file = "psycopg2_binary-2.9.9-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:60989127da422b74a04345096c10d416c2b41bd7bf2a380eb541059e4e999980"}, + {file = "psycopg2_binary-2.9.9-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:246b123cc54bb5361588acc54218c8c9fb73068bf227a4a531d8ed56fa3ca7d6"}, + {file = "psycopg2_binary-2.9.9-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:34eccd14566f8fe14b2b95bb13b11572f7c7d5c36da61caf414d23b91fcc5d94"}, + {file = "psycopg2_binary-2.9.9-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:18d0ef97766055fec15b5de2c06dd8e7654705ce3e5e5eed3b6651a1d2a9a152"}, + {file = "psycopg2_binary-2.9.9-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d3f82c171b4ccd83bbaf35aa05e44e690113bd4f3b7b6cc54d2219b132f3ae55"}, + {file = "psycopg2_binary-2.9.9-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ead20f7913a9c1e894aebe47cccf9dc834e1618b7aa96155d2091a626e59c972"}, + {file = "psycopg2_binary-2.9.9-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:ca49a8119c6cbd77375ae303b0cfd8c11f011abbbd64601167ecca18a87e7cdd"}, + {file = "psycopg2_binary-2.9.9-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:323ba25b92454adb36fa425dc5cf6f8f19f78948cbad2e7bc6cdf7b0d7982e59"}, + {file = "psycopg2_binary-2.9.9-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:1236ed0952fbd919c100bc839eaa4a39ebc397ed1c08a97fc45fee2a595aa1b3"}, + {file = "psycopg2_binary-2.9.9-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:729177eaf0aefca0994ce4cffe96ad3c75e377c7b6f4efa59ebf003b6d398716"}, + {file = "psycopg2_binary-2.9.9-cp38-cp38-win32.whl", hash = "sha256:804d99b24ad523a1fe18cc707bf741670332f7c7412e9d49cb5eab67e886b9b5"}, + {file = "psycopg2_binary-2.9.9-cp38-cp38-win_amd64.whl", hash = 
"sha256:a6cdcc3ede532f4a4b96000b6362099591ab4a3e913d70bcbac2b56c872446f7"}, + {file = "psycopg2_binary-2.9.9-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:72dffbd8b4194858d0941062a9766f8297e8868e1dd07a7b36212aaa90f49472"}, + {file = "psycopg2_binary-2.9.9-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:30dcc86377618a4c8f3b72418df92e77be4254d8f89f14b8e8f57d6d43603c0f"}, + {file = "psycopg2_binary-2.9.9-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:31a34c508c003a4347d389a9e6fcc2307cc2150eb516462a7a17512130de109e"}, + {file = "psycopg2_binary-2.9.9-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:15208be1c50b99203fe88d15695f22a5bed95ab3f84354c494bcb1d08557df67"}, + {file = "psycopg2_binary-2.9.9-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1873aade94b74715be2246321c8650cabf5a0d098a95bab81145ffffa4c13876"}, + {file = "psycopg2_binary-2.9.9-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3a58c98a7e9c021f357348867f537017057c2ed7f77337fd914d0bedb35dace7"}, + {file = "psycopg2_binary-2.9.9-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:4686818798f9194d03c9129a4d9a702d9e113a89cb03bffe08c6cf799e053291"}, + {file = "psycopg2_binary-2.9.9-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:ebdc36bea43063116f0486869652cb2ed7032dbc59fbcb4445c4862b5c1ecf7f"}, + {file = "psycopg2_binary-2.9.9-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:ca08decd2697fdea0aea364b370b1249d47336aec935f87b8bbfd7da5b2ee9c1"}, + {file = "psycopg2_binary-2.9.9-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ac05fb791acf5e1a3e39402641827780fe44d27e72567a000412c648a85ba860"}, + {file = "psycopg2_binary-2.9.9-cp39-cp39-win32.whl", hash = "sha256:9dba73be7305b399924709b91682299794887cbbd88e38226ed9f6712eabee90"}, + {file = "psycopg2_binary-2.9.9-cp39-cp39-win_amd64.whl", hash = "sha256:f7ae5d65ccfbebdfa761585228eb4d0df3a8b15cfb53bd953e713e09fbb12957"}, +] + [[package]] name = "ptyprocess" version = "0.7.0" @@ -5296,6 +5379,7 @@ files = [ {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69b023b2b4daa7548bcfbd4aa3da05b3a74b772db9e23b982788168117739938"}, {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:81e0b275a9ecc9c0c0c07b4b90ba548307583c125f54d5b6946cfee6360c733d"}, {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba336e390cd8e4d1739f42dfe9bb83a3cc2e80f567d8805e11b46f4a943f5515"}, + {file = "PyYAML-6.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:326c013efe8048858a6d312ddd31d56e468118ad4cdeda36c719bf5bb6192290"}, {file = "PyYAML-6.0.1-cp310-cp310-win32.whl", hash = "sha256:bd4af7373a854424dabd882decdc5579653d7868b8fb26dc7d0e99f823aa5924"}, {file = "PyYAML-6.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:fd1592b3fdf65fff2ad0004b5e363300ef59ced41c2e6b3a99d4089fa8c5435d"}, {file = "PyYAML-6.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6965a7bc3cf88e5a1c3bd2e0b5c22f8d677dc88a455344035f03399034eb3007"}, @@ -5303,8 +5387,15 @@ files = [ {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42f8152b8dbc4fe7d96729ec2b99c7097d656dc1213a3229ca5383f973a5ed6d"}, {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:062582fca9fabdd2c8b54a3ef1c978d786e0f6b3a1510e0ac93ef59e0ddae2bc"}, {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", 
hash = "sha256:d2b04aac4d386b172d5b9692e2d2da8de7bfb6c387fa4f801fbf6fb2e6ba4673"}, + {file = "PyYAML-6.0.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e7d73685e87afe9f3b36c799222440d6cf362062f78be1013661b00c5c6f678b"}, {file = "PyYAML-6.0.1-cp311-cp311-win32.whl", hash = "sha256:1635fd110e8d85d55237ab316b5b011de701ea0f29d07611174a1b42f1444741"}, {file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"}, + {file = "PyYAML-6.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28"}, + {file = "PyYAML-6.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40df9b996c2b73138957fe23a16a4f0ba614f4c0efce1e9406a184b6d07fa3a9"}, + {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c22bec3fbe2524cde73d7ada88f6566758a8f7227bfbf93a408a9d86bcc12a0"}, + {file = "PyYAML-6.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4"}, + {file = "PyYAML-6.0.1-cp312-cp312-win32.whl", hash = "sha256:d483d2cdf104e7c9fa60c544d92981f12ad66a457afae824d146093b8c294c54"}, + {file = "PyYAML-6.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:0d3304d8c0adc42be59c5f8a4d9e3d7379e6955ad754aa9d6ab7a398b59dd1df"}, {file = "PyYAML-6.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:50550eb667afee136e9a77d6dc71ae76a44df8b3e51e41b77f6de2932bfe0f47"}, {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1fe35611261b29bd1de0070f0b2f47cb6ff71fa6595c077e42bd0c419fa27b98"}, {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:704219a11b772aea0d8ecd7058d0082713c3562b4e271b849ad7dc4a5c90c13c"}, @@ -5321,6 +5412,7 @@ files = [ {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a0cd17c15d3bb3fa06978b4e8958dcdc6e0174ccea823003a106c7d4d7899ac5"}, {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:28c119d996beec18c05208a8bd78cbe4007878c6dd15091efb73a30e90539696"}, {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7e07cbde391ba96ab58e532ff4803f79c4129397514e1413a7dc761ccd755735"}, + {file = "PyYAML-6.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:49a183be227561de579b4a36efbb21b3eab9651dd81b1858589f796549873dd6"}, {file = "PyYAML-6.0.1-cp38-cp38-win32.whl", hash = "sha256:184c5108a2aca3c5b3d3bf9395d50893a7ab82a38004c8f61c258d4428e80206"}, {file = "PyYAML-6.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:1e2722cc9fbb45d9b87631ac70924c11d3a401b2d7f410cc0e3bbf249f2dca62"}, {file = "PyYAML-6.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9eb6caa9a297fc2c2fb8862bc5370d0303ddba53ba97e71f08023b6cd73d16a8"}, @@ -5328,6 +5420,7 @@ files = [ {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5773183b6446b2c99bb77e77595dd486303b4faab2b086e7b17bc6bef28865f6"}, {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b786eecbdf8499b9ca1d697215862083bd6d2a99965554781d0d8d1ad31e13a0"}, {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc1bf2925a1ecd43da378f4db9e4f799775d6367bdb94671027b73b393a7c42c"}, + {file = "PyYAML-6.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = 
"sha256:04ac92ad1925b2cff1db0cfebffb6ffc43457495c9b3c39d3fcae417d7125dc5"}, {file = "PyYAML-6.0.1-cp39-cp39-win32.whl", hash = "sha256:faca3bdcf85b2fc05d06ff3fbc1f83e1391b3e724afa3feba7d13eeab355484c"}, {file = "PyYAML-6.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:510c9deebc5c0225e8c96813043e62b680ba2f9c50a08d3724c7f28a747d1486"}, {file = "PyYAML-6.0.1.tar.gz", hash = "sha256:bfdf460b1736c775f2ba9f6a92bca30bc2095067b8a9d77876d1fad6cc3b4a43"}, @@ -6088,7 +6181,7 @@ files = [ ] [package.dependencies] -greenlet = {version = "!=0.4.17", markers = "platform_machine == \"win32\" or platform_machine == \"WIN32\" or platform_machine == \"AMD64\" or platform_machine == \"amd64\" or platform_machine == \"x86_64\" or platform_machine == \"ppc64le\" or platform_machine == \"aarch64\""} +greenlet = {version = "!=0.4.17", markers = "platform_machine == \"aarch64\" or platform_machine == \"ppc64le\" or platform_machine == \"x86_64\" or platform_machine == \"amd64\" or platform_machine == \"AMD64\" or platform_machine == \"win32\" or platform_machine == \"WIN32\""} typing-extensions = ">=4.2.0" [package.extras] @@ -6248,8 +6341,8 @@ packaging = ">=21.3" pandas = ">=0.25" patsy = ">=0.5.2" scipy = [ - {version = ">=1.3", markers = "(python_version > \"3.9\" or platform_system != \"Windows\" or platform_machine != \"x86\") and python_version < \"3.12\""}, - {version = ">=1.3,<1.9", markers = "(python_version == \"3.8\" or python_version == \"3.9\") and platform_system == \"Windows\" and platform_machine == \"x86\""}, + {version = ">=1.3", markers = "python_version > \"3.9\" and python_version < \"3.12\" or platform_system != \"Windows\" and python_version < \"3.12\" or platform_machine != \"x86\" and python_version < \"3.12\""}, + {version = ">=1.3,<1.9", markers = "python_version == \"3.8\" and platform_system == \"Windows\" and platform_machine == \"x86\" or python_version == \"3.9\" and platform_system == \"Windows\" and platform_machine == \"x86\""}, ] [package.extras] @@ -7119,4 +7212,4 @@ testing = ["coverage (>=5.0.3)", "zope.event", "zope.testing"] [metadata] lock-version = "2.0" python-versions = ">=3.8,<3.11" -content-hash = "ed25c76ba0aeea3d6fc6c59725c127160d13c12b527a3cf3900cb58db177750c" +content-hash = "44c47c0f926f2494ef43ed357af82aa10b2ce5d1c5a46197a594ed94ec1e8b6a" diff --git a/pyproject.toml b/pyproject.toml index 0c8318999..36a66722a 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -60,6 +60,7 @@ statsmodels = "^0.13.0" tweepy = "4.4" xarray = "^2022.6.0" xgboost = "^1.7.4" +psycopg2-binary = "^2.9.9" [tool.poetry.dev-dependencies] pylint = "^2.12.2"