This repository has been archived by the owner on Sep 18, 2024. It is now read-only.

Merge branch 'main' into chouinar/16-actual-impl
chouinar committed Jun 25, 2024
2 parents 354654c + 4ed48da commit 2edec64
Showing 36 changed files with 5,556 additions and 4,584 deletions.
73 changes: 73 additions & 0 deletions .github/workflows/ci-frontend-a11y.yml
@@ -0,0 +1,73 @@
name: pa11y tests

on:
  pull_request:
    paths:
      - frontend/**
      - .github/workflows/ci-frontend-a11y.yml

jobs:
  build:
    name: Pa11y-ci tests
    runs-on: ubuntu-latest
    defaults:
      run:
        working-directory: ./frontend

    env:
      NODE_VERSION: 20
      LOCKFILE_PATH: ./frontend/package-lock.json
      PACKAGE_MANAGER: npm

    steps:
      - name: Checkout source
        uses: actions/checkout@v4

      - name: Set up Node.js
        uses: actions/setup-node@v4
        with:
          node-version: ${{ env.NODE_VERSION }}
          cache-dependency-path: ${{ env.LOCKFILE_PATH }}
          cache: ${{ env.PACKAGE_MANAGER }}

      - name: Install dependencies
        run: npm ci

      - name: Create screenshots directory
        run: mkdir -p screenshots-output

      - name: Build project
        run: npm run build

      - name: Start server and log output
        run: npm run start &

      - name: Start API Server for search results
        run: |
          cd ../api
          make init db-seed-local start &
          cd ../frontend
          # Ensure the API wait script is executable
          chmod +x ../api/bin/wait-for-api.sh
          ../api/bin/wait-for-api.sh
        shell: bash

      - name: Wait for frontend to be ready
        run: |
          # Ensure the server wait script is executable
          chmod +x ./bin/wait-for-frontend.sh
          ./bin/wait-for-frontend.sh

      - name: Run pa11y-ci
        run: |
          set -e  # Ensure the script fails if any command fails
          npm run test:pa11y-desktop
          npm run test:pa11y-mobile
          echo "pa11y-ci tests finished."

      - name: Upload screenshots to artifacts
        if: always()
        uses: actions/upload-artifact@v3
        with:
          name: screenshots
          path: ./frontend/screenshots-output
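
Both wait steps above rely on small polling scripts checked into the repo. As a rough sketch of the pattern they implement (the actual scripts are shell; the URL, port, and timeout below are hypothetical):

```python
# A minimal readiness poll, assuming the service speaks HTTP on the given URL.
# This mirrors what api/bin/wait-for-api.sh and bin/wait-for-frontend.sh do;
# it is an illustration, not the repo's actual script.
import time
import urllib.request


def wait_for(url: str, timeout_seconds: int = 120) -> None:
    """Poll `url` until it responds, raising TimeoutError after `timeout_seconds`."""
    deadline = time.monotonic() + timeout_seconds
    while time.monotonic() < deadline:
        try:
            with urllib.request.urlopen(url) as response:
                if response.status < 500:
                    return  # service is up
        except OSError:
            pass  # not accepting connections yet; keep polling
        time.sleep(2)
    raise TimeoutError(f"{url} did not become ready within {timeout_seconds}s")


if __name__ == "__main__":
    wait_for("http://localhost:3000")  # hypothetical frontend port
```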
3 changes: 3 additions & 0 deletions analytics/config.py
@@ -8,6 +8,7 @@
For more information visit: https://www.dynaconf.com/
"""

from dynaconf import Dynaconf, Validator, ValidationError

settings = Dynaconf(
@@ -16,6 +17,8 @@
    # looks for config vars in the following files
    # with vars in .secrets.toml overriding vars in settings.toml
    settings_files=["settings.toml", ".secrets.toml"],
    # merge the settings found in all files
    merge_enabled=True,
    # add validators for our required config vars
    validators=[
        Validator("SLACK_BOT_TOKEN", must_exist=True),
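
For context on the new `merge_enabled` flag: without it, a nested table defined in both settings files is replaced wholesale by whichever file loads last; with it, the tables are merged key by key. A minimal sketch of the behavior, assuming hypothetical nested keys (this repo's actual settings are flat `POSTGRES_*` values):

```python
# Sketch: Dynaconf with merging enabled across the two settings files.
from dynaconf import Dynaconf

settings = Dynaconf(
    settings_files=["settings.toml", ".secrets.toml"],
    merge_enabled=True,
)

# Hypothetical contents:
#   settings.toml  ->  [database] host = "0.0.0.0"
#   .secrets.toml  ->  [database] password = "hunter2"
# With merge_enabled=True both keys survive on the merged object:
#   settings.DATABASE.host     -> "0.0.0.0"
#   settings.DATABASE.password -> "hunter2"
# Without merging, the [database] table from .secrets.toml would replace the
# earlier one entirely, dropping `host`.
```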
1,159 changes: 660 additions & 499 deletions analytics/poetry.lock

Large diffs are not rendered by default.

1 change: 1 addition & 0 deletions analytics/pyproject.toml
@@ -20,6 +20,7 @@ pydantic = "^2.0.3"
python = "^3.11"
slack-sdk = "^3.23.0"
typer = { extras = ["all"], version = "^0.9.0" }
sqlalchemy = "^2.0.30"

[tool.poetry.group.dev.dependencies]
black = "^23.7.0"
4 changes: 4 additions & 0 deletions analytics/settings.toml
@@ -0,0 +1,4 @@
POSTGRES_NAME = "app"
POSTGRES_HOST = "0.0.0.0"
POSTGRES_USER = "app"
POSTGRES_PORT = 5432
29 changes: 28 additions & 1 deletion analytics/src/analytics/cli.py
@@ -1,19 +1,23 @@
# pylint: disable=C0415
"""Expose a series of CLI entrypoints for the analytics package."""
import logging
from pathlib import Path
from typing import Annotated, Optional

import typer
from slack_sdk import WebClient
from sqlalchemy import text

from analytics.datasets.deliverable_tasks import DeliverableTasks
from analytics.datasets.sprint_board import SprintBoard
from analytics.integrations import github, slack
from analytics.integrations import db, github, slack
from analytics.metrics.base import BaseMetric, Unit
from analytics.metrics.burndown import SprintBurndown
from analytics.metrics.burnup import SprintBurnup
from analytics.metrics.percent_complete import DeliverablePercentComplete

logger = logging.getLogger(__name__)

# fmt: off
# Instantiate typer options with help text for the commands below
SPRINT_FILE_ARG = typer.Option(help="Path to file with exported sprint data")
@@ -122,6 +126,29 @@ def calculate_sprint_burnup(
    )


@export_app.command(name="test_connection")
def test_connection() -> None:
    """Test function that ensures the DB connection works."""
    engine = db.get_db()
    # connection method from sqlalchemy
    connection = engine.connect()

    # Test INSERT INTO action
    result = connection.execute(
        text(
            "INSERT INTO audit_log (topic, timestamp, end_timestamp, user_id, details) "
            "VALUES ('test', '2024-06-11 10:41:15', '2024-06-11 10:54:15', 87654, 'test from command');",
        ),
    )
    # Test SELECT action
    result = connection.execute(text("SELECT * FROM audit_log WHERE user_id=87654;"))
    for row in result:
        print(row)
    # commit the transaction to the db
    connection.commit()
    result.close()


@metrics_app.command(name="deliverable_percent_complete")
def calculate_deliverable_percent_complete(
    sprint_file: Annotated[str, SPRINT_FILE_ARG],
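
One design note on `test_connection`: it opens a raw connection and commits by hand. If this grows beyond a smoke test, the idiomatic SQLAlchemy 2.x shape is a context manager that commits or rolls back automatically; a sketch of the same check in that style (the `audit_log` columns are taken from the command above):

```python
# Sketch: the same DB smoke test using engine.begin(), which opens a
# connection, commits on success, and rolls back on error.
from sqlalchemy import text

from analytics.integrations import db

engine = db.get_db()
with engine.begin() as connection:
    connection.execute(
        text(
            "INSERT INTO audit_log (topic, timestamp, end_timestamp, user_id, details) "
            "VALUES ('test', '2024-06-11 10:41:15', '2024-06-11 10:54:15', 87654, 'test from command');"
        )
    )
    result = connection.execute(text("SELECT * FROM audit_log WHERE user_id = 87654;"))
    for row in result:
        print(row)
```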
79 changes: 79 additions & 0 deletions analytics/src/analytics/datasets/base.py
@@ -1,8 +1,11 @@
# ruff: noqa: E501
# pylint: disable=C0301
"""Base class for all datasets which provides an interface for metrics."""
from pathlib import Path
from typing import Self

import pandas as pd
from sqlalchemy import Engine


class BaseDataset:
@@ -22,6 +25,82 @@ def from_dict(cls, data: list[dict]) -> Self:
        """Load the dataset from a list of python dictionaries representing records."""
        return cls(df=pd.DataFrame(data))

    def to_sql(
        self,
        output_table: str,
        engine: Engine,
        *,
        replace_table: bool = True,
    ) -> None:
        """
        Write the contents of a pandas DataFrame to a SQL table.

        This function takes a pandas DataFrame (`self.df`), an output table name
        (`output_table`), and a SQLAlchemy Engine object (`engine`) as required
        arguments. It optionally accepts a `replace_table` argument (default: True)
        that determines how existing data in the target table is handled.

        **Parameters:**

        * self (required): The instance of the class containing the DataFrame
          (`self.df`) to be written to the database.
        * output_table (str, required): The name of the table in the database where
          the data will be inserted.
        * engine (sqlalchemy.engine.Engine, required): A SQLAlchemy Engine object
          representing the connection to the database.
        * replace_table (bool, default=True):
            * If True (default), the function completely replaces the contents of
              the existing table with the data from the DataFrame (if_exists="replace").
            * If False, the data from the DataFrame is appended to the existing
              table (if_exists="append").

        **Returns:**

        * None

        **Raises:**

        * Potential exceptions raised by the underlying pandas.to_sql function,
          such as database connection errors or errors related to data type
          mismatches.
        """
        if replace_table:
            self.df.to_sql(output_table, engine, if_exists="replace", index=False)
        else:
            self.df.to_sql(output_table, engine, if_exists="append", index=False)

    @classmethod
    def from_sql(
        cls,
        source_table: str,
        engine: Engine,
    ) -> Self:
        """
        Read data from a SQL table into a pandas DataFrame and create an instance of the current class.

        This function takes a source table name (`source_table`) and a SQLAlchemy
        Engine object (`engine`) as required arguments. It uses pandas.read_sql to
        retrieve the data from the database and then creates a new instance of the
        current class (`cls`) initialized with the resulting DataFrame (`df`).

        **Parameters:**

        * cls (class, required): The class that will be instantiated with the data
          from the SQL table. This allows for creating objects of the same type as
          the class the method is called on.
        * source_table (str, required): The name of the table in the database from
          which the data will be read.
        * engine (sqlalchemy.engine.Engine, required): A SQLAlchemy Engine object
          representing the connection to the database.

        **Returns:**

        * Self: A new instance of the current class (`cls`) initialized with the
          DataFrame containing the data from the SQL table.

        **Raises:**

        * Potential exceptions raised by the underlying pandas.read_sql function,
          such as database connection errors or errors related to data type
          mismatches.
        """
        return cls(df=pd.read_sql(source_table, engine))

    def to_csv(
        self,
        output_file: Path,
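
Taken together, `to_sql` and `from_sql` give every dataset a Postgres round trip. A minimal usage sketch (the table name and records are made up; the engine comes from the `db.get_db()` helper added below):

```python
# Sketch: persist a dataset and read it back. Assumes the Postgres settings in
# analytics/settings.toml point at a running database.
from analytics.datasets.base import BaseDataset
from analytics.integrations import db

engine = db.get_db()

dataset = BaseDataset.from_dict([{"issue": 1, "points": 3}, {"issue": 2, "points": 5}])
dataset.to_sql("example_dataset", engine)  # replaces the table by default
dataset.to_sql("example_dataset", engine, replace_table=False)  # appends instead

round_tripped = BaseDataset.from_sql("example_dataset", engine)
print(round_tripped.df)
```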
29 changes: 29 additions & 0 deletions analytics/src/analytics/integrations/db.py
@@ -0,0 +1,29 @@
# pylint: disable=invalid-name, line-too-long
"""Get a connection to the database using a SQLAlchemy engine object."""

from sqlalchemy import Engine, create_engine

from config import settings


# The variables used in the connection URL are set in settings.toml and
# .secrets.toml. They can be overridden with environment variables carrying the
# custom prefix defined in config.py ("ANALYTICS"),
# e.g. `export ANALYTICS_POSTGRES_USER=new_usr`.
# Docs: https://www.dynaconf.com/envvars/
def get_db() -> Engine:
    """
    Get a connection to the database using a SQLAlchemy engine object.

    This function retrieves the database connection URL from the configuration
    and creates a SQLAlchemy engine object.

    Returns
    -------
    sqlalchemy.engine.Engine
        A SQLAlchemy engine object representing the connection to the database.
    """
    return create_engine(
        f"postgresql+psycopg://{settings.postgres_user}:{settings.postgres_password}@{settings.postgres_host}:{settings.postgres_port}",
        pool_pre_ping=True,
        hide_parameters=True,
    )
18 changes: 9 additions & 9 deletions api/Makefile
@@ -38,7 +38,7 @@ endif
ifeq "$(PY_RUN_APPROACH)" "local"
PY_RUN_CMD := poetry run
else
-PY_RUN_CMD := docker-compose run $(DOCKER_EXEC_ARGS) --rm $(APP_NAME) poetry run
+PY_RUN_CMD := docker compose run $(DOCKER_EXEC_ARGS) --rm $(APP_NAME) poetry run
endif

FLASK_CMD := $(PY_RUN_CMD) flask
@@ -89,24 +89,24 @@ setup-local:
##################################################

build:
-	docker-compose build
+	docker compose build

start:
-	docker-compose up --detach
+	docker compose up --detach

start-debug:
	docker compose -f docker-compose.yml -f docker-compose.debug.yml up --detach

run-logs: start
-	docker-compose logs --follow --no-color $(APP_NAME)
+	docker compose logs --follow --no-color $(APP_NAME)

init: build init-db init-opensearch

clean-volumes: ## Remove project docker volumes (which includes the DB state)
-	docker-compose down --volumes
+	docker compose down --volumes

stop:
-	docker-compose down
+	docker compose down

check: format-check lint db-check-migrations test

@@ -123,7 +123,7 @@ check: format-check lint db-check-migrations test
init-db: start-db setup-postgres-db db-migrate

start-db:
-	docker-compose up --detach grants-db
+	docker compose up --detach grants-db
	./bin/wait-for-local-db.sh

## Destroy current DB, setup new one
@@ -187,8 +187,8 @@ init-opensearch: start-opensearch
# TODO - in subsequent PRs, we'll add more to this command to setup the search index locally

start-opensearch:
-	docker-compose up --detach opensearch-node
-	docker-compose up --detach opensearch-dashboards
+	docker compose up --detach opensearch-node
+	docker compose up --detach opensearch-dashboards
	./bin/wait-for-local-opensearch.sh
2 changes: 1 addition & 1 deletion api/README.md
@@ -32,7 +32,7 @@ root
│ └── setup.cfg Python config for tools that don't support pyproject.toml yet
│ └── Dockerfile Docker build file for project
-└── docker-compose.yml Config file for docker-compose tool, used for local development
+└── docker-compose.yml Config file for docker compose tool, used for local development
```

## Local Development
3 changes: 0 additions & 3 deletions api/docker-compose.debug.yml
@@ -1,8 +1,5 @@
version: "3.8"

# run with `docker compose -f`
# combines ports and env vars with the main docker-compose.yml main-app service

services:
  grants-api:
    command: [
3 changes: 0 additions & 3 deletions api/docker-compose.yml
@@ -1,7 +1,4 @@
-version: '3'
-
services:

  grants-db:
    image: postgres:15-alpine
    container_name: grants-db
2 changes: 1 addition & 1 deletion api/local.env
@@ -1,5 +1,5 @@
# Local environment variables
-# Used by docker-compose and it can be loaded
+# Used by docker compose and it can be loaded
# by calling load_local_env_vars() from api/src/util/local.py

ENVIRONMENT=local
2 changes: 0 additions & 2 deletions docker-compose.yml
@@ -1,5 +1,3 @@
-version: '3'
-
include:
  - path: ./api/docker-compose.yml
  - path: ./frontend/docker-compose.yml
4 changes: 4 additions & 0 deletions frontend/.gitignore
@@ -51,3 +51,7 @@ npm-debug.log*
/playwright-report/
/blob-report/
/playwright/.cache/

# pa11y
screenshots-output/*
pa11y_output.txt
