Merge pull request #294 from vipyrsec/use-shared-logging-config
Used shared dragonfly logging configuration and logging middleware
AbooMinister25 authored Jul 29, 2024
2 parents 80b0752 + 46ceebd commit afffef9
Showing 7 changed files with 940 additions and 523 deletions.
4 changes: 4 additions & 0 deletions Dockerfile
@@ -3,6 +3,9 @@ FROM python:3.12-slim@sha256:6bab0b6977b98f24a1c65855a1592152f3c4268afd19adc4da3
RUN pip install -U pip setuptools wheel
RUN pip install pdm

RUN apt-get -y update
RUN apt-get -y install git

WORKDIR /app
COPY pyproject.toml pdm.lock ./
RUN mkdir __pypackages__ && pdm install --prod --no-lock --no-editable
@@ -33,6 +36,7 @@ COPY alembic/ alembic/
COPY alembic.ini ./
COPY src/ src/
COPY entrypoint.sh ./
COPY logging/ logging/
RUN chmod +x entrypoint.sh

CMD ["sh", "./entrypoint.sh"]
19 changes: 19 additions & 0 deletions logging/development.toml
@@ -0,0 +1,19 @@
[handlers.default]
level = "DEBUG"
class = "logging.StreamHandler"
formatter = "development"

[loggers.""]
handlers = ["default"]
level = "DEBUG"
propagate = true

[loggers."sqlalchemy.engine"]
handlers = ["default"]
level = "INFO"
propagate = true

[loggers."sqlalchemy.pool"]
handlers = ["default"]
level = "DEBUG"
propagate = true
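
For context, this file only declares handlers and loggers; the "development" formatter it references (and the "json" formatter used by the production config below) is expected to be supplied by the shared dragonfly-logging-config package. A minimal standard-library sketch of applying such a file, with a hypothetical stand-in formatter so that dictConfig can resolve the reference:

import logging.config
import tomllib

# Parse the TOML logging config shipped with the repository.
with open("logging/development.toml", "rb") as f:
    data = tomllib.load(f)

config = {
    "version": 1,
    "disable_existing_loggers": False,
    # Stand-in formatter for illustration only; the real "development" and
    # "json" formatters come from dragonfly-logging-config.
    "formatters": {"development": {"format": "%(asctime)s %(levelname)s %(name)s %(message)s"}},
    **data,  # merges the "handlers" and "loggers" tables from the TOML
}
logging.config.dictConfig(config)
logging.getLogger(__name__).debug("logging configured")
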
9 changes: 9 additions & 0 deletions logging/production.toml
@@ -0,0 +1,9 @@
[handlers.default]
level = "INFO"
class = "logging.StreamHandler"
formatter = "json"

[loggers.""]
handlers = ["default"]
level = "INFO"
propagate = true
1,309 changes: 888 additions & 421 deletions pdm.lock

Large diffs are not rendered by default.

1 change: 1 addition & 0 deletions pyproject.toml
@@ -23,6 +23,7 @@ dependencies = [
"httpx>=0.24.1",
"psycopg2-binary",
"pyjwt[crypto]==2.8.0",
"dragonfly-logging-config @ git+https://github.com/vipyrsec/dragonfly-logging-config",
]
dynamic = ["version"]

2 changes: 1 addition & 1 deletion src/mainframe/constants.py
@@ -34,7 +34,7 @@ class Mainframe(EnvConfig):

job_timeout: int = 60 * 2

enable_json_logging: bool = False
log_config_file: str = "logging/development.toml"


mainframe_settings = Mainframe() # pyright: ignore
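
The boolean enable_json_logging flag is gone; log_config_file now points at one of the TOML files above, defaulting to the development config. Since Mainframe extends EnvConfig (a pydantic-settings class), the path can presumably be overridden through the environment in production. A standalone sketch of that pattern follows; it is not the project's actual settings class, and the exact environment variable name (including any prefix EnvConfig applies) is an assumption:

import os

from pydantic_settings import BaseSettings


class Settings(BaseSettings):
    # Field name mirrors the diff; the surrounding class is illustrative only.
    log_config_file: str = "logging/development.toml"


os.environ["LOG_CONFIG_FILE"] = "logging/production.toml"  # hypothetical variable name
print(Settings().log_config_file)  # -> "logging/production.toml"
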
119 changes: 18 additions & 101 deletions src/mainframe/server.py
@@ -1,17 +1,18 @@
import logging
import time
import logging.config
import tomllib
from contextlib import asynccontextmanager
from typing import Awaitable, Callable
from typing import Any

import httpx
import sentry_sdk
import structlog
from asgi_correlation_id import CorrelationIdMiddleware
from asgi_correlation_id.context import correlation_id
from fastapi import Depends, FastAPI, Request, Response
from asgi_correlation_id import CorrelationIdMiddleware, correlation_id
from fastapi import Depends, FastAPI
from letsbuilda.pypi import PyPIServices
from sentry_sdk.integrations.logging import LoggingIntegration
from structlog_sentry import SentryProcessor
from logging_config import configure_logger
from logging_config.middleware import LoggingMiddleware

from mainframe.constants import GIT_SHA, Sentry, mainframe_settings
from mainframe.dependencies import validate_token, validate_token_override
@@ -22,59 +23,18 @@
from . import __version__


def configure_logger():
# Define the shared processors, regardless of whether API is running in prod or dev.
shared_processors: list[structlog.types.Processor] = [
structlog.contextvars.merge_contextvars,
structlog.stdlib.add_log_level,
structlog.stdlib.PositionalArgumentsFormatter(),
structlog.stdlib.ExtraAdder(),
SentryProcessor(event_level=logging.ERROR, level=logging.DEBUG),
structlog.processors.format_exc_info,
structlog.processors.TimeStamper(fmt="iso"),
structlog.processors.StackInfoRenderer(),
structlog.processors.CallsiteParameterAdder(
{
structlog.processors.CallsiteParameter.FILENAME,
structlog.processors.CallsiteParameter.FUNC_NAME,
structlog.processors.CallsiteParameter.MODULE,
structlog.processors.CallsiteParameter.LINENO,
}
),
]

structlog.configure(
processors=shared_processors + [structlog.stdlib.ProcessorFormatter.wrap_for_formatter],
logger_factory=structlog.stdlib.LoggerFactory(),
cache_logger_on_first_use=True,
)

if mainframe_settings.enable_json_logging is True:
log_renderer = structlog.processors.JSONRenderer()
else:
log_renderer = structlog.dev.ConsoleRenderer(colors=False)

formatter = structlog.stdlib.ProcessorFormatter(
foreign_pre_chain=shared_processors,
processors=[
structlog.stdlib.ProcessorFormatter.remove_processors_meta,
log_renderer,
],
)
def add_correlation(logger: logging.Logger, method_name: str, event_dict: dict[str, Any]) -> dict[str, Any]:
"""Add request id to log message."""
if request_id := correlation_id.get():
event_dict["request_id"] = request_id
return event_dict

handler = logging.StreamHandler()
handler.setFormatter(formatter)
root_logger = logging.getLogger()
root_logger.addHandler(handler)
root_logger.setLevel(logging.DEBUG)

# Disable uvicorn's logging
for _log in ["uvicorn", "uvicorn.error"]:
logging.getLogger(_log).handlers.clear()
logging.getLogger(_log).propagate = True
def setup_logging():
with open(mainframe_settings.log_config_file, "rb") as f:
data = tomllib.load(f)

logging.getLogger("uvicorn.access").handlers.clear()
logging.getLogger("uvicorn.access").propagate = False
configure_logger(data, [add_correlation, SentryProcessor(event_level=logging.ERROR, level=logging.DEBUG)])


sentry_sdk.init(
Expand All @@ -100,7 +60,7 @@ async def lifespan(app_: FastAPI):
app_.state.http_session = http_client
app_.state.pypi_client = pypi_client

configure_logger()
setup_logging()

yield

@@ -116,51 +76,8 @@ async def lifespan(app_: FastAPI):
app.dependency_overrides[validate_token] = validate_token_override


@app.middleware("http")
async def logging_middleware(request: Request, call_next: Callable[[Request], Awaitable[Response]]) -> Response:
structlog.contextvars.clear_contextvars()

request_id = correlation_id.get() or ""
url = request.url
client_host = request.client.host if request.client else ""
client_port = request.client.port if request.client else ""
structlog.contextvars.bind_contextvars(
request_id=request_id,
url=url,
network={"client": {"ip": client_host, "port": client_port}},
)

start_time = time.perf_counter_ns()
logger: structlog.stdlib.BoundLogger = structlog.get_logger()

response: Response = Response(status_code=500)
try:
response = await call_next(request)
except Exception:
logger.exception("Uncaught exception")
raise
finally:
process_time = time.perf_counter_ns() - start_time
status_code = response.status_code
http_method = request.method
http_version = request.scope["http_version"]
await logger.ainfo(
f'{client_host}:{client_port} - "{http_method} {url} HTTP/{http_version}" {status_code}',
http={
"url": str(url),
"status_code": status_code,
"method": http_method,
"request_id": request_id,
"version": http_version,
},
duration=process_time,
tag="request",
)

return response


app.add_middleware(CorrelationIdMiddleware)
app.add_middleware(LoggingMiddleware)


@app.get("/", tags=["metadata"])
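
As a usage note on the new wiring above: configure_logger from dragonfly-logging-config receives the parsed TOML plus a list of structlog processors, one of which is the new add_correlation hook. A standalone sketch of what that hook does, assuming asgi_correlation_id is installed; the request id value below is made up, while in the app CorrelationIdMiddleware sets it per request:

import logging
from typing import Any

from asgi_correlation_id import correlation_id


def add_correlation(logger: logging.Logger, method_name: str, event_dict: dict[str, Any]) -> dict[str, Any]:
    """Add request id to log message."""
    if request_id := correlation_id.get():
        event_dict["request_id"] = request_id
    return event_dict


correlation_id.set("6e7f0f0b")  # normally set per request by CorrelationIdMiddleware
print(add_correlation(logging.getLogger(__name__), "info", {"event": "ping"}))
# -> {'event': 'ping', 'request_id': '6e7f0f0b'}
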
