refactor: version 1
We start a refactor of the plugin to add better support for running
tests through Austin, and to provide a more flexible API.
P403n1x87 committed Nov 18, 2023
1 parent 3c8abcf commit 0e8f28d
Showing 5 changed files with 127 additions and 169 deletions.
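
For orientation, the new API this refactor moves toward centers on a single austin marker, registered in plugin.py below as austin(max_cpu, max_time, max_memory, function, file, line) (its help text is still a TODO in this commit). A rough sketch of how a marked test might eventually look — the argument values and the fibonacci helper are hypothetical, not taken from the diff:

import pytest


def fibonacci(n):
    return 1 if n in (0, 1) else fibonacci(n - 1) + fibonacci(n - 2)


# Hypothetical usage of the austin marker registered by this commit; the
# option names come from the marker registration in plugin.py, but the
# accepted value formats are still a TODO there.
@pytest.mark.austin(max_time="100 ms", function="fibonacci")
def test_fibonacci_is_fast():
    fibonacci(20)

Items carrying the marker are intercepted by the new pytest_runtest_protocol hook and run in a subprocess under the austin binary (austin -Pb) instead of in-process.
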
13 changes: 6 additions & 7 deletions pyproject.toml
@@ -36,27 +36,26 @@ classifiers = [
"Framework :: Pytest",
"Intended Audience :: Developers",
"License :: OSI Approved :: GNU General Public License v3 or later (GPLv3+)",
"Programming Language :: Python :: 3.6",
"Programming Language :: Python :: 3.7",
"Programming Language :: Python :: 3.8",
"Programming Language :: Python :: 3.9",
"Programming Language :: Python :: 3.10",
"Programming Language :: Python :: 3.11",
"Programming Language :: Python :: 3.12",
]
packages = [
{ include = "pytest_austin" },
]
packages = [{ include = "pytest_austin" }]

[tool.poetry.plugins.pytest11]
cool_plugin = "pytest_austin.plugin"

[tool.poetry.dependencies]
python = "^3.6"
python = "^3.8"
austin-python = "^0.1.0"
dataclasses = "*"
psutil = ">=5.7.0"
ansimarkup = "^1.4.0"

[tool.poetry.dev-dependencies]
coverage = {extras = ["toml"], version = "5.2.1"}
coverage = { extras = ["toml"], version = "5.2.1" }
pytest = ">=5.4.2"
pytest-cov = ">=2.8.1"
nox = "^2020.5.24"
8 changes: 4 additions & 4 deletions pytest_austin/__init__.py
@@ -1,17 +1,17 @@
import os
from datetime import timedelta as td
from functools import lru_cache
import os
from threading import Event
from time import time
from typing import Any, Dict, Iterator, List, Optional, TextIO

from austin.format.pprof import PProf
from austin.format.speedscope import Speedscope
from austin.simple import SimpleAustin
from austin.stats import AustinStats, Frame, FrameStats, InvalidSample, Sample
from austin.threads import ThreadedAustin
from psutil import Process
import pytest_austin.markers as _markers

import pytest_austin.markers as _markers

Microseconds = int

@@ -43,7 +43,7 @@ def _parse_time(timedelta: Any, total_test_time: Microseconds) -> Microseconds:
    raise ValueError(f"Invalid time delta type {type(timedelta)}")


class PyTestAustin(ThreadedAustin):
class PyTestAustin(SimpleAustin):
"""pytest implementation of Austin."""

def __init__(self, *args: Any, **kwargs: Any) -> None:
69 changes: 69 additions & 0 deletions pytest_austin/marker.py
@@ -0,0 +1,69 @@
import ast
import os
import subprocess
import sys
import time
from importlib._bootstrap_external import _code_to_timestamp_pyc as code_to_pyc
from pathlib import Path
from tempfile import NamedTemporaryFile


def dump_code_to_file(code, file):
    file.write(code_to_pyc(code, time.time(), len(code.co_code)))
    file.flush()


class FunctionDefFinder(ast.NodeVisitor):
    """AST visitor that reduces a module to the body of a named function."""

    def __init__(self, func_name):
        super(FunctionDefFinder, self).__init__()
        self.func_name = func_name
        self._body = None

    def generic_visit(self, node):
        return self._body or super(FunctionDefFinder, self).generic_visit(node)

    def visit_FunctionDef(self, node):
        if node.name == self.func_name:
            self._body = node.body

    def find(self, file):
        with open(file) as f:
            t = ast.parse(f.read())
        self.visit(t)
        t.body = self._body
        return t


def invoke_austin(*args, **kwargs):
    timeout = kwargs.pop("timeout", None)
    close_fds = sys.platform != "win32"
    subp = subprocess.Popen(
        ("austin", "-Pb", *args),
        stdout=subprocess.PIPE,
        stderr=subprocess.PIPE,
        close_fds=close_fds,
        **kwargs,
    )

    try:
        stdout, stderr = subp.communicate(timeout=timeout)
    except subprocess.TimeoutExpired:
        subp.terminate()
        stdout, stderr = subp.communicate()

    # TODO: Check no errors
    # TODO: Convert stdout to Austin stats with MojoFile and return the stats
    return stdout


def austin_marker_handler(item):
    file, _, func = item.location

    # Override environment variables for the subprocess
    env = os.environ.copy()
    pythonpath = os.getenv("PYTHONPATH", None)
    cwd = str(Path.cwd().resolve())
    env["PYTHONPATH"] = os.pathsep.join((cwd, pythonpath)) if pythonpath is not None else cwd

    with NamedTemporaryFile(mode="wb", suffix=".pyc") as fp:
        # Compile just the marked test function and dump it to a temporary .pyc
        dump_code_to_file(compile(FunctionDefFinder(func).find(file), file, "exec"), fp.file)

        def _subprocess_wrapper():
            stats = invoke_austin(sys.executable, fp.name, env=env)

            # TODO: Query stats according to marker options

        return _subprocess_wrapper()
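
To see how the helpers in this new module compose (an illustrative sketch only — the module path and test name below are hypothetical placeholders, not part of the commit): FunctionDefFinder rewrites a module's AST so that its body is just the body of one named function, compile() plus dump_code_to_file turn that AST into a timestamped .pyc, and invoke_austin executes it under austin -Pb in a subprocess.

import sys
from tempfile import NamedTemporaryFile

from pytest_austin.marker import FunctionDefFinder, dump_code_to_file, invoke_austin

# Extract the body of a single test function and profile it in isolation.
# "tests/test_example.py" and "test_something" are placeholder names.
tree = FunctionDefFinder("test_something").find("tests/test_example.py")
code = compile(tree, "tests/test_example.py", "exec")

with NamedTemporaryFile(mode="wb", suffix=".pyc") as fp:
    dump_code_to_file(code, fp.file)
    # Runs: austin -Pb <python> <temporary .pyc>; turning the output into
    # Austin stats is still a TODO in this commit.
    output = invoke_austin(sys.executable, fp.name)

This mirrors what austin_marker_handler does for a pytest item, minus the PYTHONPATH handling.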


101 changes: 44 additions & 57 deletions pytest_austin/plugin.py
@@ -1,7 +1,12 @@
from austin import AustinTerminated
from pytest import Function, hookimpl, Module
from functools import partial

import pytest
from _pytest.runner import call_and_report
from _pytest.runner import pytest_runtest_protocol as default_pytest_runtest_protocol

from pytest_austin import PyTestAustin
import pytest_austin.markers as markers
from pytest_austin.marker import austin_marker_handler


def pytest_addoption(parser, pluginmanager) -> None:
@@ -55,23 +60,12 @@ def pytest_addoption(parser, pluginmanager) -> None:
def pytest_configure(config) -> None:
"""Configure pytest-austin."""
# Register all markers
for _ in dir(markers):
_ = getattr(markers, _)

if not callable(_):
continue

try:
args = _.__code__.co_varnames
if not args or args[0] != "mark":
continue
except AttributeError:
# We cannot get the argument names, so not a marker
continue

config.addinivalue_line(
"markers", f"{_.__name__}({', '.join(args[1:])}):{_.__doc__}"
)
config.addinivalue_line(
"markers",
"""austin(max_cpu, max_time, max_memory, function, file, line):
TODO
""",
)

if config.option.steal_mojo:
# No mojo :(
@@ -91,52 +85,45 @@ def pytest_configure(config) -> None:
    config.pluginmanager.register(pytest_austin, "austin")


def pytest_sessionstart(session) -> None:
    """Start Austin if we have mojo."""
    pytest_austin = session.config.pluginmanager.getplugin("austin")
    if not pytest_austin:
        return

    pytest_austin.start()
    pytest_austin.wait_ready(1)

@pytest.hookimpl(tryfirst=True)
def pytest_runtest_protocol(item):
    if item.get_closest_marker("skip"):
        return default_pytest_runtest_protocol(item, None)

def pytest_runtest_setup(item) -> None:
    """Register tests and checks with pytest-austin."""
    pytest_austin = item.config.pluginmanager.getplugin("austin")
    if not pytest_austin:
        return
    skipif = item.get_closest_marker("skipif")
    if skipif and skipif.args[0]:
        return default_pytest_runtest_protocol(item, None)

    if pytest_austin.is_running():
        if isinstance(item, Function) and isinstance(item.parent, Module):
            function, module = item.name, item.parent.name
            pytest_austin.register_test(
                function, module, item.iter_markers(),
            )
    marker = item.get_closest_marker("austin")
    if marker:
        ihook = item.ihook
        base_name = item.nodeid

        nodeid = base_name

@hookimpl(hookwrapper=True)
def pytest_runtestloop(session):
    """Run all checks at the end and set the exit status."""
    yield

    # This runs effectively at the end of the session
    pytest_austin = session.config.pluginmanager.getplugin("austin")
    if not pytest_austin:
        return
        # Start
        ihook.pytest_runtest_logstart(nodeid=nodeid, location=item.location)

    if pytest_austin.is_running():
        pytest_austin.terminate(wait=True)
        # Setup
        report = call_and_report(item, "setup", log=False)
        report.nodeid = nodeid
        ihook.pytest_runtest_logreport(report=report)

    try:
        pytest_austin.join()
    except AustinTerminated:
        pass
        # Call
        item.runtest = partial(austin_marker_handler, item)
        report = call_and_report(item, "call", log=False)
        report.nodeid = nodeid
        ihook.pytest_runtest_logreport(report=report)

    session.testsfailed += pytest_austin.check_tests()
        # Teardown
        report = call_and_report(item, "teardown", log=False, nextitem=None)
        report.nodeid = nodeid
        ihook.pytest_runtest_logreport(report=report)

    pytest_austin.dump()
        # Finish
        ihook.pytest_runtest_logfinish(nodeid=nodeid, location=item.location)

        return True
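
A note on the hook above: pytest_runtest_protocol is a firstresult hook, so returning a non-None value (the final return True) tells pytest that setup, call, and teardown for the item have been fully handled and the default protocol must not run, while returning None falls through to the remaining implementations. A simplified sketch of that control flow, not code from this commit:

import pytest
from _pytest.runner import pytest_runtest_protocol as default_pytest_runtest_protocol


@pytest.hookimpl(tryfirst=True)
def pytest_runtest_protocol(item):
    # Hypothetical minimal override: take over only items that carry the
    # austin marker and delegate everything else to the default protocol.
    if item.get_closest_marker("austin") is None:
        return default_pytest_runtest_protocol(item, None)
    # ... run setup/call/teardown via call_and_report, as in the diff above ...
    return True  # protocol for this item has been handled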

def pytest_terminal_summary(terminalreporter, exitstatus, config) -> None:
"""Report Austin statistics if we had mojo."""
105 changes: 4 additions & 101 deletions test/test_pytest_austin.py
@@ -1,22 +1,9 @@
from datetime import timedelta as td
import os
import os.path
from textwrap import dedent

from pytest_austin import _parse_time


def check_austin_dump(dir, needle):
    """Check that we have produced a profiler dump."""
    # We expect a single austin file
    (austin_file,) = [file for file in os.listdir(dir) if file.startswith(".austin")]
    assert austin_file

    with open(
        os.path.join(dir, austin_file), "rb" if austin_file.endswith(".pprof") else "r"
    ) as fin:
        assert needle in fin.read()


def test_parse_time():
    assert _parse_time(td(microseconds=10), 0) == 10

@@ -25,106 +12,22 @@ def test_austin_time_checks(testdir):
"""Test Austin time checks."""

# create a temporary pytest test file
    testdir.makepyfile(
    testdir.makepyfile(dedent(
        """
        from datetime import timedelta as td
        from time import sleep
        import pytest
        def hello(name="World"):
            return "Hello {name}!".format(name=name)
        def fibonacci(n):
            if n in (0, 1):
                return 1
            return fibonacci(n-1) + fibonacci(n-2)
        @pytest.mark.total_time(td(milliseconds=50), function="fibonacci")
        @pytest.mark.total_time("99 %", line=18)
        @pytest.mark.total_time("50.3141592653 %", line=19)
        @pytest.mark.austin("50.3141592653 %", line=19)
        def test_lines():
            fibonacci(27)
            fibonacci(25)
        @pytest.mark.total_time(td(microseconds=1000))
        def test_check_fails():
            sleep(.1)
            assert hello() == "Hello World!"
        @pytest.mark.total_time(td(milliseconds=110))
        def test_check_succeeds():
            sleep(.1)
            assert hello() == "Hello World!"
        """
        """)
    )

    result = testdir.runpytest("-vs", "--austin-report", "full")

    assert result.ret > 0

    check_austin_dump(testdir.tmpdir, "test_lines")


def test_austin_memory_checks(testdir):
    """Test Austin memory checks."""

    # create a temporary pytest test file
    testdir.makepyfile(
        """
        import pytest
        @pytest.mark.total_memory("50.3141592653 %")
        def test_memory_alloc_fails():
            a = [42]
            for i in range(20):
                a = list(a) + list(a)
        @pytest.mark.total_memory("128 MB")
        def test_memory_alloc_succeeds():
            a = [42]
            for i in range(20):
                a = list(a) + list(a)
        @pytest.mark.total_memory("12 MB", net=True)
        def test_memory_net_alloc():
            a = [42]
            for i in range(20):
                a = list(a) + list(a)
        """
    )

    result = testdir.runpytest(
        "-vs", "--profile-mode", "memory", "--profile-format", "pprof"
    )

    assert result.ret > 0

    check_austin_dump(testdir.tmpdir, b"test_memory_alloc")


def test_austin_full_checks(testdir):
    """Test Austin full checks."""

    # create a temporary pytest test file
    testdir.makepyfile(
        """
        import pytest
        @pytest.mark.total_time(1)
        @pytest.mark.total_memory("1 KB")
        def test_full_checks_fails():
            a = [42]
            for i in range(20):
                a = list(a) + list(a)
        """
    )

    result = testdir.runpytest(
        "-vs", "--profile-mode", "all", "--profile-format", "speedscope"
    )

    assert result.ret > 0

    check_austin_dump(testdir.tmpdir, "test_full_checks")
