Commit

wip tests endpoint with testbed are good
touilleMan committed Nov 3, 2023
1 parent 4be4582 commit 0bcfa58
Showing 7 changed files with 75 additions and 37 deletions.
15 changes: 15 additions & 0 deletions server/parsec/backend.py
@@ -224,6 +224,21 @@ async def test_load_template(self, template: Any) -> OrganizationID:
revoked_user_certifier=event.author,
revoked_on=event.timestamp,
)
elif isinstance(event, testbed.TestbedEventNewDeviceInvitation):
await self.invite.new_for_device(
organization_id=org_id,
greeter_user_id=event.greeter_user_id,
created_on=event.created_on,
token=event.token,
)
elif isinstance(event, testbed.TestbedEventNewUserInvitation):
await self.invite.new_for_user(
organization_id=org_id,
greeter_user_id=event.greeter_user_id,
claimer_email=event.claimer_email,
created_on=event.created_on,
token=event.token,
)
elif isinstance(event, testbed.TestbedEventNewRealm):
await self.realm.create(
organization_id=org_id,
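
These two new branches extend the event dispatch in `test_load_template` to invitation events: each testbed event is forwarded to the matching invite-component method, including the pre-generated token, so the loaded organization matches the template. Below is a self-contained sketch of that dispatch pattern; the classes are illustrative stand-ins, not the real Parsec types.

```python
# Sketch of the dispatch pattern used by `test_load_template`; all classes
# here are stand-ins, not the actual Parsec testbed/invite types.
from dataclasses import dataclass
from typing import Any


@dataclass
class NewDeviceInvitation:
    greeter_user_id: str
    created_on: str
    token: str


@dataclass
class NewUserInvitation:
    greeter_user_id: str
    claimer_email: str
    created_on: str
    token: str


class FakeInviteComponent:
    async def new_for_device(self, **kwargs: Any) -> None:
        print("device invitation:", kwargs)

    async def new_for_user(self, **kwargs: Any) -> None:
        print("user invitation:", kwargs)


async def load_invitation_event(
    invite: FakeInviteComponent, org_id: str, event: object
) -> None:
    # Forward the token recorded in the template instead of letting the
    # backend generate a random one, so the loaded data stays predictable.
    if isinstance(event, NewUserInvitation):
        await invite.new_for_user(
            organization_id=org_id,
            greeter_user_id=event.greeter_user_id,
            claimer_email=event.claimer_email,
            created_on=event.created_on,
            token=event.token,
        )
    elif isinstance(event, NewDeviceInvitation):
        await invite.new_for_device(
            organization_id=org_id,
            greeter_user_id=event.greeter_user_id,
            created_on=event.created_on,
            token=event.token,
        )
```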
5 changes: 3 additions & 2 deletions server/parsec/cli/options.py
@@ -9,6 +9,7 @@
from typing import (
Any,
Callable,
Iterable,
Iterator,
TextIO,
TypeVar,
@@ -283,7 +284,7 @@ def _parse_blockstore_param(value: str) -> BaseBlockStoreConfig:
raise click.BadParameter(f"Invalid blockstore type `{parts[0]}`")


def _parse_blockstore_params(raw_params: str) -> BaseBlockStoreConfig:
def _parse_blockstore_params(raw_params: Iterable[str]) -> BaseBlockStoreConfig:
raid_configs = defaultdict(list)
for raw_param in raw_params:
raid_mode: str | None
@@ -294,7 +295,7 @@ def _parse_blockstore_params(raw_params: str) -> BaseBlockStoreConfig:
try:
raid_node = int(raw_raid_node)
except ValueError:
raise click.BadParameter(f"Invalid node index `{raid_node}` (must be integer)")
raise click.BadParameter(f"Invalid node index `{raw_raid_node}` (must be integer)")
else:
raid_mode = raid_node = None
node_param = raw_param
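
Two small fixes here: `raw_params` is iterated over, so its annotation becomes `Iterable[str]` instead of `str`, and the error message now interpolates `raw_raid_node` (the unparsed string) rather than `raid_node`, which is either unbound or stale when `int()` raises. A stand-alone sketch of the corrected error path, using a stand-in for `click.BadParameter`:

```python
# Sketch of the corrected error path. `BadParameter` stands in for
# click.BadParameter; the real CLI option parsing is not reproduced here.
class BadParameter(ValueError):
    pass


def parse_node_index(raw_raid_node: str) -> int:
    try:
        return int(raw_raid_node)
    except ValueError:
        # Interpolate the raw string: the parsed `raid_node` variable does not
        # exist (or holds a stale value) when the conversion itself fails.
        raise BadParameter(f"Invalid node index `{raw_raid_node}` (must be integer)")
```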
2 changes: 2 additions & 0 deletions server/parsec/components/invite.py
@@ -930,6 +930,7 @@ async def new_for_user(
greeter_user_id: UserID,
claimer_email: str,
created_on: DateTime | None = None,
token: InvitationToken | None = None,
) -> UserInvitation:
"""
Raise: Nothing
@@ -941,6 +942,7 @@ async def new_for_device(
organization_id: OrganizationID,
greeter_user_id: UserID,
created_on: DateTime | None = None,
token: InvitationToken | None = None,
) -> DeviceInvitation:
"""
Raise: Nothing
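
The abstract invite API gains an optional `token` keyword on both `new_for_user` and `new_for_device`, so a caller such as the testbed loader can impose a predetermined `InvitationToken` while ordinary callers are unaffected. A hypothetical pair of call sites (every name and value below is a placeholder, not taken from the codebase):

```python
# Hypothetical call sites; `invite`, `org_id`, `user_id` and `token` are
# placeholders rather than objects from the Parsec codebase.
from typing import Any


async def create_invitations(invite: Any, org_id: Any, user_id: Any, token: Any) -> None:
    # Regular caller: omit `token`, the component generates one itself.
    await invite.new_for_user(
        organization_id=org_id,
        greeter_user_id=user_id,
        claimer_email="zack@example.com",
    )
    # Testbed loader: impose the token recorded in the template so the
    # resulting organization is predictable.
    await invite.new_for_device(
        organization_id=org_id,
        greeter_user_id=user_id,
        token=token,
    )
```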
7 changes: 7 additions & 0 deletions server/parsec/components/memory/invite.py
@@ -199,6 +199,7 @@ async def new_for_user(
greeter_user_id: UserID,
claimer_email: str,
created_on: DateTime | None = None,
token: InvitationToken | None = None,
) -> UserInvitation:
"""
Raise: InvitationAlreadyMemberError
@@ -219,6 +220,7 @@ async def new_for_user(
greeter_user_id=greeter_user_id,
claimer_email=claimer_email,
created_on=created_on,
token=token,
)
assert isinstance(result, UserInvitation)
return result
@@ -228,11 +230,13 @@ async def new_for_device(
organization_id: OrganizationID,
greeter_user_id: UserID,
created_on: DateTime | None = None,
token: InvitationToken | None = None,
) -> DeviceInvitation:
result = await self._new(
organization_id=organization_id,
greeter_user_id=greeter_user_id,
created_on=created_on,
token=token,
)
assert isinstance(result, DeviceInvitation)
return result
@@ -243,6 +247,7 @@ async def _new(
greeter_user_id: UserID,
created_on: DateTime | None,
claimer_email: str | None = None,
token: InvitationToken | None = None,
) -> Invitation:
assert self._user_component is not None

@@ -259,6 +264,7 @@
else:
# Must create a new invitation
created_on = created_on or DateTime.now()
token = token or InvitationToken.new()
greeter_human_handle = self._user_component._get_user(
organization_id, greeter_user_id
).human_handle
@@ -268,6 +274,7 @@
greeter_human_handle=greeter_human_handle,
claimer_email=claimer_email,
created_on=created_on,
token=token,
)
else: # Device
invitation = DeviceInvitation(
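
The in-memory implementation threads `token` through both public methods into the shared `_new` helper, which now only generates a fresh token when none was provided, mirroring the existing handling of `created_on`. A minimal sketch of that defaulting idiom with a stand-in token type:

```python
# The defaulting idiom used by `_new`: generate a value only when the caller
# did not supply one. `Token` is a stand-in for parsec's InvitationToken.
from __future__ import annotations

import uuid
from datetime import datetime, timezone


class Token(str):
    @classmethod
    def new(cls) -> Token:
        return cls(uuid.uuid4().hex)


def resolve_invitation_defaults(
    created_on: datetime | None = None,
    token: Token | None = None,
) -> tuple[datetime, Token]:
    created_on = created_on or datetime.now(timezone.utc)
    token = token or Token.new()
    return created_on, token
```

Using `or` rather than an explicit `is None` check matches the surrounding code and is safe here because a real token or timestamp is never falsy.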
17 changes: 15 additions & 2 deletions server/tests/common/__init__.py
@@ -1,6 +1,10 @@
# Parsec Cloud (https://parsec.cloud) Copyright (c) BUSL-1.1 2016-present Scille SAS
from __future__ import annotations

# from .rpc_api import *
# from .sequester import *
from _pytest.logging import LogCaptureFixture as VanillaLogCaptureFixture

from .backend import * # noqa
from .client import * # noqa

@@ -14,5 +18,14 @@
# from .population import *
from .postgresql import * # noqa

# from .rpc_api import *
# from .sequester import *

# customized in `tests/conftest.py`
class LogCaptureFixture(VanillaLogCaptureFixture): # type: ignore[misc]
def assert_occurred(self, log: str) -> None:
...

def assert_occurred_once(self, log: str) -> None:
...

def assert_not_occurred(self, log: str) -> None:
...
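
The new `LogCaptureFixture` subclass only declares typing stubs: the real `assert_*` helpers are grafted onto pytest's fixture class at runtime by `_patch_caplog` in `tests/conftest.py` (hence the "customized in `tests/conftest.py`" comment). A hypothetical test using the typed fixture might look like the sketch below; the import path, the log message, and the exact matching semantics of the helper are assumptions, not taken from the repository.

```python
# Hypothetical test: annotating `caplog` with the typed subclass exposes the
# assert_* helpers (monkey-patched onto pytest's real LogCaptureFixture at
# runtime) to the type checker. Import path and message are assumptions.
import logging

from tests.common import LogCaptureFixture


def test_rejects_bad_config(caplog: LogCaptureFixture) -> None:
    logging.getLogger("parsec").error("bad config: missing blockstore")
    # Marking the error as expected, so the autouse `no_logs_gte_error`
    # fixture in conftest.py does not fail the test for a stray error record.
    caplog.assert_occurred_once("bad config: missing blockstore")
```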
2 changes: 1 addition & 1 deletion server/tests/common/postgresql.py
@@ -117,7 +117,7 @@ def bootstrap_pg_cluster() -> str:
_pg_cluster.trust_local_connections()
_pg_cluster.start(port="dynamic", server_settings={})

def _shutdown_pg_cluster():
def _shutdown_pg_cluster() -> None:
assert _pg_cluster is not None
if _pg_cluster.get_status() == "running":
_pg_cluster.stop()
64 changes: 32 additions & 32 deletions server/tests/conftest.py
@@ -4,6 +4,7 @@
import logging
import os
import re
from typing import Generator, Callable, Awaitable

import hypothesis
import pytest
@@ -22,12 +23,13 @@
bootstrap_postgresql_testbed,
get_postgresql_url,
reset_postgresql_testbed,
LogCaptureFixture,
)

# Pytest hooks


def pytest_addoption(parser: pytest.Parser):
def pytest_addoption(parser: pytest.Parser) -> None:
parser.addoption("--hypothesis-max-examples", default=100, type=int)
parser.addoption("--hypothesis-derandomize", action="store_true")
parser.addoption(
@@ -49,11 +51,11 @@ def pytest_addoption(parser: pytest.Parser):
),
)

def _parse_slice_tests(value):
def _parse_slice_tests(value: str) -> tuple[list[int], int]:
try:
nums, total = value.split("/")
total = int(total)
nums = [int(x) for x in nums.split(",")]
raw_nums, raw_total = value.split("/")
total = int(raw_total)
nums = [int(x) for x in raw_nums.split(",")]
if total >= 1 and all(1 <= x <= total for x in nums):
return (nums, total)
except ValueError:
@@ -68,7 +70,7 @@ def _parse_slice_tests(value):
)


def pytest_configure(config):
def pytest_configure(config: pytest.Config) -> None:
# Configure structlog to redirect everything in logging
structlog.configure(
logger_factory=structlog.stdlib.LoggerFactory(),
@@ -89,19 +91,19 @@ def pytest_configure(config):
pg_url = bootstrap_postgresql_testbed()
capturemanager = config.pluginmanager.getplugin("capturemanager")
if capturemanager:
capturemanager.suspend(in_=True)
capturemanager.suspend(in_=True) # type: ignore
print(f"usage: PG_URL={pg_url} py.test --postgresql tests")
input("Press enter when you're done with...")
pytest.exit("bye")
elif config.getoption("--postgresql") and not _is_xdist_master(config):
bootstrap_postgresql_testbed()


def _is_xdist_master(config):
return config.getoption("dist") != "no" and not os.environ.get("PYTEST_XDIST_WORKER")
def _is_xdist_master(config: pytest.Config) -> bool:
return config.getoption("dist") != "no" and not os.environ.get("PYTEST_XDIST_WORKER") # type: ignore


def _patch_caplog():
def _patch_caplog() -> None:
from _pytest.logging import LogCaptureFixture

def _remove_colors(msg):
@@ -142,29 +144,25 @@ def _assert_occurred_once(self, log):

def _assert_not_occurred(self, log):
__tracebackhide__ = True
matches_msgs, matches_records = _find(self, log)
matches_msgs, _ = _find(self, log)
assert not matches_msgs

LogCaptureFixture.assert_occurred = _assert_occurred
LogCaptureFixture.assert_occurred_once = _assert_occurred_once
LogCaptureFixture.assert_not_occurred = _assert_not_occurred
LogCaptureFixture.assert_occurred = _assert_occurred # type: ignore
LogCaptureFixture.assert_occurred_once = _assert_occurred_once # type: ignore
LogCaptureFixture.assert_not_occurred = _assert_not_occurred # type: ignore


def pytest_runtest_setup(item):
def pytest_runtest_setup(item: pytest.Item) -> None:
if item.get_closest_marker("slow") and not item.config.getoption("--runslow"):
pytest.skip("need --runslow option to run")
if item.get_closest_marker("postgresql"):
if not item.config.getoption("--postgresql"):
pytest.skip("need --postgresql option to run")


def pytest_collection_modifyitems(config, items):
for item in items:
if "trio" in item.keywords:
item.fixturenames.append("task_monitoring")

def pytest_collection_modifyitems(config: pytest.Config, items: list[pytest.Item]) -> None:
# Divide tests into slices of equal size
slices_to_run, total_slices = config.getoption("--slice-tests")
slices_to_run, total_slices = config.getoption("--slice-tests") # type: ignore
if total_slices > 1:
# Reorder tests to be deterministic given they will be ran across multiples instances
# Note this must be done as an in-place update to have it taken into account
@@ -189,12 +187,12 @@ def pytest_collection_modifyitems(config, items):


@pytest.fixture(autouse=True)
def no_logs_gte_error(caplog):
def no_logs_gte_error(caplog: LogCaptureFixture) -> Generator[None, None, None]:
yield

# The test should use `caplog.assert_occurred_once` to indicate a log was expected,
# otherwise we consider error logs as *actual* errors.
asserted_records = getattr(caplog, "asserted_records", set())
asserted_records: set = getattr(caplog, "asserted_records", set())
errors = [
record
for record in caplog.get_records("call")
@@ -205,10 +203,10 @@ def no_logs_gte_error(caplog):


@pytest.fixture(scope="session")
def hypothesis_settings(request):
def hypothesis_settings(request: pytest.FixtureRequest) -> hypothesis.settings:
return hypothesis.settings(
max_examples=request.config.getoption("--hypothesis-max-examples"),
derandomize=request.config.getoption("--hypothesis-derandomize"),
max_examples=request.config.getoption("--hypothesis-max-examples"), # type: ignore
derandomize=request.config.getoption("--hypothesis-derandomize"), # type: ignore
deadline=None,
)

@@ -217,16 +215,18 @@ def hypothesis_settings(request):


@pytest.fixture
def postgresql_url(request):
def postgresql_url(request: pytest.FixtureRequest) -> str:
if not request.node.get_closest_marker("postgresql"):
raise RuntimeError(
"`postgresql_url` can only be used in tests decorated with `@pytest.mark.postgresql`"
)
return get_postgresql_url()
url = get_postgresql_url()
assert url is not None
return url


@pytest.fixture
def db_url(request) -> str:
def db_url(request: pytest.FixtureRequest) -> str:
if request.config.getoption("--postgresql"):
reset_postgresql_testbed()
url = get_postgresql_url()
@@ -251,9 +251,9 @@ def blockstore_config(db_url: str) -> BaseBlockStoreConfig:

@pytest.fixture
def reset_testbed(
request,
caplog,
):
request: pytest.FixtureRequest,
caplog: LogCaptureFixture,
) -> Callable[[bool], Awaitable[None]]:
async def _reset_testbed(keep_logs=False):
if request.config.getoption("--postgresql"):
await asyncio_reset_postgresql_testbed()
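
Most of the `conftest.py` changes add annotations to the pytest hooks and fixtures (plus targeted `type: ignore` comments where pytest's stubs fall short); the caplog helpers themselves are still monkey-patched onto `_pytest.logging.LogCaptureFixture` at runtime, which is why the typed stub in `tests/common/__init__.py` exists. A minimal sketch of that runtime-patching pattern, using a stand-in class:

```python
# Sketch of the pattern conftest.py relies on: attach helper methods to a
# third-party class at runtime, paired with a typed stub subclass (see
# server/tests/common/__init__.py) so static checkers know about them.
# `FakeCaplog` stands in for _pytest.logging.LogCaptureFixture.
class FakeCaplog:
    def __init__(self) -> None:
        self.messages: list[str] = []


def patch_fake_caplog() -> None:
    def _assert_occurred(self: FakeCaplog, log: str) -> None:
        __tracebackhide__ = True  # keep this helper out of pytest tracebacks
        assert any(log in msg for msg in self.messages), f"log not found: {log!r}"

    # mypy flags attribute assignment on a class it considers closed, hence
    # the `type: ignore` comments on the real assignments in conftest.py.
    FakeCaplog.assert_occurred = _assert_occurred  # type: ignore[attr-defined]
```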
