diff --git a/.coveragerc b/.coveragerc
index 230110d8..f31b2d6a 100644
--- a/.coveragerc
+++ b/.coveragerc
@@ -1,3 +1,8 @@
 [run]
 omit = aio_pika/compat.py
 branch = True
+
+[report]
+exclude_lines =
+    pragma: no cover
+    raise NotImplementedError
diff --git a/aio_pika/__init__.py b/aio_pika/__init__.py
index 516eb51e..e18002a4 100644
--- a/aio_pika/__init__.py
+++ b/aio_pika/__init__.py
@@ -13,12 +13,8 @@
 from .robust_queue import RobustQueue
 
-try:
-    from importlib.metadata import Distribution
-    __version__ = Distribution.from_name("aio-pika").version
-except ImportError:
-    import pkg_resources
-    __version__ = pkg_resources.get_distribution("aio-pika").version
+from importlib.metadata import Distribution
+__version__ = Distribution.from_name("aio-pika").version
 
 __all__ = (
diff --git a/aio_pika/abc.py b/aio_pika/abc.py
index ff81e38f..36ca1ee5 100644
--- a/aio_pika/abc.py
+++ b/aio_pika/abc.py
@@ -1,6 +1,7 @@
+from __future__ import annotations
+
 import asyncio
 import dataclasses
-import sys
 from abc import ABC, abstractmethod
 from dataclasses import dataclass
 from datetime import datetime, timedelta
@@ -9,16 +10,10 @@
 from types import TracebackType
 from typing import (
     Any, AsyncContextManager, AsyncIterable, Awaitable, Callable, Dict,
-    Generator, Iterator, Mapping, Optional, Tuple, Type, TypeVar, Union,
-    overload,
+    Generator, Iterator, Literal, Mapping, Optional, Tuple, Type, TypedDict,
+    TypeVar, Union, overload,
 )
-
-if sys.version_info >= (3, 8):
-    from typing import Literal, TypedDict
-else:
-    from typing_extensions import Literal, TypedDict
-
 import aiormq.abc
 from aiormq.abc import ExceptionType
 from pamqp.common import Arguments, FieldValue
@@ -255,7 +250,10 @@ class AbstractQueue:
     arguments: Arguments
     passive: bool
     declaration_result: aiormq.spec.Queue.DeclareOk
-    close_callbacks: CallbackCollection
+    close_callbacks: CallbackCollection[
+        AbstractQueue,
+        [Optional[BaseException]],
+    ]
 
     @abstractmethod
     def __init__(
@@ -366,14 +364,14 @@ def iterator(self, **kwargs: Any) -> "AbstractQueueIterator":
         raise NotImplementedError
 
 
-class AbstractQueueIterator(AsyncIterable):
+class AbstractQueueIterator(AsyncIterable[AbstractIncomingMessage]):
     _amqp_queue: AbstractQueue
     _queue: asyncio.Queue
     _consumer_tag: ConsumerTag
     _consume_kwargs: Dict[str, Any]
 
     @abstractmethod
-    def close(self, *_: Any) -> Awaitable[Any]:
+    def close(self) -> Awaitable[Any]:
         raise NotImplementedError
 
     @abstractmethod
@@ -511,8 +509,14 @@ class AbstractChannel(PoolInstance, ABC):
     QUEUE_CLASS: Type[AbstractQueue]
     EXCHANGE_CLASS: Type[AbstractExchange]
 
-    close_callbacks: CallbackCollection
-    return_callbacks: CallbackCollection
+    close_callbacks: CallbackCollection[
+        AbstractChannel,
+        [Optional[BaseException]],
+    ]
+    return_callbacks: CallbackCollection[
+        AbstractChannel,
+        [AbstractIncomingMessage],
+    ]
     default_exchange: AbstractExchange
 
     publisher_confirms: bool
@@ -531,6 +535,10 @@ def is_closed(self) -> bool:
     def close(self, exc: Optional[ExceptionType] = None) -> Awaitable[None]:
         raise NotImplementedError
 
+    @abstractmethod
+    def closed(self) -> Awaitable[Literal[True]]:
+        raise NotImplementedError
+
     @abstractmethod
     async def get_underlay_channel(self) -> aiormq.abc.AbstractChannel:
         raise NotImplementedError
@@ -718,7 +726,10 @@ def parse(self, value: Optional[str]) -> Any:
 
 class AbstractConnection(PoolInstance, ABC):
     PARAMETERS: Tuple[ConnectionParameter, ...]
- close_callbacks: CallbackCollection + close_callbacks: CallbackCollection[ + AbstractConnection, + [Optional[BaseException]], + ] connected: asyncio.Event transport: Optional[UnderlayConnection] kwargs: Mapping[str, Any] @@ -741,6 +752,10 @@ def is_closed(self) -> bool: async def close(self, exc: ExceptionType = asyncio.CancelledError) -> None: raise NotImplementedError + @abstractmethod + def closed(self) -> Awaitable[Literal[True]]: + raise NotImplementedError + @abstractmethod async def connect(self, timeout: TimeoutType = None) -> None: raise NotImplementedError @@ -831,7 +846,7 @@ async def bind( class AbstractRobustChannel(AbstractChannel): - reopen_callbacks: CallbackCollection + reopen_callbacks: CallbackCollection[AbstractRobustChannel, []] @abstractmethod def reopen(self) -> Awaitable[None]: @@ -874,7 +889,7 @@ async def declare_queue( class AbstractRobustConnection(AbstractConnection): - reconnect_callbacks: CallbackCollection + reconnect_callbacks: CallbackCollection[AbstractRobustConnection, []] @property @abstractmethod @@ -896,10 +911,10 @@ def channel( ChannelCloseCallback = Callable[ - [AbstractChannel, Optional[BaseException]], Any, + [Optional[AbstractChannel], Optional[BaseException]], Any, ] ConnectionCloseCallback = Callable[ - [AbstractConnection, Optional[BaseException]], Any, + [Optional[AbstractConnection], Optional[BaseException]], Any, ] ConnectionType = TypeVar("ConnectionType", bound=AbstractConnection) diff --git a/aio_pika/channel.py b/aio_pika/channel.py index 453cb599..b0e73c67 100644 --- a/aio_pika/channel.py +++ b/aio_pika/channel.py @@ -1,8 +1,12 @@ import asyncio +import contextlib import warnings from abc import ABC from types import TracebackType -from typing import Any, AsyncContextManager, Generator, Optional, Type, Union +from typing import ( + Any, AsyncContextManager, Awaitable, Generator, Literal, Optional, Type, + Union, +) from warnings import warn import aiormq @@ -77,8 +81,9 @@ def __init__( self._connection: AbstractConnection = connection - # That's means user closed channel instance explicitly - self._closed: bool = False + self._closed: asyncio.Future = ( + asyncio.get_running_loop().create_future() + ) self._channel: Optional[UnderlayChannel] = None self._channel_number = channel_number @@ -89,6 +94,8 @@ def __init__( self.publisher_confirms = publisher_confirms self.on_return_raises = on_return_raises + self.close_callbacks.add(self._set_closed_callback) + @property def is_initialized(self) -> bool: """Returns True when the channel has been opened @@ -99,7 +106,7 @@ def is_initialized(self) -> bool: def is_closed(self) -> bool: """Returns True when the channel has been closed from the broker side or after the close() method has been called.""" - if not self.is_initialized or self._closed: + if not self.is_initialized or self._closed.done(): return True channel = self._channel if channel is None: @@ -119,8 +126,12 @@ async def close( return log.debug("Closing channel %r", self) - self._closed = True await self._channel.close() + if not self._closed.done(): + self._closed.set_result(True) + + def closed(self) -> Awaitable[Literal[True]]: + return self._closed async def get_underlay_channel(self) -> aiormq.abc.AbstractChannel: @@ -174,12 +185,13 @@ async def _open(self) -> None: await channel.close(e) self._channel = None raise - self._closed = False + if self._closed.done(): + self._closed = asyncio.get_running_loop().create_future() async def initialize(self, timeout: TimeoutType = None) -> None: if self.is_initialized: raise 
RuntimeError("Already initialized") - elif self._closed: + elif self._closed.done(): raise RuntimeError("Can't initialize closed channel") await self._open() @@ -197,7 +209,10 @@ async def _on_open(self) -> None: type=ExchangeType.DIRECT, ) - async def _on_close(self, closing: asyncio.Future) -> None: + async def _on_close( + self, + closing: asyncio.Future + ) -> Optional[BaseException]: try: exc = closing.exception() except asyncio.CancelledError as e: @@ -207,6 +222,16 @@ async def _on_close(self, closing: asyncio.Future) -> None: if self._channel and self._channel.channel: self._channel.channel.on_return_callbacks.discard(self._on_return) + return exc + + async def _set_closed_callback( + self, + _: Optional[AbstractChannel], + exc: Optional[BaseException], + ) -> None: + if not self._closed.done(): + self._closed.set_result(True) + async def _on_initialized(self) -> None: channel = await self.get_underlay_channel() channel.on_return_callbacks.add(self._on_return) @@ -219,7 +244,11 @@ async def reopen(self) -> None: await self._open() def __del__(self) -> None: - self._closed = True + with contextlib.suppress(AttributeError): + # might raise because an Exception was raised in __init__ + if not self._closed.done(): + self._closed.set_result(True) + self._channel = None async def declare_exchange( diff --git a/aio_pika/connection.py b/aio_pika/connection.py index fb49db4b..e45be8e7 100644 --- a/aio_pika/connection.py +++ b/aio_pika/connection.py @@ -1,7 +1,10 @@ import asyncio from ssl import SSLContext from types import TracebackType -from typing import Any, Dict, Optional, Tuple, Type, TypeVar, Union +from typing import ( + Any, Awaitable, Dict, Literal, Optional, Tuple, Type, TypeVar, Union +) + import aiormq.abc from aiormq.connection import parse_int @@ -39,11 +42,11 @@ class Connection(AbstractConnection): ), ) - _closed: bool + _closed: asyncio.Future @property def is_closed(self) -> bool: - return self._closed + return self._closed.done() async def close( self, exc: Optional[aiormq.abc.ExceptionType] = ConnectionClosed, @@ -53,7 +56,11 @@ async def close( if not transport: return await transport.close(exc) - self._closed = True + if not self._closed.done(): + self._closed.set_result(True) + + def closed(self) -> Awaitable[Literal[True]]: + return self._closed @classmethod def _parse_parameters(cls, kwargs: Dict[str, Any]) -> Dict[str, Any]: @@ -74,7 +81,7 @@ def __init__( ): self.loop = loop or asyncio.get_event_loop() self.transport = None - self._closed = False + self._closed = self.loop.create_future() self._close_called = False self.url = URL(url) @@ -201,8 +208,7 @@ async def ready(self) -> None: def __del__(self) -> None: if ( self.is_closed or - self.loop.is_closed() or - not hasattr(self, "connection") + self.loop.is_closed() ): return diff --git a/aio_pika/message.py b/aio_pika/message.py index 2066916f..7d92c5a9 100644 --- a/aio_pika/message.py +++ b/aio_pika/message.py @@ -47,7 +47,7 @@ def encode_expiration_timedelta(value: timedelta) -> str: return str(int(value.total_seconds() * MILLISECONDS)) -@encode_expiration.register(NoneType) # type: ignore +@encode_expiration.register(NoneType) def encode_expiration_none(_: Any) -> None: return None @@ -62,7 +62,7 @@ def decode_expiration_str(t: str) -> float: return float(t) / MILLISECONDS -@decode_expiration.register(NoneType) # type: ignore +@decode_expiration.register(NoneType) def decode_expiration_none(_: Any) -> None: return None @@ -88,7 +88,7 @@ def encode_timestamp_timedelta(value: timedelta) -> datetime: return 
datetime.now(tz=timezone.utc) + value -@encode_timestamp.register(NoneType) # type: ignore +@encode_timestamp.register(NoneType) def encode_timestamp_none(_: Any) -> None: return None @@ -103,7 +103,7 @@ def decode_timestamp_datetime(value: datetime) -> datetime: return value -@decode_timestamp.register(NoneType) # type: ignore +@decode_timestamp.register(NoneType) def decode_timestamp_none(_: Any) -> None: return None diff --git a/aio_pika/patterns/master.py b/aio_pika/patterns/master.py index e46e4ee5..a7a70e14 100644 --- a/aio_pika/patterns/master.py +++ b/aio_pika/patterns/master.py @@ -12,7 +12,7 @@ AbstractChannel, AbstractExchange, AbstractIncomingMessage, AbstractQueue, ConsumerTag, DeliveryMode, ) -from aio_pika.message import Message, ReturnedMessage +from aio_pika.message import Message from ..tools import create_task, ensure_awaitable from .base import Base, CallbackType, Proxy, T @@ -113,8 +113,8 @@ def exchange(self) -> AbstractExchange: @staticmethod def on_message_returned( - channel: AbstractChannel, - message: ReturnedMessage, + channel: Optional[AbstractChannel], + message: AbstractIncomingMessage, ) -> None: log.warning( "Message returned. Probably destination queue does not exists: %r", diff --git a/aio_pika/patterns/rpc.py b/aio_pika/patterns/rpc.py index d9c8b07d..91a336f1 100644 --- a/aio_pika/patterns/rpc.py +++ b/aio_pika/patterns/rpc.py @@ -15,7 +15,7 @@ ) from aio_pika.exceptions import MessageProcessError from aio_pika.exchange import ExchangeType -from aio_pika.message import IncomingMessage, Message, ReturnedMessage +from aio_pika.message import IncomingMessage, Message from ..tools import ensure_awaitable from .base import Base, CallbackType, Proxy, T @@ -193,7 +193,8 @@ async def initialize( self.channel.return_callbacks.add(self.on_message_returned) def on_close( - self, channel: AbstractChannel, + self, + channel: Optional[AbstractChannel], exc: Optional[ExceptionType] = None, ) -> None: log.debug("Closing RPC futures because %r", exc) @@ -218,7 +219,9 @@ async def create(cls, channel: AbstractChannel, **kwargs: Any) -> "RPC": return rpc def on_message_returned( - self, channel: AbstractChannel, message: ReturnedMessage, + self, + channel: Optional[AbstractChannel], + message: AbstractIncomingMessage, ) -> None: if message.correlation_id is None: log.warning( diff --git a/aio_pika/queue.py b/aio_pika/queue.py index 4c02bb78..9b4cd3de 100644 --- a/aio_pika/queue.py +++ b/aio_pika/queue.py @@ -1,11 +1,14 @@ import asyncio -import sys +from asyncio import Future from functools import partial from types import TracebackType -from typing import Any, Awaitable, Callable, Optional, Type, overload +from typing import ( + Any, Awaitable, Callable, Literal, Optional, Type, cast, overload, +) import aiormq from aiormq.abc import DeliveredMessage +from exceptiongroup import ExceptionGroup from pamqp.common import Arguments from .abc import ( @@ -19,12 +22,6 @@ from .tools import CallbackCollection, create_task, ensure_awaitable -if sys.version_info >= (3, 8): - from typing import Literal -else: - from typing_extensions import Literal - - log = get_logger(__name__) @@ -92,7 +89,6 @@ async def declare( """ Declare queue. :param timeout: execution timeout - :param passive: Only check to see if the queue exists. 
:return: :class:`None` """ log.debug("Declaring queue: %r", self) @@ -421,7 +417,24 @@ class QueueIterator(AbstractQueueIterator): def consumer_tag(self) -> Optional[ConsumerTag]: return getattr(self, "_consumer_tag", None) - async def close(self, *_: Any) -> Any: + async def close(self) -> None: + await self._on_close(self._amqp_queue, None) + if not self._closed.done(): + self._closed.set_result(True) + + async def _set_closed( + self, + _channel: Optional[AbstractQueue], + exc: Optional[BaseException] + ) -> None: + if not self._closed.done(): + self._closed.set_result(True) + + async def _on_close( + self, + _channel: Optional[AbstractQueue], + _exc: Optional[BaseException] + ) -> None: log.debug("Cancelling queue iterator %r", self) if not hasattr(self, "_consumer_tag"): @@ -436,11 +449,15 @@ async def close(self, *_: Any) -> Any: consumer_tag = self._consumer_tag del self._consumer_tag - self._amqp_queue.close_callbacks.remove(self.close) + self._amqp_queue.close_callbacks.discard(self._on_close) await self._amqp_queue.cancel(consumer_tag) log.debug("Queue iterator %r closed", self) + if self._queue.empty(): + return + + exceptions = [] # Reject all messages that have been received and in the buffer/cache. while not self._queue.empty(): msg = self._queue.get_nowait() @@ -458,7 +475,21 @@ async def close(self, *_: Any) -> Any: self, ) else: - await msg.nack(requeue=True, multiple=False) + try: + await msg.nack(requeue=True, multiple=False) + except Exception as e: + log.warning( + "Failed to nack message %r", + msg, + exc_info=e, + ) + exceptions.append(e) + + if exceptions: + raise ExceptionGroup( + "Unable to nack all messages", + exceptions, + ) def __str__(self) -> str: return f"queue[{self._amqp_queue}](...)" @@ -472,14 +503,30 @@ def __repr__(self) -> str: def __init__(self, queue: Queue, **kwargs: Any): self._consumer_tag: ConsumerTag - self._amqp_queue: AbstractQueue = queue + self._amqp_queue: Queue = queue self._queue = asyncio.Queue() + self._closed = asyncio.get_running_loop().create_future() + self._message_or_closed = asyncio.Event() + self._timeout_event = asyncio.Event() self._consume_kwargs = kwargs - self._amqp_queue.close_callbacks.add(self.close) + cast( + asyncio.Future, self._amqp_queue.channel.closed() + ).add_done_callback(self._propagate_closed) + self._closed.add_done_callback(self._propagate_closed) + + self._amqp_queue.close_callbacks.add(self._on_close, weak=True) + self._amqp_queue.close_callbacks.add( + self._set_closed, + weak=True + ) + + def _propagate_closed(self, _: Future) -> None: + self._message_or_closed.set() async def on_message(self, message: AbstractIncomingMessage) -> None: await self._queue.put(message) + self._message_or_closed.set() async def consume(self) -> None: self._consumer_tag = await self._amqp_queue.consume( @@ -500,27 +547,70 @@ async def __aexit__( exc_val: Optional[BaseException], exc_tb: Optional[TracebackType], ) -> None: - await self.close() + if hasattr(self, "__closing"): + try: + await self.__closing + finally: + del self.__closing + else: + await self.close() + + async def __anext__(self) -> AbstractIncomingMessage: + if self._closed.done(): + raise StopAsyncIteration - async def __anext__(self) -> IncomingMessage: if not hasattr(self, "_consumer_tag"): await self.consume() - try: - return await asyncio.wait_for( - self._queue.get(), - timeout=self._consume_kwargs.get("timeout"), - ) - except asyncio.CancelledError: - timeout = self._consume_kwargs.get( - "timeout", - self.DEFAULT_CLOSE_TIMEOUT, - ) - log.info( - "%r 
closing with timeout %d seconds", - self, timeout, - ) - await asyncio.wait_for(self.close(), timeout=timeout) - raise + + timeout: Optional[float] = self._consume_kwargs.get("timeout") + + if not self._message_or_closed.is_set(): + coroutine: Awaitable[Any] = self._message_or_closed.wait() + if timeout is not None and timeout > 0: + coroutine = asyncio.wait_for(coroutine, timeout=timeout) + + try: + await coroutine + except (asyncio.TimeoutError, asyncio.CancelledError): + if timeout is not None: + timeout = ( + timeout + if timeout > 0 + else self.DEFAULT_CLOSE_TIMEOUT + ) + log.info( + "%r closing with timeout %d seconds", + self, timeout, + ) + + task = asyncio.create_task(self.close()) + coroutine = task + if timeout is not None: + coroutine = asyncio.wait_for( + asyncio.shield(coroutine), + timeout=timeout, + ) + + try: + await coroutine + except asyncio.TimeoutError: + self.__closing = task + + raise + + if self._queue.empty(): + raise StopAsyncIteration + + msg = self._queue.get_nowait() + + if ( + self._queue.empty() + and not self._amqp_queue.channel.is_closed + and not self._closed.done() + ): + self._message_or_closed.clear() + + return msg __all__ = ("Queue", "QueueIterator", "ConsumerTag") diff --git a/aio_pika/robust_channel.py b/aio_pika/robust_channel.py index b73d9b65..77114696 100644 --- a/aio_pika/robust_channel.py +++ b/aio_pika/robust_channel.py @@ -66,10 +66,11 @@ def __init__( self._prefetch_count: int = 0 self._prefetch_size: int = 0 self._global_qos: bool = False - self.reopen_callbacks: CallbackCollection = CallbackCollection(self) + self.reopen_callbacks = CallbackCollection(self) self.__restore_lock = asyncio.Lock() self.__restored = asyncio.Event() - self.close_callbacks.add(self.__close_callback) + + self.close_callbacks.remove(self._set_closed_callback) async def ready(self) -> None: await self._connection.ready() @@ -94,23 +95,32 @@ async def restore(self, channel: Any = None) -> None: await self.reopen() self.__restored.set() - async def __close_callback(self, _: Any, exc: BaseException) -> None: + async def _on_close( + self, + closing: asyncio.Future + ) -> Optional[BaseException]: + exc = await super()._on_close(closing) + if isinstance(exc, asyncio.CancelledError): # This happens only if the channel is forced to close from the # outside, for example, if the connection is closed. # Of course, here you need to exit from this function # as soon as possible and to avoid a recovery attempt. 
self.__restored.clear() - return + if not self._closed.done(): + self._closed.set_result(True) + return exc in_restore_state = not self.__restored.is_set() self.__restored.clear() - if self._closed or in_restore_state: - return + if self._closed.done() or in_restore_state: + return exc await self.restore() + return exc + async def reopen(self) -> None: await super().reopen() await self.reopen_callbacks() diff --git a/aio_pika/robust_connection.py b/aio_pika/robust_connection.py index c1a28368..1f1e9366 100644 --- a/aio_pika/robust_connection.py +++ b/aio_pika/robust_connection.py @@ -60,7 +60,7 @@ def __init__( self.__reconnection_task: Optional[asyncio.Task] = None self._reconnect_lock = asyncio.Lock() - self.reconnect_callbacks: CallbackCollection = CallbackCollection(self) + self.reconnect_callbacks = CallbackCollection(self) self.__connection_close_event.set() @@ -103,14 +103,8 @@ async def _on_connected(self) -> None: except Exception: log.exception("Failed to reopen channel") raise - except asyncio.CancelledError: - # In python 3.7 asyncio.CancelledError inherited - # from Exception and this needed for catch it first - raise except Exception as e: - closing = self.loop.create_future() - closing.set_exception(e) - await self.close_callbacks(closing) + await self.close_callbacks(e) await asyncio.gather( transport.connection.close(e), return_exceptions=True, diff --git a/aio_pika/robust_queue.py b/aio_pika/robust_queue.py index e7fdc1ba..8d2dfa91 100644 --- a/aio_pika/robust_queue.py +++ b/aio_pika/robust_queue.py @@ -151,6 +151,11 @@ def iterator(self, **kwargs: Any) -> AbstractQueueIterator: class RobustQueueIterator(QueueIterator): + def __init__(self, queue: Queue, **kwargs: Any): + super().__init__(queue, **kwargs) + + self._amqp_queue.close_callbacks.discard(self._set_closed) + async def consume(self) -> None: while True: try: diff --git a/aio_pika/tools.py b/aio_pika/tools.py index daec83ea..771cd041 100644 --- a/aio_pika/tools.py +++ b/aio_pika/tools.py @@ -1,3 +1,5 @@ +from __future__ import annotations + import asyncio import inspect import warnings @@ -5,9 +7,12 @@ from itertools import chain from threading import Lock from typing import ( - AbstractSet, Any, Awaitable, Callable, Coroutine, Generator, Iterator, List, - MutableSet, Optional, TypeVar, Union, + AbstractSet, Any, Awaitable, Callable, Coroutine, Generator, Iterator, + List, + MutableSet, Optional, TypeVar, Union, Generic, ) + +from typing_extensions import ParamSpec, Protocol from weakref import ReferenceType, WeakSet, ref from aio_pika.log import get_logger @@ -19,25 +24,12 @@ def iscoroutinepartial(fn: Callable[..., Any]) -> bool: """ - Function returns True if function is a partial instance of coroutine. - See additional information here_. - - :param fn: Function - :return: bool - - .. 
_here: https://goo.gl/C0S4sQ - + Use Python 3.8's inspect.iscoroutinefunction() instead """ - - while True: - parent = fn - - fn = getattr(parent, "func", None) # type: ignore - - if fn is None: - break - - return asyncio.iscoroutinefunction(parent) + warnings.warn( + "Use inspect.iscoroutinefunction() instead.", DeprecationWarning + ) + return asyncio.iscoroutinefunction(fn) def _task_done(future: asyncio.Future) -> None: @@ -57,8 +49,8 @@ def create_task( ) -> Awaitable[T]: loop = loop or asyncio.get_event_loop() - if iscoroutinepartial(func): - task = loop.create_task(func(*args, **kwargs)) # type: ignore + if inspect.iscoroutinefunction(func): + task = loop.create_task(func(*args, **kwargs)) task.add_done_callback(_task_done) return task @@ -79,8 +71,20 @@ def run(future: asyncio.Future) -> Optional[asyncio.Future]: return future -CallbackType = Callable[..., Union[T, Awaitable[T]]] -CallbackSetType = AbstractSet[CallbackType] +_Sender = TypeVar("_Sender", contravariant=True) +_Params = ParamSpec("_Params") +_Return = TypeVar("_Return", covariant=True) + + +class CallbackType(Protocol[_Sender, _Params, _Return]): + def __call__( + self, + __sender: Optional[_Sender], + /, + *args: _Params.args, + **kwargs: _Params.kwargs, + ) -> Union[_Return, Awaitable[_Return]]: + ... class StubAwaitable: @@ -93,7 +97,15 @@ def __await__(self) -> Generator[Any, Any, None]: STUB_AWAITABLE = StubAwaitable() -class CallbackCollection(MutableSet): +class CallbackCollection( + MutableSet[ + Union[ + CallbackType[_Sender, _Params, Any], + "CallbackCollection[Any, _Params]", + ], + ], + Generic[_Sender, _Params], +): __slots__ = ( "__weakref__", "__sender", @@ -102,7 +114,7 @@ class CallbackCollection(MutableSet): "__lock", ) - def __init__(self, sender: Union[T, ReferenceType]): + def __init__(self, sender: Union[_Sender, ReferenceType[_Sender]]): self.__sender: ReferenceType if isinstance(sender, ReferenceType): self.__sender = sender @@ -110,22 +122,56 @@ def __init__(self, sender: Union[T, ReferenceType]): self.__sender = ref(sender) self.__callbacks: CallbackSetType = set() - self.__weak_callbacks: MutableSet[CallbackType] = WeakSet() + self.__weak_callbacks: MutableSet[ + Union[ + CallbackType[_Sender, _Params, Any], + CallbackCollection[Any, _Params], + ], + ] = WeakSet() self.__lock: Lock = Lock() - def add(self, callback: CallbackType, weak: bool = False) -> None: + def add( + self, + callback: Union[ + CallbackType[_Sender, _Params, Any], + CallbackCollection[Any, _Params], + ], + weak: bool = False + ) -> None: if self.is_frozen: raise RuntimeError("Collection frozen") if not callable(callback): raise ValueError("Callback is not callable") with self.__lock: - if weak: + if weak or isinstance(callback, CallbackCollection): self.__weak_callbacks.add(callback) else: self.__callbacks.add(callback) # type: ignore - def discard(self, callback: CallbackType) -> None: + def remove( + self, + callback: Union[ + CallbackType[_Sender, _Params, Any], + CallbackCollection[Any, _Params], + ], + ) -> None: + if self.is_frozen: + raise RuntimeError("Collection frozen") + + with self.__lock: + try: + self.__callbacks.remove(callback) # type: ignore + except KeyError: + self.__weak_callbacks.remove(callback) + + def discard( + self, + callback: Union[ + CallbackType[_Sender, _Params, Any], + CallbackCollection[Any, _Params], + ], + ) -> None: if self.is_frozen: raise RuntimeError("Collection frozen") @@ -169,13 +215,18 @@ def __contains__(self, x: object) -> bool: def __len__(self) -> int: return 
len(self.__callbacks) + len(self.__weak_callbacks) - def __iter__(self) -> Iterator[CallbackType]: + def __iter__(self) -> Iterator[ + Union[ + CallbackType[_Sender, _Params, Any], + CallbackCollection[_Sender, _Params], + ], + ]: return iter(chain(self.__callbacks, self.__weak_callbacks)) def __bool__(self) -> bool: return bool(self.__callbacks) or bool(self.__weak_callbacks) - def __copy__(self) -> "CallbackCollection": + def __copy__(self) -> CallbackCollection[_Sender, _Params]: instance = self.__class__(self.__sender) with self.__lock: @@ -190,7 +241,11 @@ def __copy__(self) -> "CallbackCollection": return instance - def __call__(self, *args: Any, **kwargs: Any) -> Awaitable[Any]: + def __call__( + self, + *args: _Params.args, + **kwargs: _Params.kwargs, + ) -> Awaitable[Any]: futures: List[asyncio.Future] = [] with self.__lock: @@ -198,14 +253,18 @@ def __call__(self, *args: Any, **kwargs: Any) -> Awaitable[Any]: for cb in self: try: - result = cb(sender, *args, **kwargs) - if hasattr(result, "__await__"): + if isinstance(cb, CallbackCollection): + result = cb(*args, **kwargs) + else: + result = cb(sender, *args, **kwargs) + if inspect.isawaitable(result): futures.append(asyncio.ensure_future(result)) except Exception: log.exception("Callback %r error", cb) if not futures: return STUB_AWAITABLE + return asyncio.gather(*futures, return_exceptions=True) def __hash__(self) -> int: @@ -255,12 +314,12 @@ def __call__(self, *args: Any, **kwargs: Any) -> Awaitable[Any]: def ensure_awaitable( - func: Callable[..., Union[T, Awaitable[T]]], -) -> Callable[..., Awaitable[T]]: + func: Callable[_Params, Union[T, Awaitable[T]]], +) -> Callable[_Params, Awaitable[T]]: if inspect.iscoroutinefunction(func): return func - if inspect.isfunction(func) and not iscoroutinepartial(func): + if inspect.isfunction(func): warnings.warn( f"You probably registering the non-coroutine function {func!r}. " "This is deprecated and will be removed in future releases. " @@ -269,7 +328,7 @@ def ensure_awaitable( ) @wraps(func) - async def wrapper(*args: Any, **kwargs: Any) -> T: + async def wrapper(*args: _Params.args, **kwargs: _Params.kwargs) -> T: nonlocal func result = func(*args, **kwargs) @@ -290,6 +349,14 @@ async def wrapper(*args: Any, **kwargs: Any) -> T: return wrapper +CallbackSetType = AbstractSet[ + Union[ + CallbackType[_Sender, _Params, None], + CallbackCollection[_Sender, _Params], + ], +] + + __all__ = ( "CallbackCollection", "CallbackSetType", diff --git a/docs/source/conf.py b/docs/source/conf.py index 65f57a12..92a0e9d5 100644 --- a/docs/source/conf.py +++ b/docs/source/conf.py @@ -23,12 +23,8 @@ # noinspection PyUnresolvedReferences -try: - from importlib.metadata import Distribution - __version__ = Distribution.from_name("aio-pika").version -except ImportError: - import pkg_resources - __version__ = pkg_resources.get_distribution("aio-pika").version +from importlib.metadata import Distribution +__version__ = Distribution.from_name("aio-pika").version sys.path.insert(0, os.path.abspath(os.path.dirname("__file__"))) @@ -87,7 +83,7 @@ # List of patterns, relative to source directory, that match files and # directories to ignore when looking for source files. # This patterns also effect to html_static_path and html_extra_path -exclude_patterns = [] +exclude_patterns = [] # type: ignore # The name of the Pygments (syntax highlighting) style to use. 
pygments_style = "sphinx" @@ -154,7 +150,7 @@ # -- Options for LaTeX output --------------------------------------------- -latex_elements = { +latex_elements = { # type: ignore # The paper size ('letterpaper' or 'a4paper'). # # 'papersize': 'a4paper', diff --git a/docs/source/examples/pooling.py b/docs/source/examples/pooling.py index af8d5c3c..d0e18b08 100644 --- a/docs/source/examples/pooling.py +++ b/docs/source/examples/pooling.py @@ -40,7 +40,7 @@ async def publish() -> None: async with connection_pool, channel_pool: task = asyncio.create_task(consume()) - await asyncio.wait([publish() for _ in range(50)]) + await asyncio.wait([asyncio.create_task(publish()) for _ in range(50)]) await task diff --git a/docs/source/rabbitmq-tutorial/examples/7-publisher-confirms/publish_asynchronously.py b/docs/source/rabbitmq-tutorial/examples/7-publisher-confirms/publish_asynchronously.py index 52b23b23..4a421acf 100644 --- a/docs/source/rabbitmq-tutorial/examples/7-publisher-confirms/publish_asynchronously.py +++ b/docs/source/rabbitmq-tutorial/examples/7-publisher-confirms/publish_asynchronously.py @@ -1,16 +1,23 @@ import asyncio +from typing import Generator from aio_pika import Message, connect from aiormq.exceptions import DeliveryError from pamqp.commands import Basic +from aio_pika.abc import AbstractExchange -def get_messages_to_publish(): + +def get_messages_to_publish() -> Generator[bytes, None, None]: for i in range(10000): yield f"Hello World {i}!".encode() -async def publish_and_handle_confirm(exchange, queue_name, message_body): +async def publish_and_handle_confirm( + exchange: AbstractExchange, + queue_name: str, + message_body: bytes, +) -> None: try: confirmation = await exchange.publish( Message(message_body), diff --git a/docs/source/rabbitmq-tutorial/examples/7-publisher-confirms/publish_batches.py b/docs/source/rabbitmq-tutorial/examples/7-publisher-confirms/publish_batches.py index ceafdb66..854df81d 100644 --- a/docs/source/rabbitmq-tutorial/examples/7-publisher-confirms/publish_batches.py +++ b/docs/source/rabbitmq-tutorial/examples/7-publisher-confirms/publish_batches.py @@ -1,9 +1,10 @@ import asyncio +from typing import Generator from aio_pika import Message, connect -def get_messages_to_publish(): +def get_messages_to_publish() -> Generator[bytes, None, None]: for i in range(10000): yield f"Hello World {i}!".encode() diff --git a/docs/source/rabbitmq-tutorial/examples/7-publisher-confirms/publish_individually.py b/docs/source/rabbitmq-tutorial/examples/7-publisher-confirms/publish_individually.py index f6d02b16..b84893ca 100644 --- a/docs/source/rabbitmq-tutorial/examples/7-publisher-confirms/publish_individually.py +++ b/docs/source/rabbitmq-tutorial/examples/7-publisher-confirms/publish_individually.py @@ -1,9 +1,10 @@ import asyncio +from typing import Generator from aio_pika import Message, connect -def get_messages_to_publish(): +def get_messages_to_publish() -> Generator[bytes, None, None]: for i in range(10000): yield f"Hello World {i}!".encode() diff --git a/noxfile.py b/noxfile.py index 818a1836..1022bcfb 100644 --- a/noxfile.py +++ b/noxfile.py @@ -1,8 +1,9 @@ import nox +from nox import Session @nox.session -def docs(session): +def docs(session: Session) -> None: session.install(".") session.install("sphinx", "sphinx-autobuild") session.run("rm", "-rf", "build/html", external=True) diff --git a/poetry.lock b/poetry.lock index 5243c142..f94c176a 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,14 +1,14 @@ -# This file is automatically @generated by Poetry 1.7.1 
and should not be changed by hand. +# This file is automatically @generated by Poetry 1.8.3 and should not be changed by hand. [[package]] name = "aiomisc" -version = "17.3.41" +version = "17.5.26" description = "aiomisc - miscellaneous utils for asyncio" optional = false -python-versions = ">=3.8,<4.0" +python-versions = "<4.0,>=3.8" files = [ - {file = "aiomisc-17.3.41-py3-none-any.whl", hash = "sha256:e580047f01f86eb3c7bd3583895fc84b0e935f850c3fba12611203e25095d1e9"}, - {file = "aiomisc-17.3.41.tar.gz", hash = "sha256:b61241a6d8b01f78cb408630eee702bf8c706863d732913e754c11becc9446dc"}, + {file = "aiomisc-17.5.26-py3-none-any.whl", hash = "sha256:1608362ff510741c19eda79be187c525e7b01bc091d4aa304cee89e2eb38d579"}, + {file = "aiomisc-17.5.26.tar.gz", hash = "sha256:efc37c481ce0501d0b9db8fbf6bde37e1c7a67096c3fd12aee04c83e46523f33"}, ] [package.dependencies] @@ -20,22 +20,23 @@ typing_extensions = {version = "*", markers = "python_version < \"3.10\""} aiohttp = ["aiohttp (>3)"] asgi = ["aiohttp-asgi (>=0.5.2,<0.6.0)"] carbon = ["aiocarbon (>=0.15,<0.16)"] -cron = ["croniter (>=1.3.8,<2.0.0)"] -grpc = ["grpcio (>=1.56.0,<2.0.0)", "grpcio-tools (>=1.56.0,<2.0.0)"] +cron = ["croniter (==2.0)"] +dns = ["dnslib (>=0.9,<0.10)"] +grpc = ["grpcio (>=1.56,<2.0)", "grpcio-reflection (>=1.56,<2.0)", "grpcio-tools (>=1.56,<2.0)"] raven = ["aiohttp (>3)", "raven"] rich = ["rich"] -uvicorn = ["asgiref (>=3.7,<4)", "uvicorn (>=0.22,<1)"] -uvloop = ["uvloop (>=0.14,<1)"] +uvicorn = ["asgiref (>=3.7,<4.0)", "uvicorn (>=0.27,<0.28)"] +uvloop = ["uvloop (>=0.19,<1)"] [[package]] name = "aiomisc-pytest" -version = "1.1.1" +version = "1.1.2" description = "pytest integration for aiomisc" optional = false python-versions = ">=3.7,<4.0" files = [ - {file = "aiomisc_pytest-1.1.1-py3-none-any.whl", hash = "sha256:c07251f79c936c85c7589429f43c728cf1a34b80c0089b268f2cfa6186e77020"}, - {file = "aiomisc_pytest-1.1.1.tar.gz", hash = "sha256:2c378c41b078c0576027de6bf7fbc537a7e69285d23eaf4d45738d5d0de56dd3"}, + {file = "aiomisc_pytest-1.1.2-py3-none-any.whl", hash = "sha256:3519cb40a6ce245c26e18f3a77e43979d0d3675d59fa27757f17d09a16b4cc04"}, + {file = "aiomisc_pytest-1.1.2.tar.gz", hash = "sha256:6636b470d16b9fa99416564eb7302d049fc69f6eda903e7da97ea9f3ccad0fac"}, ] [package.dependencies] @@ -85,13 +86,13 @@ test = ["coverage", "flake8", "mypy", "pexpect", "wheel"] [[package]] name = "babel" -version = "2.14.0" +version = "2.16.0" description = "Internationalization utilities" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "Babel-2.14.0-py3-none-any.whl", hash = "sha256:efb1a25b7118e67ce3a259bed20545c29cb68be8ad2c784c83689981b7a57287"}, - {file = "Babel-2.14.0.tar.gz", hash = "sha256:6919867db036398ba21eb5c7a0f6b28ab8cbc3ae7a73a44ebe34ae74a4e7d363"}, + {file = "babel-2.16.0-py3-none-any.whl", hash = "sha256:368b5b98b37c06b7daf6696391c3240c938b37767d4584413e8438c5c435fa8b"}, + {file = "babel-2.16.0.tar.gz", hash = "sha256:d1f3554ca26605fe173f3de0c65f750f5a42f924499bf134de6423582298e316"}, ] [package.dependencies] @@ -102,13 +103,13 @@ dev = ["freezegun (>=1.0,<2.0)", "pytest (>=6.0)", "pytest-cov"] [[package]] name = "certifi" -version = "2024.2.2" +version = "2024.7.4" description = "Python package for providing Mozilla's CA Bundle." 
optional = false python-versions = ">=3.6" files = [ - {file = "certifi-2024.2.2-py3-none-any.whl", hash = "sha256:dc383c07b76109f368f6106eee2b593b04a011ea4d55f652c6ca24a754d1cdd1"}, - {file = "certifi-2024.2.2.tar.gz", hash = "sha256:0569859f95fc761b18b45ef421b1290a0f65f147e92a1e5eb3e635f9a5e4e66f"}, + {file = "certifi-2024.7.4-py3-none-any.whl", hash = "sha256:c198e21b1289c2ab85ee4e67bb4b4ef3ead0892059901a8d5b622f24a1101e90"}, + {file = "certifi-2024.7.4.tar.gz", hash = "sha256:5a1e7645bc0ec61a09e26c36f6106dd4cf40c6db3a1fb6352b0244e7fb057c7b"}, ] [[package]] @@ -362,22 +363,23 @@ files = [ [[package]] name = "docker" -version = "7.0.0" +version = "7.1.0" description = "A Python library for the Docker Engine API." optional = false python-versions = ">=3.8" files = [ - {file = "docker-7.0.0-py3-none-any.whl", hash = "sha256:12ba681f2777a0ad28ffbcc846a69c31b4dfd9752b47eb425a274ee269c5e14b"}, - {file = "docker-7.0.0.tar.gz", hash = "sha256:323736fb92cd9418fc5e7133bc953e11a9da04f4483f828b527db553f1e7e5a3"}, + {file = "docker-7.1.0-py3-none-any.whl", hash = "sha256:c96b93b7f0a746f9e77d325bcfb87422a3d8bd4f03136ae8a85b37f1898d5fc0"}, + {file = "docker-7.1.0.tar.gz", hash = "sha256:ad8c70e6e3f8926cb8a92619b832b4ea5299e2831c14284663184e200546fa6c"}, ] [package.dependencies] -packaging = ">=14.0" pywin32 = {version = ">=304", markers = "sys_platform == \"win32\""} requests = ">=2.26.0" urllib3 = ">=1.26.0" [package.extras] +dev = ["coverage (==7.2.7)", "pytest (==7.4.2)", "pytest-cov (==4.1.0)", "pytest-timeout (==2.1.0)", "ruff (==0.1.8)"] +docs = ["myst-parser (==0.18.0)", "sphinx (==5.1.1)"] ssh = ["paramiko (>=2.4.3)"] websockets = ["websocket-client (>=1.3.0)"] @@ -404,13 +406,13 @@ files = [ [[package]] name = "exceptiongroup" -version = "1.2.0" +version = "1.2.2" description = "Backport of PEP 654 (exception groups)" optional = false python-versions = ">=3.7" files = [ - {file = "exceptiongroup-1.2.0-py3-none-any.whl", hash = "sha256:4bfd3996ac73b41e9b9628b04e079f193850720ea5945fc96a08633c66912f14"}, - {file = "exceptiongroup-1.2.0.tar.gz", hash = "sha256:91f5c769735f051a4290d52edd0858999b57e5876e9f85937691bd4c9fa3ed68"}, + {file = "exceptiongroup-1.2.2-py3-none-any.whl", hash = "sha256:3111b9d131c238bec2f8f516e123e14ba243563fb135d3fe885990585aa7795b"}, + {file = "exceptiongroup-1.2.2.tar.gz", hash = "sha256:47c2edf7c6738fafb49fd34290706d1a1a2f4d1c6df275526b62cbb4aa5393cc"}, ] [package.extras] @@ -418,29 +420,29 @@ test = ["pytest (>=6)"] [[package]] name = "filelock" -version = "3.13.1" +version = "3.15.4" description = "A platform independent file lock." 
optional = false python-versions = ">=3.8" files = [ - {file = "filelock-3.13.1-py3-none-any.whl", hash = "sha256:57dbda9b35157b05fb3e58ee91448612eb674172fab98ee235ccb0b5bee19a1c"}, - {file = "filelock-3.13.1.tar.gz", hash = "sha256:521f5f56c50f8426f5e03ad3b281b490a87ef15bc6c526f168290f0c7148d44e"}, + {file = "filelock-3.15.4-py3-none-any.whl", hash = "sha256:6ca1fffae96225dab4c6eaf1c4f4f28cd2568d3ec2a44e15a08520504de468e7"}, + {file = "filelock-3.15.4.tar.gz", hash = "sha256:2207938cbc1844345cb01a5a95524dae30f0ce089eba5b00378295a17e3e90cb"}, ] [package.extras] -docs = ["furo (>=2023.9.10)", "sphinx (>=7.2.6)", "sphinx-autodoc-typehints (>=1.24)"] -testing = ["covdefaults (>=2.3)", "coverage (>=7.3.2)", "diff-cover (>=8)", "pytest (>=7.4.3)", "pytest-cov (>=4.1)", "pytest-mock (>=3.12)", "pytest-timeout (>=2.2)"] +docs = ["furo (>=2023.9.10)", "sphinx (>=7.2.6)", "sphinx-autodoc-typehints (>=1.25.2)"] +testing = ["covdefaults (>=2.3)", "coverage (>=7.3.2)", "diff-cover (>=8.0.1)", "pytest (>=7.4.3)", "pytest-asyncio (>=0.21)", "pytest-cov (>=4.1)", "pytest-mock (>=3.12)", "pytest-timeout (>=2.2)", "virtualenv (>=20.26.2)"] typing = ["typing-extensions (>=4.8)"] [[package]] name = "idna" -version = "3.6" +version = "3.8" description = "Internationalized Domain Names in Applications (IDNA)" optional = false -python-versions = ">=3.5" +python-versions = ">=3.6" files = [ - {file = "idna-3.6-py3-none-any.whl", hash = "sha256:c05567e9c24a6b9faaa835c4821bad0590fbb9d5779e7caa6e1cc4978e7eb24f"}, - {file = "idna-3.6.tar.gz", hash = "sha256:9ecdbbd083b06798ae1e86adcbfe8ab1479cf864e4ee30fe4e46a003d12491ca"}, + {file = "idna-3.8-py3-none-any.whl", hash = "sha256:050b4e5baadcd44d760cedbd2b8e639f2ff89bbc7a5730fcc662954303377aac"}, + {file = "idna-3.8.tar.gz", hash = "sha256:d838c2c0ed6fced7693d5e8ab8e734d5f8fda53a039c0164afb0b82e771e3603"}, ] [[package]] @@ -456,22 +458,22 @@ files = [ [[package]] name = "importlib-metadata" -version = "7.0.1" +version = "8.4.0" description = "Read metadata from Python packages" optional = false python-versions = ">=3.8" files = [ - {file = "importlib_metadata-7.0.1-py3-none-any.whl", hash = "sha256:4805911c3a4ec7c3966410053e9ec6a1fecd629117df5adee56dfc9432a1081e"}, - {file = "importlib_metadata-7.0.1.tar.gz", hash = "sha256:f238736bb06590ae52ac1fab06a3a9ef1d8dce2b7a35b5ab329371d6c8f5d2cc"}, + {file = "importlib_metadata-8.4.0-py3-none-any.whl", hash = "sha256:66f342cc6ac9818fc6ff340576acd24d65ba0b3efabb2b4ac08b598965a4a2f1"}, + {file = "importlib_metadata-8.4.0.tar.gz", hash = "sha256:9a547d3bc3608b025f93d403fdd1aae741c24fbb8314df4b155675742ce303c5"}, ] [package.dependencies] zipp = ">=0.5" [package.extras] -docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (<7.2.5)", "sphinx (>=3.5)", "sphinx-lint"] +doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] perf = ["ipython"] -testing = ["flufl.flake8", "importlib-resources (>=1.3)", "packaging", "pyfakefs", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy (>=0.9.1)", "pytest-perf (>=0.9.2)", "pytest-ruff"] +test = ["flufl.flake8", "importlib-resources (>=1.3)", "jaraco.test (>=5.4)", "packaging", "pyfakefs", "pytest (>=6,!=8.1.*)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy", "pytest-perf (>=0.9.2)", "pytest-ruff (>=0.2.1)"] [[package]] name = "iniconfig" @@ -486,13 +488,13 @@ files = [ 
[[package]] name = "jinja2" -version = "3.1.3" +version = "3.1.4" description = "A very fast and expressive template engine." optional = false python-versions = ">=3.7" files = [ - {file = "Jinja2-3.1.3-py3-none-any.whl", hash = "sha256:7d6d50dd97d52cbc355597bd845fabfbac3f551e1f99619e39a35ce8c370b5fa"}, - {file = "Jinja2-3.1.3.tar.gz", hash = "sha256:ac8bd6544d4bb2c9792bf3a159e80bba8fda7f07e81bc3aed565432d5925ba90"}, + {file = "jinja2-3.1.4-py3-none-any.whl", hash = "sha256:bc5dd2abb727a5319567b7a813e6a2e7318c39f4f487cfe6c89c6f9c7d25197d"}, + {file = "jinja2-3.1.4.tar.gz", hash = "sha256:4a3aee7acbbe7303aede8e9648d13b8bf88a429282aa6122a993f0ac800cb369"}, ] [package.dependencies] @@ -503,28 +505,27 @@ i18n = ["Babel (>=2.7)"] [[package]] name = "livereload" -version = "2.6.3" +version = "2.7.0" description = "Python LiveReload is an awesome tool for web developers" optional = false -python-versions = "*" +python-versions = ">=3.7" files = [ - {file = "livereload-2.6.3-py2.py3-none-any.whl", hash = "sha256:ad4ac6f53b2d62bb6ce1a5e6e96f1f00976a32348afedcb4b6d68df2a1d346e4"}, - {file = "livereload-2.6.3.tar.gz", hash = "sha256:776f2f865e59fde56490a56bcc6773b6917366bce0c267c60ee8aaf1a0959869"}, + {file = "livereload-2.7.0-py3-none-any.whl", hash = "sha256:19bee55aff51d5ade6ede0dc709189a0f904d3b906d3ea71641ed548acff3246"}, + {file = "livereload-2.7.0.tar.gz", hash = "sha256:f4ba199ef93248902841e298670eebfe1aa9e148e19b343bc57dbf1b74de0513"}, ] [package.dependencies] -six = "*" -tornado = {version = "*", markers = "python_version > \"2.7\""} +tornado = "*" [[package]] name = "logging-journald" -version = "0.6.7" +version = "0.6.9" description = "Pure python logging handler for writing logs to the journald using native protocol" optional = false -python-versions = ">=3.7,<4.0" +python-versions = "<4.0,>=3.8" files = [ - {file = "logging_journald-0.6.7-py3-none-any.whl", hash = "sha256:ef9333a84fd64fbe1e18ca6f22624af4fc5d92d519a2e2652aa43358548898eb"}, - {file = "logging_journald-0.6.7.tar.gz", hash = "sha256:5fdb576fff2ff82e98be1c7b4f0cbd87f061de5dbed38030f388dd4ba2d52e7d"}, + {file = "logging_journald-0.6.9-py3-none-any.whl", hash = "sha256:c3078b5db5e3e94a75fbf624d3255b3dfbf0bcbde7f36bad77271a7422acd0b0"}, + {file = "logging_journald-0.6.9.tar.gz", hash = "sha256:f4b242de387e24509e8dd490eac7980f98e2f82b259581d2c5936e1b477ec126"}, ] [[package]] @@ -708,52 +709,49 @@ files = [ [[package]] name = "mypy" -version = "0.991" +version = "1.11.2" description = "Optional static typing for Python" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "mypy-0.991-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:7d17e0a9707d0772f4a7b878f04b4fd11f6f5bcb9b3813975a9b13c9332153ab"}, - {file = "mypy-0.991-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0714258640194d75677e86c786e80ccf294972cc76885d3ebbb560f11db0003d"}, - {file = "mypy-0.991-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:0c8f3be99e8a8bd403caa8c03be619544bc2c77a7093685dcf308c6b109426c6"}, - {file = "mypy-0.991-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc9ec663ed6c8f15f4ae9d3c04c989b744436c16d26580eaa760ae9dd5d662eb"}, - {file = "mypy-0.991-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:4307270436fd7694b41f913eb09210faff27ea4979ecbcd849e57d2da2f65305"}, - {file = "mypy-0.991-cp310-cp310-win_amd64.whl", hash = "sha256:901c2c269c616e6cb0998b33d4adbb4a6af0ac4ce5cd078afd7bc95830e62c1c"}, - {file = "mypy-0.991-cp311-cp311-macosx_10_9_universal2.whl", hash = 
"sha256:d13674f3fb73805ba0c45eb6c0c3053d218aa1f7abead6e446d474529aafc372"}, - {file = "mypy-0.991-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:1c8cd4fb70e8584ca1ed5805cbc7c017a3d1a29fb450621089ffed3e99d1857f"}, - {file = "mypy-0.991-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:209ee89fbb0deed518605edddd234af80506aec932ad28d73c08f1400ef80a33"}, - {file = "mypy-0.991-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:37bd02ebf9d10e05b00d71302d2c2e6ca333e6c2a8584a98c00e038db8121f05"}, - {file = "mypy-0.991-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:26efb2fcc6b67e4d5a55561f39176821d2adf88f2745ddc72751b7890f3194ad"}, - {file = "mypy-0.991-cp311-cp311-win_amd64.whl", hash = "sha256:3a700330b567114b673cf8ee7388e949f843b356a73b5ab22dd7cff4742a5297"}, - {file = "mypy-0.991-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:1f7d1a520373e2272b10796c3ff721ea1a0712288cafaa95931e66aa15798813"}, - {file = "mypy-0.991-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:641411733b127c3e0dab94c45af15fea99e4468f99ac88b39efb1ad677da5711"}, - {file = "mypy-0.991-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:3d80e36b7d7a9259b740be6d8d906221789b0d836201af4234093cae89ced0cd"}, - {file = "mypy-0.991-cp37-cp37m-win_amd64.whl", hash = "sha256:e62ebaad93be3ad1a828a11e90f0e76f15449371ffeecca4a0a0b9adc99abcef"}, - {file = "mypy-0.991-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:b86ce2c1866a748c0f6faca5232059f881cda6dda2a893b9a8373353cfe3715a"}, - {file = "mypy-0.991-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:ac6e503823143464538efda0e8e356d871557ef60ccd38f8824a4257acc18d93"}, - {file = "mypy-0.991-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:0cca5adf694af539aeaa6ac633a7afe9bbd760df9d31be55ab780b77ab5ae8bf"}, - {file = "mypy-0.991-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a12c56bf73cdab116df96e4ff39610b92a348cc99a1307e1da3c3768bbb5b135"}, - {file = "mypy-0.991-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:652b651d42f155033a1967739788c436491b577b6a44e4c39fb340d0ee7f0d70"}, - {file = "mypy-0.991-cp38-cp38-win_amd64.whl", hash = "sha256:4175593dc25d9da12f7de8de873a33f9b2b8bdb4e827a7cae952e5b1a342e243"}, - {file = "mypy-0.991-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:98e781cd35c0acf33eb0295e8b9c55cdbef64fcb35f6d3aa2186f289bed6e80d"}, - {file = "mypy-0.991-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:6d7464bac72a85cb3491c7e92b5b62f3dcccb8af26826257760a552a5e244aa5"}, - {file = "mypy-0.991-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c9166b3f81a10cdf9b49f2d594b21b31adadb3d5e9db9b834866c3258b695be3"}, - {file = "mypy-0.991-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b8472f736a5bfb159a5e36740847808f6f5b659960115ff29c7cecec1741c648"}, - {file = "mypy-0.991-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5e80e758243b97b618cdf22004beb09e8a2de1af481382e4d84bc52152d1c476"}, - {file = "mypy-0.991-cp39-cp39-win_amd64.whl", hash = "sha256:74e259b5c19f70d35fcc1ad3d56499065c601dfe94ff67ae48b85596b9ec1461"}, - {file = "mypy-0.991-py3-none-any.whl", hash = "sha256:de32edc9b0a7e67c2775e574cb061a537660e51210fbf6006b0b36ea695ae9bb"}, - {file = "mypy-0.991.tar.gz", hash = "sha256:3c0165ba8f354a6d9881809ef29f1a9318a236a6d81c690094c5df32107bde06"}, + {file = "mypy-1.11.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d42a6dd818ffce7be66cce644f1dff482f1d97c53ca70908dff0b9ddc120b77a"}, + {file = "mypy-1.11.2-cp310-cp310-macosx_11_0_arm64.whl", hash = 
"sha256:801780c56d1cdb896eacd5619a83e427ce436d86a3bdf9112527f24a66618fef"}, + {file = "mypy-1.11.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:41ea707d036a5307ac674ea172875f40c9d55c5394f888b168033177fce47383"}, + {file = "mypy-1.11.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:6e658bd2d20565ea86da7d91331b0eed6d2eee22dc031579e6297f3e12c758c8"}, + {file = "mypy-1.11.2-cp310-cp310-win_amd64.whl", hash = "sha256:478db5f5036817fe45adb7332d927daa62417159d49783041338921dcf646fc7"}, + {file = "mypy-1.11.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:75746e06d5fa1e91bfd5432448d00d34593b52e7e91a187d981d08d1f33d4385"}, + {file = "mypy-1.11.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:a976775ab2256aadc6add633d44f100a2517d2388906ec4f13231fafbb0eccca"}, + {file = "mypy-1.11.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:cd953f221ac1379050a8a646585a29574488974f79d8082cedef62744f0a0104"}, + {file = "mypy-1.11.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:57555a7715c0a34421013144a33d280e73c08df70f3a18a552938587ce9274f4"}, + {file = "mypy-1.11.2-cp311-cp311-win_amd64.whl", hash = "sha256:36383a4fcbad95f2657642a07ba22ff797de26277158f1cc7bd234821468b1b6"}, + {file = "mypy-1.11.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:e8960dbbbf36906c5c0b7f4fbf2f0c7ffb20f4898e6a879fcf56a41a08b0d318"}, + {file = "mypy-1.11.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:06d26c277962f3fb50e13044674aa10553981ae514288cb7d0a738f495550b36"}, + {file = "mypy-1.11.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:6e7184632d89d677973a14d00ae4d03214c8bc301ceefcdaf5c474866814c987"}, + {file = "mypy-1.11.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:3a66169b92452f72117e2da3a576087025449018afc2d8e9bfe5ffab865709ca"}, + {file = "mypy-1.11.2-cp312-cp312-win_amd64.whl", hash = "sha256:969ea3ef09617aff826885a22ece0ddef69d95852cdad2f60c8bb06bf1f71f70"}, + {file = "mypy-1.11.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:37c7fa6121c1cdfcaac97ce3d3b5588e847aa79b580c1e922bb5d5d2902df19b"}, + {file = "mypy-1.11.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:4a8a53bc3ffbd161b5b2a4fff2f0f1e23a33b0168f1c0778ec70e1a3d66deb86"}, + {file = "mypy-1.11.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:2ff93107f01968ed834f4256bc1fc4475e2fecf6c661260066a985b52741ddce"}, + {file = "mypy-1.11.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:edb91dded4df17eae4537668b23f0ff6baf3707683734b6a818d5b9d0c0c31a1"}, + {file = "mypy-1.11.2-cp38-cp38-win_amd64.whl", hash = "sha256:ee23de8530d99b6db0573c4ef4bd8f39a2a6f9b60655bf7a1357e585a3486f2b"}, + {file = "mypy-1.11.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:801ca29f43d5acce85f8e999b1e431fb479cb02d0e11deb7d2abb56bdaf24fd6"}, + {file = "mypy-1.11.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:af8d155170fcf87a2afb55b35dc1a0ac21df4431e7d96717621962e4b9192e70"}, + {file = "mypy-1.11.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:f7821776e5c4286b6a13138cc935e2e9b6fde05e081bdebf5cdb2bb97c9df81d"}, + {file = "mypy-1.11.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:539c570477a96a4e6fb718b8d5c3e0c0eba1f485df13f86d2970c91f0673148d"}, + {file = "mypy-1.11.2-cp39-cp39-win_amd64.whl", hash = "sha256:3f14cd3d386ac4d05c5a39a51b84387403dadbd936e17cb35882134d4f8f0d24"}, + {file = 
"mypy-1.11.2-py3-none-any.whl", hash = "sha256:b499bc07dbdcd3de92b0a8b29fdf592c111276f6a12fe29c30f6c417dd546d12"}, + {file = "mypy-1.11.2.tar.gz", hash = "sha256:7f9993ad3e0ffdc95c2a14b66dee63729f021968bff8ad911867579c65d13a79"}, ] [package.dependencies] -mypy-extensions = ">=0.4.3" +mypy-extensions = ">=1.0.0" tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""} -typing-extensions = ">=3.10" +typing-extensions = ">=4.6.0" [package.extras] dmypy = ["psutil (>=4.0)"] install-types = ["pip"] -python2 = ["typed-ast (>=1.4.0,<2)"] +mypyc = ["setuptools (>=50)"] reports = ["lxml"] [[package]] @@ -789,13 +787,13 @@ tox-to-nox = ["jinja2", "tox"] [[package]] name = "packaging" -version = "23.2" +version = "24.1" description = "Core utilities for Python packages" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "packaging-23.2-py3-none-any.whl", hash = "sha256:8c491190033a9af7e1d931d0b5dacc2ef47509b34dd0de67ed209b5203fc88c7"}, - {file = "packaging-23.2.tar.gz", hash = "sha256:048fb0e9405036518eaaf48a55953c750c11e1a1b68e0dd1a9d62ed0c092cfc5"}, + {file = "packaging-24.1-py3-none-any.whl", hash = "sha256:5b8f2217dbdbd2f7f384c41c628544e6d52f2d0f53c6d0c3ea61aa5d1d7ff124"}, + {file = "packaging-24.1.tar.gz", hash = "sha256:026ed72c8ed3fcce5bf8950572258698927fd1dbda10a5e981cdf0ac37f4f002"}, ] [[package]] @@ -815,28 +813,29 @@ testing = ["coverage", "flake8", "flake8-comprehensions", "flake8-deprecated", " [[package]] name = "platformdirs" -version = "4.2.0" -description = "A small Python package for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." +version = "4.2.2" +description = "A small Python package for determining appropriate platform-specific dirs, e.g. a `user data dir`." optional = false python-versions = ">=3.8" files = [ - {file = "platformdirs-4.2.0-py3-none-any.whl", hash = "sha256:0614df2a2f37e1a662acbd8e2b25b92ccf8632929bc6d43467e17fe89c75e068"}, - {file = "platformdirs-4.2.0.tar.gz", hash = "sha256:ef0cc731df711022c174543cb70a9b5bd22e5a9337c8624ef2c2ceb8ddad8768"}, + {file = "platformdirs-4.2.2-py3-none-any.whl", hash = "sha256:2d7a1657e36a80ea911db832a8a6ece5ee53d8de21edd5cc5879af6530b1bfee"}, + {file = "platformdirs-4.2.2.tar.gz", hash = "sha256:38b7b51f512eed9e84a22788b4bce1de17c0adb134d6becb09836e37d8654cd3"}, ] [package.extras] docs = ["furo (>=2023.9.10)", "proselint (>=0.13)", "sphinx (>=7.2.6)", "sphinx-autodoc-typehints (>=1.25.2)"] test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.4.3)", "pytest-cov (>=4.1)", "pytest-mock (>=3.12)"] +type = ["mypy (>=1.8)"] [[package]] name = "pluggy" -version = "1.4.0" +version = "1.5.0" description = "plugin and hook calling mechanisms for python" optional = false python-versions = ">=3.8" files = [ - {file = "pluggy-1.4.0-py3-none-any.whl", hash = "sha256:7db9f7b503d67d1c5b95f59773ebb58a8c1c288129a88665838012cfb07b8981"}, - {file = "pluggy-1.4.0.tar.gz", hash = "sha256:8c85c2876142a764e5b7548e7d9a0e0ddb46f5185161049a79b7e974454223be"}, + {file = "pluggy-1.5.0-py3-none-any.whl", hash = "sha256:44e1ad92c8ca002de6377e165f3e0f1be63266ab4d554740532335b9d75ea669"}, + {file = "pluggy-1.5.0.tar.gz", hash = "sha256:2cffa88e94fdc978c4c574f15f9e59b7f4201d439195c3715ca9e2486f1d0cf1"}, ] [package.extras] @@ -856,13 +855,13 @@ files = [ [[package]] name = "pycodestyle" -version = "2.11.1" +version = "2.12.1" description = "Python style guide checker" optional = false python-versions = ">=3.8" files = [ - {file = "pycodestyle-2.11.1-py2.py3-none-any.whl", hash 
= "sha256:44fe31000b2d866f2e41841b18528a505fbd7fef9017b04eff4e2648a0fadc67"}, - {file = "pycodestyle-2.11.1.tar.gz", hash = "sha256:41ba0e7afc9752dfb53ced5489e89f8186be00e599e712660695b7a75ff2663f"}, + {file = "pycodestyle-2.12.1-py2.py3-none-any.whl", hash = "sha256:46f0fb92069a7c28ab7bb558f05bfc0110dac69a0cd23c61ea0040283a9d78b3"}, + {file = "pycodestyle-2.12.1.tar.gz", hash = "sha256:6838eae08bbce4f6accd5d5572075c63626a15ee3e6f842df996bf62f6d73521"}, ] [[package]] @@ -895,17 +894,16 @@ files = [ [[package]] name = "pygments" -version = "2.17.2" +version = "2.18.0" description = "Pygments is a syntax highlighting package written in Python." optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "pygments-2.17.2-py3-none-any.whl", hash = "sha256:b27c2826c47d0f3219f29554824c30c5e8945175d888647acd804ddd04af846c"}, - {file = "pygments-2.17.2.tar.gz", hash = "sha256:da46cec9fd2de5be3a8a784f434e4c4ab670b4ff54d605c4c2717e9d49c4c367"}, + {file = "pygments-2.18.0-py3-none-any.whl", hash = "sha256:b8e6aca0523f3ab76fee51799c488e38782ac06eafcf95e7ba832985c8e7b13a"}, + {file = "pygments-2.18.0.tar.gz", hash = "sha256:786ff802f32e91311bff3889f6e9a86e81505fe99f2735bb6d60ae0c5004f199"}, ] [package.extras] -plugins = ["importlib-metadata"] windows-terminal = ["colorama (>=0.4.6)"] [[package]] @@ -1027,13 +1025,13 @@ files = [ [[package]] name = "requests" -version = "2.31.0" +version = "2.32.3" description = "Python HTTP for Humans." optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "requests-2.31.0-py3-none-any.whl", hash = "sha256:58cd2187c01e70e6e26505bca751777aa9f2ee0b7f4300988b709f44e013003f"}, - {file = "requests-2.31.0.tar.gz", hash = "sha256:942c5a758f98d790eaed1a29cb6eefc7ffb0d1cf7af05c3d2791656dbd6ad1e1"}, + {file = "requests-2.32.3-py3-none-any.whl", hash = "sha256:70761cfe03c773ceb22aa2f671b4757976145175cdfca038c02654d061d6dcc6"}, + {file = "requests-2.32.3.tar.gz", hash = "sha256:55365417734eb18255590a9ff9eb97e9e1da868d4ccd6402399eaf68af20a760"}, ] [package.dependencies] @@ -1048,40 +1046,29 @@ use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] [[package]] name = "setuptools" -version = "69.1.1" +version = "69.5.1" description = "Easily download, build, install, upgrade, and uninstall Python packages" optional = false python-versions = ">=3.8" files = [ - {file = "setuptools-69.1.1-py3-none-any.whl", hash = "sha256:02fa291a0471b3a18b2b2481ed902af520c69e8ae0919c13da936542754b4c56"}, - {file = "setuptools-69.1.1.tar.gz", hash = "sha256:5c0806c7d9af348e6dd3777b4f4dbb42c7ad85b190104837488eab9a7c945cf8"}, + {file = "setuptools-69.5.1-py3-none-any.whl", hash = "sha256:c636ac361bc47580504644275c9ad802c50415c7522212252c033bd15f301f32"}, + {file = "setuptools-69.5.1.tar.gz", hash = "sha256:6c1fccdac05a97e598fb0ae3bbed5904ccb317337a51139dcd51453611bbb987"}, ] [package.extras] -docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx (<7.2.5)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier"] -testing = ["build[virtualenv]", "filelock (>=3.4.0)", "flake8-2020", "ini2toml[lite] (>=0.9)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "packaging (>=23.2)", "pip (>=19.1)", "pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-home (>=0.5)", "pytest-mypy (>=0.9.1)", "pytest-perf", 
"pytest-ruff (>=0.2.1)", "pytest-timeout", "pytest-xdist", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] +docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier"] +testing = ["build[virtualenv]", "filelock (>=3.4.0)", "importlib-metadata", "ini2toml[lite] (>=0.9)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "mypy (==1.9)", "packaging (>=23.2)", "pip (>=19.1)", "pytest (>=6,!=8.1.1)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-home (>=0.5)", "pytest-mypy", "pytest-perf", "pytest-ruff (>=0.2.1)", "pytest-timeout", "pytest-xdist (>=3)", "tomli", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] testing-integration = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "packaging (>=23.2)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"] [[package]] name = "shortuuid" -version = "1.0.12" +version = "1.0.13" description = "A generator library for concise, unambiguous and URL-safe UUIDs." optional = false -python-versions = ">=3.5" -files = [ - {file = "shortuuid-1.0.12-py3-none-any.whl", hash = "sha256:dd855d8c10ced5dc2790b97e31f16ad55a6f4871b478feb4351dd3128a8aa82b"}, - {file = "shortuuid-1.0.12.tar.gz", hash = "sha256:c39f1b348b3c1e9b115a954b33b76c8c522d2d177a9d20acdbb20d24fac3ccfd"}, -] - -[[package]] -name = "six" -version = "1.16.0" -description = "Python 2 and 3 compatibility utilities" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" +python-versions = ">=3.6" files = [ - {file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"}, - {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"}, + {file = "shortuuid-1.0.13-py3-none-any.whl", hash = "sha256:a482a497300b49b4953e15108a7913244e1bb0d41f9d332f5e9925dba33a3c5a"}, + {file = "shortuuid-1.0.13.tar.gz", hash = "sha256:3bb9cf07f606260584b1df46399c0b87dd84773e7b25912b7e391e30797c5e72"}, ] [[package]] @@ -1294,33 +1281,33 @@ files = [ [[package]] name = "tornado" -version = "6.4" +version = "6.4.1" description = "Tornado is a Python web framework and asynchronous networking library, originally developed at FriendFeed." 
optional = false -python-versions = ">= 3.8" +python-versions = ">=3.8" files = [ - {file = "tornado-6.4-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:02ccefc7d8211e5a7f9e8bc3f9e5b0ad6262ba2fbb683a6443ecc804e5224ce0"}, - {file = "tornado-6.4-cp38-abi3-macosx_10_9_x86_64.whl", hash = "sha256:27787de946a9cffd63ce5814c33f734c627a87072ec7eed71f7fc4417bb16263"}, - {file = "tornado-6.4-cp38-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f7894c581ecdcf91666a0912f18ce5e757213999e183ebfc2c3fdbf4d5bd764e"}, - {file = "tornado-6.4-cp38-abi3-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e43bc2e5370a6a8e413e1e1cd0c91bedc5bd62a74a532371042a18ef19e10579"}, - {file = "tornado-6.4-cp38-abi3-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f0251554cdd50b4b44362f73ad5ba7126fc5b2c2895cc62b14a1c2d7ea32f212"}, - {file = "tornado-6.4-cp38-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:fd03192e287fbd0899dd8f81c6fb9cbbc69194d2074b38f384cb6fa72b80e9c2"}, - {file = "tornado-6.4-cp38-abi3-musllinux_1_1_i686.whl", hash = "sha256:88b84956273fbd73420e6d4b8d5ccbe913c65d31351b4c004ae362eba06e1f78"}, - {file = "tornado-6.4-cp38-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:71ddfc23a0e03ef2df1c1397d859868d158c8276a0603b96cf86892bff58149f"}, - {file = "tornado-6.4-cp38-abi3-win32.whl", hash = "sha256:6f8a6c77900f5ae93d8b4ae1196472d0ccc2775cc1dfdc9e7727889145c45052"}, - {file = "tornado-6.4-cp38-abi3-win_amd64.whl", hash = "sha256:10aeaa8006333433da48dec9fe417877f8bcc21f48dda8d661ae79da357b2a63"}, - {file = "tornado-6.4.tar.gz", hash = "sha256:72291fa6e6bc84e626589f1c29d90a5a6d593ef5ae68052ee2ef000dfd273dee"}, + {file = "tornado-6.4.1-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:163b0aafc8e23d8cdc3c9dfb24c5368af84a81e3364745ccb4427669bf84aec8"}, + {file = "tornado-6.4.1-cp38-abi3-macosx_10_9_x86_64.whl", hash = "sha256:6d5ce3437e18a2b66fbadb183c1d3364fb03f2be71299e7d10dbeeb69f4b2a14"}, + {file = "tornado-6.4.1-cp38-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e2e20b9113cd7293f164dc46fffb13535266e713cdb87bd2d15ddb336e96cfc4"}, + {file = "tornado-6.4.1-cp38-abi3-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8ae50a504a740365267b2a8d1a90c9fbc86b780a39170feca9bcc1787ff80842"}, + {file = "tornado-6.4.1-cp38-abi3-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:613bf4ddf5c7a95509218b149b555621497a6cc0d46ac341b30bd9ec19eac7f3"}, + {file = "tornado-6.4.1-cp38-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:25486eb223babe3eed4b8aecbac33b37e3dd6d776bc730ca14e1bf93888b979f"}, + {file = "tornado-6.4.1-cp38-abi3-musllinux_1_2_i686.whl", hash = "sha256:454db8a7ecfcf2ff6042dde58404164d969b6f5d58b926da15e6b23817950fc4"}, + {file = "tornado-6.4.1-cp38-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:a02a08cc7a9314b006f653ce40483b9b3c12cda222d6a46d4ac63bb6c9057698"}, + {file = "tornado-6.4.1-cp38-abi3-win32.whl", hash = "sha256:d9a566c40b89757c9aa8e6f032bcdb8ca8795d7c1a9762910c722b1635c9de4d"}, + {file = "tornado-6.4.1-cp38-abi3-win_amd64.whl", hash = "sha256:b24b8982ed444378d7f21d563f4180a2de31ced9d8d84443907a0a64da2072e7"}, + {file = "tornado-6.4.1.tar.gz", hash = "sha256:92d3ab53183d8c50f8204a51e6f91d18a15d5ef261e84d452800d4ff6fc504e9"}, ] [[package]] name = "types-docutils" -version = "0.20.0.20240227" +version = "0.21.0.20240724" description = "Typing stubs for docutils" 
optional = false python-versions = ">=3.8" files = [ - {file = "types-docutils-0.20.0.20240227.tar.gz", hash = "sha256:7f2dbb02356024b5db3efd9df26b236da050ad2eada89872e5284b4a394b7761"}, - {file = "types_docutils-0.20.0.20240227-py3-none-any.whl", hash = "sha256:51c139502ba0add871392cbc37200a3a64096e61eeb6396727443ba6d38ae579"}, + {file = "types-docutils-0.21.0.20240724.tar.gz", hash = "sha256:29ff7e27660f4fe76ea61d7e54d05ca3ce3b733ca9e8e8721e0fa587dbc10489"}, + {file = "types_docutils-0.21.0.20240724-py3-none-any.whl", hash = "sha256:bf51c6c488d23c0412f9b3ba10686fb1a6cb0b957ef04b45128d8a55c79ebb00"}, ] [[package]] @@ -1339,24 +1326,24 @@ types-docutils = "*" [[package]] name = "typing-extensions" -version = "4.10.0" +version = "4.12.2" description = "Backported and Experimental Type Hints for Python 3.8+" optional = false python-versions = ">=3.8" files = [ - {file = "typing_extensions-4.10.0-py3-none-any.whl", hash = "sha256:69b1a937c3a517342112fb4c6df7e72fc39a38e7891a5730ed4985b5214b5475"}, - {file = "typing_extensions-4.10.0.tar.gz", hash = "sha256:b0abd7c89e8fb96f98db18d86106ff1d90ab692004eb746cf6eda2682f91b3cb"}, + {file = "typing_extensions-4.12.2-py3-none-any.whl", hash = "sha256:04e5ca0351e0f3f85c6853954072df659d0d13fac324d0072316b67d7794700d"}, + {file = "typing_extensions-4.12.2.tar.gz", hash = "sha256:1a7ead55c7e559dd4dee8856e3a88b41225abfe1ce8df57b7c13915fe121ffb8"}, ] [[package]] name = "urllib3" -version = "2.2.1" +version = "2.2.2" description = "HTTP library with thread-safe connection pooling, file post, and more." optional = false python-versions = ">=3.8" files = [ - {file = "urllib3-2.2.1-py3-none-any.whl", hash = "sha256:450b20ec296a467077128bff42b73080516e71b56ff59a60a02bef2232c4fa9d"}, - {file = "urllib3-2.2.1.tar.gz", hash = "sha256:d0570876c61ab9e520d776c38acbbb5b05a776d3f9ff98a5c8fd5162a444cf19"}, + {file = "urllib3-2.2.2-py3-none-any.whl", hash = "sha256:a448b2f64d686155468037e1ace9f2d2199776e17f0a46610480d311f73e3472"}, + {file = "urllib3-2.2.2.tar.gz", hash = "sha256:dd505485549a7a552833da5e6063639d0d177c04f23bc3864e41e5dc5f612168"}, ] [package.extras] @@ -1411,13 +1398,13 @@ test = ["Cython (>=0.29.36,<0.30.0)", "aiohttp (==3.9.0b0)", "aiohttp (>=3.8.1)" [[package]] name = "virtualenv" -version = "20.25.1" +version = "20.26.3" description = "Virtual Python Environment builder" optional = false python-versions = ">=3.7" files = [ - {file = "virtualenv-20.25.1-py3-none-any.whl", hash = "sha256:961c026ac520bac5f69acb8ea063e8a4f071bcc9457b9c1f28f6b085c511583a"}, - {file = "virtualenv-20.25.1.tar.gz", hash = "sha256:e08e13ecdca7a0bd53798f356d5831434afa5b07b93f0abdf0797b7a06ffe197"}, + {file = "virtualenv-20.26.3-py3-none-any.whl", hash = "sha256:8cc4a31139e796e9a7de2cd5cf2489de1217193116a8fd42328f1bd65f434589"}, + {file = "virtualenv-20.26.3.tar.gz", hash = "sha256:4c43a2a236279d9ea36a0d76f98d84bd6ca94ac4e0f4a3b9d46d05e10fea542a"}, ] [package.dependencies] @@ -1426,7 +1413,7 @@ filelock = ">=3.12.2,<4" platformdirs = ">=3.9.1,<5" [package.extras] -docs = ["furo (>=2023.7.26)", "proselint (>=0.13)", "sphinx (>=7.1.2)", "sphinx-argparse (>=0.4)", "sphinxcontrib-towncrier (>=0.2.1a0)", "towncrier (>=23.6)"] +docs = ["furo (>=2023.7.26)", "proselint (>=0.13)", "sphinx (>=7.1.2,!=7.3)", "sphinx-argparse (>=0.4)", "sphinxcontrib-towncrier (>=0.2.1a0)", "towncrier (>=23.6)"] test = ["covdefaults (>=2.3)", "coverage (>=7.2.7)", "coverage-enable-subprocess (>=1)", "flaky (>=3.7)", "packaging (>=23.1)", "pytest (>=7.4)", "pytest-env (>=0.8.2)", "pytest-freezer (>=0.4.8)", 
"pytest-mock (>=3.11.1)", "pytest-randomly (>=3.12)", "pytest-timeout (>=2.1)", "setuptools (>=68)", "time-machine (>=2.10)"] [[package]] @@ -1613,20 +1600,24 @@ multidict = ">=4.0" [[package]] name = "zipp" -version = "3.17.0" +version = "3.20.1" description = "Backport of pathlib-compatible object wrapper for zip files" optional = false python-versions = ">=3.8" files = [ - {file = "zipp-3.17.0-py3-none-any.whl", hash = "sha256:0e923e726174922dce09c53c59ad483ff7bbb8e572e00c7f7c46b88556409f31"}, - {file = "zipp-3.17.0.tar.gz", hash = "sha256:84e64a1c28cf7e91ed2078bb8cc8c259cb19b76942096c8d7b84947690cabaf0"}, + {file = "zipp-3.20.1-py3-none-any.whl", hash = "sha256:9960cd8967c8f85a56f920d5d507274e74f9ff813a0ab8889a5b5be2daf44064"}, + {file = "zipp-3.20.1.tar.gz", hash = "sha256:c22b14cc4763c5a5b04134207736c107db42e9d3ef2d9779d465f5f1bcba572b"}, ] [package.extras] -docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (<7.2.5)", "sphinx (>=3.5)", "sphinx-lint"] -testing = ["big-O", "jaraco.functools", "jaraco.itertools", "more-itertools", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-ignore-flaky", "pytest-mypy (>=0.9.1)", "pytest-ruff"] +check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1)"] +cover = ["pytest-cov"] +doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] +enabler = ["pytest-enabler (>=2.2)"] +test = ["big-O", "importlib-resources", "jaraco.functools", "jaraco.itertools", "jaraco.test", "more-itertools", "pytest (>=6,!=8.1.*)", "pytest-ignore-flaky"] +type = ["pytest-mypy"] [metadata] lock-version = "2.0" python-versions = "^3.8" -content-hash = "a8d27cbd8d26a79f324915969fae155576f7509726d0e5c2ce16e8a5eae2eb9d" +content-hash = "6defca072e29cad69f4647832f24dbb4e3379e59f2011172c310d5c7a52755b5" diff --git a/pyproject.toml b/pyproject.toml index 90819662..135ebf69 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -39,7 +39,7 @@ packages = [{ include = "aio_pika" }] python = "^3.8" aiormq = "~6.8.0" yarl = [{ version = '*'}] -typing_extensions = [{ version = '*', python = "< 3.8" }] +exceptiongroup = "^1" # for pkg_resources setuptools = [{ version = '*', python = "< 3.8" }] @@ -49,7 +49,7 @@ aiomisc-pytest = "^1.1.1" collective-checkdocs = "^0.2" coverage = "^6.5.0" coveralls = "^3.3.1" -mypy = "^0.991" +mypy = "^1" nox = "^2022.11.21" pylama = "^8.4.1" pytest = "^7.4.0" @@ -63,6 +63,9 @@ types-setuptools = "^65.6.0.2" setuptools = "^69.0.3" testcontainers = "^3.7.1" +[tool.poetry.group.uvloop] +optional = true + [tool.poetry.group.uvloop.dependencies] uvloop = "^0.19" diff --git a/tests/test_amqp.py b/tests/test_amqp.py index 3aff94fb..a9a5a6ff 100644 --- a/tests/test_amqp.py +++ b/tests/test_amqp.py @@ -4,7 +4,7 @@ import time import uuid from datetime import datetime, timezone -from typing import Callable, Optional +from typing import Callable, Optional, List from unittest import mock import aiormq.exceptions @@ -23,6 +23,7 @@ ) from aio_pika.exchange import ExchangeType from aio_pika.message import ReturnedMessage +from aio_pika.queue import QueueIterator from tests import get_random_name @@ -59,12 +60,13 @@ async def test_channel_close(self, connection: aio_pika.Connection): closed = False def on_close( - ch: aio_pika.abc.AbstractChannel, - exc: Optional[Exception] = None, + ch: Optional[aio_pika.abc.AbstractChannel], + exc: Optional[BaseException] = None, ): nonlocal event, 
closed log.info("Close called") closed = True + assert ch is not None assert ch.is_closed event.set() @@ -1282,7 +1284,7 @@ async def publisher(): async def test_async_for_queue_context( self, event_loop, connection, declare_queue, - ): + ) -> None: channel2 = await self.create_channel(connection) queue = await declare_queue( @@ -1291,31 +1293,46 @@ async def test_async_for_queue_context( channel=channel2, ) - messages = 100 + messages: asyncio.Queue[bytes] = asyncio.Queue(100) + condition = asyncio.Condition() - async def publisher(): + async def publisher() -> None: channel1 = await self.create_channel(connection) - for i in range(messages): + for i in range(messages.maxsize): + body = str(i).encode() + await messages.put(body) await channel1.default_exchange.publish( - Message(body=str(i).encode()), routing_key=queue.name, + Message(body=body), routing_key=queue.name, ) - event_loop.create_task(publisher()) + async def consumer() -> None: + async with queue.iterator() as queue_iterator: + async for message in queue_iterator: + async with message.process(): + async with condition: + data.append(message.body) + messages.task_done() + condition.notify() + + async def application_stop_request() -> None: + async with condition: + await condition.wait_for(messages.full) + await messages.join() + await asyncio.sleep(1) + await connection.close() - count = 0 - data = list() + p = event_loop.create_task(publisher()) + c = event_loop.create_task(consumer()) + asr = event_loop.create_task(application_stop_request()) - async with queue.iterator() as queue_iterator: - async for message in queue_iterator: - async with message.process(): - count += 1 - data.append(message.body) + data: List[bytes] = list() - if count >= messages: - break + await asyncio.gather(p, c, asr) - assert data == list(map(lambda x: str(x).encode(), range(messages))) + assert data == list( + map(lambda x: str(x).encode(), range(messages.maxsize)) + ) async def test_async_with_connection( self, create_connection: Callable, @@ -1331,32 +1348,47 @@ async def test_async_with_connection( channel=channel, ) - messages = 100 + condition = asyncio.Condition() + messages: asyncio.Queue[bytes] = asyncio.Queue( + 100 + ) async def publisher(): channel1 = await self.create_channel(connection) - for i in range(messages): + for i in range(messages.maxsize): + body = str(i).encode() + await messages.put(body) await channel1.default_exchange.publish( - Message(body=str(i).encode()), routing_key=queue.name, + Message(body=body), routing_key=queue.name, ) - event_loop.create_task(publisher()) - - count = 0 data = list() - async with queue.iterator() as queue_iterator: - async for message in queue_iterator: - async with message.process(): - count += 1 - data.append(message.body) + async def consume_loop(): + async with queue.iterator() as queue_iterator: + async for message in queue_iterator: + async with message.process(): + async with condition: + data.append(message.body) + condition.notify() + messages.task_done() + + async def application_close_request(): + async with condition: + await condition.wait_for(messages.full) + await messages.join() + await asyncio.sleep(1) + await connection.close() - if count >= messages: - break + p = event_loop.create_task(publisher()) + cl = event_loop.create_task(consume_loop()) + acr = event_loop.create_task(application_close_request()) + + await asyncio.gather(p, cl, acr) assert data == list( - map(lambda x: str(x).encode(), range(messages)), + map(lambda x: str(x).encode(), range(messages.maxsize)), ) 
assert channel.is_closed @@ -1412,47 +1444,37 @@ async def test_channel_locked_resource( async def test_queue_iterator_close_was_called_twice( self, create_connection: Callable, event_loop, declare_queue, ): - future = event_loop.create_future() event = asyncio.Event() queue_name = get_random_name() + iterator: QueueIterator async def task_inner(): - nonlocal future nonlocal event + nonlocal iterator nonlocal create_connection - try: - connection = await create_connection() - - async with connection: - channel = await self.create_channel(connection) + connection = await create_connection() - queue = await declare_queue( - queue_name, channel=channel, cleanup=False, - ) + async with connection: + channel = await self.create_channel(connection) - async with queue.iterator() as q: - event.set() + queue = await declare_queue( + queue_name, channel=channel, cleanup=False, + ) - async for message in q: - with message.process(): - break + async with queue.iterator() as iterator: + event.set() - except asyncio.CancelledError as e: - future.set_exception(e) - raise + async for message in iterator: + async with message.process(): + pytest.fail("who sent this message?") task = event_loop.create_task(task_inner()) await event.wait() - event_loop.call_soon(task.cancel) - - with pytest.raises(asyncio.CancelledError): - await task - - with pytest.raises(asyncio.CancelledError): - await future + await iterator.close() + await task async def test_queue_iterator_close_with_noack( self, @@ -1461,7 +1483,7 @@ async def test_queue_iterator_close_with_noack( add_cleanup: Callable, declare_queue, ): - messages = [] + messages: asyncio.Queue = asyncio.Queue() queue_name = get_random_name("test_queue") body = get_random_name("test_body").encode() @@ -1482,7 +1504,7 @@ async def task_inner(): async with queue.iterator(no_ack=True) as q: async for message in q: - messages.append(message) + await messages.put(message) return async with await create_connection() as connection: @@ -1499,14 +1521,70 @@ async def task_inner(): task = event_loop.create_task(task_inner()) - await task + message = await messages.get() - assert messages - assert messages[0].body == body + assert message + assert message.body == body finally: + await task await queue.delete() + async def test_queue_iterator_throws_cancelled_error( + self, + create_connection: Callable, + event_loop, + add_cleanup: Callable, + declare_queue, + ): + event_loop.set_debug(True) + queue_name = get_random_name("test_queue") + + connection = await create_connection() + + async with connection: + channel = await self.create_channel(connection) + + queue = await channel.declare_queue( + queue_name, + ) + + iterator = queue.iterator() + task = event_loop.create_task(iterator.__anext__()) + done, pending = await asyncio.wait({task}, timeout=1) + assert not done + task.cancel() + + with pytest.raises(asyncio.CancelledError): + await task + + async def test_queue_iterator_throws_timeout_error( + self, + create_connection: Callable, + event_loop, + add_cleanup: Callable, + declare_queue, + ): + event_loop.set_debug(True) + queue_name = get_random_name("test_queue") + + connection = await create_connection() + + async with connection: + channel = await self.create_channel(connection) + + queue = await channel.declare_queue( + queue_name, + ) + + iterator = queue.iterator(timeout=1) + task = event_loop.create_task(iterator.__anext__()) + done, pending = await asyncio.wait({task}, timeout=5) + assert done + + with pytest.raises(asyncio.TimeoutError): + await task + async def 
test_passive_for_exchange( self, declare_exchange: Callable, connection, add_cleanup: Callable, ): diff --git a/tests/test_amqp_robust.py b/tests/test_amqp_robust.py index 28985324..4b50270a 100644 --- a/tests/test_amqp_robust.py +++ b/tests/test_amqp_robust.py @@ -61,7 +61,7 @@ async def test_channel_blocking_timeout_reopen(self, connection): close_reasons = [] close_event = asyncio.Event() reopen_event = asyncio.Event() - channel.reopen_callbacks.add(lambda _: reopen_event.set()) + channel.reopen_callbacks.add(lambda *_: reopen_event.set()) queue_name = get_random_name("test_channel_blocking_timeout_reopen") diff --git a/tests/test_amqp_robust_proxy.py b/tests/test_amqp_robust_proxy.py index c9eb2d01..8d7c4b80 100644 --- a/tests/test_amqp_robust_proxy.py +++ b/tests/test_amqp_robust_proxy.py @@ -3,7 +3,7 @@ import logging from contextlib import suppress from functools import partial -from typing import Callable, List, Type +from typing import Callable, List, Type, Optional import aiomisc import aiormq.exceptions @@ -13,7 +13,7 @@ from yarl import URL import aio_pika -from aio_pika.abc import AbstractRobustChannel +from aio_pika.abc import AbstractRobustChannel, AbstractRobustConnection from aio_pika.exceptions import QueueEmpty, CONNECTION_EXCEPTIONS from aio_pika.message import Message from aio_pika.robust_channel import RobustChannel @@ -109,7 +109,7 @@ async def test_revive_passive_queue_on_reconnect( reconnect_event = asyncio.Event() reconnect_count = 0 - def reconnect_callback(conn): + def reconnect_callback(conn: Optional[AbstractRobustConnection]): nonlocal reconnect_count reconnect_count += 1 reconnect_event.set()