From 3d873fc94c1a3787cb22f6f6355bbe55e570cd64 Mon Sep 17 00:00:00 2001 From: Casper Beyer Date: Thu, 20 Jun 2024 01:07:22 +0200 Subject: [PATCH 01/22] wip --- nats/jetstream/__init__.py | 18 +++++++ nats/jetstream/context.py | 19 ++++++++ nats/jetstream/errors.py | 2 + nats/jetstream/message.py | 93 ++++++++++++++++++++++++++++++++++++ nats/jetstream/publish.py | 98 ++++++++++++++++++++++++++++++++++++++ 5 files changed, 230 insertions(+) create mode 100644 nats/jetstream/__init__.py create mode 100644 nats/jetstream/context.py create mode 100644 nats/jetstream/errors.py create mode 100644 nats/jetstream/message.py create mode 100644 nats/jetstream/publish.py diff --git a/nats/jetstream/__init__.py b/nats/jetstream/__init__.py new file mode 100644 index 00000000..e6367ffe --- /dev/null +++ b/nats/jetstream/__init__.py @@ -0,0 +1,18 @@ +# Copyright 2016-2024 The NATS Authors +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +def new() -> Context: + pass + +__all__ = ['new'] diff --git a/nats/jetstream/context.py b/nats/jetstream/context.py new file mode 100644 index 00000000..2a75a9f0 --- /dev/null +++ b/nats/jetstream/context.py @@ -0,0 +1,19 @@ +# Copyright 2016-2024 The NATS Authors +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +from nats.jetstream.publish import Publisher + +class Context(Publisher): + def __init__(self): + Publisher.__init__(self) diff --git a/nats/jetstream/errors.py b/nats/jetstream/errors.py new file mode 100644 index 00000000..0f4b1f67 --- /dev/null +++ b/nats/jetstream/errors.py @@ -0,0 +1,2 @@ +class MsgAlreadyAckdError(Exception): + pass diff --git a/nats/jetstream/message.py b/nats/jetstream/message.py new file mode 100644 index 00000000..fbc9a7e7 --- /dev/null +++ b/nats/jetstream/message.py @@ -0,0 +1,93 @@ +# Copyright 2016-2024 The NATS Authors +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +from __future__ import annotations + +from enum import Enum +from typing import Iterator + +import nats +from nats.jetstream.errors import MsgAlreadyAckdError + +class Header(str, Enum): + CONSUMER_STALLED = "Nats-Consumer-Stalled" + DESCRIPTION = "Description" + EXPECTED_LAST_MSG_ID = "Nats-Expected-Last-Msg-Id" + EXPECTED_LAST_SEQUENCE = "Nats-Expected-Last-Sequence" + EXPECTED_LAST_SUBJECT_SEQUENCE = "Nats-Expected-Last-Subject-Sequence" + EXPECTED_STREAM = "Nats-Expected-Stream" + LAST_CONSUMER = "Nats-Last-Consumer" + LAST_STREAM = "Nats-Last-Stream" + MSG_ID = "Nats-Msg-Id" + ROLLUP = "Nats-Rollup" + STATUS = "Status" + +class Msg: + class Metadata: + pass + + def __init__(self, msg: nats.Msg) -> None: + self._ackd = False + self._msg = msg + self._client = None + + @property + def metadata(self) -> Iterator[Msg.Metadata]: + """ + Returns the message body + """ + raise NotImplementedError + + @property + def data(self) -> bytes: + """ + Returns the message body + """ + raise NotImplementedError + + def _ack_reply(self) -> str: + if self._ackd: + raise MsgAlreadyAckdError + + self._client.publish(self._msg.reply, b"Hello World!") + raise NotImplementedError + + async def ack(self) -> None: + """ + Acknowledges a message telling the server that the message was + successfully processed and it can move on to the next message. + """ + raise NotImplementedError + + async def double_ack(self) -> None: + """ + Acknowledges a message and waits for ack reply from the server. + While it impacts performance, it is useful for scenarios where + message loss is not acceptable. + """ + raise NotImplementedError + + async def nak(self) -> None: + """ + Negatively acknowledges a message telling the server to + redeliver the message. + """ + raise NotImplementedError + + async def nak_with_delay(self, delay: int) -> None: + """ + Negatively acknowledges a message telling the server + to redeliver the message after the given delay. 
+ """ + raise NotImplementedError diff --git a/nats/jetstream/publish.py b/nats/jetstream/publish.py new file mode 100644 index 00000000..454fff85 --- /dev/null +++ b/nats/jetstream/publish.py @@ -0,0 +1,98 @@ +# Copyright 2016-2024 The NATS Authors +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +from asyncio import Future +from dataclasses import dataclass +from nats.aio.client import Client +from nats.errors import NoRespondersError +from nats.jetstream.errors import APIError, NoStreamResponseError +from nats.jetstream.message import Msg +from typing import Optional + +import json + +@dataclass +class PubAck: + """ + Represents an acknowledgment received after successfully publishing a message. + """ + + stream: str + """ + str: The name of the stream to which the message was published. + """ + + sequence: int + """ + int: The sequence number of the message in the stream. + """ + + duplicate: bool = False + """ + bool: Indicates whether the message was a duplicate. Defaults to False. + """ + + domain: str = "" + """ + str: The domain to which the message was published. Defaults to an empty string. + """ + +class Publisher: + def __init__(self, client: Client, max_pending_acks: int = 4000): + self.client = client + + async def publish( + self, + subject: str, + payload: bytes, + timeout: float = 1.0, + ) -> PubAck: + """ + Performs a publish to a stream and waits for ack from server. 
+ """ + try: + msg = await self.client.request( + subject, + payload, + timeout=timeout, + ) + + data = json.loads(msg.data) + if 'error' in data: + raise APIError.from_error(data['error']) + + return PubAck(**data) + except NoRespondersError: + raise NoStreamResponseError + + raise NotImplementedError + + async def publish_async(self, subject: str, payload: bytes) -> Future[PubAck]: + """ + Performs a publish to a stream returning a future that can be awaited for the ack from server. + """ + raise NotImplementedError + + async def publish_async_pending(self) -> int: + """ + Returns the number of async publishes outstanding for this context. + An outstanding publish is one that has been sent by the publisher but has not yet received an ack. + """ + raise NotImplementedError + + async def publish_async_complete(self) -> None: + """ + Returns a future that will be closed when all outstanding asynchronously published messages are acknowledged by the server. + """ + raise NotImplementedError From 051edbf84e06d3ccfa97d396ac0c4cd905b07d38 Mon Sep 17 00:00:00 2001 From: Casper Beyer Date: Fri, 21 Jun 2024 14:48:32 +0200 Subject: [PATCH 02/22] wip --- README.md | 2 +- examples/jetstream.py | 69 --------------------------- examples/kv.py | 21 --------- nats/__init__.py | 2 +- nats/jetstream/__init__.py | 7 ++- nats/jetstream/context.py | 41 ++++++++++++++-- nats/jetstream/errors.py | 8 +++- nats/jetstream/message.py | 93 ------------------------------------ nats/jetstream/publish.py | 96 ++++++++------------------------------ 9 files changed, 70 insertions(+), 269 deletions(-) delete mode 100644 examples/jetstream.py delete mode 100644 examples/kv.py delete mode 100644 nats/jetstream/message.py diff --git a/README.md b/README.md index 1f6ca96c..48d647f5 100644 --- a/README.md +++ b/README.md @@ -102,7 +102,7 @@ async def main(): nc = await nats.connect("localhost") # Create JetStream context. 
- js = nc.jetstream() + js = nats.jetstream.new(nc) # Persist messages on 'foo's subject. await js.add_stream(name="sample-stream", subjects=["foo"]) diff --git a/examples/jetstream.py b/examples/jetstream.py deleted file mode 100644 index ca24a78b..00000000 --- a/examples/jetstream.py +++ /dev/null @@ -1,69 +0,0 @@ -import asyncio -import nats -from nats.errors import TimeoutError - - -async def main(): - nc = await nats.connect("localhost") - - # Create JetStream context. - js = nc.jetstream() - - # Persist messages on 'foo's subject. - await js.add_stream(name="sample-stream", subjects=["foo"]) - - for i in range(0, 10): - ack = await js.publish("foo", f"hello world: {i}".encode()) - print(ack) - - # Create pull based consumer on 'foo'. - psub = await js.pull_subscribe("foo", "psub") - - # Fetch and ack messagess from consumer. - for i in range(0, 10): - msgs = await psub.fetch(1) - for msg in msgs: - print(msg) - - # Create single ephemeral push based subscriber. - sub = await js.subscribe("foo") - msg = await sub.next_msg() - await msg.ack() - - # Create single push based subscriber that is durable across restarts. - sub = await js.subscribe("foo", durable="myapp") - msg = await sub.next_msg() - await msg.ack() - - # Create deliver group that will be have load balanced messages. - async def qsub_a(msg): - print("QSUB A:", msg) - await msg.ack() - - async def qsub_b(msg): - print("QSUB B:", msg) - await msg.ack() - await js.subscribe("foo", "workers", cb=qsub_a) - await js.subscribe("foo", "workers", cb=qsub_b) - - for i in range(0, 10): - ack = await js.publish("foo", f"hello world: {i}".encode()) - print("\t", ack) - - # Create ordered consumer with flow control and heartbeats - # that auto resumes on failures. 
- osub = await js.subscribe("foo", ordered_consumer=True) - data = bytearray() - - while True: - try: - msg = await osub.next_msg() - data.extend(msg.data) - except TimeoutError: - break - print("All data in stream:", len(data)) - - await nc.close() - -if __name__ == '__main__': - asyncio.run(main()) diff --git a/examples/kv.py b/examples/kv.py deleted file mode 100644 index b32cc83a..00000000 --- a/examples/kv.py +++ /dev/null @@ -1,21 +0,0 @@ -import asyncio -import nats - - -async def main(): - nc = await nats.connect() - js = nc.jetstream() - - # Create a KV - kv = await js.create_key_value(bucket='MY_KV') - - # Set and retrieve a value - await kv.put('hello', b'world') - entry = await kv.get('hello') - print(f'KeyValue.Entry: key={entry.key}, value={entry.value}') - # KeyValue.Entry: key=hello, value=world - - await nc.close() - -if __name__ == '__main__': - asyncio.run(main()) diff --git a/nats/__init__.py b/nats/__init__.py index 32d01162..fefe5b35 100644 --- a/nats/__init__.py +++ b/nats/__init__.py @@ -15,7 +15,7 @@ from typing import List, Union from .aio.client import Client as NATS - +from .aio.msg import Msg async def connect( servers: Union[str, List[str]] = ["nats://localhost:4222"], diff --git a/nats/jetstream/__init__.py b/nats/jetstream/__init__.py index e6367ffe..e20130b4 100644 --- a/nats/jetstream/__init__.py +++ b/nats/jetstream/__init__.py @@ -12,7 +12,10 @@ # limitations under the License. # -def new() -> Context: - pass +from nats.aio.client import Client +from .context import Context + +async def new(client: Client) -> Context: + return Context(client) __all__ = ['new'] diff --git a/nats/jetstream/context.py b/nats/jetstream/context.py index 2a75a9f0..5c126d7b 100644 --- a/nats/jetstream/context.py +++ b/nats/jetstream/context.py @@ -12,8 +12,41 @@ # limitations under the License. 
# -from nats.jetstream.publish import Publisher +from typing import Type, TypeVar -class Context(Publisher): - def __init__(self): - Publisher.__init__(self) +from nats.aio.client import Client +from nats.errors import NoRespondersError +from nats.jetstream.api import * +from nats.jetstream.errors import * + + +class AccountInfo: + pass + +class Context: + def __init__(self, client: Client, api_prefix: str): + self.client = client + self.prefix = DEFAULT_PREFIX + + async def account_info(self) -> AccountInfo: + """ + Fetches account information from the server, containing details + about the account associated with this JetStream connection. + + If account is not enabled for JetStream, JetStreamNotEnabledForAccountError is raised. + If the server does not have JetStream enabled, JetStreamNotEnabledError is raised. + """ + info_subject = subject(API_ACCOUNT_INFO, self.prefix) + try: + account_info = await request_json(self.client, info_subject, b"INFO", AccountInfo) + return account_info + except Error as error: + if error.error_code == 503: + raise JetStreamNotEnabledError() + + if error.error_code == 0: + raise JetStreamNotEnabledForAccountError() + + raise error + except NoRespondersError: + raise JetStreamNotEnabledError() diff --git a/nats/jetstream/errors.py b/nats/jetstream/errors.py index 0f4b1f67..66fa30d2 100644 --- a/nats/jetstream/errors.py +++ b/nats/jetstream/errors.py @@ -1,2 +1,8 @@ -class MsgAlreadyAckdError(Exception): +class Error(Exception): + pass + +class JetStreamNotEnabledError(Error): + pass + +class JetStreamNotEnabledForAccountError(Error): pass diff --git a/nats/jetstream/message.py b/nats/jetstream/message.py deleted file mode 100644 index fbc9a7e7..00000000 --- a/nats/jetstream/message.py +++ /dev/null @@ -1,93 +0,0 @@ -# Copyright 2016-2024 The NATS Authors -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# - -from __future__ import annotations - -from enum import Enum -from typing import Iterator - -import nats -from nats.jetstream.errors import MsgAlreadyAckdError - -class Header(str, Enum): - CONSUMER_STALLED = "Nats-Consumer-Stalled" - DESCRIPTION = "Description" - EXPECTED_LAST_MSG_ID = "Nats-Expected-Last-Msg-Id" - EXPECTED_LAST_SEQUENCE = "Nats-Expected-Last-Sequence" - EXPECTED_LAST_SUBJECT_SEQUENCE = "Nats-Expected-Last-Subject-Sequence" - EXPECTED_STREAM = "Nats-Expected-Stream" - LAST_CONSUMER = "Nats-Last-Consumer" - LAST_STREAM = "Nats-Last-Stream" - MSG_ID = "Nats-Msg-Id" - ROLLUP = "Nats-Rollup" - STATUS = "Status" - -class Msg: - class Metadata: - pass - - def __init__(self, msg: nats.Msg) -> None: - self._ackd = False - self._msg = msg - self._client = None - - @property - def metadata(self) -> Iterator[Msg.Metadata]: - """ - Returns the message body - """ - raise NotImplementedError - - @property - def data(self) -> bytes: - """ - Returns the message body - """ - raise NotImplementedError - - def _ack_reply(self) -> str: - if self._ackd: - raise MsgAlreadyAckdError - - self._client.publish(self._msg.reply, b"Hello World!") - raise NotImplementedError - - async def ack(self) -> None: - """ - Acknowledges a message telling the server that the message was - successfully processed and it can move on to the next message. - """ - raise NotImplementedError - - async def double_ack(self) -> None: - """ - Acknowledges a message and waits for ack reply from the server. 
- While it impacts performance, it is useful for scenarios where - message loss is not acceptable. - """ - raise NotImplementedError - - async def nak(self) -> None: - """ - Negatively acknowledges a message telling the server to - redeliver the message. - """ - raise NotImplementedError - - async def nak_with_delay(self, delay: int) -> None: - """ - Negatively acknowledges a message telling the server - to redeliver the message after the given delay. - """ - raise NotImplementedError diff --git a/nats/jetstream/publish.py b/nats/jetstream/publish.py index 454fff85..d07457cb 100644 --- a/nats/jetstream/publish.py +++ b/nats/jetstream/publish.py @@ -1,98 +1,40 @@ -# Copyright 2016-2024 The NATS Authors -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# - -from asyncio import Future -from dataclasses import dataclass -from nats.aio.client import Client -from nats.errors import NoRespondersError -from nats.jetstream.errors import APIError, NoStreamResponseError -from nats.jetstream.message import Msg -from typing import Optional - import json -@dataclass -class PubAck: - """ - Represents an acknowledgment received after successfully publishing a message. - """ - - stream: str - """ - str: The name of the stream to which the message was published. - """ +from typing import Dict, Optional - sequence: int - """ - int: The sequence number of the message in the stream. - """ - - duplicate: bool = False - """ - bool: Indicates whether the message was a duplicate. 
Defaults to False. - """ - - domain: str = "" - """ - str: The domain to which the message was published. Defaults to an empty string. - """ +from nats.aio.client import Client +from nats.jetstream.api import PubAck class Publisher: - def __init__(self, client: Client, max_pending_acks: int = 4000): - self.client = client + def __init__(self, client: Client, timeout: float = 1): + self._client = client + self._timeout = timeout async def publish( self, subject: str, - payload: bytes, - timeout: float = 1.0, + payload: bytes = b'', + timeout: Optional[float] = None, + stream: Optional[str] = None, + headers: Optional[Dict] = None ) -> PubAck: """ - Performs a publish to a stream and waits for ack from server. + publish emits a new message to JetStream. """ try: - msg = await self.client.request( + msg = await self._client.request( subject, payload, - timeout=timeout, + timeout=timeout or self._timeout, + headers=headers, ) - - data = json.loads(msg.data) - if 'error' in data: - raise APIError.from_error(data['error']) - - return PubAck(**data) except NoRespondersError: raise NoStreamResponseError - raise NotImplementedError - - async def publish_async(self, subject: str, payload: bytes) -> Future[PubAck]: - """ - Performs a publish to a stream returning a future that can be awaited for the ack from server. - """ - raise NotImplementedError + data = json.loads(msg.data) + if 'error' in data: + raise Error(data['error']) - async def publish_async_pending(self) -> int: - """ - Returns the number of async publishes outstanding for this context. - An outstanding publish is one that has been sent by the publisher but has not yet received an ack. - """ - raise NotImplementedError + return PubAck.from_dict(data) - async def publish_async_complete(self) -> None: - """ - Returns a future that will be closed when all outstanding asynchronously published messages are acknowledged by the server. 
- """ - raise NotImplementedError +__all__ = ["Publisher", "PubAck"] From 517136956a9cf1928c4dbd561dcc17ec66ca7315 Mon Sep 17 00:00:00 2001 From: Casper Beyer Date: Sun, 23 Jun 2024 23:00:46 +0200 Subject: [PATCH 03/22] wip --- nats/jetstream/api.py | 82 ++++++++ nats/jetstream/consumer.py | 0 nats/jetstream/context.py | 38 +--- nats/jetstream/errors.py | 17 +- nats/jetstream/message.py | 29 +++ nats/jetstream/publish.py | 111 ++++++++-- nats/jetstream/stream.py | 402 +++++++++++++++++++++++++++++++++++++ 7 files changed, 623 insertions(+), 56 deletions(-) create mode 100644 nats/jetstream/api.py create mode 100644 nats/jetstream/consumer.py create mode 100644 nats/jetstream/message.py create mode 100644 nats/jetstream/stream.py diff --git a/nats/jetstream/api.py b/nats/jetstream/api.py new file mode 100644 index 00000000..65d0b4a4 --- /dev/null +++ b/nats/jetstream/api.py @@ -0,0 +1,82 @@ +import json + +from dataclasses import dataclass, fields, is_dataclass, MISSING +from typing import Any, Dict, Type, TypeVar, get_origin, get_args + +from nats.aio.client import Client +from nats.jetstream.errors import Error + +T = TypeVar("T") + +STREAM_CREATE = "STREAM.CREATE.%s" + +def as_dict(instance: Any) -> Dict[str, Any]: + if not is_dataclass(instance): + return instance + + result = {} + for field in fields(instance): + name = field.metadata.get('json', field.name) + value = getattr(instance, field.name) + if is_dataclass(value): + result[name] = as_dict(value) + elif isinstance(value, list): + result[name] = [as_dict(item) for item in value] + elif isinstance(value, dict): + result[name] = {k: as_dict(v) for k, v in value.items()} + else: + result[name] = value + return result + +def from_dict(data, cls: Type[T]) -> T: + if not is_dataclass(cls): + return data + + kwargs = {} + for field in fields(cls): + json_key = field.metadata.get('json', field.name) + value = data.get(json_key, MISSING) + + if value is MISSING: + if field.default is not MISSING: + value = 
field.default + elif field.default_factory is not MISSING: + value = field.default_factory() + else: + raise ValueError(f"Missing value for field {field.name}") + + field_type = field.type + field_origin = get_origin(field_type) + field_args = get_args(field_type) + + if is_dataclass(field_type): + value = from_dict(value, field_type) + elif field_origin is list and len(field_args) == 1 and is_dataclass(field_args[0]): + value = [from_dict(item, field_args[0]) for item in value] + elif field_origin is dict and len(field_args) == 2 and is_dataclass(field_args[1]): + value = {k: from_dict(v, field_args[1]) for k, v in value.items()} + + kwargs[field.name] = value + + return cls(**kwargs) + +def parse_json_response(response: str | bytes | bytearray, cls: type[T]) -> T: + json_response = json.loads(response) + if 'error' in json_response: + raise from_dict(json_response['error'], Error) + + return from_dict(json_response, cls) + +async def request_json(client: Client, subject: str, item: Any, cls: Type[T], timeout: float = 5.0) -> T: + json_data = as_dict(item) + json_payload = json.dumps(json_data).encode() + response = await client.request(subject, json_payload, timeout=timeout) + return parse_json_response(response.data, cls) + +def subject(prefix: str | None, template: str, *args) -> str: + value = template.format(args) + + if prefix is None: + return value + + return f"{prefix}.{value}" diff --git a/nats/jetstream/consumer.py b/nats/jetstream/consumer.py new file mode 100644 index 00000000..e69de29b diff --git a/nats/jetstream/context.py b/nats/jetstream/context.py index 5c126d7b..d5d7802b 100644 --- a/nats/jetstream/context.py +++ b/nats/jetstream/context.py @@ -15,38 +15,8 @@ from typing import Type, TypeVar from nats.aio.client import Client -from nats.errors import NoRespondersError -from nats.jetstream.api import * -from nats.jetstream.errors import * +from nats.jetstream.publish import Publisher - -class AccountInfo: - pass - -class Context: - def 
__init__(self, client: Client, api_prefix: str): - self.client = client - self.prefix = DEFAULT_PREFIX - - async def account_info(self) -> AccountInfo: - """ - Fetches account information from the server, containing details - about the account associated with this JetStream connection. - - If account is not enabled for JetStream, JetStreamNotEnabledForAccountError is raised. - If the server does not have JetStream enabled, JetStreamNotEnabledError is raised. - """ - info_subject = subject(API_ACCOUNT_INFO, self.prefix) - try: - account_info = await request_json(self.client, info_subject, b"INFO", AccountInfo) - return account_info - except Error as error: - if error.error_code == 503: - raise JetStreamNotEnabledError() - - if error.error_code == 0: - raise JetStreamNotEnabledForAccountError() - - raise error - except NoRespondersError: - raise JetStreamNotEnabledError() +class Context(Publisher): + def __init__(self, client: Client): + Publisher.__init__(self, client) diff --git a/nats/jetstream/errors.py b/nats/jetstream/errors.py index 66fa30d2..33090efe 100644 --- a/nats/jetstream/errors.py +++ b/nats/jetstream/errors.py @@ -1,8 +1,23 @@ +from typing import Optional +from dataclasses import dataclass, field + +@dataclass class Error(Exception): - pass + """ + Represents an error that happens when using JetStream. 
+ """ + code: Optional[int] = field(metadata={"json": "code"}) + error_code: Optional[int] = field(metadata={"json": "err_code"}) + description: Optional[str] = field(metadata={"json": "description"}) class JetStreamNotEnabledError(Error): pass class JetStreamNotEnabledForAccountError(Error): pass + +class InvalidAckError(Error): + pass + +class NoStreamResponseError(Error): + pass diff --git a/nats/jetstream/message.py b/nats/jetstream/message.py new file mode 100644 index 00000000..62695a62 --- /dev/null +++ b/nats/jetstream/message.py @@ -0,0 +1,29 @@ +from enum import Enum +from dataclasses import dataclass, field + +class Header(str, Enum): + """ + Provides a list of known headers that can be used to control message behavior. + """ + MSG_ID = "Nats-Msg-Id" + EXPECTED_STREAM = "Nats-Expected-Stream" + EXPECTED_LAST_SEQ = "Nats-Expected-Last-Sequence" + EXPECTED_LAST_MSG_ID = "Nats-Expected-Last-Msg-Id" + EXPECTED_LAST_SUBJECT_SEQUENCE = "Nats-Expected-Last-Subject-Sequence" + +@dataclass +class SequencePair: + """ + Provides a pair of the consumer and stream sequence numbers for a message. + """ + + consumer: int = field(metadata={"json": "consumer_seq"}) + """ + The consumer sequence number for message deliveries. + This is the total number of messages the consumer has seen (including redeliveries). + """ + + stream: int = field(metadata={"json": "stream_seq"}) + """ + The stream sequence number for a message. 
+ """ diff --git a/nats/jetstream/publish.py b/nats/jetstream/publish.py index d07457cb..86f4a1fb 100644 --- a/nats/jetstream/publish.py +++ b/nats/jetstream/publish.py @@ -1,40 +1,109 @@ import json +from asyncio import Future +from dataclasses import dataclass, field from typing import Dict, Optional from nats.aio.client import Client -from nats.jetstream.api import PubAck +from nats.aio.msg import Msg +from nats.errors import * +from nats.jetstream.api import * +from nats.jetstream.errors import * +from nats.jetstream.message import * + +DEFAULT_RETRY_ATTEMPTS = 2 + +@dataclass +class PubAck: + """ + PubAck is an ack received after successfully publishing a message. + """ + stream: str = field(metadata={"json": "stream"}) + """ + The stream name the message was published to. + """ + sequence: int = field(metadata={"json": "seq"}) + """ + The sequence number of the message. + """ + duplicate: bool = field(metadata={"json": "duplicate"}) + """ + Indicates whether the message was a duplicate. + """ + domain: Optional[str] = field(metadata={"json": "domain"}) + """ + The domain the message was published to. + """ class Publisher: def __init__(self, client: Client, timeout: float = 1): self._client = client self._timeout = timeout + @property + def timeout(self) -> float: + return self._timeout + + @property + def client(self) -> Client: + return self._client + async def publish( self, subject: str, payload: bytes = b'', + id: Optional[str] = None, timeout: Optional[float] = None, - stream: Optional[str] = None, - headers: Optional[Dict] = None + headers: Optional[Dict] = None, + expected_last_msg_id: Optional[str] = None, + expected_stream: Optional[str] = None, + expected_last_sequence: Optional[int] = None, + expected_last_subject_sequence: Optional[int] = None, + retry_attempts: int = 2, + retry_wait: float = 0.25, ) -> PubAck: """ - publish emits a new message to JetStream. + Performs a publish to a stream and waits for ack from server. 
""" - try: - msg = await self._client.request( - subject, - payload, - timeout=timeout or self._timeout, - headers=headers, - ) - except NoRespondersError: - raise NoStreamResponseError - - data = json.loads(msg.data) - if 'error' in data: - raise Error(data['error']) - - return PubAck.from_dict(data) - -__all__ = ["Publisher", "PubAck"] + + if timeout is None: + timeout = self.timeout + + extra_headers = {} + if expected_last_msg_id is not None: + extra_headers[Header.EXPECTED_LAST_MSG_ID] = str(expected_last_msg_id) + + if expected_stream is not None: + extra_headers[Header.EXPECTED_STREAM] = str(expected_stream) + + if expected_last_sequence is not None: + extra_headers[Header.EXPECTED_LAST_SEQ] = str(expected_last_sequence) + + if expected_last_subject_sequence is not None: + extra_headers[Header.EXPECTED_LAST_SUBJECT_SEQUENCE] = str(expected_last_subject_sequence) + + if len(extra_headers) > 0: + if headers is not None: + extra_headers.update(headers) + + headers = extra_headers + + for attempt in range(0, retry_attempts): + try: + msg = await self.client.request( + subject, + payload, + timeout=timeout, + headers=headers, + ) + + pub_ack = parse_json_response(msg.data, PubAck) + if pub_ack.stream == None: + raise InvalidAckError() + + return pub_ack + except NoRespondersError: + if attempt < retry_attempts - 1: + await asyncio.sleep(retry_wait) + + raise NoStreamResponseError diff --git a/nats/jetstream/stream.py b/nats/jetstream/stream.py new file mode 100644 index 00000000..3e4e657e --- /dev/null +++ b/nats/jetstream/stream.py @@ -0,0 +1,402 @@ +from __future__ import annotations +from enum import Enum +from dataclasses import dataclass, field +from typing import List, Optional +import datetime + +@dataclass +class StreamInfo: + """ + StreamInfo shows config and current state for this stream. 
+ """ + + config: StreamConfig = field(metadata={'json': 'config'}) + """Contains the configuration settings of the stream, set when creating or updating the stream.""" + + created: datetime.datetime = field(metadata={'json': 'created'}) + """The timestamp when the stream was created.""" + + state: StreamState = field(metadata={'json': 'state'}) + """Provides the state of the stream at the time of request, including metrics like the number of messages in the stream, total bytes, etc.""" + + cluster: Optional[ClusterInfo] = field(default=None, metadata={'json': 'cluster'}) + """Contains information about the cluster to which this stream belongs (if applicable).""" + + mirror: Optional[StreamSourceInfo] = field(default=None, metadata={'json': 'mirror'}) + """Contains information about another stream this one is mirroring. Mirroring is used to create replicas of another stream's data. This field is omitted if the stream is not mirroring another stream.""" + + sources: List[StreamSourceInfo] = field(default_factory=list, metadata={'json': 'sources'}) + """A list of source streams from which this stream collects data.""" + + timestamp: datetime.datetime = field(metadata={'json': 'ts'}) + """Indicates when the info was gathered by the server.""" + +@dataclass +class StreamConfig: + """ + StreamConfig is the configuration of a JetStream stream. + """ + + name: str = field(metadata={'json': 'name'}) + """Name is the name of the stream. It is required and must be unique across the JetStream account. Names cannot contain whitespace, ., *, >, path separators (forward or backwards slash), and non-printable characters.""" + + description: Optional[str] = field(default=None, metadata={'json': 'description'}) + """Description is an optional description of the stream.""" + + subjects: List[str] = field(default_factory=list, metadata={'json': 'subjects'}) + """Subjects is a list of subjects that the stream is listening on. Wildcards are supported. 
Subjects cannot be set if the stream is created as a mirror.""" + + retention: RetentionPolicy = field(default=RetentionPolicy.LIMIT, metadata={'json': 'retention'}) + """Retention defines the message retention policy for the stream. Defaults to LimitsPolicy.""" + + max_consumers: int = field(metadata={'json': 'max_consumers'}) + """MaxConsumers specifies the maximum number of consumers allowed for the stream.""" + + max_msgs: int = field(metadata={'json': 'max_msgs'}) + """MaxMsgs is the maximum number of messages the stream will store. After reaching the limit, stream adheres to the discard policy. If not set, server default is -1 (unlimited).""" + + max_bytes: int = field(metadata={'json': 'max_bytes'}) + """MaxBytes is the maximum total size of messages the stream will store. After reaching the limit, stream adheres to the discard policy. If not set, server default is -1 (unlimited).""" + + discard: DiscardPolicy = field(metadata={'json': 'discard'}) + """Discard defines the policy for handling messages when the stream reaches its limits in terms of number of messages or total bytes.""" + + discard_new_per_subject: Optional[bool] = field(default=None, metadata={'json': 'discard_new_per_subject'}) + """DiscardNewPerSubject is a flag to enable discarding new messages per subject when limits are reached. 
Requires DiscardPolicy to be DiscardNew and the MaxMsgsPerSubject to be set.""" + + max_age: datetime.timedelta = field(metadata={'json': 'max_age'}) + """MaxAge is the maximum age of messages that the stream will retain.""" + + max_msgs_per_subject: int = field(metadata={'json': 'max_msgs_per_subject'}) + """MaxMsgsPerSubject is the maximum number of messages per subject that the stream will retain.""" + + max_msg_size: Optional[int] = field(default=None, metadata={'json': 'max_msg_size'}) + """MaxMsgSize is the maximum size of any single message in the stream.""" + + storage: StorageType = field(metadata={'json': 'storage'}) + """Storage specifies the type of storage backend used for the stream (file or memory).""" + + replicas: int = field(metadata={'json': 'num_replicas'}) + """Replicas is the number of stream replicas in clustered JetStream. Defaults to 1, maximum is 5.""" + + no_ack: Optional[bool] = field(default=None, metadata={'json': 'no_ack'}) + """NoAck is a flag to disable acknowledging messages received by this stream. If set to true, publish methods from the JetStream client will not work as expected, since they rely on acknowledgements. Core NATS publish methods should be used instead. Note that this will make message delivery less reliable.""" + + duplicates: Optional[datetime.timedelta] = field(default=None, metadata={'json': 'duplicate_window'}) + """Duplicates is the window within which to track duplicate messages. 
If not set, server default is 2 minutes.""" + + placement: Optional[Placement] = field(default=None, metadata={'json': 'placement'}) + """Placement is used to declare where the stream should be placed via tags and/or an explicit cluster name.""" + + mirror: Optional[StreamSource] = field(default=None, metadata={'json': 'mirror'}) + """Mirror defines the configuration for mirroring another stream.""" + + sources: List[StreamSource] = field(default_factory=list, metadata={'json': 'sources'}) + """Sources is a list of other streams this stream sources messages from.""" + + sealed: Optional[bool] = field(default=None, metadata={'json': 'sealed'}) + """Sealed streams do not allow messages to be published or deleted via limits or API, sealed streams cannot be unsealed via configuration update. Can only be set on already created streams via the Update API.""" + + deny_delete: Optional[bool] = field(default=None, metadata={'json': 'deny_delete'}) + """DenyDelete restricts the ability to delete messages from a stream via the API. Defaults to false.""" + + deny_purge: Optional[bool] = field(default=None, metadata={'json': 'deny_purge'}) + """DenyPurge restricts the ability to purge messages from a stream via the API. Defaults to false.""" + + allow_rollup: Optional[bool] = field(default=None, metadata={'json': 'allow_rollup_hdrs'}) + """AllowRollup allows the use of the Nats-Rollup header to replace all contents of a stream, or subject in a stream, with a single new message.""" + + compression: StoreCompression = field(metadata={'json': 'compression'}) + """Compression specifies the message storage compression algorithm. 
Defaults to NoCompression.""" + + first_seq: Optional[int] = field(default=None, metadata={'json': 'first_seq'}) + """FirstSeq is the initial sequence number of the first message in the stream.""" + + subject_transform: Optional[SubjectTransformConfig] = field(default=None, metadata={'json': 'subject_transform'}) + """SubjectTransform allows applying a transformation to matching messages' subjects.""" + + republish: Optional[Republish] = field(default=None, metadata={'json': 'republish'}) + """RePublish allows immediate republishing of a message to the configured subject after it's stored.""" + + allow_direct: bool = field(default=False, metadata={'json': 'allow_direct'}) + """AllowDirect enables direct access to individual messages using direct get API. Defaults to false.""" + + mirror_direct: bool = field(default=False, metadata={'json': 'mirror_direct'}) + """MirrorDirect enables direct access to individual messages from the origin stream using direct get API. Defaults to false.""" + + consumer_limits: Optional[StreamConsumerLimits] = field(default=None, metadata={'json': 'consumer_limits'}) + """ConsumerLimits defines limits of certain values that consumers can set, defaults for those who don't set these settings.""" + + metadata: Dict[str, str] = field(default_factory=dict, metadata={'json': 'metadata'}) + """Metadata is a set of application-defined key-value pairs for associating metadata on the stream. This feature requires nats-server v2.10.0 or later.""" + + template: Optional[str] = field(default=None, metadata={'json': 'template_owner'}) + """Template identifies the template that manages the Stream. DEPRECATED: This feature is no longer supported.""" + +@dataclass +class StreamSourceInfo: + """ + StreamSourceInfo shows information about an upstream stream source/mirror. 
+ """ + + name: str = field(metadata={'json': 'name'}) + """Name is the name of the stream that is being replicated.""" + + lag: int = field(metadata={'json': 'lag'}) + """Lag informs how many messages behind the source/mirror operation is. This will only show correctly if there is active communication with stream/mirror.""" + + active: datetime.timedelta = field(metadata={'json': 'active'}) + """Active informs when last the mirror or sourced stream had activity. Value will be -1 when there has been no activity.""" + + filter_subject: Optional[str] = field(default=None, metadata={'json': 'filter_subject'}) + """FilterSubject is the subject filter defined for this source/mirror.""" + + subject_transforms: List[SubjectTransformConfig] = field(default_factory=list, metadata={'json': 'subject_transforms'}) + """SubjectTransforms is a list of subject transforms defined for this source/mirror.""" + +@dataclass +class StreamState: + """ + StreamState is the state of a JetStream stream at the time of request. 
+ """ + + msgs: int = field(metadata={'json': 'messages'}) + """Msgs is the number of messages stored in the stream.""" + + bytes: int = field(metadata={'json': 'bytes'}) + """Bytes is the number of bytes stored in the stream.""" + + first_seq: int = field(metadata={'json': 'first_seq'}) + """FirstSeq is the sequence number of the first message in the stream.""" + + first_time: datetime.datetime = field(metadata={'json': 'first_ts'}) + """FirstTime is the timestamp of the first message in the stream.""" + + last_seq: int = field(metadata={'json': 'last_seq'}) + """LastSeq is the sequence number of the last message in the stream.""" + + last_time: datetime.datetime = field(metadata={'json': 'last_ts'}) + """LastTime is the timestamp of the last message in the stream.""" + + consumers: int = field(metadata={'json': 'consumer_count'}) + """Consumers is the number of consumers on the stream.""" + + deleted: List[int] = field(default_factory=list, metadata={'json': 'deleted'}) + """Deleted is a list of sequence numbers that have been removed from the stream. This field will only be returned if the stream has been fetched with the DeletedDetails option.""" + + num_deleted: int = field(metadata={'json': 'num_deleted'}) + """NumDeleted is the number of messages that have been removed from the stream. Only deleted messages causing a gap in stream sequence numbers are counted. Messages deleted at the beginning or end of the stream are not counted.""" + + num_subjects: int = field(metadata={'json': 'num_subjects'}) + """NumSubjects is the number of unique subjects the stream has received messages on.""" + + subjects: Dict[str, int] = field(default_factory=dict, metadata={'json': 'subjects'}) + """Subjects is a map of subjects the stream has received messages on with message count per subject. 
This field will only be returned if the stream has been fetched with the SubjectFilter option.""" + +@dataclass +class ClusterInfo: + """ + ClusterInfo shows information about the underlying set of servers that + make up the stream or consumer. + """ + + name: Optional[str] = field(default=None, metadata={'json': 'name'}) + """Name is the name of the cluster.""" + + leader: Optional[str] = field(default=None, metadata={'json': 'leader'}) + """Leader is the server name of the RAFT leader.""" + + replicas: List[PeerInfo] = field(default_factory=list, metadata={'json': 'replicas'}) + """Replicas is the list of members of the RAFT cluster.""" + +from __future__ import annotations +from dataclasses import dataclass, field +from typing import List, Optional, Dict +import datetime + +@dataclass +class PeerInfo: + """ + PeerInfo shows information about the peers in the cluster that are + supporting the stream or consumer. + """ + + name: str = field(metadata={'json': 'name'}) + """The server name of the peer.""" + + current: bool = field(metadata={'json': 'current'}) + """Indicates if the peer is up to date and synchronized with the leader.""" + + offline: Optional[bool] = field(default=None, metadata={'json': 'offline'}) + """Indicates if the peer is considered offline by the group.""" + + active: datetime.timedelta = field(metadata={'json': 'active'}) + """The duration since this peer was last seen.""" + + lag: Optional[int] = field(default=None, metadata={'json': 'lag'}) + """The number of uncommitted operations this peer is behind the leader.""" + + +@dataclass +class SubjectTransformConfig: + """ + SubjectTransformConfig is for applying a subject transform (to matching + messages) before doing anything else when a new message is received. 
+ """ + + source: str = field(metadata={'json': 'src'}) + """The subject pattern to match incoming messages against.""" + + destination: str = field(metadata={'json': 'dest'}) + """The subject pattern to remap the subject to.""" + + +@dataclass +class Republish: + """ + RePublish is for republishing messages once committed to a stream. The + original subject is remapped from the subject pattern to the destination + pattern. + """ + + source: Optional[str] = field(default=None, metadata={'json': 'src'}) + """The subject pattern to match incoming messages against.""" + + destination: str = field(metadata={'json': 'dest'}) + """The subject pattern to republish the subject to.""" + + headers_only: Optional[bool] = field(default=None, metadata={'json': 'headers_only'}) + """A flag to indicate that only the headers should be republished.""" + + +@dataclass +class Placement: + """ + Placement is used to guide placement of streams in clustered JetStream. + """ + + cluster: str = field(metadata={'json': 'cluster'}) + """The name of the cluster to which the stream should be assigned.""" + + tags: List[str] = field(default_factory=list, metadata={'json': 'tags'}) + """Tags are used to match streams to servers in the cluster. A stream will be assigned to a server with a matching tag.""" + + +@dataclass +class StreamSource: + """ + StreamSource dictates how streams can source from other streams. 
+ """ + + name: str = field(metadata={'json': 'name'}) + """The name of the stream to source from.""" + + opt_start_seq: Optional[int] = field(default=None, metadata={'json': 'opt_start_seq'}) + """The sequence number to start sourcing from.""" + + opt_start_time: Optional[datetime.datetime] = field(default=None, metadata={'json': 'opt_start_time'}) + """The timestamp of messages to start sourcing from.""" + + filter_subject: Optional[str] = field(default=None, metadata={'json': 'filter_subject'}) + """The subject filter used to only replicate messages with matching subjects.""" + + subject_transforms: List[SubjectTransformConfig] = field(default_factory=list, metadata={'json': 'subject_transforms'}) + """ + A list of subject transforms to apply to matching messages. + + Subject transforms on sources and mirrors are also used as subject filters with optional transformations. + """ + + external: Optional[ExternalStream] = field(default=None, metadata={'json': 'external'}) + """A configuration referencing a stream source in another account or JetStream domain.""" + + domain: Optional[str] = field(default=None, metadata={'json': '-'}) + """Used to configure a stream source in another JetStream domain. This setting will set the External field with the appropriate APIPrefix.""" + + +@dataclass +class ExternalStream: + """ + ExternalStream allows you to qualify access to a stream source in another + account. + """ + + api_prefix: str = field(metadata={'json': 'api'}) + """The subject prefix that imports the other account/domain $JS.API.CONSUMER.> subjects.""" + + deliver_prefix: str = field(metadata={'json': 'deliver'}) + """The delivery subject to use for the push consumer.""" + + +@dataclass +class StreamConsumerLimits: + """ + StreamConsumerLimits are the limits for a consumer on a stream. These can + be overridden on a per consumer basis. 
+ """ + + inactive_threshold: Optional[datetime.timedelta] = field(default=None, metadata={'json': 'inactive_threshold'}) + """A duration which instructs the server to clean up the consumer if it has been inactive for the specified duration.""" + + max_ack_pending: Optional[int] = field(default=None, metadata={'json': 'max_ack_pending'}) + """A maximum number of outstanding unacknowledged messages for a consumer.""" + + +class RetentionPolicy(Enum): + """ + RetentionPolicy determines how messages in a stream are retained. + """ + + LIMITS = "limits" + """LimitsPolicy means that messages are retained until any given limit is reached. This could be one of MaxMsgs, MaxBytes, or MaxAge.""" + + INTEREST = "interest" + """InterestPolicy specifies that when all known observables have acknowledged a message, it can be removed.""" + + WORKQUEUE = "workqueue" + """WorkQueuePolicy specifies that when the first worker or subscriber acknowledges the message, it can be removed.""" + + +class DiscardPolicy(Enum): + """ + DiscardPolicy determines how to proceed when limits of messages or bytes + are reached. + """ + + OLD = "old" + """DiscardOld will remove older messages to return to the limits. This is the default.""" + + NEW = "new" + """DiscardNew will fail to store new messages once the limits are reached.""" + + +class StorageType(Enum): + """ + StorageType determines how messages are stored for retention. + """ + + FILE = "file" + """ + Specifies on disk storage. + """ + + MEMORY = "memory" + """ + Specifies in-memory storage. + """ + + +class StoreCompression(Enum): + """ + StoreCompression determines how messages are compressed. + """ + + NONE = "none" + """ + Disables compression on the stream. + """ + + S2 = "s2" + """ + Enables S2 compression on the stream. 
+ """ From 2ff5136c99958d9d8f9ae402cd169e4fa9d36635 Mon Sep 17 00:00:00 2001 From: Casper Beyer Date: Mon, 24 Jun 2024 13:16:49 +0200 Subject: [PATCH 04/22] wip --- nats/jetstream/context.py | 4 +- nats/jetstream/stream.py | 105 ++++++++++++++++++++++++++++++++++---- 2 files changed, 99 insertions(+), 10 deletions(-) diff --git a/nats/jetstream/context.py b/nats/jetstream/context.py index d5d7802b..82f03bf5 100644 --- a/nats/jetstream/context.py +++ b/nats/jetstream/context.py @@ -16,7 +16,9 @@ from nats.aio.client import Client from nats.jetstream.publish import Publisher +from nats.jetstream.stream import StreamManager -class Context(Publisher): +class Context(Publisher, StreamManager): def __init__(self, client: Client): Publisher.__init__(self, client) + StreamManager.__init__(self, client) diff --git a/nats/jetstream/stream.py b/nats/jetstream/stream.py index 3e4e657e..4a1b9927 100644 --- a/nats/jetstream/stream.py +++ b/nats/jetstream/stream.py @@ -76,7 +76,7 @@ class StreamConfig: storage: StorageType = field(metadata={'json': 'storage'}) """Storage specifies the type of storage backend used for the stream (file or memory).""" - replicas: int = field(metadata={'json': 'num_replicas'}) + replicas: int = field(default=1, metadata={'json': 'num_replicas'}) """Replicas is the number of stream replicas in clustered JetStream. Defaults to 1, maximum is 5.""" no_ack: Optional[bool] = field(default=None, metadata={'json': 'no_ack'}) @@ -106,10 +106,10 @@ class StreamConfig: allow_rollup: Optional[bool] = field(default=None, metadata={'json': 'allow_rollup_hdrs'}) """AllowRollup allows the use of the Nats-Rollup header to replace all contents of a stream, or subject in a stream, with a single new message.""" - compression: StoreCompression = field(metadata={'json': 'compression'}) + compression: StoreCompression = field(default=StoreCompression.NONE, metadata={'json': 'compression'}) """Compression specifies the message storage compression algorithm. 
Defaults to NoCompression.""" - first_seq: Optional[int] = field(default=None, metadata={'json': 'first_seq'}) + first_sequence: Optional[int] = field(default=None, metadata={'json': 'first_seq'}) """FirstSeq is the initial sequence number of the first message in the stream.""" subject_transform: Optional[SubjectTransformConfig] = field(default=None, metadata={'json': 'subject_transform'}) @@ -166,13 +166,13 @@ class StreamState: bytes: int = field(metadata={'json': 'bytes'}) """Bytes is the number of bytes stored in the stream.""" - first_seq: int = field(metadata={'json': 'first_seq'}) + first_sequence: int = field(metadata={'json': 'first_seq'}) """FirstSeq is the sequence number of the first message in the stream.""" first_time: datetime.datetime = field(metadata={'json': 'first_ts'}) """FirstTime is the timestamp of the first message in the stream.""" - last_seq: int = field(metadata={'json': 'last_seq'}) + last_sequence: int = field(metadata={'json': 'last_seq'}) """LastSeq is the sequence number of the last message in the stream.""" last_time: datetime.datetime = field(metadata={'json': 'last_ts'}) @@ -209,10 +209,6 @@ class ClusterInfo: replicas: List[PeerInfo] = field(default_factory=list, metadata={'json': 'replicas'}) """Replicas is the list of members of the RAFT cluster.""" -from __future__ import annotations -from dataclasses import dataclass, field -from typing import List, Optional, Dict -import datetime @dataclass class PeerInfo: @@ -400,3 +396,94 @@ class StoreCompression(Enum): """ Enables S2 compression on the stream. """ + + +class Stream: + """ + Stream contains operations on an existing stream. It allows fetching and removing + messages from a stream, as well as purging a stream. 
+ """ + + def __init__(self, info: StreamInfo): + self._info = info + + async def info(self, opts: Optional[List[Any]] = None, *, timeout: Optional[int] = None) -> StreamInfo: + """Info returns StreamInfo from the server.""" + pass + + def cached_info(self) -> StreamInfo: + """CachedInfo returns StreamInfo currently cached on this stream.""" + return self._info + + async def purge(self, opts: Optional[List[Any]] = None, *, timeout: Optional[int] = None) -> None: + """ + Removes messages from a stream. + This is a destructive operation. + """ + pass + + async def get_msg(self, seq: int, opts: Optional[List[Any]] = None, *, timeout: Optional[int] = None) -> RawStreamMsg: + """ + Retrieves a raw stream message stored in JetStream by sequence number. + """ + pass + + async def get_last_msg_for_subject(self, subject: str, *, timeout: Optional[int] = None) -> RawStreamMsg: + """ + Retrieves the last raw stream message stored in JetStream on a given subject. + """ + pass + + async def delete_msg(self, seq: int, *, timeout: Optional[int] = None) -> None: + """ + Deletes a message from a stream. + """ + pass + + async def secure_delete_msg(self, seq: int, *, timeout: Optional[int] = None) -> None: + """ + Deletes a message from a stream. + """ + pass + + +class StreamManager: + """ + Provides methods for managing streams. + """ + + async def create_stream(self, config: StreamConfig, *, timeout: Optional[int] = None) -> Stream: + """ + Creates a new stream with given config. + """ + pass + + async def update_stream(self, config: StreamConfig, *, timeout: Optional[int] = None) -> Stream: + """ + Updates an existing stream with the given config. 
+ """ + pass + + async def create_or_update_stream(self, cfg: StreamConfig, *, timeout: Optional[int] = None) -> Stream: + """CreateOrUpdateStream creates a stream with given config or updates it if it already exists.""" + pass + + async def stream(self, stream: str, *, timeout: Optional[int] = None) -> Stream: + """Stream fetches StreamInfo and returns a Stream interface for a given stream name.""" + pass + + async def stream_name_by_subject(self, subject: str, *, timeout: Optional[int] = None) -> str: + """StreamNameBySubject returns a stream name listening on a given subject.""" + pass + + async def delete_stream(self, stream: str, *, timeout: Optional[int] = None) -> None: + """DeleteStream removes a stream with given name.""" + pass + + def list_streams(self, *, timeout: Optional[int] = None) -> StreamInfoLister: + """ListStreams returns a StreamInfoLister for iterating over stream infos.""" + pass + + def stream_names(self, *, timeout: Optional[int] = None) -> StreamNameLister: + """StreamNames returns a StreamNameLister for iterating over stream names.""" + pass From c82e496d7f0f8b1a9e2b62f36fe5357251aabe53 Mon Sep 17 00:00:00 2001 From: Casper Beyer Date: Mon, 24 Jun 2024 14:59:44 +0200 Subject: [PATCH 05/22] wip --- nats/jetstream/api.py | 14 ++ nats/jetstream/consumer.py | 321 ++++++++++++++++++++++++++++++++ nats/jetstream/errors.py | 14 ++ nats/jetstream/key_value.py | 15 ++ nats/jetstream/message.py | 16 +- nats/jetstream/object.py | 15 ++ nats/jetstream/publish.py | 17 +- nats/jetstream/stream.py | 108 +++++++---- tests/test_jetstream_publish.py | 0 9 files changed, 478 insertions(+), 42 deletions(-) create mode 100644 nats/jetstream/key_value.py create mode 100644 nats/jetstream/object.py create mode 100644 tests/test_jetstream_publish.py diff --git a/nats/jetstream/api.py b/nats/jetstream/api.py index 65d0b4a4..a529e4d3 100644 --- a/nats/jetstream/api.py +++ b/nats/jetstream/api.py @@ -1,3 +1,17 @@ +# Copyright 2016-2024 The NATS Authors +# Licensed 
under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + import json from dataclasses import dataclass, fields, is_dataclass, MISSING diff --git a/nats/jetstream/consumer.py b/nats/jetstream/consumer.py index e69de29b..c9750e25 100644 --- a/nats/jetstream/consumer.py +++ b/nats/jetstream/consumer.py @@ -0,0 +1,321 @@ +# Copyright 2016-2024 The NATS Authors +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +from __future__ import annotations + +from enum import Enum +from typing import Optional +from dataclasses import dataclass, field +from datetime import datetime, timedelta + +class DeliverPolicy(Enum): + """ + DeliverPolicy determines from which point to start delivering messages. 
+ """ + ALL = "all" + """DeliverAllPolicy starts delivering messages from the very beginning of a stream.""" + + LAST = "last" + """DeliverLastPolicy will start the consumer with the last sequence received.""" + + NEW = "new" + """DeliverNewPolicy will only deliver new messages that are sent after the consumer is created.""" + + BY_START_SEQUENCE = "by_start_sequence" + """DeliverByStartSequencePolicy will deliver messages starting from a given sequence configured with OptStartSeq.""" + + BY_START_TIME = "by_start_time" + """DeliverByStartTimePolicy will deliver messages starting from a given time configured with OptStartTime.""" + + LAST_PER_SUBJECT = "last_per_subject" + """DeliverLastPerSubjectPolicy will start the consumer with the last message for all subjects received.""" + + +class AckPolicy(Enum): + """ + AckPolicy determines how the consumer should acknowledge delivered messages. + """ + NONE = "none" + """AckNonePolicy requires no acks for delivered messages.""" + + ALL = "all" + """AckAllPolicy when acking a sequence number, this implicitly acks all sequences below this one as well.""" + + EXPLICIT = "explicit" + """AckExplicitPolicy requires ack or nack for all messages.""" + + +class ReplayPolicy(Enum): + """ + ReplayPolicy determines how the consumer should replay messages it + already has queued in the stream. + """ + INSTANT = "instant" + """ReplayInstantPolicy will replay messages as fast as possible.""" + + ORIGINAL = "original" + """ReplayOriginalPolicy will maintain the same timing as the messages were received.""" + + +@dataclass +class SequenceInfo: + """ + SequenceInfo has both the consumer and the stream sequence and last activity. 
+ """ + consumer: int = field(metadata={'json': 'consumer_seq'}) + """Consumer sequence number.""" + + stream: int = field(metadata={'json': 'stream_seq'}) + """Stream sequence number.""" + + last: Optional[datetime] = field(default=None, metadata={'json': 'last_active'}) + """Last activity timestamp.""" + +@dataclass +class ConsumerConfig: + """ + ConsumerConfig is the configuration of a JetStream consumer. + """ + name: Optional[str] = field(default=None, metadata={'json': 'name'}) + """Optional name for the consumer.""" + + durable: Optional[str] = field(default=None, metadata={'json': 'durable_name'}) + """Optional durable name for the consumer.""" + + description: Optional[str] = field(default=None, metadata={'json': 'description'}) + """Optional description of the consumer.""" + + deliver_policy: DeliverPolicy = field(default=DeliverPolicy.ALL, metadata={'json': 'deliver_policy'}) + """Defines from which point to start delivering messages from the stream. Defaults to DeliverAllPolicy.""" + + opt_start_seq: Optional[int] = field(default=None, metadata={'json': 'opt_start_seq'}) + """Optional sequence number from which to start message delivery.""" + + opt_start_time: Optional[datetime] = field(default=None, metadata={'json': 'opt_start_time'}) + """Optional time from which to start message delivery.""" + + ack_policy: AckPolicy = field(default=AckPolicy.EXPLICIT, metadata={'json': 'ack_policy'}) + """Defines the acknowledgement policy for the consumer. 
Defaults to AckExplicitPolicy.""" + + ack_wait: Optional[timedelta] = field(default=None, metadata={'json': 'ack_wait'}) + """How long the server will wait for an acknowledgement before resending a message.""" + + max_deliver: Optional[int] = field(default=None, metadata={'json': 'max_deliver'}) + """Maximum number of delivery attempts for a message.""" + + backoff: Optional[List[timedelta]] = field(default=None, metadata={'json': 'backoff'}) + """Optional back-off intervals for retrying message delivery after a failed acknowledgement.""" + + filter_subject: Optional[str] = field(default=None, metadata={'json': 'filter_subject'}) + """Can be used to filter messages delivered from the stream.""" + + replay_policy: ReplayPolicy = field(default=ReplayPolicy.INSTANT, metadata={'json': 'replay_policy'}) + """Defines the rate at which messages are sent to the consumer.""" + + rate_limit: Optional[int] = field(default=None, metadata={'json': 'rate_limit_bps'}) + """Optional maximum rate of message delivery in bits per second.""" + + sample_frequency: Optional[str] = field(default=None, metadata={'json': 'sample_freq'}) + """Optional frequency for sampling how often acknowledgements are sampled for observability.""" + + max_waiting: Optional[int] = field(default=None, metadata={'json': 'max_waiting'}) + """Maximum number of pull requests waiting to be fulfilled.""" + + max_ack_pending: Optional[int] = field(default=None, metadata={'json': 'max_ack_pending'}) + """Maximum number of outstanding unacknowledged messages.""" + + headers_only: Optional[bool] = field(default=None, metadata={'json': 'headers_only'}) + """Indicates whether only headers of messages should be sent.""" + + max_request_batch: Optional[int] = field(default=None, metadata={'json': 'max_batch'}) + """Optional maximum batch size a single pull request can make.""" + + max_request_expires: Optional[timedelta] = field(default=None, metadata={'json': 'max_expires'}) + """Maximum duration a single pull request 
will wait for messages to be available to pull.""" + + max_request_max_bytes: Optional[int] = field(default=None, metadata={'json': 'max_bytes'}) + """Optional maximum total bytes that can be requested in a given batch.""" + + inactive_threshold: Optional[timedelta] = field(default=None, metadata={'json': 'inactive_threshold'}) + """Duration which instructs the server to clean up the consumer if it has been inactive.""" + + replicas: int = field(metadata={'json': 'num_replicas'}) + """Number of replicas for the consumer's state.""" + + memory_storage: Optional[bool] = field(default=None, metadata={'json': 'mem_storage'}) + """Flag to force the consumer to use memory storage.""" + + filter_subjects: Optional[List[str]] = field(default=None, metadata={'json': 'filter_subjects'}) + """Allows filtering messages from a stream by subject.""" + + metadata: Optional[Dict[str, str]] = field(default=None, metadata={'json': 'metadata'}) + """Set of application-defined key-value pairs for associating metadata on the consumer.""" + + +@dataclass +class ConsumerInfo: + """ + ConsumerInfo is the detailed information about a JetStream consumer. 
+ """ + stream: str = field(metadata={'json': 'stream_name'}) + """Name of the stream that the consumer is bound to.""" + + name: str = field(metadata={'json': 'name'}) + """Unique identifier for the consumer.""" + + created: datetime = field(metadata={'json': 'created'}) + """Timestamp when the consumer was created.""" + + config: ConsumerConfig = field(metadata={'json': 'config'}) + """Configuration settings of the consumer.""" + + delivered: SequenceInfo = field(metadata={'json': 'delivered'}) + """Information about the most recently delivered message.""" + + ack_floor: SequenceInfo = field(metadata={'json': 'ack_floor'}) + """Indicates the message before the first unacknowledged message.""" + + num_ack_pending: int = field(metadata={'json': 'num_ack_pending'}) + """Number of messages that have been delivered but not yet acknowledged.""" + + num_redelivered: int = field(metadata={'json': 'num_redelivered'}) + """Counts the number of messages that have been redelivered and not yet acknowledged.""" + + num_waiting: int = field(metadata={'json': 'num_waiting'}) + """Count of active pull requests.""" + + num_pending: int = field(metadata={'json': 'num_pending'}) + """Number of messages that match the consumer's filter but have not been delivered yet.""" + + cluster: Optional[ClusterInfo] = field(default=None, metadata={'json': 'cluster'}) + """Information about the cluster to which this consumer belongs.""" + + push_bound: Optional[bool] = field(default=None, metadata={'json': 'push_bound'}) + """Indicates whether at least one subscription exists for the delivery subject of this consumer.""" + + timestamp: datetime = field(metadata={'json': 'ts'}) + """Timestamp when the info was gathered by the server.""" + + +@dataclass +class OrderedConsumerConfig: + """ + OrderedConsumerConfig is the configuration of an ordered JetStream consumer. 
+ """ + filter_subjects: Optional[List[str]] = field(default=None, metadata={'json': 'filter_subjects'}) + """Allows filtering messages from a stream by subject.""" + + deliver_policy: DeliverPolicy = field(metadata={'json': 'deliver_policy'}) + """Defines from which point to start delivering messages from the stream.""" + + opt_start_seq: Optional[int] = field(default=None, metadata={'json': 'opt_start_seq'}) + """Optional sequence number from which to start message delivery.""" + + opt_start_time: Optional[datetime] = field(default=None, metadata={'json': 'opt_start_time'}) + """Optional time from which to start message delivery.""" + + replay_policy: ReplayPolicy = field(metadata={'json': 'replay_policy'}) + """Defines the rate at which messages are sent to the consumer.""" + + inactive_threshold: Optional[timedelta] = field(default=None, metadata={'json': 'inactive_threshold'}) + """Duration which instructs the server to clean up the consumer if it has been inactive.""" + + headers_only: Optional[bool] = field(default=None, metadata={'json': 'headers_only'}) + """Indicates whether only headers of messages should be sent.""" + + max_reset_attempts: Optional[int] = field(default=None, metadata={'json': 'max_reset_attempts'}) + """Maximum number of attempts for the consumer to be recreated in a single recreation cycle.""" + +class Consumer: + raise NotImplementedError + + +class PullConsumer(Consumer): + raise NotImplementedError + + +class OrderedConsumer(Consumer): + raise NotImplementedError + + +class StreamConsumerManager: + """ + Provides methods for directly managing consumers. + """ + + async def create_or_update_consumer( + self, stream: str, config: ConsumerConfig, timeout: Optional[float] = None + ) -> Consumer: + """ + CreateOrUpdateConsumer creates a consumer on a given stream with + given config. If consumer already exists, it will be updated (if + possible). Consumer interface is returned, allowing to operate on a + consumer (e.g. fetch messages). 
+ """ + raise NotImplementedError + + async def create_consumer( + self, stream: str, config: ConsumerConfig, timeout: Optional[float] = None + ) -> Consumer: + """ + CreateConsumer creates a consumer on a given stream with given + config. If consumer already exists and the provided configuration + differs from its configuration, ErrConsumerExists is returned. If the + provided configuration is the same as the existing consumer, the + existing consumer is returned. Consumer interface is returned, + allowing to operate on a consumer (e.g. fetch messages). + """ + raise NotImplementedError + + async def update_consumer( + self, stream: str, config: ConsumerConfig, timeout: Optional[float] = None + ) -> Consumer: + """ + Updates an existing consumer. + + If consumer does not exist, an error is raised. + """ + raise NotImplementedError + + async def ordered_consumer( + self, stream: str, config: OrderedConsumerConfig, timeout: Optional[float] = None + ) -> Consumer: + """ + Returns returns an instance of an ordered consumer. + + Ordered consumers are managed by the library and provide a simple way to consume + messages from a stream. + + Ordered consumers are ephemeral in-memory pull consumers and are resilient to deletes and restarts. + """ + raise NotImplementedError + + async def consumer( + self, stream: str, consumer: str, timeout: Optional[float] = None + ) -> Consumer: + """ + Returns an instance of an existing consumer, allowing processing of messages. + + If consumer does not exist, an error is raised. + """ + raise NotImplementedError + + async def delete_consumer( + self, stream: str, consumer: str, timeout: Optional[float] = None + ) -> None: + """ + Removes a consumer with given name from a stream. + If consumer does not exist, an error is raised. 
+ """ + raise NotImplementedError diff --git a/nats/jetstream/errors.py b/nats/jetstream/errors.py index 33090efe..793cd05c 100644 --- a/nats/jetstream/errors.py +++ b/nats/jetstream/errors.py @@ -1,3 +1,17 @@ +# Copyright 2016-2024 The NATS Authors +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + from typing import Optional from dataclasses import dataclass, field diff --git a/nats/jetstream/key_value.py b/nats/jetstream/key_value.py new file mode 100644 index 00000000..4cd4f8af --- /dev/null +++ b/nats/jetstream/key_value.py @@ -0,0 +1,15 @@ +# Copyright 2016-2024 The NATS Authors +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +# TODO(caspervonb) diff --git a/nats/jetstream/message.py b/nats/jetstream/message.py index 62695a62..a211c6be 100644 --- a/nats/jetstream/message.py +++ b/nats/jetstream/message.py @@ -1,9 +1,23 @@ +# Copyright 2016-2024 The NATS Authors +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + from enum import Enum from dataclasses import dataclass, field class Header(str, Enum): """ - Provides a list of known headers that can be used to control message behavior. + Provides known headers that can be used to control message behavior. """ MSG_ID = "Nats-Msg-Id" EXPECTED_STREAM = "Nats-Expected-Stream" diff --git a/nats/jetstream/object.py b/nats/jetstream/object.py new file mode 100644 index 00000000..4cd4f8af --- /dev/null +++ b/nats/jetstream/object.py @@ -0,0 +1,15 @@ +# Copyright 2016-2024 The NATS Authors +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +# TODO(caspervonb) diff --git a/nats/jetstream/publish.py b/nats/jetstream/publish.py index 86f4a1fb..05799fa0 100644 --- a/nats/jetstream/publish.py +++ b/nats/jetstream/publish.py @@ -1,3 +1,17 @@ +# Copyright 2016-2024 The NATS Authors +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + import json from asyncio import Future @@ -52,9 +66,10 @@ async def publish( self, subject: str, payload: bytes = b'', - id: Optional[str] = None, timeout: Optional[float] = None, headers: Optional[Dict] = None, + *, + id: Optional[str] = None, expected_last_msg_id: Optional[str] = None, expected_stream: Optional[str] = None, expected_last_sequence: Optional[int] = None, diff --git a/nats/jetstream/stream.py b/nats/jetstream/stream.py index 4a1b9927..e9c30336 100644 --- a/nats/jetstream/stream.py +++ b/nats/jetstream/stream.py @@ -1,8 +1,27 @@ +# Copyright 2016-2024 The NATS Authors +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + from __future__ import annotations + from enum import Enum from dataclasses import dataclass, field from typing import List, Optional import datetime +from typing_extensions import AsyncIterator + +from nats.aio.client import Client +from nats.jetstream.api import subject, request_json @dataclass class StreamInfo: @@ -10,6 +29,9 @@ class StreamInfo: StreamInfo shows config and current state for this stream. """ + timestamp: datetime.datetime = field(metadata={'json': 'ts'}) + """Indicates when the info was gathered by the server.""" + config: StreamConfig = field(metadata={'json': 'config'}) """Contains the configuration settings of the stream, set when creating or updating the stream.""" @@ -28,8 +50,6 @@ class StreamInfo: sources: List[StreamSourceInfo] = field(default_factory=list, metadata={'json': 'sources'}) """A list of source streams from which this stream collects data.""" - timestamp: datetime.datetime = field(metadata={'json': 'ts'}) - """Indicates when the info was gathered by the server.""" @dataclass class StreamConfig: @@ -38,7 +58,7 @@ class StreamConfig: """ name: str = field(metadata={'json': 'name'}) - """Name is the name of the stream. It is required and must be unique across the JetStream account. Names cannot contain whitespace, ., *, >, path separators (forward or backwards slash), and non-printable characters.""" + """Name is the name of the stream. It is required and must be unique across the JetStream account. 
Names cannot contain whitespace, ., >, path separators (forward or backwards slash), and non-printable characters.""" description: Optional[str] = field(default=None, metadata={'json': 'description'}) """Description is an optional description of the stream.""" @@ -161,28 +181,28 @@ class StreamState: """ msgs: int = field(metadata={'json': 'messages'}) - """Msgs is the number of messages stored in the stream.""" + """The number of messages stored in the stream.""" bytes: int = field(metadata={'json': 'bytes'}) - """Bytes is the number of bytes stored in the stream.""" + """The number of bytes stored in the stream.""" first_sequence: int = field(metadata={'json': 'first_seq'}) - """FirstSeq is the sequence number of the first message in the stream.""" + """The the sequence number of the first message in the stream.""" first_time: datetime.datetime = field(metadata={'json': 'first_ts'}) - """FirstTime is the timestamp of the first message in the stream.""" + """The timestamp of the first message in the stream.""" last_sequence: int = field(metadata={'json': 'last_seq'}) - """LastSeq is the sequence number of the last message in the stream.""" + """The sequence number of the last message in the stream.""" last_time: datetime.datetime = field(metadata={'json': 'last_ts'}) - """LastTime is the timestamp of the last message in the stream.""" + """The timestamp of the last message in the stream.""" consumers: int = field(metadata={'json': 'consumer_count'}) - """Consumers is the number of consumers on the stream.""" + """The number of consumers on the stream.""" deleted: List[int] = field(default_factory=list, metadata={'json': 'deleted'}) - """Deleted is a list of sequence numbers that have been removed from the stream. This field will only be returned if the stream has been fetched with the DeletedDetails option.""" + """A list of sequence numbers that have been removed from the stream. 
This field will only be returned if the stream has been fetched with the DeletedDetails option.""" num_deleted: int = field(metadata={'json': 'num_deleted'}) """NumDeleted is the number of messages that have been removed from the stream. Only deleted messages causing a gap in stream sequence numbers are counted. Messages deleted at the beginning or end of the stream are not counted.""" @@ -404,47 +424,55 @@ class Stream: messages from a stream, as well as purging a stream. """ - def __init__(self, info: StreamInfo): + def __init__(self, client: Client, name: str, info: StreamInfo, api_prefix: str): + self._client = client + self._name = name self._info = info + self._api_prefix = api_prefix + + @property + def api_prefix(self) -> str: + return self._api_prefix - async def info(self, opts: Optional[List[Any]] = None, *, timeout: Optional[int] = None) -> StreamInfo: + async def info(self, opts: Optional[List[Any]] = None, timeout: Optional[int] = None) -> StreamInfo: """Info returns StreamInfo from the server.""" - pass + info_subject = subject(self._api_prefix, f"STREAM.INFO.{self._name}") + info_response = await request_json(self._client, info_subject, timeout=timeout) def cached_info(self) -> StreamInfo: """CachedInfo returns StreamInfo currently cached on this stream.""" return self._info - async def purge(self, opts: Optional[List[Any]] = None, *, timeout: Optional[int] = None) -> None: + async def purge(self, opts: Optional[List[Any]] = None, timeout: Optional[int] = None) -> None: """ Removes messages from a stream. This is a destructive operation. """ - pass + raise NotImplementedError - async def get_msg(self, seq: int, opts: Optional[List[Any]] = None, *, timeout: Optional[int] = None) -> RawStreamMsg: + async def get_msg(self, seq: int, opts: Optional[List[Any]] = None, timeout: Optional[int] = None) -> RawStreamMsg: """ Retrieves a raw stream message stored in JetStream by sequence number. 
""" - pass + raise NotImplementedError - async def get_last_msg_for_subject(self, subject: str, *, timeout: Optional[int] = None) -> RawStreamMsg: + async def get_last_msg_for_subject(self, subject: str, timeout: Optional[int] = None) -> RawStreamMsg: """ Retrieves the last raw stream message stored in JetStream on a given subject. """ - pass + raise NotImplementedError - async def delete_msg(self, seq: int, *, timeout: Optional[int] = None) -> None: + async def delete_msg(self, seq: int, timeout: Optional[int] = None) -> None: """ Deletes a message from a stream. """ - pass + raise NotImplementedError - async def secure_delete_msg(self, seq: int, *, timeout: Optional[int] = None) -> None: + async def secure_delete_msg(self, seq: int, timeout: Optional[int] = None) -> None: """ Deletes a message from a stream. """ - pass + raise NotImplementedError class StreamManager: @@ -452,38 +480,38 @@ class StreamManager: Provides methods for managing streams. """ - async def create_stream(self, config: StreamConfig, *, timeout: Optional[int] = None) -> Stream: + async def create_stream(self, config: StreamConfig, timeout: Optional[int] = None) -> Stream: """ Creates a new stream with given config. """ - pass + raise NotImplementedError - async def update_stream(self, config: StreamConfig, *, timeout: Optional[int] = None) -> Stream: + async def update_stream(self, config: StreamConfig, timeout: Optional[int] = None) -> Stream: """ Updates an existing stream with the given config. 
""" - pass + raise NotImplementedError - async def create_or_update_stream(self, cfg: StreamConfig, *, timeout: Optional[int] = None) -> Stream: + async def create_or_update_stream(self, config: StreamConfig, timeout: Optional[int] = None) -> Stream: """CreateOrUpdateStream creates a stream with given config or updates it if it already exists.""" - pass + raise NotImplementedError - async def stream(self, stream: str, *, timeout: Optional[int] = None) -> Stream: + async def stream(self, stream: str, timeout: Optional[int] = None) -> Stream: """Stream fetches StreamInfo and returns a Stream interface for a given stream name.""" - pass + raise NotImplementedError - async def stream_name_by_subject(self, subject: str, *, timeout: Optional[int] = None) -> str: + async def stream_name_by_subject(self, subject: str, timeout: Optional[int] = None) -> str: """StreamNameBySubject returns a stream name listening on a given subject.""" - pass + raise NotImplementedError - async def delete_stream(self, stream: str, *, timeout: Optional[int] = None) -> None: + async def delete_stream(self, stream: str, timeout: Optional[int] = None) -> None: """DeleteStream removes a stream with given name.""" - pass + raise NotImplementedError - def list_streams(self, *, timeout: Optional[int] = None) -> StreamInfoLister: + def list_streams(self, timeout: Optional[int] = None) -> AsyncIterator[StreamInfo]: """ListStreams returns a StreamInfoLister for iterating over stream infos.""" - pass + raise NotImplementedError - def stream_names(self, *, timeout: Optional[int] = None) -> StreamNameLister: + def stream_names(self, timeout: Optional[int] = None) -> AsyncIterator[str]: """StreamNames returns a StreamNameLister for iterating over stream names.""" - pass + raise NotImplementedError diff --git a/tests/test_jetstream_publish.py b/tests/test_jetstream_publish.py new file mode 100644 index 00000000..e69de29b From aacd76c44643ab692b44fe9849d60c610d7a7399 Mon Sep 17 00:00:00 2001 From: Casper 
Beyer Date: Mon, 24 Jun 2024 20:13:26 +0200 Subject: [PATCH 06/22] wip --- nats/jetstream/api.py | 89 +++++++--- nats/jetstream/context.py | 2 +- nats/jetstream/errors.py | 334 ++++++++++++++++++++++++++++++++++++-- nats/jetstream/publish.py | 33 ++-- nats/jetstream/stream.py | 261 ++++++++++++++++++----------- 5 files changed, 561 insertions(+), 158 deletions(-) diff --git a/nats/jetstream/api.py b/nats/jetstream/api.py index a529e4d3..7e63f911 100644 --- a/nats/jetstream/api.py +++ b/nats/jetstream/api.py @@ -12,18 +12,19 @@ # limitations under the License. # +from __future__ import annotations + import json -from dataclasses import dataclass, fields, is_dataclass, MISSING -from typing import Any, Dict, Type, TypeVar, get_origin, get_args +from dataclasses import dataclass, fields, field, is_dataclass, MISSING +from typing import Any, Dict, Optional, Self, Type, TypeVar, get_origin, get_args +from urllib import parse -from nats.aio.client import Client from nats.jetstream.errors import Error +from nats.js.api import DEFAULT_PREFIX T = TypeVar("T") -STREAM_CREATE = "STREAM.CREATE.%s" - def as_dict(instance: Any) -> Dict[str, Any]: if not is_dataclass(instance): return instance @@ -74,23 +75,61 @@ def from_dict(data, cls: Type[T]) -> T: return cls(**kwargs) -def parse_json_response(response: str | bytes | bytearray, cls: type[T]) -> T: - json_response = json.loads(response) - if 'error' in json_response: - raise from_dict(json_response['error'], Error) - - return from_dict(json_response, cls) - -async def request_json(client: Client, subject: str, item: Any, cls: Type[T], timeout: float = 5.0) -> T: - json_data = as_dict(item) - json_payload = json.dumps(json_data).encode() - response = await client.request(subject, json_payload, timeout=timeout) - return parse_json_response(response.data, cls) - -def subject(prefix: str | None, template: str, *args) -> str: - value = template.format(args) - - if prefix is None: - return value - - return f"{prefix}.{value}" 
+@dataclass +class Request: + def as_dict(self) -> Dict[str, Any]: + return as_dict(self) + + def as_json(self) -> str: + return json.dumps(self.as_dict()) + +@dataclass +class Paged: + total: int = field(default=0, metadata={"json": "total"}) + offset: int = field(default=0, metadata={"json": "offset"}) + limit: int = field(default=0, metadata={"json": "limit"}) + +@dataclass +class ErrorResponse: + code: Optional[int] = field(default=None, metadata={"json": "code"}) + error_code: Optional[int] = field(default=None, metadata={"json": "err_code"}) + description: Optional[str] = field(default=None, metadata={"json": "description"}) + +@dataclass +class Response: + type: str + error: Optional[ErrorResponse] = None + + @classmethod + def from_dict(cls: Type[T], data: Dict[str, Any]) -> T: + return cls(**data) + + @classmethod + def from_json(cls: Type[T], data: str) -> T: + return cls.from_dict(json.loads(data)) + +class Client: + """ + Provides methods for sending requests and processing responses via JetStream. 
+ """ + def __init__(self, inner: Any, timeout: float = 1.0, prefix: str = DEFAULT_PREFIX) -> None: + self.inner = inner + self.timeout = timeout + self.prefix = None + + async def request(self, subject: str, payload: bytes, timeout: Optional[float] = None, headers: Optional[Dict[str, str]] = None) -> Any: + if timeout is None: + timeout = self.timeout + + self.inner.request(subject, payload, timeout=timeout) + + async def request_json(self, subject: str, request_object: Request, response_type: Type[T], timeout: float | None) -> T: + if self.prefix is not None: + subject = f"{self.prefix}.{subject}" + + if timeout is None: + timeout = self.timeout + + request_payload = request_object.as_json() + response = await self.inner.request(subject, request_payload, timeout=timeout) + return response_type.from_json(response.data) diff --git a/nats/jetstream/context.py b/nats/jetstream/context.py index 82f03bf5..a23e681f 100644 --- a/nats/jetstream/context.py +++ b/nats/jetstream/context.py @@ -14,7 +14,7 @@ from typing import Type, TypeVar -from nats.aio.client import Client +from nats.jetstream.api import Client from nats.jetstream.publish import Publisher from nats.jetstream.stream import StreamManager diff --git a/nats/jetstream/errors.py b/nats/jetstream/errors.py index 793cd05c..8f342bd6 100644 --- a/nats/jetstream/errors.py +++ b/nats/jetstream/errors.py @@ -13,25 +13,325 @@ # from typing import Optional -from dataclasses import dataclass, field +from enum import Enum -@dataclass -class Error(Exception): - """ - Represents an error that happens when using JetStream. 
- """ - code: Optional[int] = field(metadata={"json": "code"}) - error_code: Optional[int] = field(metadata={"json": "err_code"}) - description: Optional[str] = field(metadata={"json": "description"}) +class ErrorCode(Enum): + JETSTREAM_NOT_ENABLED_FOR_ACCOUNT = 10039 + JETSTREAM_NOT_ENABLED = 10076 + STREAM_NOT_FOUND = 10059 + STREAM_NAME_IN_USE = 10058 + CONSUMER_CREATE = 10012 + CONSUMER_NOT_FOUND = 10014 + CONSUMER_NAME_EXISTS = 10013 + CONSUMER_ALREADY_EXISTS = 10105 + CONSUMER_EXISTS = 10148 + DUPLICATE_FILTER_SUBJECTS = 10136 + OVERLAPPING_FILTER_SUBJECTS = 10138 + CONSUMER_EMPTY_FILTER = 10139 + CONSUMER_DOES_NOT_EXIST = 10149 + MESSAGE_NOT_FOUND = 10037 + BAD_REQUEST = 10003 + STREAM_WRONG_LAST_SEQUENCE = 10071 -class JetStreamNotEnabledError(Error): - pass -class JetStreamNotEnabledForAccountError(Error): - pass +class JetStreamError(Exception): + def __init__(self, message=None, code=None, error_code=None, description=None): + self.message = message + self.code = code + self.error_code = error_code + self.description = description -class InvalidAckError(Error): - pass + def __str__(self): + if self.description: + return f"nats: API error: code={self.code} err_code={self.error_code} description={self.description}" + return f"nats: {self.message}" -class NoStreamResponseError(Error): - pass + +class JetStreamNotEnabledError(JetStreamError): + def __init__(self): + super().__init__( + message="jetstream not enabled", + code=503, + error_code=ErrorCode.JETSTREAM_NOT_ENABLED, + description="jetstream not enabled", + ) + + +class JetStreamNotEnabledForAccountError(JetStreamError): + def __init__(self): + super().__init__( + message="jetstream not enabled for account", + code=503, + error_code=ErrorCode.JETSTREAM_NOT_ENABLED_FOR_ACCOUNT, + description="jetstream not enabled for account", + ) + + +class StreamNotFoundError(JetStreamError): + def __init__(self): + super().__init__( + message="stream not found", + code=404, + error_code=ErrorCode.STREAM_NOT_FOUND, 
+ description="stream not found", + ) + + +class StreamNameAlreadyInUseError(JetStreamError): + def __init__(self): + super().__init__( + message="stream name already in use", + code=400, + error_code=ErrorCode.STREAM_NAME_IN_USE, + description="stream name already in use", + ) + + +class StreamSubjectTransformNotSupportedError(JetStreamError): + def __init__(self): + super().__init__(message="stream subject transformation not supported by nats-server") + + +class StreamSourceSubjectTransformNotSupportedError(JetStreamError): + def __init__(self): + super().__init__(message="stream subject transformation not supported by nats-server") + + +class StreamSourceNotSupportedError(JetStreamError): + def __init__(self): + super().__init__(message="stream sourcing is not supported by nats-server") + + +class StreamSourceMultipleFilterSubjectsNotSupportedError(JetStreamError): + def __init__(self): + super().__init__(message="stream sourcing with multiple subject filters not supported by nats-server") + + +class ConsumerNotFoundError(JetStreamError): + def __init__(self): + super().__init__( + message="consumer not found", + code=404, + error_code=ErrorCode.CONSUMER_NOT_FOUND, + description="consumer not found", + ) + + +class ConsumerExistsError(JetStreamError): + def __init__(self): + super().__init__( + message="consumer already exists", + code=400, + error_code=ErrorCode.CONSUMER_EXISTS, + description="consumer already exists", + ) + + +class ConsumerDoesNotExistError(JetStreamError): + def __init__(self): + super().__init__( + message="consumer does not exist", + code=400, + error_code=ErrorCode.CONSUMER_DOES_NOT_EXIST, + description="consumer does not exist", + ) + + +class MessageNotFoundError(JetStreamError): + def __init__(self): + super().__init__( + message="message not found", + code=404, + error_code=ErrorCode.MESSAGE_NOT_FOUND, + description="message not found", + ) + + +class BadRequestError(JetStreamError): + def __init__(self): + super().__init__( + 
message="bad request", + code=400, + error_code=ErrorCode.BAD_REQUEST, + description="bad request", + ) + + +class ConsumerCreateError(JetStreamError): + def __init__(self): + super().__init__( + message="could not create consumer", + code=500, + error_code=ErrorCode.CONSUMER_CREATE, + description="could not create consumer", + ) + + +class DuplicateFilterSubjectsError(JetStreamError): + def __init__(self): + super().__init__( + message="consumer cannot have both FilterSubject and FilterSubjects specified", + code=500, + error_code=ErrorCode.DUPLICATE_FILTER_SUBJECTS, + description="consumer cannot have both FilterSubject and FilterSubjects specified", + ) + + +class OverlappingFilterSubjectsError(JetStreamError): + def __init__(self): + super().__init__( + message="consumer subject filters cannot overlap", + code=500, + error_code=ErrorCode.OVERLAPPING_FILTER_SUBJECTS, + description="consumer subject filters cannot overlap", + ) + + +class EmptyFilterError(JetStreamError): + def __init__(self): + super().__init__( + message="consumer filter in FilterSubjects cannot be empty", + code=500, + error_code=ErrorCode.CONSUMER_EMPTY_FILTER, + description="consumer filter in FilterSubjects cannot be empty", + ) + + +class ConsumerMultipleFilterSubjectsNotSupportedError(JetStreamError): + def __init__(self): + super().__init__(message="multiple consumer filter subjects not supported by nats-server") + + +class ConsumerNameAlreadyInUseError(JetStreamError): + def __init__(self): + super().__init__(message="consumer name already in use") + + +class InvalidJSAckError(JetStreamError): + def __init__(self): + super().__init__(message="invalid jetstream publish response") + + +class StreamNameRequiredError(JetStreamError): + def __init__(self): + super().__init__(message="stream name is required") + + +class MsgAlreadyAckdError(JetStreamError): + def __init__(self): + super().__init__(message="message was already acknowledged") + + +class NoStreamResponseError(JetStreamError): + 
def __init__(self): + super().__init__(message="no response from stream") + + +class NotJSMessageError(JetStreamError): + def __init__(self): + super().__init__(message="not a jetstream message") + + +class InvalidStreamNameError(JetStreamError): + def __init__(self): + super().__init__(message="invalid stream name") + + +class InvalidSubjectError(JetStreamError): + def __init__(self): + super().__init__(message="invalid subject name") + + +class InvalidConsumerNameError(JetStreamError): + def __init__(self): + super().__init__(message="invalid consumer name") + + +class NoMessagesError(JetStreamError): + def __init__(self): + super().__init__(message="no messages") + + +class MaxBytesExceededError(JetStreamError): + def __init__(self): + super().__init__(message="message size exceeds max bytes") + + +class ConsumerDeletedError(JetStreamError): + def __init__(self): + super().__init__(message="consumer deleted") + + +class ConsumerLeadershipChangedError(JetStreamError): + def __init__(self): + super().__init__(message="leadership change") + + +class HandlerRequiredError(JetStreamError): + def __init__(self): + super().__init__(message="handler cannot be empty") + + +class EndOfDataError(JetStreamError): + def __init__(self): + super().__init__(message="end of data reached") + + +class NoHeartbeatError(JetStreamError): + def __init__(self): + super().__init__(message="no heartbeat received") + + +class ConsumerHasActiveSubscriptionError(JetStreamError): + def __init__(self): + super().__init__(message="consumer has active subscription") + + +class MsgNotBoundError(JetStreamError): + def __init__(self): + super().__init__(message="message is not bound to subscription/connection") + + +class MsgNoReplyError(JetStreamError): + def __init__(self): + super().__init__(message="message does not have a reply") + + +class MsgDeleteUnsuccessfulError(JetStreamError): + def __init__(self): + super().__init__(message="message deletion unsuccessful") + + +class 
AsyncPublishReplySubjectSetError(JetStreamError): + def __init__(self): + super().__init__(message="reply subject should be empty") + + +class TooManyStalledMsgsError(JetStreamError): + def __init__(self): + super().__init__(message="stalled with too many outstanding async published messages") + + +class InvalidOptionError(JetStreamError): + def __init__(self): + super().__init__(message="invalid jetstream option") + + +class MsgIteratorClosedError(JetStreamError): + def __init__(self): + super().__init__(message="messages iterator closed") + + +class OrderedConsumerResetError(JetStreamError): + def __init__(self): + super().__init__(message="recreating ordered consumer") + + +class OrderConsumerUsedAsFetchError(JetStreamError): + def __init__(self): + super().__init__(message="ordered consumer initialized as fetch") + + +class OrderConsumerUsedAsConsumeError(JetStreamError): + def __init__(self): + super().__init__(message=" diff --git a/nats/jetstream/publish.py b/nats/jetstream/publish.py index 05799fa0..62b05fcf 100644 --- a/nats/jetstream/publish.py +++ b/nats/jetstream/publish.py @@ -16,10 +16,8 @@ from asyncio import Future from dataclasses import dataclass, field -from typing import Dict, Optional +from typing import Dict, Optional, cast -from nats.aio.client import Client -from nats.aio.msg import Msg from nats.errors import * from nats.jetstream.api import * from nats.jetstream.errors import * @@ -50,17 +48,8 @@ class PubAck: """ class Publisher: - def __init__(self, client: Client, timeout: float = 1): - self._client = client - self._timeout = timeout - - @property - def timeout(self) -> float: - return self._timeout - - @property - def client(self) -> Client: - return self._client + def __init__(self, client: Client): + self.client = client async def publish( self, @@ -68,7 +57,6 @@ async def publish( payload: bytes = b'', timeout: Optional[float] = None, headers: Optional[Dict] = None, - *, id: Optional[str] = None, expected_last_msg_id: Optional[str] 
= None, expected_stream: Optional[str] = None, @@ -81,9 +69,6 @@ async def publish( Performs a publish to a stream and waits for ack from server. """ - if timeout is None: - timeout = self.timeout - extra_headers = {} if expected_last_msg_id is not None: extra_headers[Header.EXPECTED_LAST_MSG_ID] = str(expected_last_msg_id) @@ -112,13 +97,19 @@ async def publish( headers=headers, ) - pub_ack = parse_json_response(msg.data, PubAck) - if pub_ack.stream == None: + pub_ack_response = PubAckResponse.from_json(msg.data) + if pub_ack_response.error is not None: + raise Error(**pub_ack_response.error) + + if pub_ack_response.stream == None: raise InvalidAckError() - return pub_ack + return cast(PubAck, pub_ack_response) except NoRespondersError: if attempt < retry_attempts - 1: await asyncio.sleep(retry_wait) raise NoStreamResponseError + +class PubAckResponse(Response, PubAck): + pass diff --git a/nats/jetstream/stream.py b/nats/jetstream/stream.py index e9c30336..11ee3194 100644 --- a/nats/jetstream/stream.py +++ b/nats/jetstream/stream.py @@ -16,12 +16,71 @@ from enum import Enum from dataclasses import dataclass, field -from typing import List, Optional -import datetime -from typing_extensions import AsyncIterator +from types import NotImplementedType +from typing import List, Optional, cast +from datetime import datetime, timedelta -from nats.aio.client import Client -from nats.jetstream.api import subject, request_json +from nats.jetstream.api import Client, Paged, Request, Response +from nats.jetstream.errors import * + +class RetentionPolicy(Enum): + """ + RetentionPolicy determines how messages in a stream are retained. + """ + + LIMITS = "limits" + """LimitsPolicy means that messages are retained until any given limit is reached. 
This could be one of MaxMsgs, MaxBytes, or MaxAge.""" + + INTEREST = "interest" + """InterestPolicy specifies that when all known observables have acknowledged a message, it can be removed.""" + + WORKQUEUE = "workqueue" + """WorkQueuePolicy specifies that when the first worker or subscriber acknowledges the message, it can be removed.""" + + +class DiscardPolicy(Enum): + """ + DiscardPolicy determines how to proceed when limits of messages or bytes + are reached. + """ + + OLD = "old" + """DiscardOld will remove older messages to return to the limits. This is the default.""" + + NEW = "new" + """DiscardNew will fail to store new messages once the limits are reached.""" + + +class StorageType(Enum): + """ + StorageType determines how messages are stored for retention. + """ + + FILE = "file" + """ + Specifies on disk storage. + """ + + MEMORY = "memory" + """ + Specifies in-memory storage. + """ + + +class StoreCompression(Enum): + """ + StoreCompression determines how messages are compressed. + """ + + NONE = "none" + """ + Disables compression on the stream. + """ + + S2 = "s2" + """ + Enables S2 compression on the stream. + """ @dataclass class StreamInfo: @@ -29,13 +88,13 @@ class StreamInfo: StreamInfo shows config and current state for this stream. 
""" - timestamp: datetime.datetime = field(metadata={'json': 'ts'}) + timestamp: datetime = field(metadata={'json': 'ts'}) """Indicates when the info was gathered by the server.""" config: StreamConfig = field(metadata={'json': 'config'}) """Contains the configuration settings of the stream, set when creating or updating the stream.""" - created: datetime.datetime = field(metadata={'json': 'created'}) + created: datetime = field(metadata={'json': 'created'}) """The timestamp when the stream was created.""" state: StreamState = field(metadata={'json': 'state'}) @@ -165,7 +224,7 @@ class StreamSourceInfo: lag: int = field(metadata={'json': 'lag'}) """Lag informs how many messages behind the source/mirror operation is. This will only show correctly if there is active communication with stream/mirror.""" - active: datetime.timedelta = field(metadata={'json': 'active'}) + active: timedelta = field(metadata={'json': 'active'}) """Active informs when last the mirror or sourced stream had activity. 
Value will be -1 when there has been no activity.""" filter_subject: Optional[str] = field(default=None, metadata={'json': 'filter_subject'}) @@ -189,13 +248,13 @@ class StreamState: first_sequence: int = field(metadata={'json': 'first_seq'}) """The the sequence number of the first message in the stream.""" - first_time: datetime.datetime = field(metadata={'json': 'first_ts'}) + first_time: datetime = field(metadata={'json': 'first_ts'}) """The timestamp of the first message in the stream.""" last_sequence: int = field(metadata={'json': 'last_seq'}) """The sequence number of the last message in the stream.""" - last_time: datetime.datetime = field(metadata={'json': 'last_ts'}) + last_time: datetime = field(metadata={'json': 'last_ts'}) """The timestamp of the last message in the stream.""" consumers: int = field(metadata={'json': 'consumer_count'}) @@ -243,12 +302,12 @@ class PeerInfo: current: bool = field(metadata={'json': 'current'}) """Indicates if the peer is up to date and synchronized with the leader.""" + active: timedelta = field(metadata={'json': 'active'}) + """The duration since this peer was last seen.""" + offline: Optional[bool] = field(default=None, metadata={'json': 'offline'}) """Indicates if the peer is considered offline by the group.""" - active: datetime.timedelta = field(metadata={'json': 'active'}) - """The duration since this peer was last seen.""" - lag: Optional[int] = field(default=None, metadata={'json': 'lag'}) """The number of uncommitted operations this peer is behind the leader.""" @@ -310,7 +369,7 @@ class StreamSource: opt_start_seq: Optional[int] = field(default=None, metadata={'json': 'opt_start_seq'}) """The sequence number to start sourcing from.""" - opt_start_time: Optional[datetime.datetime] = field(default=None, metadata={'json': 'opt_start_time'}) + opt_start_time: Optional[datetime] = field(default=None, metadata={'json': 'opt_start_time'}) """The timestamp of messages to start sourcing from.""" filter_subject: 
Optional[str] = field(default=None, metadata={'json': 'filter_subject'}) @@ -358,117 +417,111 @@ class StreamConsumerLimits: """A maximum number of outstanding unacknowledged messages for a consumer.""" -class RetentionPolicy(Enum): - """ - RetentionPolicy determines how messages in a stream are retained. - """ - - LIMITS = "limits" - """LimitsPolicy means that messages are retained until any given limit is reached. This could be one of MaxMsgs, MaxBytes, or MaxAge.""" - - INTEREST = "interest" - """InterestPolicy specifies that when all known observables have acknowledged a message, it can be removed.""" - - WORKQUEUE = "workqueue" - """WorkQueuePolicy specifies that when the first worker or subscriber acknowledges the message, it can be removed.""" - -class DiscardPolicy(Enum): +class Stream: """ - DiscardPolicy determines how to proceed when limits of messages or bytes - are reached. + Stream contains operations on an existing stream. It allows fetching and removing + messages from a stream, as well as purging a stream. """ - OLD = "old" - """DiscardOld will remove older messages to return to the limits. This is the default.""" - - NEW = "new" - """DiscardNew will fail to store new messages once the limits are reached.""" + def __init__(self, client: Client, name: str, info: StreamInfo): + self._client = client + self._name = name + self._info = info + async def info(self, subject_filter: Optional[str] = None, deleted_details: Optional[bool] = None, timeout: Optional[float] = None) -> StreamInfo: + """Returns `StreamInfo` from the server.""" + info_request = StreamInfoRequest( + subject_filter=subject_filter, + deleted_details=deleted_details, + ) -class StorageType(Enum): - """ - StorageType determines how messages are stored for retention. - """ + subject_map = {} + offset = 0 - FILE = "file" - """ - Specifies on disk storage. - """ - - MEMORY = "memory" - """ - Specifies in-memory storage. 
- """ + info_result = None + info_subject = f"STREAM.INFO.{self._name}" + while True: + if info_request.subject_filter is not None: + info_request.offset = offset -class StoreCompression(Enum): - """ - StoreCompression determines how messages are compressed. - """ + info_response = await self._client.request_json(info_subject, info_request, StreamInfoResponse, timeout=timeout) + if info_response.error is not None: + raise NotImplementedError - NONE = "none" - """ - Disables compression on the stream. - """ + info = cast(StreamInfo, info_response) + total = info_response.total if info_response.total != 0 else 0 - S2 = "s2" - """ - Enables S2 compression on the stream. - """ + if len(info.state.subjects) > 0: + for subject, msgs in info.state.subjects.items(): + subject_map[subject] = msgs + offset = len(subject_map) + if total == 0 or total <= offset: + info.state.subjects = None + # We don't want to store subjects in cache + cached = info + info.state.subjects = subject_map -class Stream: - """ - Stream contains operations on an existing stream. It allows fetching and removing - messages from a stream, as well as purging a stream. 
- """ + self._info = cached + break - def __init__(self, client: Client, name: str, info: StreamInfo, api_prefix: str): - self._client = client - self._name = name - self._info = info - self._api_prefix = api_prefix + return self._info @property - def api_prefix(self) -> str: - return self._api_prefix - - async def info(self, opts: Optional[List[Any]] = None, timeout: Optional[int] = None) -> StreamInfo: - """Info returns StreamInfo from the server.""" - info_subject = subject(self._api_prefix, f"STREAM.INFO.{self._name}") - info_response = await request_json(self._client, info_subject, timeout=timeout) - def cached_info(self) -> StreamInfo: - """CachedInfo returns StreamInfo currently cached on this stream.""" + """Returns the `StreamInfo` currently cached on this stream.""" return self._info - async def purge(self, opts: Optional[List[Any]] = None, timeout: Optional[int] = None) -> None: + # TODO(caspervonb): Go does not return anything for this operation, should we? + async def purge( + self, + sequence: Optional[int] = None, + keep: Optional[int] = None, + subject: Optional[str] = None, + timeout: Optional[float] = None + ) -> int: """ Removes messages from a stream. This is a destructive operation. """ - raise NotImplementedError - async def get_msg(self, seq: int, opts: Optional[List[Any]] = None, timeout: Optional[int] = None) -> RawStreamMsg: + if keep is not None and sequence is not None: + raise ValueError("both 'keep' and 'sequence' cannot be provided in purge request") + + purge_subject = f"STREAM.PURGE.{self._name}" + purge_request = StreamPurgeRequest( + sequence=sequence, + keep=keep, + subject=subject, + ) + purge_response = await self._client.request_json(purge_subject, purge_request, StreamPurgeResponse, timeout=timeout) + + return purge_response.purged + + async def get_msg(self, sequence: int, timeout: Optional[float] = None) -> RawStreamMsg: """ Retrieves a raw stream message stored in JetStream by sequence number. 
""" + if self._info.config.allow_direct: + pass + raise NotImplementedError - async def get_last_msg_for_subject(self, subject: str, timeout: Optional[int] = None) -> RawStreamMsg: + async def get_last_msg_for_subject(self, subject: str, timeout: Optional[float] = None) -> RawStreamMsg: """ Retrieves the last raw stream message stored in JetStream on a given subject. """ raise NotImplementedError - async def delete_msg(self, seq: int, timeout: Optional[int] = None) -> None: + async def delete_msg(self, seq: int, timeout: Optional[float] = None) -> None: """ Deletes a message from a stream. """ raise NotImplementedError - async def secure_delete_msg(self, seq: int, timeout: Optional[int] = None) -> None: + async def secure_delete_msg(self, seq: int, timeout: Optional[float] = None) -> None: """ Deletes a message from a stream. """ @@ -480,38 +533,58 @@ class StreamManager: Provides methods for managing streams. """ - async def create_stream(self, config: StreamConfig, timeout: Optional[int] = None) -> Stream: + async def create_stream(self, config: StreamConfig, timeout: Optional[float] = None) -> Stream: """ Creates a new stream with given config. """ raise NotImplementedError - async def update_stream(self, config: StreamConfig, timeout: Optional[int] = None) -> Stream: + async def update_stream(self, config: StreamConfig, timeout: Optional[float] = None) -> Stream: """ Updates an existing stream with the given config. 
""" raise NotImplementedError - async def create_or_update_stream(self, config: StreamConfig, timeout: Optional[int] = None) -> Stream: + async def create_or_update_stream(self, config: StreamConfig, timeout: Optional[float] = None) -> Stream: """CreateOrUpdateStream creates a stream with given config or updates it if it already exists.""" raise NotImplementedError - async def stream(self, stream: str, timeout: Optional[int] = None) -> Stream: + async def stream(self, stream: str, timeout: Optional[float] = None) -> Stream: """Stream fetches StreamInfo and returns a Stream interface for a given stream name.""" raise NotImplementedError - async def stream_name_by_subject(self, subject: str, timeout: Optional[int] = None) -> str: + async def stream_name_by_subject(self, subject: str, timeout: Optional[float] = None) -> str: """StreamNameBySubject returns a stream name listening on a given subject.""" raise NotImplementedError - async def delete_stream(self, stream: str, timeout: Optional[int] = None) -> None: + async def delete_stream(self, stream: str, timeout: Optional[float] = None) -> None: """DeleteStream removes a stream with given name.""" raise NotImplementedError - def list_streams(self, timeout: Optional[int] = None) -> AsyncIterator[StreamInfo]: + def list_streams(self, timeout: Optional[float] = None) -> AsyncIterator[StreamInfo]: """ListStreams returns a StreamInfoLister for iterating over stream infos.""" raise NotImplementedError - def stream_names(self, timeout: Optional[int] = None) -> AsyncIterator[str]: + def stream_names(self, timeout: Optional[float] = None) -> AsyncIterator[str]: """StreamNames returns a StreamNameLister for iterating over stream names.""" raise NotImplementedError + +@dataclass +class StreamInfoRequest(Request, Paged): + deleted_details: Optional[bool] = field(default=False, metadata={'json': 'deleted_details'}) + subject_filter: Optional[str] = field(default=None, metadata={'json': 'subjects_filter'}) + +@dataclass +class 
StreamInfoResponse(Response, StreamInfo): + pass + +@dataclass +class StreamPurgeRequest(Request): + subject: Optional[str] = field(default=None, metadata={'json': 'filter'}) + sequence: Optional[int] = field(default=None, metadata={'json': 'seq'}) + keep: Optional[int] = field(default=None, metadata={'json': 'keep'}) + +@dataclass +class StreamPurgeResponse(Response): + success: bool = field(default=False, metadata={'json': 'success'}) + purged: int = field(default=0, metadata={'json': 'purged'}) From e72e41a3a1aa7d8087abf4ccdb301feabb76f1e5 Mon Sep 17 00:00:00 2001 From: Casper Beyer Date: Tue, 25 Jun 2024 02:37:37 +0200 Subject: [PATCH 07/22] wip --- nats/jetstream/api.py | 17 ++++--- nats/jetstream/errors.py | 92 +++++++++++++++++------------------ nats/jetstream/stream.py | 102 +++++++++++++++++++++++++-------------- 3 files changed, 123 insertions(+), 88 deletions(-) diff --git a/nats/jetstream/api.py b/nats/jetstream/api.py index 7e63f911..5f5ff595 100644 --- a/nats/jetstream/api.py +++ b/nats/jetstream/api.py @@ -20,11 +20,8 @@ from typing import Any, Dict, Optional, Self, Type, TypeVar, get_origin, get_args from urllib import parse -from nats.jetstream.errors import Error from nats.js.api import DEFAULT_PREFIX -T = TypeVar("T") - def as_dict(instance: Any) -> Dict[str, Any]: if not is_dataclass(instance): return instance @@ -43,7 +40,7 @@ def as_dict(instance: Any) -> Dict[str, Any]: result[name] = value return result -def from_dict(data, cls: Type[T]) -> T: +def from_dict(data, cls: type) -> Any: if not is_dataclass(cls): return data @@ -75,6 +72,8 @@ def from_dict(data, cls: Type[T]) -> T: return cls(**kwargs) +T = TypeVar("T", bound="Response") + @dataclass class Request: def as_dict(self) -> Dict[str, Any]: @@ -90,7 +89,7 @@ class Paged: limit: int = field(default=0, metadata={"json": "limit"}) @dataclass -class ErrorResponse: +class Error: code: Optional[int] = field(default=None, metadata={"json": "code"}) error_code: Optional[int] = 
field(default=None, metadata={"json": "err_code"}) description: Optional[str] = field(default=None, metadata={"json": "description"}) @@ -98,14 +97,20 @@ class ErrorResponse: @dataclass class Response: type: str - error: Optional[ErrorResponse] = None + error: Optional[Error] = None @classmethod def from_dict(cls: Type[T], data: Dict[str, Any]) -> T: + """ + Create an instance of the class from a dictionary. + """ return cls(**data) @classmethod def from_json(cls: Type[T], data: str) -> T: + """ + Create an instance of the class from JSON + """ return cls.from_dict(json.loads(data)) class Client: diff --git a/nats/jetstream/errors.py b/nats/jetstream/errors.py index 8f342bd6..905f353a 100644 --- a/nats/jetstream/errors.py +++ b/nats/jetstream/errors.py @@ -34,7 +34,7 @@ class ErrorCode(Enum): STREAM_WRONG_LAST_SEQUENCE = 10071 -class JetStreamError(Exception): +class Error(Exception): def __init__(self, message=None, code=None, error_code=None, description=None): self.message = message self.code = code @@ -47,7 +47,7 @@ def __str__(self): return f"nats: {self.message}" -class JetStreamNotEnabledError(JetStreamError): +class JetStreamNotEnabledError(Error): def __init__(self): super().__init__( message="jetstream not enabled", @@ -57,7 +57,7 @@ def __init__(self): ) -class JetStreamNotEnabledForAccountError(JetStreamError): +class JetStreamNotEnabledForAccountError(Error): def __init__(self): super().__init__( message="jetstream not enabled for account", @@ -67,7 +67,7 @@ def __init__(self): ) -class StreamNotFoundError(JetStreamError): +class StreamNotFoundError(Error): def __init__(self): super().__init__( message="stream not found", @@ -77,7 +77,7 @@ def __init__(self): ) -class StreamNameAlreadyInUseError(JetStreamError): +class StreamNameAlreadyInUseError(Error): def __init__(self): super().__init__( message="stream name already in use", @@ -87,27 +87,27 @@ def __init__(self): ) -class StreamSubjectTransformNotSupportedError(JetStreamError): +class 
StreamSubjectTransformNotSupportedError(Error): def __init__(self): super().__init__(message="stream subject transformation not supported by nats-server") -class StreamSourceSubjectTransformNotSupportedError(JetStreamError): +class StreamSourceSubjectTransformNotSupportedError(Error): def __init__(self): super().__init__(message="stream subject transformation not supported by nats-server") -class StreamSourceNotSupportedError(JetStreamError): +class StreamSourceNotSupportedError(Error): def __init__(self): super().__init__(message="stream sourcing is not supported by nats-server") -class StreamSourceMultipleFilterSubjectsNotSupportedError(JetStreamError): +class StreamSourceMultipleFilterSubjectsNotSupportedError(Error): def __init__(self): super().__init__(message="stream sourcing with multiple subject filters not supported by nats-server") -class ConsumerNotFoundError(JetStreamError): +class ConsumerNotFoundError(Error): def __init__(self): super().__init__( message="consumer not found", @@ -117,7 +117,7 @@ def __init__(self): ) -class ConsumerExistsError(JetStreamError): +class ConsumerExistsError(Error): def __init__(self): super().__init__( message="consumer already exists", @@ -127,7 +127,7 @@ def __init__(self): ) -class ConsumerDoesNotExistError(JetStreamError): +class ConsumerDoesNotExistError(Error): def __init__(self): super().__init__( message="consumer does not exist", @@ -137,7 +137,7 @@ def __init__(self): ) -class MessageNotFoundError(JetStreamError): +class MessageNotFoundError(Error): def __init__(self): super().__init__( message="message not found", @@ -147,7 +147,7 @@ def __init__(self): ) -class BadRequestError(JetStreamError): +class BadRequestError(Error): def __init__(self): super().__init__( message="bad request", @@ -157,7 +157,7 @@ def __init__(self): ) -class ConsumerCreateError(JetStreamError): +class ConsumerCreateError(Error): def __init__(self): super().__init__( message="could not create consumer", @@ -167,7 +167,7 @@ def 
__init__(self): ) -class DuplicateFilterSubjectsError(JetStreamError): +class DuplicateFilterSubjectsError(Error): def __init__(self): super().__init__( message="consumer cannot have both FilterSubject and FilterSubjects specified", @@ -177,7 +177,7 @@ def __init__(self): ) -class OverlappingFilterSubjectsError(JetStreamError): +class OverlappingFilterSubjectsError(Error): def __init__(self): super().__init__( message="consumer subject filters cannot overlap", @@ -187,7 +187,7 @@ def __init__(self): ) -class EmptyFilterError(JetStreamError): +class EmptyFilterError(Error): def __init__(self): super().__init__( message="consumer filter in FilterSubjects cannot be empty", @@ -197,141 +197,141 @@ def __init__(self): ) -class ConsumerMultipleFilterSubjectsNotSupportedError(JetStreamError): +class ConsumerMultipleFilterSubjectsNotSupportedError(Error): def __init__(self): super().__init__(message="multiple consumer filter subjects not supported by nats-server") -class ConsumerNameAlreadyInUseError(JetStreamError): +class ConsumerNameAlreadyInUseError(Error): def __init__(self): super().__init__(message="consumer name already in use") -class InvalidJSAckError(JetStreamError): +class InvalidJSAckError(Error): def __init__(self): super().__init__(message="invalid jetstream publish response") -class StreamNameRequiredError(JetStreamError): +class StreamNameRequiredError(Error): def __init__(self): super().__init__(message="stream name is required") -class MsgAlreadyAckdError(JetStreamError): +class MsgAlreadyAckdError(Error): def __init__(self): super().__init__(message="message was already acknowledged") -class NoStreamResponseError(JetStreamError): +class NoStreamResponseError(Error): def __init__(self): super().__init__(message="no response from stream") -class NotJSMessageError(JetStreamError): +class NotJSMessageError(Error): def __init__(self): super().__init__(message="not a jetstream message") -class InvalidStreamNameError(JetStreamError): +class 
InvalidStreamNameError(Error): def __init__(self): super().__init__(message="invalid stream name") -class InvalidSubjectError(JetStreamError): +class InvalidSubjectError(Error): def __init__(self): super().__init__(message="invalid subject name") -class InvalidConsumerNameError(JetStreamError): +class InvalidConsumerNameError(Error): def __init__(self): super().__init__(message="invalid consumer name") -class NoMessagesError(JetStreamError): +class NoMessagesError(Error): def __init__(self): super().__init__(message="no messages") -class MaxBytesExceededError(JetStreamError): +class MaxBytesExceededError(Error): def __init__(self): super().__init__(message="message size exceeds max bytes") -class ConsumerDeletedError(JetStreamError): +class ConsumerDeletedError(Error): def __init__(self): super().__init__(message="consumer deleted") -class ConsumerLeadershipChangedError(JetStreamError): +class ConsumerLeadershipChangedError(Error): def __init__(self): super().__init__(message="leadership change") -class HandlerRequiredError(JetStreamError): +class HandlerRequiredError(Error): def __init__(self): super().__init__(message="handler cannot be empty") -class EndOfDataError(JetStreamError): +class EndOfDataError(Error): def __init__(self): super().__init__(message="end of data reached") -class NoHeartbeatError(JetStreamError): +class NoHeartbeatError(Error): def __init__(self): super().__init__(message="no heartbeat received") -class ConsumerHasActiveSubscriptionError(JetStreamError): +class ConsumerHasActiveSubscriptionError(Error): def __init__(self): super().__init__(message="consumer has active subscription") -class MsgNotBoundError(JetStreamError): +class MsgNotBoundError(Error): def __init__(self): super().__init__(message="message is not bound to subscription/connection") -class MsgNoReplyError(JetStreamError): +class MsgNoReplyError(Error): def __init__(self): super().__init__(message="message does not have a reply") -class 
MsgDeleteUnsuccessfulError(JetStreamError): +class MsgDeleteUnsuccessfulError(Error): def __init__(self): super().__init__(message="message deletion unsuccessful") -class AsyncPublishReplySubjectSetError(JetStreamError): +class AsyncPublishReplySubjectSetError(Error): def __init__(self): super().__init__(message="reply subject should be empty") -class TooManyStalledMsgsError(JetStreamError): +class TooManyStalledMsgsError(Error): def __init__(self): super().__init__(message="stalled with too many outstanding async published messages") -class InvalidOptionError(JetStreamError): +class InvalidOptionError(Error): def __init__(self): super().__init__(message="invalid jetstream option") -class MsgIteratorClosedError(JetStreamError): +class MsgIteratorClosedError(Error): def __init__(self): super().__init__(message="messages iterator closed") -class OrderedConsumerResetError(JetStreamError): +class OrderedConsumerResetError(Error): def __init__(self): super().__init__(message="recreating ordered consumer") -class OrderConsumerUsedAsFetchError(JetStreamError): +class OrderConsumerUsedAsFetchError(Error): def __init__(self): super().__init__(message="ordered consumer initialized as fetch") -class OrderConsumerUsedAsConsumeError(JetStreamError): +class OrderConsumerUsedAsConsumeError(Error): def __init__(self): super().__init__(message=" diff --git a/nats/jetstream/stream.py b/nats/jetstream/stream.py index 11ee3194..79951593 100644 --- a/nats/jetstream/stream.py +++ b/nats/jetstream/stream.py @@ -19,6 +19,7 @@ from types import NotImplementedType from typing import List, Optional, cast from datetime import datetime, timedelta +from typing_extensions import Sequence from nats.jetstream.api import Client, Paged, Request, Response from nats.jetstream.errors import * @@ -436,38 +437,15 @@ async def info(self, subject_filter: Optional[str] = None, deleted_details: Opti deleted_details=deleted_details, ) - subject_map = {} - offset = 0 - - info_result = None info_subject = 
f"STREAM.INFO.{self._name}" + info_response = await self._client.request_json(info_subject, info_request, StreamInfoResponse, timeout=timeout) + if info_response.error is not None: + if info_response.error.error_code == ErrorCode.STREAM_NOT_FOUND: + raise StreamNotFoundError(*info_response.error) - while True: - if info_request.subject_filter is not None: - info_request.offset = offset - - info_response = await self._client.request_json(info_subject, info_request, StreamInfoResponse, timeout=timeout) - if info_response.error is not None: - raise NotImplementedError - - info = cast(StreamInfo, info_response) - total = info_response.total if info_response.total != 0 else 0 - - if len(info.state.subjects) > 0: - for subject, msgs in info.state.subjects.items(): - subject_map[subject] = msgs - offset = len(subject_map) + raise Error(*info_response.error) - if total == 0 or total <= offset: - info.state.subjects = None - # We don't want to store subjects in cache - cached = info - info.state.subjects = subject_map - - self._info = cached - break - - return self._info + return cast(StreamInfo, info_response) @property def cached_info(self) -> StreamInfo: @@ -496,7 +474,10 @@ async def purge( keep=keep, subject=subject, ) + purge_response = await self._client.request_json(purge_subject, purge_request, StreamPurgeResponse, timeout=timeout) + if purge_response.error is not None: + raise Error(*purge_response.error) return purge_response.purged @@ -504,9 +485,6 @@ async def get_msg(self, sequence: int, timeout: Optional[float] = None) -> RawSt """ Retrieves a raw stream message stored in JetStream by sequence number. 
""" - if self._info.config.allow_direct: - pass - raise NotImplementedError async def get_last_msg_for_subject(self, subject: str, timeout: Optional[float] = None) -> RawStreamMsg: @@ -515,18 +493,28 @@ async def get_last_msg_for_subject(self, subject: str, timeout: Optional[float] """ raise NotImplementedError - async def delete_msg(self, seq: int, timeout: Optional[float] = None) -> None: + async def _get_msg(self, request: GetMsgRequest) -> RawStreamMsg: + pass + + async def delete_msg(self, sequence: int, timeout: Optional[float] = None) -> None: """ Deletes a message from a stream. """ raise NotImplementedError - async def secure_delete_msg(self, seq: int, timeout: Optional[float] = None) -> None: + async def secure_delete_msg(self, sequence: int, timeout: Optional[float] = None) -> None: """ Deletes a message from a stream. """ - raise NotImplementedError + self._delete_msg(sequence, no_erase=False, timeout=timeout) + + async def _delete_msg(self, sequence: int, no_erase: bool, timeout: Optional[float] = None): + msg_delete_request = MsgDeleteRequest( + sequence=sequence, + no_erase=no_erase, + ) + msg_delete_response = self._client.request_json() class StreamManager: """ @@ -575,16 +563,58 @@ class StreamInfoRequest(Request, Paged): subject_filter: Optional[str] = field(default=None, metadata={'json': 'subjects_filter'}) @dataclass -class StreamInfoResponse(Response, StreamInfo): +class StreamInfoResponse(Response, Paged, StreamInfo): pass + @dataclass class StreamPurgeRequest(Request): subject: Optional[str] = field(default=None, metadata={'json': 'filter'}) sequence: Optional[int] = field(default=None, metadata={'json': 'seq'}) keep: Optional[int] = field(default=None, metadata={'json': 'keep'}) + @dataclass class StreamPurgeResponse(Response): success: bool = field(default=False, metadata={'json': 'success'}) purged: int = field(default=0, metadata={'json': 'purged'}) + + +@dataclass +class MsgGetRequest(Request): + sequence : int = 
field(metadata={'json': 'seq'}) + last_for : int = field(metadata={'json': 'last_by_subj'}) + next_for: int = field(metadata={'json': 'next_by_subj'}) + +@dataclass +class MsgGetResponse(Response): + pass + +@dataclass +class MsgDeleteRequest(Request): + sequence: int = field(metadata={'json': 'seq'}) + no_erase: bool = field(metadata={'json': 'no_erase'}) + +@dataclass +class MsgDeleteResponse(Response): + success: bool = field(default=False, metadata={'json': 'success'}) + +__all__ = [ + 'RetentionPolicy', + 'DiscardPolicy', + 'StorageType', + 'StoreCompression', + 'StreamInfo', + 'StreamConfig', + 'StreamSourceInfo', + 'ClusterInfo', + 'PeerInfo', + 'SubjectTransformConfig', + 'Republish', + 'Placement', + 'StreamSource', + 'ExternalStream', + 'StreamConsumerLimits', + 'Stream', + 'StreamManager', +] From 9e0d711237c5f1926dde1ebb50767ec42d2c3df5 Mon Sep 17 00:00:00 2001 From: Casper Beyer Date: Tue, 25 Jun 2024 02:39:04 +0200 Subject: [PATCH 08/22] Stub out tests --- tests/test_jetstream_consumer.py | 4 ++++ tests/test_jetstream_errors.py | 4 ++++ tests/test_jetstream_message.py | 0 tests/test_jetstream_publish.py | 4 ++++ tests/test_jetstream_stream.py | 4 ++++ 5 files changed, 16 insertions(+) create mode 100644 tests/test_jetstream_consumer.py create mode 100644 tests/test_jetstream_errors.py create mode 100644 tests/test_jetstream_message.py create mode 100644 tests/test_jetstream_stream.py diff --git a/tests/test_jetstream_consumer.py b/tests/test_jetstream_consumer.py new file mode 100644 index 00000000..14dcf1de --- /dev/null +++ b/tests/test_jetstream_consumer.py @@ -0,0 +1,4 @@ +from tests.utils import SingleJetStreamServerTestCase + +class JetStreamConsumerTest(SingleJetStreamServerTestCase): + pass diff --git a/tests/test_jetstream_errors.py b/tests/test_jetstream_errors.py new file mode 100644 index 00000000..d3c40be2 --- /dev/null +++ b/tests/test_jetstream_errors.py @@ -0,0 +1,4 @@ +from tests.util import JetStreamServerTestCase + +class 
JetStreamErrorsTest(SingleJetStreamServerTestCase): + pass diff --git a/tests/test_jetstream_message.py b/tests/test_jetstream_message.py new file mode 100644 index 00000000..e69de29b diff --git a/tests/test_jetstream_publish.py b/tests/test_jetstream_publish.py index e69de29b..e3706dd7 100644 --- a/tests/test_jetstream_publish.py +++ b/tests/test_jetstream_publish.py @@ -0,0 +1,4 @@ +from tests.utils import SingleJetStreamServerTestCase + +class JetStreamPublishTest(SingleJetStreamServerTestCase): + pass diff --git a/tests/test_jetstream_stream.py b/tests/test_jetstream_stream.py new file mode 100644 index 00000000..4ba1b258 --- /dev/null +++ b/tests/test_jetstream_stream.py @@ -0,0 +1,4 @@ +from tests.utils import SingleJetStreamServerTestCase + +class JetStreamStreamTest(SingleJetStreamServerTestCase): + pass From bb4a4236f565189a9fed23a9a1b490f13790b88f Mon Sep 17 00:00:00 2001 From: Casper Beyer Date: Tue, 25 Jun 2024 18:30:46 +0200 Subject: [PATCH 09/22] stash --- nats/jetstream/errors.py | 20 -------------------- nats/jetstream/stream.py | 38 +++++++++++++++++++++----------------- 2 files changed, 21 insertions(+), 37 deletions(-) diff --git a/nats/jetstream/errors.py b/nats/jetstream/errors.py index 905f353a..9a730af3 100644 --- a/nats/jetstream/errors.py +++ b/nats/jetstream/errors.py @@ -315,23 +315,3 @@ def __init__(self): class InvalidOptionError(Error): def __init__(self): super().__init__(message="invalid jetstream option") - - -class MsgIteratorClosedError(Error): - def __init__(self): - super().__init__(message="messages iterator closed") - - -class OrderedConsumerResetError(Error): - def __init__(self): - super().__init__(message="recreating ordered consumer") - - -class OrderConsumerUsedAsFetchError(Error): - def __init__(self): - super().__init__(message="ordered consumer initialized as fetch") - - -class OrderConsumerUsedAsConsumeError(Error): - def __init__(self): - super().__init__(message=" diff --git a/nats/jetstream/stream.py 
b/nats/jetstream/stream.py index 79951593..8606cd64 100644 --- a/nats/jetstream/stream.py +++ b/nats/jetstream/stream.py @@ -417,8 +417,6 @@ class StreamConsumerLimits: max_ack_pending: Optional[int] = field(default=None, metadata={'json': 'max_ack_pending'}) """A maximum number of outstanding unacknowledged messages for a consumer.""" - - class Stream: """ Stream contains operations on an existing stream. It allows fetching and removing @@ -432,12 +430,11 @@ def __init__(self, client: Client, name: str, info: StreamInfo): async def info(self, subject_filter: Optional[str] = None, deleted_details: Optional[bool] = None, timeout: Optional[float] = None) -> StreamInfo: """Returns `StreamInfo` from the server.""" + info_subject = f"STREAM.INFO.{self._name}" info_request = StreamInfoRequest( subject_filter=subject_filter, deleted_details=deleted_details, ) - - info_subject = f"STREAM.INFO.{self._name}" info_response = await self._client.request_json(info_subject, info_request, StreamInfoResponse, timeout=timeout) if info_response.error is not None: if info_response.error.error_code == ErrorCode.STREAM_NOT_FOUND: @@ -447,6 +444,7 @@ async def info(self, subject_filter: Optional[str] = None, deleted_details: Opti return cast(StreamInfo, info_response) + @property def cached_info(self) -> StreamInfo: """Returns the `StreamInfo` currently cached on this stream.""" @@ -493,28 +491,34 @@ async def get_last_msg_for_subject(self, subject: str, timeout: Optional[float] """ raise NotImplementedError - async def _get_msg(self, request: GetMsgRequest) -> RawStreamMsg: - pass - async def delete_msg(self, sequence: int, timeout: Optional[float] = None) -> None: """ Deletes a message from a stream. 
""" - raise NotImplementedError + msg_delete_subject = f"STREAM.MSG.DELETE.{sequence}" + msg_delete_request = MsgDeleteRequest( + sequence=sequence, + no_erase=True, + ) + + msg_delete_response = await self._client.request_json(msg_delete_subject, msg_delete_request, MsgDeleteResponse, timeout=timeout) + if msg_delete_response.error is not None: + raise Error(*msg_delete_response.error) async def secure_delete_msg(self, sequence: int, timeout: Optional[float] = None) -> None: """ Deletes a message from a stream. """ - self._delete_msg(sequence, no_erase=False, timeout=timeout) - - async def _delete_msg(self, sequence: int, no_erase: bool, timeout: Optional[float] = None): + msg_delete_subject = f"STREAM.MSG.DELETE.{sequence}" msg_delete_request = MsgDeleteRequest( - sequence=sequence, - no_erase=no_erase, - ) + sequence=sequence, + no_erase=False, + ) + + msg_delete_response = await self._client.request_json(msg_delete_subject, msg_delete_request, MsgDeleteResponse, timeout=timeout) + if msg_delete_response.error is not None: + raise Error(*msg_delete_response.error) - msg_delete_response = self._client.request_json() class StreamManager: """ @@ -582,8 +586,8 @@ class StreamPurgeResponse(Response): @dataclass class MsgGetRequest(Request): - sequence : int = field(metadata={'json': 'seq'}) - last_for : int = field(metadata={'json': 'last_by_subj'}) + sequence: int = field(metadata={'json': 'seq'}) + last_for: int = field(metadata={'json': 'last_by_subj'}) next_for: int = field(metadata={'json': 'next_by_subj'}) @dataclass From 2ab3bdd86129271e059d331e161b92c9e8015c56 Mon Sep 17 00:00:00 2001 From: Casper Beyer Date: Tue, 25 Jun 2024 23:50:45 +0200 Subject: [PATCH 10/22] yapf --- nats/jetstream/__init__.py | 2 + nats/jetstream/api.py | 54 ++++++-- nats/jetstream/consumer.py | 161 +++++++++++++++++------ nats/jetstream/context.py | 2 + nats/jetstream/errors.py | 83 ++++++++++-- nats/jetstream/message.py | 2 + nats/jetstream/publish.py | 13 +- nats/jetstream/stream.py 
| 262 ++++++++++++++++++++++++++++--------- 8 files changed, 453 insertions(+), 126 deletions(-) diff --git a/nats/jetstream/__init__.py b/nats/jetstream/__init__.py index e20130b4..1b7c5ee7 100644 --- a/nats/jetstream/__init__.py +++ b/nats/jetstream/__init__.py @@ -15,7 +15,9 @@ from nats.aio.client import Client from .context import Context + async def new(client: Client) -> Context: return Context(client) + __all__ = ['new'] diff --git a/nats/jetstream/api.py b/nats/jetstream/api.py index 5f5ff595..ec816ea5 100644 --- a/nats/jetstream/api.py +++ b/nats/jetstream/api.py @@ -22,6 +22,7 @@ from nats.js.api import DEFAULT_PREFIX + def as_dict(instance: Any) -> Dict[str, Any]: if not is_dataclass(instance): return instance @@ -40,6 +41,7 @@ def as_dict(instance: Any) -> Dict[str, Any]: result[name] = value return result + def from_dict(data, cls: type) -> Any: if not is_dataclass(cls): return data @@ -63,36 +65,48 @@ def from_dict(data, cls: type) -> Any: if is_dataclass(field_type): value = from_dict(value, field_type) - elif field_origin is list and len(field_args) == 1 and is_dataclass(field_args[0]): + elif field_origin is list and len(field_args) == 1 and is_dataclass( + field_args[0]): value = [from_dict(item, field_args[0]) for item in value] - elif field_origin is dict and len(field_args) == 2 and is_dataclass(field_args[1]): + elif field_origin is dict and len(field_args) == 2 and is_dataclass( + field_args[1]): value = {k: from_dict(v, field_args[1]) for k, v in value.items()} kwargs[field.name] = value return cls(**kwargs) + T = TypeVar("T", bound="Response") + @dataclass class Request: + def as_dict(self) -> Dict[str, Any]: return as_dict(self) def as_json(self) -> str: return json.dumps(self.as_dict()) + @dataclass class Paged: - total: int = field(default=0, metadata={"json": "total"}) - offset: int = field(default=0, metadata={"json": "offset"}) - limit: int = field(default=0, metadata={"json": "limit"}) + total: int = field(default=0, 
metadata={"json": "total"}) + offset: int = field(default=0, metadata={"json": "offset"}) + limit: int = field(default=0, metadata={"json": "limit"}) + @dataclass class Error: code: Optional[int] = field(default=None, metadata={"json": "code"}) - error_code: Optional[int] = field(default=None, metadata={"json": "err_code"}) - description: Optional[str] = field(default=None, metadata={"json": "description"}) + error_code: Optional[int] = field( + default=None, metadata={"json": "err_code"} + ) + description: Optional[str] = field( + default=None, metadata={"json": "description"} + ) + @dataclass class Response: @@ -113,22 +127,38 @@ def from_json(cls: Type[T], data: str) -> T: """ return cls.from_dict(json.loads(data)) + class Client: """ Provides methods for sending requests and processing responses via JetStream. """ - def __init__(self, inner: Any, timeout: float = 1.0, prefix: str = DEFAULT_PREFIX) -> None: + + def __init__( + self, + inner: Any, + timeout: float = 1.0, + prefix: str = DEFAULT_PREFIX + ) -> None: self.inner = inner self.timeout = timeout self.prefix = None - async def request(self, subject: str, payload: bytes, timeout: Optional[float] = None, headers: Optional[Dict[str, str]] = None) -> Any: + async def request( + self, + subject: str, + payload: bytes, + timeout: Optional[float] = None, + headers: Optional[Dict[str, str]] = None + ) -> Any: if timeout is None: timeout = self.timeout self.inner.request(subject, payload, timeout=timeout) - async def request_json(self, subject: str, request_object: Request, response_type: Type[T], timeout: float | None) -> T: + async def request_json( + self, subject: str, request_object: Request, response_type: Type[T], + timeout: float | None + ) -> T: if self.prefix is not None: subject = f"{self.prefix}.{subject}" @@ -136,5 +166,7 @@ async def request_json(self, subject: str, request_object: Request, response_typ timeout = self.timeout request_payload = request_object.as_json() - response = await 
self.inner.request(subject, request_payload, timeout=timeout) + response = await self.inner.request( + subject, request_payload, timeout=timeout + ) return response_type.from_json(response.data) diff --git a/nats/jetstream/consumer.py b/nats/jetstream/consumer.py index c9750e25..eaecffc8 100644 --- a/nats/jetstream/consumer.py +++ b/nats/jetstream/consumer.py @@ -19,6 +19,7 @@ from dataclasses import dataclass, field from datetime import datetime, timedelta + class DeliverPolicy(Enum): """ DeliverPolicy determines from which point to start delivering messages. @@ -79,9 +80,12 @@ class SequenceInfo: stream: int = field(metadata={'json': 'stream_seq'}) """Stream sequence number.""" - last: Optional[datetime] = field(default=None, metadata={'json': 'last_active'}) + last: Optional[datetime] = field( + default=None, metadata={'json': 'last_active'} + ) """Last activity timestamp.""" + @dataclass class ConsumerConfig: """ @@ -90,76 +94,122 @@ class ConsumerConfig: name: Optional[str] = field(default=None, metadata={'json': 'name'}) """Optional name for the consumer.""" - durable: Optional[str] = field(default=None, metadata={'json': 'durable_name'}) + durable: Optional[str] = field( + default=None, metadata={'json': 'durable_name'} + ) """Optional durable name for the consumer.""" - description: Optional[str] = field(default=None, metadata={'json': 'description'}) + description: Optional[str] = field( + default=None, metadata={'json': 'description'} + ) """Optional description of the consumer.""" - deliver_policy: DeliverPolicy = field(default=DeliverPolicy.ALL, metadata={'json': 'deliver_policy'}) + deliver_policy: DeliverPolicy = field( + default=DeliverPolicy.ALL, metadata={'json': 'deliver_policy'} + ) """Defines from which point to start delivering messages from the stream. 
Defaults to DeliverAllPolicy.""" - opt_start_seq: Optional[int] = field(default=None, metadata={'json': 'opt_start_seq'}) + opt_start_seq: Optional[int] = field( + default=None, metadata={'json': 'opt_start_seq'} + ) """Optional sequence number from which to start message delivery.""" - opt_start_time: Optional[datetime] = field(default=None, metadata={'json': 'opt_start_time'}) + opt_start_time: Optional[datetime] = field( + default=None, metadata={'json': 'opt_start_time'} + ) """Optional time from which to start message delivery.""" - ack_policy: AckPolicy = field(default=AckPolicy.EXPLICIT, metadata={'json': 'ack_policy'}) + ack_policy: AckPolicy = field( + default=AckPolicy.EXPLICIT, metadata={'json': 'ack_policy'} + ) """Defines the acknowledgement policy for the consumer. Defaults to AckExplicitPolicy.""" - ack_wait: Optional[timedelta] = field(default=None, metadata={'json': 'ack_wait'}) + ack_wait: Optional[timedelta] = field( + default=None, metadata={'json': 'ack_wait'} + ) """How long the server will wait for an acknowledgement before resending a message.""" - max_deliver: Optional[int] = field(default=None, metadata={'json': 'max_deliver'}) + max_deliver: Optional[int] = field( + default=None, metadata={'json': 'max_deliver'} + ) """Maximum number of delivery attempts for a message.""" - backoff: Optional[List[timedelta]] = field(default=None, metadata={'json': 'backoff'}) + backoff: Optional[List[timedelta]] = field( + default=None, metadata={'json': 'backoff'} + ) """Optional back-off intervals for retrying message delivery after a failed acknowledgement.""" - filter_subject: Optional[str] = field(default=None, metadata={'json': 'filter_subject'}) + filter_subject: Optional[str] = field( + default=None, metadata={'json': 'filter_subject'} + ) """Can be used to filter messages delivered from the stream.""" - replay_policy: ReplayPolicy = field(default=ReplayPolicy.INSTANT, metadata={'json': 'replay_policy'}) + replay_policy: ReplayPolicy = field( + 
default=ReplayPolicy.INSTANT, metadata={'json': 'replay_policy'} + ) """Defines the rate at which messages are sent to the consumer.""" - rate_limit: Optional[int] = field(default=None, metadata={'json': 'rate_limit_bps'}) + rate_limit: Optional[int] = field( + default=None, metadata={'json': 'rate_limit_bps'} + ) """Optional maximum rate of message delivery in bits per second.""" - sample_frequency: Optional[str] = field(default=None, metadata={'json': 'sample_freq'}) + sample_frequency: Optional[str] = field( + default=None, metadata={'json': 'sample_freq'} + ) """Optional frequency for sampling how often acknowledgements are sampled for observability.""" - max_waiting: Optional[int] = field(default=None, metadata={'json': 'max_waiting'}) + max_waiting: Optional[int] = field( + default=None, metadata={'json': 'max_waiting'} + ) """Maximum number of pull requests waiting to be fulfilled.""" - max_ack_pending: Optional[int] = field(default=None, metadata={'json': 'max_ack_pending'}) + max_ack_pending: Optional[int] = field( + default=None, metadata={'json': 'max_ack_pending'} + ) """Maximum number of outstanding unacknowledged messages.""" - headers_only: Optional[bool] = field(default=None, metadata={'json': 'headers_only'}) + headers_only: Optional[bool] = field( + default=None, metadata={'json': 'headers_only'} + ) """Indicates whether only headers of messages should be sent.""" - max_request_batch: Optional[int] = field(default=None, metadata={'json': 'max_batch'}) + max_request_batch: Optional[int] = field( + default=None, metadata={'json': 'max_batch'} + ) """Optional maximum batch size a single pull request can make.""" - max_request_expires: Optional[timedelta] = field(default=None, metadata={'json': 'max_expires'}) + max_request_expires: Optional[timedelta] = field( + default=None, metadata={'json': 'max_expires'} + ) """Maximum duration a single pull request will wait for messages to be available to pull.""" - max_request_max_bytes: Optional[int] = 
field(default=None, metadata={'json': 'max_bytes'}) + max_request_max_bytes: Optional[int] = field( + default=None, metadata={'json': 'max_bytes'} + ) """Optional maximum total bytes that can be requested in a given batch.""" - inactive_threshold: Optional[timedelta] = field(default=None, metadata={'json': 'inactive_threshold'}) + inactive_threshold: Optional[timedelta] = field( + default=None, metadata={'json': 'inactive_threshold'} + ) """Duration which instructs the server to clean up the consumer if it has been inactive.""" replicas: int = field(metadata={'json': 'num_replicas'}) """Number of replicas for the consumer's state.""" - memory_storage: Optional[bool] = field(default=None, metadata={'json': 'mem_storage'}) + memory_storage: Optional[bool] = field( + default=None, metadata={'json': 'mem_storage'} + ) """Flag to force the consumer to use memory storage.""" - filter_subjects: Optional[List[str]] = field(default=None, metadata={'json': 'filter_subjects'}) + filter_subjects: Optional[List[str]] = field( + default=None, metadata={'json': 'filter_subjects'} + ) """Allows filtering messages from a stream by subject.""" - metadata: Optional[Dict[str, str]] = field(default=None, metadata={'json': 'metadata'}) + metadata: Optional[Dict[str, str]] = field( + default=None, metadata={'json': 'metadata'} + ) """Set of application-defined key-value pairs for associating metadata on the consumer.""" @@ -198,10 +248,14 @@ class ConsumerInfo: num_pending: int = field(metadata={'json': 'num_pending'}) """Number of messages that match the consumer's filter but have not been delivered yet.""" - cluster: Optional[ClusterInfo] = field(default=None, metadata={'json': 'cluster'}) + cluster: Optional[ClusterInfo] = field( + default=None, metadata={'json': 'cluster'} + ) """Information about the cluster to which this consumer belongs.""" - push_bound: Optional[bool] = field(default=None, metadata={'json': 'push_bound'}) + push_bound: Optional[bool] = field( + default=None, 
metadata={'json': 'push_bound'} + ) """Indicates whether at least one subscription exists for the delivery subject of this consumer.""" timestamp: datetime = field(metadata={'json': 'ts'}) @@ -213,30 +267,43 @@ class OrderedConsumerConfig: """ OrderedConsumerConfig is the configuration of an ordered JetStream consumer. """ - filter_subjects: Optional[List[str]] = field(default=None, metadata={'json': 'filter_subjects'}) + filter_subjects: Optional[List[str]] = field( + default=None, metadata={'json': 'filter_subjects'} + ) """Allows filtering messages from a stream by subject.""" deliver_policy: DeliverPolicy = field(metadata={'json': 'deliver_policy'}) """Defines from which point to start delivering messages from the stream.""" - opt_start_seq: Optional[int] = field(default=None, metadata={'json': 'opt_start_seq'}) + opt_start_seq: Optional[int] = field( + default=None, metadata={'json': 'opt_start_seq'} + ) """Optional sequence number from which to start message delivery.""" - opt_start_time: Optional[datetime] = field(default=None, metadata={'json': 'opt_start_time'}) + opt_start_time: Optional[datetime] = field( + default=None, metadata={'json': 'opt_start_time'} + ) """Optional time from which to start message delivery.""" replay_policy: ReplayPolicy = field(metadata={'json': 'replay_policy'}) """Defines the rate at which messages are sent to the consumer.""" - inactive_threshold: Optional[timedelta] = field(default=None, metadata={'json': 'inactive_threshold'}) + inactive_threshold: Optional[timedelta] = field( + default=None, metadata={'json': 'inactive_threshold'} + ) """Duration which instructs the server to clean up the consumer if it has been inactive.""" - headers_only: Optional[bool] = field(default=None, metadata={'json': 'headers_only'}) + headers_only: Optional[bool] = field( + default=None, metadata={'json': 'headers_only'} + ) """Indicates whether only headers of messages should be sent.""" - max_reset_attempts: Optional[int] = field(default=None, 
metadata={'json': 'max_reset_attempts'}) + max_reset_attempts: Optional[int] = field( + default=None, metadata={'json': 'max_reset_attempts'} + ) """Maximum number of attempts for the consumer to be recreated in a single recreation cycle.""" + class Consumer: raise NotImplementedError @@ -255,7 +322,10 @@ class StreamConsumerManager: """ async def create_or_update_consumer( - self, stream: str, config: ConsumerConfig, timeout: Optional[float] = None + self, + stream: str, + config: ConsumerConfig, + timeout: Optional[float] = None ) -> Consumer: """ CreateOrUpdateConsumer creates a consumer on a given stream with @@ -266,7 +336,10 @@ async def create_or_update_consumer( raise NotImplementedError async def create_consumer( - self, stream: str, config: ConsumerConfig, timeout: Optional[float] = None + self, + stream: str, + config: ConsumerConfig, + timeout: Optional[float] = None ) -> Consumer: """ CreateConsumer creates a consumer on a given stream with given @@ -279,7 +352,10 @@ async def create_consumer( raise NotImplementedError async def update_consumer( - self, stream: str, config: ConsumerConfig, timeout: Optional[float] = None + self, + stream: str, + config: ConsumerConfig, + timeout: Optional[float] = None ) -> Consumer: """ Updates an existing consumer. @@ -289,7 +365,10 @@ async def update_consumer( raise NotImplementedError async def ordered_consumer( - self, stream: str, config: OrderedConsumerConfig, timeout: Optional[float] = None + self, + stream: str, + config: OrderedConsumerConfig, + timeout: Optional[float] = None ) -> Consumer: """ Returns returns an instance of an ordered consumer. @@ -302,7 +381,10 @@ async def ordered_consumer( raise NotImplementedError async def consumer( - self, stream: str, consumer: str, timeout: Optional[float] = None + self, + stream: str, + consumer: str, + timeout: Optional[float] = None ) -> Consumer: """ Returns an instance of an existing consumer, allowing processing of messages. 
@@ -312,7 +394,10 @@ async def consumer( raise NotImplementedError async def delete_consumer( - self, stream: str, consumer: str, timeout: Optional[float] = None + self, + stream: str, + consumer: str, + timeout: Optional[float] = None ) -> None: """ Removes a consumer with given name from a stream. diff --git a/nats/jetstream/context.py b/nats/jetstream/context.py index a23e681f..94dd5d1d 100644 --- a/nats/jetstream/context.py +++ b/nats/jetstream/context.py @@ -18,7 +18,9 @@ from nats.jetstream.publish import Publisher from nats.jetstream.stream import StreamManager + class Context(Publisher, StreamManager): + def __init__(self, client: Client): Publisher.__init__(self, client) StreamManager.__init__(self, client) diff --git a/nats/jetstream/errors.py b/nats/jetstream/errors.py index 9a730af3..9de40085 100644 --- a/nats/jetstream/errors.py +++ b/nats/jetstream/errors.py @@ -15,6 +15,7 @@ from typing import Optional from enum import Enum + class ErrorCode(Enum): JETSTREAM_NOT_ENABLED_FOR_ACCOUNT = 10039 JETSTREAM_NOT_ENABLED = 10076 @@ -35,7 +36,10 @@ class ErrorCode(Enum): class Error(Exception): - def __init__(self, message=None, code=None, error_code=None, description=None): + + def __init__( + self, message=None, code=None, error_code=None, description=None + ): self.message = message self.code = code self.error_code = error_code @@ -48,6 +52,7 @@ def __str__(self): class JetStreamNotEnabledError(Error): + def __init__(self): super().__init__( message="jetstream not enabled", @@ -58,6 +63,7 @@ def __init__(self): class JetStreamNotEnabledForAccountError(Error): + def __init__(self): super().__init__( message="jetstream not enabled for account", @@ -68,6 +74,7 @@ def __init__(self): class StreamNotFoundError(Error): + def __init__(self): super().__init__( message="stream not found", @@ -78,6 +85,7 @@ def __init__(self): class StreamNameAlreadyInUseError(Error): + def __init__(self): super().__init__( message="stream name already in use", @@ -88,26 +96,40 @@ def 
__init__(self): class StreamSubjectTransformNotSupportedError(Error): + def __init__(self): - super().__init__(message="stream subject transformation not supported by nats-server") + super().__init__( + message="stream subject transformation not supported by nats-server" + ) class StreamSourceSubjectTransformNotSupportedError(Error): + def __init__(self): - super().__init__(message="stream subject transformation not supported by nats-server") + super().__init__( + message="stream subject transformation not supported by nats-server" + ) class StreamSourceNotSupportedError(Error): + def __init__(self): - super().__init__(message="stream sourcing is not supported by nats-server") + super().__init__( + message="stream sourcing is not supported by nats-server" + ) class StreamSourceMultipleFilterSubjectsNotSupportedError(Error): + def __init__(self): - super().__init__(message="stream sourcing with multiple subject filters not supported by nats-server") + super().__init__( + message= + "stream sourcing with multiple subject filters not supported by nats-server" + ) class ConsumerNotFoundError(Error): + def __init__(self): super().__init__( message="consumer not found", @@ -118,6 +140,7 @@ def __init__(self): class ConsumerExistsError(Error): + def __init__(self): super().__init__( message="consumer already exists", @@ -128,6 +151,7 @@ def __init__(self): class ConsumerDoesNotExistError(Error): + def __init__(self): super().__init__( message="consumer does not exist", @@ -138,6 +162,7 @@ def __init__(self): class MessageNotFoundError(Error): + def __init__(self): super().__init__( message="message not found", @@ -148,6 +173,7 @@ def __init__(self): class BadRequestError(Error): + def __init__(self): super().__init__( message="bad request", @@ -158,6 +184,7 @@ def __init__(self): class ConsumerCreateError(Error): + def __init__(self): super().__init__( message="could not create consumer", @@ -168,16 +195,20 @@ def __init__(self): class DuplicateFilterSubjectsError(Error): 
+ def __init__(self): super().__init__( - message="consumer cannot have both FilterSubject and FilterSubjects specified", + message= + "consumer cannot have both FilterSubject and FilterSubjects specified", code=500, error_code=ErrorCode.DUPLICATE_FILTER_SUBJECTS, - description="consumer cannot have both FilterSubject and FilterSubjects specified", + description= + "consumer cannot have both FilterSubject and FilterSubjects specified", ) class OverlappingFilterSubjectsError(Error): + def __init__(self): super().__init__( message="consumer subject filters cannot overlap", @@ -188,6 +219,7 @@ def __init__(self): class EmptyFilterError(Error): + def __init__(self): super().__init__( message="consumer filter in FilterSubjects cannot be empty", @@ -198,120 +230,151 @@ def __init__(self): class ConsumerMultipleFilterSubjectsNotSupportedError(Error): + def __init__(self): - super().__init__(message="multiple consumer filter subjects not supported by nats-server") + super().__init__( + message= + "multiple consumer filter subjects not supported by nats-server" + ) class ConsumerNameAlreadyInUseError(Error): + def __init__(self): super().__init__(message="consumer name already in use") class InvalidJSAckError(Error): + def __init__(self): super().__init__(message="invalid jetstream publish response") class StreamNameRequiredError(Error): + def __init__(self): super().__init__(message="stream name is required") class MsgAlreadyAckdError(Error): + def __init__(self): super().__init__(message="message was already acknowledged") class NoStreamResponseError(Error): + def __init__(self): super().__init__(message="no response from stream") class NotJSMessageError(Error): + def __init__(self): super().__init__(message="not a jetstream message") class InvalidStreamNameError(Error): + def __init__(self): super().__init__(message="invalid stream name") class InvalidSubjectError(Error): + def __init__(self): super().__init__(message="invalid subject name") class 
InvalidConsumerNameError(Error): + def __init__(self): super().__init__(message="invalid consumer name") class NoMessagesError(Error): + def __init__(self): super().__init__(message="no messages") class MaxBytesExceededError(Error): + def __init__(self): super().__init__(message="message size exceeds max bytes") class ConsumerDeletedError(Error): + def __init__(self): super().__init__(message="consumer deleted") class ConsumerLeadershipChangedError(Error): + def __init__(self): super().__init__(message="leadership change") class HandlerRequiredError(Error): + def __init__(self): super().__init__(message="handler cannot be empty") class EndOfDataError(Error): + def __init__(self): super().__init__(message="end of data reached") class NoHeartbeatError(Error): + def __init__(self): super().__init__(message="no heartbeat received") class ConsumerHasActiveSubscriptionError(Error): + def __init__(self): super().__init__(message="consumer has active subscription") class MsgNotBoundError(Error): + def __init__(self): - super().__init__(message="message is not bound to subscription/connection") + super().__init__( + message="message is not bound to subscription/connection" + ) class MsgNoReplyError(Error): + def __init__(self): super().__init__(message="message does not have a reply") class MsgDeleteUnsuccessfulError(Error): + def __init__(self): super().__init__(message="message deletion unsuccessful") class AsyncPublishReplySubjectSetError(Error): + def __init__(self): super().__init__(message="reply subject should be empty") class TooManyStalledMsgsError(Error): + def __init__(self): - super().__init__(message="stalled with too many outstanding async published messages") + super().__init__( + message="stalled with too many outstanding async published messages" + ) class InvalidOptionError(Error): + def __init__(self): super().__init__(message="invalid jetstream option") diff --git a/nats/jetstream/message.py b/nats/jetstream/message.py index a211c6be..16458775 100644 --- 
a/nats/jetstream/message.py +++ b/nats/jetstream/message.py @@ -15,6 +15,7 @@ from enum import Enum from dataclasses import dataclass, field + class Header(str, Enum): """ Provides known headers that can be used to control message behavior. @@ -25,6 +26,7 @@ class Header(str, Enum): EXPECTED_LAST_MSG_ID = "Nats-Expected-Last-Msg-Id" EXPECTED_LAST_SUBJECT_SEQUENCE = "Nats-Expected-Last-Subject-Sequence" + @dataclass class SequencePair: """ diff --git a/nats/jetstream/publish.py b/nats/jetstream/publish.py index 62b05fcf..8168ef5c 100644 --- a/nats/jetstream/publish.py +++ b/nats/jetstream/publish.py @@ -25,6 +25,7 @@ DEFAULT_RETRY_ATTEMPTS = 2 + @dataclass class PubAck: """ @@ -47,7 +48,9 @@ class PubAck: The domain the message was published to. """ + class Publisher: + def __init__(self, client: Client): self.client = client @@ -71,16 +74,19 @@ async def publish( extra_headers = {} if expected_last_msg_id is not None: - extra_headers[Header.EXPECTED_LAST_MSG_ID] = str(expected_last_msg_id) + extra_headers[Header.EXPECTED_LAST_MSG_ID + ] = str(expected_last_msg_id) if expected_stream is not None: extra_headers[Header.EXPECTED_STREAM] = str(expected_stream) if expected_last_sequence is not None: - extra_headers[Header.EXPECTED_LAST_SEQ] = str(expected_last_sequence) + extra_headers[Header.EXPECTED_LAST_SEQ + ] = str(expected_last_sequence) if expected_last_subject_sequence is not None: - extra_headers[Header.EXPECTED_LAST_SUBJECT_SEQUENCE] = str(expected_last_subject_sequence) + extra_headers[Header.EXPECTED_LAST_SUBJECT_SEQUENCE + ] = str(expected_last_subject_sequence) if len(extra_headers) > 0: if headers is not None: @@ -111,5 +117,6 @@ async def publish( raise NoStreamResponseError + class PubAckResponse(Response, PubAck): pass diff --git a/nats/jetstream/stream.py b/nats/jetstream/stream.py index 8606cd64..bf753f60 100644 --- a/nats/jetstream/stream.py +++ b/nats/jetstream/stream.py @@ -19,11 +19,11 @@ from types import NotImplementedType from typing import 
List, Optional, cast from datetime import datetime, timedelta -from typing_extensions import Sequence from nats.jetstream.api import Client, Paged, Request, Response from nats.jetstream.errors import * + class RetentionPolicy(Enum): """ RetentionPolicy determines how messages in a stream are retained. @@ -83,6 +83,7 @@ class StoreCompression(Enum): Enables S2 compression on the stream. """ + @dataclass class StreamInfo: """ @@ -101,13 +102,19 @@ class StreamInfo: state: StreamState = field(metadata={'json': 'state'}) """Provides the state of the stream at the time of request, including metrics like the number of messages in the stream, total bytes, etc.""" - cluster: Optional[ClusterInfo] = field(default=None, metadata={'json': 'cluster'}) + cluster: Optional[ClusterInfo] = field( + default=None, metadata={'json': 'cluster'} + ) """Contains information about the cluster to which this stream belongs (if applicable).""" - mirror: Optional[StreamSourceInfo] = field(default=None, metadata={'json': 'mirror'}) + mirror: Optional[StreamSourceInfo] = field( + default=None, metadata={'json': 'mirror'} + ) """Contains information about another stream this one is mirroring. Mirroring is used to create replicas of another stream's data. This field is omitted if the stream is not mirroring another stream.""" - sources: List[StreamSourceInfo] = field(default_factory=list, metadata={'json': 'sources'}) + sources: List[StreamSourceInfo] = field( + default_factory=list, metadata={'json': 'sources'} + ) """A list of source streams from which this stream collects data.""" @@ -120,13 +127,19 @@ class StreamConfig: name: str = field(metadata={'json': 'name'}) """Name is the name of the stream. It is required and must be unique across the JetStream account. 
Names cannot contain whitespace, ., >, path separators (forward or backwards slash), and non-printable characters.""" - description: Optional[str] = field(default=None, metadata={'json': 'description'}) + description: Optional[str] = field( + default=None, metadata={'json': 'description'} + ) """Description is an optional description of the stream.""" - subjects: List[str] = field(default_factory=list, metadata={'json': 'subjects'}) + subjects: List[str] = field( + default_factory=list, metadata={'json': 'subjects'} + ) """Subjects is a list of subjects that the stream is listening on. Wildcards are supported. Subjects cannot be set if the stream is created as a mirror.""" - retention: RetentionPolicy = field(default=RetentionPolicy.LIMIT, metadata={'json': 'retention'}) + retention: RetentionPolicy = field( + default=RetentionPolicy.LIMIT, metadata={'json': 'retention'} + ) """Retention defines the message retention policy for the stream. Defaults to LimitsPolicy.""" max_consumers: int = field(metadata={'json': 'max_consumers'}) @@ -141,16 +154,22 @@ class StreamConfig: discard: DiscardPolicy = field(metadata={'json': 'discard'}) """Discard defines the policy for handling messages when the stream reaches its limits in terms of number of messages or total bytes.""" - discard_new_per_subject: Optional[bool] = field(default=None, metadata={'json': 'discard_new_per_subject'}) + discard_new_per_subject: Optional[bool] = field( + default=None, metadata={'json': 'discard_new_per_subject'} + ) """DiscardNewPerSubject is a flag to enable discarding new messages per subject when limits are reached. 
Requires DiscardPolicy to be DiscardNew and the MaxMsgsPerSubject to be set.""" max_age: datetime.timedelta = field(metadata={'json': 'max_age'}) """MaxAge is the maximum age of messages that the stream will retain.""" - max_msgs_per_subject: int = field(metadata={'json': 'max_msgs_per_subject'}) + max_msgs_per_subject: int = field( + metadata={'json': 'max_msgs_per_subject'} + ) """MaxMsgsPerSubject is the maximum number of messages per subject that the stream will retain.""" - max_msg_size: Optional[int] = field(default=None, metadata={'json': 'max_msg_size'}) + max_msg_size: Optional[int] = field( + default=None, metadata={'json': 'max_msg_size'} + ) """MaxMsgSize is the maximum size of any single message in the stream.""" storage: StorageType = field(metadata={'json': 'storage'}) @@ -162,57 +181,90 @@ class StreamConfig: no_ack: Optional[bool] = field(default=None, metadata={'json': 'no_ack'}) """NoAck is a flag to disable acknowledging messages received by this stream. If set to true, publish methods from the JetStream client will not work as expected, since they rely on acknowledgements. Core NATS publish methods should be used instead. Note that this will make message delivery less reliable.""" - duplicates: Optional[datetime.timedelta] = field(default=None, metadata={'json': 'duplicate_window'}) + duplicates: Optional[datetime.timedelta] = field( + default=None, metadata={'json': 'duplicate_window'} + ) """Duplicates is the window within which to track duplicate messages. 
If not set, server default is 2 minutes.""" - placement: Optional[Placement] = field(default=None, metadata={'json': 'placement'}) + placement: Optional[Placement] = field( + default=None, metadata={'json': 'placement'} + ) """Placement is used to declare where the stream should be placed via tags and/or an explicit cluster name.""" - mirror: Optional[StreamSource] = field(default=None, metadata={'json': 'mirror'}) + mirror: Optional[StreamSource] = field( + default=None, metadata={'json': 'mirror'} + ) """Mirror defines the configuration for mirroring another stream.""" - sources: List[StreamSource] = field(default_factory=list, metadata={'json': 'sources'}) + sources: List[StreamSource] = field( + default_factory=list, metadata={'json': 'sources'} + ) """Sources is a list of other streams this stream sources messages from.""" sealed: Optional[bool] = field(default=None, metadata={'json': 'sealed'}) """Sealed streams do not allow messages to be published or deleted via limits or API, sealed streams cannot be unsealed via configuration update. Can only be set on already created streams via the Update API.""" - deny_delete: Optional[bool] = field(default=None, metadata={'json': 'deny_delete'}) + deny_delete: Optional[bool] = field( + default=None, metadata={'json': 'deny_delete'} + ) """DenyDelete restricts the ability to delete messages from a stream via the API. Defaults to false.""" - deny_purge: Optional[bool] = field(default=None, metadata={'json': 'deny_purge'}) + deny_purge: Optional[bool] = field( + default=None, metadata={'json': 'deny_purge'} + ) """DenyPurge restricts the ability to purge messages from a stream via the API. 
Defaults to false.""" - allow_rollup: Optional[bool] = field(default=None, metadata={'json': 'allow_rollup_hdrs'}) + allow_rollup: Optional[bool] = field( + default=None, metadata={'json': 'allow_rollup_hdrs'} + ) """AllowRollup allows the use of the Nats-Rollup header to replace all contents of a stream, or subject in a stream, with a single new message.""" - compression: StoreCompression = field(default=StoreCompression.NONE, metadata={'json': 'compression'}) + compression: StoreCompression = field( + default=StoreCompression.NONE, metadata={'json': 'compression'} + ) """Compression specifies the message storage compression algorithm. Defaults to NoCompression.""" - first_sequence: Optional[int] = field(default=None, metadata={'json': 'first_seq'}) + first_sequence: Optional[int] = field( + default=None, metadata={'json': 'first_seq'} + ) """FirstSeq is the initial sequence number of the first message in the stream.""" - subject_transform: Optional[SubjectTransformConfig] = field(default=None, metadata={'json': 'subject_transform'}) + subject_transform: Optional[SubjectTransformConfig] = field( + default=None, metadata={'json': 'subject_transform'} + ) """SubjectTransform allows applying a transformation to matching messages' subjects.""" - republish: Optional[Republish] = field(default=None, metadata={'json': 'republish'}) + republish: Optional[Republish] = field( + default=None, metadata={'json': 'republish'} + ) """RePublish allows immediate republishing of a message to the configured subject after it's stored.""" - allow_direct: bool = field(default=False, metadata={'json': 'allow_direct'}) + allow_direct: bool = field( + default=False, metadata={'json': 'allow_direct'} + ) """AllowDirect enables direct access to individual messages using direct get API. 
Defaults to false.""" - mirror_direct: bool = field(default=False, metadata={'json': 'mirror_direct'}) + mirror_direct: bool = field( + default=False, metadata={'json': 'mirror_direct'} + ) """MirrorDirect enables direct access to individual messages from the origin stream using direct get API. Defaults to false.""" - consumer_limits: Optional[StreamConsumerLimits] = field(default=None, metadata={'json': 'consumer_limits'}) + consumer_limits: Optional[StreamConsumerLimits] = field( + default=None, metadata={'json': 'consumer_limits'} + ) """ConsumerLimits defines limits of certain values that consumers can set, defaults for those who don't set these settings.""" - metadata: Dict[str, str] = field(default_factory=dict, metadata={'json': 'metadata'}) + metadata: Dict[str, str] = field( + default_factory=dict, metadata={'json': 'metadata'} + ) """Metadata is a set of application-defined key-value pairs for associating metadata on the stream. This feature requires nats-server v2.10.0 or later.""" - template: Optional[str] = field(default=None, metadata={'json': 'template_owner'}) + template: Optional[str] = field( + default=None, metadata={'json': 'template_owner'} + ) """Template identifies the template that manages the Stream. DEPRECATED: This feature is no longer supported.""" + @dataclass class StreamSourceInfo: """ @@ -228,12 +280,17 @@ class StreamSourceInfo: active: timedelta = field(metadata={'json': 'active'}) """Active informs when last the mirror or sourced stream had activity. 
Value will be -1 when there has been no activity.""" - filter_subject: Optional[str] = field(default=None, metadata={'json': 'filter_subject'}) + filter_subject: Optional[str] = field( + default=None, metadata={'json': 'filter_subject'} + ) """FilterSubject is the subject filter defined for this source/mirror.""" - subject_transforms: List[SubjectTransformConfig] = field(default_factory=list, metadata={'json': 'subject_transforms'}) + subject_transforms: List[SubjectTransformConfig] = field( + default_factory=list, metadata={'json': 'subject_transforms'} + ) """SubjectTransforms is a list of subject transforms defined for this source/mirror.""" + @dataclass class StreamState: """ @@ -261,7 +318,9 @@ class StreamState: consumers: int = field(metadata={'json': 'consumer_count'}) """The number of consumers on the stream.""" - deleted: List[int] = field(default_factory=list, metadata={'json': 'deleted'}) + deleted: List[int] = field( + default_factory=list, metadata={'json': 'deleted'} + ) """A list of sequence numbers that have been removed from the stream. This field will only be returned if the stream has been fetched with the DeletedDetails option.""" num_deleted: int = field(metadata={'json': 'num_deleted'}) @@ -270,9 +329,12 @@ class StreamState: num_subjects: int = field(metadata={'json': 'num_subjects'}) """NumSubjects is the number of unique subjects the stream has received messages on.""" - subjects: Dict[str, int] = field(default_factory=dict, metadata={'json': 'subjects'}) + subjects: Dict[str, int] = field( + default_factory=dict, metadata={'json': 'subjects'} + ) """Subjects is a map of subjects the stream has received messages on with message count per subject. 
This field will only be returned if the stream has been fetched with the SubjectFilter option.""" + @dataclass class ClusterInfo: """ @@ -286,7 +348,9 @@ class ClusterInfo: leader: Optional[str] = field(default=None, metadata={'json': 'leader'}) """Leader is the server name of the RAFT leader.""" - replicas: List[PeerInfo] = field(default_factory=list, metadata={'json': 'replicas'}) + replicas: List[PeerInfo] = field( + default_factory=list, metadata={'json': 'replicas'} + ) """Replicas is the list of members of the RAFT cluster.""" @@ -341,7 +405,9 @@ class Republish: destination: str = field(metadata={'json': 'dest'}) """The subject pattern to republish the subject to.""" - headers_only: Optional[bool] = field(default=None, metadata={'json': 'headers_only'}) + headers_only: Optional[bool] = field( + default=None, metadata={'json': 'headers_only'} + ) """A flag to indicate that only the headers should be republished.""" @@ -367,23 +433,33 @@ class StreamSource: name: str = field(metadata={'json': 'name'}) """The name of the stream to source from.""" - opt_start_seq: Optional[int] = field(default=None, metadata={'json': 'opt_start_seq'}) + opt_start_seq: Optional[int] = field( + default=None, metadata={'json': 'opt_start_seq'} + ) """The sequence number to start sourcing from.""" - opt_start_time: Optional[datetime] = field(default=None, metadata={'json': 'opt_start_time'}) + opt_start_time: Optional[datetime] = field( + default=None, metadata={'json': 'opt_start_time'} + ) """The timestamp of messages to start sourcing from.""" - filter_subject: Optional[str] = field(default=None, metadata={'json': 'filter_subject'}) + filter_subject: Optional[str] = field( + default=None, metadata={'json': 'filter_subject'} + ) """The subject filter used to only replicate messages with matching subjects.""" - subject_transforms: List[SubjectTransformConfig] = field(default_factory=list, metadata={'json': 'subject_transforms'}) + subject_transforms: List[SubjectTransformConfig] = 
field( + default_factory=list, metadata={'json': 'subject_transforms'} + ) """ A list of subject transforms to apply to matching messages. Subject transforms on sources and mirrors are also used as subject filters with optional transformations. """ - external: Optional[ExternalStream] = field(default=None, metadata={'json': 'external'}) + external: Optional[ExternalStream] = field( + default=None, metadata={'json': 'external'} + ) """A configuration referencing a stream source in another account or JetStream domain.""" domain: Optional[str] = field(default=None, metadata={'json': '-'}) @@ -411,12 +487,17 @@ class StreamConsumerLimits: be overridden on a per consumer basis. """ - inactive_threshold: Optional[datetime.timedelta] = field(default=None, metadata={'json': 'inactive_threshold'}) + inactive_threshold: Optional[datetime.timedelta] = field( + default=None, metadata={'json': 'inactive_threshold'} + ) """A duration which instructs the server to clean up the consumer if it has been inactive for the specified duration.""" - max_ack_pending: Optional[int] = field(default=None, metadata={'json': 'max_ack_pending'}) + max_ack_pending: Optional[int] = field( + default=None, metadata={'json': 'max_ack_pending'} + ) """A maximum number of outstanding unacknowledged messages for a consumer.""" + class Stream: """ Stream contains operations on an existing stream. 
It allows fetching and removing @@ -428,14 +509,21 @@ def __init__(self, client: Client, name: str, info: StreamInfo): self._name = name self._info = info - async def info(self, subject_filter: Optional[str] = None, deleted_details: Optional[bool] = None, timeout: Optional[float] = None) -> StreamInfo: + async def info( + self, + subject_filter: Optional[str] = None, + deleted_details: Optional[bool] = None, + timeout: Optional[float] = None + ) -> StreamInfo: """Returns `StreamInfo` from the server.""" - info_subject = f"STREAM.INFO.{self._name}" + info_subject = f"STREAM.INFO.{self._name}" info_request = StreamInfoRequest( subject_filter=subject_filter, deleted_details=deleted_details, ) - info_response = await self._client.request_json(info_subject, info_request, StreamInfoResponse, timeout=timeout) + info_response = await self._client.request_json( + info_subject, info_request, StreamInfoResponse, timeout=timeout + ) if info_response.error is not None: if info_response.error.error_code == ErrorCode.STREAM_NOT_FOUND: raise StreamNotFoundError(*info_response.error) @@ -444,7 +532,6 @@ async def info(self, subject_filter: Optional[str] = None, deleted_details: Opti return cast(StreamInfo, info_response) - @property def cached_info(self) -> StreamInfo: """Returns the `StreamInfo` currently cached on this stream.""" @@ -464,7 +551,9 @@ async def purge( """ if keep is not None and sequence is not None: - raise ValueError("both 'keep' and 'sequence' cannot be provided in purge request") + raise ValueError( + "both 'keep' and 'sequence' cannot be provided in purge request" + ) purge_subject = f"STREAM.PURGE.{self._name}" purge_request = StreamPurgeRequest( @@ -473,25 +562,33 @@ async def purge( subject=subject, ) - purge_response = await self._client.request_json(purge_subject, purge_request, StreamPurgeResponse, timeout=timeout) + purge_response = await self._client.request_json( + purge_subject, purge_request, StreamPurgeResponse, timeout=timeout + ) if 
purge_response.error is not None: raise Error(*purge_response.error) return purge_response.purged - async def get_msg(self, sequence: int, timeout: Optional[float] = None) -> RawStreamMsg: + async def get_msg( + self, sequence: int, timeout: Optional[float] = None + ) -> RawStreamMsg: """ Retrieves a raw stream message stored in JetStream by sequence number. """ raise NotImplementedError - async def get_last_msg_for_subject(self, subject: str, timeout: Optional[float] = None) -> RawStreamMsg: + async def get_last_msg_for_subject( + self, subject: str, timeout: Optional[float] = None + ) -> RawStreamMsg: """ Retrieves the last raw stream message stored in JetStream on a given subject. """ raise NotImplementedError - async def delete_msg(self, sequence: int, timeout: Optional[float] = None) -> None: + async def delete_msg( + self, sequence: int, timeout: Optional[float] = None + ) -> None: """ Deletes a message from a stream. """ @@ -501,21 +598,33 @@ async def delete_msg(self, sequence: int, timeout: Optional[float] = None) -> No no_erase=True, ) - msg_delete_response = await self._client.request_json(msg_delete_subject, msg_delete_request, MsgDeleteResponse, timeout=timeout) + msg_delete_response = await self._client.request_json( + msg_delete_subject, + msg_delete_request, + MsgDeleteResponse, + timeout=timeout + ) if msg_delete_response.error is not None: raise Error(*msg_delete_response.error) - async def secure_delete_msg(self, sequence: int, timeout: Optional[float] = None) -> None: + async def secure_delete_msg( + self, sequence: int, timeout: Optional[float] = None + ) -> None: """ Deletes a message from a stream. 
""" msg_delete_subject = f"STREAM.MSG.DELETE.{sequence}" msg_delete_request = MsgDeleteRequest( - sequence=sequence, - no_erase=False, - ) + sequence=sequence, + no_erase=False, + ) - msg_delete_response = await self._client.request_json(msg_delete_subject, msg_delete_request, MsgDeleteResponse, timeout=timeout) + msg_delete_response = await self._client.request_json( + msg_delete_subject, + msg_delete_request, + MsgDeleteResponse, + timeout=timeout + ) if msg_delete_response.error is not None: raise Error(*msg_delete_response.error) @@ -525,46 +634,67 @@ class StreamManager: Provides methods for managing streams. """ - async def create_stream(self, config: StreamConfig, timeout: Optional[float] = None) -> Stream: + async def create_stream( + self, config: StreamConfig, timeout: Optional[float] = None + ) -> Stream: """ Creates a new stream with given config. """ raise NotImplementedError - async def update_stream(self, config: StreamConfig, timeout: Optional[float] = None) -> Stream: + async def update_stream( + self, config: StreamConfig, timeout: Optional[float] = None + ) -> Stream: """ Updates an existing stream with the given config. 
""" raise NotImplementedError - async def create_or_update_stream(self, config: StreamConfig, timeout: Optional[float] = None) -> Stream: + async def create_or_update_stream( + self, config: StreamConfig, timeout: Optional[float] = None + ) -> Stream: """CreateOrUpdateStream creates a stream with given config or updates it if it already exists.""" raise NotImplementedError - async def stream(self, stream: str, timeout: Optional[float] = None) -> Stream: + async def stream( + self, stream: str, timeout: Optional[float] = None + ) -> Stream: """Stream fetches StreamInfo and returns a Stream interface for a given stream name.""" raise NotImplementedError - async def stream_name_by_subject(self, subject: str, timeout: Optional[float] = None) -> str: + async def stream_name_by_subject( + self, subject: str, timeout: Optional[float] = None + ) -> str: """StreamNameBySubject returns a stream name listening on a given subject.""" raise NotImplementedError - async def delete_stream(self, stream: str, timeout: Optional[float] = None) -> None: + async def delete_stream( + self, stream: str, timeout: Optional[float] = None + ) -> None: """DeleteStream removes a stream with given name.""" raise NotImplementedError - def list_streams(self, timeout: Optional[float] = None) -> AsyncIterator[StreamInfo]: + def list_streams(self, + timeout: Optional[float] = None + ) -> AsyncIterator[StreamInfo]: """ListStreams returns a StreamInfoLister for iterating over stream infos.""" raise NotImplementedError - def stream_names(self, timeout: Optional[float] = None) -> AsyncIterator[str]: + def stream_names(self, + timeout: Optional[float] = None) -> AsyncIterator[str]: """StreamNames returns a StreamNameLister for iterating over stream names.""" raise NotImplementedError + @dataclass class StreamInfoRequest(Request, Paged): - deleted_details: Optional[bool] = field(default=False, metadata={'json': 'deleted_details'}) - subject_filter: Optional[str] = field(default=None, metadata={'json': 
'subjects_filter'}) + deleted_details: Optional[bool] = field( + default=False, metadata={'json': 'deleted_details'} + ) + subject_filter: Optional[str] = field( + default=None, metadata={'json': 'subjects_filter'} + ) + @dataclass class StreamInfoResponse(Response, Paged, StreamInfo): @@ -590,19 +720,23 @@ class MsgGetRequest(Request): last_for: int = field(metadata={'json': 'last_by_subj'}) next_for: int = field(metadata={'json': 'next_by_subj'}) + @dataclass class MsgGetResponse(Response): pass + @dataclass class MsgDeleteRequest(Request): sequence: int = field(metadata={'json': 'seq'}) no_erase: bool = field(metadata={'json': 'no_erase'}) + @dataclass class MsgDeleteResponse(Response): success: bool = field(default=False, metadata={'json': 'success'}) + __all__ = [ 'RetentionPolicy', 'DiscardPolicy', From 6206b5a904560b845c7b0f5f9d6d5ba5406b2a18 Mon Sep 17 00:00:00 2001 From: Casper Beyer Date: Wed, 26 Jun 2024 22:17:04 +0200 Subject: [PATCH 11/22] wip --- nats/jetstream/__init__.py | 10 +- nats/jetstream/api.py | 32 +++- nats/jetstream/consumer.py | 18 +- nats/jetstream/context.py | 18 +- nats/jetstream/errors.py | 332 +------------------------------------ nats/jetstream/message.py | 92 +++++++++- nats/jetstream/publish.py | 10 +- nats/jetstream/stream.py | 256 ++++++++++++++++++++++------ 8 files changed, 362 insertions(+), 406 deletions(-) diff --git a/nats/jetstream/__init__.py b/nats/jetstream/__init__.py index 1b7c5ee7..ce2de7f3 100644 --- a/nats/jetstream/__init__.py +++ b/nats/jetstream/__init__.py @@ -12,12 +12,14 @@ # limitations under the License. 
# -from nats.aio.client import Client +from typing import Any + +from .api import Client from .context import Context -async def new(client: Client) -> Context: - return Context(client) +async def new(client: Any) -> Context: + return Context(api.Client(client)) -__all__ = ['new'] +__all__ = ['new', 'Context'] diff --git a/nats/jetstream/api.py b/nats/jetstream/api.py index ec816ea5..2f2ac051 100644 --- a/nats/jetstream/api.py +++ b/nats/jetstream/api.py @@ -15,12 +15,22 @@ from __future__ import annotations import json - -from dataclasses import dataclass, fields, field, is_dataclass, MISSING -from typing import Any, Dict, Optional, Self, Type, TypeVar, get_origin, get_args +from dataclasses import MISSING, dataclass, field, fields, is_dataclass +from typing import ( + Any, + Protocol, + Dict, + Optional, + Self, + Type, + TypeVar, + get_args, + get_origin, +) from urllib import parse from nats.js.api import DEFAULT_PREFIX +from nats.jetstream.message import Msg def as_dict(instance: Any) -> Dict[str, Any]: @@ -111,7 +121,7 @@ class Error: @dataclass class Response: type: str - error: Optional[Error] = None + error: Optional[Error] = field(default=None) @classmethod def from_dict(cls: Type[T], data: Dict[str, Any]) -> T: @@ -153,7 +163,19 @@ async def request( if timeout is None: timeout = self.timeout - self.inner.request(subject, payload, timeout=timeout) + return self.inner.request(subject, payload, timeout=timeout) + + # TODO return `jetstream.Msg` + async def request_msg( + self, + subject: str, + payload: bytes, + timeout: Optional[float] = None, + ) -> Msg: + if timeout is None: + timeout = self.timeout + + return self.inner.request(subject, payload, timeout=timeout) async def request_json( self, subject: str, request_object: Request, response_type: Type[T], diff --git a/nats/jetstream/consumer.py b/nats/jetstream/consumer.py index eaecffc8..c2f526cd 100644 --- a/nats/jetstream/consumer.py +++ b/nats/jetstream/consumer.py @@ -14,10 +14,10 @@ from __future__ 
import annotations -from enum import Enum -from typing import Optional from dataclasses import dataclass, field from datetime import datetime, timedelta +from enum import Enum +from typing import Optional class DeliverPolicy(Enum): @@ -248,19 +248,17 @@ class ConsumerInfo: num_pending: int = field(metadata={'json': 'num_pending'}) """Number of messages that match the consumer's filter but have not been delivered yet.""" + timestamp: datetime = field(metadata={'json': 'ts'}) + """Timestamp when the info was gathered by the server.""" + + push_bound: bool = field(default=False, metadata={'json': 'push_bound'}) + """Indicates whether at least one subscription exists for the delivery subject of this consumer.""" + cluster: Optional[ClusterInfo] = field( default=None, metadata={'json': 'cluster'} ) """Information about the cluster to which this consumer belongs.""" - push_bound: Optional[bool] = field( - default=None, metadata={'json': 'push_bound'} - ) - """Indicates whether at least one subscription exists for the delivery subject of this consumer.""" - - timestamp: datetime = field(metadata={'json': 'ts'}) - """Timestamp when the info was gathered by the server.""" - @dataclass class OrderedConsumerConfig: diff --git a/nats/jetstream/context.py b/nats/jetstream/context.py index 94dd5d1d..0e8713fd 100644 --- a/nats/jetstream/context.py +++ b/nats/jetstream/context.py @@ -19,7 +19,23 @@ from nats.jetstream.stream import StreamManager -class Context(Publisher, StreamManager): +class Context( + Publisher, + StreamManager, + # StreamConsumerManager, + # KeyValueManager, + # ObjectStoreManager +): + """ + Provides a context for interacting with JetStream. + The capabilities of JetStream include: + + - Publishing messages to a stream using `Publisher`. + - Managing streams using `StreamManager`. + - Managing consumers using `StreamConsumerManager`. + - Managing key value stores using `KeyValueManager`. + - Managing object stores using `ObjectStoreManager`. 
+ """ def __init__(self, client: Client): Publisher.__init__(self, client) diff --git a/nats/jetstream/errors.py b/nats/jetstream/errors.py index 9de40085..2111af89 100644 --- a/nats/jetstream/errors.py +++ b/nats/jetstream/errors.py @@ -12,8 +12,8 @@ # limitations under the License. # -from typing import Optional from enum import Enum +from typing import Optional class ErrorCode(Enum): @@ -45,336 +45,20 @@ def __init__( self.error_code = error_code self.description = description - def __str__(self): - if self.description: - return f"nats: API error: code={self.code} err_code={self.error_code} description={self.description}" - return f"nats: {self.message}" - - -class JetStreamNotEnabledError(Error): - - def __init__(self): - super().__init__( - message="jetstream not enabled", - code=503, - error_code=ErrorCode.JETSTREAM_NOT_ENABLED, - description="jetstream not enabled", - ) - - -class JetStreamNotEnabledForAccountError(Error): - - def __init__(self): - super().__init__( - message="jetstream not enabled for account", - code=503, - error_code=ErrorCode.JETSTREAM_NOT_ENABLED_FOR_ACCOUNT, - description="jetstream not enabled for account", + def __str__(self) -> str: + return ( + f"nats: {type(self).__name__}: code={self.code} err_code={self.error_code} " + f"description='{self.description}'" ) class StreamNotFoundError(Error): def __init__(self): - super().__init__( - message="stream not found", - code=404, - error_code=ErrorCode.STREAM_NOT_FOUND, - description="stream not found", - ) - - -class StreamNameAlreadyInUseError(Error): - - def __init__(self): - super().__init__( - message="stream name already in use", - code=400, - error_code=ErrorCode.STREAM_NAME_IN_USE, - description="stream name already in use", - ) - - -class StreamSubjectTransformNotSupportedError(Error): - - def __init__(self): - super().__init__( - message="stream subject transformation not supported by nats-server" - ) - - -class StreamSourceSubjectTransformNotSupportedError(Error): - - def 
__init__(self): - super().__init__( - message="stream subject transformation not supported by nats-server" - ) - - -class StreamSourceNotSupportedError(Error): - - def __init__(self): - super().__init__( - message="stream sourcing is not supported by nats-server" - ) - - -class StreamSourceMultipleFilterSubjectsNotSupportedError(Error): - - def __init__(self): - super().__init__( - message= - "stream sourcing with multiple subject filters not supported by nats-server" - ) - - -class ConsumerNotFoundError(Error): - - def __init__(self): - super().__init__( - message="consumer not found", - code=404, - error_code=ErrorCode.CONSUMER_NOT_FOUND, - description="consumer not found", - ) - - -class ConsumerExistsError(Error): - - def __init__(self): - super().__init__( - message="consumer already exists", - code=400, - error_code=ErrorCode.CONSUMER_EXISTS, - description="consumer already exists", - ) - - -class ConsumerDoesNotExistError(Error): - - def __init__(self): - super().__init__( - message="consumer does not exist", - code=400, - error_code=ErrorCode.CONSUMER_DOES_NOT_EXIST, - description="consumer does not exist", - ) - - -class MessageNotFoundError(Error): - - def __init__(self): - super().__init__( - message="message not found", - code=404, - error_code=ErrorCode.MESSAGE_NOT_FOUND, - description="message not found", - ) - - -class BadRequestError(Error): - - def __init__(self): - super().__init__( - message="bad request", - code=400, - error_code=ErrorCode.BAD_REQUEST, - description="bad request", - ) - - -class ConsumerCreateError(Error): - - def __init__(self): - super().__init__( - message="could not create consumer", - code=500, - error_code=ErrorCode.CONSUMER_CREATE, - description="could not create consumer", - ) - - -class DuplicateFilterSubjectsError(Error): - - def __init__(self): - super().__init__( - message= - "consumer cannot have both FilterSubject and FilterSubjects specified", - code=500, - error_code=ErrorCode.DUPLICATE_FILTER_SUBJECTS, - 
description= - "consumer cannot have both FilterSubject and FilterSubjects specified", - ) - - -class OverlappingFilterSubjectsError(Error): - - def __init__(self): - super().__init__( - message="consumer subject filters cannot overlap", - code=500, - error_code=ErrorCode.OVERLAPPING_FILTER_SUBJECTS, - description="consumer subject filters cannot overlap", - ) - - -class EmptyFilterError(Error): - - def __init__(self): - super().__init__( - message="consumer filter in FilterSubjects cannot be empty", - code=500, - error_code=ErrorCode.CONSUMER_EMPTY_FILTER, - description="consumer filter in FilterSubjects cannot be empty", - ) - - -class ConsumerMultipleFilterSubjectsNotSupportedError(Error): - - def __init__(self): - super().__init__( - message= - "multiple consumer filter subjects not supported by nats-server" - ) - - -class ConsumerNameAlreadyInUseError(Error): - - def __init__(self): - super().__init__(message="consumer name already in use") - - -class InvalidJSAckError(Error): - - def __init__(self): - super().__init__(message="invalid jetstream publish response") - - -class StreamNameRequiredError(Error): - - def __init__(self): - super().__init__(message="stream name is required") - - -class MsgAlreadyAckdError(Error): - - def __init__(self): - super().__init__(message="message was already acknowledged") - - -class NoStreamResponseError(Error): - - def __init__(self): - super().__init__(message="no response from stream") - - -class NotJSMessageError(Error): - - def __init__(self): - super().__init__(message="not a jetstream message") - - -class InvalidStreamNameError(Error): - - def __init__(self): - super().__init__(message="invalid stream name") - - -class InvalidSubjectError(Error): - - def __init__(self): - super().__init__(message="invalid subject name") - - -class InvalidConsumerNameError(Error): - - def __init__(self): - super().__init__(message="invalid consumer name") - - -class NoMessagesError(Error): - - def __init__(self): - 
super().__init__(message="no messages") - - -class MaxBytesExceededError(Error): - - def __init__(self): - super().__init__(message="message size exceeds max bytes") - - -class ConsumerDeletedError(Error): - - def __init__(self): - super().__init__(message="consumer deleted") - - -class ConsumerLeadershipChangedError(Error): - - def __init__(self): - super().__init__(message="leadership change") - - -class HandlerRequiredError(Error): - - def __init__(self): - super().__init__(message="handler cannot be empty") - - -class EndOfDataError(Error): - - def __init__(self): - super().__init__(message="end of data reached") - - -class NoHeartbeatError(Error): - - def __init__(self): - super().__init__(message="no heartbeat received") - - -class ConsumerHasActiveSubscriptionError(Error): - - def __init__(self): - super().__init__(message="consumer has active subscription") - - -class MsgNotBoundError(Error): - - def __init__(self): - super().__init__( - message="message is not bound to subscription/connection" - ) - - -class MsgNoReplyError(Error): - - def __init__(self): - super().__init__(message="message does not have a reply") - - -class MsgDeleteUnsuccessfulError(Error): - - def __init__(self): - super().__init__(message="message deletion unsuccessful") - - -class AsyncPublishReplySubjectSetError(Error): - - def __init__(self): - super().__init__(message="reply subject should be empty") - - -class TooManyStalledMsgsError(Error): - - def __init__(self): - super().__init__( - message="stalled with too many outstanding async published messages" - ) + super().__init__() -class InvalidOptionError(Error): +class MsgNotFoundError(Error): def __init__(self): - super().__init__(message="invalid jetstream option") + super().__init__() diff --git a/nats/jetstream/message.py b/nats/jetstream/message.py index 16458775..0622e881 100644 --- a/nats/jetstream/message.py +++ b/nats/jetstream/message.py @@ -12,19 +12,99 @@ # limitations under the License. 
# -from enum import Enum +import nats.aio.msg + from dataclasses import dataclass, field +from enum import Enum class Header(str, Enum): """ Provides known headers that can be used to control message behavior. """ + MSG_ID = "Nats-Msg-Id" + """Used to specify a user-defined message ID. It can be used + e.g. for deduplication in conjunction with the Duplicates duration on + ConsumerConfig or to provide optimistic concurrency safety together with + ExpectedLastMsgID. + + This can be set when publishing messages using id option. + """ + EXPECTED_STREAM = "Nats-Expected-Stream" - EXPECTED_LAST_SEQ = "Nats-Expected-Last-Sequence" + """Contains stream name and is used to assure that the + published message is received by the expected stream. The server will reject the + message if it is not the case. + + This can be set when publishing messages using expect_stream option. + """ + + EXPECTED_LAST_SEQUENCE = "Nats-Expected-Last-Sequence" + """Contains the expected last sequence number of the + stream and can be used to apply optimistic concurrency control at the stream + level. The server will reject the message if it is not the case. + + This can be set when publishing messages using expected_last_sequence + option. + """ + + EXPECTED_LAST_SUBJECT_SEQEQUENCE = "Nats-Expected-Last-Subject-Sequence" + """Contains the expected last sequence number on + the subject and can be used to apply optimistic concurrency control at + the subject level. The server will reject the message if it is not the case. + + This can be set when publishing messages using expected_last_subject_sequence option. + """ + EXPECTED_LAST_MSG_ID = "Nats-Expected-Last-Msg-Id" - EXPECTED_LAST_SUBJECT_SEQUENCE = "Nats-Expected-Last-Subject-Sequence" + """Contains the expected last message ID on the + subject and can be used to apply optimistic concurrency control at + the stream level. The server will reject the message if it is not the case. 
+ + This can be set when publishing messages using WithExpectLastMsgID + option. + """ + + ROLLUP = "Nats-Rollup" + """Used to apply a purge of all prior messages in the stream + ("all") or at the subject ("sub") before this message. + """ + + STREAM = "Nats-Stream" + """Contains the stream name the message was republished from or + the stream name the message was retrieved from using direct get. + """ + + SEQUENCE = "Nats-Sequence" + """ + Contains the original sequence number of the message. + """ + + TIMESTAMP = "Nats-Time-Stamp" + """ + Contains the original timestamp of the message. + """ + + SUBJECT = "Nats-Subject" + """ + Contains the original subject the message was published to. + """ + + LAST_SEQUENCE = "Nats-Last-Sequence" + """ + Contains the last sequence of the message having the + same subject, otherwise zero if this is the first message for the + subject. + """ + + +class Status(str, Enum): + SERVICE_UNAVAILABLE = "503" + NO_MESSAGES = "404" + REQUEST_TIMEOUT = "408" + CONFLICT = "409" + CONTROL_MESSAGE = "100" @dataclass @@ -43,3 +123,9 @@ class SequencePair: """ The stream sequence number for a message. """ + + +# FIXME +# For now, we will use the message class from the nats.aio.msg module. +# This needs to be fixed before releasing. 
+Msg = nats.aio.msg.Msg diff --git a/nats/jetstream/publish.py b/nats/jetstream/publish.py index 8168ef5c..ff6a3372 100644 --- a/nats/jetstream/publish.py +++ b/nats/jetstream/publish.py @@ -13,7 +13,6 @@ # import json - from asyncio import Future from dataclasses import dataclass, field from typing import Dict, Optional, cast @@ -60,6 +59,7 @@ async def publish( payload: bytes = b'', timeout: Optional[float] = None, headers: Optional[Dict] = None, + *, id: Optional[str] = None, expected_last_msg_id: Optional[str] = None, expected_stream: Optional[str] = None, @@ -105,10 +105,12 @@ async def publish( pub_ack_response = PubAckResponse.from_json(msg.data) if pub_ack_response.error is not None: - raise Error(**pub_ack_response.error) + raise Error(*pub_ack_response.error) - if pub_ack_response.stream == None: - raise InvalidAckError() + if pub_ack_response.stream is None: + raise InvalidAckError( + "Stream was not provided with publish ack response" + ) return cast(PubAck, pub_ack_response) except NoRespondersError: diff --git a/nats/jetstream/stream.py b/nats/jetstream/stream.py index bf753f60..f6a55c0e 100644 --- a/nats/jetstream/stream.py +++ b/nats/jetstream/stream.py @@ -14,19 +14,22 @@ from __future__ import annotations -from enum import Enum +import re + from dataclasses import dataclass, field -from types import NotImplementedType -from typing import List, Optional, cast from datetime import datetime, timedelta +from enum import Enum +from types import NotImplementedType +from typing import Dict, List, Optional, cast from nats.jetstream.api import Client, Paged, Request, Response from nats.jetstream.errors import * +from nats.jetstream.message import Msg, Header, Status class RetentionPolicy(Enum): """ - RetentionPolicy determines how messages in a stream are retained. + Determines how messages in a stream are retained. 
""" LIMITS = "limits" @@ -41,8 +44,7 @@ class RetentionPolicy(Enum): class DiscardPolicy(Enum): """ - DiscardPolicy determines how to proceed when limits of messages or bytes - are reached. + Determines how to proceed when limits of messages or bytes are reached. """ OLD = "old" @@ -54,7 +56,7 @@ class DiscardPolicy(Enum): class StorageType(Enum): """ - StorageType determines how messages are stored for retention. + Determines how messages are stored for retention. """ FILE = "file" @@ -70,7 +72,7 @@ class StorageType(Enum): class StoreCompression(Enum): """ - StoreCompression determines how messages are compressed. + Determines how messages are compressed. """ NONE = "none" @@ -87,15 +89,15 @@ class StoreCompression(Enum): @dataclass class StreamInfo: """ - StreamInfo shows config and current state for this stream. + Provides configuration and current state for a stream. """ - timestamp: datetime = field(metadata={'json': 'ts'}) - """Indicates when the info was gathered by the server.""" - config: StreamConfig = field(metadata={'json': 'config'}) """Contains the configuration settings of the stream, set when creating or updating the stream.""" + timestamp: datetime = field(metadata={'json': 'ts'}) + """Indicates when the info was gathered by the server.""" + created: datetime = field(metadata={'json': 'created'}) """The timestamp when the stream was created.""" @@ -138,7 +140,7 @@ class StreamConfig: """Subjects is a list of subjects that the stream is listening on. Wildcards are supported. Subjects cannot be set if the stream is created as a mirror.""" retention: RetentionPolicy = field( - default=RetentionPolicy.LIMIT, metadata={'json': 'retention'} + default=RetentionPolicy.LIMITS, metadata={'json': 'retention'} ) """Retention defines the message retention policy for the stream. Defaults to LimitsPolicy.""" @@ -159,7 +161,7 @@ class StreamConfig: ) """DiscardNewPerSubject is a flag to enable discarding new messages per subject when limits are reached. 
Requires DiscardPolicy to be DiscardNew and the MaxMsgsPerSubject to be set.""" - max_age: datetime.timedelta = field(metadata={'json': 'max_age'}) + max_age: timedelta = field(metadata={'json': 'max_age'}) """MaxAge is the maximum age of messages that the stream will retain.""" max_msgs_per_subject: int = field( @@ -181,7 +183,7 @@ class StreamConfig: no_ack: Optional[bool] = field(default=None, metadata={'json': 'no_ack'}) """NoAck is a flag to disable acknowledging messages received by this stream. If set to true, publish methods from the JetStream client will not work as expected, since they rely on acknowledgements. Core NATS publish methods should be used instead. Note that this will make message delivery less reliable.""" - duplicates: Optional[datetime.timedelta] = field( + duplicates: Optional[timedelta] = field( default=None, metadata={'json': 'duplicate_window'} ) """Duplicates is the window within which to track duplicate messages. If not set, server default is 2 minutes.""" @@ -259,11 +261,6 @@ class StreamConfig: ) """Metadata is a set of application-defined key-value pairs for associating metadata on the stream. This feature requires nats-server v2.10.0 or later.""" - template: Optional[str] = field( - default=None, metadata={'json': 'template_owner'} - ) - """Template identifies the template that manages the Stream. DEPRECATED: This feature is no longer supported.""" - @dataclass class StreamSourceInfo: @@ -323,10 +320,10 @@ class StreamState: ) """A list of sequence numbers that have been removed from the stream. This field will only be returned if the stream has been fetched with the DeletedDetails option.""" - num_deleted: int = field(metadata={'json': 'num_deleted'}) + num_deleted: int = field(default=0, metadata={'json': 'num_deleted'}) """NumDeleted is the number of messages that have been removed from the stream. Only deleted messages causing a gap in stream sequence numbers are counted. 
Messages deleted at the beginning or end of the stream are not counted.""" - num_subjects: int = field(metadata={'json': 'num_subjects'}) + num_subjects: int = field(default=0, metadata={'json': 'num_subjects'}) """NumSubjects is the number of unique subjects the stream has received messages on.""" subjects: Dict[str, int] = field( @@ -399,12 +396,12 @@ class Republish: pattern. """ - source: Optional[str] = field(default=None, metadata={'json': 'src'}) - """The subject pattern to match incoming messages against.""" - destination: str = field(metadata={'json': 'dest'}) """The subject pattern to republish the subject to.""" + source: Optional[str] = field(default=None, metadata={'json': 'src'}) + """The subject pattern to match incoming messages against.""" + headers_only: Optional[bool] = field( default=None, metadata={'json': 'headers_only'} ) @@ -487,7 +484,7 @@ class StreamConsumerLimits: be overridden on a per consumer basis. """ - inactive_threshold: Optional[datetime.timedelta] = field( + inactive_threshold: Optional[timedelta] = field( default=None, metadata={'json': 'inactive_threshold'} ) """A duration which instructs the server to clean up the consumer if it has been inactive for the specified duration.""" @@ -498,6 +495,35 @@ class StreamConsumerLimits: """A maximum number of outstanding unacknowledged messages for a consumer.""" +@dataclass +class RawStreamMsg: + subject: str = field(metadata={"json": "subject"}) + """ Subject of the message. """ + + sequence: int = field(metadata={"json": "seq"}) + """ Sequence number of the message. """ + + time: datetime = field(metadata={"json": "time"}) + """ Time of the message. """ + + data: Optional[bytes] = field(default=None, metadata={"json": "data"}) + """ Data of the message.""" + + headers: Dict[str, Any] = field( + default_factory=dict, metadata={"json": "hdrs"} + ) + """ Headers of the message. 
""" + + +@dataclass +class StoredMsg: + subject: str = field(metadata={"json": "subject"}) + sequence: int = field(metadata={"json": "seq"}) + time: datetime = field(metadata={"json": "time"}) + headers: Optional[bytes] = field(default=None, metadata={"json": "hdrs"}) + data: Optional[bytes] = field(default=None, metadata={"json": "data"}) + + class Stream: """ Stream contains operations on an existing stream. It allows fetching and removing @@ -516,6 +542,7 @@ async def info( timeout: Optional[float] = None ) -> StreamInfo: """Returns `StreamInfo` from the server.""" + # TODO(caspervonb): handle pagination info_subject = f"STREAM.INFO.{self._name}" info_request = StreamInfoRequest( subject_filter=subject_filter, @@ -550,6 +577,7 @@ async def purge( This is a destructive operation. """ + # TODO(caspervonb): enforce types with overloads if keep is not None and sequence is not None: raise ValueError( "both 'keep' and 'sequence' cannot be provided in purge request" @@ -570,13 +598,109 @@ async def purge( return purge_response.purged + async def _get_msg( + self, + sequence: Optional[int] = None, + next_by_subject: Optional[str] = None, + last_by_subject: Optional[str] = None, + timeout: Optional[float] = None, + ) -> RawStreamMsg: + msg_get_request = MsgGetRequest( + sequence=sequence, + last_by_subject=last_by_subject, + next_by_subject=next_by_subject, + ) + + if self._info.config.allow_direct: + if last_by_subject is not None: + direct_get_subject = f"DIRECT.GET.{self._name}.{last_by_subject}" + direct_get_request = b"" + else: + direct_get_subject = f"DIRECT.GET.{sequence}" + direct_get_request = msg_get_request.as_json().encode() + + direct_get_response = await self._client.request_msg( + direct_get_subject, direct_get_request, timeout=timeout + ) + + headers = direct_get_response.headers + if headers is None: + raise Error('response should have headers') + + data = direct_get_response.data + if len(data) == 0: + status = headers.get("Status") + if status == 
Status.NO_MESSAGES: + raise MsgNotFoundError() + else: + description = headers.get( + "Description", "unable to get message" + ) + raise Error(description=description) + + subject = headers.get(Header.SUBJECT) + if subject is None: + raise Error('missing subject header') + + sequence = headers.get(Header.SEQUENCE) + if sequence is None: + raise Error('missing sequence header') + + try: + sequence = int(sequence) + except ValueError as error: + raise Error(f'invalid sequence header: {error}') + + time = headers.get(Header.TIMESTAMP) + if time is None: + raise Error(f'missing timestamp header') + + try: + # Parse from RFC3339 + time = datetime.strptime(time, "%Y-%m-%dT%H:%M:%S.%fZ") + except ValueError as error: + raise ValueError(f'invalid timestamp header: {error}') + + return RawStreamMsg( + subject=subject, + sequence=sequence, + headers=headers, + data=data, + time=time, + ) + + msg_get_subject = "MSG.GET.{self._name}" + msg_get_response = await self._client.request_json( + msg_get_subject, msg_get_request, MsgGetResponse, timeout=timeout + ) + + if msg_get_response.error is not None: + if msg_get_response.error.error_code == ErrorCode.MESSAGE_NOT_FOUND: + raise MsgNotFoundError() + + raise Error(*msg_get_response.error) + + headers = None + raw_headers = msg_get_response.msg.headers + if len(raw_headers) > 0: + # TODO parse headers + pass + + return RawStreamMsg( + subject=msg_get_response.msg.subject, + sequence=msg_get_response.msg.sequence, + headers=headers, + ) + async def get_msg( - self, sequence: int, timeout: Optional[float] = None + self, + sequence: int, + timeout: Optional[float] = None, ) -> RawStreamMsg: """ Retrieves a raw stream message stored in JetStream by sequence number. 
""" - raise NotImplementedError + return await self._get_msg(sequence=sequence, timeout=timeout) async def get_last_msg_for_subject( self, subject: str, timeout: Optional[float] = None @@ -584,18 +708,15 @@ async def get_last_msg_for_subject( """ Retrieves the last raw stream message stored in JetStream on a given subject. """ - raise NotImplementedError + return await self._get_msg(last_by_subject=subject, timeout=timeout) - async def delete_msg( - self, sequence: int, timeout: Optional[float] = None - ) -> None: - """ - Deletes a message from a stream. - """ + async def _delete_msg( + self, sequence: int, no_erase: bool, timeout: Optional[float] + ): msg_delete_subject = f"STREAM.MSG.DELETE.{sequence}" msg_delete_request = MsgDeleteRequest( sequence=sequence, - no_erase=True, + no_erase=no_erase, ) msg_delete_response = await self._client.request_json( @@ -604,29 +725,29 @@ async def delete_msg( MsgDeleteResponse, timeout=timeout ) + if msg_delete_response.error is not None: raise Error(*msg_delete_response.error) - async def secure_delete_msg( + async def delete_msg( self, sequence: int, timeout: Optional[float] = None ) -> None: """ Deletes a message from a stream. """ - msg_delete_subject = f"STREAM.MSG.DELETE.{sequence}" - msg_delete_request = MsgDeleteRequest( - sequence=sequence, - no_erase=False, + await self._delete_msg( + sequence=sequence, no_erase=True, timeout=timeout ) - msg_delete_response = await self._client.request_json( - msg_delete_subject, - msg_delete_request, - MsgDeleteResponse, - timeout=timeout + async def secure_delete_msg( + self, sequence: int, timeout: Optional[float] = None + ) -> None: + """ + Deletes a message from a stream. + """ + await self._delete_msg( + sequence=sequence, no_erase=False, timeout=timeout ) - if msg_delete_response.error is not None: - raise Error(*msg_delete_response.error) class StreamManager: @@ -634,6 +755,9 @@ class StreamManager: Provides methods for managing streams. 
""" + def __init__(self, client: Client) -> None: + self._client = client + async def create_stream( self, config: StreamConfig, timeout: Optional[float] = None ) -> Stream: @@ -654,13 +778,24 @@ async def create_or_update_stream( self, config: StreamConfig, timeout: Optional[float] = None ) -> Stream: """CreateOrUpdateStream creates a stream with given config or updates it if it already exists.""" - raise NotImplementedError + try: + return await self.update_stream(config, timeout=timeout) + except StreamNotFoundError: + return await self.create_stream(config, timeout=timeout) async def stream( - self, stream: str, timeout: Optional[float] = None + self, name: str, timeout: Optional[float] = None ) -> Stream: """Stream fetches StreamInfo and returns a Stream interface for a given stream name.""" - raise NotImplementedError + validate_stream_name(name) + info_request = StreamInfoRequest() + info_response = await self._client.request_json() + + return Stream( + client=self._client, + name=info_response.name, + info=cast(StreamInfo, info_response) + ) async def stream_name_by_subject( self, subject: str, timeout: Optional[float] = None @@ -716,14 +851,14 @@ class StreamPurgeResponse(Response): @dataclass class MsgGetRequest(Request): - sequence: int = field(metadata={'json': 'seq'}) - last_for: int = field(metadata={'json': 'last_by_subj'}) - next_for: int = field(metadata={'json': 'next_by_subj'}) + sequence: Optional[int] = field(metadata={'json': 'seq'}) + last_by_subject: Optional[str] = field(metadata={'json': 'last_by_subj'}) + next_by_subject: Optional[str] = field(metadata={'json': 'next_by_subj'}) @dataclass class MsgGetResponse(Response): - pass + msg: StoredMsg = field(init=False, metadata={'json': 'seq'}) @dataclass @@ -737,6 +872,17 @@ class MsgDeleteResponse(Response): success: bool = field(default=False, metadata={'json': 'success'}) +def validate_stream_name(stream_name: Optional[str]): + if stream_name is None: + raise ValueError("Stream name is 
required.") + + if stream_name == "": + raise ValueError("Stream name cannot be empty.") + + if re.search(r'[>\*\./\\]', stream_name): + raise ValueError(f"Invalid stream name: '{stream_name}'") + + __all__ = [ 'RetentionPolicy', 'DiscardPolicy', From a1861f81e4f9cd2520a3f243650a7daec49b60ae Mon Sep 17 00:00:00 2001 From: Casper Beyer Date: Thu, 27 Jun 2024 22:13:37 +0200 Subject: [PATCH 12/22] Wip --- nats/jetstream/__init__.py | 9 ++- nats/jetstream/api.py | 21 ++--- nats/jetstream/consumer.py | 6 +- nats/jetstream/context.py | 15 +++- nats/jetstream/errors.py | 16 ++++ nats/jetstream/key_value.py | 15 ---- nats/jetstream/message.py | 4 +- nats/jetstream/object.py | 15 ---- nats/jetstream/publish.py | 5 +- nats/jetstream/stream.py | 149 ++++++++++++++++++++++++++++++++---- 10 files changed, 188 insertions(+), 67 deletions(-) delete mode 100644 nats/jetstream/key_value.py delete mode 100644 nats/jetstream/object.py diff --git a/nats/jetstream/__init__.py b/nats/jetstream/__init__.py index ce2de7f3..55411cd0 100644 --- a/nats/jetstream/__init__.py +++ b/nats/jetstream/__init__.py @@ -14,12 +14,15 @@ from typing import Any +import nats + from .api import Client from .context import Context +from .stream import Stream, StreamConfig, StreamInfo, StreamState -async def new(client: Any) -> Context: - return Context(api.Client(client)) +async def new(client: nats.Client) -> Context: + return Context(client) -__all__ = ['new', 'Context'] +__all__ = ['new', 'Context', "Stream"] diff --git a/nats/jetstream/api.py b/nats/jetstream/api.py index 2f2ac051..89ca5b9d 100644 --- a/nats/jetstream/api.py +++ b/nats/jetstream/api.py @@ -18,19 +18,20 @@ from dataclasses import MISSING, dataclass, field, fields, is_dataclass from typing import ( Any, - Protocol, Dict, Optional, + Protocol, Self, Type, TypeVar, get_args, get_origin, ) -from urllib import parse -from nats.js.api import DEFAULT_PREFIX -from nats.jetstream.message import Msg +import nats + +from .api import DEFAULT_PREFIX 
+from .message import Msg def as_dict(instance: Any) -> Dict[str, Any]: @@ -92,7 +93,6 @@ def from_dict(data, cls: type) -> Any: @dataclass class Request: - def as_dict(self) -> Dict[str, Any]: return as_dict(self) @@ -108,7 +108,7 @@ class Paged: @dataclass -class Error: +class Error(Exception): code: Optional[int] = field(default=None, metadata={"json": "code"}) error_code: Optional[int] = field( default=None, metadata={"json": "err_code"} @@ -137,6 +137,9 @@ def from_json(cls: Type[T], data: str) -> T: """ return cls.from_dict(json.loads(data)) + def handle_error(self): + if self.error: + raise self.error class Client: """ @@ -159,7 +162,7 @@ async def request( payload: bytes, timeout: Optional[float] = None, headers: Optional[Dict[str, str]] = None - ) -> Any: + ) -> nats.Msg: if timeout is None: timeout = self.timeout @@ -178,7 +181,7 @@ async def request_msg( return self.inner.request(subject, payload, timeout=timeout) async def request_json( - self, subject: str, request_object: Request, response_type: Type[T], + self, subject: str, data: Request, response_type: Type[T], timeout: float | None ) -> T: if self.prefix is not None: @@ -187,7 +190,7 @@ async def request_json( if timeout is None: timeout = self.timeout - request_payload = request_object.as_json() + request_payload = data.as_json() response = await self.inner.request( subject, request_payload, timeout=timeout ) diff --git a/nats/jetstream/consumer.py b/nats/jetstream/consumer.py index c2f526cd..cf59f758 100644 --- a/nats/jetstream/consumer.py +++ b/nats/jetstream/consumer.py @@ -17,7 +17,7 @@ from dataclasses import dataclass, field from datetime import datetime, timedelta from enum import Enum -from typing import Optional +from typing import Dict, List, Optional class DeliverPolicy(Enum): @@ -80,7 +80,7 @@ class SequenceInfo: stream: int = field(metadata={'json': 'stream_seq'}) """Stream sequence number.""" - last: Optional[datetime] = field( + last_active: Optional[datetime] = field( 
default=None, metadata={'json': 'last_active'} ) """Last activity timestamp.""" @@ -194,7 +194,7 @@ class ConsumerConfig: ) """Duration which instructs the server to clean up the consumer if it has been inactive.""" - replicas: int = field(metadata={'json': 'num_replicas'}) + replicas: Optional[int] = field(default=None, metadata={'json': 'num_replicas'}) """Number of replicas for the consumer's state.""" memory_storage: Optional[bool] = field( diff --git a/nats/jetstream/context.py b/nats/jetstream/context.py index 0e8713fd..47c5252f 100644 --- a/nats/jetstream/context.py +++ b/nats/jetstream/context.py @@ -14,9 +14,11 @@ from typing import Type, TypeVar -from nats.jetstream.api import Client -from nats.jetstream.publish import Publisher -from nats.jetstream.stream import StreamManager +import nats + +from .api import Client +from .publish import Publisher +from .stream import StreamManager class Context( @@ -37,6 +39,11 @@ class Context( - Managing object stores using `ObjectStoreManager`. 
""" - def __init__(self, client: Client): + def __init__(self, connection: nats.Client, timeout: float = 2): + client = Client( + connection, + timeout=timeout, + ) + Publisher.__init__(self, client) StreamManager.__init__(self, client) diff --git a/nats/jetstream/errors.py b/nats/jetstream/errors.py index 2111af89..581ae947 100644 --- a/nats/jetstream/errors.py +++ b/nats/jetstream/errors.py @@ -52,12 +52,28 @@ def __str__(self) -> str: ) +class StreamNameAlreadyInUseError(Error): + pass + + class StreamNotFoundError(Error): def __init__(self): super().__init__() +class StreamSubjectTransformNotSupportedError(Error): + pass + + +class StreamSourceNotSupportedError(Error): + pass + + +class StreamSourceMultipleFilterSubjectsNotSupported(Error): + pass + + class MsgNotFoundError(Error): def __init__(self): diff --git a/nats/jetstream/key_value.py b/nats/jetstream/key_value.py deleted file mode 100644 index 4cd4f8af..00000000 --- a/nats/jetstream/key_value.py +++ /dev/null @@ -1,15 +0,0 @@ -# Copyright 2016-2024 The NATS Authors -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# - -# TODO(caspervonb) diff --git a/nats/jetstream/message.py b/nats/jetstream/message.py index 0622e881..62d130af 100644 --- a/nats/jetstream/message.py +++ b/nats/jetstream/message.py @@ -12,11 +12,11 @@ # limitations under the License. 
# -import nats.aio.msg - from dataclasses import dataclass, field from enum import Enum +import nats.aio.msg + class Header(str, Enum): """ diff --git a/nats/jetstream/object.py b/nats/jetstream/object.py deleted file mode 100644 index 4cd4f8af..00000000 --- a/nats/jetstream/object.py +++ /dev/null @@ -1,15 +0,0 @@ -# Copyright 2016-2024 The NATS Authors -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# - -# TODO(caspervonb) diff --git a/nats/jetstream/publish.py b/nats/jetstream/publish.py index ff6a3372..5f19f075 100644 --- a/nats/jetstream/publish.py +++ b/nats/jetstream/publish.py @@ -73,6 +73,9 @@ async def publish( """ extra_headers = {} + if id is not None: + extra_headers[Header.MSG_ID] = id + if expected_last_msg_id is not None: extra_headers[Header.EXPECTED_LAST_MSG_ID ] = str(expected_last_msg_id) @@ -81,7 +84,7 @@ async def publish( extra_headers[Header.EXPECTED_STREAM] = str(expected_stream) if expected_last_sequence is not None: - extra_headers[Header.EXPECTED_LAST_SEQ + extra_headers[Header.EXPECTED_LAST_SEQUENCE ] = str(expected_last_sequence) if expected_last_subject_sequence is not None: diff --git a/nats/jetstream/stream.py b/nats/jetstream/stream.py index f6a55c0e..48c2356f 100644 --- a/nats/jetstream/stream.py +++ b/nats/jetstream/stream.py @@ -15,16 +15,24 @@ from __future__ import annotations import re - -from dataclasses import dataclass, field +from dataclasses import dataclass, asdict, field from datetime import datetime, timedelta from enum 
import Enum from types import NotImplementedType -from typing import Dict, List, Optional, cast - +from typing import ( + Any, + AsyncIterable, + AsyncIterator, + Dict, + List, + Optional, + cast, +) + +from nats import jetstream from nats.jetstream.api import Client, Paged, Request, Response from nats.jetstream.errors import * -from nats.jetstream.message import Msg, Header, Status +from nats.jetstream.message import Header, Msg, Status class RetentionPolicy(Enum): @@ -509,7 +517,7 @@ class RawStreamMsg: data: Optional[bytes] = field(default=None, metadata={"json": "data"}) """ Data of the message.""" - headers: Dict[str, Any] = field( + headers: Optional[Dict[str, Any]] = field( default_factory=dict, metadata={"json": "hdrs"} ) """ Headers of the message. """ @@ -656,7 +664,6 @@ async def _get_msg( raise Error(f'missing timestamp header') try: - # Parse from RFC3339 time = datetime.strptime(time, "%Y-%m-%dT%H:%M:%S.%fZ") except ValueError as error: raise ValueError(f'invalid timestamp header: {error}') @@ -682,13 +689,14 @@ async def _get_msg( headers = None raw_headers = msg_get_response.msg.headers - if len(raw_headers) > 0: - # TODO parse headers + if raw_headers: + # TODO(caspervonb): parse headers pass return RawStreamMsg( subject=msg_get_response.msg.subject, sequence=msg_get_response.msg.sequence, + time=msg_get_response.msg.time, headers=headers, ) @@ -764,7 +772,45 @@ async def create_stream( """ Creates a new stream with given config. 
""" - raise NotImplementedError + + stream_create_subject = f"STREAM.CREATE" + stream_create_request = StreamCreateRequest(**asdict(config)) + stream_create_response = await self._client.request_json( + stream_create_subject, + stream_create_request, + StreamCreateResponse, + timeout=timeout + ) + + if stream_create_response.error: + if stream_create_response.error.error_code == ErrorCode.STREAM_NAME_IN_USE: + raise StreamNameAlreadyInUseError( + ) from stream_create_response.error + + raise Error(*stream_create_response.error) + + # Check if subject transforms are supported + if config.subject_transform and not stream_create_response.config.subject_transform: + raise StreamSubjectTransformNotSupportedError() + + # Check if sources and subject transforms are supported + if config.sources: + if not stream_create_response.config.sources: + raise StreamSourceNotSupportedError() + + for i in range(len(config.sources)): + source = config.sources[i] + response_source = stream_create_response.config.sources[i] + + if source.subject_transforms and not response_source.subject_transforms: + raise StreamSourceMultipleFilterSubjectsNotSupported() + + return Stream( + client=self._client, + name=stream_create_response.config.name, + info=cast(StreamInfo, stream_create_response), + ) + async def update_stream( self, config: StreamConfig, timeout: Optional[float] = None @@ -788,13 +834,26 @@ async def stream( ) -> Stream: """Stream fetches StreamInfo and returns a Stream interface for a given stream name.""" validate_stream_name(name) - info_request = StreamInfoRequest() - info_response = await self._client.request_json() + + stream_info_subject = f"STREAM.INFO.{name}" + stream_info_request = StreamInfoRequest() + stream_info_response = await self._client.request_json( + stream_info_subject, + stream_info_request, + StreamInfoResponse, + timeout=timeout + ) + + if stream_info_response.error: + if stream_info_response.error.error_code == ErrorCode.STREAM_NOT_FOUND: + raise 
StreamNotFoundError() + + raise Error(*stream_info_response.error) return Stream( client=self._client, - name=info_response.name, - info=cast(StreamInfo, info_response) + name=name, + info=cast(StreamInfo, stream_info_response) ) async def stream_name_by_subject( @@ -807,7 +866,28 @@ async def delete_stream( self, stream: str, timeout: Optional[float] = None ) -> None: """DeleteStream removes a stream with given name.""" - raise NotImplementedError + validate_stream_name(stream) + + stream_delete_subject = f"STREAM.DELETE.{stream}" + stream_delete_request = StreamDeleteRequest() + stream_delete_response = await self._client.request_json( + stream_delete_subject, + stream_delete_request, + StreamDeleteResponse, + timeout=timeout + ) + + if stream_delete_response.error: + if stream_delete_response.error.error_code == ErrorCode.STREAM_NOT_FOUND: + raise StreamNotFoundError() from stream_delete_response.error + + raise Error(*stream_delete_response.error) + + return Stream( + client=self._client, + name=name, + info=cast(StreamInfo, stream_delete_response) + ) def list_streams(self, timeout: Optional[float] = None @@ -821,6 +901,45 @@ def stream_names(self, raise NotImplementedError +class StreamInfoAsyncIterator: + pass + + +class StreamInfoLister(AsyncIterable): + "Provides asyncronous iteration over `StreamInfo`" + pass + + +class StreamNameLister: + pass + + +@dataclass +class StreamCreateRequest(Request, StreamConfig): + pass + +@dataclass +class StreamCreateResponse(Response, StreamInfo): + pass + +@dataclass +class StreamUpdateRequest(Request, StreamConfig): + pass + +@dataclass +class StreamUpdateResponse(Response, StreamInfo): + pass + +@dataclass +class StreamDeleteRequest(Request): + pass + + +@dataclass +class StreamDeleteResponse(Response): + pass + + @dataclass class StreamInfoRequest(Request, Paged): deleted_details: Optional[bool] = field( From 18ba816e821b867d6b082368aa01204cb37dcfaf Mon Sep 17 00:00:00 2001 From: Casper Beyer Date: Thu, 4 Jul 2024 
T = TypeVar("T", bound="Base")

DEFAULT_PREFIX = "$JS.API"
INBOX_PREFIX = b'_INBOX.'


@dataclass
class Base:
    """
    Base for request/response payloads. Converts to and from JSON, using
    each field's ``json`` metadata entry as the wire name.
    """

    def to_dict(self, include_null: bool = False) -> Dict[str, Any]:
        """
        Converts self to a dictionary keyed by wire (``json`` metadata) names.

        Args:
            include_null: When False (the default), fields whose value is
                None are omitted from the result.
        """
        # `asdict`'s dict_factory receives plain (name, value) pairs, not
        # dataclass fields, so it cannot apply the `json` metadata names;
        # walk the fields explicitly instead.
        result: Dict[str, Any] = {}
        for spec in fields(self):
            value = getattr(self, spec.name)
            if value is None and not include_null:
                continue
            result[spec.metadata.get("json", spec.name)] = _unstructure(
                value, include_null
            )
        return result

    def as_json(self, include_null: bool = False) -> str:
        """Converts this to a JSON string."""
        return json.dumps(self.to_dict(include_null))

    @classmethod
    def from_dict(cls: Type[T], data: Dict[str, Any]) -> T:
        """
        Constructs an instance from a JSON dictionary, mapping wire
        (``json`` metadata) names back to field names. Unknown keys are
        ignored.

        Raises:
            ValueError: When `cls` is not a dataclass.
        """
        if not is_dataclass(cls):
            raise ValueError(f"{cls.__name__} must be a dataclass")

        # wire name -> python field name
        name_map = {
            spec.metadata.get("json", spec.name): spec.name
            for spec in fields(cls)
        }
        kwargs = {
            name_map[key]: value
            for key, value in data.items()
            if key in name_map
        }
        return cls(**kwargs)

    @classmethod
    def from_json(cls: Type[T], data: str) -> T:
        """Constructs an instance from a JSON string.

        Required by `Client.request_json`, which deserializes responses via
        `response_type.from_json(...)`.
        """
        return cls.from_dict(json.loads(data))


def _unstructure(value: Any, include_null: bool = False) -> Any:
    """Recursively converts nested `Base` values, lists and dicts to plain data."""
    if isinstance(value, Base):
        return value.to_dict(include_null)
    if isinstance(value, list):
        return [_unstructure(item, include_null) for item in value]
    if isinstance(value, dict):
        return {key: _unstructure(item, include_null) for key, item in value.items()}
    return value


@dataclass
class Request(Base):
    pass


@dataclass
class Paged(Base):
    # Standard JetStream paging envelope.
    total: int = field(default=0, metadata={"json": "total"})
    offset: int = field(default=0, metadata={"json": "offset"})
    limit: int = field(default=0, metadata={"json": "limit"})


@dataclass
class Error(Exception):
    code: Optional[int] = field(default=None, metadata={"json": "code"})
    error_code: Optional[int] = field(
        default=None, metadata={"json": "err_code"}
    )
    description: Optional[str] = field(
        default=None, metadata={"json": "description"}
    )


@dataclass
class Response(Base):
    type: str
    error: Optional[Error] = field(default=None)
@@ -182,7 +152,8 @@ async def request_msg( async def request_json( self, subject: str, data: Request, response_type: Type[T], - timeout: float | None + timeout: float | None, + return_exceptions: bool = False, ) -> T: if self.prefix is not None: subject = f"{self.prefix}.{subject}" @@ -194,4 +165,5 @@ async def request_json( response = await self.inner.request( subject, request_payload, timeout=timeout ) + return response_type.from_json(response.data) diff --git a/nats/jetstream/consumer.py b/nats/jetstream/consumer.py index cf59f758..9a1ec43c 100644 --- a/nats/jetstream/consumer.py +++ b/nats/jetstream/consumer.py @@ -303,6 +303,11 @@ class OrderedConsumerConfig: class Consumer: + async def info(self) -> ConsumerInfo: + """ + Returns detailed information about the consumer. + """ + raise NotImplementedError @@ -402,3 +407,12 @@ async def delete_consumer( If consumer does not exist, an error is raised. """ raise NotImplementedError + +@dataclass +class CreateConsumerRequest(Request): + stream: str = field(metadata={'json': 'stream_name'}) + config: ConsumerConfig = field(metadata={'json': 'config'}) + action: str = field(metadata={'json': 'action'}) + +class ConsumerInfoResponse(Response, ConsumerInfo): + pass diff --git a/nats/jetstream/context.py b/nats/jetstream/context.py index 47c5252f..25ccbe96 100644 --- a/nats/jetstream/context.py +++ b/nats/jetstream/context.py @@ -12,9 +12,7 @@ # limitations under the License. # -from typing import Type, TypeVar - -import nats +from typing import Any, Type, TypeVar from .api import Client from .publish import Publisher @@ -39,7 +37,7 @@ class Context( - Managing object stores using `ObjectStoreManager`. 
""" - def __init__(self, connection: nats.Client, timeout: float = 2): + def __init__(self, connection: Any, timeout: float = 2): client = Client( connection, timeout=timeout, diff --git a/nats/jetstream/errors.py b/nats/jetstream/errors.py index 581ae947..17cd7dbc 100644 --- a/nats/jetstream/errors.py +++ b/nats/jetstream/errors.py @@ -63,18 +63,36 @@ def __init__(self): class StreamSubjectTransformNotSupportedError(Error): - pass + + def __init__(self): + super().__init__() + class StreamSourceNotSupportedError(Error): - pass + + def __init__(self): + super().__init__() class StreamSourceMultipleFilterSubjectsNotSupported(Error): - pass + + def __init__(self): + super().__init__() class MsgNotFoundError(Error): def __init__(self): super().__init__() + +class NoStreamResponseError(Error): + + def __init__(self): + super().__init__() + + +class InvalidResponseError(Error): + + def __init__(self): + super().__init__() diff --git a/nats/jetstream/message.py b/nats/jetstream/message.py index 62d130af..5470f026 100644 --- a/nats/jetstream/message.py +++ b/nats/jetstream/message.py @@ -49,7 +49,7 @@ class Header(str, Enum): option. """ - EXPECTED_LAST_SUBJECT_SEQEQUENCE = "Nats-Expected-Last-Subject-Sequence" + EXPECTED_LAST_SUBJECT_SEQUENCE = "Nats-Expected-Last-Subject-Sequence" """Contains the expected last sequence number on the subject and can be used to apply optimistic concurrency control at the subject level. The server will reject the message if it is not the case. diff --git a/nats/jetstream/publish.py b/nats/jetstream/publish.py index 5f19f075..1583063e 100644 --- a/nats/jetstream/publish.py +++ b/nats/jetstream/publish.py @@ -13,7 +13,7 @@ # import json -from asyncio import Future +from asyncio import Future, Semaphore from dataclasses import dataclass, field from typing import Dict, Optional, cast @@ -47,11 +47,11 @@ class PubAck: The domain the message was published to. 
""" - class Publisher: - - def __init__(self, client: Client): - self.client = client + def __init__(self, client: Client, max_pending_async_futures: int = 1000): + self._client = client + self._pending_async_futures = {} + self._pending_async_publishes = Semaphore(max_pending_async_futures) async def publish( self, @@ -99,7 +99,7 @@ async def publish( for attempt in range(0, retry_attempts): try: - msg = await self.client.request( + msg = await self._client.request( subject, payload, timeout=timeout, @@ -111,7 +111,7 @@ async def publish( raise Error(*pub_ack_response.error) if pub_ack_response.stream is None: - raise InvalidAckError( + raise InvalidResponseError( "Stream was not provided with publish ack response" ) @@ -122,6 +122,5 @@ async def publish( raise NoStreamResponseError - class PubAckResponse(Response, PubAck): pass diff --git a/nats/jetstream/stream.py b/nats/jetstream/stream.py index 48c2356f..e5abc5de 100644 --- a/nats/jetstream/stream.py +++ b/nats/jetstream/stream.py @@ -29,6 +29,8 @@ cast, ) +import nats.aio.client + from nats import jetstream from nats.jetstream.api import Client, Paged, Request, Response from nats.jetstream.errors import * @@ -152,16 +154,16 @@ class StreamConfig: ) """Retention defines the message retention policy for the stream. Defaults to LimitsPolicy.""" - max_consumers: int = field(metadata={'json': 'max_consumers'}) + max_consumers: Optional[int] = field(default=None, metadata={'json': 'max_consumers'}) """MaxConsumers specifies the maximum number of consumers allowed for the stream.""" - max_msgs: int = field(metadata={'json': 'max_msgs'}) + max_msgs: Optional[int] = field(default=None, metadata={'json': 'max_msgs'}) """MaxMsgs is the maximum number of messages the stream will store. After reaching the limit, stream adheres to the discard policy. 
def direct_msg_to_raw_stream_msg(msg: Msg) -> RawStreamMsg:
    """
    Converts a direct-get `Msg` (message body in `data`, metadata carried in
    headers) into a `RawStreamMsg`.

    Raises:
        MsgNotFoundError: If the response reports no messages.
        Error: If a required metadata header is missing or malformed.
    """
    headers = msg.headers
    if headers is None:
        # `Error`'s first positional field is the numeric `code`; these
        # messages belong in `description`.
        raise Error(description='response should have headers')

    data = msg.data
    if len(data) == 0:
        # An empty body is a status response, not a stored message.
        status = headers.get("Status")
        if status == Status.NO_MESSAGES:
            raise MsgNotFoundError()
        else:
            description = headers.get(
                "Description", "unable to get message"
            )
            raise Error(description=description)

    subject = headers.get(Header.SUBJECT)
    if subject is None:
        raise Error(description='missing subject header')

    raw_sequence = headers.get(Header.SEQUENCE)
    if raw_sequence is None:
        raise Error(description='missing sequence header')

    try:
        sequence = int(raw_sequence)
    except ValueError as error:
        raise Error(description=f'invalid sequence header: {error}')

    raw_time = headers.get(Header.TIMESTAMP)
    if raw_time is None:
        raise Error(description='missing timestamp header')

    try:
        # Parse the RFC 3339 timestamp; assumes sub-second precision fits
        # %f (microseconds) -- TODO confirm against the server's format.
        time = datetime.strptime(raw_time, "%Y-%m-%dT%H:%M:%S.%fZ")
    except ValueError as error:
        raise ValueError(f'invalid timestamp header: {error}')

    return RawStreamMsg(
        subject=subject,
        sequence=sequence,
        headers=headers,
        data=data,
        time=time,
    )


def stored_msg_to_raw_stream_msg(msg: StoredMsg) -> RawStreamMsg:
    """
    Converts a `StoredMsg` (JSON API msg-get result) into a `RawStreamMsg`.

    Raises:
        NotImplementedError: If the stored message carries raw headers,
            since wire-format header parsing is not implemented yet.
    """
    headers = None
    if msg.headers:
        raise NotImplementedError('parsing headers is not implemented yet')

    return RawStreamMsg(
        subject=msg.subject,
        sequence=msg.sequence,
        time=msg.time,
        headers=headers,
    )
class JetStreamStreamTest(SingleJetStreamServerTestCase):
    """
    Table-driven consumer-creation tests run against a single local
    JetStream server. Each case gives a `ConsumerConfig` plus the expected
    outcome: created, or a specific error type.
    """

    async def test_create_or_update_consumer(self):
        tests = [
            {
                "name": "create durable pull consumer",
                "consumer_config": ConsumerConfig(durable="dur"),
                "should_create": True,
                "with_error": None
            },
            {
                "name": "create ephemeral pull consumer",
                "consumer_config": ConsumerConfig(ack_policy=AckPolicy.NONE),
                "should_create": True,
                "with_error": None
            },
            {
                "name": "with filter subject",
                "consumer_config": ConsumerConfig(filter_subject="FOO.A"),
                "should_create": True,
                "with_error": None
            },
            {
                "name": "with multiple filter subjects",
                "consumer_config": ConsumerConfig(filter_subjects=["FOO.A", "FOO.B"]),
                "should_create": True,
                "with_error": None
            },
            {
                "name": "with multiple filter subjects, overlapping subjects",
                "consumer_config": ConsumerConfig(filter_subjects=["FOO.*", "FOO.B"]),
                "should_create": False,
                "with_error": OverlappingFilterSubjectsError
            },
            {
                "name": "with multiple filter subjects and filter subject provided",
                "consumer_config": ConsumerConfig(filter_subjects=["FOO.A", "FOO.B"], filter_subject="FOO.C"),
                "should_create": False,
                "with_error": DuplicateFilterSubjectsError
            },
            {
                "name": "with empty subject in filter subjects",
                "consumer_config": ConsumerConfig(filter_subjects=["FOO.A", ""]),
                "should_create": False,
                "with_error": EmptyFilterError
            },
            {
                "name": "consumer already exists, update",
                "consumer_config": ConsumerConfig(durable="dur", description="test consumer"),
                "should_create": True,
                "with_error": None
            },
            {
                "name": "consumer already exists, illegal update",
                "consumer_config": ConsumerConfig(durable="dur", ack_policy=AckPolicy.NONE),
                "should_create": False,
                "with_error": ConsumerCreateError
            },
            {
                "name": "invalid durable name",
                "consumer_config": ConsumerConfig(durable="dur.123"),
                "should_create": False,
                "with_error": InvalidConsumerNameError
            },
        ]

        client = await nats.connect()
        context = await nats.jetstream.new(client)
        stream = await context.create_stream(
            StreamConfig(name="foo", subjects=["FOO.*"])
        )

        for test in tests:
            with self.subTest(test=test["name"]):
                try:
                    # Watch the consumer-create API subject so we can assert
                    # that a create request was actually sent.
                    if test["consumer_config"].filter_subject:
                        subscription = await client.subscribe(f"$JS.API.CONSUMER.CREATE.foo.*.{test['consumer_config'].filter_subject}")
                    else:
                        subscription = await client.subscribe("$JS.API.CONSUMER.CREATE.foo.*")

                    await stream.create_or_update_consumer(test["consumer_config"])

                    if test["with_error"]:
                        self.fail(f"Expected error: {test['with_error']}; got: None")
                    if test["should_create"]:
                        self.assertIsNotNone(await subscription.next_msg())
                except Exception as e:
                    if not test["with_error"]:
                        self.fail(f"Unexpected error: {e}")
                    if not isinstance(e, test["with_error"]):
                        self.fail(f"Expected error: {test['with_error']}; got: {e}")

    async def test_create_consumer(self):
        # NOTE(review): this test was accidentally defined at module level
        # with a `self` parameter (so it was never collected) and referenced
        # an undefined `self.nc`; it now lives in the class and uses the
        # locally created connection, matching its sibling test.
        tests = [
            {
                "name": "create durable pull consumer",
                "consumer_config": ConsumerConfig(durable="dur"),
                "should_create": True,
                "with_error": None
            },
            {
                "name": "idempotent create, no error",
                "consumer_config": ConsumerConfig(durable="dur"),
                "should_create": True,
                "with_error": None
            },
            {
                "name": "create ephemeral pull consumer",
                "consumer_config": ConsumerConfig(ack_policy=AckPolicy.NONE),
                "should_create": True,
                "with_error": None
            },
            {
                "name": "with filter subject",
                "consumer_config": ConsumerConfig(filter_subject="FOO.A"),
                "should_create": True,
                "with_error": None
            },
            {
                "name": "with metadata",
                "consumer_config": ConsumerConfig(metadata={"foo": "bar", "baz": "quux"}),
                "should_create": True,
                "with_error": None
            },
            {
                "name": "with multiple filter subjects",
                "consumer_config": ConsumerConfig(filter_subjects=["FOO.A", "FOO.B"]),
                "should_create": True,
                "with_error": None
            },
            {
                "name": "with multiple filter subjects, overlapping subjects",
                "consumer_config": ConsumerConfig(filter_subjects=["FOO.*", "FOO.B"]),
                "should_create": False,
                "with_error": OverlappingFilterSubjectsError
            },
            {
                "name": "with multiple filter subjects and filter subject provided",
                "consumer_config": ConsumerConfig(filter_subjects=["FOO.A", "FOO.B"], filter_subject="FOO.C"),
                "should_create": False,
                "with_error": DuplicateFilterSubjectsError
            },
            {
                "name": "with empty subject in filter subjects",
                "consumer_config": ConsumerConfig(filter_subjects=["FOO.A", ""]),
                "should_create": False,
                "with_error": EmptyFilterError
            },
            {
                "name": "with invalid filter subject, leading dot",
                "consumer_config": ConsumerConfig(filter_subject=".foo"),
                "should_create": False,
                "with_error": InvalidConsumerNameError
            },
            {
                "name": "with invalid filter subject, trailing dot",
                "consumer_config": ConsumerConfig(filter_subject="foo."),
                "should_create": False,
                "with_error": InvalidConsumerNameError
            },
            {
                "name": "consumer already exists, error",
                "consumer_config": ConsumerConfig(durable="dur", description="test consumer"),
                "should_create": False,
                "with_error": ConsumerExistsError
            },
            {
                "name": "invalid durable name",
                "consumer_config": ConsumerConfig(durable="dur.123"),
                "should_create": False,
                "with_error": InvalidConsumerNameError
            },
        ]

        client = await nats.connect()
        context = await nats.jetstream.new(client)
        stream = await context.create_stream(
            StreamConfig(name="foo", subjects=["FOO.*"])
        )

        for test in tests:
            with self.subTest(test=test["name"]):
                try:
                    if test["consumer_config"].filter_subject:
                        subscription = await client.subscribe(f"$JS.API.CONSUMER.CREATE.foo.*.{test['consumer_config'].filter_subject}")
                    else:
                        subscription = await client.subscribe("$JS.API.CONSUMER.CREATE.foo.*")

                    await stream.create_consumer(test["consumer_config"])

                    if test["with_error"]:
                        self.fail(f"Expected error: {test['with_error']}; got: None")
                    if test["should_create"]:
                        self.assertIsNotNone(await subscription.next_msg())
                except Exception as e:
                    if not test["with_error"]:
                        self.fail(f"Unexpected error: {e}")
                    if not isinstance(e, test["with_error"]):
                        self.fail(f"Expected error: {test['with_error']}; got: {e}")
tests/test_jetstream_stream_manager.py | 145 +++++++++++++++++++++++++ 3 files changed, 155 insertions(+), 10 deletions(-) create mode 100644 tests/test_jetstream_stream_manager.py diff --git a/nats/jetstream/__init__.py b/nats/jetstream/__init__.py index 55411cd0..064e1715 100644 --- a/nats/jetstream/__init__.py +++ b/nats/jetstream/__init__.py @@ -14,14 +14,14 @@ from typing import Any -import nats +import nats.aio.client from .api import Client from .context import Context from .stream import Stream, StreamConfig, StreamInfo, StreamState -async def new(client: nats.Client) -> Context: +async def new(client: nats.aio.client.Client) -> Context: return Context(client) diff --git a/nats/jetstream/stream.py b/nats/jetstream/stream.py index e5abc5de..320834c2 100644 --- a/nats/jetstream/stream.py +++ b/nats/jetstream/stream.py @@ -16,7 +16,7 @@ import re from dataclasses import dataclass, asdict, field -from datetime import datetime, timedelta +from datetime import datetime from enum import Enum from types import NotImplementedType from typing import ( @@ -171,10 +171,10 @@ class StreamConfig: ) """DiscardNewPerSubject is a flag to enable discarding new messages per subject when limits are reached. 
Requires DiscardPolicy to be DiscardNew and the MaxMsgsPerSubject to be set.""" - max_age: timedelta = field(metadata={'json': 'max_age'}) + max_age: int = field(default=-1, metadata={'json': 'max_age'}) """MaxAge is the maximum age of messages that the stream will retain.""" - max_msgs_per_subject: int = field( + max_msgs_per_subject: int = field(default=-1, metadata={'json': 'max_msgs_per_subject'} ) """MaxMsgsPerSubject is the maximum number of messages per subject that the stream will retain.""" @@ -184,7 +184,7 @@ class StreamConfig: ) """MaxMsgSize is the maximum size of any single message in the stream.""" - storage: StorageType = field(metadata={'json': 'storage'}) + storage: StorageType = field(default=StorageType.MEMORY, metadata={'json': 'storage'}) """Storage specifies the type of storage backend used for the stream (file or memory).""" replicas: int = field(default=1, metadata={'json': 'num_replicas'}) @@ -193,7 +193,7 @@ class StreamConfig: no_ack: Optional[bool] = field(default=None, metadata={'json': 'no_ack'}) """NoAck is a flag to disable acknowledging messages received by this stream. If set to true, publish methods from the JetStream client will not work as expected, since they rely on acknowledgements. Core NATS publish methods should be used instead. Note that this will make message delivery less reliable.""" - duplicates: Optional[timedelta] = field( + duplicates: Optional[int] = field( default=None, metadata={'json': 'duplicate_window'} ) """Duplicates is the window within which to track duplicate messages. If not set, server default is 2 minutes.""" @@ -284,7 +284,7 @@ class StreamSourceInfo: lag: int = field(metadata={'json': 'lag'}) """Lag informs how many messages behind the source/mirror operation is. 
This will only show correctly if there is active communication with stream/mirror.""" - active: timedelta = field(metadata={'json': 'active'}) + active: int = field(metadata={'json': 'active'}) """Active informs when last the mirror or sourced stream had activity. Value will be -1 when there has been no activity.""" filter_subject: Optional[str] = field( @@ -374,7 +374,7 @@ class PeerInfo: current: bool = field(metadata={'json': 'current'}) """Indicates if the peer is up to date and synchronized with the leader.""" - active: timedelta = field(metadata={'json': 'active'}) + active: int = field(metadata={'json': 'active'}) """The duration since this peer was last seen.""" offline: Optional[bool] = field(default=None, metadata={'json': 'offline'}) @@ -494,7 +494,7 @@ class StreamConsumerLimits: be overridden on a per consumer basis. """ - inactive_threshold: Optional[timedelta] = field( + inactive_threshold: Optional[int] = field( default=None, metadata={'json': 'inactive_threshold'} ) """A duration which instructs the server to clean up the consumer if it has been inactive for the specified duration.""" diff --git a/tests/test_jetstream_stream_manager.py b/tests/test_jetstream_stream_manager.py new file mode 100644 index 00000000..f586f2c7 --- /dev/null +++ b/tests/test_jetstream_stream_manager.py @@ -0,0 +1,145 @@ +import nats +import nats.jetstream + +from tests.utils import SingleServerTestCase, async_test + +class TestJetStreamStreamManager(SingleServerTestCase): + @async_test + async def test_create_stream_success(self): + nc = await nats.connect() + js = nc.jetstream.new(nc) + stream = await js.create_stream(StreamConfig("foo", ["FOO.123"])) + self.assertEqual(stream.name, "foo") + self.assertEqual(stream.subjects, ["FOO.123"]) + + @async_test + async def test_create_stream_with_metadata(self): + nc = await nats.connect() + js = nc.jetstream.new(nc) + metadata = {"foo": "bar", "name": "test"} + stream = await js.create_stream(StreamConfig("foo_meta", 
["FOO.meta"], metadata)) + self.assertEqual(stream.name, "foo_meta") + self.assertEqual(stream.subjects, ["FOO.meta"]) + self.assertEqual(stream.metadata, metadata) + + @async_test + async def test_create_stream_invalid_name(self): + nc = await nats.connect() + js = nc.jetstream.new(nc) + with self.assertRaises(InvalidStreamNameError): + await js.create_stream(StreamConfig("foo.123", ["FOO.123"])) + + @async_test + async def test_create_stream_name_required(self): + nc = await nats.connect() + js = nc.jetstream.new(nc) + with self.assertRaises(StreamNameRequiredError): + await js.create_stream(StreamConfig("", ["FOO.123"])) + + @async_test + async def test_create_stream_name_already_in_use(self): + nc = await nats.connect() + js = nc.jetstream.new(nc) + await js.create_stream(StreamConfig("foo", ["FOO.123"])) + with self.assertRaises(StreamNameAlreadyInUseError): + await js.create_stream(StreamConfig("foo", ["BAR.123"])) + + @async_test + async def test_update_stream_success(self): + nc = await nats.connect() + js = nc.jetstream.new(nc) + await js.create_stream(StreamConfig("foo", ["FOO.123"])) + stream = await js.update_stream(StreamConfig("foo", ["BAR.123"])) + info = await stream.info() + self.assertEqual(info.config.subjects, ["BAR.123"]) + + @async_test + async def test_update_stream_add_metadata(self): + nc = await nats.connect() + js = nc.jetstream.new(nc) + await js.create_stream(StreamConfig("foo", ["FOO.123"])) + metadata = {"foo": "bar", "name": "test"} + stream = await js.update_stream(StreamConfig("foo", ["BAR.123"], metadata)) + info = await stream.info() + self.assertEqual(info.config.subjects, ["BAR.123"]) + self.assertEqual(info.config.metadata, metadata) + + @async_test + async def test_update_stream_invalid_name(self): + nc = await nats.connect() + js = nc.jetstream.new(nc) + with self.assertRaises(InvalidStreamNameError): + await js.update_stream(StreamConfig("foo.123", ["FOO.123"])) + + @async_test + async def 
test_update_stream_name_required(self): + nc = await nats.connect() + js = nc.jetstream.new(nc) + with self.assertRaises(StreamNameRequiredError): + await js.update_stream(StreamConfig("", ["FOO.123"])) + + @async_test + async def test_update_stream_not_found(self): + nc = await nats.connect() + js = nc.jetstream.new(nc) + with self.assertRaises(StreamNotFoundError): + await js.update_stream(StreamConfig("bar", ["FOO.123"])) + + @async_test + async def test_get_stream_success(self): + nc = await nats.connect() + js = nc.jetstream.new(nc) + await js.create_stream(StreamConfig("foo", ["FOO.123"])) + stream = await js.stream("foo") + self.assertEqual(stream.cached_info().config.name, "foo") + + @async_test + async def test_get_stream_invalid_name(self): + nc = await nats.connect() + js = nc.jetstream.new(nc) + with self.assertRaises(InvalidStreamNameError): + await js.stream("foo.123") + + @async_test + async def test_get_stream_name_required(self): + nc = await nats.connect() + js = nc.jetstream.new(nc) + with self.assertRaises(StreamNameRequiredError): + await js.stream("") + + @async_test + async def test_get_stream_not_found(self): + nc = await nats.connect() + js = nc.jetstream.new(nc) + with self.assertRaises(StreamNotFoundError): + await js.stream("bar") + + @async_test + async def test_delete_stream_success(self): + nc = await nats.connect() + js = nc.jetstream.new(nc) + await js.create_stream(StreamConfig("foo", ["FOO.123"])) + await js.delete_stream("foo") + with self.assertRaises(StreamNotFoundError): + await js.stream("foo") + + @async_test + async def test_delete_stream_invalid_name(self): + nc = await nats.connect() + js = nc.jetstream.new(nc) + with self.assertRaises(InvalidStreamNameError): + await js.delete_stream("foo.123") + + @async_test + async def test_delete_stream_name_required(self): + nc = await nats.connect() + js = nc.jetstream.new(nc) + with self.assertRaises(StreamNameRequiredError): + await js.delete_stream("") + + @async_test + async 
def test_delete_stream_not_found(self): + nc = await nats.connect() + js = nc.jetstream.new(nc) + with self.assertRaises(StreamNotFoundError): + await js.delete_stream("bar") From c6dfab6f4c2d6643852ca8008b9e653a6158ba65 Mon Sep 17 00:00:00 2001 From: Casper Beyer Date: Tue, 30 Jul 2024 15:34:40 +0200 Subject: [PATCH 15/22] wip --- nats/jetstream/__init__.py | 29 +- nats/jetstream/api.py | 183 +--- nats/jetstream/consumer.py | 655 ++++++++---- nats/jetstream/context.py | 191 +++- nats/jetstream/errors.py | 98 -- nats/jetstream/message.py | 131 --- nats/jetstream/publish.py | 126 --- nats/jetstream/stream.py | 1350 +++++++++++------------- tests/test_jetstream.py | 805 ++++++++++++++ tests/test_jetstream_consumer.py | 340 +++++- tests/test_jetstream_errors.py | 4 - tests/test_jetstream_message.py | 0 tests/test_jetstream_publish.py | 4 - tests/test_jetstream_stream.py | 806 ++++++++++---- tests/test_jetstream_stream_manager.py | 145 --- tests/utils.py | 15 + 16 files changed, 3035 insertions(+), 1847 deletions(-) delete mode 100644 nats/jetstream/errors.py delete mode 100644 nats/jetstream/message.py delete mode 100644 nats/jetstream/publish.py create mode 100644 tests/test_jetstream.py delete mode 100644 tests/test_jetstream_errors.py delete mode 100644 tests/test_jetstream_message.py delete mode 100644 tests/test_jetstream_publish.py delete mode 100644 tests/test_jetstream_stream_manager.py diff --git a/nats/jetstream/__init__.py b/nats/jetstream/__init__.py index 064e1715..cbc5e3ce 100644 --- a/nats/jetstream/__init__.py +++ b/nats/jetstream/__init__.py @@ -1,28 +1,5 @@ -# Copyright 2016-2024 The NATS Authors -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# +from nats.jetstream.context import Context +from nats.aio.client import Client -from typing import Any - -import nats.aio.client - -from .api import Client -from .context import Context -from .stream import Stream, StreamConfig, StreamInfo, StreamState - - -async def new(client: nats.aio.client.Client) -> Context: +def new(client: Client) -> Context: return Context(client) - - -__all__ = ['new', 'Context', "Stream"] diff --git a/nats/jetstream/api.py b/nats/jetstream/api.py index ceff4603..35745a16 100644 --- a/nats/jetstream/api.py +++ b/nats/jetstream/api.py @@ -1,116 +1,44 @@ -# Copyright 2016-2024 The NATS Authors -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# - -from __future__ import annotations +import nats.aio.client +import nats.aio.msg import json -from dataclasses import MISSING, dataclass, field, fields, asdict, is_dataclass -from typing import ( - Any, - Dict, - Optional, - Protocol, - Self, - Type, - TypeVar, - get_args, - get_origin, -) - -import nats - -from .message import Msg - -T = TypeVar("T", bound="Base") +from typing import Optional, Any, Dict DEFAULT_PREFIX = "$JS.API" -INBOX_PREFIX = b'_INBOX.' - -@dataclass -class Base: - """ - Provides methods for converting to and from json. - """ - def to_dict(self, include_null=False) -> dict: - """ - Converts self to a dictionary. - """ - def factory(fields): - return {field.metadata.get("json", value.name): value for field, value in fields if value is not MISSING} - - return asdict( - self, - dict_factory=factory, +# Error codes +JETSTREAM_NOT_ENABLED_FOR_ACCOUNT = 10039 +JETSTREAM_NOT_ENABLED = 10076 +STREAM_NOT_FOUND = 10059 +STREAM_NAME_IN_USE = 10058 +CONSUMER_CREATE = 10012 +CONSUMER_NOT_FOUND = 10014 +CONSUMER_NAME_EXISTS = 10013 +CONSUMER_ALREADY_EXISTS = 10105 +CONSUMER_EXISTS = 10148 +DUPLICATE_FILTER_SUBJECTS = 10136 +OVERLAPPING_FILTER_SUBJECTS = 10138 +CONSUMER_EMPTY_FILTER = 10139 +CONSUMER_DOES_NOT_EXIST = 10149 +MESSAGE_NOT_FOUND = 10037 +BAD_REQUEST = 10003 +STREAM_WRONG_LAST_SEQUENCE = 10071 + +# TODO: What should we call this error type? +class JetStreamError(Exception): + code:str + description: str + + def __init__(self, code: str, description: str) -> None: + self.code = code + self.description = description + + def __str__(self) -> str: + return ( + f"nats: {type(self).__name__}: code={self.code} " + f"description='{self.description}'" ) - def as_json(self, include_null=False) -> str: - """Converts this to json. - """ - return json.dumps(self.to_dict(include_null)) - - @classmethod - def from_dict(cls: Type[T], json: dict) -> T: - """Constructs `this` from given json. Assumes camel case convention is used and converts to camel case. 
- - Args: - json (dict): Json dictionary - - Raises: - ValueError: When `this` isn't a dataclass - - Returns: - T: New instance - """ - if not is_dataclass(cls): - raise ValueError(f"{cls.__name__} must be a dataclass") - - field_names = {field.metadata.get("json", field.name) for field in fields(cls)} - kwargs = { - camel_to_snake(key): value - for key, value in json.items() - if camel_to_snake(key) in field_names - } - return cls(**kwargs) - -@dataclass -class Request(Base): - pass - -@dataclass -class Paged(Base): - total: int = field(default=0, metadata={"json": "total"}) - offset: int = field(default=0, metadata={"json": "offset"}) - limit: int = field(default=0, metadata={"json": "limit"}) - - -@dataclass -class Error(Exception): - code: Optional[int] = field(default=None, metadata={"json": "code"}) - error_code: Optional[int] = field( - default=None, metadata={"json": "err_code"} - ) - description: Optional[str] = field( - default=None, metadata={"json": "description"} - ) - - -@dataclass -class Response(Base): - type: str - error: Optional[Error] = field(default=None) - class Client: """ Provides methods for sending requests and processing responses via JetStream. 
@@ -118,13 +46,13 @@ class Client: def __init__( self, - inner: Any, - timeout: float = 1.0, + inner: nats.aio.client.Client, + timeout: float = 2.0, prefix: str = DEFAULT_PREFIX ) -> None: self.inner = inner self.timeout = timeout - self.prefix = None + self.prefix = prefix async def request( self, @@ -132,11 +60,11 @@ async def request( payload: bytes, timeout: Optional[float] = None, headers: Optional[Dict[str, str]] = None - ) -> nats.Msg: + ) -> nats.aio.msg.Msg: if timeout is None: timeout = self.timeout - return self.inner.request(subject, payload, timeout=timeout) + return await self.inner.request(subject, payload, timeout=timeout) # TODO return `jetstream.Msg` async def request_msg( @@ -144,26 +72,25 @@ async def request_msg( subject: str, payload: bytes, timeout: Optional[float] = None, - ) -> Msg: - if timeout is None: - timeout = self.timeout - - return self.inner.request(subject, payload, timeout=timeout) + ) -> nats.aio.msg.Msg: + return await self.inner.request(subject, payload, timeout=timeout or self.timeout) async def request_json( - self, subject: str, data: Request, response_type: Type[T], + self, subject: str, data: Any, timeout: float | None, - return_exceptions: bool = False, - ) -> T: - if self.prefix is not None: - subject = f"{self.prefix}.{subject}" - - if timeout is None: - timeout = self.timeout - - request_payload = data.as_json() + ) -> Dict[str, Any]: + request_subject = f"{self.prefix}.{subject}" + request_data = json.dumps(data).encode("utf-8") response = await self.inner.request( - subject, request_payload, timeout=timeout + request_subject, request_data, timeout or self.timeout ) - return response_type.from_json(response.data) + response_data = json.loads(response.data.decode("utf-8")) + response_error = response_data.get("error") + if response_error: + raise JetStreamError( + code=response_error["err_code"], + description=response_error["description"], + ) + + return response_data diff --git a/nats/jetstream/consumer.py 
b/nats/jetstream/consumer.py index 9a1ec43c..9bc1bac0 100644 --- a/nats/jetstream/consumer.py +++ b/nats/jetstream/consumer.py @@ -1,24 +1,19 @@ -# Copyright 2016-2024 The NATS Authors -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# - from __future__ import annotations -from dataclasses import dataclass, field -from datetime import datetime, timedelta +import random +import hashlib +import string + +from datetime import datetime from enum import Enum -from typing import Dict, List, Optional +from typing import AsyncIterable, Optional, Literal, List, Protocol, Dict, Any, AsyncIterator, AsyncIterable +from dataclasses import dataclass, field +from nats.jetstream.api import CONSUMER_NOT_FOUND, Client, JetStreamError + +CONSUMER_CREATE_ACTION = "create" +CONSUMER_UPDATE_ACTION = "update" +CONSUMER_CREATE_OR_UPDATE_ACTION = "" class DeliverPolicy(Enum): """ @@ -42,7 +37,6 @@ class DeliverPolicy(Enum): LAST_PER_SUBJECT = "last_per_subject" """DeliverLastPerSubjectPolicy will start the consumer with the last message for all subjects received.""" - class AckPolicy(Enum): """ AckPolicy determines how the consumer should acknowledge delivered messages. @@ -74,345 +68,558 @@ class SequenceInfo: """ SequenceInfo has both the consumer and the stream sequence and last activity. 
""" - consumer: int = field(metadata={'json': 'consumer_seq'}) + consumer: int """Consumer sequence number.""" - stream: int = field(metadata={'json': 'stream_seq'}) + stream: int """Stream sequence number.""" - last_active: Optional[datetime] = field( - default=None, metadata={'json': 'last_active'} - ) + last_active: Optional[datetime] = None """Last activity timestamp.""" + @classmethod + def from_dict(cls, data: Dict[str, Any]) -> SequenceInfo: + return cls( + consumer=data['consumer_seq'], + stream=data['stream_seq'], + last_active=datetime.fromtimestamp(data['last_active']) if data.get('last_active') else None + ) + +@dataclass +class PeerInfo: + """ + PeerInfo shows information about the peers in the cluster that are + supporting the stream or consumer. + """ + + name: str + """The server name of the peer.""" + + current: bool + """Indicates if the peer is up to date and synchronized with the leader.""" + + active: int + """The duration since this peer was last seen.""" + + offline: Optional[bool] = None + """Indicates if the peer is considered offline by the group.""" + + lag: Optional[int] = None + """The number of uncommitted operations this peer is behind the leader.""" + + @classmethod + def from_dict(cls, data: Dict[str, Any]) -> PeerInfo: + return cls( + name=data['name'], + current=data['current'], + active=data['active'], + offline=data.get('offline', None), + lag=data.get('lag', None) + ) + +@dataclass +class ClusterInfo: + """ + ClusterInfo shows information about the underlying set of servers that + make up the stream or consumer. 
+ """ + + name: Optional[str] = None + """Name is the name of the cluster.""" + + leader: Optional[str] = None + """Leader is the server name of the RAFT leader.""" + + replicas: List[PeerInfo] = field( + default_factory=list + ) + """Replicas is the list of members of the RAFT cluster.""" + + @classmethod + def from_dict(cls, data: Dict[str, Any]) -> ClusterInfo: + return cls( + name=data.get('name'), + leader=data.get('leader'), + replicas=[PeerInfo.from_dict(replica) for replica in data.get('replicas', [])] + ) @dataclass class ConsumerConfig: """ ConsumerConfig is the configuration of a JetStream consumer. """ - name: Optional[str] = field(default=None, metadata={'json': 'name'}) + name: Optional[str] = None """Optional name for the consumer.""" - durable: Optional[str] = field( - default=None, metadata={'json': 'durable_name'} - ) + durable: Optional[str] = None """Optional durable name for the consumer.""" - description: Optional[str] = field( - default=None, metadata={'json': 'description'} - ) + description: Optional[str] = None """Optional description of the consumer.""" - deliver_policy: DeliverPolicy = field( - default=DeliverPolicy.ALL, metadata={'json': 'deliver_policy'} - ) + deliver_policy: Optional[DeliverPolicy] = None """Defines from which point to start delivering messages from the stream. Defaults to DeliverAllPolicy.""" - opt_start_seq: Optional[int] = field( - default=None, metadata={'json': 'opt_start_seq'} - ) + opt_start_seq: Optional[int] = None """Optional sequence number from which to start message delivery.""" - opt_start_time: Optional[datetime] = field( - default=None, metadata={'json': 'opt_start_time'} - ) + opt_start_time: Optional[datetime] = None """Optional time from which to start message delivery.""" - ack_policy: AckPolicy = field( - default=AckPolicy.EXPLICIT, metadata={'json': 'ack_policy'} - ) + ack_policy: Optional[AckPolicy] = None """Defines the acknowledgement policy for the consumer. 
Defaults to AckExplicitPolicy.""" - ack_wait: Optional[timedelta] = field( - default=None, metadata={'json': 'ack_wait'} - ) + ack_wait: Optional[int] = None """How long the server will wait for an acknowledgement before resending a message.""" - max_deliver: Optional[int] = field( - default=None, metadata={'json': 'max_deliver'} - ) + max_deliver: Optional[int] = None """Maximum number of delivery attempts for a message.""" - backoff: Optional[List[timedelta]] = field( - default=None, metadata={'json': 'backoff'} - ) + backoff: Optional[List[int]] = None """Optional back-off intervals for retrying message delivery after a failed acknowledgement.""" - filter_subject: Optional[str] = field( - default=None, metadata={'json': 'filter_subject'} - ) + filter_subject: Optional[str] = None """Can be used to filter messages delivered from the stream.""" - replay_policy: ReplayPolicy = field( - default=ReplayPolicy.INSTANT, metadata={'json': 'replay_policy'} - ) + replay_policy: Optional[ReplayPolicy] = None """Defines the rate at which messages are sent to the consumer.""" - rate_limit: Optional[int] = field( - default=None, metadata={'json': 'rate_limit_bps'} - ) + rate_limit: Optional[int] = None """Optional maximum rate of message delivery in bits per second.""" - sample_frequency: Optional[str] = field( - default=None, metadata={'json': 'sample_freq'} - ) + sample_frequency: Optional[str] = None """Optional frequency for sampling how often acknowledgements are sampled for observability.""" - max_waiting: Optional[int] = field( - default=None, metadata={'json': 'max_waiting'} - ) + max_waiting: Optional[int] = None """Maximum number of pull requests waiting to be fulfilled.""" - max_ack_pending: Optional[int] = field( - default=None, metadata={'json': 'max_ack_pending'} - ) + max_ack_pending: Optional[int] = None """Maximum number of outstanding unacknowledged messages.""" - headers_only: Optional[bool] = field( - default=None, metadata={'json': 'headers_only'} - ) + 
headers_only: Optional[bool] = None """Indicates whether only headers of messages should be sent.""" - max_request_batch: Optional[int] = field( - default=None, metadata={'json': 'max_batch'} - ) + max_request_batch: Optional[int] = None """Optional maximum batch size a single pull request can make.""" - max_request_expires: Optional[timedelta] = field( - default=None, metadata={'json': 'max_expires'} - ) + max_request_expires: Optional[int] = None """Maximum duration a single pull request will wait for messages to be available to pull.""" - max_request_max_bytes: Optional[int] = field( - default=None, metadata={'json': 'max_bytes'} - ) + max_request_max_bytes: Optional[int] = None """Optional maximum total bytes that can be requested in a given batch.""" - inactive_threshold: Optional[timedelta] = field( - default=None, metadata={'json': 'inactive_threshold'} - ) + inactive_threshold: Optional[int] = None """Duration which instructs the server to clean up the consumer if it has been inactive.""" - replicas: Optional[int] = field(default=None, metadata={'json': 'num_replicas'}) + replicas: Optional[int] = None """Number of replicas for the consumer's state.""" - memory_storage: Optional[bool] = field( - default=None, metadata={'json': 'mem_storage'} - ) + memory_storage: Optional[bool] = None """Flag to force the consumer to use memory storage.""" - filter_subjects: Optional[List[str]] = field( - default=None, metadata={'json': 'filter_subjects'} - ) + filter_subjects: Optional[List[str]] = None """Allows filtering messages from a stream by subject.""" - metadata: Optional[Dict[str, str]] = field( - default=None, metadata={'json': 'metadata'} - ) + metadata: Optional[Dict[str, str]] = None """Set of application-defined key-value pairs for associating metadata on the consumer.""" + @classmethod + def from_dict(cls, data: Dict[str, Any]) -> ConsumerConfig: + return cls( + name=data.get('name'), + durable=data.get('durable'), + description=data.get('description'), + 
deliver_policy=DeliverPolicy(data.get('deliver_policy')) if data.get('deliver_policy') else None, + opt_start_seq=data.get('opt_start_seq'), + opt_start_time=datetime.fromisoformat(data['opt_start_time']) if data.get('opt_start_time') else None, + ack_policy=AckPolicy(data.get('ack_policy')) if data.get('ack_policy') else None, + ack_wait=data.get('ack_wait'), + max_deliver=data.get('max_deliver'), + backoff=data.get('backoff'), + filter_subject=data.get('filter_subject'), + replay_policy=ReplayPolicy(data.get('replay_policy')) if data.get('replay_policy') else None, + rate_limit=data.get('rate_limit'), + sample_frequency=data.get('sample_frequency'), + max_waiting=data.get('max_waiting'), + max_ack_pending=data.get('max_ack_pending'), + headers_only=data.get('headers_only'), + max_request_batch=data.get('max_request_batch'), + max_request_expires=data.get('max_request_expires'), + max_request_max_bytes=data.get('max_request_max_bytes'), + inactive_threshold=data.get('inactive_threshold'), + replicas=data.get('replicas'), + memory_storage=data.get('memory_storage'), + filter_subjects=data.get('filter_subjects'), + metadata=data.get('metadata') + ) + + def to_dict(self) -> Dict[str, Any]: + return {key: value for key, value in { + 'name': self.name, + 'durable_name': self.durable, + 'description': self.description, + 'deliver_policy': self.deliver_policy, + 'opt_start_seq': self.opt_start_seq, + 'opt_start_time': self.opt_start_time, + 'ack_policy': self.ack_policy.value if self.ack_policy else None, + 'ack_wait': self.ack_wait, + 'max_deliver': self.max_deliver, + 'backoff': self.backoff, + 'filter_subject': self.filter_subject, + 'replay_policy': self.replay_policy, + 'rate_limit': self.rate_limit, + 'sample_frequency': self.sample_frequency, + 'max_waiting': self.max_waiting, + 'max_ack_pending': self.max_ack_pending, + 'headers_only': self.headers_only, + 'max_request_batch': self.max_request_batch, + 'max_request_expires': self.max_request_expires, + 
'max_request_max_bytes': self.max_request_max_bytes, + 'inactive_threshold': self.inactive_threshold, + 'replicas': self.replicas, + 'memory_storage': self.memory_storage, + 'filter_subjects': self.filter_subjects, + 'metadata': self.metadata + }.items() if value is not None} + @dataclass class ConsumerInfo: """ ConsumerInfo is the detailed information about a JetStream consumer. """ - stream: str = field(metadata={'json': 'stream_name'}) - """Name of the stream that the consumer is bound to.""" - - name: str = field(metadata={'json': 'name'}) + name: str """Unique identifier for the consumer.""" - created: datetime = field(metadata={'json': 'created'}) + stream_name: str + """Name of the stream that the consumer is bound to.""" + + created: datetime """Timestamp when the consumer was created.""" - config: ConsumerConfig = field(metadata={'json': 'config'}) + config: ConsumerConfig """Configuration settings of the consumer.""" - delivered: SequenceInfo = field(metadata={'json': 'delivered'}) + delivered: SequenceInfo """Information about the most recently delivered message.""" - ack_floor: SequenceInfo = field(metadata={'json': 'ack_floor'}) + ack_floor: SequenceInfo """Indicates the message before the first unacknowledged message.""" - num_ack_pending: int = field(metadata={'json': 'num_ack_pending'}) + num_ack_pending: int """Number of messages that have been delivered but not yet acknowledged.""" - num_redelivered: int = field(metadata={'json': 'num_redelivered'}) + num_redelivered: int """Counts the number of messages that have been redelivered and not yet acknowledged.""" - num_waiting: int = field(metadata={'json': 'num_waiting'}) + num_waiting: int """Count of active pull requests.""" - num_pending: int = field(metadata={'json': 'num_pending'}) + num_pending: int """Number of messages that match the consumer's filter but have not been delivered yet.""" - timestamp: datetime = field(metadata={'json': 'ts'}) + timestamp: datetime """Timestamp when the info was 
gathered by the server.""" - push_bound: bool = field(default=False, metadata={'json': 'push_bound'}) + push_bound: bool """Indicates whether at least one subscription exists for the delivery subject of this consumer.""" - cluster: Optional[ClusterInfo] = field( - default=None, metadata={'json': 'cluster'} - ) + cluster: Optional[ClusterInfo] = None """Information about the cluster to which this consumer belongs.""" + @classmethod + def from_dict(cls, data: Dict[str, Any]) -> ConsumerInfo: + return cls( + name=data['name'], + stream_name=data['stream_name'], + created=data['created'], + config=ConsumerConfig.from_dict(data['config']), + delivered=SequenceInfo.from_dict(data['delivered']), + ack_floor=SequenceInfo.from_dict(data['ack_floor']), + num_ack_pending=data['num_ack_pending'], + num_redelivered=data['num_redelivered'], + num_waiting=data['num_waiting'], + num_pending=data['num_pending'], + timestamp=datetime.fromisoformat(data['ts']), + push_bound=data.get('push_bound', False), + cluster=ClusterInfo.from_dict(data['cluster']) if 'cluster' in data else None + ) @dataclass class OrderedConsumerConfig: - """ - OrderedConsumerConfig is the configuration of an ordered JetStream consumer. 
- """ - filter_subjects: Optional[List[str]] = field( - default=None, metadata={'json': 'filter_subjects'} - ) - """Allows filtering messages from a stream by subject.""" + filter_subjects: List[str] = field(default_factory=list) + deliver_policy: Optional[DeliverPolicy] = None + opt_start_seq: Optional[int] = None + opt_start_time: Optional[datetime] = None + replay_policy: Optional[ReplayPolicy] = None + inactive_threshold: int = 5_000_000_000 # 5 seconds in nanoseconds + headers_only: bool = False + max_reset_attempts: Optional[int] = None + + def to_dict(self) -> Dict[str, Any]: + def convert(value): + if isinstance(value, Enum): + return value.name + elif isinstance(value, datetime): + return value.isoformat() + return value + + result = { + "filter_subjects": self.filter_subjects, + "deliver_policy": self.deliver_policy.value if self.deliver_policy else None, + "replay_policy": self.replay_policy.value if self.replay_policy else None, + "headers_only": self.headers_only, + "inactive_threshold": self.inactive_threshold, + } + + if self.opt_start_seq is not None: + result["opt_start_seq"] = self.opt_start_seq + if self.opt_start_time is not None: + result["opt_start_time"] = self.opt_start_time + if self.max_reset_attempts is not None: + result["max_reset_attempts"] = self.max_reset_attempts + + return result + + @classmethod + def from_dict(cls, data: Dict[str, Any]) -> OrderedConsumerConfig: + kwargs = data.copy() + return cls(**kwargs) + +class ConsumerNotFoundError(Exception): + pass - deliver_policy: DeliverPolicy = field(metadata={'json': 'deliver_policy'}) - """Defines from which point to start delivering messages from the stream.""" +class ConsumerNameRequiredError(ValueError): + pass - opt_start_seq: Optional[int] = field( - default=None, metadata={'json': 'opt_start_seq'} - ) - """Optional sequence number from which to start message delivery.""" +class InvalidConsumerNameError(ValueError): + pass - opt_start_time: Optional[datetime] = field( - 
default=None, metadata={'json': 'opt_start_time'} - ) - """Optional time from which to start message delivery.""" +class ConsumerExistsError(Exception): + pass - replay_policy: ReplayPolicy = field(metadata={'json': 'replay_policy'}) - """Defines the rate at which messages are sent to the consumer.""" +class ConsumerMultipleFilterSubjectsNotSupportedError(Exception): + pass - inactive_threshold: Optional[timedelta] = field( - default=None, metadata={'json': 'inactive_threshold'} - ) - """Duration which instructs the server to clean up the consumer if it has been inactive.""" +class Consumer(Protocol): + @property + def cached_info(self) -> ConsumerInfo: + ... - headers_only: Optional[bool] = field( - default=None, metadata={'json': 'headers_only'} - ) - """Indicates whether only headers of messages should be sent.""" +class MessageBatch: + pass - max_reset_attempts: Optional[int] = field( - default=None, metadata={'json': 'max_reset_attempts'} - ) - """Maximum number of attempts for the consumer to be recreated in a single recreation cycle.""" +class PullConsumer(Consumer): + def __init__(self, client: Client, stream: str, name: str, info: ConsumerInfo): + self._client = client + self._stream = stream + self._name = name + self._cached_info = info + @property + def cached_info(self) -> ConsumerInfo: + return self._cached_info -class Consumer: - async def info(self) -> ConsumerInfo: - """ - Returns detailed information about the consumer. 
- """ - raise NotImplementedError + def fetch_bytes(self, max_bytes: int) -> MessageBatch: + return MessageBatch() -class PullConsumer(Consumer): - raise NotImplementedError + def _fetch(self) -> MessageBatch: + return MessageBatch() +class ConsumerInfoLister(AsyncIterable): + def __init__(self, client: Client) -> None: + self._client = client -class OrderedConsumer(Consumer): - raise NotImplementedError + def __aiter__(self) -> AsyncIterator[ConsumerInfo]: + raise NotImplementedError +class ConsumerNameLister(AsyncIterable): + def __aiter__(self) -> AsyncIterator[str]: + raise NotImplementedError -class StreamConsumerManager: - """ - Provides methods for directly managing consumers. - """ +class StreamConsumerManager(Protocol): + async def create_consumer(self, stream: str, config: ConsumerConfig, timeout: Optional[float] = None) -> Consumer: + """ + Creates a consumer on a given stream with given config. If consumer already exists + and the provided configuration differs from its configuration, ErrConsumerExists is raised. + If the provided configuration is the same as the existing consumer, the existing consumer + is returned. Consumer interface is returned, allowing to operate on a consumer (e.g. fetch messages). + """ + ... - async def create_or_update_consumer( - self, - stream: str, - config: ConsumerConfig, - timeout: Optional[float] = None - ) -> Consumer: + async def update_consumer(self, stream: str, config: ConsumerConfig, timeout: Optional[float] = None) -> Consumer: """ - CreateOrUpdateConsumer creates a consumer on a given stream with - given config. If consumer already exists, it will be updated (if - possible). Consumer interface is returned, allowing to operate on a - consumer (e.g. fetch messages). + Updates an existing consumer. If consumer does not exist, ErrConsumerDoesNotExist is raised. + Consumer interface is returned, allowing to operate on a consumer (e.g. fetch messages). """ - raise NotImplementedError + ... 
- async def create_consumer( - self, - stream: str, - config: ConsumerConfig, - timeout: Optional[float] = None - ) -> Consumer: + async def create_or_update_consumer(self, stream: str, config: ConsumerConfig, timeout: Optional[float] = None) -> Consumer: """ - CreateConsumer creates a consumer on a given stream with given - config. If consumer already exists and the provided configuration - differs from its configuration, ErrConsumerExists is returned. If the - provided configuration is the same as the existing consumer, the - existing consumer is returned. Consumer interface is returned, - allowing to operate on a consumer (e.g. fetch messages). + Creates a consumer on a given stream with given config. If consumer already exists, + it will be updated (if possible). Consumer interface is returned, allowing to operate + on a consumer (e.g. fetch messages). """ - raise NotImplementedError + ... - async def update_consumer( - self, - stream: str, - config: ConsumerConfig, - timeout: Optional[float] = None - ) -> Consumer: + async def consumer(self, stream: str, consumer: str, timeout: Optional[float] = None) -> Consumer: """ - Updates an existing consumer. + Returns an interface to an existing consumer, allowing processing of messages. + If consumer does not exist, ErrConsumerNotFound is raised. + """ + ... - If consumer does not exist, an error is raised. + async def delete_consumer(self, stream: str, consumer: str, timeout: Optional[float] = None) -> None: """ - raise NotImplementedError + Removes a consumer with given name from a stream. + If consumer does not exist, `ConsumerNotFoundError` is raised. + """ + ... - async def ordered_consumer( - self, - stream: str, - config: OrderedConsumerConfig, - timeout: Optional[float] = None - ) -> Consumer: +class ConsumerManager(Protocol): + async def create_consumer(self, config: ConsumerConfig, timeout: Optional[float] = None) -> Consumer: + """ + Creates a consumer on a given stream with given config. 
If consumer already exists + and the provided configuration differs from its configuration, `ConsumerExists` is raised. + If the provided configuration is the same as the existing consumer, the existing consumer + is returned. """ - Returns returns an instance of an ordered consumer. + ... - Ordered consumers are managed by the library and provide a simple way to consume - messages from a stream. + async def update_consumer(self, config: ConsumerConfig, timeout: Optional[float] = None) -> Consumer: + """ + Updates an existing consumer. If consumer does not exist, `ConsumerNotFound` is raised. + Consumer interface is returned, allowing to operate on a consumer (e.g. fetch messages). + """ + ... - Ordered consumers are ephemeral in-memory pull consumers and are resilient to deletes and restarts. + async def create_or_update_consumer(self, config: ConsumerConfig, timeout: Optional[float] = None) -> Consumer: """ - raise NotImplementedError + Creates a consumer on a given stream with given config. If consumer already exists, + it will be updated (if possible). + """ + ... - async def consumer( - self, - stream: str, - consumer: str, - timeout: Optional[float] = None - ) -> Consumer: + async def consumer(self, consumer: str, timeout: Optional[float] = None) -> Consumer: """ - Returns an instance of an existing consumer, allowing processing of messages. + Returns an interface to an existing consumer, allowing processing of messages. - If consumer does not exist, an error is raised. + If the consumer does not exist, `ConsumerNotFoundError` is raised. """ - raise NotImplementedError + ... - async def delete_consumer( - self, - stream: str, - consumer: str, - timeout: Optional[float] = None - ) -> None: + async def delete_consumer(self, consumer: str, timeout: Optional[float] = None) -> None: """ Removes a consumer with given name from a stream. - If consumer does not exist, an error is raised. + + If the consumer does not exist, `ConsumerNotFoundError` is raised. 
""" - raise NotImplementedError + ... -@dataclass -class CreateConsumerRequest(Request): - stream: str = field(metadata={'json': 'stream_name'}) - config: ConsumerConfig = field(metadata={'json': 'config'}) - action: str = field(metadata={'json': 'action'}) + def list_consumers(self) -> ConsumerInfoLister: + """ + Returns ConsumerInfoLister enabling iterating over a channel of consumer infos. + """ + ... -class ConsumerInfoResponse(Response, ConsumerInfo): - pass + def consumer_names(self) -> ConsumerNameLister: + """ + Returns a ConsumerNameLister enabling iterating over a channel of consumer names. + """ + ... + + +def _generate_consumer_name() -> str: + name = ''.join(random.choices(string.ascii_letters + string.digits, k=16)) + sha = hashlib.sha256(name.encode()).digest() + return ''.join(string.ascii_lowercase[b % 26] for b in sha[:8]) + +async def _upsert_consumer(client: Client, stream: str, config: ConsumerConfig, action: str, timeout: Optional[float] = None) -> Consumer: + consumer_name = config.name + if not consumer_name: + if config.durable: + consumer_name = config.durable + else: + consumer_name = _generate_consumer_name() + + _validate_consumer_name(consumer_name) + + if config.filter_subject and not config.filter_subjects: + create_consumer_subject = f"CONSUMER.CREATE.{stream}.{consumer_name}.{config.filter_subject}" + else: + create_consumer_subject = f"CONSUMER.CREATE.{stream}.{consumer_name}" + + create_consumer_request = { + 'stream_name': stream, + 'config': config.to_dict(), + 'action': action + } + + create_consumer_response = await client.request_json(create_consumer_subject, create_consumer_request, timeout=timeout) + + info = ConsumerInfo.from_dict(create_consumer_response) + if config.filter_subjects and not info.config.filter_subjects: + raise ConsumerMultipleFilterSubjectsNotSupportedError() + + # TODO support more than just pull consumers + return PullConsumer( + client=client, + name=consumer_name, + stream=stream, + info=info, + ) + 
+async def _create_consumer(client: Client, stream: str, config: ConsumerConfig, timeout: Optional[float] = None) -> Consumer: + return await _upsert_consumer(client, stream=stream, config=config, action=CONSUMER_CREATE_ACTION, timeout=timeout) + +async def _update_consumer(client: Client, stream: str, config: ConsumerConfig, timeout: Optional[float] = None) -> Consumer: + return await _upsert_consumer(client, stream=stream, config=config, action=CONSUMER_UPDATE_ACTION, timeout=timeout) + +async def _create_or_update_consumer(client: Client, stream: str, config: ConsumerConfig, timeout: Optional[float] = None) -> Consumer: + return await _upsert_consumer(client, stream=stream, config=config, action=CONSUMER_CREATE_OR_UPDATE_ACTION, timeout=timeout) + + +async def _get_consumer(client: Client, stream: str, name: str, timeout: Optional[float] = None) -> 'Consumer': + _validate_consumer_name(name) + consumer_info_request = {} + consumer_info_subject = f"CONSUMER.INFO.{stream}.{name}" + + try: + consumer_info_response = await client.request_json(consumer_info_subject, consumer_info_request, timeout=timeout) + except JetStreamError as jetstream_error: + if jetstream_error.code == CONSUMER_NOT_FOUND: + raise ConsumerNotFoundError from jetstream_error + + raise jetstream_error + + info = ConsumerInfo.from_dict(consumer_info_response) + + return PullConsumer( + client=client, + stream=stream, + name=name, + info=info, + ) + +async def _delete_consumer(client: Client, stream: str, consumer: str, timeout: Optional[float] = None) -> None: + _validate_consumer_name(consumer) + + delete_consumer_request = {} + delete_consumer_subject = f"CONSUMER.DELETE.{stream}.{consumer}" + + try: + delete_response = await client.request_json(delete_consumer_subject, delete_consumer_request, timeout=timeout) + except JetStreamError as jetstream_error: + if jetstream_error.code == CONSUMER_NOT_FOUND: + raise ConsumerNotFoundError() + + raise jetstream_error + +def _validate_consumer_name(name: 
str) -> None: + if not name: + raise ConsumerNameRequiredError() + + if any(c in name for c in ">*. /\\"): + raise InvalidConsumerNameError() diff --git a/nats/jetstream/context.py b/nats/jetstream/context.py index 25ccbe96..4462b23e 100644 --- a/nats/jetstream/context.py +++ b/nats/jetstream/context.py @@ -14,34 +14,195 @@ from typing import Any, Type, TypeVar -from .api import Client -from .publish import Publisher -from .stream import StreamManager +from nats.aio.client import Client as NATS +from typing import Optional +from .api import * +from .stream import (Stream, StreamConfig, StreamInfo, StreamInfoLister, StreamManager, StreamNameAlreadyInUseError, StreamNameLister, StreamNotFoundError, StreamSourceMultipleFilterSubjectsNotSupported, StreamSourceNotSupportedError, StreamSubjectTransformNotSupportedError, _validate_stream_name) +from .consumer import * class Context( - Publisher, + # Publisher, StreamManager, # StreamConsumerManager, - # KeyValueManager, - # ObjectStoreManager ): """ Provides a context for interacting with JetStream. The capabilities of JetStream include: - Publishing messages to a stream using `Publisher`. - - Managing streams using `StreamManager`. - - Managing consumers using `StreamConsumerManager`. - - Managing key value stores using `KeyValueManager`. - - Managing object stores using `ObjectStoreManager`. + - Managing streams using the `StreamManager` protocol. + - Managing consumers using the `StreamConsumerManager` protocol. """ - def __init__(self, connection: Any, timeout: float = 2): - client = Client( - connection, + def __init__(self, nats: NATS, timeout: float = 2.0): + self._client = Client( + nats, timeout=timeout, ) - Publisher.__init__(self, client) - StreamManager.__init__(self, client) + async def create_stream( + self, config: StreamConfig, timeout: Optional[float] = None + ) -> Stream: + """ + Creates a new stream with given config. 
+ """ + + stream_create_subject = f"STREAM.CREATE.{config.name}" + stream_create_request = config.to_dict() + try: + stream_create_response = await self._client.request_json( + stream_create_subject, + stream_create_request, + timeout=timeout + ) + except JetStreamError as jetstream_error: + if jetstream_error.code == STREAM_NAME_IN_USE: + raise StreamNameAlreadyInUseError() from jetstream_error + + raise jetstream_error + + info = StreamInfo.from_dict(stream_create_response) + + # Check if subject transforms are supported + if config.subject_transform and not info.config.subject_transform: + raise StreamSubjectTransformNotSupportedError() + + # Check if sources and subject transforms are supported + if config.sources: + if not info.config.sources: + raise StreamSourceNotSupportedError() + + for i in range(len(config.sources)): + source = config.sources[i] + response_source = config.sources[i] + + if source.subject_transforms and not response_source.subject_transforms: + raise StreamSourceMultipleFilterSubjectsNotSupported() + + return Stream( + client=self._client, + name=info.config.name, + info=info, + ) + + async def update_stream( + self, config: StreamConfig, timeout: Optional[float] = None + ) -> Stream: + """ + Updates an existing stream with the given config. 
+ """ + + stream_create_subject = f"STREAM.UPDATE.{config.name}" + stream_create_request = config.to_dict() + try: + stream_create_response = await self._client.request_json( + stream_create_subject, + stream_create_request, + timeout=timeout + ) + except JetStreamError as jetstream_error: + if jetstream_error.code == STREAM_NAME_IN_USE: + raise StreamNameAlreadyInUseError() from jetstream_error + + if jetstream_error.code == STREAM_NOT_FOUND: + raise StreamNotFoundError() from jetstream_error + + raise jetstream_error + + info = StreamInfo.from_dict(stream_create_response) + + # Check if subject transforms are supported + if config.subject_transform and not info.config.subject_transform: + raise StreamSubjectTransformNotSupportedError() + + # Check if sources and subject transforms are supported + if config.sources: + if not info.config.sources: + raise StreamSourceNotSupportedError() + + for i in range(len(config.sources)): + source = config.sources[i] + response_source = config.sources[i] + + if source.subject_transforms and not response_source.subject_transforms: + raise StreamSourceMultipleFilterSubjectsNotSupported() + + return Stream( + client=self._client, + name=info.config.name, + info=info, + ) + + async def create_or_update_stream( + self, config: StreamConfig, timeout: Optional[float] = None + ) -> Stream: + """Creates a stream with given config or updates it if it already exists.""" + try: + return await self.update_stream(config, timeout=timeout) + except StreamNotFoundError: + return await self.create_stream(config, timeout=timeout) + + async def stream( + self, name: str, timeout: Optional[float] = None + ) -> Stream: + """Fetches `StreamInfo` and returns a `Stream` instance for a given stream name.""" + _validate_stream_name(name) + + stream_info_subject = f"STREAM.INFO.{name}" + stream_info_request = {} + try: + stream_info_response = await self._client.request_json( + stream_info_subject, + stream_info_request, + timeout=timeout + ) + except 
JetStreamError as jetstream_error: + if jetstream_error.code == STREAM_NOT_FOUND: + raise StreamNotFoundError() from jetstream_error + + raise jetstream_error + + info = StreamInfo.from_dict(stream_info_response) + + return Stream( + client=self._client, + name=info.config.name, + info=info, + ) + + async def stream_name_by_subject( + self, subject: str, timeout: Optional[float] = None + ) -> str: + """Returns a stream name listening on a given subject.""" + raise NotImplementedError + + async def delete_stream( + self, name: str, timeout: Optional[float] = None + ) -> None: + """Removes a stream with given name.""" + _validate_stream_name(name) + + stream_delete_subject = f"STREAM.DELETE.{name}" + stream_delete_request = {} + try: + stream_delete_response = await self._client.request_json( + stream_delete_subject, + stream_delete_request, + timeout=timeout + ) + except JetStreamError as response_error: + if response_error.code == STREAM_NOT_FOUND: + raise StreamNotFoundError() from response_error + + raise response_error + + def list_streams(self, + timeout: Optional[float] = None + ) -> StreamInfoLister: + raise NotImplementedError + + + def stream_names(self, + timeout: Optional[float] = None) -> StreamNameLister: + raise NotImplementedError diff --git a/nats/jetstream/errors.py b/nats/jetstream/errors.py deleted file mode 100644 index 17cd7dbc..00000000 --- a/nats/jetstream/errors.py +++ /dev/null @@ -1,98 +0,0 @@ -# Copyright 2016-2024 The NATS Authors -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. -# - -from enum import Enum -from typing import Optional - - -class ErrorCode(Enum): - JETSTREAM_NOT_ENABLED_FOR_ACCOUNT = 10039 - JETSTREAM_NOT_ENABLED = 10076 - STREAM_NOT_FOUND = 10059 - STREAM_NAME_IN_USE = 10058 - CONSUMER_CREATE = 10012 - CONSUMER_NOT_FOUND = 10014 - CONSUMER_NAME_EXISTS = 10013 - CONSUMER_ALREADY_EXISTS = 10105 - CONSUMER_EXISTS = 10148 - DUPLICATE_FILTER_SUBJECTS = 10136 - OVERLAPPING_FILTER_SUBJECTS = 10138 - CONSUMER_EMPTY_FILTER = 10139 - CONSUMER_DOES_NOT_EXIST = 10149 - MESSAGE_NOT_FOUND = 10037 - BAD_REQUEST = 10003 - STREAM_WRONG_LAST_SEQUENCE = 10071 - - -class Error(Exception): - - def __init__( - self, message=None, code=None, error_code=None, description=None - ): - self.message = message - self.code = code - self.error_code = error_code - self.description = description - - def __str__(self) -> str: - return ( - f"nats: {type(self).__name__}: code={self.code} err_code={self.error_code} " - f"description='{self.description}'" - ) - - -class StreamNameAlreadyInUseError(Error): - pass - - -class StreamNotFoundError(Error): - - def __init__(self): - super().__init__() - - -class StreamSubjectTransformNotSupportedError(Error): - - def __init__(self): - super().__init__() - - - -class StreamSourceNotSupportedError(Error): - - def __init__(self): - super().__init__() - - -class StreamSourceMultipleFilterSubjectsNotSupported(Error): - - def __init__(self): - super().__init__() - - -class MsgNotFoundError(Error): - - def __init__(self): - super().__init__() - -class NoStreamResponseError(Error): - - def __init__(self): - super().__init__() - - -class InvalidResponseError(Error): - - def __init__(self): - super().__init__() diff --git a/nats/jetstream/message.py b/nats/jetstream/message.py deleted file mode 100644 index 5470f026..00000000 --- a/nats/jetstream/message.py +++ /dev/null @@ -1,131 +0,0 @@ -# Copyright 2016-2024 The NATS 
Authors -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# - -from dataclasses import dataclass, field -from enum import Enum - -import nats.aio.msg - - -class Header(str, Enum): - """ - Provides known headers that can be used to control message behavior. - """ - - MSG_ID = "Nats-Msg-Id" - """Used to specify a user-defined message ID. It can be used - e.g. for deduplication in conjunction with the Duplicates duration on - ConsumerConfig or to provide optimistic concurrency safety together with - ExpectedLastMsgID. - - This can be set when publishing messages using id option. - """ - - EXPECTED_STREAM = "Nats-Expected-Stream" - """Contains stream name and is used to assure that the - published message is received by the expected stream. The server will reject the - message if it is not the case. - - This can be set when publishing messages using expect_stream option. - """ - - EXPECTED_LAST_SEQUENCE = "Nats-Expected-Last-Sequence" - """Contains the expected last sequence number of the - stream and can be used to apply optimistic concurrency control at the stream - level. The server will reject the message if it is not the case. - - This can be set when publishing messages using expected_last_sequence - option. - """ - - EXPECTED_LAST_SUBJECT_SEQUENCE = "Nats-Expected-Last-Subject-Sequence" - """Contains the expected last sequence number on - the subject and can be used to apply optimistic concurrency control at - the subject level. 
The server will reject the message if it is not the case. - - This can be set when publishing messages using expected_last_subject_sequence option. - """ - - EXPECTED_LAST_MSG_ID = "Nats-Expected-Last-Msg-Id" - """Contains the expected last message ID on the - subject and can be used to apply optimistic concurrency control at - the stream level. The server will reject the message if it is not the case. - - This can be set when publishing messages using WithExpectLastMsgID - option. - """ - - ROLLUP = "Nats-Rollup" - """Used to apply a purge of all prior messages in the stream - ("all") or at the subject ("sub") before this message. - """ - - STREAM = "Nats-Stream" - """Contains the stream name the message was republished from or - the stream name the message was retrieved from using direct get. - """ - - SEQUENCE = "Nats-Sequence" - """ - Contains the original sequence number of the message. - """ - - TIMESTAMP = "Nats-Time-Stamp" - """ - Contains the original timestamp of the message. - """ - - SUBJECT = "Nats-Subject" - """ - Contains the original subject the message was published to. - """ - - LAST_SEQUENCE = "Nats-Last-Sequence" - """ - Contains the last sequence of the message having the - same subject, otherwise zero if this is the first message for the - subject. - """ - - -class Status(str, Enum): - SERVICE_UNAVAILABLE = "503" - NO_MESSAGES = "404" - REQUEST_TIMEOUT = "408" - CONFLICT = "409" - CONTROL_MESSAGE = "100" - - -@dataclass -class SequencePair: - """ - Provides a pair of the consumer and stream sequence numbers for a message. - """ - - consumer: int = field(metadata={"json": "consumer_seq"}) - """ - The consumer sequence number for message deliveries. - This is the total number of messages the consumer has seen (including redeliveries). - """ - - stream: int = field(metadata={"json": "stream_seq"}) - """ - The stream sequence number for a message. - """ - - -# FIXME -# For now, we will use the message class from the nats.aio.msg module. 
-# This needs to be fixed before releasing. -Msg = nats.aio.msg.Msg diff --git a/nats/jetstream/publish.py b/nats/jetstream/publish.py deleted file mode 100644 index 1583063e..00000000 --- a/nats/jetstream/publish.py +++ /dev/null @@ -1,126 +0,0 @@ -# Copyright 2016-2024 The NATS Authors -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# - -import json -from asyncio import Future, Semaphore -from dataclasses import dataclass, field -from typing import Dict, Optional, cast - -from nats.errors import * -from nats.jetstream.api import * -from nats.jetstream.errors import * -from nats.jetstream.message import * - -DEFAULT_RETRY_ATTEMPTS = 2 - - -@dataclass -class PubAck: - """ - PubAck is an ack received after successfully publishing a message. - """ - stream: str = field(metadata={"json": "stream"}) - """ - The stream name the message was published to. - """ - sequence: int = field(metadata={"json": "seq"}) - """ - The sequence number of the message. - """ - duplicate: bool = field(metadata={"json": "duplicate"}) - """ - Indicates whether the message was a duplicate. - """ - domain: Optional[str] = field(metadata={"json": "domain"}) - """ - The domain the message was published to. 
- """ - -class Publisher: - def __init__(self, client: Client, max_pending_async_futures: int = 1000): - self._client = client - self._pending_async_futures = {} - self._pending_async_publishes = Semaphore(max_pending_async_futures) - - async def publish( - self, - subject: str, - payload: bytes = b'', - timeout: Optional[float] = None, - headers: Optional[Dict] = None, - *, - id: Optional[str] = None, - expected_last_msg_id: Optional[str] = None, - expected_stream: Optional[str] = None, - expected_last_sequence: Optional[int] = None, - expected_last_subject_sequence: Optional[int] = None, - retry_attempts: int = 2, - retry_wait: float = 0.25, - ) -> PubAck: - """ - Performs a publish to a stream and waits for ack from server. - """ - - extra_headers = {} - if id is not None: - extra_headers[Header.MSG_ID] = id - - if expected_last_msg_id is not None: - extra_headers[Header.EXPECTED_LAST_MSG_ID - ] = str(expected_last_msg_id) - - if expected_stream is not None: - extra_headers[Header.EXPECTED_STREAM] = str(expected_stream) - - if expected_last_sequence is not None: - extra_headers[Header.EXPECTED_LAST_SEQUENCE - ] = str(expected_last_sequence) - - if expected_last_subject_sequence is not None: - extra_headers[Header.EXPECTED_LAST_SUBJECT_SEQUENCE - ] = str(expected_last_subject_sequence) - - if len(extra_headers) > 0: - if headers is not None: - extra_headers.update(headers) - - headers = extra_headers - - for attempt in range(0, retry_attempts): - try: - msg = await self._client.request( - subject, - payload, - timeout=timeout, - headers=headers, - ) - - pub_ack_response = PubAckResponse.from_json(msg.data) - if pub_ack_response.error is not None: - raise Error(*pub_ack_response.error) - - if pub_ack_response.stream is None: - raise InvalidResponseError( - "Stream was not provided with publish ack response" - ) - - return cast(PubAck, pub_ack_response) - except NoRespondersError: - if attempt < retry_attempts - 1: - await asyncio.sleep(retry_wait) - - raise 
NoStreamResponseError - -class PubAckResponse(Response, PubAck): - pass diff --git a/nats/jetstream/stream.py b/nats/jetstream/stream.py index 320834c2..699a1ef9 100644 --- a/nats/jetstream/stream.py +++ b/nats/jetstream/stream.py @@ -1,41 +1,30 @@ -# Copyright 2016-2024 The NATS Authors -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# - from __future__ import annotations -import re -from dataclasses import dataclass, asdict, field from datetime import datetime from enum import Enum -from types import NotImplementedType -from typing import ( - Any, - AsyncIterable, - AsyncIterator, - Dict, - List, - Optional, - cast, -) - -import nats.aio.client +from dataclasses import dataclass, field +from typing import AsyncIterable, Dict, Any, Optional, List, AsyncIterator, Protocol from nats import jetstream -from nats.jetstream.api import Client, Paged, Request, Response -from nats.jetstream.errors import * -from nats.jetstream.message import Header, Msg, Status +from .api import ( + STREAM_NAME_IN_USE, + STREAM_NOT_FOUND, + Client, + JetStreamError, + JetStreamError, +) + +from .consumer import ( + ClusterInfo, + ConsumerConfig, + Consumer, + _create_consumer, + _delete_consumer, + _get_consumer, + _update_consumer, + _create_or_update_consumer, +) class RetentionPolicy(Enum): """ @@ -58,7 +47,10 @@ class DiscardPolicy(Enum): """ OLD = "old" - """DiscardOld will remove older messages to return to the limits. 
@dataclass
class SubjectTransformConfig:
    """
    Configuration for a subject transform, applied to matching messages
    before any other processing happens on a newly received message.
    """

    source: str
    """The subject pattern incoming messages are matched against."""

    destination: str
    """The subject pattern the matched subject is remapped to."""

    @classmethod
    def from_dict(cls, data: Dict[str, Any]) -> SubjectTransformConfig:
        # The wire format abbreviates the field names to "src"/"dest".
        return cls(
            source=data["src"],
            destination=data["dest"],
        )

    def to_dict(self) -> Dict[str, str]:
        payload = {}
        payload["src"] = self.source
        payload["dest"] = self.destination
        return payload


@dataclass
class StreamSourceInfo:
    """
    StreamSourceInfo shows information about an upstream stream source/mirror.
    """

    name: str
    """Name of the stream that is being replicated."""

    lag: Optional[int] = None
    """How many messages behind the source/mirror operation is. This will only show correctly if there is active communication with the stream/mirror."""

    active: Optional[int] = None
    """When the mirror or sourced stream last had activity. Value will be -1 when there has been no activity."""

    filter_subject: Optional[str] = None
    """The subject filter defined for this source/mirror."""

    subject_transforms: List[SubjectTransformConfig] = field(default_factory=list)
    """The subject transforms defined for this source/mirror."""

    @classmethod
    def from_dict(cls, data: Dict[str, Any]) -> StreamSourceInfo:
        transforms = [
            SubjectTransformConfig.from_dict(entry)
            for entry in data.get("subject_transforms", [])
        ]
        return cls(
            name=data["name"],
            lag=data.get("lag"),
            active=data.get("active"),
            filter_subject=data.get("filter_subject"),
            subject_transforms=transforms,
        )
""" - msgs: int = field(metadata={'json': 'messages'}) + msgs: int """The number of messages stored in the stream.""" - bytes: int = field(metadata={'json': 'bytes'}) + bytes: int """The number of bytes stored in the stream.""" - first_sequence: int = field(metadata={'json': 'first_seq'}) + first_sequence: int """The the sequence number of the first message in the stream.""" - first_time: datetime = field(metadata={'json': 'first_ts'}) + first_time: datetime """The timestamp of the first message in the stream.""" - last_sequence: int = field(metadata={'json': 'last_seq'}) + last_sequence: int """The sequence number of the last message in the stream.""" - last_time: datetime = field(metadata={'json': 'last_ts'}) + last_time: datetime """The timestamp of the last message in the stream.""" - consumers: int = field(metadata={'json': 'consumer_count'}) + consumers: int """The number of consumers on the stream.""" - deleted: List[int] = field( - default_factory=list, metadata={'json': 'deleted'} - ) - """A list of sequence numbers that have been removed from the stream. This field will only be returned if the stream has been fetched with the DeletedDetails option.""" - - num_deleted: int = field(default=0, metadata={'json': 'num_deleted'}) + num_deleted: int """NumDeleted is the number of messages that have been removed from the stream. Only deleted messages causing a gap in stream sequence numbers are counted. Messages deleted at the beginning or end of the stream are not counted.""" - num_subjects: int = field(default=0, metadata={'json': 'num_subjects'}) + num_subjects: int """NumSubjects is the number of unique subjects the stream has received messages on.""" - subjects: Dict[str, int] = field( - default_factory=dict, metadata={'json': 'subjects'} - ) - """Subjects is a map of subjects the stream has received messages on with message count per subject. 
This field will only be returned if the stream has been fetched with the SubjectFilter option.""" - - -@dataclass -class ClusterInfo: - """ - ClusterInfo shows information about the underlying set of servers that - make up the stream or consumer. - """ - - name: Optional[str] = field(default=None, metadata={'json': 'name'}) - """Name is the name of the cluster.""" - - leader: Optional[str] = field(default=None, metadata={'json': 'leader'}) - """Leader is the server name of the RAFT leader.""" - - replicas: List[PeerInfo] = field( - default_factory=list, metadata={'json': 'replicas'} - ) - """Replicas is the list of members of the RAFT cluster.""" - - -@dataclass -class PeerInfo: - """ - PeerInfo shows information about the peers in the cluster that are - supporting the stream or consumer. - """ - - name: str = field(metadata={'json': 'name'}) - """The server name of the peer.""" - - current: bool = field(metadata={'json': 'current'}) - """Indicates if the peer is up to date and synchronized with the leader.""" - - active: int = field(metadata={'json': 'active'}) - """The duration since this peer was last seen.""" - - offline: Optional[bool] = field(default=None, metadata={'json': 'offline'}) - """Indicates if the peer is considered offline by the group.""" - - lag: Optional[int] = field(default=None, metadata={'json': 'lag'}) - """The number of uncommitted operations this peer is behind the leader.""" - - -@dataclass -class SubjectTransformConfig: - """ - SubjectTransformConfig is for applying a subject transform (to matching - messages) before doing anything else when a new message is received. - """ + deleted: Optional[List[int]] = None + """A list of sequence numbers that have been removed from the stream. 
This field will only be returned if the stream has been fetched with the DeletedDetails option.""" - source: str = field(metadata={'json': 'src'}) - """The subject pattern to match incoming messages against.""" + subjects: Optional[Dict[str, int]] = None + """Subjects is a map of subjects the stream has received messages on with message count per subject. This field will only be returned if the stream has been fetched with the SubjectFilter option.""" - destination: str = field(metadata={'json': 'dest'}) - """The subject pattern to remap the subject to.""" + @classmethod + def from_dict(cls, data: Dict[str, Any]) -> StreamState: + return cls( + msgs=data["messages"], + bytes=data["bytes"], + first_sequence=data["first_seq"], + first_time=datetime.strptime(data["first_ts"], "%Y-%m-%dT%H:%M:%SZ"), + last_sequence=data["last_seq"], + last_time=datetime.strptime(data["last_ts"], "%Y-%m-%dT%H:%M:%SZ"), + consumers=data["consumer_count"], + num_deleted=data.get("num_deleted", 0), + num_subjects=data.get("num_subjects", 0), + deleted=data.get("deleted", None), + subjects=data.get("subjects", None), + ) @dataclass @@ -406,17 +212,34 @@ class Republish: pattern. 
""" - destination: str = field(metadata={'json': 'dest'}) + destination: str """The subject pattern to republish the subject to.""" - source: Optional[str] = field(default=None, metadata={'json': 'src'}) + source: Optional[str] = None """The subject pattern to match incoming messages against.""" - headers_only: Optional[bool] = field( - default=None, metadata={'json': 'headers_only'} - ) + headers_only: Optional[bool] = None """A flag to indicate that only the headers should be republished.""" + @classmethod + def from_dict(cls, data: Dict[str, Any]) -> Republish: + return cls( + destination=data["dest"], + source=data.get("src"), + headers_only=data.get("headers_only"), + ) + + def to_dict(self) -> Dict[str, Any]: + return { + key: value + for key, value in { + "dest": self.destination, + "src": self.source, + "headers_only": self.headers_only, + }.items() + if value is not None + } + @dataclass class Placement: @@ -424,12 +247,52 @@ class Placement: Placement is used to guide placement of streams in clustered JetStream. """ - cluster: str = field(metadata={'json': 'cluster'}) + cluster: str """The name of the cluster to which the stream should be assigned.""" - tags: List[str] = field(default_factory=list, metadata={'json': 'tags'}) + tags: List[str] = field(default_factory=list) """Tags are used to match streams to servers in the cluster. A stream will be assigned to a server with a matching tag.""" + @classmethod + def from_dict(cls, data: Dict[str, Any]) -> Placement: + return cls( + cluster=data["cluster"], + tags=data.get("tags", []), + ) + + def to_dict(self) -> Dict[str, Any]: + return { + "cluster": self.cluster, + "tags": self.tags, + } + + +@dataclass +class ExternalStream: + """ + ExternalStream allows you to qualify access to a stream source in another + account. 
+ """ + + api_prefix: str + """The subject prefix that imports the other account/domain $JS.API.CONSUMER.> subjects.""" + + deliver_prefix: str + """The delivery subject to use for the push consumer.""" + + @classmethod + def from_dict(cls, data: Dict[str, Any]) -> ExternalStream: + return cls( + api_prefix=data["api"], + deliver_prefix=data["deliver"], + ) + + def to_dict(self) -> Dict[str, Any]: + return { + "api": self.api_prefix, + "deliver": self.deliver_prefix, + } + @dataclass class StreamSource: @@ -437,54 +300,67 @@ class StreamSource: StreamSource dictates how streams can source from other streams. """ - name: str = field(metadata={'json': 'name'}) + name: str """The name of the stream to source from.""" - opt_start_seq: Optional[int] = field( - default=None, metadata={'json': 'opt_start_seq'} - ) + opt_start_seq: Optional[int] = None """The sequence number to start sourcing from.""" - opt_start_time: Optional[datetime] = field( - default=None, metadata={'json': 'opt_start_time'} - ) + opt_start_time: Optional[datetime] = None """The timestamp of messages to start sourcing from.""" - filter_subject: Optional[str] = field( - default=None, metadata={'json': 'filter_subject'} - ) + filter_subject: Optional[str] = None """The subject filter used to only replicate messages with matching subjects.""" - subject_transforms: List[SubjectTransformConfig] = field( - default_factory=list, metadata={'json': 'subject_transforms'} - ) + subject_transforms: Optional[List[SubjectTransformConfig]] = None """ A list of subject transforms to apply to matching messages. Subject transforms on sources and mirrors are also used as subject filters with optional transformations. 
""" - external: Optional[ExternalStream] = field( - default=None, metadata={'json': 'external'} - ) + external: Optional[ExternalStream] = None """A configuration referencing a stream source in another account or JetStream domain.""" - domain: Optional[str] = field(default=None, metadata={'json': '-'}) + domain: Optional[str] = None """Used to configure a stream source in another JetStream domain. This setting will set the External field with the appropriate APIPrefix.""" + def __post__init__(self): + if self.external and self.domain: + raise ValueError("cannot set both external and domain") + + @classmethod + def from_dict(cls, data: Dict[str, Any]) -> StreamSource: + kwargs = data.copy() + + return cls( + name=data["name"], + opt_start_seq=data.get("opt_start_seq"), + opt_start_time=data.get("opt_start_time"), + filter_subject=data.get("filter_subject"), + subject_transforms=[ + SubjectTransformConfig.from_dict(subject_transform) + for subject_transform in data.get("subject_transforms", []) + ], + external=ExternalStream.from_dict(data["external"]) + if data.get("external") + else None, + ) -@dataclass -class ExternalStream: - """ - ExternalStream allows you to qualify access to a stream source in another - account. 
- """ - - api_prefix: str = field(metadata={'json': 'api'}) - """The subject prefix that imports the other account/domain $JS.API.CONSUMER.> subjects.""" - - deliver_prefix: str = field(metadata={'json': 'deliver'}) - """The delivery subject to use for the push consumer.""" + def to_dict(self) -> Dict[str, Any]: + return { + "name": self.name, + "opt_start_seq": self.opt_start_seq, + "opt_start_time": self.opt_start_time, + "filter_subject": self.filter_subject, + "subject_transforms": [ + subject_transform.to_dict() + for subject_transform in self.subject_transforms + ] + if self.subject_transforms + else None, + "external": self.external.to_dict() if self.external else None, + } @dataclass @@ -494,544 +370,532 @@ class StreamConsumerLimits: be overridden on a per consumer basis. """ - inactive_threshold: Optional[int] = field( - default=None, metadata={'json': 'inactive_threshold'} - ) + inactive_threshold: Optional[int] = None """A duration which instructs the server to clean up the consumer if it has been inactive for the specified duration.""" - max_ack_pending: Optional[int] = field( - default=None, metadata={'json': 'max_ack_pending'} - ) + max_ack_pending: Optional[int] = None """A maximum number of outstanding unacknowledged messages for a consumer.""" + @classmethod + def from_dict(cls, data: Dict[str, Any]) -> StreamConsumerLimits: + return cls( + max_ack_pending=data.get("max_ack_pending"), + inactive_threshold=data.get("inactive_threshold"), + ) + + def to_dict(self) -> Dict[str, Any]: + return { + "max_ack_pending": self.max_ack_pending, + "inactive_threshold": self.inactive_threshold, + } + @dataclass -class RawStreamMsg: - subject: str = field(metadata={"json": "subject"}) - """ Subject of the message. """ +class StreamConfig: + """ + StreamConfig is the configuration of a JetStream stream. + """ - sequence: int = field(metadata={"json": "seq"}) - """ Sequence number of the message. """ + name: str + """Name is the name of the stream. 
It is required and must be unique across the JetStream account. Names cannot contain whitespace, ., >, path separators (forward or backwards slash), and non-printable characters.""" - time: datetime = field(metadata={"json": "time"}) - """ Time of the message. """ + description: Optional[str] = None + """Description is an optional description of the stream.""" - data: Optional[bytes] = field(default=None, metadata={"json": "data"}) - """ Data of the message.""" + subjects: List[str] = field(default_factory=list) + """Subjects is a list of subjects that the stream is listening on. Wildcards are supported. Subjects cannot be set if the stream is created as a mirror.""" - headers: Optional[Dict[str, Any]] = field( - default_factory=dict, metadata={"json": "hdrs"} - ) - """ Headers of the message. """ + retention: Optional[RetentionPolicy] = None + """Retention defines the message retention policy for the stream. Defaults to RetentionPolicy.LIMITS.""" + max_consumers: Optional[int] = None + """MaxConsumers specifies the maximum number of consumers allowed for the stream.""" -@dataclass -class StoredMsg: - subject: str = field(metadata={"json": "subject"}) - sequence: int = field(metadata={"json": "seq"}) - time: datetime = field(metadata={"json": "time"}) - headers: Optional[bytes] = field(default=None, metadata={"json": "hdrs"}) - data: Optional[bytes] = field(default=None, metadata={"json": "data"}) - - -def direct_msg_to_raw_stream_msg(msg: Msg) -> RawStreamMsg: - """ - Converts from a direct `Msg` to a `RawStreamMsg`. 
- """ - headers = msg.headers - if headers is None: - raise Error('response should have headers') - - data = msg.data - if len(data) == 0: - status = headers.get("Status") - if status == Status.NO_MESSAGES: - raise MsgNotFoundError() - else: - description = headers.get( - "Description", "unable to get message" - ) - raise Error(description=description) + max_msgs: Optional[int] = None + """MaxMsgs is the maximum number of messages the stream will store. After reaching the limit, stream adheres to the discard policy. If not set, server default is -1 (unlimited).""" + + max_bytes: Optional[int] = None + """MaxBytes is the maximum total size of messages the stream will store. After reaching the limit, stream adheres to the discard policy. If not set, server default is -1 (unlimited).""" + + discard: Optional[DiscardPolicy] = None + """Discard defines the policy for handling messages when the stream reaches its limits in terms of number of messages or total bytes. Defaults to DiscardPolicy.OLD if not set""" + + discard_new_per_subject: Optional[bool] = None + """DiscardNewPerSubject is a flag to enable discarding new messages per subject when limits are reached. Requires DiscardPolicy to be DiscardNew and the MaxMsgsPerSubject to be set.""" + + max_age: Optional[int] = None + """MaxAge is the maximum age of messages that the stream will retain.""" + + max_msgs_per_subject: Optional[int] = None + """MaxMsgsPerSubject is the maximum number of messages per subject that the stream will retain.""" + + max_msg_size: Optional[int] = None + """MaxMsgSize is the maximum size of any single message in the stream.""" + + storage: StorageType = StorageType.FILE + """Storage specifies the type of storage backend used for the stream (file or memory). Defaults to StorageType.FILE """ + + replicas: int = 1 + """Replicas is the number of stream replicas in clustered JetStream. 
Defaults to 1, maximum is 5.""" - subject = headers.get(Header.SUBJECT) - if subject is None: - raise Error('missing subject header') + no_ack: Optional[bool] = None + """NoAck is a flag to disable acknowledging messages received by this stream. If set to true, publish methods from the JetStream client will not work as expected, since they rely on acknowledgements. Core NATS publish methods should be used instead. Note that this will make message delivery less reliable.""" - sequence = headers.get(Header.SEQUENCE) - if sequence is None: - raise Error('missing sequence header') + duplicates: Optional[int] = None + """Duplicates is the window within which to track duplicate messages. If not set, server default is 2 minutes.""" - try: - sequence = int(sequence) - except ValueError as error: - raise Error(f'invalid sequence header: {error}') + placement: Optional[Placement] = None + """Placement is used to declare where the stream should be placed via tags and/or an explicit cluster name.""" - time = headers.get(Header.TIMESTAMP) - if time is None: - raise Error(f'missing timestamp header') + mirror: Optional[StreamSource] = None + """Mirror defines the configuration for mirroring another stream.""" - try: - time = datetime.strptime(time, "%Y-%m-%dT%H:%M:%S.%fZ") - except ValueError as error: - raise ValueError(f'invalid timestamp header: {error}') + sources: Optional[List[StreamSource]] = None + """Sources is a list of other streams this stream sources messages from.""" - return RawStreamMsg( - subject=subject, - sequence=sequence, - headers=headers, - data=data, - time=time, - ) + sealed: Optional[bool] = None + """Sealed streams do not allow messages to be published or deleted via limits or API, sealed streams cannot be unsealed via configuration update. 
Can only be set on already created streams via the Update API.""" -def stored_msg_to_raw_stream_msg(msg: StoredMsg) -> RawStreamMsg: + deny_delete: Optional[bool] = None """ - Converts from a `StoredMsg` to a `RawStreamMsg`. + Restricts the ability to delete messages from a stream via the API. + + Server defaults to false when not set. """ - headers = None - raw_headers = msg.headers - if raw_headers: - raise NotImplementedError('parsing headers is not implemented yet') + deny_purge: Optional[bool] = None + """Restricts the ability to purge messages from a stream via the API. - return RawStreamMsg( - subject=msg.subject, - sequence=msg.sequence, - time=msg.time, - headers=headers, - ) + Server defaults to false from server when not set. + """ + allow_rollup: Optional[bool] = None + """Allows the use of the `Nats-Rollup` header to replace all contents of a stream, or subject in a stream, with a single new message. + """ -class Stream: + compression: Optional[StoreCompression] = None """ - Stream contains operations on an existing stream. It allows fetching and removing - messages from a stream, as well as purging a stream. + Specifies the message storage compression algorithm. + + Server defaults to `StoreCompression.NONE` when not set. 
""" - def __init__(self, client: Client, name: str, info: StreamInfo): - self._client = client - self._name = name - self._info = info + first_sequence: Optional[int] = None + """The initial sequence number of the first message in the stream.""" - async def info( - self, - subject_filter: Optional[str] = None, - deleted_details: Optional[bool] = None, - timeout: Optional[float] = None - ) -> StreamInfo: - """Returns `StreamInfo` from the server.""" - # TODO(caspervonb): handle pagination - info_subject = f"STREAM.INFO.{self._name}" - info_request = StreamInfoRequest( - subject_filter=subject_filter, - deleted_details=deleted_details, - ) - info_response = await self._client.request_json( - info_subject, info_request, StreamInfoResponse, timeout=timeout - ) - if info_response.error is not None: - if info_response.error.error_code == ErrorCode.STREAM_NOT_FOUND: - raise StreamNotFoundError(*info_response.error) + subject_transform: Optional[SubjectTransformConfig] = None + """Allows applying a transformation to matching messages' subjects.""" - raise Error(*info_response.error) + republish: Optional[Republish] = None + """Allows immediate republishing of a message to the configured subject after it's stored.""" - return cast(StreamInfo, info_response) + allow_direct: bool = False + """ + Enables direct access to individual messages using direct get. - @property - def cached_info(self) -> StreamInfo: - """Returns the `StreamInfo` currently cached on this stream.""" - return self._info + Server defaults to false. + """ - # TODO(caspervonb): Go does not return anything for this operation, should we? - async def purge( - self, - sequence: Optional[int] = None, - keep: Optional[int] = None, - subject: Optional[str] = None, - timeout: Optional[float] = None - ) -> int: - """ - Removes messages from a stream. - This is a destructive operation. - """ + mirror_direct: bool = False + """ + Enables direct access to individual messages from the origin stream. 
- # TODO(caspervonb): enforce types with overloads - if keep is not None and sequence is not None: - raise ValueError( - "both 'keep' and 'sequence' cannot be provided in purge request" - ) + Defaults to false. + """ - purge_subject = f"STREAM.PURGE.{self._name}" - purge_request = StreamPurgeRequest( - sequence=sequence, - keep=keep, - subject=subject, - ) + consumer_limits: Optional[StreamConsumerLimits] = None + """Defines limits of certain values that consumers can set, defaults for those who don't set these settings.""" - purge_response = await self._client.request_json( - purge_subject, purge_request, StreamPurgeResponse, timeout=timeout - ) - if purge_response.error is not None: - raise Error(*purge_response.error) + metadata: Optional[Dict[str, str]] = None + """Provides a set of application-defined key-value pairs for associating metadata on the stream. - return purge_response.purged + Note: This feature requires nats-server v2.10.0 or later. + """ - async def _get_msg( - self, - sequence: Optional[int] = None, - next_by_subject: Optional[str] = None, - last_by_subject: Optional[str] = None, - timeout: Optional[float] = None, - ) -> RawStreamMsg: - msg_get_request = MsgGetRequest( - sequence=sequence, - last_by_subject=last_by_subject, - next_by_subject=next_by_subject, - ) + def __post_init__(self): + _validate_stream_name(self.name) - if self._info.config.allow_direct: - if last_by_subject is not None: - direct_get_subject = f"DIRECT.GET.{self._name}.{last_by_subject}" - direct_get_request = b"" - else: - direct_get_subject = f"DIRECT.GET.{sequence}" - direct_get_request = msg_get_request.as_json().encode() + if self.max_msgs_per_subject and not self.discard: + raise ValueError("max_msgs_per_subject requires discard policy to be set") - direct_get_response = await self._client.request_msg( - direct_get_subject, direct_get_request, timeout=timeout + @classmethod + def from_dict(cls, data: Dict[str, Any]) -> StreamConfig: + return cls( + name=data["name"], 
+ description=data.get("description"), + subjects=data.get("subjects", []), + retention=RetentionPolicy(data["retention"]) + if data.get("retention") + else None, + max_consumers=data.get("max_consumers"), + max_msgs=data.get("max_msgs"), + max_bytes=data.get("max_bytes"), + discard=DiscardPolicy(data["discard"]) if data.get("discard") else None, + discard_new_per_subject=data.get("discard_new_per_subject"), + max_age=data.get("max_age"), + max_msgs_per_subject=data.get("max_msgs_per_subject"), + max_msg_size=data.get("max_msg_size"), + storage=StorageType(data["storage"]), + replicas=data.get("num_replicas", 1), + no_ack=data.get("no_ack"), + duplicates=data.get("duplicates"), + placement=Placement.from_dict(data["placement"]) + if data.get("placement") + else None, + mirror=StreamSource.from_dict(data["mirror"]) + if data.get("mirror") + else None, + sources=[ + StreamSource.from_dict(source) for source in data.get("sources", []) + ], + sealed=data.get("sealed"), + deny_delete=data.get("deny_delete"), + deny_purge=data.get("deny_purge"), + allow_rollup=data.get("allow_rollup"), + compression=StoreCompression(data["compression"]) + if data.get("compression") + else None, + first_sequence=data.get("first_sequence"), + subject_transform=SubjectTransformConfig.from_dict( + data["subject_transform"] ) + if data.get("subject_transform") + else None, + republish=Republish.from_dict(data["republish"]) + if data.get("republish") + else None, + allow_direct=data.get("allow_direct", False), + mirror_direct=data.get("mirror_direct", False), + consumer_limits=StreamConsumerLimits.from_dict(data["consumer_limits"]) + if data.get("consumer_limits") + else None, + metadata=data.get("metadata"), + ) - return direct_msg_to_raw_stream_msg(direct_get_response) - + def to_dict(self) -> Dict[str, Any]: + return { + k: v + for k, v in { + "name": self.name, + "description": self.description, + "subjects": self.subjects, + "retention": self.retention.value if self.retention else None, + 
"max_consumers": self.max_consumers, + "max_msgs": self.max_msgs, + "max_bytes": self.max_bytes, + "discard": self.discard.value if self.discard else None, + "discard_new_per_subject": self.discard_new_per_subject, + "max_age": self.max_age, + "max_msgs_per_subject": self.max_msgs_per_subject, + "max_msg_size": self.max_msg_size, + "storage": self.storage.value, + "num_replicas": self.replicas, + "no_ack": self.no_ack, + "duplicate_window": self.duplicates, + "placement": self.placement.to_dict() if self.placement else None, + "mirror": self.mirror.to_dict() if self.mirror else None, + "sources": [source.to_dict() for source in self.sources] + if self.sources + else None, + "sealed": self.sealed, + "deny_delete": self.deny_delete, + "deny_purge": self.deny_purge, + "allow_rollup": self.allow_rollup, + "compression": self.compression.value if self.compression else None, + "first_seq": self.first_sequence, + "subject_transform": self.subject_transform.to_dict() + if self.subject_transform + else None, + "republish": self.republish.to_dict() if self.republish else None, + "allow_direct": self.allow_direct, + "mirror_direct": self.mirror_direct, + "consumer_limits": self.consumer_limits.to_dict() + if self.consumer_limits + else None, + "metadata": self.metadata, + }.items() + if v is not None + } - msg_get_subject = "MSG.GET.{self._name}" - msg_get_response = await self._client.request_json( - msg_get_subject, msg_get_request, MsgGetResponse, timeout=timeout - ) - if msg_get_response.error is not None: - if msg_get_response.error.error_code == ErrorCode.MESSAGE_NOT_FOUND: - raise MsgNotFoundError() +@dataclass +class StreamInfo: + """ + Provides configuration and current state for a stream. 
+ """ - raise Error(*msg_get_response.error) + config: StreamConfig + """Contains the configuration settings of the stream, set when creating or updating the stream.""" - return stored_msg_to_raw_stream_msg(msg_get_response.msg) + timestamp: datetime + """Indicates when the info was gathered by the server.""" - async def get_msg( - self, - sequence: int, - timeout: Optional[float] = None, - ) -> RawStreamMsg: - """ - Retrieves a raw stream message stored in JetStream by sequence number. - """ - return await self._get_msg(sequence=sequence, timeout=timeout) + created: datetime + """The timestamp when the stream was created.""" - async def get_last_msg_for_subject( - self, subject: str, timeout: Optional[float] = None - ) -> RawStreamMsg: - """ - Retrieves the last raw stream message stored in JetStream on a given subject. - """ - return await self._get_msg(last_by_subject=subject, timeout=timeout) - - async def _delete_msg( - self, sequence: int, no_erase: bool, timeout: Optional[float] - ): - msg_delete_subject = f"STREAM.MSG.DELETE.{sequence}" - msg_delete_request = MsgDeleteRequest( - sequence=sequence, - no_erase=no_erase, - ) + state: StreamState + """Provides the state of the stream at the time of request, including metrics like the number of messages in the stream, total bytes, etc.""" - msg_delete_response = await self._client.request_json( - msg_delete_subject, - msg_delete_request, - MsgDeleteResponse, - timeout=timeout - ) + cluster: Optional[ClusterInfo] = None + """Contains information about the cluster to which this stream belongs (if applicable).""" - if msg_delete_response.error is not None: - raise Error(*msg_delete_response.error) + mirror: Optional[StreamSourceInfo] = None + """Contains information about another stream this one is mirroring. Mirroring is used to create replicas of another stream's data. 
This field is omitted if the stream is not mirroring another stream.""" - async def delete_msg( - self, sequence: int, timeout: Optional[float] = None - ) -> None: - """ - Deletes a message from a stream. - """ - await self._delete_msg( - sequence=sequence, no_erase=True, timeout=timeout - ) + sources: List[StreamSourceInfo] = field(default_factory=list) + """A list of source streams from which this stream collects data.""" - async def secure_delete_msg( - self, sequence: int, timeout: Optional[float] = None - ) -> None: - """ - Deletes a message from a stream. - """ - await self._delete_msg( - sequence=sequence, no_erase=False, timeout=timeout + @classmethod + def from_dict(cls, data: Dict[str, Any]) -> StreamInfo: + return cls( + config=StreamConfig.from_dict(data["config"]), + timestamp=datetime.fromisoformat(data["ts"]), + created=datetime.fromisoformat(data["created"]), + state=StreamState.from_dict(data["state"]), + cluster=ClusterInfo.from_dict(data["cluster"]) + if "cluster" in data + else None, + mirror=StreamSourceInfo.from_dict(data["mirror"]) + if "mirror" in data + else None, + sources=[StreamSourceInfo.from_dict(source) for source in data["sources"]] + if "sources" in data + else [], ) -class StreamManager: +class StreamNameAlreadyInUse(Exception): """ - Provides methods for managing streams. + Raised when trying to create a stream with a name that is already in use. """ - def __init__(self, client: Client) -> None: - self._client = client - - async def create_stream( - self, config: StreamConfig, timeout: Optional[float] = None - ) -> Stream: - """ - Creates a new stream with given config. 
- """ + pass - stream_create_subject = f"STREAM.CREATE" - stream_create_request = StreamCreateRequest(**asdict(config)) - stream_create_response = await self._client.request_json( - stream_create_subject, - stream_create_request, - StreamCreateResponse, - timeout=timeout - ) - if stream_create_response.error: - if stream_create_response.error.error_code == ErrorCode.STREAM_NAME_IN_USE: - raise StreamNameAlreadyInUseError( - ) from stream_create_response.error +class StreamNotFoundError(Exception): + """ + Raised when trying to access a stream that does not exist. + """ - raise Error(*stream_create_response.error) + pass - # Check if subject transforms are supported - if config.subject_transform and not stream_create_response.config.subject_transform: - raise StreamSubjectTransformNotSupportedError() - # Check if sources and subject transforms are supported - if config.sources: - if not stream_create_response.config.sources: - raise StreamSourceNotSupportedError() +class StreamSourceNotSupportedError(Exception): + """ + Raised when a source stream is not supported by the server. + """ - for i in range(len(config.sources)): - source = config.sources[i] - response_source = stream_create_response.config.sources[i] + pass - if source.subject_transforms and not response_source.subject_transforms: - raise StreamSourceMultipleFilterSubjectsNotSupported() - return Stream( - client=self._client, - name=stream_create_response.config.name, - info=cast(StreamInfo, stream_create_response), - ) +class StreamSubjectTransformNotSupportedError(Exception): + """ + Raised when a subject transform is not supported by the server. + """ + pass - async def update_stream( - self, config: StreamConfig, timeout: Optional[float] = None - ) -> Stream: - """ - Updates an existing stream with the given config. 
- """ - raise NotImplementedError - async def create_or_update_stream( - self, config: StreamConfig, timeout: Optional[float] = None - ) -> Stream: - """CreateOrUpdateStream creates a stream with given config or updates it if it already exists.""" - try: - return await self.update_stream(config, timeout=timeout) - except StreamNotFoundError: - return await self.create_stream(config, timeout=timeout) +class StreamSourceMultipleFilterSubjectsNotSupported(Exception): + """ + Raised when multiple filter subjects are not supported by the server. + """ - async def stream( - self, name: str, timeout: Optional[float] = None - ) -> Stream: - """Stream fetches StreamInfo and returns a Stream interface for a given stream name.""" - validate_stream_name(name) - - stream_info_subject = f"STREAM.INFO.{name}" - stream_info_request = StreamInfoRequest() - stream_info_response = await self._client.request_json( - stream_info_subject, - stream_info_request, - StreamInfoResponse, - timeout=timeout - ) + pass - if stream_info_response.error: - if stream_info_response.error.error_code == ErrorCode.STREAM_NOT_FOUND: - raise StreamNotFoundError() - raise Error(*stream_info_response.error) +class InvalidStreamNameError(ValueError): + """ + Raised when an invalid stream name is provided. 
+ """ - return Stream( - client=self._client, - name=name, - info=cast(StreamInfo, stream_info_response) - ) + pass - async def stream_name_by_subject( - self, subject: str, timeout: Optional[float] = None - ) -> str: - """StreamNameBySubject returns a stream name listening on a given subject.""" - raise NotImplementedError - async def delete_stream( - self, stream: str, timeout: Optional[float] = None - ) -> None: - """DeleteStream removes a stream with given name.""" - validate_stream_name(stream) - - stream_delete_subject = f"STREAM.DELETE.{stream}" - stream_delete_request = StreamDeleteRequest() - stream_delete_response = await self._client.request_json( - stream_delete_subject, - stream_delete_request, - StreamDeleteResponse, - timeout=timeout - ) +class StreamNameRequiredError(ValueError): + """ + Raised when a stream name is required but not provided (e.g empty). + """ - if stream_delete_response.error: - if stream_delete_response.error.error_code == ErrorCode.STREAM_NOT_FOUND: - raise StreamNotFoundError() from stream_delete_response.error + pass - raise Error(*stream_delete_response.error) - def list_streams(self, - timeout: Optional[float] = None - ) -> AsyncIterator[StreamInfo]: - """ListStreams returns a StreamInfoLister for iterating over stream infos.""" - raise NotImplementedError +class StreamNameAlreadyInUseError(Exception): + """ + Raised when a stream name is already in use. + """ - def stream_names(self, - timeout: Optional[float] = None) -> AsyncIterator[str]: - """StreamNames returns a StreamNameLister for iterating over stream names.""" - raise NotImplementedError + pass -class StreamInfoAsyncIterator: - pass +class Stream: + def __init__(self, client: Client, name: str, info: StreamInfo) -> None: + self._client = client + self._name = name + self._cached_info = info + @property + def cached_info(self) -> StreamInfo: + """ + Returns the cached `StreamInfo` for the stream. 
+ """ + return self._cached_info -class StreamInfoLister(AsyncIterable): - "Provides asyncronous iteration over `StreamInfo`" - pass + async def info( + self, + subject_filter: Optional[str] = None, + deleted_details: Optional[bool] = None, + timeout: Optional[float] = None + ) -> StreamInfo: + """Returns `StreamInfo` from the server.""" + # TODO(caspervonb): handle pagination + stream_info_subject = f"STREAM.INFO.{self._name}" + stream_info_request = { + "subject_filter": subject_filter, + "deleted_details": deleted_details, + } + try: + info_response = await self._client.request_json( + stream_info_subject, stream_info_request, timeout=timeout + ) + except JetStreamError as jetstream_error: + if jetstream_error.code == STREAM_NOT_FOUND: + raise StreamNotFoundError() from jetstream_error + raise jetstream_error -class StreamNameLister: - pass + info = StreamInfo.from_dict(info_response) + self._cached_info = info + return info -@dataclass -class StreamCreateRequest(Request, StreamConfig): - pass + # TODO(caspervonb): Go does not return anything for this operation, should we? + async def purge( + self, + sequence: Optional[int] = None, + keep: Optional[int] = None, + subject: Optional[str] = None, + timeout: Optional[float] = None + ) -> None: + """ + Removes messages from a stream. + This is a destructive operation. 
+ """ -@dataclass -class StreamCreateResponse(Response, StreamInfo): - pass + # TODO(caspervonb): enforce types with overloads + if keep is not None and sequence is not None: + raise ValueError( + "both 'keep' and 'sequence' cannot be provided in purge request" + ) + stream_purge_subject = f"STREAM.PURGE.{self._name}" + stream_purge_request = { + "sequence": sequence, + "keep": keep, + "subject": subject, + } -@dataclass -class StreamUpdateRequest(Request, StreamConfig): - pass + try: + stream_purge_response = await self._client.request_json( + stream_purge_subject, stream_purge_request, timeout=timeout + ) + except JetStreamError as jetstream_error: + raise jetstream_error -@dataclass -class StreamUpdateResponse(Response, StreamInfo): - pass + async def create_consumer( + self, config: ConsumerConfig, timeout: Optional[float] = None + ) -> Consumer: + return await _create_consumer( + self._client, stream=self._name, config=config, timeout=timeout + ) + async def update_consumer( + self, config: ConsumerConfig, timeout: Optional[float] = None + ) -> Consumer: + return await _update_consumer( + self._client, stream=self._name, config=config, timeout=timeout + ) -@dataclass -class StreamDeleteRequest(Request): - pass + async def create_or_update_consumer( + self, config: ConsumerConfig, timeout: Optional[float] = None + ) -> Consumer: + return await _create_or_update_consumer( + self._client, stream=self._name, config=config, timeout=timeout + ) + async def consumer(self, name: str, timeout: Optional[float] = None) -> Consumer: + return await _get_consumer( + self._client, stream=self._name, name=name, timeout=timeout + ) -@dataclass -class StreamDeleteResponse(Response): - pass + async def delete_consumer(self, name: str, timeout: Optional[float] = None) -> None: + return await _delete_consumer( + self._client, stream=self._name, consumer=name, timeout=timeout + ) -@dataclass -class StreamInfoRequest(Request, Paged): - deleted_details: Optional[bool] = field( - 
default=False, metadata={'json': 'deleted_details'} - ) - subject_filter: Optional[str] = field( - default=None, metadata={'json': 'subjects_filter'} - ) +class StreamNameLister(AsyncIterable): + pass -@dataclass -class StreamInfoResponse(Response, Paged, StreamInfo): +class StreamInfoLister(AsyncIterable): pass -@dataclass -class StreamPurgeRequest(Request): - subject: Optional[str] = field(default=None, metadata={'json': 'filter'}) - sequence: Optional[int] = field(default=None, metadata={'json': 'seq'}) - keep: Optional[int] = field(default=None, metadata={'json': 'keep'}) +class StreamManager(Protocol): + """ + Provides methods for managing streams. + """ + async def create_stream( + self, config: StreamConfig, timeout: Optional[float] = None + ) -> Stream: + """ + Creates a new stream with given config. + """ + ... -@dataclass -class StreamPurgeResponse(Response): - success: bool = field(default=False, metadata={'json': 'success'}) - purged: int = field(default=0, metadata={'json': 'purged'}) + async def update_stream( + self, config: StreamConfig, timeout: Optional[float] = None + ) -> Stream: + """ + Updates an existing stream with the given config. + """ + ... + + async def create_or_update_stream( + self, config: StreamConfig, timeout: Optional[float] = None + ) -> Stream: ... + async def stream(self, name: str, timeout: Optional[float] = None) -> Stream: + """Fetches `StreamInfo` and returns a `Stream` instance for a given stream name.""" + ... -@dataclass -class MsgGetRequest(Request): - sequence: Optional[int] = field(metadata={'json': 'seq'}) - last_by_subject: Optional[str] = field(metadata={'json': 'last_by_subj'}) - next_by_subject: Optional[str] = field(metadata={'json': 'next_by_subj'}) + async def stream_name_by_subject( + self, subject: str, timeout: Optional[float] = None + ) -> str: + """Returns a stream name listening on a given subject.""" + ... 
+ async def delete_stream(self, name: str, timeout: Optional[float] = None) -> None: + """Removes a stream with given name.""" + ... -@dataclass -class MsgGetResponse(Response): - msg: Optional[StoredMsg] = field(default=None, metadata={'json': 'seq'}) + def list_streams(self, timeout: Optional[float] = None) -> StreamInfoLister: + """Returns a `StreamLister` for iterating over stream infos.""" + ... + def stream_names(self, timeout: Optional[float] = None) -> StreamNameLister: + """Returns a `StreamNameLister` for iterating over stream names.""" + ... -@dataclass -class MsgDeleteRequest(Request): - sequence: int = field(metadata={'json': 'seq'}) - no_erase: bool = field(metadata={'json': 'no_erase'}) +def _validate_stream_name(stream_name: str) -> None: + if not stream_name: + raise StreamNameRequiredError() -@dataclass -class MsgDeleteResponse(Response): - success: bool = field(default=False, metadata={'json': 'success'}) - - -def validate_stream_name(stream_name: Optional[str]): - if stream_name is None: - raise ValueError("Stream name is required.") - - if stream_name == "": - raise ValueError("Stream name cannot be empty.") - - if re.search(r'[>\*\./\\]', stream_name): - raise ValueError(f"Invalid stream name: '{stream_name}'") - - -__all__ = [ - 'RetentionPolicy', - 'DiscardPolicy', - 'StorageType', - 'StoreCompression', - 'StreamInfo', - 'StreamConfig', - 'StreamSourceInfo', - 'ClusterInfo', - 'PeerInfo', - 'SubjectTransformConfig', - 'Republish', - 'Placement', - 'StreamSource', - 'ExternalStream', - 'StreamConsumerLimits', - 'Stream', - 'StreamManager', -] + invalid_chars = ">*. 
/\\" + if any(char in stream_name for char in invalid_chars): + raise InvalidStreamNameError(stream_name) diff --git a/tests/test_jetstream.py b/tests/test_jetstream.py new file mode 100644 index 00000000..963f1b8f --- /dev/null +++ b/tests/test_jetstream.py @@ -0,0 +1,805 @@ +import unittest +import asyncio +import time +import nats +import nats.jetstream + +from nats.jetstream.stream import * +from nats.jetstream.consumer import * + +from .utils import IsolatedJetStreamServerTestCase + +class TestJetStream(IsolatedJetStreamServerTestCase): + # Stream Creation Tests + async def test_create_stream_ok(self): + nats_client = await nats.connect("nats://localhost:4222") + jetstream_context = nats.jetstream.new(nats_client) + + stream_config = StreamConfig(name="foo", subjects=["FOO.123"]) + created_stream = await jetstream_context.create_stream(stream_config) + self.assertIsNotNone(created_stream) + self.assertEqual(created_stream.cached_info.config.name, "foo") + + await nats_client.close() + + async def test_create_stream_with_metadata(self): + nats_client = await nats.connect("nats://localhost:4222") + jetstream_context = nats.jetstream.new(nats_client) + + metadata = {"foo": "bar", "name": "test"} + stream_config = StreamConfig(name="foo_meta", subjects=["FOO.meta"], metadata=metadata) + created_stream = await jetstream_context.create_stream(stream_config) + self.assertEqual(created_stream.cached_info.config.metadata, metadata) + + await nats_client.close() + + async def test_create_stream_with_metadata_reserved_prefix(self): + nats_client = await nats.connect("nats://localhost:4222") + jetstream_context = nats.jetstream.new(nats_client) + + metadata = {"foo": "bar", "_nats_version": "2.10.0"} + stream_config = StreamConfig(name="foo_meta1", subjects=["FOO.meta1"], metadata=metadata) + created_stream = await jetstream_context.create_stream(stream_config) + self.assertEqual(created_stream.cached_info.config.metadata, metadata) + + await nats_client.close() + + async 
def test_create_stream_with_empty_context(self): + nats_client = await nats.connect("nats://localhost:4222") + jetstream_context = nats.jetstream.new(nats_client) + + stream_config = StreamConfig(name="foo_empty_ctx", subjects=["FOO.ctx"]) + created_stream = await jetstream_context.create_stream(stream_config) + self.assertIsNotNone(created_stream) + + await nats_client.close() + + async def test_create_stream_invalid_name(self): + nats_client = await nats.connect("nats://localhost:4222") + jetstream_context = nats.jetstream.new(nats_client) + + with self.assertRaises(InvalidStreamNameError): + invalid_stream_config = StreamConfig(name="foo.123", subjects=["FOO.123"]) + await jetstream_context.create_stream(invalid_stream_config) + + await nats_client.close() + + async def test_create_stream_name_required(self): + nats_client = await nats.connect("nats://localhost:4222") + jetstream_context = nats.jetstream.new(nats_client) + + with self.assertRaises(StreamNameRequiredError): + invalid_stream_config = StreamConfig(name="", subjects=["FOO.123"]) + await jetstream_context.create_stream(invalid_stream_config) + + await nats_client.close() + + async def test_create_stream_name_already_in_use(self): + nats_client = await nats.connect("nats://localhost:4222") + jetstream_context = nats.jetstream.new(nats_client) + + stream_config = StreamConfig(name="foo", subjects=["FOO.123"]) + await jetstream_context.create_stream(stream_config) + with self.assertRaises(StreamNameAlreadyInUseError): + await jetstream_context.create_stream(stream_config) + + await nats_client.close() + + async def test_create_stream_timeout(self): + nats_client = await nats.connect("nats://localhost:4222") + jetstream_context = nats.jetstream.new(nats_client) + + stream_config = StreamConfig(name="foo", subjects=["BAR.123"]) + with self.assertRaises(asyncio.TimeoutError): + await jetstream_context.create_stream(stream_config, timeout=0.00001) + + await nats_client.close() + + # Create or Update Stream 
Tests + async def test_create_or_update_stream_create_ok(self): + nats_client = await nats.connect("nats://localhost:4222") + jetstream_context = nats.jetstream.new(nats_client) + + stream_config = StreamConfig(name="foo", subjects=["FOO.1"]) + created_stream = await jetstream_context.create_stream(stream_config) + self.assertIsNotNone(created_stream) + + await nats_client.close() + + + async def test_create_or_update_stream_invalid_name(self): + nats_client = await nats.connect("nats://localhost:4222") + jetstream_context = nats.jetstream.new(nats_client) + + with self.assertRaises(InvalidStreamNameError): + invalid_stream_config = StreamConfig(name="foo.123", subjects=["FOO-123"]) + await jetstream_context.create_stream(invalid_stream_config) + + await nats_client.close() + + async def test_create_or_update_stream_name_required(self): + nats_client = await nats.connect("nats://localhost:4222") + jetstream_context = nats.jetstream.new(nats_client) + + with self.assertRaises(StreamNameRequiredError): + invalid_stream_config = StreamConfig(name="", subjects=["FOO-1234"]) + await jetstream_context.create_stream(invalid_stream_config) + + await nats_client.close() + + async def test_create_or_update_stream_update_ok(self): + nats_client = await nats.connect("nats://localhost:4222") + jetstream_context = nats.jetstream.new(nats_client) + + original_config = StreamConfig(name="foo", subjects=["FOO.1"]) + await jetstream_context.create_stream(original_config) + updated_config = StreamConfig(name="foo", subjects=["BAR-123"]) + updated_stream = await jetstream_context.update_stream(updated_config) + self.assertEqual(updated_stream.cached_info.config.subjects, ["BAR-123"]) + + await nats_client.close() + + async def test_create_or_update_stream_timeout(self): + nats_client = await nats.connect("nats://localhost:4222") + jetstream_context = nats.jetstream.new(nats_client) + + stream_config = StreamConfig(name="foo", subjects=["BAR-1234"]) + with 
self.assertRaises(asyncio.TimeoutError): + await jetstream_context.create_stream(stream_config, timeout=0.000000001) + + await nats_client.close() + + # Update Stream Tests + async def test_update_stream_existing(self): + nats_client = await nats.connect("nats://localhost:4222") + jetstream_context = nats.jetstream.new(nats_client) + + original_config = StreamConfig(name="foo", subjects=["FOO.123"]) + await jetstream_context.create_stream(original_config) + updated_config = StreamConfig(name="foo", subjects=["BAR.123"]) + updated_stream = await jetstream_context.update_stream(updated_config) + self.assertEqual(updated_stream.cached_info.config.subjects, ["BAR.123"]) + + await nats_client.close() + + async def test_update_stream_add_metadata(self): + nats_client = await nats.connect("nats://localhost:4222") + jetstream_context = nats.jetstream.new(nats_client) + + original_config = StreamConfig(name="foo", subjects=["FOO.123"]) + await jetstream_context.create_stream(original_config) + metadata = {"foo": "bar", "name": "test"} + updated_config = StreamConfig(name="foo", subjects=["BAR.123"], metadata=metadata) + updated_stream = await jetstream_context.update_stream(updated_config) + self.assertEqual(updated_stream.cached_info.config.metadata, metadata) + + await nats_client.close() + + async def test_update_stream_invalid_name(self): + nats_client = await nats.connect("nats://localhost:4222") + jetstream_context = nats.jetstream.new(nats_client) + + with self.assertRaises(InvalidStreamNameError): + invalid_config = StreamConfig(name="foo.123", subjects=["FOO.123"]) + await jetstream_context.update_stream(invalid_config) + + await nats_client.close() + + async def test_update_stream_name_required(self): + nats_client = await nats.connect("nats://localhost:4222") + jetstream_context = nats.jetstream.new(nats_client) + + with self.assertRaises(StreamNameRequiredError): + invalid_config = StreamConfig(name="", subjects=["FOO.123"]) + await 
jetstream_context.update_stream(invalid_config) + + await nats_client.close() + + async def test_update_stream_not_found(self): + nats_client = await nats.connect("nats://localhost:4222") + jetstream_context = nats.jetstream.new(nats_client) + + nonexistent_config = StreamConfig(name="bar", subjects=["FOO.123"]) + with self.assertRaises(StreamNotFoundError): + await jetstream_context.update_stream(nonexistent_config) + + await nats_client.close() + + # Get Stream Tests + async def test_get_stream_existing(self): + nats_client = await nats.connect("nats://localhost:4222") + jetstream_context = nats.jetstream.new(nats_client) + + stream_config = StreamConfig(name="foo", subjects=["FOO.123"]) + await jetstream_context.create_stream(stream_config) + existing_stream = await jetstream_context.stream("foo") + self.assertIsNotNone(existing_stream) + self.assertEqual(existing_stream.cached_info.config.name, "foo") + + await nats_client.close() + + async def test_get_stream_invalid_name(self): + nats_client = await nats.connect("nats://localhost:4222") + jetstream_context = nats.jetstream.new(nats_client) + + with self.assertRaises(Exception): # Replace with specific exception + await jetstream_context.stream("foo.123") + + await nats_client.close() + + async def test_get_stream_name_required(self): + nats_client = await nats.connect("nats://localhost:4222") + jetstream_context = nats.jetstream.new(nats_client) + + with self.assertRaises(Exception): # Replace with specific exception + await jetstream_context.stream("") + + await nats_client.close() + + async def test_get_stream_not_found(self): + nats_client = await nats.connect("nats://localhost:4222") + jetstream_context = nats.jetstream.new(nats_client) + + with self.assertRaises(Exception): # Replace with specific exception + await jetstream_context.stream("bar") + + await nats_client.close() + + # Delete Stream Tests + async def test_delete_stream_existing(self): + nats_client = await 
nats.connect("nats://localhost:4222") + jetstream_context = nats.jetstream.new(nats_client) + + stream_config = StreamConfig(name="foo", subjects=["FOO.123"]) + await jetstream_context.create_stream(stream_config) + await jetstream_context.delete_stream("foo") + with self.assertRaises(StreamNotFoundError): + await jetstream_context.stream("foo") + + await nats_client.close() + + async def test_delete_stream_invalid_name(self): + nats_client = await nats.connect("nats://localhost:4222") + jetstream_context = nats.jetstream.new(nats_client) + + with self.assertRaises(InvalidStreamNameError): + await jetstream_context.delete_stream("foo.123") + + await nats_client.close() + + async def test_delete_stream_name_required(self): + nats_client = await nats.connect("nats://localhost:4222") + jetstream_context = nats.jetstream.new(nats_client) + + with self.assertRaises(Exception): # Replace with specific exception + await jetstream_context.delete_stream("") + + await nats_client.close() + + async def test_delete_stream_not_found(self): + nats_client = await nats.connect("nats://localhost:4222") + jetstream_context = nats.jetstream.new(nats_client) + + with self.assertRaises(Exception): # Replace with specific exception + await jetstream_context.delete_stream("foo") + + await nats_client.close() + + # # List Streams Tests + # async def test_list_streams(self): + # nats_client = await nats.connect("nats://localhost:4222") + # jetstream_context = nats.jetstream.new(nats_client) + + # for i in range(500): + # await jetstream_context.create_stream(StreamConfig(name=f"foo{i}", subjects=[f"FOO.{i}"])) + # streams = [stream async for stream in jetstream_manager.streams()] + # self.assertEqual(len(streams), 500) + + # await nats_client.close() + + # async def test_list_streams_with_subject_filter(self): + # nats_client = await nats.connect("nats://localhost:4222") + # jetstream_context = nats.jetstream.new(nats_client) + + # for i in range(260): + # await 
jetstream_context.create_stream(StreamConfig(name=f"foo{i}", subjects=[f"FOO.{i}"])) + # streams = [stream async for stream in jetstream_context.streams(subject="FOO.123")] + # self.assertEqual(len(streams), 1) + + # await nats_client.close() + + # async def test_list_streams_with_subject_filter_no_match(self): + # nats_client = await nats.connect("nats://localhost:4222") + # jetstream_context = nats.jetstream.new(nats_client) + + # for i in range(100): + # await jetstream_context.create_stream(StreamConfig(name=f"foo{i}", subjects=[f"FOO.{i}"])) + # streams = [stream async for stream in jetstream_manager.streams(subject="FOO.500")] + # self.assertEqual(len(streams), 0) + + # await nats_client.close() + + # async def test_list_streams_timeout(self): + # nats_client = await nats.connect("nats://localhost:4222") + + # with self.assertRaises(asyncio.TimeoutError): + # async with asyncio.timeout(timeout=0.000001): + # streams = [stream async for stream in jetstream_manager.streams()] + + # await nats_client.close() + + # # Stream Names Tests + # async def test_stream_names(self): + # nats_client = await nats.connect("nats://localhost:4222") + # jetstream_context = nats.jetstream.new(nats_client) + + # for i in range(500): + # await jetstream_context.create_stream(StreamConfig(name=f"foo{i}", subjects=[f"FOO.{i}"])) + # names = [name async for name in jetstream_manager.stream_names()] + # self.assertEqual(len(names), 500) + + # await nats_client.close() + + # async def test_stream_names_with_subject_filter(self): + # nats_client = await nats.connect("nats://localhost:4222") + # jetstream_context = nats.jetstream.new(nats_client) + + # for i in range(260): + # await jetstream_context.create_stream(StreamConfig(name=f"foo{i}", subjects=[f"FOO.{i}"])) + # names = [name async for name in jetstream_manager.stream_names(subject="FOO.123")] + # self.assertEqual(len(names), 1) + + # await nats_client.close() + + # async def test_stream_names_with_subject_filter_no_match(self): 
+ # nats_client = await nats.connect("nats://localhost:4222") + # jetstream_context = nats.jetstream.new(nats_client) + + # for i in range(100): + # await jetstream_context.create_stream(StreamConfig(name=f"foo{i}", subjects=[f"FOO.{i}"])) + # names = [name async for name in jetstream_manager.stream_names(subject="FOO.500")] + # self.assertEqual(len(names), 0) + + # await nats_client.close() + + # async def test_stream_names_timeout(self): + # nats_client = await nats.connect("nats://localhost:4222") + + # with self.assertRaises(asyncio.TimeoutError): + # async with asyncio.timeout(timeout=0.000001): + # names = [name async for name in jetstream_manager.stream_names()] + + # await nats_client.close() + + # # Stream by Subject Tests + # async def test_stream_name_by_subject_explicit(self): + # nats_client = await nats.connect("nats://localhost:4222") + # jetstream_context = nats.jetstream.new(nats_client) + + # await jetstream_context.create_stream(StreamConfig(name="foo", subjects=["FOO.*"])) + # stream_name = await jetstream_manager.stream_name_by_subject("FOO.123") + # self.assertEqual(stream_name, "foo") + + # await nats_client.close() + + # async def test_stream_name_by_subject_wildcard(self): + # nats_client = await nats.connect("nats://localhost:4222") + # jetstream_context = nats.jetstream.new(nats_client) + + # await jetstream_context.create_stream(StreamConfig(name="bar", subjects=["BAR.*"])) + # stream_name = await jetstream_manager.stream_name_by_subject("BAR.*") + # self.assertEqual(stream_name, "bar") + + # await nats_client.close() + + # async def test_stream_name_by_subject_not_found(self): + # nats_client = await nats.connect("nats://localhost:4222") + + # with self.assertRaises(Exception): # Replace with specific exception + # await jetstream_manager.stream_name_by_subject("BAR.XYZ") + + # await nats_client.close() + + # async def test_stream_name_by_subject_invalid(self): + # nats_client = await nats.connect("nats://localhost:4222") + + # with 
self.assertRaises(Exception): # Replace with specific exception + # await jetstream_manager.stream_name_by_subject("FOO.>.123") + + # await nats_client.close() + + # async def test_stream_name_by_subject_timeout(self): + # nats_client = await nats.connect("nats://localhost:4222") + + # with self.assertRaises(asyncio.TimeoutError): + # async with asyncio.timeout(timeout=0.000001): + # await jetstream_manager.stream_name_by_subject("FOO.123") + + # await nats_client.close() + + # # Consumer Tests + # async def test_create_or_update_consumer_create_durable_pull(self): + # nats_client = await nats.connect("nats://localhost:4222") + # jetstream_context = nats.jetstream.new(nats_client) + + # await jetstream_context.create_stream(StreamConfig(name="foo", subjects=["FOO.*"])) + # consumer_config = ConsumerConfig(durable_name="dur", ack_policy=ConsumerConfig.AckExplicit) + # consumer = await jetstream_context.create_consumer("foo", consumer_config) + # self.assertIsNotNone(consumer) + # self.assertEqual(consumer.name, "dur") + + # await nats_client.close() + + # async def test_create_or_update_consumer_create_ephemeral_pull(self): + # nats_client = await nats.connect("nats://localhost:4222") + # jetstream_context = nats.jetstream.new(nats_client) + + # await jetstream_context.create_stream(StreamConfig(name="foo", subjects=["FOO.*"])) + # consumer_config = ConsumerConfig(ack_policy=ConsumerConfig.AckExplicit) + # consumer = await jetstream_context.create_consumer("foo", consumer_config) + # self.assertIsNotNone(consumer) + + # await nats_client.close() + + # async def test_create_or_update_consumer_update(self): + # nats_client = await nats.connect("nats://localhost:4222") + # jetstream_context = nats.jetstream.new(nats_client) + + # await jetstream_context.create_stream(StreamConfig(name="foo", subjects=["FOO.*"])) + # consumer_config = ConsumerConfig(durable_name="dur", ack_policy=ConsumerConfig.AckExplicit) + # await jetstream_context.create_consumer("foo", 
consumer_config) + # updated_config = ConsumerConfig(durable_name="dur", ack_policy=ConsumerConfig.AckExplicit, description="test consumer") + # updated_consumer = await jetstream_context.update_consumer("foo", updated_config) + # self.assertEqual(updated_consumer.config.description, "test consumer") + + # await nats_client.close() + + # async def test_create_or_update_consumer_illegal_update(self): + # nats_client = await nats.connect("nats://localhost:4222") + # jetstream_context = nats.jetstream.new(nats_client) + + # await jetstream_context.create_stream(StreamConfig(name="foo", subjects=["FOO.*"])) + # consumer_config = ConsumerConfig(durable_name="dur", ack_policy=ConsumerConfig.AckExplicit) + # await jetstream_context.create_consumer("foo", consumer_config) + # illegal_config = ConsumerConfig(durable_name="dur", ack_policy=ConsumerConfig.AckNone) + # with self.assertRaises(Exception): # Replace with specific exception + # await jetstream_context.update_consumer("foo", illegal_config) + + # await nats_client.close() + + # async def test_create_or_update_consumer_stream_not_found(self): + # nats_client = await nats.connect("nats://localhost:4222") + # jetstream_context = nats.jetstream.new(nats_client) + + # consumer_config = ConsumerConfig(durable_name="dur", ack_policy=ConsumerConfig.AckExplicit) + # with self.assertRaises(Exception): # Replace with specific exception + # await jetstream_context.create_consumer("nonexistent", consumer_config) + + # await nats_client.close() + + # async def test_create_or_update_consumer_invalid_stream_name(self): + # nats_client = await nats.connect("nats://localhost:4222") + # jetstream_context = nats.jetstream.new(nats_client) + + # consumer_config = ConsumerConfig(durable_name="dur", ack_policy=ConsumerConfig.AckExplicit) + # with self.assertRaises(Exception): # Replace with specific exception + # await jetstream_context.create_consumer("foo.1", consumer_config) + + # await nats_client.close() + + # async def 
test_create_or_update_consumer_invalid_durable_name(self): + # nats_client = await nats.connect("nats://localhost:4222") + # jetstream_context = nats.jetstream.new(nats_client) + + # await jetstream_context.create_stream(StreamConfig(name="foo", subjects=["FOO.*"])) + # consumer_config = ConsumerConfig(durable_name="dur.123", ack_policy=ConsumerConfig.AckExplicit) + # with self.assertRaises(Exception): # Replace with specific exception + # await jetstream_context.create_consumer("foo", consumer_config) + + # await nats_client.close() + + # async def test_create_or_update_consumer_timeout(self): + # nats_client = await nats.connect("nats://localhost:4222") + # jetstream_context = nats.jetstream.new(nats_client) + + # await jetstream_context.create_stream(StreamConfig(name="foo", subjects=["FOO.*"])) + # consumer_config = ConsumerConfig(durable_name="dur", ack_policy=ConsumerConfig.AckExplicit) + # with self.assertRaises(asyncio.TimeoutError): + # async with asyncio.timeout(timeout=0.000001): + # await jetstream_context.create_consumer("foo", consumer_config) + + # await nats_client.close() + + # # Get Consumer Tests + # async def test_get_consumer_existing(self): + # nats_client = await nats.connect("nats://localhost:4222") + # jetstream_context = nats.jetstream.new(nats_client) + + # await jetstream_context.create_stream(StreamConfig(name="foo", subjects=["FOO.*"])) + # consumer_config = ConsumerConfig(durable_name="dur", ack_policy=ConsumerConfig.AckExplicit) + # await jetstream_context.create_consumer("foo", consumer_config) + # consumer = await jetstream_context.consumer_info("foo", "dur") + # self.assertIsNotNone(consumer) + # self.assertEqual(consumer.name, "dur") + + # await nats_client.close() + + # async def test_get_consumer_not_found(self): + # nats_client = await nats.connect("nats://localhost:4222") + # jetstream_context = nats.jetstream.new(nats_client) + + # await jetstream_context.create_stream(StreamConfig(name="foo", subjects=["FOO.*"])) + # with 
self.assertRaises(Exception): # Replace with specific exception + # await jetstream_context.consumer_info("foo", "nonexistent") + + # await nats_client.close() + + # async def test_get_consumer_invalid_name(self): + # nats_client = await nats.connect("nats://localhost:4222") + # jetstream_context = nats.jetstream.new(nats_client) + + # await jetstream_context.create_stream(StreamConfig(name="foo", subjects=["FOO.*"])) + # with self.assertRaises(Exception): # Replace with specific exception + # await jetstream_context.consumer_info("foo", "dur.123") + + # await nats_client.close() + + # async def test_get_consumer_stream_not_found(self): + # nats_client = await nats.connect("nats://localhost:4222") + # jetstream_context = nats.jetstream.new(nats_client) + + # with self.assertRaises(Exception): # Replace with specific exception + # await jetstream_context.consumer_info("nonexistent", "dur") + + # await nats_client.close() + + # async def test_get_consumer_invalid_stream_name(self): + # nats_client = await nats.connect("nats://localhost:4222") + # jetstream_context = nats.jetstream.new(nats_client) + + # with self.assertRaises(Exception): # Replace with specific exception + # await jetstream_context.consumer_info("foo.1", "dur") + + # await nats_client.close() + + # async def test_get_consumer_timeout(self): + # nats_client = await nats.connect("nats://localhost:4222") + # jetstream_context = nats.jetstream.new(nats_client) + + # await jetstream_context.create_stream(StreamConfig(name="foo", subjects=["FOO.*"])) + # consumer_config = ConsumerConfig(durable_name="dur", ack_policy=ConsumerConfig.AckExplicit) + # await jetstream_context.create_consumer("foo", consumer_config) + # with self.assertRaises(asyncio.TimeoutError): + # async with asyncio.timeout(timeout=0.000001): + # await jetstream_context.consumer_info("foo", "dur") + + # await nats_client.close() + + # # Delete Consumer Tests + # async def test_delete_consumer_existing(self): + # nats_client = await 
nats.connect("nats://localhost:4222") + # jetstream_context = nats.jetstream.new(nats_client) + + # await jetstream_context.create_stream(StreamConfig(name="foo", subjects=["FOO.*"])) + # consumer_config = ConsumerConfig(durable_name="dur", ack_policy=ConsumerConfig.ACK_EXPLICIT) + # await jetstream_context.create_consumer("foo", consumer_config) + # await jetstream_context.delete_consumer("foo", "dur") + # with self.assertRaises(Exception): # Replace with specific exception + # await jetstream_context.consumer_info("foo", "dur") + + # await nats_client.close() + + # async def test_delete_consumer_not_found(self): + # nats_client = await nats.connect("nats://localhost:4222") + # jetstream_context = nats_client.jetstream() + + # await jetstream_context.create_stream(StreamConfig(name="foo", subjects=["FOO.*"])) + # with self.assertRaises(Exception): # Replace with specific exception + # await jetstream_context.delete_consumer("foo", "nonexistent") + + # await nats_client.close() + + # async def test_delete_consumer_invalid_name(self): + # nats_client = await nats.connect("nats://localhost:4222") + # jetstream_context = nats_client.jetstream() + + # await jetstream_context.create_stream(StreamConfig(name="foo", subjects=["FOO.*"])) + # with self.assertRaises(Exception): # Replace with specific exception + # await jetstream_context.delete_consumer("foo", "dur.123") + + # await nats_client.close() + + # async def test_delete_consumer_stream_not_found(self): + # nats_client = await nats.connect("nats://localhost:4222") + # jetstream_context = nats_client.jetstream() + + # with self.assertRaises(Exception): # Replace with specific exception + # await jetstream_context.delete_consumer("nonexistent", "dur") + + # await nats_client.close() + + # async def test_delete_consumer_invalid_stream_name(self): + # nats_client = await nats.connect("nats://localhost:4222") + # jetstream_context = nats_client.jetstream() + + # with self.assertRaises(Exception): # Replace with specific 
exception + # await jetstream_context.delete_consumer("foo.1", "dur") + + # await nats_client.close() + + # async def test_delete_consumer_timeout(self): + # nats_client = await nats.connect("nats://localhost:4222") + # jetstream_context = nats_client.jetstream() + + # await jetstream_context.create_stream(StreamConfig(name="foo", subjects=["FOO.*"])) + # consumer_config = ConsumerConfig(durable_name="dur", ack_policy=ConsumerConfig.AckExplicit) + # await jetstream_context.create_consumer("foo", consumer_config) + # with self.assertRaises(asyncio.TimeoutError): + # async with asyncio.timeout(timeout=0.000001): + # await jetstream_context.delete_consumer("foo", "dur") + + # await nats_client.close() + + # # JetStream Account Info Tests + # async def test_account_info(self): + # nats_client = await nats.connect("nats://localhost:4222") + # jetstream_context = nats_client.jetstream() + # jetstream_manager = JetStreamManager(nats_client) + + # await jetstream_context.create_stream(StreamConfig(name="foo", subjects=["FOO.*"])) + # info = await jetstream_manager.account_info() + # self.assertIsNotNone(info) + # self.assertGreaterEqual(info.streams, 1) + + # await nats_client.close() + + # Stream Config Tests + async def test_stream_config_matches(self): + nats_client = await nats.connect("nats://localhost:4222") + jetstream_context = nats.jetstream.new(nats_client) + + stream_config = StreamConfig( + name="stream", + subjects=["foo.*"], + retention=RetentionPolicy.LIMITS, + max_consumers=-1, + max_msgs=-1, + max_bytes=-1, + discard=DiscardPolicy.OLD, + max_age=0, + max_msgs_per_subject=-1, + max_msg_size=-1, + storage=StorageType.FILE, + replicas=1, + no_ack=False, + discard_new_per_subject=False, + duplicates=120 * 1000000000, # 120 seconds in nanoseconds + placement=None, + mirror=None, + sources=None, + sealed=False, + deny_delete=False, + deny_purge=False, + allow_rollup=False, + compression=StoreCompression.NONE, + first_sequence=0, + subject_transform=None, + 
republish=None, + allow_direct=False, + mirror_direct=False, + ) + + stream = await jetstream_context.create_stream(stream_config) + self.assertEqual(stream.cached_info.config, stream_config) + + await nats_client.close() + + # # Consumer Config Tests + # async def test_consumer_config_matches(self): + # nats_client = await nats.connect("nats://localhost:4222") + # jetstream_context = nats_client.jetstream() + + # await jetstream_context.create_stream(StreamConfig(name="FOO", subjects=["foo.*"])) + # config = ConsumerConfig( + # durable_name="cons", + # description="test", + # deliver_policy=ConsumerConfig.DeliverAll, + # opt_start_seq=0, + # opt_start_time=None, + # ack_policy=ConsumerConfig.AckExplicit, + # ack_wait=30 * 1000000000, # 30 seconds in nanoseconds + # max_deliver=1, + # filter_subject="", + # replay_policy=ConsumerConfig.ReplayInstant, + # rate_limit_bps=0, + # sample_freq="", + # max_waiting=1, + # max_ack_pending=1000, + # headers_only=False, + # max_batch=0, + # max_expires=0, + # inactive_threshold=0, + # num_replicas=1, + # ) + # consumer = await jetstream_context.create_consumer("FOO", config) + # self.assertEqual(consumer.config, config) + + # await nats_client.close() + + # # JetStream Publish Tests + # async def test_publish(self): + # nats_client = await nats.connect("nats://localhost:4222") + # jetstream_context = nats_client.jetstream() + + # await jetstream_context.create_stream(StreamConfig(name="foo", subjects=["FOO.*"])) + # ack = await jetstream_context.publish("FOO.bar", b"Hello World") + # self.assertIsNotNone(ack) + # self.assertGreater(ack.sequence, 0) + + # await nats_client.close() + + # # JetStream Subscribe Tests + # async def test_subscribe_push(self): + # nats_client = await nats.connect("nats://localhost:4222") + # jetstream_context = nats_client.jetstream() + + # await jetstream_context.create_stream(StreamConfig(name="foo", subjects=["FOO.*"])) + # sub = await jetstream_context.subscribe("FOO.*") + # await 
jetstream_context.publish("FOO.bar", b"Hello World") + # msg = await sub.next_msg() + # self.assertEqual(msg.data, b"Hello World") + + # await nats_client.close() + + # async def test_subscribe_pull(self): + # nats_client = await nats.connect("nats://localhost:4222") + # jetstream_context = nats_client.jetstream() + + # await jetstream_context.create_stream(StreamConfig(name="foo", subjects=["FOO.*"])) + # sub = await jetstream_context.pull_subscribe("FOO.*", "consumer") + # await jetstream_context.publish("FOO.bar", b"Hello World") + # msgs = await sub.fetch(1) + # self.assertEqual(len(msgs), 1) + # self.assertEqual(msgs[0].data, b"Hello World") + + # await nats_client.close() + + # # JetStream Stream Transform Tests + # async def test_stream_transform(self): + # nats_client = await nats.connect("nats://localhost:4222") + # jetstream_context = nats_client.jetstream() + + # origin_config = StreamConfig( + # name="ORIGIN", + # subjects=["test"], + # storage=StorageType.MEMORY, + # subject_transform=SubjectTransformConfig(source=">", destination="transformed.>") + # ) + # await jetstream_context.create_stream(origin_config) + + # await nats_client.publish("test", b"1") + + # sourcing_config = StreamConfig( + # name="SOURCING", + # storage=StreamConfig.MemoryStorage, + # sources=[ + # StreamSource( + # name="ORIGIN", + # subject_transforms=[ + # StreamConfig.SubjectTransform(src=">", dest="fromtest.>") + # ] + # ) + # ] + # ) + # sourcing_stream = await jetstream_context.create_stream(sourcing_config) + + # consumer_config = ConsumerConfig( + # filter_subject="fromtest.>", + # max_deliver=1, + # ) + # consumer = await sourcing_stream.create_consumer(consumer_config) + + # msg = await consumer.next_msg() + # self.assertEqual(msg.subject, "fromtest.transformed.test") + + # await nats_client.close() diff --git a/tests/test_jetstream_consumer.py b/tests/test_jetstream_consumer.py index 14dcf1de..7845e018 100644 --- a/tests/test_jetstream_consumer.py +++ 
b/tests/test_jetstream_consumer.py @@ -1,4 +1,338 @@ -from tests.utils import SingleJetStreamServerTestCase +import unittest +import asyncio +from nats.errors import TimeoutError +import jetstream +from tests.utils import IsolatedJetStreamServerTestCase -class JetStreamConsumerTest(SingleJetStreamServerTestCase): - pass +class TestPullConsumerFetch(IsolatedJetStreamServerTestCase): + async def setUp(self): + await super().setUp() + self.test_subject = "FOO.123" + self.test_msgs = ["m1", "m2", "m3", "m4", "m5"] + + async def publish_test_msgs(self, js): + for msg in self.test_msgs: + await js.publish(self.test_subject, msg.encode()) + + async def test_fetch_no_options(self): + js = await jetstream.JetStream.connect(self.nc) + stream = await js.add_stream(name="foo", subjects=["FOO.*"]) + consumer = await stream.create_consumer(jetstream.PullConsumer, ack_policy=jetstream.AckPolicy.EXPLICIT) + + await self.publish_test_msgs(js) + msgs = await consumer.fetch(5) + + received_msgs = [] + async for msg in msgs.messages(): + received_msgs.append(msg) + + self.assertEqual(len(self.test_msgs), len(received_msgs)) + for i, msg in enumerate(received_msgs): + self.assertEqual(self.test_msgs[i], msg.data.decode()) + + self.assertIsNone(msgs.error()) + + async def test_delete_consumer_during_fetch(self): + js = await jetstream.JetStream.connect(self.nc) + stream = await js.add_stream(name="foo", subjects=["FOO.*"]) + consumer = await stream.create_consumer(jetstream.PullConsumer, ack_policy=jetstream.AckPolicy.EXPLICIT) + + await self.publish_test_msgs(js) + msgs = await consumer.fetch(10) + await asyncio.sleep(0.1) + await stream.delete_consumer(consumer.name) + + received_msgs = [] + async for msg in msgs.messages(): + received_msgs.append(msg) + + self.assertEqual(len(self.test_msgs), len(received_msgs)) + for i, msg in enumerate(received_msgs): + self.assertEqual(self.test_msgs[i], msg.data.decode()) + + self.assertIsInstance(msgs.error(), ConsumerDeletedError) + + async def 
test_fetch_single_messages_one_by_one(self): + js = await jetstream.JetStream.connect(self.nc) + stream = await js.add_stream(name="foo", subjects=["FOO.*"]) + consumer = await stream.create_consumer(jetstream.PullConsumer, ack_policy=jetstream.AckPolicy.EXPLICIT) + + received_msgs = [] + + async def fetch_messages(): + while len(received_msgs) < len(self.test_msgs): + msgs = await consumer.fetch(1) + async for msg in msgs.messages(): + if msg: + received_msgs.append(msg) + if msgs.error(): + return + + task = asyncio.create_task(fetch_messages()) + await asyncio.sleep(0.01) + await self.publish_test_msgs(js) + await task + + self.assertEqual(len(self.test_msgs), len(received_msgs)) + for i, msg in enumerate(received_msgs): + self.assertEqual(self.test_msgs[i], msg.data.decode()) + + async def test_fetch_no_wait_no_messages(self): + js = await jetstream.JetStream.connect(self.nc) + stream = await js.add_stream(name="foo", subjects=["FOO.*"]) + consumer = await stream.create_consumer(jetstream.PullConsumer, ack_policy=jetstream.AckPolicy.EXPLICIT) + + msgs = await consumer.fetch_no_wait(5) + await asyncio.sleep(0.1) + await self.publish_test_msgs(js) + + received_msgs = [] + async for msg in msgs.messages(): + received_msgs.append(msg) + + self.assertEqual(0, len(received_msgs)) + + async def test_fetch_no_wait_some_messages_available(self): + js = await jetstream.JetStream.connect(self.nc) + stream = await js.add_stream(name="foo", subjects=["FOO.*"]) + consumer = await stream.create_consumer(jetstream.PullConsumer, ack_policy=jetstream.AckPolicy.EXPLICIT) + + await self.publish_test_msgs(js) + await asyncio.sleep(0.05) + msgs = await consumer.fetch_no_wait(10) + await asyncio.sleep(0.1) + await self.publish_test_msgs(js) + + received_msgs = [] + async for msg in msgs.messages(): + received_msgs.append(msg) + + self.assertEqual(len(self.test_msgs), len(received_msgs)) + + async def test_fetch_with_timeout(self): + js = await jetstream.JetStream.connect(self.nc) + 
stream = await js.add_stream(name="foo", subjects=["FOO.*"]) + consumer = await stream.create_consumer(jetstream.PullConsumer, ack_policy=jetstream.AckPolicy.EXPLICIT) + + msgs = await consumer.fetch(5, max_wait=0.05) + + received_msgs = [] + async for msg in msgs.messages(): + received_msgs.append(msg) + + self.assertEqual(0, len(received_msgs)) + + async def test_fetch_with_invalid_timeout(self): + js = await jetstream.JetStream.connect(self.nc) + stream = await js.add_stream(name="foo", subjects=["FOO.*"]) + consumer = await stream.create_consumer(jetstream.PullConsumer, ack_policy=jetstream.AckPolicy.EXPLICIT) + + with self.assertRaises(ValueError): + await consumer.fetch(5, max_wait=-0.05) + + async def test_fetch_with_missing_heartbeat(self): + js = await jetstream.JetStream.connect(self.nc) + stream = await js.add_stream(name="foo", subjects=["FOO.*"]) + consumer = await stream.create_consumer(jetstream.PullConsumer, ack_policy=jetstream.AckPolicy.EXPLICIT) + + await self.publish_test_msgs(js) + msgs = await consumer.fetch(5, heartbeat=0.05) + + received_msgs = [] + async for msg in msgs.messages(): + received_msgs.append(msg) + + self.assertEqual(len(self.test_msgs), len(received_msgs)) + self.assertIsNone(msgs.error()) + + msgs = await consumer.fetch(5, heartbeat=0.05, max_wait=0.2) + + received_msgs = [] + async for msg in msgs.messages(): + received_msgs.append(msg) + + self.assertEqual(0, len(received_msgs)) + self.assertIsNone(msgs.error()) + + await stream.delete_consumer(consumer.name) + msgs = await consumer.fetch(5, heartbeat=0.05) + + received_msgs = [] + async for msg in msgs.messages(): + received_msgs.append(msg) + + self.assertEqual(0, len(received_msgs)) + self.assertIsInstance(msgs.error(), TimeoutError) + + async def test_fetch_with_invalid_heartbeat(self): + js = await jetstream.JetStream.connect(self.nc) + stream = await js.add_stream(name="foo", subjects=["FOO.*"]) + consumer = await stream.create_consumer(jetstream.PullConsumer, 
ack_policy=jetstream.AckPolicy.EXPLICIT) + + with self.assertRaises(ValueError): + await consumer.fetch(5, heartbeat=20) + + with self.assertRaises(ValueError): + await consumer.fetch(5, heartbeat=2, max_wait=3) + + with self.assertRaises(ValueError): + await consumer.fetch(5, heartbeat=-2) + +class TestPullConsumerFetchBytes(IsolatedJetStreamServerTestCase): + async def setUp(self): + await super().setUp() + self.test_subject = "FOO.123" + self.msg = b"0123456789" + + async def publish_test_msgs(self, js, count): + for _ in range(count): + await js.publish(self.test_subject, self.msg) + + async def test_fetch_bytes_exact_count(self): + js = await jetstream.JetStream.connect(self.nc) + stream = await js.add_stream(name="foo", subjects=["FOO.*"]) + consumer = await stream.create_consumer(jetstream.PullConsumer, ack_policy=jetstream.AckPolicy.EXPLICIT, name="con") + + await self.publish_test_msgs(js, 5) + msgs = await consumer.fetch_bytes(300) + + received_msgs = [] + async for msg in msgs.messages(): + await msg.ack() + received_msgs.append(msg) + + self.assertEqual(5, len(received_msgs)) + self.assertIsNone(msgs.error()) + + async def test_fetch_bytes_last_msg_does_not_fit(self): + js = await jetstream.JetStream.connect(self.nc) + stream = await js.add_stream(name="foo", subjects=["FOO.*"]) + consumer = await stream.create_consumer(jetstream.PullConsumer, ack_policy=jetstream.AckPolicy.EXPLICIT, name="con") + + await self.publish_test_msgs(js, 5) + msgs = await consumer.fetch_bytes(250) + + received_msgs = [] + async for msg in msgs.messages(): + await msg.ack() + received_msgs.append(msg) + + self.assertEqual(4, len(received_msgs)) + self.assertIsNone(msgs.error()) + + async def test_fetch_bytes_single_msg_too_large(self): + js = await jetstream.JetStream.connect(self.nc) + stream = await js.add_stream(name="foo", subjects=["FOO.*"]) + consumer = await stream.create_consumer(jetstream.PullConsumer, ack_policy=jetstream.AckPolicy.EXPLICIT, name="con") + + await
self.publish_test_msgs(js, 5) + msgs = await consumer.fetch_bytes(30) + + received_msgs = [] + async for msg in msgs.messages(): + await msg.ack() + received_msgs.append(msg) + + self.assertEqual(0, len(received_msgs)) + self.assertIsNone(msgs.error()) + + async def test_fetch_bytes_timeout(self): + js = await jetstream.JetStream.connect(self.nc) + stream = await js.add_stream(name="foo", subjects=["FOO.*"]) + consumer = await stream.create_consumer(jetstream.PullConsumer, ack_policy=jetstream.AckPolicy.EXPLICIT, name="con") + + await self.publish_test_msgs(js, 5) + msgs = await consumer.fetch_bytes(1000, max_wait=0.05) + + received_msgs = [] + async for msg in msgs.messages(): + await msg.ack() + received_msgs.append(msg) + + self.assertEqual(5, len(received_msgs)) + self.assertIsNone(msgs.error()) + + async def test_fetch_bytes_missing_heartbeat(self): + js = await jetstream.JetStream.connect(self.nc) + stream = await js.add_stream(name="foo", subjects=["FOO.*"]) + consumer = await stream.create_consumer(jetstream.PullConsumer, ack_policy=jetstream.AckPolicy.EXPLICIT) + + msgs = await consumer.fetch_bytes(5, heartbeat=0.05, max_wait=0.2) + + received_msgs = [] + async for msg in msgs.messages(): + received_msgs.append(msg) + + self.assertEqual(0, len(received_msgs)) + self.assertIsNone(msgs.error()) + + await stream.delete_consumer(consumer.name) + msgs = await consumer.fetch_bytes(5, heartbeat=0.05) + + received_msgs = [] + async for msg in msgs.messages(): + received_msgs.append(msg) + + self.assertEqual(0, len(received_msgs)) + self.assertIsInstance(msgs.error(), TimeoutError) + + async def test_fetch_bytes_invalid_heartbeat(self): + js = await jetstream.JetStream.connect(self.nc) + stream = await js.add_stream(name="foo", subjects=["FOO.*"]) + consumer = await stream.create_consumer(jetstream.PullConsumer, ack_policy=jetstream.AckPolicy.EXPLICIT) + + with self.assertRaises(ValueError): + await consumer.fetch_bytes(5, heartbeat=20) + + with 
self.assertRaises(ValueError): + await consumer.fetch_bytes(5, heartbeat=2, max_wait=3) + + with self.assertRaises(ValueError): + await consumer.fetch_bytes(5, heartbeat=-2) + +class TestPullConsumerMessages(IsolatedJetStreamServerTestCase): + async def setUp(self): + await super().setUp() + self.test_subject = "FOO.123" + self.test_msgs = ["m1", "m2", "m3", "m4", "m5"] + + async def publish_test_msgs(self, js): + for msg in self.test_msgs: + await js.publish(self.test_subject, msg.encode()) + + async def test_messages_no_options(self): + js = await jetstream.JetStream.connect(self.nc) + stream = await js.add_stream(name="foo", subjects=["FOO.*"]) + consumer = await stream.create_consumer(jetstream.PullConsumer, ack_policy=jetstream.AckPolicy.EXPLICIT) + + await self.publish_test_msgs(js) + msgs = [] + async with consumer.messages() as iterator: + async for msg in iterator: + msgs.append(msg) + if len(msgs) == len(self.test_msgs): + break + + self.assertEqual(len(self.test_msgs), len(msgs)) + for i, msg in enumerate(msgs): + self.assertEqual(self.test_msgs[i], msg.data.decode()) + + async def test_messages_delete_consumer_during_iteration(self): + js = await jetstream.JetStream.connect(self.nc) + stream = await js.add_stream(name="foo", subjects=["FOO.*"]) + consumer = await stream.create_consumer(jetstream.PullConsumer, ack_policy=jetstream.AckPolicy.EXPLICIT) + + await self.publish_test_msgs(js) + msgs = [] + async with consumer.messages() as iterator: + async for msg in iterator: + msgs.append(msg) + if len(msgs) == len(self.test_msgs): + break + + await stream.delete_consumer(consumer.name) + + with self.assertRaises(ConsumerDeletedError): + async with consumer.messages() as iterator: + async for _ in iterator: + pass diff --git a/tests/test_jetstream_errors.py b/tests/test_jetstream_errors.py deleted file mode 100644 index d3c40be2..00000000 --- a/tests/test_jetstream_errors.py +++ /dev/null @@ -1,4 +0,0 @@ -from tests.util import JetStreamServerTestCase - -class
JetStreamErrorsTest(SingleJetStreamServerTestCase): - pass diff --git a/tests/test_jetstream_message.py b/tests/test_jetstream_message.py deleted file mode 100644 index e69de29b..00000000 diff --git a/tests/test_jetstream_publish.py b/tests/test_jetstream_publish.py deleted file mode 100644 index e3706dd7..00000000 --- a/tests/test_jetstream_publish.py +++ /dev/null @@ -1,4 +0,0 @@ -from tests.utils import SingleJetStreamServerTestCase - -class JetStreamPublishTest(SingleJetStreamServerTestCase): - pass diff --git a/tests/test_jetstream_stream.py b/tests/test_jetstream_stream.py index ed232e9a..6921051b 100644 --- a/tests/test_jetstream_stream.py +++ b/tests/test_jetstream_stream.py @@ -1,204 +1,610 @@ -from tests.utils import SingleJetStreamServerTestCase - +import asyncio import nats import nats.jetstream from nats.jetstream.stream import StreamConfig -from nats.jetstream.consumer import ConsumerConfig, AckPolicy -from nats.jetstream.errors import * - -class JetStreamStreamTest(SingleJetStreamServerTestCase): - async def test_create_or_update_consumer(self): - tests = [ - { - "name": "create durable pull consumer", - "consumer_config": ConsumerConfig(durable="dur"), - "should_create": True, - "with_error": None - }, - { - "name": "create ephemeral pull consumer", - "consumer_config": ConsumerConfig(ack_policy=AckPolicy.NONE), - "should_create": True, - "with_error": None - }, - { - "name": "with filter subject", - "consumer_config": ConsumerConfig(filter_subject="FOO.A"), - "should_create": True, - "with_error": None - }, - { - "name": "with multiple filter subjects", - "consumer_config": ConsumerConfig(filter_subjects=["FOO.A", "FOO.B"]), - "should_create": True, - "with_error": None - }, - { - "name": "with multiple filter subjects, overlapping subjects", - "consumer_config": ConsumerConfig(filter_subjects=["FOO.*", "FOO.B"]), - "should_create": False, - "with_error": OverlappingFilterSubjectsError - }, - { - "name": "with multiple filter subjects and filter 
subject provided", - "consumer_config": ConsumerConfig(filter_subjects=["FOO.A", "FOO.B"], filter_subject="FOO.C"), - "should_create": False, - "with_error": DuplicateFilterSubjectsError - }, - { - "name": "with empty subject in filter subjects", - "consumer_config": ConsumerConfig(filter_subjects=["FOO.A", ""]), - "should_create": False, - "with_error": EmptyFilterError - }, - { - "name": "consumer already exists, update", - "consumer_config": ConsumerConfig(durable="dur", description="test consumer"), - "should_create": True, - "with_error": None - }, - { - "name": "consumer already exists, illegal update", - "consumer_config": ConsumerConfig(durable="dur", ack_policy=AckPolicy.NONE), - "should_create": False, - "with_error": ConsumerCreateError - }, - { - "name": "invalid durable name", - "consumer_config": ConsumerConfig(durable="dur.123"), - "should_create": False, - "with_error": InvalidConsumerNameError - }, - ] - - client = await nats.connect() - context = await nats.jetstream.new(client) - stream = await context.create_stream(StreamConfig(name="foo", subjects=["FOO.*"])) - - for test in tests: - with self.subTest(test=test["name"]): - try: - if test["consumer_config"].filter_subject: - subscription = await client.subscribe(f"$JS.API.CONSUMER.CREATE.foo.*.{test['consumer_config'].filter_subject}") - else: - subscription = await client.subscribe("$JS.API.CONSUMER.CREATE.foo.*") - - consumer = await stream.create_or_update_consumer(test["consumer_config"]) - - if test["with_error"]: - self.fail(f"Expected error: {test['with_error']}; got: None") - if test["should_create"]: - self.assertIsNotNone(await subscription.next_msg()) - except Exception as e: - if not test["with_error"]: - self.fail(f"Unexpected error: {e}") - if not isinstance(e, test["with_error"]): - self.fail(f"Expected error: {test['with_error']}; got: {e}") - -async def test_create_consumer(self): - tests = [ - { - "name": "create durable pull consumer", - "consumer_config": 
ConsumerConfig(durable="dur"), - "should_create": True, - "with_error": None - }, - { - "name": "idempotent create, no error", - "consumer_config": ConsumerConfig(durable="dur"), - "should_create": True, - "with_error": None - }, - { - "name": "create ephemeral pull consumer", - "consumer_config": ConsumerConfig(ack_policy=AckPolicy.NONE), - "should_create": True, - "with_error": None - }, - { - "name": "with filter subject", - "consumer_config": ConsumerConfig(filter_subject="FOO.A"), - "should_create": True, - "with_error": None - }, - { - "name": "with metadata", - "consumer_config": ConsumerConfig(metadata={"foo": "bar", "baz": "quux"}), - "should_create": True, - "with_error": None - }, - { - "name": "with multiple filter subjects", - "consumer_config": ConsumerConfig(filter_subjects=["FOO.A", "FOO.B"]), - "should_create": True, - "with_error": None - }, - { - "name": "with multiple filter subjects, overlapping subjects", - "consumer_config": ConsumerConfig(filter_subjects=["FOO.*", "FOO.B"]), - "should_create": False, - "with_error": OverlappingFilterSubjectsError - }, - { - "name": "with multiple filter subjects and filter subject provided", - "consumer_config": ConsumerConfig(filter_subjects=["FOO.A", "FOO.B"], filter_subject="FOO.C"), - "should_create": False, - "with_error": DuplicateFilterSubjectsError - }, - { - "name": "with empty subject in filter subjects", - "consumer_config": ConsumerConfig(filter_subjects=["FOO.A", ""]), - "should_create": False, - "with_error": EmptyFilterError - }, - { - "name": "with invalid filter subject, leading dot", - "consumer_config": ConsumerConfig(filter_subject=".foo"), - "should_create": False, - "with_error": InvalidConsumerNameError - }, - { - "name": "with invalid filter subject, trailing dot", - "consumer_config": ConsumerConfig(filter_subject="foo."), - "should_create": False, - "with_error": InvalidConsumerNameError - }, - { - "name": "consumer already exists, error", - "consumer_config": 
import asyncio

import nats
import nats.jetstream
from nats.jetstream.consumer import AckPolicy, ConsumerConfig, PullConsumer
from nats.jetstream.stream import StreamConfig

from tests.utils import IsolatedJetStreamServerTestCase


class TestJetStreamStream(IsolatedJetStreamServerTestCase):
    """Integration tests for the stream-level consumer and message APIs.

    Each test runs against the isolated JetStream server started by the
    base class and creates a fresh ``test_stream`` bound to ``TEST.*``.
    """

    async def _setup_stream(self, **stream_fields):
        """Connect, create the shared test stream, and register cleanup.

        Returns ``(jetstream_context, test_stream)``.  Extra keyword
        arguments are forwarded to :class:`StreamConfig` (e.g.
        ``description=...``).  The client connection is closed via
        ``addAsyncCleanup`` so it is released even when a test fails
        mid-way (the original inline boilerplate leaked the connection
        on assertion failure).
        """
        nats_client = await nats.connect("nats://localhost:4222")
        self.addAsyncCleanup(nats_client.close)
        jetstream_context = nats.jetstream.new(nats_client)
        config = StreamConfig(name="test_stream", subjects=["TEST.*"], **stream_fields)
        stream = await jetstream_context.create_stream(config)
        return jetstream_context, stream

    # CreateConsumer tests

    async def test_create_durable_pull_consumer(self):
        _, stream = await self._setup_stream()
        consumer = await stream.create_consumer(ConsumerConfig(durable="durable_consumer"))
        self.assertIsNotNone(consumer)
        self.assertEqual(consumer.cached_info.name, "durable_consumer")

    async def test_create_consumer_idempotent(self):
        _, stream = await self._setup_stream()
        config = ConsumerConfig(durable="durable_consumer")
        first = await stream.create_consumer(config)
        second = await stream.create_consumer(config)
        self.assertEqual(first.cached_info.name, second.cached_info.name)

    async def test_create_ephemeral_pull_consumer(self):
        _, stream = await self._setup_stream()
        consumer = await stream.create_consumer(ConsumerConfig(ack_policy=AckPolicy.NONE))
        self.assertIsNotNone(consumer)

    async def test_create_consumer_with_filter_subject(self):
        _, stream = await self._setup_stream()
        consumer = await stream.create_consumer(ConsumerConfig(filter_subject="TEST.A"))
        self.assertIsNotNone(consumer)
        self.assertEqual(consumer.cached_info.config.filter_subject, "TEST.A")

    async def test_create_consumer_with_metadata(self):
        _, stream = await self._setup_stream()
        metadata = {"foo": "bar", "baz": "quux"}
        consumer = await stream.create_consumer(ConsumerConfig(metadata=metadata))
        self.assertEqual(consumer.cached_info.config.metadata, metadata)

    async def test_create_consumer_with_multiple_filter_subjects(self):
        _, stream = await self._setup_stream()
        consumer = await stream.create_consumer(
            ConsumerConfig(filter_subjects=["TEST.A", "TEST.B"])
        )
        self.assertIsNotNone(consumer)
        self.assertEqual(consumer.cached_info.config.filter_subjects, ["TEST.A", "TEST.B"])

    async def test_create_consumer_with_overlapping_filter_subjects(self):
        _, stream = await self._setup_stream()
        with self.assertRaises(Exception):
            await stream.create_consumer(ConsumerConfig(filter_subjects=["TEST.*", "TEST.B"]))

    async def test_create_consumer_with_filter_subjects_and_filter_subject(self):
        # Supplying both the plural and the singular filter field is invalid.
        _, stream = await self._setup_stream()
        config = ConsumerConfig(filter_subjects=["TEST.A", "TEST.B"], filter_subject="TEST.C")
        with self.assertRaises(Exception):
            await stream.create_consumer(config)

    async def test_create_consumer_with_empty_filter_subject(self):
        _, stream = await self._setup_stream()
        with self.assertRaises(Exception):
            await stream.create_consumer(ConsumerConfig(filter_subjects=["TEST.A", ""]))

    async def test_create_consumer_with_invalid_filter_subject_leading_dot(self):
        _, stream = await self._setup_stream()
        with self.assertRaises(Exception):
            await stream.create_consumer(ConsumerConfig(filter_subject=".TEST"))

    async def test_create_consumer_with_invalid_filter_subject_trailing_dot(self):
        _, stream = await self._setup_stream()
        with self.assertRaises(Exception):
            await stream.create_consumer(ConsumerConfig(filter_subject="TEST."))

    async def test_create_consumer_already_exists(self):
        _, stream = await self._setup_stream()
        config = ConsumerConfig(durable="durable_consumer", description="test consumer")
        await stream.create_consumer(config)
        with self.assertRaises(Exception):
            await stream.create_consumer(config)

    # UpdateConsumer tests

    async def test_update_consumer_with_existing_consumer(self):
        _, stream = await self._setup_stream()
        await stream.create_consumer(
            ConsumerConfig(name="test_consumer", description="original description")
        )
        updated = await stream.update_consumer(
            ConsumerConfig(name="test_consumer", description="updated description")
        )
        self.assertEqual(updated.cached_info.config.description, "updated description")

    async def test_update_consumer_with_metadata(self):
        _, stream = await self._setup_stream()
        await stream.create_consumer(ConsumerConfig(name="test_consumer"))
        metadata = {"foo": "bar", "baz": "quux"}
        updated = await stream.update_consumer(
            ConsumerConfig(name="test_consumer", metadata=metadata)
        )
        # FIX: the original read `updated.config.metadata`; every other test
        # reaches the config through `cached_info`, so use that accessor here.
        self.assertEqual(updated.cached_info.config.metadata, metadata)

    async def test_update_consumer_illegal_consumer_update(self):
        # Ack policy cannot be changed on an existing consumer.
        _, stream = await self._setup_stream()
        await stream.create_consumer(
            ConsumerConfig(name="test_consumer", ack_policy=AckPolicy.EXPLICIT)
        )
        with self.assertRaises(Exception):
            await stream.update_consumer(
                ConsumerConfig(name="test_consumer", ack_policy=AckPolicy.NONE)
            )

    async def test_update_non_existent_consumer(self):
        _, stream = await self._setup_stream()
        with self.assertRaises(Exception):
            await stream.update_consumer(ConsumerConfig(name="non_existent_consumer"))

    # Consumer tests

    async def test_get_existing_consumer(self):
        _, stream = await self._setup_stream()
        await stream.create_consumer(ConsumerConfig(durable="durable_consumer"))
        consumer = await stream.consumer("durable_consumer")
        self.assertIsNotNone(consumer)
        self.assertEqual(consumer.cached_info.name, "durable_consumer")

    async def test_get_non_existent_consumer(self):
        _, stream = await self._setup_stream()
        with self.assertRaises(Exception):
            await stream.consumer("non_existent_consumer")

    async def test_get_consumer_with_invalid_name(self):
        _, stream = await self._setup_stream()
        with self.assertRaises(Exception):
            await stream.consumer("invalid.consumer.name")

    # CreateOrUpdateConsumer tests

    async def test_create_or_update_durable_pull_consumer(self):
        _, stream = await self._setup_stream()
        consumer = await stream.create_or_update_consumer(
            ConsumerConfig(durable="durable_consumer")
        )
        self.assertIsInstance(consumer, PullConsumer)
        self.assertEqual(consumer.cached_info.name, "durable_consumer")

    async def test_create_or_update_ephemeral_pull_consumer(self):
        _, stream = await self._setup_stream()
        consumer = await stream.create_or_update_consumer(
            ConsumerConfig(ack_policy=AckPolicy.NONE)
        )
        self.assertIsInstance(consumer, PullConsumer)

    async def test_create_or_update_consumer_with_filter_subject(self):
        _, stream = await self._setup_stream()
        consumer = await stream.create_or_update_consumer(
            ConsumerConfig(filter_subject="TEST.A")
        )
        self.assertIsInstance(consumer, PullConsumer)
        self.assertEqual(consumer.cached_info.config.filter_subject, "TEST.A")

    async def test_create_or_update_consumer_with_multiple_filter_subjects(self):
        _, stream = await self._setup_stream()
        consumer = await stream.create_or_update_consumer(
            ConsumerConfig(filter_subjects=["TEST.A", "TEST.B"])
        )
        self.assertIsInstance(consumer, PullConsumer)
        self.assertEqual(consumer.cached_info.config.filter_subjects, ["TEST.A", "TEST.B"])

    async def test_create_or_update_consumer_with_overlapping_filter_subjects(self):
        _, stream = await self._setup_stream()
        with self.assertRaises(Exception):
            await stream.create_or_update_consumer(
                ConsumerConfig(filter_subjects=["TEST.*", "TEST.B"])
            )

    async def test_create_or_update_consumer_with_filter_subjects_and_filter_subject(self):
        _, stream = await self._setup_stream()
        config = ConsumerConfig(filter_subjects=["TEST.A", "TEST.B"], filter_subject="TEST.C")
        with self.assertRaises(Exception):
            await stream.create_or_update_consumer(config)

    async def test_create_or_update_consumer_with_empty_filter_subject(self):
        _, stream = await self._setup_stream()
        with self.assertRaises(Exception):
            await stream.create_or_update_consumer(
                ConsumerConfig(filter_subjects=["TEST.A", ""])
            )

    async def test_create_or_update_existing_consumer(self):
        _, stream = await self._setup_stream()
        await stream.create_or_update_consumer(ConsumerConfig(durable="durable_consumer"))
        updated = await stream.create_or_update_consumer(
            ConsumerConfig(durable="durable_consumer", description="test consumer")
        )
        self.assertEqual(updated.cached_info.config.description, "test consumer")

    async def test_create_or_with_update_illegal_update_of_existing_consumer(self):
        _, stream = await self._setup_stream()
        await stream.create_or_update_consumer(
            ConsumerConfig(durable="durable_consumer_2", ack_policy=AckPolicy.EXPLICIT)
        )
        with self.assertRaises(Exception):
            await stream.create_or_update_consumer(
                ConsumerConfig(durable="durable_consumer_2", ack_policy=AckPolicy.NONE)
            )

    async def test_create_or_update_consumer_with_invalid_durable(self):
        _, stream = await self._setup_stream()
        with self.assertRaises(Exception):
            await stream.create_or_update_consumer(
                ConsumerConfig(durable="invalid.durable.name")
            )

    # DeleteConsumer tests

    async def test_delete_existing_consumer(self):
        _, stream = await self._setup_stream()
        await stream.create_consumer(ConsumerConfig(durable="durable_consumer"))
        await stream.delete_consumer("durable_consumer")
        with self.assertRaises(Exception):
            await stream.consumer("durable_consumer")

    async def test_delete_non_existent_consumer(self):
        _, stream = await self._setup_stream()
        with self.assertRaises(Exception):
            await stream.delete_consumer("non_existent_consumer")

    async def test_delete_consumer_with_invalid_name(self):
        _, stream = await self._setup_stream()
        with self.assertRaises(Exception):
            await stream.delete_consumer("invalid.consumer.name")

    # StreamInfo tests

    async def test_stream_info_without_options(self):
        _, stream = await self._setup_stream()
        info = await stream.info()
        self.assertIsNotNone(info)
        self.assertEqual(info.config.name, "test_stream")

    async def test_stream_info_with_deleted_details(self):
        context, stream = await self._setup_stream()
        for i in range(10):
            await context.publish("TEST.A", f"msg {i}".encode())
        await stream.delete_message(3)
        await stream.delete_message(5)
        info = await stream.info(deleted_details=True)
        self.assertEqual(info.state.num_deleted, 2)
        self.assertEqual(info.state.deleted, [3, 5])

    async def test_stream_info_with_subject_filter(self):
        context, stream = await self._setup_stream()
        for i in range(10):
            await context.publish("TEST.A", f"msg A {i}".encode())
            await context.publish("TEST.B", f"msg B {i}".encode())
        info = await stream.info(subject_filter="TEST.A")
        self.assertEqual(info.state.subjects.get("TEST.A"), 10)
        self.assertNotIn("TEST.B", info.state.subjects)

    async def test_stream_info_timeout(self):
        _, stream = await self._setup_stream()
        with self.assertRaises(asyncio.TimeoutError):
            await stream.info(timeout=0.00001)

    # NOTE: a SubjectsFilterPaging test (publishing 110k subjects and checking
    # that info() pages through them while cached_info stays empty) existed in
    # the original but is disabled — too expensive to run in CI.

    # StreamCachedInfo test

    async def test_stream_cached_info(self):
        # FIX: the original test had its last four statements duplicated
        # verbatim after `nats_client.close()`; the duplicate block is removed.
        context, stream = await self._setup_stream(description="original")
        self.assertEqual(stream.cached_info.config.name, "test_stream")
        self.assertEqual(stream.cached_info.config.description, "original")

        updated_config = StreamConfig(
            name="test_stream", subjects=["TEST.*"], description="updated"
        )
        await context.update_stream(updated_config)
        # cached_info is a snapshot: it must not change until info() refreshes it.
        self.assertEqual(stream.cached_info.config.description, "original")

        refreshed = await stream.info()
        self.assertEqual(refreshed.config.description, "updated")

    # GetMsg tests

    async def test_get_existing_message(self):
        context, stream = await self._setup_stream()
        await context.publish("TEST.A", b"test message")
        msg = await stream.get_msg(1)
        self.assertEqual(msg.data, b"test message")

    async def test_get_non_existent_message(self):
        _, stream = await self._setup_stream()
        with self.assertRaises(nats.errors.Error):
            await stream.get_msg(100)

    async def test_get_deleted_message(self):
        context, stream = await self._setup_stream()
        await context.publish("TEST.A", b"test message")
        await stream.delete_message(1)
        with self.assertRaises(nats.errors.Error):
            await stream.get_msg(1)

    async def test_get_message_with_headers(self):
        context, stream = await self._setup_stream()
        headers = {"X-Test": "test value"}
        await context.publish("TEST.A", b"test message", headers=headers)
        msg = await stream.get_msg(1)
        self.assertEqual(msg.data, b"test message")
        self.assertEqual(msg.headers.get("X-Test"), "test value")

    async def test_get_message_context_timeout(self):
        context, stream = await self._setup_stream()
        await context.publish("TEST.A", b"test message")
        with self.assertRaises(asyncio.TimeoutError):
            async with asyncio.timeout(0.001):  # requires Python 3.11+
                await stream.get_msg(1)

    # GetLastMsgForSubject tests

    async def test_get_last_message_for_subject(self):
        context, stream = await self._setup_stream()
        for i in range(5):
            await context.publish("TEST.A", f"msg A {i}".encode())
            await context.publish("TEST.B", f"msg B {i}".encode())
        msg = await stream.get_last_msg_for_subject("TEST.A")
        self.assertEqual(msg.data, b"msg A 4")

    async def test_get_last_message_for_wildcard_subject(self):
        context, stream = await self._setup_stream()
        for i in range(5):
            await context.publish(f"TEST.{i}", b"data")
        msg = await stream.get_last_msg_for_subject("TEST.*")
        self.assertEqual(msg.data, b"data")
class IsolatedJetStreamServerTestCase(unittest.IsolatedAsyncioTestCase):
    """Runs each async test against a fresh single-node JetStream server.

    setUp starts one JetStream-enabled NATS daemon on port 4222; tearDown
    stops every server in the pool and removes its on-disk store so tests
    are fully isolated from one another.
    """

    def setUp(self):
        self.server_pool = []
        server = NATSD(port=4222, with_jetstream=True)
        self.server_pool.append(server)
        for natsd in self.server_pool:
            start_natsd(natsd)

    def tearDown(self):
        for natsd in self.server_pool:
            natsd.stop()
            shutil.rmtree(natsd.store_dir)

        # FIX: the original assigned `self._server_pool = None` (leading
        # underscore), a typo that created a new attribute and left the real
        # `server_pool` list holding stopped servers. Clear the attribute
        # that setUp actually creates.
        self.server_pool = []
a/nats/jetstream/publisher.py b/nats/jetstream/publisher.py new file mode 100644 index 00000000..25d3af5f --- /dev/null +++ b/nats/jetstream/publisher.py @@ -0,0 +1,63 @@ +from __future__ import annotations + +from asyncio import Future +from typing import Dict, Any, Protocol, Optional +from dataclasses import dataclass + + +@dataclass +class PublishAck: + """ + Represents the response of publishing a message to JetStream. + """ + + stream: str + """ + The stream name the message was published to. + """ + + sequence: int + """ + The stream sequence number of the message. + """ + + domain: Optional[str] = None + """ + The domain the message was published to. + """ + + duplicate: Optional[bool] = None + """ + Indicates whether the message was a duplicate. + """ + + @classmethod + def from_dict(cls, data: Dict[str, Any]) -> PublishAck: + return cls( + stream=data["stream"], + sequence=data["seq"], + domain=data.get("domain"), + duplicate=data.get("duplicate"), + ) + +class Publisher(Protocol): + """ + A protocol for publishing messages to a stream. + """ + + async def publish(self, subject: str, payload: bytes) -> PublishAck: + """ + Publishes a message with the given payload on the given subject. + """ + ... + + async def publish_async( + self, + subject: str, + payload: bytes = b'', + wait_stall: Optional[float] = None, + ) -> Future[PublishAck]: + """ + Publishes a message with the given payload on the given subject without waiting for a server acknowledgement. + """ + ... 
From 71f8722a819f7d9a0eab44f72876ffbe358d4790 Mon Sep 17 00:00:00 2001 From: Casper Beyer Date: Wed, 18 Sep 2024 11:44:51 +0200 Subject: [PATCH 17/22] Fix stream name already exists test --- tests/test_jetstream.py | 4 ++-- tests/test_jetstream_consumer.py | 4 +++- 2 files changed, 5 insertions(+), 3 deletions(-) diff --git a/tests/test_jetstream.py b/tests/test_jetstream.py index 963f1b8f..4c138bec 100644 --- a/tests/test_jetstream.py +++ b/tests/test_jetstream.py @@ -79,9 +79,9 @@ async def test_create_stream_name_already_in_use(self): jetstream_context = nats.jetstream.new(nats_client) stream_config = StreamConfig(name="foo", subjects=["FOO.123"]) - await jetstream_context.create_stream(stream_config) + created_stream = await jetstream_context.create_stream(stream_config) with self.assertRaises(StreamNameAlreadyInUseError): - await jetstream_context.create_stream(stream_config) + await jetstream_context.create_stream(StreamConfig(name="foo", subjects=["BAR.123"])) await nats_client.close() diff --git a/tests/test_jetstream_consumer.py b/tests/test_jetstream_consumer.py index 7845e018..cce8dec3 100644 --- a/tests/test_jetstream_consumer.py +++ b/tests/test_jetstream_consumer.py @@ -1,7 +1,9 @@ import unittest import asyncio +import nats.jetstream +import nats + from nats.errors import TimeoutError -import jetstream from tests.utils import IsolatedJetStreamServerTestCase class TestPullConsumerFetch(IsolatedJetStreamServerTestCase): From d54c0a6411373ee468a1a47dbb947701c824c482 Mon Sep 17 00:00:00 2001 From: Casper Beyer Date: Wed, 18 Sep 2024 12:22:48 +0200 Subject: [PATCH 18/22] Implement publish --- nats/jetstream/context.py | 19 +++++++++++++++++++ nats/jetstream/publisher.py | 6 ++++++ tests/test_jetstream_consumer.py | 5 ----- 3 files changed, 25 insertions(+), 5 deletions(-) diff --git a/nats/jetstream/context.py b/nats/jetstream/context.py index 4462b23e..2d4bd493 100644 --- a/nats/jetstream/context.py +++ b/nats/jetstream/context.py @@ -17,8 +17,11 @@ from 
nats.aio.client import Client as NATS from typing import Optional +from nats.errors import NoRespondersError + from .api import * from .stream import (Stream, StreamConfig, StreamInfo, StreamInfoLister, StreamManager, StreamNameAlreadyInUseError, StreamNameLister, StreamNotFoundError, StreamSourceMultipleFilterSubjectsNotSupported, StreamSourceNotSupportedError, StreamSubjectTransformNotSupportedError, _validate_stream_name) +from .publisher import (NoStreamResponseError, Publisher, PublishAck) from .consumer import * class Context( @@ -41,6 +44,22 @@ def __init__(self, nats: NATS, timeout: float = 2.0): timeout=timeout, ) + async def publish(self, subject: str, payload: bytes, headers: Optional[Dict] = None, timeout: Optional[float] = None) -> PublishAck: + try: + response = await self._client.request(subject, payload, timeout) + except NoRespondersError as no_responders_error: + raise NoStreamResponseError from no_responders_error + + response_data = json.loads(response.data) + response_error = response_data.get("error") + if response_error: + raise JetStreamError( + code=response_error["err_code"], + description=response_error["description"], + ) + + return PublishAck.from_dict(response_data) + async def create_stream( self, config: StreamConfig, timeout: Optional[float] = None ) -> Stream: diff --git a/nats/jetstream/publisher.py b/nats/jetstream/publisher.py index 25d3af5f..34bb5024 100644 --- a/nats/jetstream/publisher.py +++ b/nats/jetstream/publisher.py @@ -40,6 +40,12 @@ def from_dict(cls, data: Dict[str, Any]) -> PublishAck: duplicate=data.get("duplicate"), ) +class NoStreamResponseError(Exception): + """ + Raised when no response is received from the JetStream server. + """ + pass + class Publisher(Protocol): """ A protocol for publishing messages to a stream. 
diff --git a/tests/test_jetstream_consumer.py b/tests/test_jetstream_consumer.py index cce8dec3..deedae39 100644 --- a/tests/test_jetstream_consumer.py +++ b/tests/test_jetstream_consumer.py @@ -7,11 +7,6 @@ from tests.utils import IsolatedJetStreamServerTestCase class TestPullConsumerFetch(IsolatedJetStreamServerTestCase): - async def setUp(self): - await super().setUp() - self.test_subject = "FOO.123" - self.test_msgs = ["m1", "m2", "m3", "m4", "m5"] - async def publish_test_msgs(self, js): for msg in self.test_msgs: await js.publish(self.test_subject, msg.encode()) From f6d6e8a11875c0e695f3d86fe7e06b3b13a4c640 Mon Sep 17 00:00:00 2001 From: Casper Beyer Date: Wed, 18 Sep 2024 12:26:02 +0200 Subject: [PATCH 19/22] Enable publish test --- tests/test_jetstream.py | 18 +++++++++--------- 1 file changed, 9 insertions(+), 9 deletions(-) diff --git a/tests/test_jetstream.py b/tests/test_jetstream.py index 4c138bec..01a3457e 100644 --- a/tests/test_jetstream.py +++ b/tests/test_jetstream.py @@ -726,17 +726,17 @@ async def test_stream_config_matches(self): # await nats_client.close() - # # JetStream Publish Tests - # async def test_publish(self): - # nats_client = await nats.connect("nats://localhost:4222") - # jetstream_context = nats_client.jetstream() + # JetStream Publish Tests + async def test_publish(self): + nats_client = await nats.connect("nats://localhost:4222") + jetstream_context = nats.jetstream.new(nats_client) - # await jetstream_context.create_stream(StreamConfig(name="foo", subjects=["FOO.*"])) - # ack = await jetstream_context.publish("FOO.bar", b"Hello World") - # self.assertIsNotNone(ack) - # self.assertGreater(ack.sequence, 0) + await jetstream_context.create_stream(StreamConfig(name="foo", subjects=["FOO.*"])) + ack = await jetstream_context.publish("FOO.bar", b"Hello World") + self.assertIsNotNone(ack) + self.assertGreater(ack.sequence, 0) - # await nats_client.close() + await nats_client.close() # # JetStream Subscribe Tests # async def 
test_subscribe_push(self): From cbfc5adfb5eb2af39b9ae119227eba3ebda5fbdb Mon Sep 17 00:00:00 2001 From: Casper Beyer Date: Wed, 18 Sep 2024 12:35:57 +0200 Subject: [PATCH 20/22] Fix expected test errors --- tests/test_jetstream.py | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/tests/test_jetstream.py b/tests/test_jetstream.py index 01a3457e..19de95a0 100644 --- a/tests/test_jetstream.py +++ b/tests/test_jetstream.py @@ -222,7 +222,7 @@ async def test_get_stream_invalid_name(self): nats_client = await nats.connect("nats://localhost:4222") jetstream_context = nats.jetstream.new(nats_client) - with self.assertRaises(Exception): # Replace with specific exception + with self.assertRaises(InvalidStreamNameError): await jetstream_context.stream("foo.123") await nats_client.close() @@ -231,7 +231,7 @@ async def test_get_stream_name_required(self): nats_client = await nats.connect("nats://localhost:4222") jetstream_context = nats.jetstream.new(nats_client) - with self.assertRaises(Exception): # Replace with specific exception + with self.assertRaises(StreamNameRequiredError): await jetstream_context.stream("") await nats_client.close() @@ -240,7 +240,7 @@ async def test_get_stream_not_found(self): nats_client = await nats.connect("nats://localhost:4222") jetstream_context = nats.jetstream.new(nats_client) - with self.assertRaises(Exception): # Replace with specific exception + with self.assertRaises(StreamNotFoundError): await jetstream_context.stream("bar") await nats_client.close() @@ -271,7 +271,7 @@ async def test_delete_stream_name_required(self): nats_client = await nats.connect("nats://localhost:4222") jetstream_context = nats.jetstream.new(nats_client) - with self.assertRaises(Exception): # Replace with specific exception + with self.assertRaises(StreamNameRequiredError): await jetstream_context.delete_stream("") await nats_client.close() @@ -280,7 +280,7 @@ async def test_delete_stream_not_found(self): nats_client = await 
nats.connect("nats://localhost:4222") jetstream_context = nats.jetstream.new(nats_client) - with self.assertRaises(Exception): # Replace with specific exception + with self.assertRaises(StreamNotFoundError): await jetstream_context.delete_stream("foo") await nats_client.close() From 1a2dffd022e131e8ab83feeca175b3676065b22e Mon Sep 17 00:00:00 2001 From: Casper Beyer Date: Wed, 18 Sep 2024 12:42:07 +0200 Subject: [PATCH 21/22] Fix create consumer already exists test --- tests/test_jetstream_stream.py | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/tests/test_jetstream_stream.py b/tests/test_jetstream_stream.py index 6921051b..7c33a321 100644 --- a/tests/test_jetstream_stream.py +++ b/tests/test_jetstream_stream.py @@ -150,11 +150,10 @@ async def test_create_consumer_already_exists(self): stream_config = StreamConfig(name="test_stream", subjects=["TEST.*"]) test_stream = await jetstream_context.create_stream(stream_config) - consumer_config = ConsumerConfig(durable="durable_consumer", description="test consumer") - await test_stream.create_consumer(consumer_config) + await test_stream.create_consumer(ConsumerConfig(durable="durable_consumer")) with self.assertRaises(Exception): - await test_stream.create_consumer(consumer_config) + await test_stream.create_consumer(ConsumerConfig(durable="durable_consumer", description="test consumer")) await nats_client.close() From b6a742ac2e2a584e61a500cab52c1a292c062956 Mon Sep 17 00:00:00 2001 From: Casper Beyer Date: Wed, 18 Sep 2024 12:43:41 +0200 Subject: [PATCH 22/22] Fix test --- tests/test_jetstream_stream.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/test_jetstream_stream.py b/tests/test_jetstream_stream.py index 7c33a321..14e783ad 100644 --- a/tests/test_jetstream_stream.py +++ b/tests/test_jetstream_stream.py @@ -182,7 +182,7 @@ async def test_update_consumer_with_metadata(self): await test_stream.create_consumer(original_config) updated_config = 
ConsumerConfig(name="test_consumer", metadata={"foo": "bar", "baz": "quux"}) updated_consumer = await test_stream.update_consumer(updated_config) - self.assertEqual(updated_consumer.config.metadata, {"foo": "bar", "baz": "quux"}) + self.assertEqual(updated_consumer.cached_info.config.metadata, {"foo": "bar", "baz": "quux"}) await nats_client.close()