From c645a8bc9d1da54868eaeb41001edfbff1a0a1bf Mon Sep 17 00:00:00 2001 From: "fern-api[bot]" <115122769+fern-api[bot]@users.noreply.github.com> Date: Tue, 27 Jan 2026 21:51:07 +0000 Subject: [PATCH] SDK regeneration --- .fern/metadata.json | 6 +- poetry.lock | 6 +- pyproject.toml | 2 +- reference.md | 237 ++++++++- src/anduril/__init__.py | 54 +- src/anduril/core/client_wrapper.py | 8 +- src/anduril/core/pydantic_utilities.py | 201 ++++++- src/anduril/entities/raw_client.py | 10 +- .../types/stream_entities_response.py | 2 +- src/anduril/tasks/__init__.py | 39 +- src/anduril/tasks/client.py | 259 +++++++++ src/anduril/tasks/raw_client.py | 490 +++++++++++++++++- src/anduril/tasks/types/__init__.py | 33 +- .../tasks/types/stream_as_agent_response.py | 64 +++ .../tasks/types/stream_tasks_response.py | 55 ++ .../types/task_stream_request_task_type.py | 8 + ...ream_request_task_type_task_type_prefix.py | 26 + ...stream_request_task_type_task_type_urls.py | 26 + src/anduril/types/__init__.py | 21 + src/anduril/types/agent_stream_event.py | 20 + src/anduril/types/agent_task_request.py | 41 ++ src/anduril/types/heartbeat_object.py | 5 +- src/anduril/types/stream_heartbeat.py | 18 + src/anduril/types/task_event_data.py | 36 ++ .../types/task_event_data_task_event.py | 42 ++ .../task_event_data_task_event_event_type.py | 8 + src/anduril/types/task_stream_event.py | 20 + 27 files changed, 1696 insertions(+), 41 deletions(-) create mode 100644 src/anduril/tasks/types/stream_as_agent_response.py create mode 100644 src/anduril/tasks/types/stream_tasks_response.py create mode 100644 src/anduril/tasks/types/task_stream_request_task_type.py create mode 100644 src/anduril/tasks/types/task_stream_request_task_type_task_type_prefix.py create mode 100644 src/anduril/tasks/types/task_stream_request_task_type_task_type_urls.py create mode 100644 src/anduril/types/agent_stream_event.py create mode 100644 src/anduril/types/agent_task_request.py create mode 100644 src/anduril/types/stream_heartbeat.py create mode 100644 src/anduril/types/task_event_data.py create mode 100644 src/anduril/types/task_event_data_task_event.py create mode 100644 src/anduril/types/task_event_data_task_event_event_type.py create mode 100644 src/anduril/types/task_stream_event.py diff --git a/.fern/metadata.json b/.fern/metadata.json index f13c46b..84dffbc 100644 --- a/.fern/metadata.json +++ b/.fern/metadata.json @@ -1,10 +1,10 @@ { - "cliVersion": "3.40.0", + "cliVersion": "3.51.2", "generatorName": "fernapi/fern-python-sdk", - "generatorVersion": "4.46.14", + "generatorVersion": "4.51.1", "generatorConfig": { "client_class_name": "Lattice", "package_name": "anduril" }, - "sdkVersion": "4.1.0" + "sdkVersion": "4.2.0" } \ No newline at end of file diff --git a/poetry.lock b/poetry.lock index ea0fda4..2f8666b 100644 --- a/poetry.lock +++ b/poetry.lock @@ -236,13 +236,13 @@ files = [ [[package]] name = "packaging" -version = "25.0" +version = "26.0" description = "Core utilities for Python packages" optional = false python-versions = ">=3.8" files = [ - {file = "packaging-25.0-py3-none-any.whl", hash = "sha256:29572ef2b1f17581046b3a2227d5c611fb25ec70ca1ba8554b24b0e69331a484"}, - {file = "packaging-25.0.tar.gz", hash = "sha256:d443872c98d677bf60f6a1f2f8c1cb748e8fe762d2bf9d3148b5599295b0fc4f"}, + {file = "packaging-26.0-py3-none-any.whl", hash = "sha256:b36f1fef9334a5588b4166f8bcd26a14e521f2b55e6b9de3aaa80d3ff7a37529"}, + {file = "packaging-26.0.tar.gz", hash = "sha256:00243ae351a257117b6a241061796684b084ed1c516a08c48a3f7e147a9d80b4"}, ] 
[[package]] diff --git a/pyproject.toml b/pyproject.toml index fa6ea2f..ed52f5a 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -4,7 +4,7 @@ dynamic = ["version"] [tool.poetry] name = "anduril-lattice-sdk" -version = "4.1.0" +version = "4.2.0" description = "HTTP clients for the Anduril Lattice SDK" readme = "README.md" authors = [ diff --git a/reference.md b/reference.md index f6472fc..2f31df7 100644 --- a/reference.md +++ b/reference.md @@ -1,6 +1,6 @@ # Reference ## Entities -
client.entities.publish_entity(...) -> AsyncHttpResponse[Entity] +
client.entities.publish_entity(...) -> AsyncHttpResponse[Entity]
@@ -405,7 +405,7 @@ Describes an entity's security classification levels at an overall classificatio
-
client.entities.get_entity(...) -> AsyncHttpResponse[Entity] +
client.entities.get_entity(...) -> AsyncHttpResponse[Entity]
@@ -462,7 +462,7 @@ client.entities.get_entity(
-
client.entities.override_entity(...) -> AsyncHttpResponse[Entity] +
client.entities.override_entity(...) -> AsyncHttpResponse[Entity]
@@ -567,7 +567,7 @@ the object and ignore all other fields.
-
client.entities.remove_entity_override(...) -> AsyncHttpResponse[Entity] +
client.entities.remove_entity_override(...) -> AsyncHttpResponse[Entity]
@@ -647,7 +647,7 @@ client.entities.remove_entity_override(
-
client.entities.long_poll_entity_events(...) -> AsyncHttpResponse[EntityEventResponse] +
client.entities.long_poll_entity_events(...) -> AsyncHttpResponse[EntityEventResponse]
@@ -734,7 +734,7 @@ client.entities.long_poll_entity_events(
-
client.entities.stream_entities(...) -> typing.AsyncIterator[ +
client.entities.stream_entities(...) -> typing.AsyncIterator[ AsyncHttpResponse[typing.AsyncIterator[StreamEntitiesResponse]] ]
@@ -842,7 +842,7 @@ for chunk in response.data:
## Tasks -
client.tasks.create_task(...) -> AsyncHttpResponse[Task] +
client.tasks.create_task(...) -> AsyncHttpResponse[Task]
@@ -987,7 +987,7 @@ task. For example, an entity Objective, an entity Keep In Zone, etc.
-
client.tasks.get_task(...) -> AsyncHttpResponse[Task] +
client.tasks.get_task(...) -> AsyncHttpResponse[Task]
@@ -1065,7 +1065,7 @@ client.tasks.get_task(
-
client.tasks.update_task_status(...) -> AsyncHttpResponse[Task] +
client.tasks.update_task_status(...) -> AsyncHttpResponse[Task]
@@ -1175,7 +1175,7 @@ is known are considered stale and ignored.
-
client.tasks.query_tasks(...) -> AsyncHttpResponse[TaskQueryResults] +
client.tasks.query_tasks(...) -> AsyncHttpResponse[TaskQueryResults]
@@ -1286,7 +1286,113 @@ any of the remaining parameters, but not both.
-
client.tasks.listen_as_agent(...) -> AsyncHttpResponse[AgentRequest] +
client.tasks.stream_tasks(...) -> typing.AsyncIterator[ + AsyncHttpResponse[typing.AsyncIterator[StreamTasksResponse]] +] +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Establishes a server streaming connection that delivers task updates in real-time using Server-Sent Events (SSE). + +The stream delivers all existing non-terminal tasks when first connected, followed by real-time +updates for task creation and status changes. Additionally, heartbeat messages are sent periodically to maintain the connection. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from anduril import Lattice + +client = Lattice( + client_id="YOUR_CLIENT_ID", + client_secret="YOUR_CLIENT_SECRET", +) +response = client.tasks.stream_tasks() +for chunk in response.data: + print(chunk) + +``` 
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**heartbeat_interval_ms:** `typing.Optional[int]` — The time interval, in milliseconds, that determines the frequency at which to send heartbeat events. Defaults to 30000 (30 seconds). + +
+
+ +
+
+ +**rate_limit:** `typing.Optional[int]` + +The minimum time interval, in milliseconds, between successive updates for the same task. +If set, the value must be >= 250. + 
+
+ +
+
+ +**exclude_preexisting_tasks:** `typing.Optional[bool]` + +Optional flag to only include tasks created or updated after the stream is initiated, excluding any preexisting tasks. +If unset or false, the stream includes new tasks and task updates as well as all preexisting tasks. + 
+
+ +
+
+ +**task_type:** `typing.Optional[TaskStreamRequestTaskType]` — Optional filter that only returns tasks with specific types. If not provided, all task types will be streamed. + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
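Taken together, these parameters support a filtered, rate-limited consumer. Below is a minimal sketch of distinguishing the two stream variants; the `event` discriminator and `task_event` field follow the `StreamTasksResponse` union added later in this patch, while the parameter values are illustrative:

```python
from anduril import Lattice

client = Lattice(
    client_id="YOUR_CLIENT_ID",
    client_secret="YOUR_CLIENT_SECRET",
)

# Skip preexisting tasks and allow at most one update per task every 500 ms.
for chunk in client.tasks.stream_tasks(
    rate_limit=500,
    exclude_preexisting_tasks=True,
):
    if chunk.event == "heartbeat":
        # Keep-alive only; carries an optional timestamp and no task payload.
        continue
    # Otherwise chunk.event == "task_event" and the update is in chunk.task_event.
    print(chunk.task_event)
```
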
client.tasks.listen_as_agent(...) -> AsyncHttpResponse[AgentRequest]
@@ -1370,12 +1476,109 @@ client.tasks.listen_as_agent()
+ + +
+ +
client.tasks.stream_as_agent(...) -> typing.AsyncIterator[ + AsyncHttpResponse[typing.AsyncIterator[StreamAsAgentResponse]] +] +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Establishes a server streaming connection that delivers tasks to taskable agents for execution +using Server-Sent Events (SSE). + +This method creates a connection from the Tasks API to an agent that streams relevant tasks to the listening agent. The agent receives a stream of tasks that match the entities specified by the tasks' selector criteria. + +The stream delivers three types of requests: +- `ExecuteRequest`: Contains a new task for the agent to execute +- `CancelRequest`: Indicates a task should be canceled +- `CompleteRequest`: Indicates a task should be completed + +Additionally, heartbeat messages are sent periodically to maintain the connection. + +This is the recommended method for taskable agents to receive and process tasks in real time. +Agents should maintain a connection to this stream and process incoming tasks according to their capabilities. + +When an agent receives a task, it should update the task status using the `UpdateStatus` endpoint +to provide progress information back to the Tasks API. 
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from anduril import Lattice + +client = Lattice( + client_id="YOUR_CLIENT_ID", + client_secret="YOUR_CLIENT_SECRET", +) +response = client.tasks.stream_as_agent() +for chunk in response.data: + print(chunk) + +``` 
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**agent_selector:** `typing.Optional[EntityIdsSelector]` — The selector criteria to determine which tasks the agent receives. + +
+
+ +
+
+ +**heartbeat_interval_ms:** `typing.Optional[int]` — The time interval, in milliseconds, that determines the frequency at which to send heartbeat events. Defaults to 30000 (30 seconds). + 
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ +
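For a taskable agent, the three request variants map naturally onto an execute/cancel/complete handler loop. The sketch below is illustrative: only the `StreamAsAgentResponse` field names come from this patch, while the import path for `EntityIdsSelector`, its `entity_ids` parameter, and the handler wiring are assumptions:

```python
from anduril import Lattice, EntityIdsSelector  # EntityIdsSelector assumed re-exported at the package root

client = Lattice(
    client_id="YOUR_CLIENT_ID",
    client_secret="YOUR_CLIENT_SECRET",
)

# Hypothetical selector: receive tasks addressed to this agent's entity ID.
selector = EntityIdsSelector(entity_ids=["my-agent-entity-id"])

for chunk in client.tasks.stream_as_agent(agent_selector=selector):
    if chunk.event == "heartbeat":
        continue  # keep-alive only
    # chunk.event == "agent_request": one of the three request fields is set.
    if chunk.execute_request is not None:
        ...  # start executing, then report progress via client.tasks.update_task_status(...)
    elif chunk.cancel_request is not None:
        ...  # stop work on the referenced task
    elif chunk.complete_request is not None:
        ...  # finalize the referenced task
```
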
## Objects -
client.objects.list_objects(...) -> AsyncPager[PathMetadata, ListResponse] +
client.objects.list_objects(...) -> AsyncPager[PathMetadata, ListResponse]
@@ -1473,7 +1676,7 @@ for page in response.iter_pages():
-
client.objects.get_object(...) -> typing.AsyncIterator[AsyncHttpResponse[typing.AsyncIterator[bytes]]] +
client.objects.get_object(...) -> typing.AsyncIterator[AsyncHttpResponse[typing.AsyncIterator[bytes]]]
@@ -1560,7 +1763,7 @@ client.objects.get_object(
-
client.objects.upload_object(...) -> AsyncHttpResponse[PathMetadata] +
client.objects.upload_object(...) -> AsyncHttpResponse[PathMetadata]
@@ -1637,7 +1840,7 @@ client.objects.upload_object()
-
client.objects.delete_object(...) -> AsyncHttpResponse[None] +
client.objects.delete_object(...) -> AsyncHttpResponse[None]
@@ -1708,7 +1911,7 @@ client.objects.delete_object(
-
client.objects.get_object_metadata(...) -> AsyncHttpResponse[None] +
client.objects.get_object_metadata(...) -> AsyncHttpResponse[None]
@@ -1780,7 +1983,7 @@ client.objects.get_object_metadata(
## oauth -
client.oauth.get_token(...) -> AsyncHttpResponse[GetTokenResponse] +
client.oauth.get_token(...) -> AsyncHttpResponse[GetTokenResponse]
diff --git a/src/anduril/__init__.py b/src/anduril/__init__.py index be7280d..62f6976 100644 --- a/src/anduril/__init__.py +++ b/src/anduril/__init__.py @@ -12,6 +12,8 @@ ActiveTarget, Agent, AgentRequest, + AgentStreamEvent, + AgentTaskRequest, Alert, AlertCondition, AlertLevel, @@ -162,6 +164,7 @@ Sensors, Signal, Status, + StreamHeartbeat, Supplies, Symbology, System, @@ -173,9 +176,13 @@ TaskEntity, TaskError, TaskErrorCode, + TaskEventData, + TaskEventDataTaskEvent, + TaskEventDataTaskEventEventType, TaskQueryResults, TaskStatus, TaskStatusStatus, + TaskStreamEvent, TaskVersion, Team, Threat, @@ -206,7 +213,20 @@ from .environment import LatticeEnvironment from .oauth import GetTokenResponse from .objects import GetObjectRequestAcceptEncoding - from .tasks import TaskQueryStatusFilter, TaskQueryStatusFilterStatus, TaskQueryUpdateTimeRange + from .tasks import ( + StreamAsAgentResponse, + StreamAsAgentResponse_AgentRequest, + StreamAsAgentResponse_Heartbeat, + StreamTasksResponse, + StreamTasksResponse_Heartbeat, + StreamTasksResponse_TaskEvent, + TaskQueryStatusFilter, + TaskQueryStatusFilterStatus, + TaskQueryUpdateTimeRange, + TaskStreamRequestTaskType, + TaskStreamRequestTaskTypeTaskTypePrefix, + TaskStreamRequestTaskTypeTaskTypeUrls, + ) from .version import __version__ _dynamic_imports: typing.Dict[str, str] = { "AcmDetails": ".types", @@ -214,6 +234,8 @@ "ActiveTarget": ".types", "Agent": ".types", "AgentRequest": ".types", + "AgentStreamEvent": ".types", + "AgentTaskRequest": ".types", "Alert": ".types", "AlertCondition": ".types", "AlertLevel": ".types", @@ -375,9 +397,16 @@ "Sensors": ".types", "Signal": ".types", "Status": ".types", + "StreamAsAgentResponse": ".tasks", + "StreamAsAgentResponse_AgentRequest": ".tasks", + "StreamAsAgentResponse_Heartbeat": ".tasks", "StreamEntitiesResponse": ".entities", "StreamEntitiesResponse_Entity": ".entities", "StreamEntitiesResponse_Heartbeat": ".entities", + "StreamHeartbeat": ".types", + "StreamTasksResponse": ".tasks", + "StreamTasksResponse_Heartbeat": ".tasks", + "StreamTasksResponse_TaskEvent": ".tasks", "Supplies": ".types", "Symbology": ".types", "System": ".types", @@ -389,12 +418,19 @@ "TaskEntity": ".types", "TaskError": ".types", "TaskErrorCode": ".types", + "TaskEventData": ".types", + "TaskEventDataTaskEvent": ".types", + "TaskEventDataTaskEventEventType": ".types", "TaskQueryResults": ".types", "TaskQueryStatusFilter": ".tasks", "TaskQueryStatusFilterStatus": ".tasks", "TaskQueryUpdateTimeRange": ".tasks", "TaskStatus": ".types", "TaskStatusStatus": ".types", + "TaskStreamEvent": ".types", + "TaskStreamRequestTaskType": ".tasks", + "TaskStreamRequestTaskTypeTaskTypePrefix": ".tasks", + "TaskStreamRequestTaskTypeTaskTypeUrls": ".tasks", "TaskVersion": ".types", "Team": ".types", "Threat": ".types", @@ -448,6 +484,8 @@ def __dir__(): "ActiveTarget", "Agent", "AgentRequest", + "AgentStreamEvent", + "AgentTaskRequest", "Alert", "AlertCondition", "AlertLevel", @@ -609,9 +647,16 @@ def __dir__(): "Sensors", "Signal", "Status", + "StreamAsAgentResponse", + "StreamAsAgentResponse_AgentRequest", + "StreamAsAgentResponse_Heartbeat", "StreamEntitiesResponse", "StreamEntitiesResponse_Entity", "StreamEntitiesResponse_Heartbeat", + "StreamHeartbeat", + "StreamTasksResponse", + "StreamTasksResponse_Heartbeat", + "StreamTasksResponse_TaskEvent", "Supplies", "Symbology", "System", @@ -623,12 +668,19 @@ def __dir__(): "TaskEntity", "TaskError", "TaskErrorCode", + "TaskEventData", + "TaskEventDataTaskEvent", + 
"TaskEventDataTaskEventEventType", "TaskQueryResults", "TaskQueryStatusFilter", "TaskQueryStatusFilterStatus", "TaskQueryUpdateTimeRange", "TaskStatus", "TaskStatusStatus", + "TaskStreamEvent", + "TaskStreamRequestTaskType", + "TaskStreamRequestTaskTypeTaskTypePrefix", + "TaskStreamRequestTaskTypeTaskTypeUrls", "TaskVersion", "Team", "Threat", diff --git a/src/anduril/core/client_wrapper.py b/src/anduril/core/client_wrapper.py index d24a8a7..01a22f9 100644 --- a/src/anduril/core/client_wrapper.py +++ b/src/anduril/core/client_wrapper.py @@ -21,11 +21,15 @@ def __init__( self._timeout = timeout def get_headers(self) -> typing.Dict[str, str]: + import platform + headers: typing.Dict[str, str] = { - "User-Agent": "anduril-lattice-sdk/4.1.0", + "User-Agent": "anduril-lattice-sdk/4.2.0", "X-Fern-Language": "Python", + "X-Fern-Runtime": f"python/{platform.python_version()}", + "X-Fern-Platform": f"{platform.system().lower()}/{platform.release()}", "X-Fern-SDK-Name": "anduril-lattice-sdk", - "X-Fern-SDK-Version": "4.1.0", + "X-Fern-SDK-Version": "4.2.0", **(self.get_custom_headers() or {}), } token = self._get_token() diff --git a/src/anduril/core/pydantic_utilities.py b/src/anduril/core/pydantic_utilities.py index 12dc057..789081b 100644 --- a/src/anduril/core/pydantic_utilities.py +++ b/src/anduril/core/pydantic_utilities.py @@ -3,10 +3,34 @@ # nopycln: file import datetime as dt import inspect +import json +import logging from collections import defaultdict -from typing import Any, Callable, ClassVar, Dict, List, Mapping, Optional, Set, Tuple, Type, TypeVar, Union, cast +from dataclasses import asdict +from typing import ( + TYPE_CHECKING, + Any, + Callable, + ClassVar, + Dict, + List, + Mapping, + Optional, + Set, + Tuple, + Type, + TypeVar, + Union, + cast, +) import pydantic +import typing_extensions + +_logger = logging.getLogger(__name__) + +if TYPE_CHECKING: + from .http_sse._models import ServerSentEvent IS_PYDANTIC_V2 = pydantic.VERSION.startswith("2.") @@ -37,6 +61,181 @@ Model = TypeVar("Model", bound=pydantic.BaseModel) +def _get_discriminator_and_variants(type_: Type[Any]) -> Tuple[Optional[str], Optional[List[Type[Any]]]]: + """ + Extract the discriminator field name and union variants from a discriminated union type. + Supports Annotated[Union[...], Field(discriminator=...)] patterns. + Returns (discriminator, variants) or (None, None) if not a discriminated union. 
+ """ + origin = typing_extensions.get_origin(type_) + + if origin is typing_extensions.Annotated: + args = typing_extensions.get_args(type_) + if len(args) >= 2: + inner_type = args[0] + # Check annotations for discriminator + discriminator = None + for annotation in args[1:]: + if hasattr(annotation, "discriminator"): + discriminator = getattr(annotation, "discriminator", None) + break + + if discriminator: + inner_origin = typing_extensions.get_origin(inner_type) + if inner_origin is Union: + variants = list(typing_extensions.get_args(inner_type)) + return discriminator, variants + return None, None + + +def _get_field_annotation(model: Type[Any], field_name: str) -> Optional[Type[Any]]: + """Get the type annotation of a field from a Pydantic model.""" + if IS_PYDANTIC_V2: + fields = getattr(model, "model_fields", {}) + field_info = fields.get(field_name) + if field_info: + return cast(Optional[Type[Any]], field_info.annotation) + else: + fields = getattr(model, "__fields__", {}) + field_info = fields.get(field_name) + if field_info: + return cast(Optional[Type[Any]], field_info.outer_type_) + return None + + +def _find_variant_by_discriminator( + variants: List[Type[Any]], + discriminator: str, + discriminator_value: Any, +) -> Optional[Type[Any]]: + """Find the union variant that matches the discriminator value.""" + for variant in variants: + if not (inspect.isclass(variant) and issubclass(variant, pydantic.BaseModel)): + continue + + disc_annotation = _get_field_annotation(variant, discriminator) + if disc_annotation and is_literal_type(disc_annotation): + literal_args = get_args(disc_annotation) + if literal_args and literal_args[0] == discriminator_value: + return variant + return None + + +def _is_string_type(type_: Type[Any]) -> bool: + """Check if a type is str or Optional[str].""" + if type_ is str: + return True + + origin = typing_extensions.get_origin(type_) + if origin is Union: + args = typing_extensions.get_args(type_) + # Optional[str] = Union[str, None] + non_none_args = [a for a in args if a is not type(None)] + if len(non_none_args) == 1 and non_none_args[0] is str: + return True + + return False + + +def parse_sse_obj(sse: "ServerSentEvent", type_: Type[T]) -> T: + """ + Parse a ServerSentEvent into the appropriate type. + + Handles two scenarios based on where the discriminator field is located: + + 1. Data-level discrimination: The discriminator (e.g., 'type') is inside the 'data' payload. + The union describes the data content, not the SSE envelope. + -> Returns: json.loads(data) parsed into the type + + Example: ChatStreamResponse with discriminator='type' + Input: ServerSentEvent(event="message", data='{"type": "content-delta", ...}', id="") + Output: ContentDeltaEvent (parsed from data, SSE envelope stripped) + + 2. Event-level discrimination: The discriminator (e.g., 'event') is at the SSE event level. + The union describes the full SSE event structure. 
+ -> Returns: SSE envelope with 'data' field JSON-parsed only if the variant expects non-string + + Example: JobStreamResponse with discriminator='event' + Input: ServerSentEvent(event="ERROR", data='{"code": "FAILED", ...}', id="123") + Output: JobStreamResponse_Error with data as ErrorData object + + But for variants where data is str (like STATUS_UPDATE): + Input: ServerSentEvent(event="STATUS_UPDATE", data='{"status": "processing"}', id="1") + Output: JobStreamResponse_StatusUpdate with data as string (not parsed) + + Args: + sse: The ServerSentEvent object to parse + type_: The target discriminated union type + + Returns: + The parsed object of type T + + Note: + This function is only available in SDK contexts where http_sse module exists. + """ + sse_event = asdict(sse) + discriminator, variants = _get_discriminator_and_variants(type_) + + if discriminator is None or variants is None: + # Not a discriminated union - parse the data field as JSON + data_value = sse_event.get("data") + if isinstance(data_value, str) and data_value: + try: + parsed_data = json.loads(data_value) + return parse_obj_as(type_, parsed_data) + except json.JSONDecodeError as e: + _logger.warning( + "Failed to parse SSE data field as JSON: %s, data: %s", + e, + data_value[:100] if len(data_value) > 100 else data_value, + ) + return parse_obj_as(type_, sse_event) + + data_value = sse_event.get("data") + + # Check if discriminator is at the top level (event-level discrimination) + if discriminator in sse_event: + # Case 2: Event-level discrimination + # Find the matching variant to check if 'data' field needs JSON parsing + disc_value = sse_event.get(discriminator) + matching_variant = _find_variant_by_discriminator(variants, discriminator, disc_value) + + if matching_variant is not None: + # Check what type the variant expects for 'data' + data_type = _get_field_annotation(matching_variant, "data") + if data_type is not None and not _is_string_type(data_type): + # Variant expects non-string data - parse JSON + if isinstance(data_value, str) and data_value: + try: + parsed_data = json.loads(data_value) + new_object = dict(sse_event) + new_object["data"] = parsed_data + return parse_obj_as(type_, new_object) + except json.JSONDecodeError as e: + _logger.warning( + "Failed to parse SSE data field as JSON for event-level discrimination: %s, data: %s", + e, + data_value[:100] if len(data_value) > 100 else data_value, + ) + # Either no matching variant, data is string type, or JSON parse failed + return parse_obj_as(type_, sse_event) + + else: + # Case 1: Data-level discrimination + # The discriminator is inside the data payload - extract and parse data only + if isinstance(data_value, str) and data_value: + try: + parsed_data = json.loads(data_value) + return parse_obj_as(type_, parsed_data) + except json.JSONDecodeError as e: + _logger.warning( + "Failed to parse SSE data field as JSON for data-level discrimination: %s, data: %s", + e, + data_value[:100] if len(data_value) > 100 else data_value, + ) + return parse_obj_as(type_, sse_event) + + def parse_obj_as(type_: Type[T], object_: Any) -> T: # convert_and_respect_annotation_metadata is required for TypedDict aliasing. 
# diff --git a/src/anduril/entities/raw_client.py b/src/anduril/entities/raw_client.py index 0c8f710..3e74acf 100644 --- a/src/anduril/entities/raw_client.py +++ b/src/anduril/entities/raw_client.py @@ -11,7 +11,7 @@ from ..core.http_response import AsyncHttpResponse, HttpResponse from ..core.http_sse._api import EventSource from ..core.jsonable_encoder import jsonable_encoder -from ..core.pydantic_utilities import parse_obj_as +from ..core.pydantic_utilities import parse_obj_as, parse_sse_obj from ..core.request_options import RequestOptions from ..core.serialization import convert_and_respect_annotation_metadata from ..errors.bad_request_error import BadRequestError @@ -839,9 +839,9 @@ def _iter(): try: yield typing.cast( StreamEntitiesResponse, - parse_obj_as( + parse_sse_obj( + sse=_sse, type_=StreamEntitiesResponse, # type: ignore - object_=_sse.json(), ), ) except JSONDecodeError as e: @@ -1670,9 +1670,9 @@ async def _iter(): try: yield typing.cast( StreamEntitiesResponse, - parse_obj_as( + parse_sse_obj( + sse=_sse, type_=StreamEntitiesResponse, # type: ignore - object_=_sse.json(), ), ) except JSONDecodeError as e: diff --git a/src/anduril/entities/types/stream_entities_response.py b/src/anduril/entities/types/stream_entities_response.py index 253f92f..a03a2ac 100644 --- a/src/anduril/entities/types/stream_entities_response.py +++ b/src/anduril/entities/types/stream_entities_response.py @@ -18,7 +18,7 @@ class StreamEntitiesResponse_Heartbeat(UniversalBaseModel): """ event: typing.Literal["heartbeat"] = "heartbeat" - timestamp: typing.Optional[dt.datetime] = None + timestamp: typing.Optional[str] = None if IS_PYDANTIC_V2: model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 diff --git a/src/anduril/tasks/__init__.py b/src/anduril/tasks/__init__.py index c069799..a0c35f7 100644 --- a/src/anduril/tasks/__init__.py +++ b/src/anduril/tasks/__init__.py @@ -6,11 +6,33 @@ from importlib import import_module if typing.TYPE_CHECKING: - from .types import TaskQueryStatusFilter, TaskQueryStatusFilterStatus, TaskQueryUpdateTimeRange + from .types import ( + StreamAsAgentResponse, + StreamAsAgentResponse_AgentRequest, + StreamAsAgentResponse_Heartbeat, + StreamTasksResponse, + StreamTasksResponse_Heartbeat, + StreamTasksResponse_TaskEvent, + TaskQueryStatusFilter, + TaskQueryStatusFilterStatus, + TaskQueryUpdateTimeRange, + TaskStreamRequestTaskType, + TaskStreamRequestTaskTypeTaskTypePrefix, + TaskStreamRequestTaskTypeTaskTypeUrls, + ) _dynamic_imports: typing.Dict[str, str] = { + "StreamAsAgentResponse": ".types", + "StreamAsAgentResponse_AgentRequest": ".types", + "StreamAsAgentResponse_Heartbeat": ".types", + "StreamTasksResponse": ".types", + "StreamTasksResponse_Heartbeat": ".types", + "StreamTasksResponse_TaskEvent": ".types", "TaskQueryStatusFilter": ".types", "TaskQueryStatusFilterStatus": ".types", "TaskQueryUpdateTimeRange": ".types", + "TaskStreamRequestTaskType": ".types", + "TaskStreamRequestTaskTypeTaskTypePrefix": ".types", + "TaskStreamRequestTaskTypeTaskTypeUrls": ".types", } @@ -35,4 +57,17 @@ def __dir__(): return sorted(lazy_attrs) -__all__ = ["TaskQueryStatusFilter", "TaskQueryStatusFilterStatus", "TaskQueryUpdateTimeRange"] +__all__ = [ + "StreamAsAgentResponse", + "StreamAsAgentResponse_AgentRequest", + "StreamAsAgentResponse_Heartbeat", + "StreamTasksResponse", + "StreamTasksResponse_Heartbeat", + "StreamTasksResponse_TaskEvent", + "TaskQueryStatusFilter", + "TaskQueryStatusFilterStatus", + 
"TaskQueryUpdateTimeRange", + "TaskStreamRequestTaskType", + "TaskStreamRequestTaskTypeTaskTypePrefix", + "TaskStreamRequestTaskTypeTaskTypeUrls", +] diff --git a/src/anduril/tasks/client.py b/src/anduril/tasks/client.py index c9d23b7..eaeac22 100644 --- a/src/anduril/tasks/client.py +++ b/src/anduril/tasks/client.py @@ -14,8 +14,11 @@ from ..types.task_query_results import TaskQueryResults from ..types.task_status import TaskStatus from .raw_client import AsyncRawTasksClient, RawTasksClient +from .types.stream_as_agent_response import StreamAsAgentResponse +from .types.stream_tasks_response import StreamTasksResponse from .types.task_query_status_filter import TaskQueryStatusFilter from .types.task_query_update_time_range import TaskQueryUpdateTimeRange +from .types.task_stream_request_task_type import TaskStreamRequestTaskType # this is used as the default value for optional parameters OMIT = typing.cast(typing.Any, ...) @@ -294,6 +297,66 @@ def query_tasks( ) return _response.data + def stream_tasks( + self, + *, + heartbeat_interval_ms: typing.Optional[int] = OMIT, + rate_limit: typing.Optional[int] = OMIT, + exclude_preexisting_tasks: typing.Optional[bool] = OMIT, + task_type: typing.Optional[TaskStreamRequestTaskType] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> typing.Iterator[StreamTasksResponse]: + """ + Establishes a server streaming connection that delivers task updates in real-time using Server-Sent Events (SSE). + + The stream delivers all existing non-terminal tasks when first connected, followed by real-time + updates for task creation and status changes. Additionally, heartbeat messages are sent periodically to maintain the connection. + + Parameters + ---------- + heartbeat_interval_ms : typing.Optional[int] + The time interval, in milliseconds, that determines the frequency at which to send heartbeat events. Defaults to 30000 (30 seconds). + + rate_limit : typing.Optional[int] + The time interval, in milliseconds, after an update for a given task before another one will be sent for the same task. + If set, value must be >= 250. + + exclude_preexisting_tasks : typing.Optional[bool] + Optional flag to only include tasks created or updated after the stream is initiated, and not any previous preexisting tasks. + If unset or false, the stream will include any new tasks and task updates, as well as all preexisting tasks. + + task_type : typing.Optional[TaskStreamRequestTaskType] + Optional filter that only returns tasks with specific types. If not provided, all task types will be streamed. + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Yields + ------ + typing.Iterator[StreamTasksResponse] + Returns a stream of task updates as they occur. 
+ + Examples + -------- + from anduril import Lattice + + client = Lattice( + client_id="YOUR_CLIENT_ID", + client_secret="YOUR_CLIENT_SECRET", + ) + response = client.tasks.stream_tasks() + for chunk in response: + yield chunk + """ + with self._raw_client.stream_tasks( + heartbeat_interval_ms=heartbeat_interval_ms, + rate_limit=rate_limit, + exclude_preexisting_tasks=exclude_preexisting_tasks, + task_type=task_type, + request_options=request_options, + ) as r: + yield from r.data + def listen_as_agent( self, *, @@ -348,6 +411,65 @@ def listen_as_agent( _response = self._raw_client.listen_as_agent(agent_selector=agent_selector, request_options=request_options) return _response.data + def stream_as_agent( + self, + *, + agent_selector: typing.Optional[EntityIdsSelector] = OMIT, + heartbeat_interval_ms: typing.Optional[int] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> typing.Iterator[StreamAsAgentResponse]: + """ + Establishes a server streaming connection that delivers tasks to taskable agents for execution + using Server-Sent Events (SSE). + + This method creates a connection from the Tasks API to an agent that streams relevant tasks to the listener agent. The agent receives a stream of tasks that match the entities specified by the tasks' selector criteria. + + The stream delivers three types of requests: + - `ExecuteRequest`: Contains a new task for the agent to execute + - `CancelRequest`: Indicates a task should be canceled + - `CompleteRequest`: Indicates a task should be completed + + Additionally, heartbeat messages are sent periodically to maintain the connection. + + This is recommended method for taskable agents to receive and process tasks in real-time. + Agents should maintain connection to this stream and process incoming tasks according to their capabilities. + + When an agent receives a task, it should update the task status using the `UpdateStatus` endpoint + to provide progress information back to Tasks API. + + Parameters + ---------- + agent_selector : typing.Optional[EntityIdsSelector] + The selector criteria to determine which tasks the agent receives. + + heartbeat_interval_ms : typing.Optional[int] + The time interval, defined in seconds, that determines the frequency at which to send heartbeat events. Defaults to 30s. + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Yields + ------ + typing.Iterator[StreamAsAgentResponse] + Returns a stream of tasks to the agent as they become available. 
+ + Examples + -------- + from anduril import Lattice + + client = Lattice( + client_id="YOUR_CLIENT_ID", + client_secret="YOUR_CLIENT_SECRET", + ) + response = client.tasks.stream_as_agent() + for chunk in response: + yield chunk + """ + with self._raw_client.stream_as_agent( + agent_selector=agent_selector, heartbeat_interval_ms=heartbeat_interval_ms, request_options=request_options + ) as r: + yield from r.data + class AsyncTasksClient: def __init__(self, *, client_wrapper: AsyncClientWrapper): @@ -654,6 +776,75 @@ async def main() -> None: ) return _response.data + async def stream_tasks( + self, + *, + heartbeat_interval_ms: typing.Optional[int] = OMIT, + rate_limit: typing.Optional[int] = OMIT, + exclude_preexisting_tasks: typing.Optional[bool] = OMIT, + task_type: typing.Optional[TaskStreamRequestTaskType] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> typing.AsyncIterator[StreamTasksResponse]: + """ + Establishes a server streaming connection that delivers task updates in real-time using Server-Sent Events (SSE). + + The stream delivers all existing non-terminal tasks when first connected, followed by real-time + updates for task creation and status changes. Additionally, heartbeat messages are sent periodically to maintain the connection. + + Parameters + ---------- + heartbeat_interval_ms : typing.Optional[int] + The time interval, in milliseconds, that determines the frequency at which to send heartbeat events. Defaults to 30000 (30 seconds). + + rate_limit : typing.Optional[int] + The time interval, in milliseconds, after an update for a given task before another one will be sent for the same task. + If set, value must be >= 250. + + exclude_preexisting_tasks : typing.Optional[bool] + Optional flag to only include tasks created or updated after the stream is initiated, and not any previous preexisting tasks. + If unset or false, the stream will include any new tasks and task updates, as well as all preexisting tasks. + + task_type : typing.Optional[TaskStreamRequestTaskType] + Optional filter that only returns tasks with specific types. If not provided, all task types will be streamed. + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Yields + ------ + typing.AsyncIterator[StreamTasksResponse] + Returns a stream of task updates as they occur. 
+ + Examples + -------- + import asyncio + + from anduril import AsyncLattice + + client = AsyncLattice( + client_id="YOUR_CLIENT_ID", + client_secret="YOUR_CLIENT_SECRET", + ) + + + async def main() -> None: + response = await client.tasks.stream_tasks() + async for chunk in response: + yield chunk + + + asyncio.run(main()) + """ + async with self._raw_client.stream_tasks( + heartbeat_interval_ms=heartbeat_interval_ms, + rate_limit=rate_limit, + exclude_preexisting_tasks=exclude_preexisting_tasks, + task_type=task_type, + request_options=request_options, + ) as r: + async for _chunk in r.data: + yield _chunk + async def listen_as_agent( self, *, @@ -717,3 +908,71 @@ async def main() -> None: agent_selector=agent_selector, request_options=request_options ) return _response.data + + async def stream_as_agent( + self, + *, + agent_selector: typing.Optional[EntityIdsSelector] = OMIT, + heartbeat_interval_ms: typing.Optional[int] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> typing.AsyncIterator[StreamAsAgentResponse]: + """ + Establishes a server streaming connection that delivers tasks to taskable agents for execution + using Server-Sent Events (SSE). + + This method creates a connection from the Tasks API to an agent that streams relevant tasks to the listener agent. The agent receives a stream of tasks that match the entities specified by the tasks' selector criteria. + + The stream delivers three types of requests: + - `ExecuteRequest`: Contains a new task for the agent to execute + - `CancelRequest`: Indicates a task should be canceled + - `CompleteRequest`: Indicates a task should be completed + + Additionally, heartbeat messages are sent periodically to maintain the connection. + + This is recommended method for taskable agents to receive and process tasks in real-time. + Agents should maintain connection to this stream and process incoming tasks according to their capabilities. + + When an agent receives a task, it should update the task status using the `UpdateStatus` endpoint + to provide progress information back to Tasks API. + + Parameters + ---------- + agent_selector : typing.Optional[EntityIdsSelector] + The selector criteria to determine which tasks the agent receives. + + heartbeat_interval_ms : typing.Optional[int] + The time interval, defined in seconds, that determines the frequency at which to send heartbeat events. Defaults to 30s. + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Yields + ------ + typing.AsyncIterator[StreamAsAgentResponse] + Returns a stream of tasks to the agent as they become available. + + Examples + -------- + import asyncio + + from anduril import AsyncLattice + + client = AsyncLattice( + client_id="YOUR_CLIENT_ID", + client_secret="YOUR_CLIENT_SECRET", + ) + + + async def main() -> None: + response = await client.tasks.stream_as_agent() + async for chunk in response: + yield chunk + + + asyncio.run(main()) + """ + async with self._raw_client.stream_as_agent( + agent_selector=agent_selector, heartbeat_interval_ms=heartbeat_interval_ms, request_options=request_options + ) as r: + async for _chunk in r.data: + yield _chunk diff --git a/src/anduril/tasks/raw_client.py b/src/anduril/tasks/raw_client.py index ac22cc1..8953d21 100644 --- a/src/anduril/tasks/raw_client.py +++ b/src/anduril/tasks/raw_client.py @@ -1,13 +1,16 @@ # This file was auto-generated by Fern from our API Definition. 
+import contextlib import typing from json.decoder import JSONDecodeError +from logging import error, warning from ..core.api_error import ApiError from ..core.client_wrapper import AsyncClientWrapper, SyncClientWrapper from ..core.http_response import AsyncHttpResponse, HttpResponse +from ..core.http_sse._api import EventSource from ..core.jsonable_encoder import jsonable_encoder -from ..core.pydantic_utilities import parse_obj_as +from ..core.pydantic_utilities import parse_obj_as, parse_sse_obj from ..core.request_options import RequestOptions from ..core.serialization import convert_and_respect_annotation_metadata from ..errors.bad_request_error import BadRequestError @@ -22,8 +25,11 @@ from ..types.task_entity import TaskEntity from ..types.task_query_results import TaskQueryResults from ..types.task_status import TaskStatus +from .types.stream_as_agent_response import StreamAsAgentResponse +from .types.stream_tasks_response import StreamTasksResponse from .types.task_query_status_filter import TaskQueryStatusFilter from .types.task_query_update_time_range import TaskQueryUpdateTimeRange +from .types.task_stream_request_task_type import TaskStreamRequestTaskType # this is used as the default value for optional parameters OMIT = typing.cast(typing.Any, ...) @@ -463,6 +469,126 @@ def query_tasks( raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + @contextlib.contextmanager + def stream_tasks( + self, + *, + heartbeat_interval_ms: typing.Optional[int] = OMIT, + rate_limit: typing.Optional[int] = OMIT, + exclude_preexisting_tasks: typing.Optional[bool] = OMIT, + task_type: typing.Optional[TaskStreamRequestTaskType] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> typing.Iterator[HttpResponse[typing.Iterator[StreamTasksResponse]]]: + """ + Establishes a server streaming connection that delivers task updates in real-time using Server-Sent Events (SSE). + + The stream delivers all existing non-terminal tasks when first connected, followed by real-time + updates for task creation and status changes. Additionally, heartbeat messages are sent periodically to maintain the connection. + + Parameters + ---------- + heartbeat_interval_ms : typing.Optional[int] + The time interval, in milliseconds, that determines the frequency at which to send heartbeat events. Defaults to 30000 (30 seconds). + + rate_limit : typing.Optional[int] + The time interval, in milliseconds, after an update for a given task before another one will be sent for the same task. + If set, value must be >= 250. + + exclude_preexisting_tasks : typing.Optional[bool] + Optional flag to only include tasks created or updated after the stream is initiated, and not any previous preexisting tasks. + If unset or false, the stream will include any new tasks and task updates, as well as all preexisting tasks. + + task_type : typing.Optional[TaskStreamRequestTaskType] + Optional filter that only returns tasks with specific types. If not provided, all task types will be streamed. + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Yields + ------ + typing.Iterator[HttpResponse[typing.Iterator[StreamTasksResponse]]] + Returns a stream of task updates as they occur. 
+ """ + with self._client_wrapper.httpx_client.stream( + "api/v1/tasks/stream", + method="POST", + json={ + "heartbeatIntervalMs": heartbeat_interval_ms, + "rateLimit": rate_limit, + "excludePreexistingTasks": exclude_preexisting_tasks, + "taskType": convert_and_respect_annotation_metadata( + object_=task_type, annotation=TaskStreamRequestTaskType, direction="write" + ), + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) as _response: + + def _stream() -> HttpResponse[typing.Iterator[StreamTasksResponse]]: + try: + if 200 <= _response.status_code < 300: + + def _iter(): + _event_source = EventSource(_response) + for _sse in _event_source.iter_sse(): + if _sse.data == None: + return + try: + yield typing.cast( + StreamTasksResponse, + parse_sse_obj( + sse=_sse, + type_=StreamTasksResponse, # type: ignore + ), + ) + except JSONDecodeError as e: + warning(f"Skipping SSE event with invalid JSON: {e}, sse: {_sse!r}") + except (TypeError, ValueError, KeyError, AttributeError) as e: + warning( + f"Skipping SSE event due to model construction error: {type(e).__name__}: {e}, sse: {_sse!r}" + ) + except Exception as e: + error( + f"Unexpected error processing SSE event: {type(e).__name__}: {e}, sse: {_sse!r}" + ) + return + + return HttpResponse(response=_response, data=_iter()) + _response.read() + if _response.status_code == 400: + raise BadRequestError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 401: + raise UnauthorizedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError( + status_code=_response.status_code, headers=dict(_response.headers), body=_response.text + ) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + yield _stream() + def listen_as_agent( self, *, @@ -555,6 +681,127 @@ def listen_as_agent( raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + @contextlib.contextmanager + def stream_as_agent( + self, + *, + agent_selector: typing.Optional[EntityIdsSelector] = OMIT, + heartbeat_interval_ms: typing.Optional[int] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> typing.Iterator[HttpResponse[typing.Iterator[StreamAsAgentResponse]]]: + """ + Establishes a server streaming connection that delivers tasks to taskable agents for execution + using Server-Sent Events (SSE). + + This method creates a connection from the Tasks API to an agent that streams relevant tasks to the listener agent. The agent receives a stream of tasks that match the entities specified by the tasks' selector criteria. + + The stream delivers three types of requests: + - `ExecuteRequest`: Contains a new task for the agent to execute + - `CancelRequest`: Indicates a task should be canceled + - `CompleteRequest`: Indicates a task should be completed + + Additionally, heartbeat messages are sent periodically to maintain the connection. + + This is recommended method for taskable agents to receive and process tasks in real-time. 
+ Agents should maintain connection to this stream and process incoming tasks according to their capabilities. + + When an agent receives a task, it should update the task status using the `UpdateStatus` endpoint + to provide progress information back to Tasks API. + + Parameters + ---------- + agent_selector : typing.Optional[EntityIdsSelector] + The selector criteria to determine which tasks the agent receives. + + heartbeat_interval_ms : typing.Optional[int] + The time interval, defined in seconds, that determines the frequency at which to send heartbeat events. Defaults to 30s. + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Yields + ------ + typing.Iterator[HttpResponse[typing.Iterator[StreamAsAgentResponse]]] + Returns a stream of tasks to the agent as they become available. + """ + with self._client_wrapper.httpx_client.stream( + "api/v1/agent/stream", + method="POST", + json={ + "agentSelector": convert_and_respect_annotation_metadata( + object_=agent_selector, annotation=EntityIdsSelector, direction="write" + ), + "heartbeatIntervalMs": heartbeat_interval_ms, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) as _response: + + def _stream() -> HttpResponse[typing.Iterator[StreamAsAgentResponse]]: + try: + if 200 <= _response.status_code < 300: + + def _iter(): + _event_source = EventSource(_response) + for _sse in _event_source.iter_sse(): + if _sse.data == None: + return + try: + yield typing.cast( + StreamAsAgentResponse, + parse_sse_obj( + sse=_sse, + type_=StreamAsAgentResponse, # type: ignore + ), + ) + except JSONDecodeError as e: + warning(f"Skipping SSE event with invalid JSON: {e}, sse: {_sse!r}") + except (TypeError, ValueError, KeyError, AttributeError) as e: + warning( + f"Skipping SSE event due to model construction error: {type(e).__name__}: {e}, sse: {_sse!r}" + ) + except Exception as e: + error( + f"Unexpected error processing SSE event: {type(e).__name__}: {e}, sse: {_sse!r}" + ) + return + + return HttpResponse(response=_response, data=_iter()) + _response.read() + if _response.status_code == 400: + raise BadRequestError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 401: + raise UnauthorizedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError( + status_code=_response.status_code, headers=dict(_response.headers), body=_response.text + ) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + yield _stream() + class AsyncRawTasksClient: def __init__(self, *, client_wrapper: AsyncClientWrapper): @@ -992,6 +1239,126 @@ async def query_tasks( raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + @contextlib.asynccontextmanager + async def stream_tasks( + self, + *, + heartbeat_interval_ms: typing.Optional[int] = OMIT, + rate_limit: typing.Optional[int] = OMIT, + exclude_preexisting_tasks: typing.Optional[bool] = OMIT, + task_type: typing.Optional[TaskStreamRequestTaskType] = OMIT, + request_options: 
typing.Optional[RequestOptions] = None, + ) -> typing.AsyncIterator[AsyncHttpResponse[typing.AsyncIterator[StreamTasksResponse]]]: + """ + Establishes a server streaming connection that delivers task updates in real-time using Server-Sent Events (SSE). + + The stream delivers all existing non-terminal tasks when first connected, followed by real-time + updates for task creation and status changes. Additionally, heartbeat messages are sent periodically to maintain the connection. + + Parameters + ---------- + heartbeat_interval_ms : typing.Optional[int] + The time interval, in milliseconds, that determines the frequency at which to send heartbeat events. Defaults to 30000 (30 seconds). + + rate_limit : typing.Optional[int] + The time interval, in milliseconds, after an update for a given task before another one will be sent for the same task. + If set, value must be >= 250. + + exclude_preexisting_tasks : typing.Optional[bool] + Optional flag to only include tasks created or updated after the stream is initiated, and not any previous preexisting tasks. + If unset or false, the stream will include any new tasks and task updates, as well as all preexisting tasks. + + task_type : typing.Optional[TaskStreamRequestTaskType] + Optional filter that only returns tasks with specific types. If not provided, all task types will be streamed. + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Yields + ------ + typing.AsyncIterator[AsyncHttpResponse[typing.AsyncIterator[StreamTasksResponse]]] + Returns a stream of task updates as they occur. + """ + async with self._client_wrapper.httpx_client.stream( + "api/v1/tasks/stream", + method="POST", + json={ + "heartbeatIntervalMs": heartbeat_interval_ms, + "rateLimit": rate_limit, + "excludePreexistingTasks": exclude_preexisting_tasks, + "taskType": convert_and_respect_annotation_metadata( + object_=task_type, annotation=TaskStreamRequestTaskType, direction="write" + ), + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) as _response: + + async def _stream() -> AsyncHttpResponse[typing.AsyncIterator[StreamTasksResponse]]: + try: + if 200 <= _response.status_code < 300: + + async def _iter(): + _event_source = EventSource(_response) + async for _sse in _event_source.aiter_sse(): + if _sse.data == None: + return + try: + yield typing.cast( + StreamTasksResponse, + parse_sse_obj( + sse=_sse, + type_=StreamTasksResponse, # type: ignore + ), + ) + except JSONDecodeError as e: + warning(f"Skipping SSE event with invalid JSON: {e}, sse: {_sse!r}") + except (TypeError, ValueError, KeyError, AttributeError) as e: + warning( + f"Skipping SSE event due to model construction error: {type(e).__name__}: {e}, sse: {_sse!r}" + ) + except Exception as e: + error( + f"Unexpected error processing SSE event: {type(e).__name__}: {e}, sse: {_sse!r}" + ) + return + + return AsyncHttpResponse(response=_response, data=_iter()) + await _response.aread() + if _response.status_code == 400: + raise BadRequestError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 401: + raise UnauthorizedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError( + 
status_code=_response.status_code, headers=dict(_response.headers), body=_response.text + ) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + yield await _stream() + async def listen_as_agent( self, *, @@ -1083,3 +1450,124 @@ async def listen_as_agent( except JSONDecodeError: raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + @contextlib.asynccontextmanager + async def stream_as_agent( + self, + *, + agent_selector: typing.Optional[EntityIdsSelector] = OMIT, + heartbeat_interval_ms: typing.Optional[int] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> typing.AsyncIterator[AsyncHttpResponse[typing.AsyncIterator[StreamAsAgentResponse]]]: + """ + Establishes a server streaming connection that delivers tasks to taskable agents for execution + using Server-Sent Events (SSE). + + This method creates a connection from the Tasks API to an agent that streams relevant tasks to the listener agent. The agent receives a stream of tasks that match the entities specified by the tasks' selector criteria. + + The stream delivers three types of requests: + - `ExecuteRequest`: Contains a new task for the agent to execute + - `CancelRequest`: Indicates a task should be canceled + - `CompleteRequest`: Indicates a task should be completed + + Additionally, heartbeat messages are sent periodically to maintain the connection. + + This is recommended method for taskable agents to receive and process tasks in real-time. + Agents should maintain connection to this stream and process incoming tasks according to their capabilities. + + When an agent receives a task, it should update the task status using the `UpdateStatus` endpoint + to provide progress information back to Tasks API. + + Parameters + ---------- + agent_selector : typing.Optional[EntityIdsSelector] + The selector criteria to determine which tasks the agent receives. + + heartbeat_interval_ms : typing.Optional[int] + The time interval, defined in seconds, that determines the frequency at which to send heartbeat events. Defaults to 30s. + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Yields + ------ + typing.AsyncIterator[AsyncHttpResponse[typing.AsyncIterator[StreamAsAgentResponse]]] + Returns a stream of tasks to the agent as they become available. 
+ """ + async with self._client_wrapper.httpx_client.stream( + "api/v1/agent/stream", + method="POST", + json={ + "agentSelector": convert_and_respect_annotation_metadata( + object_=agent_selector, annotation=EntityIdsSelector, direction="write" + ), + "heartbeatIntervalMs": heartbeat_interval_ms, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) as _response: + + async def _stream() -> AsyncHttpResponse[typing.AsyncIterator[StreamAsAgentResponse]]: + try: + if 200 <= _response.status_code < 300: + + async def _iter(): + _event_source = EventSource(_response) + async for _sse in _event_source.aiter_sse(): + if _sse.data == None: + return + try: + yield typing.cast( + StreamAsAgentResponse, + parse_sse_obj( + sse=_sse, + type_=StreamAsAgentResponse, # type: ignore + ), + ) + except JSONDecodeError as e: + warning(f"Skipping SSE event with invalid JSON: {e}, sse: {_sse!r}") + except (TypeError, ValueError, KeyError, AttributeError) as e: + warning( + f"Skipping SSE event due to model construction error: {type(e).__name__}: {e}, sse: {_sse!r}" + ) + except Exception as e: + error( + f"Unexpected error processing SSE event: {type(e).__name__}: {e}, sse: {_sse!r}" + ) + return + + return AsyncHttpResponse(response=_response, data=_iter()) + await _response.aread() + if _response.status_code == 400: + raise BadRequestError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 401: + raise UnauthorizedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Any, + parse_obj_as( + type_=typing.Any, # type: ignore + object_=_response.json(), + ), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError( + status_code=_response.status_code, headers=dict(_response.headers), body=_response.text + ) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + yield await _stream() diff --git a/src/anduril/tasks/types/__init__.py b/src/anduril/tasks/types/__init__.py index 7d5e5a3..c0815b2 100644 --- a/src/anduril/tasks/types/__init__.py +++ b/src/anduril/tasks/types/__init__.py @@ -6,13 +6,31 @@ from importlib import import_module if typing.TYPE_CHECKING: + from .stream_as_agent_response import ( + StreamAsAgentResponse, + StreamAsAgentResponse_AgentRequest, + StreamAsAgentResponse_Heartbeat, + ) + from .stream_tasks_response import StreamTasksResponse, StreamTasksResponse_Heartbeat, StreamTasksResponse_TaskEvent from .task_query_status_filter import TaskQueryStatusFilter from .task_query_status_filter_status import TaskQueryStatusFilterStatus from .task_query_update_time_range import TaskQueryUpdateTimeRange + from .task_stream_request_task_type import TaskStreamRequestTaskType + from .task_stream_request_task_type_task_type_prefix import TaskStreamRequestTaskTypeTaskTypePrefix + from .task_stream_request_task_type_task_type_urls import TaskStreamRequestTaskTypeTaskTypeUrls _dynamic_imports: typing.Dict[str, str] = { + "StreamAsAgentResponse": ".stream_as_agent_response", + "StreamAsAgentResponse_AgentRequest": ".stream_as_agent_response", + "StreamAsAgentResponse_Heartbeat": ".stream_as_agent_response", + "StreamTasksResponse": ".stream_tasks_response", + "StreamTasksResponse_Heartbeat": ".stream_tasks_response", + "StreamTasksResponse_TaskEvent": ".stream_tasks_response", "TaskQueryStatusFilter": 
".task_query_status_filter", "TaskQueryStatusFilterStatus": ".task_query_status_filter_status", "TaskQueryUpdateTimeRange": ".task_query_update_time_range", + "TaskStreamRequestTaskType": ".task_stream_request_task_type", + "TaskStreamRequestTaskTypeTaskTypePrefix": ".task_stream_request_task_type_task_type_prefix", + "TaskStreamRequestTaskTypeTaskTypeUrls": ".task_stream_request_task_type_task_type_urls", } @@ -37,4 +55,17 @@ def __dir__(): return sorted(lazy_attrs) -__all__ = ["TaskQueryStatusFilter", "TaskQueryStatusFilterStatus", "TaskQueryUpdateTimeRange"] +__all__ = [ + "StreamAsAgentResponse", + "StreamAsAgentResponse_AgentRequest", + "StreamAsAgentResponse_Heartbeat", + "StreamTasksResponse", + "StreamTasksResponse_Heartbeat", + "StreamTasksResponse_TaskEvent", + "TaskQueryStatusFilter", + "TaskQueryStatusFilterStatus", + "TaskQueryUpdateTimeRange", + "TaskStreamRequestTaskType", + "TaskStreamRequestTaskTypeTaskTypePrefix", + "TaskStreamRequestTaskTypeTaskTypeUrls", +] diff --git a/src/anduril/tasks/types/stream_as_agent_response.py b/src/anduril/tasks/types/stream_as_agent_response.py new file mode 100644 index 0000000..a2f38c0 --- /dev/null +++ b/src/anduril/tasks/types/stream_as_agent_response.py @@ -0,0 +1,64 @@ +# This file was auto-generated by Fern from our API Definition. + +from __future__ import annotations + +import typing + +import pydantic +import typing_extensions +from ...core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel, update_forward_refs +from ...core.serialization import FieldMetadata +from ...types.cancel_request import CancelRequest +from ...types.complete_request import CompleteRequest +from ...types.execute_request import ExecuteRequest + + +class StreamAsAgentResponse_Heartbeat(UniversalBaseModel): + """ + The stream event response. + """ + + event: typing.Literal["heartbeat"] = "heartbeat" + timestamp: typing.Optional[str] = None + + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: + + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow + + +class StreamAsAgentResponse_AgentRequest(UniversalBaseModel): + """ + The stream event response. 
+    """
+
+    event: typing.Literal["agent_request"] = "agent_request"
+    execute_request: typing_extensions.Annotated[
+        typing.Optional[ExecuteRequest], FieldMetadata(alias="executeRequest")
+    ] = pydantic.Field(alias="executeRequest", default=None)
+    cancel_request: typing_extensions.Annotated[
+        typing.Optional[CancelRequest], FieldMetadata(alias="cancelRequest")
+    ] = pydantic.Field(alias="cancelRequest", default=None)
+    complete_request: typing_extensions.Annotated[
+        typing.Optional[CompleteRequest], FieldMetadata(alias="completeRequest")
+    ] = pydantic.Field(alias="completeRequest", default=None)
+
+    if IS_PYDANTIC_V2:
+        model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True)  # type: ignore # Pydantic v2
+    else:
+
+        class Config:
+            frozen = True
+            smart_union = True
+            extra = pydantic.Extra.allow
+
+
+StreamAsAgentResponse = typing_extensions.Annotated[
+    typing.Union[StreamAsAgentResponse_Heartbeat, StreamAsAgentResponse_AgentRequest],
+    pydantic.Field(discriminator="event"),
+]
+update_forward_refs(StreamAsAgentResponse_AgentRequest)
diff --git a/src/anduril/tasks/types/stream_tasks_response.py b/src/anduril/tasks/types/stream_tasks_response.py
new file mode 100644
index 0000000..dfe8966
--- /dev/null
+++ b/src/anduril/tasks/types/stream_tasks_response.py
@@ -0,0 +1,55 @@
+# This file was auto-generated by Fern from our API Definition.
+
+from __future__ import annotations
+
+import typing
+
+import pydantic
+import typing_extensions
+from ...core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel, update_forward_refs
+from ...core.serialization import FieldMetadata
+from ...types.task_event_data_task_event import TaskEventDataTaskEvent
+
+
+class StreamTasksResponse_Heartbeat(UniversalBaseModel):
+    """
+    The stream event response.
+    """
+
+    event: typing.Literal["heartbeat"] = "heartbeat"
+    timestamp: typing.Optional[str] = None
+
+    if IS_PYDANTIC_V2:
+        model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True)  # type: ignore # Pydantic v2
+    else:
+
+        class Config:
+            frozen = True
+            smart_union = True
+            extra = pydantic.Extra.allow
+
+
+class StreamTasksResponse_TaskEvent(UniversalBaseModel):
+    """
+    The stream event response.
+    """
+
+    event: typing.Literal["task_event"] = "task_event"
+    task_event: typing_extensions.Annotated[
+        typing.Optional[TaskEventDataTaskEvent], FieldMetadata(alias="taskEvent")
+    ] = pydantic.Field(alias="taskEvent", default=None)
+
+    if IS_PYDANTIC_V2:
+        model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True)  # type: ignore # Pydantic v2
+    else:
+
+        class Config:
+            frozen = True
+            smart_union = True
+            extra = pydantic.Extra.allow
+
+
+StreamTasksResponse = typing_extensions.Annotated[
+    typing.Union[StreamTasksResponse_Heartbeat, StreamTasksResponse_TaskEvent], pydantic.Field(discriminator="event")
+]
+update_forward_refs(StreamTasksResponse_TaskEvent)
diff --git a/src/anduril/tasks/types/task_stream_request_task_type.py b/src/anduril/tasks/types/task_stream_request_task_type.py
new file mode 100644
index 0000000..813a9df
--- /dev/null
+++ b/src/anduril/tasks/types/task_stream_request_task_type.py
@@ -0,0 +1,8 @@
+# This file was auto-generated by Fern from our API Definition.
+
+import typing
+
+from .task_stream_request_task_type_task_type_prefix import TaskStreamRequestTaskTypeTaskTypePrefix
+from .task_stream_request_task_type_task_type_urls import TaskStreamRequestTaskTypeTaskTypeUrls
+
+TaskStreamRequestTaskType = typing.Union[TaskStreamRequestTaskTypeTaskTypeUrls, TaskStreamRequestTaskTypeTaskTypePrefix]
diff --git a/src/anduril/tasks/types/task_stream_request_task_type_task_type_prefix.py b/src/anduril/tasks/types/task_stream_request_task_type_task_type_prefix.py
new file mode 100644
index 0000000..a512408
--- /dev/null
+++ b/src/anduril/tasks/types/task_stream_request_task_type_task_type_prefix.py
@@ -0,0 +1,26 @@
+# This file was auto-generated by Fern from our API Definition.
+
+import typing
+
+import pydantic
+import typing_extensions
+from ...core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel
+from ...core.serialization import FieldMetadata
+
+
+class TaskStreamRequestTaskTypeTaskTypePrefix(UniversalBaseModel):
+    task_type_prefix: typing_extensions.Annotated[str, FieldMetadata(alias="taskTypePrefix")] = pydantic.Field(
+        alias="taskTypePrefix"
+    )
+    """
+    Prefix string to match task types. Any task with a type that starts with this prefix will be included.
+    """
+
+    if IS_PYDANTIC_V2:
+        model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True)  # type: ignore # Pydantic v2
+    else:
+
+        class Config:
+            frozen = True
+            smart_union = True
+            extra = pydantic.Extra.allow
diff --git a/src/anduril/tasks/types/task_stream_request_task_type_task_type_urls.py b/src/anduril/tasks/types/task_stream_request_task_type_task_type_urls.py
new file mode 100644
index 0000000..327469a
--- /dev/null
+++ b/src/anduril/tasks/types/task_stream_request_task_type_task_type_urls.py
@@ -0,0 +1,26 @@
+# This file was auto-generated by Fern from our API Definition.
+
+import typing
+
+import pydantic
+import typing_extensions
+from ...core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel
+from ...core.serialization import FieldMetadata
+
+
+class TaskStreamRequestTaskTypeTaskTypeUrls(UniversalBaseModel):
+    task_type_urls: typing_extensions.Annotated[typing.List[str], FieldMetadata(alias="taskTypeUrls")] = pydantic.Field(
+        alias="taskTypeUrls"
+    )
+    """
+    List of exact task type URLs to match.
+    """
+
+    if IS_PYDANTIC_V2:
+        model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True)  # type: ignore # Pydantic v2
+    else:
+
+        class Config:
+            frozen = True
+            smart_union = True
+            extra = pydantic.Extra.allow
diff --git a/src/anduril/types/__init__.py b/src/anduril/types/__init__.py
index 8125489..1fcae4e 100644
--- a/src/anduril/types/__init__.py
+++ b/src/anduril/types/__init__.py
@@ -11,6 +11,8 @@
     from .active_target import ActiveTarget
     from .agent import Agent
     from .agent_request import AgentRequest
+    from .agent_stream_event import AgentStreamEvent
+    from .agent_task_request import AgentTaskRequest
     from .alert import Alert
     from .alert_condition import AlertCondition
     from .alert_level import AlertLevel
@@ -161,6 +163,7 @@
     from .sensors import Sensors
     from .signal import Signal
     from .status import Status
+    from .stream_heartbeat import StreamHeartbeat
     from .supplies import Supplies
     from .symbology import Symbology
     from .system import System
@@ -172,9 +175,13 @@
     from .task_entity import TaskEntity
     from .task_error import TaskError
     from .task_error_code import TaskErrorCode
+    from .task_event_data import TaskEventData
+    from .task_event_data_task_event import TaskEventDataTaskEvent
+    from .task_event_data_task_event_event_type import TaskEventDataTaskEventEventType
     from .task_query_results import TaskQueryResults
     from .task_status import TaskStatus
     from .task_status_status import TaskStatusStatus
+    from .task_stream_event import TaskStreamEvent
     from .task_version import TaskVersion
     from .team import Team
     from .threat import Threat
@@ -194,6 +201,8 @@
     "ActiveTarget": ".active_target",
     "Agent": ".agent",
     "AgentRequest": ".agent_request",
+    "AgentStreamEvent": ".agent_stream_event",
+    "AgentTaskRequest": ".agent_task_request",
     "Alert": ".alert",
     "AlertCondition": ".alert_condition",
     "AlertLevel": ".alert_level",
@@ -344,6 +353,7 @@
     "Sensors": ".sensors",
     "Signal": ".signal",
     "Status": ".status",
+    "StreamHeartbeat": ".stream_heartbeat",
     "Supplies": ".supplies",
     "Symbology": ".symbology",
     "System": ".system",
@@ -355,9 +365,13 @@
     "TaskEntity": ".task_entity",
     "TaskError": ".task_error",
     "TaskErrorCode": ".task_error_code",
+    "TaskEventData": ".task_event_data",
+    "TaskEventDataTaskEvent": ".task_event_data_task_event",
+    "TaskEventDataTaskEventEventType": ".task_event_data_task_event_event_type",
     "TaskQueryResults": ".task_query_results",
     "TaskStatus": ".task_status",
     "TaskStatusStatus": ".task_status_status",
+    "TaskStreamEvent": ".task_stream_event",
     "TaskVersion": ".task_version",
     "Team": ".team",
     "Threat": ".threat",
@@ -401,6 +415,8 @@ def __dir__():
     "ActiveTarget",
     "Agent",
     "AgentRequest",
+    "AgentStreamEvent",
+    "AgentTaskRequest",
     "Alert",
     "AlertCondition",
     "AlertLevel",
@@ -551,6 +567,7 @@ def __dir__():
     "Sensors",
     "Signal",
     "Status",
+    "StreamHeartbeat",
     "Supplies",
     "Symbology",
     "System",
@@ -562,9 +579,13 @@ def __dir__():
     "TaskEntity",
     "TaskError",
     "TaskErrorCode",
+    "TaskEventData",
+    "TaskEventDataTaskEvent",
+    "TaskEventDataTaskEventEventType",
     "TaskQueryResults",
     "TaskStatus",
     "TaskStatusStatus",
+    "TaskStreamEvent",
     "TaskVersion",
     "Team",
     "Threat",
diff --git a/src/anduril/types/agent_stream_event.py b/src/anduril/types/agent_stream_event.py
new file mode 100644
index 0000000..93cd619
--- /dev/null
+++ b/src/anduril/types/agent_stream_event.py
@@ -0,0 +1,20 @@
+# This file was auto-generated by Fern from our API Definition.
+
+from __future__ import annotations
+
+import typing
+
+import pydantic
+from ..core.pydantic_utilities import IS_PYDANTIC_V2
+from .agent_task_request import AgentTaskRequest
+
+
+class AgentStreamEvent(AgentTaskRequest):
+    if IS_PYDANTIC_V2:
+        model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True)  # type: ignore # Pydantic v2
+    else:
+
+        class Config:
+            frozen = True
+            smart_union = True
+            extra = pydantic.Extra.allow
diff --git a/src/anduril/types/agent_task_request.py b/src/anduril/types/agent_task_request.py
new file mode 100644
index 0000000..9398920
--- /dev/null
+++ b/src/anduril/types/agent_task_request.py
@@ -0,0 +1,41 @@
+# This file was auto-generated by Fern from our API Definition.
+
+from __future__ import annotations
+
+import typing
+
+import pydantic
+import typing_extensions
+from ..core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel, update_forward_refs
+from ..core.serialization import FieldMetadata
+from .cancel_request import CancelRequest
+from .complete_request import CompleteRequest
+from .execute_request import ExecuteRequest
+
+
+class AgentTaskRequest(UniversalBaseModel):
+    """
+    The wrapper for a task's action requests: execute, cancel, or complete.
+    """
+
+    execute_request: typing_extensions.Annotated[
+        typing.Optional[ExecuteRequest], FieldMetadata(alias="executeRequest")
+    ] = pydantic.Field(alias="executeRequest", default=None)
+    cancel_request: typing_extensions.Annotated[
+        typing.Optional[CancelRequest], FieldMetadata(alias="cancelRequest")
+    ] = pydantic.Field(alias="cancelRequest", default=None)
+    complete_request: typing_extensions.Annotated[
+        typing.Optional[CompleteRequest], FieldMetadata(alias="completeRequest")
+    ] = pydantic.Field(alias="completeRequest", default=None)
+
+    if IS_PYDANTIC_V2:
+        model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True)  # type: ignore # Pydantic v2
+    else:
+
+        class Config:
+            frozen = True
+            smart_union = True
+            extra = pydantic.Extra.allow
+
+
+update_forward_refs(AgentTaskRequest)
diff --git a/src/anduril/types/heartbeat_object.py b/src/anduril/types/heartbeat_object.py
index dd731c7..8643bef 100644
--- a/src/anduril/types/heartbeat_object.py
+++ b/src/anduril/types/heartbeat_object.py
@@ -1,6 +1,5 @@
 # This file was auto-generated by Fern from our API Definition.
 
-import datetime as dt
 import typing
 
 import pydantic
@@ -8,9 +7,9 @@
 
 
 class HeartbeatObject(UniversalBaseModel):
-    timestamp: typing.Optional[dt.datetime] = pydantic.Field(default=None)
+    timestamp: typing.Optional[str] = pydantic.Field(default=None)
     """
-    timestamp of the heartbeat
+    The timestamp at which the heartbeat message was sent.
     """
 
     if IS_PYDANTIC_V2:
diff --git a/src/anduril/types/stream_heartbeat.py b/src/anduril/types/stream_heartbeat.py
new file mode 100644
index 0000000..c90dd5a
--- /dev/null
+++ b/src/anduril/types/stream_heartbeat.py
@@ -0,0 +1,18 @@
+# This file was auto-generated by Fern from our API Definition.
+
+import typing
+
+import pydantic
+from ..core.pydantic_utilities import IS_PYDANTIC_V2
+from .heartbeat_object import HeartbeatObject
+
+
+class StreamHeartbeat(HeartbeatObject):
+    if IS_PYDANTIC_V2:
+        model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True)  # type: ignore # Pydantic v2
+    else:
+
+        class Config:
+            frozen = True
+            smart_union = True
+            extra = pydantic.Extra.allow
diff --git a/src/anduril/types/task_event_data.py b/src/anduril/types/task_event_data.py
new file mode 100644
index 0000000..59ad1f2
--- /dev/null
+++ b/src/anduril/types/task_event_data.py
@@ -0,0 +1,36 @@
+# This file was auto-generated by Fern from our API Definition.
+
+from __future__ import annotations
+
+import typing
+
+import pydantic
+import typing_extensions
+from ..core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel, update_forward_refs
+from ..core.serialization import FieldMetadata
+from .task_event_data_task_event import TaskEventDataTaskEvent
+
+
+class TaskEventData(UniversalBaseModel):
+    """
+    Contains information about a task event.
+    """
+
+    task_event: typing_extensions.Annotated[
+        typing.Optional[TaskEventDataTaskEvent], FieldMetadata(alias="taskEvent")
+    ] = pydantic.Field(alias="taskEvent", default=None)
+    """
+    The task event that occurred.
+    """
+
+    if IS_PYDANTIC_V2:
+        model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True)  # type: ignore # Pydantic v2
+    else:
+
+        class Config:
+            frozen = True
+            smart_union = True
+            extra = pydantic.Extra.allow
+
+
+update_forward_refs(TaskEventData)
diff --git a/src/anduril/types/task_event_data_task_event.py b/src/anduril/types/task_event_data_task_event.py
new file mode 100644
index 0000000..56ca83e
--- /dev/null
+++ b/src/anduril/types/task_event_data_task_event.py
@@ -0,0 +1,42 @@
+# This file was auto-generated by Fern from our API Definition.
+
+from __future__ import annotations
+
+import typing
+
+import pydantic
+import typing_extensions
+from ..core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel, update_forward_refs
+from ..core.serialization import FieldMetadata
+from .task import Task
+from .task_event_data_task_event_event_type import TaskEventDataTaskEventEventType
+
+
+class TaskEventDataTaskEvent(UniversalBaseModel):
+    """
+    The task event that occurred.
+    """
+
+    event_type: typing_extensions.Annotated[
+        typing.Optional[TaskEventDataTaskEventEventType], FieldMetadata(alias="eventType")
+    ] = pydantic.Field(alias="eventType", default=None)
+    """
+    The type of event that occurred for this task.
+    """
+
+    task: typing.Optional[Task] = pydantic.Field(default=None)
+    """
+    The task associated with this event.
+    """
+
+    if IS_PYDANTIC_V2:
+        model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True)  # type: ignore # Pydantic v2
+    else:
+
+        class Config:
+            frozen = True
+            smart_union = True
+            extra = pydantic.Extra.allow
+
+
+update_forward_refs(TaskEventDataTaskEvent)
diff --git a/src/anduril/types/task_event_data_task_event_event_type.py b/src/anduril/types/task_event_data_task_event_event_type.py
new file mode 100644
index 0000000..cbf31d5
--- /dev/null
+++ b/src/anduril/types/task_event_data_task_event_event_type.py
@@ -0,0 +1,8 @@
+# This file was auto-generated by Fern from our API Definition.
+
+import typing
+
+TaskEventDataTaskEventEventType = typing.Union[
+    typing.Literal["EVENT_TYPE_INVALID", "EVENT_TYPE_CREATED", "EVENT_TYPE_UPDATE", "EVENT_TYPE_PREEXISTING"],
+    typing.Any,
+]
diff --git a/src/anduril/types/task_stream_event.py b/src/anduril/types/task_stream_event.py
new file mode 100644
index 0000000..946964a
--- /dev/null
+++ b/src/anduril/types/task_stream_event.py
@@ -0,0 +1,20 @@
+# This file was auto-generated by Fern from our API Definition.
+
+from __future__ import annotations
+
+import typing
+
+import pydantic
+from ..core.pydantic_utilities import IS_PYDANTIC_V2
+from .task_event_data import TaskEventData
+
+
+class TaskStreamEvent(TaskEventData):
+    if IS_PYDANTIC_V2:
+        model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True)  # type: ignore # Pydantic v2
+    else:
+
+        class Config:
+            frozen = True
+            smart_union = True
+            extra = pydantic.Extra.allow
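
Usage note (not part of the generated diff): a minimal consumer sketch for the new `stream_as_agent` endpoint introduced above, dispatching on the `event` discriminator of `StreamAsAgentResponse`. The `AsyncLattice` class name follows the configured `client_class_name`; the `tasks.raw_client` attribute path and the `token` argument are assumptions for illustration, not confirmed by this patch.

# Hypothetical consumer sketch, assuming AsyncLattice and client.tasks.raw_client exist.
import asyncio

from anduril import AsyncLattice


async def main() -> None:
    client = AsyncLattice(token="YOUR_TOKEN")  # auth setup is an assumption
    # stream_as_agent is an async context manager yielding an AsyncHttpResponse
    # whose .data attribute is an async iterator of StreamAsAgentResponse events.
    async with client.tasks.raw_client.stream_as_agent(heartbeat_interval_ms=30_000) as response:
        async for event in response.data:
            if event.event == "heartbeat":
                # Periodic keep-alive; no action required.
                continue
            # event.event == "agent_request": one of the three request fields
            # is expected to be populated on each event.
            if event.execute_request is not None:
                ...  # begin executing the new task, then report via UpdateStatus
            elif event.cancel_request is not None:
                ...  # stop work on the referenced task
            elif event.complete_request is not None:
                ...  # finalize the referenced task


asyncio.run(main())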
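
Similarly, a sketch constructing the two members of the new `TaskStreamRequestTaskType` union: match task types by prefix, or by an explicit list of type URLs. The URL values are illustrative only, and keyword construction by snake_case field name assumes Fern's `UniversalBaseModel` permits population by field name alongside the camelCase aliases.

# Hypothetical filter construction for the task stream request.
from anduril.tasks.types import (
    TaskStreamRequestTaskTypeTaskTypePrefix,
    TaskStreamRequestTaskTypeTaskTypeUrls,
)

# Match any task whose type starts with the given prefix.
by_prefix = TaskStreamRequestTaskTypeTaskTypePrefix(
    task_type_prefix="type.googleapis.com/anduril.tasks.v2.",  # illustrative prefix
)

# Match only tasks whose type URL appears in an explicit list.
by_urls = TaskStreamRequestTaskTypeTaskTypeUrls(
    task_type_urls=["type.googleapis.com/anduril.tasks.v2.Investigate"],  # illustrative URL
)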