diff --git a/README.md b/README.md
index 59d7355..51bb919 100644
--- a/README.md
+++ b/README.md
@@ -15,7 +15,6 @@ The REST API documentation can be found on [docs.llama-stack.todo](https://docs.
## Installation
```sh
-# install from this staging repo
pip install llama-stack-client
```
@@ -31,7 +30,7 @@ client = LlamaStack(
environment="sandbox",
)
-session = client.agentic_system.sessions.create(
+session = client.agents.sessions.create(
agent_id="agent_id",
session_name="session_name",
)
@@ -53,7 +52,7 @@ client = AsyncLlamaStack(
async def main() -> None:
- session = await client.agentic_system.sessions.create(
+ session = await client.agents.sessions.create(
agent_id="agent_id",
session_name="session_name",
)
@@ -90,7 +89,7 @@ from llama_stack import LlamaStack
client = LlamaStack()
try:
- client.agentic_system.sessions.create(
+ client.agents.sessions.create(
agent_id="agent_id",
session_name="session_name",
)
@@ -136,7 +135,7 @@ client = LlamaStack(
)
# Or, configure per-request:
-client.with_options(max_retries=5).agentic_system.sessions.create(
+client.with_options(max_retries=5).agents.sessions.create(
agent_id="agent_id",
session_name="session_name",
)
@@ -162,7 +161,7 @@ client = LlamaStack(
)
# Override per-request:
-client.with_options(timeout=5.0).agentic_system.sessions.create(
+client.with_options(timeout=5.0).agents.sessions.create(
agent_id="agent_id",
session_name="session_name",
)
@@ -204,13 +203,13 @@ The "raw" Response object can be accessed by prefixing `.with_raw_response.` to
from llama_stack import LlamaStack
client = LlamaStack()
-response = client.agentic_system.sessions.with_raw_response.create(
+response = client.agents.sessions.with_raw_response.create(
agent_id="agent_id",
session_name="session_name",
)
print(response.headers.get('X-My-Header'))
-session = response.parse() # get the object that `agentic_system.sessions.create()` would have returned
+session = response.parse() # get the object that `agents.sessions.create()` would have returned
print(session.session_id)
```
@@ -225,7 +224,7 @@ The above interface eagerly reads the full response body when you make the reque
To stream the response body, use `.with_streaming_response` instead, which requires a context manager and only reads the response body once you call `.read()`, `.text()`, `.json()`, `.iter_bytes()`, `.iter_text()`, `.iter_lines()` or `.parse()`. In the async client, these are async methods.
```python
-with client.agentic_system.sessions.with_streaming_response.create(
+with client.agents.sessions.with_streaming_response.create(
agent_id="agent_id",
session_name="session_name",
) as response:
diff --git a/api.md b/api.md
index 6ca4424..5561fa2 100644
--- a/api.md
+++ b/api.md
@@ -26,7 +26,7 @@ Methods:
- client.telemetry.get_trace(\*\*params) -> TelemetryGetTraceResponse
- client.telemetry.log(\*\*params) -> None
-# AgenticSystem
+# Agents
Types:
@@ -42,53 +42,53 @@ from llama_stack.types import (
ShieldDefinition,
ToolExecutionStep,
ToolParamDefinition,
- AgenticSystemCreateResponse,
+ AgentCreateResponse,
)
```
Methods:
-- client.agentic_system.create(\*\*params) -> AgenticSystemCreateResponse
-- client.agentic_system.delete(\*\*params) -> None
+- client.agents.create(\*\*params) -> AgentCreateResponse
+- client.agents.delete(\*\*params) -> None
## Sessions
Types:
```python
-from llama_stack.types.agentic_system import Session, SessionCreateResponse
+from llama_stack.types.agents import Session, SessionCreateResponse
```
Methods:
-- client.agentic_system.sessions.create(\*\*params) -> SessionCreateResponse
-- client.agentic_system.sessions.retrieve(\*\*params) -> Session
-- client.agentic_system.sessions.delete(\*\*params) -> None
+- client.agents.sessions.create(\*\*params) -> SessionCreateResponse
+- client.agents.sessions.retrieve(\*\*params) -> Session
+- client.agents.sessions.delete(\*\*params) -> None
## Steps
Types:
```python
-from llama_stack.types.agentic_system import AgenticSystemStep
+from llama_stack.types.agents import AgentsStep
```
Methods:
-- client.agentic_system.steps.retrieve(\*\*params) -> AgenticSystemStep
+- client.agents.steps.retrieve(\*\*params) -> AgentsStep
## Turns
Types:
```python
-from llama_stack.types.agentic_system import AgenticSystemTurnStreamChunk, Turn, TurnStreamEvent
+from llama_stack.types.agents import AgentsTurnStreamChunk, Turn, TurnStreamEvent
```
Methods:
-- client.agentic_system.turns.create(\*\*params) -> AgenticSystemTurnStreamChunk
-- client.agentic_system.turns.retrieve(\*\*params) -> Turn
+- client.agents.turns.create(\*\*params) -> AgentsTurnStreamChunk
+- client.agents.turns.retrieve(\*\*params) -> Turn
# Datasets
diff --git a/pyproject.toml b/pyproject.toml
index 9a15fbd..f230641 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -1,6 +1,6 @@
[project]
name = "llama_stack_client"
-version = "0.0.1-alpha.0"
+version = "0.0.2-alpha.0"
description = "The official Python library for the llama-stack API"
dynamic = ["readme"]
license = "Apache-2.0"
diff --git a/src/llama_stack/_client.py b/src/llama_stack/_client.py
index 72fd4a9..80d45bd 100644
--- a/src/llama_stack/_client.py
+++ b/src/llama_stack/_client.py
@@ -53,7 +53,7 @@
class LlamaStack(SyncAPIClient):
telemetry: resources.TelemetryResource
- agentic_system: resources.AgenticSystemResource
+ agents: resources.AgentsResource
datasets: resources.DatasetsResource
evaluate: resources.EvaluateResource
evaluations: resources.EvaluationsResource
@@ -133,7 +133,7 @@ def __init__(
)
self.telemetry = resources.TelemetryResource(self)
- self.agentic_system = resources.AgenticSystemResource(self)
+ self.agents = resources.AgentsResource(self)
self.datasets = resources.DatasetsResource(self)
self.evaluate = resources.EvaluateResource(self)
self.evaluations = resources.EvaluationsResource(self)
@@ -248,7 +248,7 @@ def _make_status_error(
class AsyncLlamaStack(AsyncAPIClient):
telemetry: resources.AsyncTelemetryResource
- agentic_system: resources.AsyncAgenticSystemResource
+ agents: resources.AsyncAgentsResource
datasets: resources.AsyncDatasetsResource
evaluate: resources.AsyncEvaluateResource
evaluations: resources.AsyncEvaluationsResource
@@ -328,7 +328,7 @@ def __init__(
)
self.telemetry = resources.AsyncTelemetryResource(self)
- self.agentic_system = resources.AsyncAgenticSystemResource(self)
+ self.agents = resources.AsyncAgentsResource(self)
self.datasets = resources.AsyncDatasetsResource(self)
self.evaluate = resources.AsyncEvaluateResource(self)
self.evaluations = resources.AsyncEvaluationsResource(self)
@@ -444,7 +444,7 @@ def _make_status_error(
class LlamaStackWithRawResponse:
def __init__(self, client: LlamaStack) -> None:
self.telemetry = resources.TelemetryResourceWithRawResponse(client.telemetry)
- self.agentic_system = resources.AgenticSystemResourceWithRawResponse(client.agentic_system)
+ self.agents = resources.AgentsResourceWithRawResponse(client.agents)
self.datasets = resources.DatasetsResourceWithRawResponse(client.datasets)
self.evaluate = resources.EvaluateResourceWithRawResponse(client.evaluate)
self.evaluations = resources.EvaluationsResourceWithRawResponse(client.evaluations)
@@ -462,7 +462,7 @@ def __init__(self, client: LlamaStack) -> None:
class AsyncLlamaStackWithRawResponse:
def __init__(self, client: AsyncLlamaStack) -> None:
self.telemetry = resources.AsyncTelemetryResourceWithRawResponse(client.telemetry)
- self.agentic_system = resources.AsyncAgenticSystemResourceWithRawResponse(client.agentic_system)
+ self.agents = resources.AsyncAgentsResourceWithRawResponse(client.agents)
self.datasets = resources.AsyncDatasetsResourceWithRawResponse(client.datasets)
self.evaluate = resources.AsyncEvaluateResourceWithRawResponse(client.evaluate)
self.evaluations = resources.AsyncEvaluationsResourceWithRawResponse(client.evaluations)
@@ -480,7 +480,7 @@ def __init__(self, client: AsyncLlamaStack) -> None:
class LlamaStackWithStreamedResponse:
def __init__(self, client: LlamaStack) -> None:
self.telemetry = resources.TelemetryResourceWithStreamingResponse(client.telemetry)
- self.agentic_system = resources.AgenticSystemResourceWithStreamingResponse(client.agentic_system)
+ self.agents = resources.AgentsResourceWithStreamingResponse(client.agents)
self.datasets = resources.DatasetsResourceWithStreamingResponse(client.datasets)
self.evaluate = resources.EvaluateResourceWithStreamingResponse(client.evaluate)
self.evaluations = resources.EvaluationsResourceWithStreamingResponse(client.evaluations)
@@ -498,7 +498,7 @@ def __init__(self, client: LlamaStack) -> None:
class AsyncLlamaStackWithStreamedResponse:
def __init__(self, client: AsyncLlamaStack) -> None:
self.telemetry = resources.AsyncTelemetryResourceWithStreamingResponse(client.telemetry)
- self.agentic_system = resources.AsyncAgenticSystemResourceWithStreamingResponse(client.agentic_system)
+ self.agents = resources.AsyncAgentsResourceWithStreamingResponse(client.agents)
self.datasets = resources.AsyncDatasetsResourceWithStreamingResponse(client.datasets)
self.evaluate = resources.AsyncEvaluateResourceWithStreamingResponse(client.evaluate)
self.evaluations = resources.AsyncEvaluationsResourceWithStreamingResponse(client.evaluations)
diff --git a/src/llama_stack/resources/__init__.py b/src/llama_stack/resources/__init__.py
index a9a971f..484c881 100644
--- a/src/llama_stack/resources/__init__.py
+++ b/src/llama_stack/resources/__init__.py
@@ -1,5 +1,13 @@
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
+from .agents import (
+ AgentsResource,
+ AsyncAgentsResource,
+ AgentsResourceWithRawResponse,
+ AsyncAgentsResourceWithRawResponse,
+ AgentsResourceWithStreamingResponse,
+ AsyncAgentsResourceWithStreamingResponse,
+)
from .safety import (
SafetyResource,
AsyncSafetyResource,
@@ -64,14 +72,6 @@
PostTrainingResourceWithStreamingResponse,
AsyncPostTrainingResourceWithStreamingResponse,
)
-from .agentic_system import (
- AgenticSystemResource,
- AsyncAgenticSystemResource,
- AgenticSystemResourceWithRawResponse,
- AsyncAgenticSystemResourceWithRawResponse,
- AgenticSystemResourceWithStreamingResponse,
- AsyncAgenticSystemResourceWithStreamingResponse,
-)
from .reward_scoring import (
RewardScoringResource,
AsyncRewardScoringResource,
@@ -104,12 +104,12 @@
"AsyncTelemetryResourceWithRawResponse",
"TelemetryResourceWithStreamingResponse",
"AsyncTelemetryResourceWithStreamingResponse",
- "AgenticSystemResource",
- "AsyncAgenticSystemResource",
- "AgenticSystemResourceWithRawResponse",
- "AsyncAgenticSystemResourceWithRawResponse",
- "AgenticSystemResourceWithStreamingResponse",
- "AsyncAgenticSystemResourceWithStreamingResponse",
+ "AgentsResource",
+ "AsyncAgentsResource",
+ "AgentsResourceWithRawResponse",
+ "AsyncAgentsResourceWithRawResponse",
+ "AgentsResourceWithStreamingResponse",
+ "AsyncAgentsResourceWithStreamingResponse",
"DatasetsResource",
"AsyncDatasetsResource",
"DatasetsResourceWithRawResponse",
diff --git a/src/llama_stack/resources/agentic_system/__init__.py b/src/llama_stack/resources/agents/__init__.py
similarity index 72%
rename from src/llama_stack/resources/agentic_system/__init__.py
rename to src/llama_stack/resources/agents/__init__.py
index a0dd4df..0a644db 100644
--- a/src/llama_stack/resources/agentic_system/__init__.py
+++ b/src/llama_stack/resources/agents/__init__.py
@@ -16,6 +16,14 @@
TurnsResourceWithStreamingResponse,
AsyncTurnsResourceWithStreamingResponse,
)
+from .agents import (
+ AgentsResource,
+ AsyncAgentsResource,
+ AgentsResourceWithRawResponse,
+ AsyncAgentsResourceWithRawResponse,
+ AgentsResourceWithStreamingResponse,
+ AsyncAgentsResourceWithStreamingResponse,
+)
from .sessions import (
SessionsResource,
AsyncSessionsResource,
@@ -24,14 +32,6 @@
SessionsResourceWithStreamingResponse,
AsyncSessionsResourceWithStreamingResponse,
)
-from .agentic_system import (
- AgenticSystemResource,
- AsyncAgenticSystemResource,
- AgenticSystemResourceWithRawResponse,
- AsyncAgenticSystemResourceWithRawResponse,
- AgenticSystemResourceWithStreamingResponse,
- AsyncAgenticSystemResourceWithStreamingResponse,
-)
__all__ = [
"SessionsResource",
@@ -52,10 +52,10 @@
"AsyncTurnsResourceWithRawResponse",
"TurnsResourceWithStreamingResponse",
"AsyncTurnsResourceWithStreamingResponse",
- "AgenticSystemResource",
- "AsyncAgenticSystemResource",
- "AgenticSystemResourceWithRawResponse",
- "AsyncAgenticSystemResourceWithRawResponse",
- "AgenticSystemResourceWithStreamingResponse",
- "AsyncAgenticSystemResourceWithStreamingResponse",
+ "AgentsResource",
+ "AsyncAgentsResource",
+ "AgentsResourceWithRawResponse",
+ "AsyncAgentsResourceWithRawResponse",
+ "AgentsResourceWithStreamingResponse",
+ "AsyncAgentsResourceWithStreamingResponse",
]
diff --git a/src/llama_stack/resources/agentic_system/agentic_system.py b/src/llama_stack/resources/agents/agents.py
similarity index 71%
rename from src/llama_stack/resources/agentic_system/agentic_system.py
rename to src/llama_stack/resources/agents/agents.py
index dde7feb..48871a2 100644
--- a/src/llama_stack/resources/agentic_system/agentic_system.py
+++ b/src/llama_stack/resources/agents/agents.py
@@ -20,7 +20,7 @@
TurnsResourceWithStreamingResponse,
AsyncTurnsResourceWithStreamingResponse,
)
-from ...types import agentic_system_create_params, agentic_system_delete_params
+from ...types import agent_create_params, agent_delete_params
from ..._types import NOT_GIVEN, Body, Query, Headers, NoneType, NotGiven
from ..._utils import (
maybe_transform,
@@ -43,12 +43,12 @@
async_to_streamed_response_wrapper,
)
from ..._base_client import make_request_options
-from ...types.agentic_system_create_response import AgenticSystemCreateResponse
+from ...types.agent_create_response import AgentCreateResponse
-__all__ = ["AgenticSystemResource", "AsyncAgenticSystemResource"]
+__all__ = ["AgentsResource", "AsyncAgentsResource"]
-class AgenticSystemResource(SyncAPIResource):
+class AgentsResource(SyncAPIResource):
@cached_property
def sessions(self) -> SessionsResource:
return SessionsResource(self._client)
@@ -62,35 +62,35 @@ def turns(self) -> TurnsResource:
return TurnsResource(self._client)
@cached_property
- def with_raw_response(self) -> AgenticSystemResourceWithRawResponse:
+ def with_raw_response(self) -> AgentsResourceWithRawResponse:
"""
    This property can be used as a prefix for any HTTP method call to return
    the raw response object instead of the parsed content.
For more information, see https://www.github.com/stainless-sdks/llama-stack-python#accessing-raw-response-data-eg-headers
"""
- return AgenticSystemResourceWithRawResponse(self)
+ return AgentsResourceWithRawResponse(self)
@cached_property
- def with_streaming_response(self) -> AgenticSystemResourceWithStreamingResponse:
+ def with_streaming_response(self) -> AgentsResourceWithStreamingResponse:
"""
An alternative to `.with_raw_response` that doesn't eagerly read the response body.
For more information, see https://www.github.com/stainless-sdks/llama-stack-python#with_streaming_response
"""
- return AgenticSystemResourceWithStreamingResponse(self)
+ return AgentsResourceWithStreamingResponse(self)
def create(
self,
*,
- agent_config: agentic_system_create_params.AgentConfig,
+ agent_config: agent_create_params.AgentConfig,
# Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
# The extra values given here take precedence over values defined on the client or passed to this method.
extra_headers: Headers | None = None,
extra_query: Query | None = None,
extra_body: Body | None = None,
timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
- ) -> AgenticSystemCreateResponse:
+ ) -> AgentCreateResponse:
"""
Args:
extra_headers: Send extra headers
@@ -102,14 +102,12 @@ def create(
timeout: Override the client-level default timeout for this request, in seconds
"""
return self._post(
- "/agentic_system/create",
- body=maybe_transform(
- {"agent_config": agent_config}, agentic_system_create_params.AgenticSystemCreateParams
- ),
+ "/agents/create",
+ body=maybe_transform({"agent_config": agent_config}, agent_create_params.AgentCreateParams),
options=make_request_options(
extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout
),
- cast_to=AgenticSystemCreateResponse,
+ cast_to=AgentCreateResponse,
)
def delete(
@@ -135,8 +133,8 @@ def delete(
"""
extra_headers = {"Accept": "*/*", **(extra_headers or {})}
return self._post(
- "/agentic_system/delete",
- body=maybe_transform({"agent_id": agent_id}, agentic_system_delete_params.AgenticSystemDeleteParams),
+ "/agents/delete",
+ body=maybe_transform({"agent_id": agent_id}, agent_delete_params.AgentDeleteParams),
options=make_request_options(
extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout
),
@@ -144,7 +142,7 @@ def delete(
)
-class AsyncAgenticSystemResource(AsyncAPIResource):
+class AsyncAgentsResource(AsyncAPIResource):
@cached_property
def sessions(self) -> AsyncSessionsResource:
return AsyncSessionsResource(self._client)
@@ -158,35 +156,35 @@ def turns(self) -> AsyncTurnsResource:
return AsyncTurnsResource(self._client)
@cached_property
- def with_raw_response(self) -> AsyncAgenticSystemResourceWithRawResponse:
+ def with_raw_response(self) -> AsyncAgentsResourceWithRawResponse:
"""
    This property can be used as a prefix for any HTTP method call to return
    the raw response object instead of the parsed content.
For more information, see https://www.github.com/stainless-sdks/llama-stack-python#accessing-raw-response-data-eg-headers
"""
- return AsyncAgenticSystemResourceWithRawResponse(self)
+ return AsyncAgentsResourceWithRawResponse(self)
@cached_property
- def with_streaming_response(self) -> AsyncAgenticSystemResourceWithStreamingResponse:
+ def with_streaming_response(self) -> AsyncAgentsResourceWithStreamingResponse:
"""
An alternative to `.with_raw_response` that doesn't eagerly read the response body.
For more information, see https://www.github.com/stainless-sdks/llama-stack-python#with_streaming_response
"""
- return AsyncAgenticSystemResourceWithStreamingResponse(self)
+ return AsyncAgentsResourceWithStreamingResponse(self)
async def create(
self,
*,
- agent_config: agentic_system_create_params.AgentConfig,
+ agent_config: agent_create_params.AgentConfig,
# Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
# The extra values given here take precedence over values defined on the client or passed to this method.
extra_headers: Headers | None = None,
extra_query: Query | None = None,
extra_body: Body | None = None,
timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
- ) -> AgenticSystemCreateResponse:
+ ) -> AgentCreateResponse:
"""
Args:
extra_headers: Send extra headers
@@ -198,14 +196,12 @@ async def create(
timeout: Override the client-level default timeout for this request, in seconds
"""
return await self._post(
- "/agentic_system/create",
- body=await async_maybe_transform(
- {"agent_config": agent_config}, agentic_system_create_params.AgenticSystemCreateParams
- ),
+ "/agents/create",
+ body=await async_maybe_transform({"agent_config": agent_config}, agent_create_params.AgentCreateParams),
options=make_request_options(
extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout
),
- cast_to=AgenticSystemCreateResponse,
+ cast_to=AgentCreateResponse,
)
async def delete(
@@ -231,10 +227,8 @@ async def delete(
"""
extra_headers = {"Accept": "*/*", **(extra_headers or {})}
return await self._post(
- "/agentic_system/delete",
- body=await async_maybe_transform(
- {"agent_id": agent_id}, agentic_system_delete_params.AgenticSystemDeleteParams
- ),
+ "/agents/delete",
+ body=await async_maybe_transform({"agent_id": agent_id}, agent_delete_params.AgentDeleteParams),
options=make_request_options(
extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout
),
@@ -242,97 +236,97 @@ async def delete(
)
-class AgenticSystemResourceWithRawResponse:
- def __init__(self, agentic_system: AgenticSystemResource) -> None:
- self._agentic_system = agentic_system
+class AgentsResourceWithRawResponse:
+ def __init__(self, agents: AgentsResource) -> None:
+ self._agents = agents
self.create = to_raw_response_wrapper(
- agentic_system.create,
+ agents.create,
)
self.delete = to_raw_response_wrapper(
- agentic_system.delete,
+ agents.delete,
)
@cached_property
def sessions(self) -> SessionsResourceWithRawResponse:
- return SessionsResourceWithRawResponse(self._agentic_system.sessions)
+ return SessionsResourceWithRawResponse(self._agents.sessions)
@cached_property
def steps(self) -> StepsResourceWithRawResponse:
- return StepsResourceWithRawResponse(self._agentic_system.steps)
+ return StepsResourceWithRawResponse(self._agents.steps)
@cached_property
def turns(self) -> TurnsResourceWithRawResponse:
- return TurnsResourceWithRawResponse(self._agentic_system.turns)
+ return TurnsResourceWithRawResponse(self._agents.turns)
-class AsyncAgenticSystemResourceWithRawResponse:
- def __init__(self, agentic_system: AsyncAgenticSystemResource) -> None:
- self._agentic_system = agentic_system
+class AsyncAgentsResourceWithRawResponse:
+ def __init__(self, agents: AsyncAgentsResource) -> None:
+ self._agents = agents
self.create = async_to_raw_response_wrapper(
- agentic_system.create,
+ agents.create,
)
self.delete = async_to_raw_response_wrapper(
- agentic_system.delete,
+ agents.delete,
)
@cached_property
def sessions(self) -> AsyncSessionsResourceWithRawResponse:
- return AsyncSessionsResourceWithRawResponse(self._agentic_system.sessions)
+ return AsyncSessionsResourceWithRawResponse(self._agents.sessions)
@cached_property
def steps(self) -> AsyncStepsResourceWithRawResponse:
- return AsyncStepsResourceWithRawResponse(self._agentic_system.steps)
+ return AsyncStepsResourceWithRawResponse(self._agents.steps)
@cached_property
def turns(self) -> AsyncTurnsResourceWithRawResponse:
- return AsyncTurnsResourceWithRawResponse(self._agentic_system.turns)
+ return AsyncTurnsResourceWithRawResponse(self._agents.turns)
-class AgenticSystemResourceWithStreamingResponse:
- def __init__(self, agentic_system: AgenticSystemResource) -> None:
- self._agentic_system = agentic_system
+class AgentsResourceWithStreamingResponse:
+ def __init__(self, agents: AgentsResource) -> None:
+ self._agents = agents
self.create = to_streamed_response_wrapper(
- agentic_system.create,
+ agents.create,
)
self.delete = to_streamed_response_wrapper(
- agentic_system.delete,
+ agents.delete,
)
@cached_property
def sessions(self) -> SessionsResourceWithStreamingResponse:
- return SessionsResourceWithStreamingResponse(self._agentic_system.sessions)
+ return SessionsResourceWithStreamingResponse(self._agents.sessions)
@cached_property
def steps(self) -> StepsResourceWithStreamingResponse:
- return StepsResourceWithStreamingResponse(self._agentic_system.steps)
+ return StepsResourceWithStreamingResponse(self._agents.steps)
@cached_property
def turns(self) -> TurnsResourceWithStreamingResponse:
- return TurnsResourceWithStreamingResponse(self._agentic_system.turns)
+ return TurnsResourceWithStreamingResponse(self._agents.turns)
-class AsyncAgenticSystemResourceWithStreamingResponse:
- def __init__(self, agentic_system: AsyncAgenticSystemResource) -> None:
- self._agentic_system = agentic_system
+class AsyncAgentsResourceWithStreamingResponse:
+ def __init__(self, agents: AsyncAgentsResource) -> None:
+ self._agents = agents
self.create = async_to_streamed_response_wrapper(
- agentic_system.create,
+ agents.create,
)
self.delete = async_to_streamed_response_wrapper(
- agentic_system.delete,
+ agents.delete,
)
@cached_property
def sessions(self) -> AsyncSessionsResourceWithStreamingResponse:
- return AsyncSessionsResourceWithStreamingResponse(self._agentic_system.sessions)
+ return AsyncSessionsResourceWithStreamingResponse(self._agents.sessions)
@cached_property
def steps(self) -> AsyncStepsResourceWithStreamingResponse:
- return AsyncStepsResourceWithStreamingResponse(self._agentic_system.steps)
+ return AsyncStepsResourceWithStreamingResponse(self._agents.steps)
@cached_property
def turns(self) -> AsyncTurnsResourceWithStreamingResponse:
- return AsyncTurnsResourceWithStreamingResponse(self._agentic_system.turns)
+ return AsyncTurnsResourceWithStreamingResponse(self._agents.turns)
diff --git a/src/llama_stack/resources/agentic_system/sessions.py b/src/llama_stack/resources/agents/sessions.py
similarity index 96%
rename from src/llama_stack/resources/agentic_system/sessions.py
rename to src/llama_stack/resources/agents/sessions.py
index 534f603..43eb41b 100644
--- a/src/llama_stack/resources/agentic_system/sessions.py
+++ b/src/llama_stack/resources/agents/sessions.py
@@ -20,9 +20,9 @@
async_to_streamed_response_wrapper,
)
from ..._base_client import make_request_options
-from ...types.agentic_system import session_create_params, session_delete_params, session_retrieve_params
-from ...types.agentic_system.session import Session
-from ...types.agentic_system.session_create_response import SessionCreateResponse
+from ...types.agents import session_create_params, session_delete_params, session_retrieve_params
+from ...types.agents.session import Session
+from ...types.agents.session_create_response import SessionCreateResponse
__all__ = ["SessionsResource", "AsyncSessionsResource"]
@@ -70,7 +70,7 @@ def create(
timeout: Override the client-level default timeout for this request, in seconds
"""
return self._post(
- "/agentic_system/session/create",
+ "/agents/session/create",
body=maybe_transform(
{
"agent_id": agent_id,
@@ -108,7 +108,7 @@ def retrieve(
timeout: Override the client-level default timeout for this request, in seconds
"""
return self._post(
- "/agentic_system/session/get",
+ "/agents/session/get",
body=maybe_transform({"turn_ids": turn_ids}, session_retrieve_params.SessionRetrieveParams),
options=make_request_options(
extra_headers=extra_headers,
@@ -150,7 +150,7 @@ def delete(
"""
extra_headers = {"Accept": "*/*", **(extra_headers or {})}
return self._post(
- "/agentic_system/session/delete",
+ "/agents/session/delete",
body=maybe_transform(
{
"agent_id": agent_id,
@@ -208,7 +208,7 @@ async def create(
timeout: Override the client-level default timeout for this request, in seconds
"""
return await self._post(
- "/agentic_system/session/create",
+ "/agents/session/create",
body=await async_maybe_transform(
{
"agent_id": agent_id,
@@ -246,7 +246,7 @@ async def retrieve(
timeout: Override the client-level default timeout for this request, in seconds
"""
return await self._post(
- "/agentic_system/session/get",
+ "/agents/session/get",
body=await async_maybe_transform({"turn_ids": turn_ids}, session_retrieve_params.SessionRetrieveParams),
options=make_request_options(
extra_headers=extra_headers,
@@ -288,7 +288,7 @@ async def delete(
"""
extra_headers = {"Accept": "*/*", **(extra_headers or {})}
return await self._post(
- "/agentic_system/session/delete",
+ "/agents/session/delete",
body=await async_maybe_transform(
{
"agent_id": agent_id,
diff --git a/src/llama_stack/resources/agentic_system/steps.py b/src/llama_stack/resources/agents/steps.py
similarity index 94%
rename from src/llama_stack/resources/agentic_system/steps.py
rename to src/llama_stack/resources/agents/steps.py
index 74962ec..df6649f 100644
--- a/src/llama_stack/resources/agentic_system/steps.py
+++ b/src/llama_stack/resources/agents/steps.py
@@ -18,8 +18,8 @@
async_to_streamed_response_wrapper,
)
from ..._base_client import make_request_options
-from ...types.agentic_system import step_retrieve_params
-from ...types.agentic_system.agentic_system_step import AgenticSystemStep
+from ...types.agents import step_retrieve_params
+from ...types.agents.agents_step import AgentsStep
__all__ = ["StepsResource", "AsyncStepsResource"]
@@ -56,7 +56,7 @@ def retrieve(
extra_query: Query | None = None,
extra_body: Body | None = None,
timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
- ) -> AgenticSystemStep:
+ ) -> AgentsStep:
"""
Args:
extra_headers: Send extra headers
@@ -68,7 +68,7 @@ def retrieve(
timeout: Override the client-level default timeout for this request, in seconds
"""
return self._get(
- "/agentic_system/step/get",
+ "/agents/step/get",
options=make_request_options(
extra_headers=extra_headers,
extra_query=extra_query,
@@ -83,7 +83,7 @@ def retrieve(
step_retrieve_params.StepRetrieveParams,
),
),
- cast_to=AgenticSystemStep,
+ cast_to=AgentsStep,
)
@@ -119,7 +119,7 @@ async def retrieve(
extra_query: Query | None = None,
extra_body: Body | None = None,
timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
- ) -> AgenticSystemStep:
+ ) -> AgentsStep:
"""
Args:
extra_headers: Send extra headers
@@ -131,7 +131,7 @@ async def retrieve(
timeout: Override the client-level default timeout for this request, in seconds
"""
return await self._get(
- "/agentic_system/step/get",
+ "/agents/step/get",
options=make_request_options(
extra_headers=extra_headers,
extra_query=extra_query,
@@ -146,7 +146,7 @@ async def retrieve(
step_retrieve_params.StepRetrieveParams,
),
),
- cast_to=AgenticSystemStep,
+ cast_to=AgentsStep,
)
diff --git a/src/llama_stack/resources/agentic_system/turns.py b/src/llama_stack/resources/agents/turns.py
similarity index 92%
rename from src/llama_stack/resources/agentic_system/turns.py
rename to src/llama_stack/resources/agents/turns.py
index 9df70f7..217fff1 100644
--- a/src/llama_stack/resources/agentic_system/turns.py
+++ b/src/llama_stack/resources/agents/turns.py
@@ -23,10 +23,10 @@
)
from ..._streaming import Stream, AsyncStream
from ..._base_client import make_request_options
-from ...types.agentic_system import turn_create_params, turn_retrieve_params
-from ...types.agentic_system.turn import Turn
+from ...types.agents import turn_create_params, turn_retrieve_params
+from ...types.agents.turn import Turn
from ...types.shared_params.attachment import Attachment
-from ...types.agentic_system.agentic_system_turn_stream_chunk import AgenticSystemTurnStreamChunk
+from ...types.agents.agents_turn_stream_chunk import AgentsTurnStreamChunk
__all__ = ["TurnsResource", "AsyncTurnsResource"]
@@ -66,7 +66,7 @@ def create(
extra_query: Query | None = None,
extra_body: Body | None = None,
timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
- ) -> AgenticSystemTurnStreamChunk:
+ ) -> AgentsTurnStreamChunk:
"""
Args:
extra_headers: Send extra headers
@@ -94,7 +94,7 @@ def create(
extra_query: Query | None = None,
extra_body: Body | None = None,
timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
- ) -> Stream[AgenticSystemTurnStreamChunk]:
+ ) -> Stream[AgentsTurnStreamChunk]:
"""
Args:
extra_headers: Send extra headers
@@ -122,7 +122,7 @@ def create(
extra_query: Query | None = None,
extra_body: Body | None = None,
timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
- ) -> AgenticSystemTurnStreamChunk | Stream[AgenticSystemTurnStreamChunk]:
+ ) -> AgentsTurnStreamChunk | Stream[AgentsTurnStreamChunk]:
"""
Args:
extra_headers: Send extra headers
@@ -150,10 +150,9 @@ def create(
extra_query: Query | None = None,
extra_body: Body | None = None,
timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
- ) -> AgenticSystemTurnStreamChunk | Stream[AgenticSystemTurnStreamChunk]:
- extra_headers = {"Accept": "text/event-stream", **(extra_headers or {})}
+ ) -> AgentsTurnStreamChunk | Stream[AgentsTurnStreamChunk]:
return self._post(
- "/agentic_system/turn/create",
+ "/agents/turn/create",
body=maybe_transform(
{
"agent_id": agent_id,
@@ -167,9 +166,9 @@ def create(
options=make_request_options(
extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout
),
- cast_to=AgenticSystemTurnStreamChunk,
+ cast_to=AgentsTurnStreamChunk,
stream=stream or False,
- stream_cls=Stream[AgenticSystemTurnStreamChunk],
+ stream_cls=Stream[AgentsTurnStreamChunk],
)
def retrieve(
@@ -195,7 +194,7 @@ def retrieve(
timeout: Override the client-level default timeout for this request, in seconds
"""
return self._get(
- "/agentic_system/turn/get",
+ "/agents/turn/get",
options=make_request_options(
extra_headers=extra_headers,
extra_query=extra_query,
@@ -248,7 +247,7 @@ async def create(
extra_query: Query | None = None,
extra_body: Body | None = None,
timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
- ) -> AgenticSystemTurnStreamChunk:
+ ) -> AgentsTurnStreamChunk:
"""
Args:
extra_headers: Send extra headers
@@ -276,7 +275,7 @@ async def create(
extra_query: Query | None = None,
extra_body: Body | None = None,
timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
- ) -> AsyncStream[AgenticSystemTurnStreamChunk]:
+ ) -> AsyncStream[AgentsTurnStreamChunk]:
"""
Args:
extra_headers: Send extra headers
@@ -304,7 +303,7 @@ async def create(
extra_query: Query | None = None,
extra_body: Body | None = None,
timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
- ) -> AgenticSystemTurnStreamChunk | AsyncStream[AgenticSystemTurnStreamChunk]:
+ ) -> AgentsTurnStreamChunk | AsyncStream[AgentsTurnStreamChunk]:
"""
Args:
extra_headers: Send extra headers
@@ -332,10 +331,9 @@ async def create(
extra_query: Query | None = None,
extra_body: Body | None = None,
timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
- ) -> AgenticSystemTurnStreamChunk | AsyncStream[AgenticSystemTurnStreamChunk]:
- extra_headers = {"Accept": "text/event-stream", **(extra_headers or {})}
+ ) -> AgentsTurnStreamChunk | AsyncStream[AgentsTurnStreamChunk]:
return await self._post(
- "/agentic_system/turn/create",
+ "/agents/turn/create",
body=await async_maybe_transform(
{
"agent_id": agent_id,
@@ -349,9 +347,9 @@ async def create(
options=make_request_options(
extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout
),
- cast_to=AgenticSystemTurnStreamChunk,
+ cast_to=AgentsTurnStreamChunk,
stream=stream or False,
- stream_cls=AsyncStream[AgenticSystemTurnStreamChunk],
+ stream_cls=AsyncStream[AgentsTurnStreamChunk],
)
async def retrieve(
@@ -377,7 +375,7 @@ async def retrieve(
timeout: Override the client-level default timeout for this request, in seconds
"""
return await self._get(
- "/agentic_system/turn/get",
+ "/agents/turn/get",
options=make_request_options(
extra_headers=extra_headers,
extra_query=extra_query,
diff --git a/src/llama_stack/types/__init__.py b/src/llama_stack/types/__init__.py
index cc354da..8f43ce3 100644
--- a/src/llama_stack/types/__init__.py
+++ b/src/llama_stack/types/__init__.py
@@ -22,8 +22,11 @@
from .post_training_job import PostTrainingJob as PostTrainingJob
from .dataset_get_params import DatasetGetParams as DatasetGetParams
from .train_eval_dataset import TrainEvalDataset as TrainEvalDataset
+from .agent_create_params import AgentCreateParams as AgentCreateParams
+from .agent_delete_params import AgentDeleteParams as AgentDeleteParams
from .tool_execution_step import ToolExecutionStep as ToolExecutionStep
from .telemetry_log_params import TelemetryLogParams as TelemetryLogParams
+from .agent_create_response import AgentCreateResponse as AgentCreateResponse
from .batch_chat_completion import BatchChatCompletion as BatchChatCompletion
from .dataset_create_params import DatasetCreateParams as DatasetCreateParams
from .dataset_delete_params import DatasetDeleteParams as DatasetDeleteParams
@@ -46,12 +49,9 @@
from .reward_scoring_score_params import RewardScoringScoreParams as RewardScoringScoreParams
from .safety_run_shields_response import SafetyRunShieldsResponse as SafetyRunShieldsResponse
from .tool_param_definition_param import ToolParamDefinitionParam as ToolParamDefinitionParam
-from .agentic_system_create_params import AgenticSystemCreateParams as AgenticSystemCreateParams
-from .agentic_system_delete_params import AgenticSystemDeleteParams as AgenticSystemDeleteParams
from .chat_completion_stream_chunk import ChatCompletionStreamChunk as ChatCompletionStreamChunk
from .telemetry_get_trace_response import TelemetryGetTraceResponse as TelemetryGetTraceResponse
from .inference_completion_response import InferenceCompletionResponse as InferenceCompletionResponse
-from .agentic_system_create_response import AgenticSystemCreateResponse as AgenticSystemCreateResponse
from .evaluation_summarization_params import EvaluationSummarizationParams as EvaluationSummarizationParams
from .rest_api_execution_config_param import RestAPIExecutionConfigParam as RestAPIExecutionConfigParam
from .inference_chat_completion_params import InferenceChatCompletionParams as InferenceChatCompletionParams
diff --git a/src/llama_stack/types/agentic_system_create_params.py b/src/llama_stack/types/agent_create_params.py
similarity index 97%
rename from src/llama_stack/types/agentic_system_create_params.py
rename to src/llama_stack/types/agent_create_params.py
index 69290da..39738a3 100644
--- a/src/llama_stack/types/agentic_system_create_params.py
+++ b/src/llama_stack/types/agent_create_params.py
@@ -14,7 +14,7 @@
from .default_query_generator_config_param import DefaultQueryGeneratorConfigParam
__all__ = [
- "AgenticSystemCreateParams",
+ "AgentCreateParams",
"AgentConfig",
"AgentConfigTool",
"AgentConfigToolSearchToolDefinition",
@@ -32,11 +32,13 @@
]
-class AgenticSystemCreateParams(TypedDict, total=False):
+class AgentCreateParams(TypedDict, total=False):
agent_config: Required[AgentConfig]
class AgentConfigToolSearchToolDefinition(TypedDict, total=False):
+ api_key: Required[str]
+
engine: Required[Literal["bing", "brave"]]
type: Required[Literal["brave_search"]]
@@ -49,6 +51,8 @@ class AgentConfigToolSearchToolDefinition(TypedDict, total=False):
class AgentConfigToolWolframAlphaToolDefinition(TypedDict, total=False):
+ api_key: Required[str]
+
type: Required[Literal["wolfram_alpha"]]
input_shields: Iterable[ShieldDefinitionParam]
diff --git a/src/llama_stack/types/agentic_system_create_response.py b/src/llama_stack/types/agent_create_response.py
similarity index 61%
rename from src/llama_stack/types/agentic_system_create_response.py
rename to src/llama_stack/types/agent_create_response.py
index a9a8593..be25364 100644
--- a/src/llama_stack/types/agentic_system_create_response.py
+++ b/src/llama_stack/types/agent_create_response.py
@@ -4,8 +4,8 @@
from .._models import BaseModel
-__all__ = ["AgenticSystemCreateResponse"]
+__all__ = ["AgentCreateResponse"]
-class AgenticSystemCreateResponse(BaseModel):
+class AgentCreateResponse(BaseModel):
agent_id: str
diff --git a/src/llama_stack/types/agentic_system_delete_params.py b/src/llama_stack/types/agent_delete_params.py
similarity index 67%
rename from src/llama_stack/types/agentic_system_delete_params.py
rename to src/llama_stack/types/agent_delete_params.py
index 7aa881c..cfc4cd0 100644
--- a/src/llama_stack/types/agentic_system_delete_params.py
+++ b/src/llama_stack/types/agent_delete_params.py
@@ -4,8 +4,8 @@
from typing_extensions import Required, TypedDict
-__all__ = ["AgenticSystemDeleteParams"]
+__all__ = ["AgentDeleteParams"]
-class AgenticSystemDeleteParams(TypedDict, total=False):
+class AgentDeleteParams(TypedDict, total=False):
agent_id: Required[str]
diff --git a/src/llama_stack/types/agentic_system/turn_stream_event.py b/src/llama_stack/types/agentic_system/turn_stream_event.py
deleted file mode 100644
index 7e627ec..0000000
--- a/src/llama_stack/types/agentic_system/turn_stream_event.py
+++ /dev/null
@@ -1,98 +0,0 @@
-# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
-
-from typing import Dict, List, Union, Optional
-from typing_extensions import Literal, TypeAlias
-
-from pydantic import Field as FieldInfo
-
-from .turn import Turn
-from ..._models import BaseModel
-from ..inference_step import InferenceStep
-from ..shared.tool_call import ToolCall
-from ..shield_call_step import ShieldCallStep
-from ..tool_execution_step import ToolExecutionStep
-from ..memory_retrieval_step import MemoryRetrievalStep
-
-__all__ = [
- "TurnStreamEvent",
- "Payload",
- "PayloadAgenticSystemTurnResponseStepStartPayload",
- "PayloadAgenticSystemTurnResponseStepProgressPayload",
- "PayloadAgenticSystemTurnResponseStepProgressPayloadToolCallDelta",
- "PayloadAgenticSystemTurnResponseStepProgressPayloadToolCallDeltaContent",
- "PayloadAgenticSystemTurnResponseStepCompletePayload",
- "PayloadAgenticSystemTurnResponseStepCompletePayloadStepDetails",
- "PayloadAgenticSystemTurnResponseTurnStartPayload",
- "PayloadAgenticSystemTurnResponseTurnCompletePayload",
-]
-
-
-class PayloadAgenticSystemTurnResponseStepStartPayload(BaseModel):
- event_type: Literal["step_start"]
-
- step_id: str
-
- step_type: Literal["inference", "tool_execution", "shield_call", "memory_retrieval"]
-
- metadata: Optional[Dict[str, Union[bool, float, str, List[object], object, None]]] = None
-
-
-PayloadAgenticSystemTurnResponseStepProgressPayloadToolCallDeltaContent: TypeAlias = Union[str, ToolCall]
-
-
-class PayloadAgenticSystemTurnResponseStepProgressPayloadToolCallDelta(BaseModel):
- content: PayloadAgenticSystemTurnResponseStepProgressPayloadToolCallDeltaContent
-
- parse_status: Literal["started", "in_progress", "failure", "success"]
-
-
-class PayloadAgenticSystemTurnResponseStepProgressPayload(BaseModel):
- event_type: Literal["step_progress"]
-
- step_id: str
-
- step_type: Literal["inference", "tool_execution", "shield_call", "memory_retrieval"]
-
- text_delta_model_response: Optional[str] = FieldInfo(alias="model_response_text_delta", default=None)
-
- tool_call_delta: Optional[PayloadAgenticSystemTurnResponseStepProgressPayloadToolCallDelta] = None
-
- tool_response_text_delta: Optional[str] = None
-
-
-PayloadAgenticSystemTurnResponseStepCompletePayloadStepDetails: TypeAlias = Union[
- InferenceStep, ToolExecutionStep, ShieldCallStep, MemoryRetrievalStep
-]
-
-
-class PayloadAgenticSystemTurnResponseStepCompletePayload(BaseModel):
- event_type: Literal["step_complete"]
-
- step_details: PayloadAgenticSystemTurnResponseStepCompletePayloadStepDetails
-
- step_type: Literal["inference", "tool_execution", "shield_call", "memory_retrieval"]
-
-
-class PayloadAgenticSystemTurnResponseTurnStartPayload(BaseModel):
- event_type: Literal["turn_start"]
-
- turn_id: str
-
-
-class PayloadAgenticSystemTurnResponseTurnCompletePayload(BaseModel):
- event_type: Literal["turn_complete"]
-
- turn: Turn
-
-
-Payload: TypeAlias = Union[
- PayloadAgenticSystemTurnResponseStepStartPayload,
- PayloadAgenticSystemTurnResponseStepProgressPayload,
- PayloadAgenticSystemTurnResponseStepCompletePayload,
- PayloadAgenticSystemTurnResponseTurnStartPayload,
- PayloadAgenticSystemTurnResponseTurnCompletePayload,
-]
-
-
-class TurnStreamEvent(BaseModel):
- payload: Payload
diff --git a/src/llama_stack/types/agentic_system/__init__.py b/src/llama_stack/types/agents/__init__.py
similarity index 81%
rename from src/llama_stack/types/agentic_system/__init__.py
rename to src/llama_stack/types/agents/__init__.py
index 5db1683..42a7d1b 100644
--- a/src/llama_stack/types/agentic_system/__init__.py
+++ b/src/llama_stack/types/agents/__init__.py
@@ -4,13 +4,13 @@
from .turn import Turn as Turn
from .session import Session as Session
+from .agents_step import AgentsStep as AgentsStep
from .turn_stream_event import TurnStreamEvent as TurnStreamEvent
from .turn_create_params import TurnCreateParams as TurnCreateParams
-from .agentic_system_step import AgenticSystemStep as AgenticSystemStep
from .step_retrieve_params import StepRetrieveParams as StepRetrieveParams
from .turn_retrieve_params import TurnRetrieveParams as TurnRetrieveParams
from .session_create_params import SessionCreateParams as SessionCreateParams
from .session_delete_params import SessionDeleteParams as SessionDeleteParams
from .session_create_response import SessionCreateResponse as SessionCreateResponse
from .session_retrieve_params import SessionRetrieveParams as SessionRetrieveParams
-from .agentic_system_turn_stream_chunk import AgenticSystemTurnStreamChunk as AgenticSystemTurnStreamChunk
+from .agents_turn_stream_chunk import AgentsTurnStreamChunk as AgentsTurnStreamChunk
diff --git a/src/llama_stack/types/agentic_system/agentic_system_step.py b/src/llama_stack/types/agents/agents_step.py
similarity index 86%
rename from src/llama_stack/types/agentic_system/agentic_system_step.py
rename to src/llama_stack/types/agents/agents_step.py
index a26fe8f..743890d 100644
--- a/src/llama_stack/types/agentic_system/agentic_system_step.py
+++ b/src/llama_stack/types/agents/agents_step.py
@@ -9,10 +9,10 @@
from ..tool_execution_step import ToolExecutionStep
from ..memory_retrieval_step import MemoryRetrievalStep
-__all__ = ["AgenticSystemStep", "Step"]
+__all__ = ["AgentsStep", "Step"]
Step: TypeAlias = Union[InferenceStep, ToolExecutionStep, ShieldCallStep, MemoryRetrievalStep]
-class AgenticSystemStep(BaseModel):
+class AgentsStep(BaseModel):
step: Step
diff --git a/src/llama_stack/types/agentic_system/agentic_system_turn_stream_chunk.py b/src/llama_stack/types/agents/agents_turn_stream_chunk.py
similarity index 68%
rename from src/llama_stack/types/agentic_system/agentic_system_turn_stream_chunk.py
rename to src/llama_stack/types/agents/agents_turn_stream_chunk.py
index 0b6a2cd..79fd2d3 100644
--- a/src/llama_stack/types/agentic_system/agentic_system_turn_stream_chunk.py
+++ b/src/llama_stack/types/agents/agents_turn_stream_chunk.py
@@ -5,8 +5,8 @@
from ..._models import BaseModel
from .turn_stream_event import TurnStreamEvent
-__all__ = ["AgenticSystemTurnStreamChunk"]
+__all__ = ["AgentsTurnStreamChunk"]
-class AgenticSystemTurnStreamChunk(BaseModel):
+class AgentsTurnStreamChunk(BaseModel):
event: TurnStreamEvent
diff --git a/src/llama_stack/types/agentic_system/session.py b/src/llama_stack/types/agents/session.py
similarity index 100%
rename from src/llama_stack/types/agentic_system/session.py
rename to src/llama_stack/types/agents/session.py
diff --git a/src/llama_stack/types/agentic_system/session_create_params.py b/src/llama_stack/types/agents/session_create_params.py
similarity index 100%
rename from src/llama_stack/types/agentic_system/session_create_params.py
rename to src/llama_stack/types/agents/session_create_params.py
diff --git a/src/llama_stack/types/agentic_system/session_create_response.py b/src/llama_stack/types/agents/session_create_response.py
similarity index 100%
rename from src/llama_stack/types/agentic_system/session_create_response.py
rename to src/llama_stack/types/agents/session_create_response.py
diff --git a/src/llama_stack/types/agentic_system/session_delete_params.py b/src/llama_stack/types/agents/session_delete_params.py
similarity index 100%
rename from src/llama_stack/types/agentic_system/session_delete_params.py
rename to src/llama_stack/types/agents/session_delete_params.py
diff --git a/src/llama_stack/types/agentic_system/session_retrieve_params.py b/src/llama_stack/types/agents/session_retrieve_params.py
similarity index 100%
rename from src/llama_stack/types/agentic_system/session_retrieve_params.py
rename to src/llama_stack/types/agents/session_retrieve_params.py
diff --git a/src/llama_stack/types/agentic_system/step_retrieve_params.py b/src/llama_stack/types/agents/step_retrieve_params.py
similarity index 100%
rename from src/llama_stack/types/agentic_system/step_retrieve_params.py
rename to src/llama_stack/types/agents/step_retrieve_params.py
diff --git a/src/llama_stack/types/agentic_system/turn.py b/src/llama_stack/types/agents/turn.py
similarity index 100%
rename from src/llama_stack/types/agentic_system/turn.py
rename to src/llama_stack/types/agents/turn.py
diff --git a/src/llama_stack/types/agentic_system/turn_create_params.py b/src/llama_stack/types/agents/turn_create_params.py
similarity index 100%
rename from src/llama_stack/types/agentic_system/turn_create_params.py
rename to src/llama_stack/types/agents/turn_create_params.py
diff --git a/src/llama_stack/types/agentic_system/turn_retrieve_params.py b/src/llama_stack/types/agents/turn_retrieve_params.py
similarity index 100%
rename from src/llama_stack/types/agentic_system/turn_retrieve_params.py
rename to src/llama_stack/types/agents/turn_retrieve_params.py
diff --git a/src/llama_stack/types/agents/turn_stream_event.py b/src/llama_stack/types/agents/turn_stream_event.py
new file mode 100644
index 0000000..2d810d2
--- /dev/null
+++ b/src/llama_stack/types/agents/turn_stream_event.py
@@ -0,0 +1,98 @@
+# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
+
+from typing import Dict, List, Union, Optional
+from typing_extensions import Literal, TypeAlias
+
+from pydantic import Field as FieldInfo
+
+from .turn import Turn
+from ..._models import BaseModel
+from ..inference_step import InferenceStep
+from ..shared.tool_call import ToolCall
+from ..shield_call_step import ShieldCallStep
+from ..tool_execution_step import ToolExecutionStep
+from ..memory_retrieval_step import MemoryRetrievalStep
+
+__all__ = [
+ "TurnStreamEvent",
+ "Payload",
+ "PayloadAgentTurnResponseStepStartPayload",
+ "PayloadAgentTurnResponseStepProgressPayload",
+ "PayloadAgentTurnResponseStepProgressPayloadToolCallDelta",
+ "PayloadAgentTurnResponseStepProgressPayloadToolCallDeltaContent",
+ "PayloadAgentTurnResponseStepCompletePayload",
+ "PayloadAgentTurnResponseStepCompletePayloadStepDetails",
+ "PayloadAgentTurnResponseTurnStartPayload",
+ "PayloadAgentTurnResponseTurnCompletePayload",
+]
+
+
+class PayloadAgentTurnResponseStepStartPayload(BaseModel):
+ event_type: Literal["step_start"]
+
+ step_id: str
+
+ step_type: Literal["inference", "tool_execution", "shield_call", "memory_retrieval"]
+
+ metadata: Optional[Dict[str, Union[bool, float, str, List[object], object, None]]] = None
+
+
+PayloadAgentTurnResponseStepProgressPayloadToolCallDeltaContent: TypeAlias = Union[str, ToolCall]
+
+
+class PayloadAgentTurnResponseStepProgressPayloadToolCallDelta(BaseModel):
+ content: PayloadAgentTurnResponseStepProgressPayloadToolCallDeltaContent
+
+ parse_status: Literal["started", "in_progress", "failure", "success"]
+
+
+class PayloadAgentTurnResponseStepProgressPayload(BaseModel):
+ event_type: Literal["step_progress"]
+
+ step_id: str
+
+ step_type: Literal["inference", "tool_execution", "shield_call", "memory_retrieval"]
+
+ text_delta_model_response: Optional[str] = FieldInfo(alias="model_response_text_delta", default=None)
+
+ tool_call_delta: Optional[PayloadAgentTurnResponseStepProgressPayloadToolCallDelta] = None
+
+ tool_response_text_delta: Optional[str] = None
+
+
+PayloadAgentTurnResponseStepCompletePayloadStepDetails: TypeAlias = Union[
+ InferenceStep, ToolExecutionStep, ShieldCallStep, MemoryRetrievalStep
+]
+
+
+class PayloadAgentTurnResponseStepCompletePayload(BaseModel):
+ event_type: Literal["step_complete"]
+
+ step_details: PayloadAgentTurnResponseStepCompletePayloadStepDetails
+
+ step_type: Literal["inference", "tool_execution", "shield_call", "memory_retrieval"]
+
+
+class PayloadAgentTurnResponseTurnStartPayload(BaseModel):
+ event_type: Literal["turn_start"]
+
+ turn_id: str
+
+
+class PayloadAgentTurnResponseTurnCompletePayload(BaseModel):
+ event_type: Literal["turn_complete"]
+
+ turn: Turn
+
+
+Payload: TypeAlias = Union[
+ PayloadAgentTurnResponseStepStartPayload,
+ PayloadAgentTurnResponseStepProgressPayload,
+ PayloadAgentTurnResponseStepCompletePayload,
+ PayloadAgentTurnResponseTurnStartPayload,
+ PayloadAgentTurnResponseTurnCompletePayload,
+]
+
+
+class TurnStreamEvent(BaseModel):
+ payload: Payload
diff --git a/src/llama_stack/types/memory_bank_insert_params.py b/src/llama_stack/types/memory_bank_insert_params.py
index 9c7d6a0..c460fc2 100644
--- a/src/llama_stack/types/memory_bank_insert_params.py
+++ b/src/llama_stack/types/memory_bank_insert_params.py
@@ -23,4 +23,4 @@ class Document(TypedDict, total=False):
metadata: Required[Dict[str, Union[bool, float, str, Iterable[object], object, None]]]
- mime_type: Required[str]
+ mime_type: str
diff --git a/src/llama_stack/types/memory_bank_update_params.py b/src/llama_stack/types/memory_bank_update_params.py
index 4acc881..2ab747b 100644
--- a/src/llama_stack/types/memory_bank_update_params.py
+++ b/src/llama_stack/types/memory_bank_update_params.py
@@ -21,4 +21,4 @@ class Document(TypedDict, total=False):
metadata: Required[Dict[str, Union[bool, float, str, Iterable[object], object, None]]]
- mime_type: Required[str]
+ mime_type: str
diff --git a/src/llama_stack/types/memory_banks/document_retrieve_response.py b/src/llama_stack/types/memory_banks/document_retrieve_response.py
index dd2b4c8..0cfe8c6 100644
--- a/src/llama_stack/types/memory_banks/document_retrieve_response.py
+++ b/src/llama_stack/types/memory_banks/document_retrieve_response.py
@@ -1,6 +1,6 @@
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
-from typing import Dict, List, Union
+from typing import Dict, List, Union, Optional
from ..._models import BaseModel
@@ -14,4 +14,4 @@ class DocumentRetrieveResponse(BaseModel):
metadata: Dict[str, Union[bool, float, str, List[object], object, None]]
- mime_type: str
+ mime_type: Optional[str] = None
diff --git a/src/llama_stack_client/lib/.keep b/src/llama_stack_client/lib/.keep
new file mode 100644
index 0000000..5e2c99f
--- /dev/null
+++ b/src/llama_stack_client/lib/.keep
@@ -0,0 +1,4 @@
+File generated from our OpenAPI spec by Stainless.
+
+This directory can be used to store custom files to expand the SDK.
+It is ignored by Stainless code generation and its content (other than this keep file) won't be touched.
\ No newline at end of file
diff --git a/tests/api_resources/agentic_system/__init__.py b/tests/api_resources/agents/__init__.py
similarity index 100%
rename from tests/api_resources/agentic_system/__init__.py
rename to tests/api_resources/agents/__init__.py
diff --git a/tests/api_resources/agentic_system/test_sessions.py b/tests/api_resources/agents/test_sessions.py
similarity index 83%
rename from tests/api_resources/agentic_system/test_sessions.py
rename to tests/api_resources/agents/test_sessions.py
index a6267c8..4040ef0 100644
--- a/tests/api_resources/agentic_system/test_sessions.py
+++ b/tests/api_resources/agents/test_sessions.py
@@ -9,7 +9,7 @@
from llama_stack import LlamaStack, AsyncLlamaStack
from tests.utils import assert_matches_type
-from llama_stack.types.agentic_system import (
+from llama_stack.types.agents import (
Session,
SessionCreateResponse,
)
@@ -22,7 +22,7 @@ class TestSessions:
@parametrize
def test_method_create(self, client: LlamaStack) -> None:
- session = client.agentic_system.sessions.create(
+ session = client.agents.sessions.create(
agent_id="agent_id",
session_name="session_name",
)
@@ -30,7 +30,7 @@ def test_method_create(self, client: LlamaStack) -> None:
@parametrize
def test_raw_response_create(self, client: LlamaStack) -> None:
- response = client.agentic_system.sessions.with_raw_response.create(
+ response = client.agents.sessions.with_raw_response.create(
agent_id="agent_id",
session_name="session_name",
)
@@ -42,7 +42,7 @@ def test_raw_response_create(self, client: LlamaStack) -> None:
@parametrize
def test_streaming_response_create(self, client: LlamaStack) -> None:
- with client.agentic_system.sessions.with_streaming_response.create(
+ with client.agents.sessions.with_streaming_response.create(
agent_id="agent_id",
session_name="session_name",
) as response:
@@ -56,7 +56,7 @@ def test_streaming_response_create(self, client: LlamaStack) -> None:
@parametrize
def test_method_retrieve(self, client: LlamaStack) -> None:
- session = client.agentic_system.sessions.retrieve(
+ session = client.agents.sessions.retrieve(
agent_id="agent_id",
session_id="session_id",
)
@@ -64,7 +64,7 @@ def test_method_retrieve(self, client: LlamaStack) -> None:
@parametrize
def test_method_retrieve_with_all_params(self, client: LlamaStack) -> None:
- session = client.agentic_system.sessions.retrieve(
+ session = client.agents.sessions.retrieve(
agent_id="agent_id",
session_id="session_id",
turn_ids=["string", "string", "string"],
@@ -73,7 +73,7 @@ def test_method_retrieve_with_all_params(self, client: LlamaStack) -> None:
@parametrize
def test_raw_response_retrieve(self, client: LlamaStack) -> None:
- response = client.agentic_system.sessions.with_raw_response.retrieve(
+ response = client.agents.sessions.with_raw_response.retrieve(
agent_id="agent_id",
session_id="session_id",
)
@@ -85,7 +85,7 @@ def test_raw_response_retrieve(self, client: LlamaStack) -> None:
@parametrize
def test_streaming_response_retrieve(self, client: LlamaStack) -> None:
- with client.agentic_system.sessions.with_streaming_response.retrieve(
+ with client.agents.sessions.with_streaming_response.retrieve(
agent_id="agent_id",
session_id="session_id",
) as response:
@@ -99,7 +99,7 @@ def test_streaming_response_retrieve(self, client: LlamaStack) -> None:
@parametrize
def test_method_delete(self, client: LlamaStack) -> None:
- session = client.agentic_system.sessions.delete(
+ session = client.agents.sessions.delete(
agent_id="agent_id",
session_id="session_id",
)
@@ -107,7 +107,7 @@ def test_method_delete(self, client: LlamaStack) -> None:
@parametrize
def test_raw_response_delete(self, client: LlamaStack) -> None:
- response = client.agentic_system.sessions.with_raw_response.delete(
+ response = client.agents.sessions.with_raw_response.delete(
agent_id="agent_id",
session_id="session_id",
)
@@ -119,7 +119,7 @@ def test_raw_response_delete(self, client: LlamaStack) -> None:
@parametrize
def test_streaming_response_delete(self, client: LlamaStack) -> None:
- with client.agentic_system.sessions.with_streaming_response.delete(
+ with client.agents.sessions.with_streaming_response.delete(
agent_id="agent_id",
session_id="session_id",
) as response:
@@ -137,7 +137,7 @@ class TestAsyncSessions:
@parametrize
async def test_method_create(self, async_client: AsyncLlamaStack) -> None:
- session = await async_client.agentic_system.sessions.create(
+ session = await async_client.agents.sessions.create(
agent_id="agent_id",
session_name="session_name",
)
@@ -145,7 +145,7 @@ async def test_method_create(self, async_client: AsyncLlamaStack) -> None:
@parametrize
async def test_raw_response_create(self, async_client: AsyncLlamaStack) -> None:
- response = await async_client.agentic_system.sessions.with_raw_response.create(
+ response = await async_client.agents.sessions.with_raw_response.create(
agent_id="agent_id",
session_name="session_name",
)
@@ -157,7 +157,7 @@ async def test_raw_response_create(self, async_client: AsyncLlamaStack) -> None:
@parametrize
async def test_streaming_response_create(self, async_client: AsyncLlamaStack) -> None:
- async with async_client.agentic_system.sessions.with_streaming_response.create(
+ async with async_client.agents.sessions.with_streaming_response.create(
agent_id="agent_id",
session_name="session_name",
) as response:
@@ -171,7 +171,7 @@ async def test_streaming_response_create(self, async_client: AsyncLlamaStack) ->
@parametrize
async def test_method_retrieve(self, async_client: AsyncLlamaStack) -> None:
- session = await async_client.agentic_system.sessions.retrieve(
+ session = await async_client.agents.sessions.retrieve(
agent_id="agent_id",
session_id="session_id",
)
@@ -179,7 +179,7 @@ async def test_method_retrieve(self, async_client: AsyncLlamaStack) -> None:
@parametrize
async def test_method_retrieve_with_all_params(self, async_client: AsyncLlamaStack) -> None:
- session = await async_client.agentic_system.sessions.retrieve(
+ session = await async_client.agents.sessions.retrieve(
agent_id="agent_id",
session_id="session_id",
turn_ids=["string", "string", "string"],
@@ -188,7 +188,7 @@ async def test_method_retrieve_with_all_params(self, async_client: AsyncLlamaSta
@parametrize
async def test_raw_response_retrieve(self, async_client: AsyncLlamaStack) -> None:
- response = await async_client.agentic_system.sessions.with_raw_response.retrieve(
+ response = await async_client.agents.sessions.with_raw_response.retrieve(
agent_id="agent_id",
session_id="session_id",
)
@@ -200,7 +200,7 @@ async def test_raw_response_retrieve(self, async_client: AsyncLlamaStack) -> Non
@parametrize
async def test_streaming_response_retrieve(self, async_client: AsyncLlamaStack) -> None:
- async with async_client.agentic_system.sessions.with_streaming_response.retrieve(
+ async with async_client.agents.sessions.with_streaming_response.retrieve(
agent_id="agent_id",
session_id="session_id",
) as response:
@@ -214,7 +214,7 @@ async def test_streaming_response_retrieve(self, async_client: AsyncLlamaStack)
@parametrize
async def test_method_delete(self, async_client: AsyncLlamaStack) -> None:
- session = await async_client.agentic_system.sessions.delete(
+ session = await async_client.agents.sessions.delete(
agent_id="agent_id",
session_id="session_id",
)
@@ -222,7 +222,7 @@ async def test_method_delete(self, async_client: AsyncLlamaStack) -> None:
@parametrize
async def test_raw_response_delete(self, async_client: AsyncLlamaStack) -> None:
- response = await async_client.agentic_system.sessions.with_raw_response.delete(
+ response = await async_client.agents.sessions.with_raw_response.delete(
agent_id="agent_id",
session_id="session_id",
)
@@ -234,7 +234,7 @@ async def test_raw_response_delete(self, async_client: AsyncLlamaStack) -> None:
@parametrize
async def test_streaming_response_delete(self, async_client: AsyncLlamaStack) -> None:
- async with async_client.agentic_system.sessions.with_streaming_response.delete(
+ async with async_client.agents.sessions.with_streaming_response.delete(
agent_id="agent_id",
session_id="session_id",
) as response:
diff --git a/tests/api_resources/agentic_system/test_steps.py b/tests/api_resources/agents/test_steps.py
similarity index 73%
rename from tests/api_resources/agentic_system/test_steps.py
rename to tests/api_resources/agents/test_steps.py
index 48a6775..3dda39e 100644
--- a/tests/api_resources/agentic_system/test_steps.py
+++ b/tests/api_resources/agents/test_steps.py
@@ -9,7 +9,7 @@
from llama_stack import LlamaStack, AsyncLlamaStack
from tests.utils import assert_matches_type
-from llama_stack.types.agentic_system import AgenticSystemStep
+from llama_stack.types.agents import AgentsStep
base_url = os.environ.get("TEST_API_BASE_URL", "http://127.0.0.1:4010")
@@ -19,16 +19,16 @@ class TestSteps:
@parametrize
def test_method_retrieve(self, client: LlamaStack) -> None:
- step = client.agentic_system.steps.retrieve(
+ step = client.agents.steps.retrieve(
agent_id="agent_id",
step_id="step_id",
turn_id="turn_id",
)
- assert_matches_type(AgenticSystemStep, step, path=["response"])
+ assert_matches_type(AgentsStep, step, path=["response"])
@parametrize
def test_raw_response_retrieve(self, client: LlamaStack) -> None:
- response = client.agentic_system.steps.with_raw_response.retrieve(
+ response = client.agents.steps.with_raw_response.retrieve(
agent_id="agent_id",
step_id="step_id",
turn_id="turn_id",
@@ -37,11 +37,11 @@ def test_raw_response_retrieve(self, client: LlamaStack) -> None:
assert response.is_closed is True
assert response.http_request.headers.get("X-Stainless-Lang") == "python"
step = response.parse()
- assert_matches_type(AgenticSystemStep, step, path=["response"])
+ assert_matches_type(AgentsStep, step, path=["response"])
@parametrize
def test_streaming_response_retrieve(self, client: LlamaStack) -> None:
- with client.agentic_system.steps.with_streaming_response.retrieve(
+ with client.agents.steps.with_streaming_response.retrieve(
agent_id="agent_id",
step_id="step_id",
turn_id="turn_id",
@@ -50,7 +50,7 @@ def test_streaming_response_retrieve(self, client: LlamaStack) -> None:
assert response.http_request.headers.get("X-Stainless-Lang") == "python"
step = response.parse()
- assert_matches_type(AgenticSystemStep, step, path=["response"])
+ assert_matches_type(AgentsStep, step, path=["response"])
assert cast(Any, response.is_closed) is True
@@ -60,16 +60,16 @@ class TestAsyncSteps:
@parametrize
async def test_method_retrieve(self, async_client: AsyncLlamaStack) -> None:
- step = await async_client.agentic_system.steps.retrieve(
+ step = await async_client.agents.steps.retrieve(
agent_id="agent_id",
step_id="step_id",
turn_id="turn_id",
)
- assert_matches_type(AgenticSystemStep, step, path=["response"])
+ assert_matches_type(AgentsStep, step, path=["response"])
@parametrize
async def test_raw_response_retrieve(self, async_client: AsyncLlamaStack) -> None:
- response = await async_client.agentic_system.steps.with_raw_response.retrieve(
+ response = await async_client.agents.steps.with_raw_response.retrieve(
agent_id="agent_id",
step_id="step_id",
turn_id="turn_id",
@@ -78,11 +78,11 @@ async def test_raw_response_retrieve(self, async_client: AsyncLlamaStack) -> Non
assert response.is_closed is True
assert response.http_request.headers.get("X-Stainless-Lang") == "python"
step = await response.parse()
- assert_matches_type(AgenticSystemStep, step, path=["response"])
+ assert_matches_type(AgentsStep, step, path=["response"])
@parametrize
async def test_streaming_response_retrieve(self, async_client: AsyncLlamaStack) -> None:
- async with async_client.agentic_system.steps.with_streaming_response.retrieve(
+ async with async_client.agents.steps.with_streaming_response.retrieve(
agent_id="agent_id",
step_id="step_id",
turn_id="turn_id",
@@ -91,6 +91,6 @@ async def test_streaming_response_retrieve(self, async_client: AsyncLlamaStack)
assert response.http_request.headers.get("X-Stainless-Lang") == "python"
step = await response.parse()
- assert_matches_type(AgenticSystemStep, step, path=["response"])
+ assert_matches_type(AgentsStep, step, path=["response"])
assert cast(Any, response.is_closed) is True
diff --git a/tests/api_resources/agentic_system/test_turns.py b/tests/api_resources/agents/test_turns.py
similarity index 87%
rename from tests/api_resources/agentic_system/test_turns.py
rename to tests/api_resources/agents/test_turns.py
index 8d2e3ca..47614e2 100644
--- a/tests/api_resources/agentic_system/test_turns.py
+++ b/tests/api_resources/agents/test_turns.py
@@ -9,10 +9,7 @@
from llama_stack import LlamaStack, AsyncLlamaStack
from tests.utils import assert_matches_type
-from llama_stack.types.agentic_system import (
- Turn,
- AgenticSystemTurnStreamChunk,
-)
+from llama_stack.types.agents import Turn, AgentsTurnStreamChunk
base_url = os.environ.get("TEST_API_BASE_URL", "http://127.0.0.1:4010")
@@ -22,7 +19,7 @@ class TestTurns:
@parametrize
def test_method_create_overload_1(self, client: LlamaStack) -> None:
- turn = client.agentic_system.turns.create(
+ turn = client.agents.turns.create(
agent_id="agent_id",
messages=[
{
@@ -40,11 +37,11 @@ def test_method_create_overload_1(self, client: LlamaStack) -> None:
],
session_id="session_id",
)
- assert_matches_type(AgenticSystemTurnStreamChunk, turn, path=["response"])
+ assert_matches_type(AgentsTurnStreamChunk, turn, path=["response"])
@parametrize
def test_method_create_with_all_params_overload_1(self, client: LlamaStack) -> None:
- turn = client.agentic_system.turns.create(
+ turn = client.agents.turns.create(
agent_id="agent_id",
messages=[
{
@@ -80,11 +77,11 @@ def test_method_create_with_all_params_overload_1(self, client: LlamaStack) -> N
],
stream=False,
)
- assert_matches_type(AgenticSystemTurnStreamChunk, turn, path=["response"])
+ assert_matches_type(AgentsTurnStreamChunk, turn, path=["response"])
@parametrize
def test_raw_response_create_overload_1(self, client: LlamaStack) -> None:
- response = client.agentic_system.turns.with_raw_response.create(
+ response = client.agents.turns.with_raw_response.create(
agent_id="agent_id",
messages=[
{
@@ -106,11 +103,11 @@ def test_raw_response_create_overload_1(self, client: LlamaStack) -> None:
assert response.is_closed is True
assert response.http_request.headers.get("X-Stainless-Lang") == "python"
turn = response.parse()
- assert_matches_type(AgenticSystemTurnStreamChunk, turn, path=["response"])
+ assert_matches_type(AgentsTurnStreamChunk, turn, path=["response"])
@parametrize
def test_streaming_response_create_overload_1(self, client: LlamaStack) -> None:
- with client.agentic_system.turns.with_streaming_response.create(
+ with client.agents.turns.with_streaming_response.create(
agent_id="agent_id",
messages=[
{
@@ -132,13 +129,13 @@ def test_streaming_response_create_overload_1(self, client: LlamaStack) -> None:
assert response.http_request.headers.get("X-Stainless-Lang") == "python"
turn = response.parse()
- assert_matches_type(AgenticSystemTurnStreamChunk, turn, path=["response"])
+ assert_matches_type(AgentsTurnStreamChunk, turn, path=["response"])
assert cast(Any, response.is_closed) is True
@parametrize
def test_method_create_overload_2(self, client: LlamaStack) -> None:
- turn_stream = client.agentic_system.turns.create(
+ turn_stream = client.agents.turns.create(
agent_id="agent_id",
messages=[
{
@@ -161,7 +158,7 @@ def test_method_create_overload_2(self, client: LlamaStack) -> None:
@parametrize
def test_method_create_with_all_params_overload_2(self, client: LlamaStack) -> None:
- turn_stream = client.agentic_system.turns.create(
+ turn_stream = client.agents.turns.create(
agent_id="agent_id",
messages=[
{
@@ -201,7 +198,7 @@ def test_method_create_with_all_params_overload_2(self, client: LlamaStack) -> N
@parametrize
def test_raw_response_create_overload_2(self, client: LlamaStack) -> None:
- response = client.agentic_system.turns.with_raw_response.create(
+ response = client.agents.turns.with_raw_response.create(
agent_id="agent_id",
messages=[
{
@@ -227,7 +224,7 @@ def test_raw_response_create_overload_2(self, client: LlamaStack) -> None:
@parametrize
def test_streaming_response_create_overload_2(self, client: LlamaStack) -> None:
- with client.agentic_system.turns.with_streaming_response.create(
+ with client.agents.turns.with_streaming_response.create(
agent_id="agent_id",
messages=[
{
@@ -256,7 +253,7 @@ def test_streaming_response_create_overload_2(self, client: LlamaStack) -> None:
@parametrize
def test_method_retrieve(self, client: LlamaStack) -> None:
- turn = client.agentic_system.turns.retrieve(
+ turn = client.agents.turns.retrieve(
agent_id="agent_id",
turn_id="turn_id",
)
@@ -264,7 +261,7 @@ def test_method_retrieve(self, client: LlamaStack) -> None:
@parametrize
def test_raw_response_retrieve(self, client: LlamaStack) -> None:
- response = client.agentic_system.turns.with_raw_response.retrieve(
+ response = client.agents.turns.with_raw_response.retrieve(
agent_id="agent_id",
turn_id="turn_id",
)
@@ -276,7 +273,7 @@ def test_raw_response_retrieve(self, client: LlamaStack) -> None:
@parametrize
def test_streaming_response_retrieve(self, client: LlamaStack) -> None:
- with client.agentic_system.turns.with_streaming_response.retrieve(
+ with client.agents.turns.with_streaming_response.retrieve(
agent_id="agent_id",
turn_id="turn_id",
) as response:
@@ -294,7 +291,7 @@ class TestAsyncTurns:
@parametrize
async def test_method_create_overload_1(self, async_client: AsyncLlamaStack) -> None:
- turn = await async_client.agentic_system.turns.create(
+ turn = await async_client.agents.turns.create(
agent_id="agent_id",
messages=[
{
@@ -312,11 +309,11 @@ async def test_method_create_overload_1(self, async_client: AsyncLlamaStack) ->
],
session_id="session_id",
)
- assert_matches_type(AgenticSystemTurnStreamChunk, turn, path=["response"])
+ assert_matches_type(AgentsTurnStreamChunk, turn, path=["response"])
@parametrize
async def test_method_create_with_all_params_overload_1(self, async_client: AsyncLlamaStack) -> None:
- turn = await async_client.agentic_system.turns.create(
+ turn = await async_client.agents.turns.create(
agent_id="agent_id",
messages=[
{
@@ -352,11 +349,11 @@ async def test_method_create_with_all_params_overload_1(self, async_client: Asyn
],
stream=False,
)
- assert_matches_type(AgenticSystemTurnStreamChunk, turn, path=["response"])
+ assert_matches_type(AgentsTurnStreamChunk, turn, path=["response"])
@parametrize
async def test_raw_response_create_overload_1(self, async_client: AsyncLlamaStack) -> None:
- response = await async_client.agentic_system.turns.with_raw_response.create(
+ response = await async_client.agents.turns.with_raw_response.create(
agent_id="agent_id",
messages=[
{
@@ -378,11 +375,11 @@ async def test_raw_response_create_overload_1(self, async_client: AsyncLlamaStac
assert response.is_closed is True
assert response.http_request.headers.get("X-Stainless-Lang") == "python"
turn = await response.parse()
- assert_matches_type(AgenticSystemTurnStreamChunk, turn, path=["response"])
+ assert_matches_type(AgentsTurnStreamChunk, turn, path=["response"])
@parametrize
async def test_streaming_response_create_overload_1(self, async_client: AsyncLlamaStack) -> None:
- async with async_client.agentic_system.turns.with_streaming_response.create(
+ async with async_client.agents.turns.with_streaming_response.create(
agent_id="agent_id",
messages=[
{
@@ -404,13 +401,13 @@ async def test_streaming_response_create_overload_1(self, async_client: AsyncLla
assert response.http_request.headers.get("X-Stainless-Lang") == "python"
turn = await response.parse()
- assert_matches_type(AgenticSystemTurnStreamChunk, turn, path=["response"])
+ assert_matches_type(AgentsTurnStreamChunk, turn, path=["response"])
assert cast(Any, response.is_closed) is True
@parametrize
async def test_method_create_overload_2(self, async_client: AsyncLlamaStack) -> None:
- turn_stream = await async_client.agentic_system.turns.create(
+ turn_stream = await async_client.agents.turns.create(
agent_id="agent_id",
messages=[
{
@@ -433,7 +430,7 @@ async def test_method_create_overload_2(self, async_client: AsyncLlamaStack) ->
@parametrize
async def test_method_create_with_all_params_overload_2(self, async_client: AsyncLlamaStack) -> None:
- turn_stream = await async_client.agentic_system.turns.create(
+ turn_stream = await async_client.agents.turns.create(
agent_id="agent_id",
messages=[
{
@@ -473,7 +470,7 @@ async def test_method_create_with_all_params_overload_2(self, async_client: Asyn
@parametrize
async def test_raw_response_create_overload_2(self, async_client: AsyncLlamaStack) -> None:
- response = await async_client.agentic_system.turns.with_raw_response.create(
+ response = await async_client.agents.turns.with_raw_response.create(
agent_id="agent_id",
messages=[
{
@@ -499,7 +496,7 @@ async def test_raw_response_create_overload_2(self, async_client: AsyncLlamaStac
@parametrize
async def test_streaming_response_create_overload_2(self, async_client: AsyncLlamaStack) -> None:
- async with async_client.agentic_system.turns.with_streaming_response.create(
+ async with async_client.agents.turns.with_streaming_response.create(
agent_id="agent_id",
messages=[
{
@@ -528,7 +525,7 @@ async def test_streaming_response_create_overload_2(self, async_client: AsyncLla
@parametrize
async def test_method_retrieve(self, async_client: AsyncLlamaStack) -> None:
- turn = await async_client.agentic_system.turns.retrieve(
+ turn = await async_client.agents.turns.retrieve(
agent_id="agent_id",
turn_id="turn_id",
)
@@ -536,7 +533,7 @@ async def test_method_retrieve(self, async_client: AsyncLlamaStack) -> None:
@parametrize
async def test_raw_response_retrieve(self, async_client: AsyncLlamaStack) -> None:
- response = await async_client.agentic_system.turns.with_raw_response.retrieve(
+ response = await async_client.agents.turns.with_raw_response.retrieve(
agent_id="agent_id",
turn_id="turn_id",
)
@@ -548,7 +545,7 @@ async def test_raw_response_retrieve(self, async_client: AsyncLlamaStack) -> Non
@parametrize
async def test_streaming_response_retrieve(self, async_client: AsyncLlamaStack) -> None:
- async with async_client.agentic_system.turns.with_streaming_response.retrieve(
+ async with async_client.agents.turns.with_streaming_response.retrieve(
agent_id="agent_id",
turn_id="turn_id",
) as response:
diff --git a/tests/api_resources/test_agentic_system.py b/tests/api_resources/test_agents.py
similarity index 95%
rename from tests/api_resources/test_agentic_system.py
rename to tests/api_resources/test_agents.py
index 33155aa..a9a46c9 100644
--- a/tests/api_resources/test_agentic_system.py
+++ b/tests/api_resources/test_agents.py
@@ -9,27 +9,27 @@
from llama_stack import LlamaStack, AsyncLlamaStack
from tests.utils import assert_matches_type
-from llama_stack.types import AgenticSystemCreateResponse
+from llama_stack.types import AgentCreateResponse
base_url = os.environ.get("TEST_API_BASE_URL", "http://127.0.0.1:4010")
-class TestAgenticSystem:
+class TestAgents:
parametrize = pytest.mark.parametrize("client", [False, True], indirect=True, ids=["loose", "strict"])
@parametrize
def test_method_create(self, client: LlamaStack) -> None:
- agentic_system = client.agentic_system.create(
+ agent = client.agents.create(
agent_config={
"instructions": "instructions",
"model": "model",
},
)
- assert_matches_type(AgenticSystemCreateResponse, agentic_system, path=["response"])
+ assert_matches_type(AgentCreateResponse, agent, path=["response"])
@parametrize
def test_method_create_with_all_params(self, client: LlamaStack) -> None:
- agentic_system = client.agentic_system.create(
+ agent = client.agents.create(
agent_config={
"instructions": "instructions",
"model": "model",
@@ -163,6 +163,7 @@ def test_method_create_with_all_params(self, client: LlamaStack) -> None:
"tool_prompt_format": "json",
"tools": [
{
+ "api_key": "api_key",
"engine": "bing",
"type": "brave_search",
"input_shields": [
@@ -292,6 +293,7 @@ def test_method_create_with_all_params(self, client: LlamaStack) -> None:
},
},
{
+ "api_key": "api_key",
"engine": "bing",
"type": "brave_search",
"input_shields": [
@@ -421,6 +423,7 @@ def test_method_create_with_all_params(self, client: LlamaStack) -> None:
},
},
{
+ "api_key": "api_key",
"engine": "bing",
"type": "brave_search",
"input_shields": [
@@ -552,11 +555,11 @@ def test_method_create_with_all_params(self, client: LlamaStack) -> None:
],
},
)
- assert_matches_type(AgenticSystemCreateResponse, agentic_system, path=["response"])
+ assert_matches_type(AgentCreateResponse, agent, path=["response"])
@parametrize
def test_raw_response_create(self, client: LlamaStack) -> None:
- response = client.agentic_system.with_raw_response.create(
+ response = client.agents.with_raw_response.create(
agent_config={
"instructions": "instructions",
"model": "model",
@@ -565,12 +568,12 @@ def test_raw_response_create(self, client: LlamaStack) -> None:
assert response.is_closed is True
assert response.http_request.headers.get("X-Stainless-Lang") == "python"
- agentic_system = response.parse()
- assert_matches_type(AgenticSystemCreateResponse, agentic_system, path=["response"])
+ agent = response.parse()
+ assert_matches_type(AgentCreateResponse, agent, path=["response"])
@parametrize
def test_streaming_response_create(self, client: LlamaStack) -> None:
- with client.agentic_system.with_streaming_response.create(
+ with client.agents.with_streaming_response.create(
agent_config={
"instructions": "instructions",
"model": "model",
@@ -579,59 +582,59 @@ def test_streaming_response_create(self, client: LlamaStack) -> None:
assert not response.is_closed
assert response.http_request.headers.get("X-Stainless-Lang") == "python"
- agentic_system = response.parse()
- assert_matches_type(AgenticSystemCreateResponse, agentic_system, path=["response"])
+ agent = response.parse()
+ assert_matches_type(AgentCreateResponse, agent, path=["response"])
assert cast(Any, response.is_closed) is True
@parametrize
def test_method_delete(self, client: LlamaStack) -> None:
- agentic_system = client.agentic_system.delete(
+ agent = client.agents.delete(
agent_id="agent_id",
)
- assert agentic_system is None
+ assert agent is None
@parametrize
def test_raw_response_delete(self, client: LlamaStack) -> None:
- response = client.agentic_system.with_raw_response.delete(
+ response = client.agents.with_raw_response.delete(
agent_id="agent_id",
)
assert response.is_closed is True
assert response.http_request.headers.get("X-Stainless-Lang") == "python"
- agentic_system = response.parse()
- assert agentic_system is None
+ agent = response.parse()
+ assert agent is None
@parametrize
def test_streaming_response_delete(self, client: LlamaStack) -> None:
- with client.agentic_system.with_streaming_response.delete(
+ with client.agents.with_streaming_response.delete(
agent_id="agent_id",
) as response:
assert not response.is_closed
assert response.http_request.headers.get("X-Stainless-Lang") == "python"
- agentic_system = response.parse()
- assert agentic_system is None
+ agent = response.parse()
+ assert agent is None
assert cast(Any, response.is_closed) is True
-class TestAsyncAgenticSystem:
+class TestAsyncAgents:
parametrize = pytest.mark.parametrize("async_client", [False, True], indirect=True, ids=["loose", "strict"])
@parametrize
async def test_method_create(self, async_client: AsyncLlamaStack) -> None:
- agentic_system = await async_client.agentic_system.create(
+ agent = await async_client.agents.create(
agent_config={
"instructions": "instructions",
"model": "model",
},
)
- assert_matches_type(AgenticSystemCreateResponse, agentic_system, path=["response"])
+ assert_matches_type(AgentCreateResponse, agent, path=["response"])
@parametrize
async def test_method_create_with_all_params(self, async_client: AsyncLlamaStack) -> None:
- agentic_system = await async_client.agentic_system.create(
+ agent = await async_client.agents.create(
agent_config={
"instructions": "instructions",
"model": "model",
@@ -765,6 +768,7 @@ async def test_method_create_with_all_params(self, async_client: AsyncLlamaStack
"tool_prompt_format": "json",
"tools": [
{
+ "api_key": "api_key",
"engine": "bing",
"type": "brave_search",
"input_shields": [
@@ -894,6 +898,7 @@ async def test_method_create_with_all_params(self, async_client: AsyncLlamaStack
},
},
{
+ "api_key": "api_key",
"engine": "bing",
"type": "brave_search",
"input_shields": [
@@ -1023,6 +1028,7 @@ async def test_method_create_with_all_params(self, async_client: AsyncLlamaStack
},
},
{
+ "api_key": "api_key",
"engine": "bing",
"type": "brave_search",
"input_shields": [
@@ -1154,11 +1160,11 @@ async def test_method_create_with_all_params(self, async_client: AsyncLlamaStack
],
},
)
- assert_matches_type(AgenticSystemCreateResponse, agentic_system, path=["response"])
+ assert_matches_type(AgentCreateResponse, agent, path=["response"])
@parametrize
async def test_raw_response_create(self, async_client: AsyncLlamaStack) -> None:
- response = await async_client.agentic_system.with_raw_response.create(
+ response = await async_client.agents.with_raw_response.create(
agent_config={
"instructions": "instructions",
"model": "model",
@@ -1167,12 +1173,12 @@ async def test_raw_response_create(self, async_client: AsyncLlamaStack) -> None:
assert response.is_closed is True
assert response.http_request.headers.get("X-Stainless-Lang") == "python"
- agentic_system = await response.parse()
- assert_matches_type(AgenticSystemCreateResponse, agentic_system, path=["response"])
+ agent = await response.parse()
+ assert_matches_type(AgentCreateResponse, agent, path=["response"])
@parametrize
async def test_streaming_response_create(self, async_client: AsyncLlamaStack) -> None:
- async with async_client.agentic_system.with_streaming_response.create(
+ async with async_client.agents.with_streaming_response.create(
agent_config={
"instructions": "instructions",
"model": "model",
@@ -1181,38 +1187,38 @@ async def test_streaming_response_create(self, async_client: AsyncLlamaStack) ->
assert not response.is_closed
assert response.http_request.headers.get("X-Stainless-Lang") == "python"
- agentic_system = await response.parse()
- assert_matches_type(AgenticSystemCreateResponse, agentic_system, path=["response"])
+ agent = await response.parse()
+ assert_matches_type(AgentCreateResponse, agent, path=["response"])
assert cast(Any, response.is_closed) is True
@parametrize
async def test_method_delete(self, async_client: AsyncLlamaStack) -> None:
- agentic_system = await async_client.agentic_system.delete(
+ agent = await async_client.agents.delete(
agent_id="agent_id",
)
- assert agentic_system is None
+ assert agent is None
@parametrize
async def test_raw_response_delete(self, async_client: AsyncLlamaStack) -> None:
- response = await async_client.agentic_system.with_raw_response.delete(
+ response = await async_client.agents.with_raw_response.delete(
agent_id="agent_id",
)
assert response.is_closed is True
assert response.http_request.headers.get("X-Stainless-Lang") == "python"
- agentic_system = await response.parse()
- assert agentic_system is None
+ agent = await response.parse()
+ assert agent is None
@parametrize
async def test_streaming_response_delete(self, async_client: AsyncLlamaStack) -> None:
- async with async_client.agentic_system.with_streaming_response.delete(
+ async with async_client.agents.with_streaming_response.delete(
agent_id="agent_id",
) as response:
assert not response.is_closed
assert response.http_request.headers.get("X-Stainless-Lang") == "python"
- agentic_system = await response.parse()
- assert agentic_system is None
+ agent = await response.parse()
+ assert agent is None
assert cast(Any, response.is_closed) is True
diff --git a/tests/api_resources/test_memory_banks.py b/tests/api_resources/test_memory_banks.py
index 0f314dc..9b3f433 100644
--- a/tests/api_resources/test_memory_banks.py
+++ b/tests/api_resources/test_memory_banks.py
@@ -90,19 +90,16 @@ def test_method_update(self, client: LlamaStack) -> None:
"content": "string",
"document_id": "document_id",
"metadata": {"foo": True},
- "mime_type": "mime_type",
},
{
"content": "string",
"document_id": "document_id",
"metadata": {"foo": True},
- "mime_type": "mime_type",
},
{
"content": "string",
"document_id": "document_id",
"metadata": {"foo": True},
- "mime_type": "mime_type",
},
],
)
@@ -117,19 +114,16 @@ def test_raw_response_update(self, client: LlamaStack) -> None:
"content": "string",
"document_id": "document_id",
"metadata": {"foo": True},
- "mime_type": "mime_type",
},
{
"content": "string",
"document_id": "document_id",
"metadata": {"foo": True},
- "mime_type": "mime_type",
},
{
"content": "string",
"document_id": "document_id",
"metadata": {"foo": True},
- "mime_type": "mime_type",
},
],
)
@@ -148,19 +142,16 @@ def test_streaming_response_update(self, client: LlamaStack) -> None:
"content": "string",
"document_id": "document_id",
"metadata": {"foo": True},
- "mime_type": "mime_type",
},
{
"content": "string",
"document_id": "document_id",
"metadata": {"foo": True},
- "mime_type": "mime_type",
},
{
"content": "string",
"document_id": "document_id",
"metadata": {"foo": True},
- "mime_type": "mime_type",
},
],
) as response:
@@ -237,19 +228,16 @@ def test_method_insert(self, client: LlamaStack) -> None:
"content": "string",
"document_id": "document_id",
"metadata": {"foo": True},
- "mime_type": "mime_type",
},
{
"content": "string",
"document_id": "document_id",
"metadata": {"foo": True},
- "mime_type": "mime_type",
},
{
"content": "string",
"document_id": "document_id",
"metadata": {"foo": True},
- "mime_type": "mime_type",
},
],
)
@@ -292,19 +280,16 @@ def test_raw_response_insert(self, client: LlamaStack) -> None:
"content": "string",
"document_id": "document_id",
"metadata": {"foo": True},
- "mime_type": "mime_type",
},
{
"content": "string",
"document_id": "document_id",
"metadata": {"foo": True},
- "mime_type": "mime_type",
},
{
"content": "string",
"document_id": "document_id",
"metadata": {"foo": True},
- "mime_type": "mime_type",
},
],
)
@@ -323,19 +308,16 @@ def test_streaming_response_insert(self, client: LlamaStack) -> None:
"content": "string",
"document_id": "document_id",
"metadata": {"foo": True},
- "mime_type": "mime_type",
},
{
"content": "string",
"document_id": "document_id",
"metadata": {"foo": True},
- "mime_type": "mime_type",
},
{
"content": "string",
"document_id": "document_id",
"metadata": {"foo": True},
- "mime_type": "mime_type",
},
],
) as response:
@@ -465,19 +447,16 @@ async def test_method_update(self, async_client: AsyncLlamaStack) -> None:
"content": "string",
"document_id": "document_id",
"metadata": {"foo": True},
- "mime_type": "mime_type",
},
{
"content": "string",
"document_id": "document_id",
"metadata": {"foo": True},
- "mime_type": "mime_type",
},
{
"content": "string",
"document_id": "document_id",
"metadata": {"foo": True},
- "mime_type": "mime_type",
},
],
)
@@ -492,19 +471,16 @@ async def test_raw_response_update(self, async_client: AsyncLlamaStack) -> None:
"content": "string",
"document_id": "document_id",
"metadata": {"foo": True},
- "mime_type": "mime_type",
},
{
"content": "string",
"document_id": "document_id",
"metadata": {"foo": True},
- "mime_type": "mime_type",
},
{
"content": "string",
"document_id": "document_id",
"metadata": {"foo": True},
- "mime_type": "mime_type",
},
],
)
@@ -523,19 +499,16 @@ async def test_streaming_response_update(self, async_client: AsyncLlamaStack) ->
"content": "string",
"document_id": "document_id",
"metadata": {"foo": True},
- "mime_type": "mime_type",
},
{
"content": "string",
"document_id": "document_id",
"metadata": {"foo": True},
- "mime_type": "mime_type",
},
{
"content": "string",
"document_id": "document_id",
"metadata": {"foo": True},
- "mime_type": "mime_type",
},
],
) as response:
@@ -612,19 +585,16 @@ async def test_method_insert(self, async_client: AsyncLlamaStack) -> None:
"content": "string",
"document_id": "document_id",
"metadata": {"foo": True},
- "mime_type": "mime_type",
},
{
"content": "string",
"document_id": "document_id",
"metadata": {"foo": True},
- "mime_type": "mime_type",
},
{
"content": "string",
"document_id": "document_id",
"metadata": {"foo": True},
- "mime_type": "mime_type",
},
],
)
@@ -667,19 +637,16 @@ async def test_raw_response_insert(self, async_client: AsyncLlamaStack) -> None:
"content": "string",
"document_id": "document_id",
"metadata": {"foo": True},
- "mime_type": "mime_type",
},
{
"content": "string",
"document_id": "document_id",
"metadata": {"foo": True},
- "mime_type": "mime_type",
},
{
"content": "string",
"document_id": "document_id",
"metadata": {"foo": True},
- "mime_type": "mime_type",
},
],
)
@@ -698,19 +665,16 @@ async def test_streaming_response_insert(self, async_client: AsyncLlamaStack) ->
"content": "string",
"document_id": "document_id",
"metadata": {"foo": True},
- "mime_type": "mime_type",
},
{
"content": "string",
"document_id": "document_id",
"metadata": {"foo": True},
- "mime_type": "mime_type",
},
{
"content": "string",
"document_id": "document_id",
"metadata": {"foo": True},
- "mime_type": "mime_type",
},
],
) as response:
diff --git a/tests/test_client.py b/tests/test_client.py
index c531ba5..ea89ceb 100644
--- a/tests/test_client.py
+++ b/tests/test_client.py
@@ -673,11 +673,11 @@ def test_parse_retry_after_header(self, remaining_retries: int, retry_after: str
@mock.patch("llama_stack._base_client.BaseClient._calculate_retry_timeout", _low_retry_timeout)
@pytest.mark.respx(base_url=base_url)
def test_retrying_timeout_errors_doesnt_leak(self, respx_mock: MockRouter) -> None:
- respx_mock.post("/agentic_system/session/create").mock(side_effect=httpx.TimeoutException("Test timeout error"))
+ respx_mock.post("/agents/session/create").mock(side_effect=httpx.TimeoutException("Test timeout error"))
with pytest.raises(APITimeoutError):
self.client.post(
- "/agentic_system/session/create",
+ "/agents/session/create",
body=cast(object, dict(agent_id="agent_id", session_name="session_name")),
cast_to=httpx.Response,
options={"headers": {RAW_RESPONSE_HEADER: "stream"}},
@@ -688,11 +688,11 @@ def test_retrying_timeout_errors_doesnt_leak(self, respx_mock: MockRouter) -> No
@mock.patch("llama_stack._base_client.BaseClient._calculate_retry_timeout", _low_retry_timeout)
@pytest.mark.respx(base_url=base_url)
def test_retrying_status_errors_doesnt_leak(self, respx_mock: MockRouter) -> None:
- respx_mock.post("/agentic_system/session/create").mock(return_value=httpx.Response(500))
+ respx_mock.post("/agents/session/create").mock(return_value=httpx.Response(500))
with pytest.raises(APIStatusError):
self.client.post(
- "/agentic_system/session/create",
+ "/agents/session/create",
body=cast(object, dict(agent_id="agent_id", session_name="session_name")),
cast_to=httpx.Response,
options={"headers": {RAW_RESPONSE_HEADER: "stream"}},
@@ -715,11 +715,9 @@ def retry_handler(_request: httpx.Request) -> httpx.Response:
return httpx.Response(500)
return httpx.Response(200)
- respx_mock.post("/agentic_system/session/create").mock(side_effect=retry_handler)
+ respx_mock.post("/agents/session/create").mock(side_effect=retry_handler)
- response = client.agentic_system.sessions.with_raw_response.create(
- agent_id="agent_id", session_name="session_name"
- )
+ response = client.agents.sessions.with_raw_response.create(agent_id="agent_id", session_name="session_name")
assert response.retries_taken == failures_before_success
@@ -1355,11 +1353,11 @@ async def test_parse_retry_after_header(self, remaining_retries: int, retry_afte
@mock.patch("llama_stack._base_client.BaseClient._calculate_retry_timeout", _low_retry_timeout)
@pytest.mark.respx(base_url=base_url)
async def test_retrying_timeout_errors_doesnt_leak(self, respx_mock: MockRouter) -> None:
- respx_mock.post("/agentic_system/session/create").mock(side_effect=httpx.TimeoutException("Test timeout error"))
+ respx_mock.post("/agents/session/create").mock(side_effect=httpx.TimeoutException("Test timeout error"))
with pytest.raises(APITimeoutError):
await self.client.post(
- "/agentic_system/session/create",
+ "/agents/session/create",
body=cast(object, dict(agent_id="agent_id", session_name="session_name")),
cast_to=httpx.Response,
options={"headers": {RAW_RESPONSE_HEADER: "stream"}},
@@ -1370,11 +1368,11 @@ async def test_retrying_timeout_errors_doesnt_leak(self, respx_mock: MockRouter)
@mock.patch("llama_stack._base_client.BaseClient._calculate_retry_timeout", _low_retry_timeout)
@pytest.mark.respx(base_url=base_url)
async def test_retrying_status_errors_doesnt_leak(self, respx_mock: MockRouter) -> None:
- respx_mock.post("/agentic_system/session/create").mock(return_value=httpx.Response(500))
+ respx_mock.post("/agents/session/create").mock(return_value=httpx.Response(500))
with pytest.raises(APIStatusError):
await self.client.post(
- "/agentic_system/session/create",
+ "/agents/session/create",
body=cast(object, dict(agent_id="agent_id", session_name="session_name")),
cast_to=httpx.Response,
options={"headers": {RAW_RESPONSE_HEADER: "stream"}},
@@ -1400,9 +1398,9 @@ def retry_handler(_request: httpx.Request) -> httpx.Response:
return httpx.Response(500)
return httpx.Response(200)
- respx_mock.post("/agentic_system/session/create").mock(side_effect=retry_handler)
+ respx_mock.post("/agents/session/create").mock(side_effect=retry_handler)
- response = await client.agentic_system.sessions.with_raw_response.create(
+ response = await client.agents.sessions.with_raw_response.create(
agent_id="agent_id", session_name="session_name"
)