Skip to content

Commit

Permalink
sync repo w/ MM
Browse files Browse the repository at this point in the history
  • Loading branch information
yanxi0830 committed Sep 25, 2024
1 parent da88593 commit 39e0b2b
Show file tree
Hide file tree
Showing 30 changed files with 1,070 additions and 90 deletions.
10 changes: 5 additions & 5 deletions src/llama_stack_client/resources/batch_inference.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,7 @@

from __future__ import annotations

from typing import List, Union, Iterable
from typing import List, Iterable
from typing_extensions import Literal

import httpx
Expand Down Expand Up @@ -58,7 +58,7 @@ def chat_completion(
logprobs: batch_inference_chat_completion_params.Logprobs | NotGiven = NOT_GIVEN,
sampling_params: SamplingParams | NotGiven = NOT_GIVEN,
tool_choice: Literal["auto", "required"] | NotGiven = NOT_GIVEN,
tool_prompt_format: Literal["json", "function_tag"] | NotGiven = NOT_GIVEN,
tool_prompt_format: Literal["json", "function_tag", "python_list"] | NotGiven = NOT_GIVEN,
tools: Iterable[batch_inference_chat_completion_params.Tool] | NotGiven = NOT_GIVEN,
x_llama_stack_provider_data: str | NotGiven = NOT_GIVEN,
# Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
Expand Down Expand Up @@ -115,7 +115,7 @@ def chat_completion(
def completion(
self,
*,
content_batch: List[Union[str, List[str]]],
content_batch: List[batch_inference_completion_params.ContentBatch],
model: str,
logprobs: batch_inference_completion_params.Logprobs | NotGiven = NOT_GIVEN,
sampling_params: SamplingParams | NotGiven = NOT_GIVEN,
Expand Down Expand Up @@ -187,7 +187,7 @@ async def chat_completion(
logprobs: batch_inference_chat_completion_params.Logprobs | NotGiven = NOT_GIVEN,
sampling_params: SamplingParams | NotGiven = NOT_GIVEN,
tool_choice: Literal["auto", "required"] | NotGiven = NOT_GIVEN,
tool_prompt_format: Literal["json", "function_tag"] | NotGiven = NOT_GIVEN,
tool_prompt_format: Literal["json", "function_tag", "python_list"] | NotGiven = NOT_GIVEN,
tools: Iterable[batch_inference_chat_completion_params.Tool] | NotGiven = NOT_GIVEN,
x_llama_stack_provider_data: str | NotGiven = NOT_GIVEN,
# Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
Expand Down Expand Up @@ -244,7 +244,7 @@ async def chat_completion(
async def completion(
self,
*,
content_batch: List[Union[str, List[str]]],
content_batch: List[batch_inference_completion_params.ContentBatch],
model: str,
logprobs: batch_inference_completion_params.Logprobs | NotGiven = NOT_GIVEN,
sampling_params: SamplingParams | NotGiven = NOT_GIVEN,
Expand Down
6 changes: 3 additions & 3 deletions src/llama_stack_client/resources/inference/embeddings.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,7 @@

from __future__ import annotations

from typing import List, Union
from typing import List

import httpx

Expand Down Expand Up @@ -50,7 +50,7 @@ def with_streaming_response(self) -> EmbeddingsResourceWithStreamingResponse:
def create(
self,
*,
contents: List[Union[str, List[str]]],
contents: List[embedding_create_params.Content],
model: str,
x_llama_stack_provider_data: str | NotGiven = NOT_GIVEN,
# Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
Expand Down Expand Up @@ -113,7 +113,7 @@ def with_streaming_response(self) -> AsyncEmbeddingsResourceWithStreamingRespons
async def create(
self,
*,
contents: List[Union[str, List[str]]],
contents: List[embedding_create_params.Content],
model: str,
x_llama_stack_provider_data: str | NotGiven = NOT_GIVEN,
# Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
Expand Down
22 changes: 11 additions & 11 deletions src/llama_stack_client/resources/inference/inference.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,7 @@

from __future__ import annotations

from typing import Any, List, Union, Iterable, cast
from typing import Any, Iterable, cast
from typing_extensions import Literal, overload

import httpx
Expand Down Expand Up @@ -74,7 +74,7 @@ def chat_completion(
sampling_params: SamplingParams | NotGiven = NOT_GIVEN,
stream: Literal[False] | NotGiven = NOT_GIVEN,
tool_choice: Literal["auto", "required"] | NotGiven = NOT_GIVEN,
tool_prompt_format: Literal["json", "function_tag"] | NotGiven = NOT_GIVEN,
tool_prompt_format: Literal["json", "function_tag", "python_list"] | NotGiven = NOT_GIVEN,
tools: Iterable[inference_chat_completion_params.Tool] | NotGiven = NOT_GIVEN,
x_llama_stack_provider_data: str | NotGiven = NOT_GIVEN,
# Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
Expand Down Expand Up @@ -116,7 +116,7 @@ def chat_completion(
logprobs: inference_chat_completion_params.Logprobs | NotGiven = NOT_GIVEN,
sampling_params: SamplingParams | NotGiven = NOT_GIVEN,
tool_choice: Literal["auto", "required"] | NotGiven = NOT_GIVEN,
tool_prompt_format: Literal["json", "function_tag"] | NotGiven = NOT_GIVEN,
tool_prompt_format: Literal["json", "function_tag", "python_list"] | NotGiven = NOT_GIVEN,
tools: Iterable[inference_chat_completion_params.Tool] | NotGiven = NOT_GIVEN,
x_llama_stack_provider_data: str | NotGiven = NOT_GIVEN,
# Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
Expand Down Expand Up @@ -158,7 +158,7 @@ def chat_completion(
logprobs: inference_chat_completion_params.Logprobs | NotGiven = NOT_GIVEN,
sampling_params: SamplingParams | NotGiven = NOT_GIVEN,
tool_choice: Literal["auto", "required"] | NotGiven = NOT_GIVEN,
tool_prompt_format: Literal["json", "function_tag"] | NotGiven = NOT_GIVEN,
tool_prompt_format: Literal["json", "function_tag", "python_list"] | NotGiven = NOT_GIVEN,
tools: Iterable[inference_chat_completion_params.Tool] | NotGiven = NOT_GIVEN,
x_llama_stack_provider_data: str | NotGiven = NOT_GIVEN,
# Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
Expand Down Expand Up @@ -200,7 +200,7 @@ def chat_completion(
sampling_params: SamplingParams | NotGiven = NOT_GIVEN,
stream: Literal[False] | Literal[True] | NotGiven = NOT_GIVEN,
tool_choice: Literal["auto", "required"] | NotGiven = NOT_GIVEN,
tool_prompt_format: Literal["json", "function_tag"] | NotGiven = NOT_GIVEN,
tool_prompt_format: Literal["json", "function_tag", "python_list"] | NotGiven = NOT_GIVEN,
tools: Iterable[inference_chat_completion_params.Tool] | NotGiven = NOT_GIVEN,
x_llama_stack_provider_data: str | NotGiven = NOT_GIVEN,
# Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
Expand Down Expand Up @@ -246,7 +246,7 @@ def chat_completion(
def completion(
self,
*,
content: Union[str, List[str]],
content: inference_completion_params.Content,
model: str,
logprobs: inference_completion_params.Logprobs | NotGiven = NOT_GIVEN,
sampling_params: SamplingParams | NotGiven = NOT_GIVEN,
Expand Down Expand Up @@ -331,7 +331,7 @@ async def chat_completion(
sampling_params: SamplingParams | NotGiven = NOT_GIVEN,
stream: Literal[False] | NotGiven = NOT_GIVEN,
tool_choice: Literal["auto", "required"] | NotGiven = NOT_GIVEN,
tool_prompt_format: Literal["json", "function_tag"] | NotGiven = NOT_GIVEN,
tool_prompt_format: Literal["json", "function_tag", "python_list"] | NotGiven = NOT_GIVEN,
tools: Iterable[inference_chat_completion_params.Tool] | NotGiven = NOT_GIVEN,
x_llama_stack_provider_data: str | NotGiven = NOT_GIVEN,
# Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
Expand Down Expand Up @@ -373,7 +373,7 @@ async def chat_completion(
logprobs: inference_chat_completion_params.Logprobs | NotGiven = NOT_GIVEN,
sampling_params: SamplingParams | NotGiven = NOT_GIVEN,
tool_choice: Literal["auto", "required"] | NotGiven = NOT_GIVEN,
tool_prompt_format: Literal["json", "function_tag"] | NotGiven = NOT_GIVEN,
tool_prompt_format: Literal["json", "function_tag", "python_list"] | NotGiven = NOT_GIVEN,
tools: Iterable[inference_chat_completion_params.Tool] | NotGiven = NOT_GIVEN,
x_llama_stack_provider_data: str | NotGiven = NOT_GIVEN,
# Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
Expand Down Expand Up @@ -415,7 +415,7 @@ async def chat_completion(
logprobs: inference_chat_completion_params.Logprobs | NotGiven = NOT_GIVEN,
sampling_params: SamplingParams | NotGiven = NOT_GIVEN,
tool_choice: Literal["auto", "required"] | NotGiven = NOT_GIVEN,
tool_prompt_format: Literal["json", "function_tag"] | NotGiven = NOT_GIVEN,
tool_prompt_format: Literal["json", "function_tag", "python_list"] | NotGiven = NOT_GIVEN,
tools: Iterable[inference_chat_completion_params.Tool] | NotGiven = NOT_GIVEN,
x_llama_stack_provider_data: str | NotGiven = NOT_GIVEN,
# Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
Expand Down Expand Up @@ -457,7 +457,7 @@ async def chat_completion(
sampling_params: SamplingParams | NotGiven = NOT_GIVEN,
stream: Literal[False] | Literal[True] | NotGiven = NOT_GIVEN,
tool_choice: Literal["auto", "required"] | NotGiven = NOT_GIVEN,
tool_prompt_format: Literal["json", "function_tag"] | NotGiven = NOT_GIVEN,
tool_prompt_format: Literal["json", "function_tag", "python_list"] | NotGiven = NOT_GIVEN,
tools: Iterable[inference_chat_completion_params.Tool] | NotGiven = NOT_GIVEN,
x_llama_stack_provider_data: str | NotGiven = NOT_GIVEN,
# Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
Expand Down Expand Up @@ -503,7 +503,7 @@ async def chat_completion(
async def completion(
self,
*,
content: Union[str, List[str]],
content: inference_completion_params.Content,
model: str,
logprobs: inference_completion_params.Logprobs | NotGiven = NOT_GIVEN,
sampling_params: SamplingParams | NotGiven = NOT_GIVEN,
Expand Down
6 changes: 3 additions & 3 deletions src/llama_stack_client/resources/memory/memory.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,7 @@

from __future__ import annotations

from typing import Dict, List, Union, Iterable
from typing import Dict, Union, Iterable

import httpx

Expand Down Expand Up @@ -300,7 +300,7 @@ def query(
self,
*,
bank_id: str,
query: Union[str, List[str]],
query: memory_query_params.Query,
params: Dict[str, Union[bool, float, str, Iterable[object], object, None]] | NotGiven = NOT_GIVEN,
x_llama_stack_provider_data: str | NotGiven = NOT_GIVEN,
# Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
Expand Down Expand Up @@ -599,7 +599,7 @@ async def query(
self,
*,
bank_id: str,
query: Union[str, List[str]],
query: memory_query_params.Query,
params: Dict[str, Union[bool, float, str, Iterable[object], object, None]] | NotGiven = NOT_GIVEN,
x_llama_stack_provider_data: str | NotGiven = NOT_GIVEN,
# Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
Expand Down
2 changes: 1 addition & 1 deletion src/llama_stack_client/types/agent_create_params.py
Original file line number Diff line number Diff line change
Expand Up @@ -206,7 +206,7 @@ class AgentConfig(TypedDict, total=False):

tool_choice: Literal["auto", "required"]

tool_prompt_format: Literal["json", "function_tag"]
tool_prompt_format: Literal["json", "function_tag", "python_list"]
"""
`json` -- Refers to the json format for calling tools. The json format takes the
form like { "type": "function", "function" : { "name": "function_name",
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -27,7 +27,7 @@ class BatchInferenceChatCompletionParams(TypedDict, total=False):

tool_choice: Literal["auto", "required"]

tool_prompt_format: Literal["json", "function_tag"]
tool_prompt_format: Literal["json", "function_tag", "python_list"]
"""
`json` -- Refers to the json format for calling tools. The json format takes the
form like { "type": "function", "function" : { "name": "function_name",
Expand Down
50 changes: 47 additions & 3 deletions src/llama_stack_client/types/batch_inference_completion_params.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,16 +3,27 @@
from __future__ import annotations

from typing import List, Union
from typing_extensions import Required, Annotated, TypedDict
from typing_extensions import Required, Annotated, TypeAlias, TypedDict

from .._utils import PropertyInfo
from .shared_params.sampling_params import SamplingParams

__all__ = ["BatchInferenceCompletionParams", "Logprobs"]
# Public API of this generated params module: the request TypedDict plus the
# union/alias types that make up its `content_batch` field.
__all__ = [
    "BatchInferenceCompletionParams",
    "ContentBatch",
    "ContentBatchImageMedia",
    "ContentBatchImageMediaImage",
    "ContentBatchImageMediaImageThisClassRepresentsAnImageObjectToCreate",
    "ContentBatchUnionMember2",
    "ContentBatchUnionMember2ImageMedia",
    "ContentBatchUnionMember2ImageMediaImage",
    "ContentBatchUnionMember2ImageMediaImageThisClassRepresentsAnImageObjectToCreate",
    "Logprobs",
]


class BatchInferenceCompletionParams(TypedDict, total=False):
content_batch: Required[List[Union[str, List[str]]]]
content_batch: Required[List[ContentBatch]]

model: Required[str]

Expand All @@ -23,5 +34,38 @@ class BatchInferenceCompletionParams(TypedDict, total=False):
x_llama_stack_provider_data: Annotated[str, PropertyInfo(alias="X-LlamaStack-ProviderData")]


# Optional metadata for an image object being created: its encoding format and
# a free-text description of that format (both keys may be omitted).
ContentBatchImageMediaImageThisClassRepresentsAnImageObjectToCreate = TypedDict(
    "ContentBatchImageMediaImageThisClassRepresentsAnImageObjectToCreate",
    {"format": str, "format_description": str},
    total=False,
)


ContentBatchImageMediaImage: TypeAlias = Union[ContentBatchImageMediaImageThisClassRepresentsAnImageObjectToCreate, str]


class ContentBatchImageMedia(TypedDict, total=False):
    """Wrapper marking a content-batch entry as image media."""

    # The image payload; `Required` keeps this key mandatory even though the
    # TypedDict as a whole is declared total=False.
    image: Required[ContentBatchImageMediaImage]


# Optional metadata for an image object being created — the list-element
# variant generated alongside ContentBatchImageMediaImage...Create; both keys
# may be omitted.
ContentBatchUnionMember2ImageMediaImageThisClassRepresentsAnImageObjectToCreate = TypedDict(
    "ContentBatchUnionMember2ImageMediaImageThisClassRepresentsAnImageObjectToCreate",
    {"format": str, "format_description": str},
    total=False,
)


# Mirrors ContentBatchImageMediaImage for list elements: structured image
# metadata or a plain string.
ContentBatchUnionMember2ImageMediaImage: TypeAlias = Union[
    ContentBatchUnionMember2ImageMediaImageThisClassRepresentsAnImageObjectToCreate, str
]


class ContentBatchUnionMember2ImageMedia(TypedDict, total=False):
    """Image-media wrapper for an element inside a list-valued batch entry."""

    # The image payload; `Required` overrides the class-level total=False.
    image: Required[ContentBatchUnionMember2ImageMediaImage]


# One element of a list-valued batch entry: plain text or image media.
ContentBatchUnionMember2: TypeAlias = Union[str, ContentBatchUnionMember2ImageMedia]

# One entry of `content_batch`: a string, a single image, or a mixed list of both.
ContentBatch: TypeAlias = Union[str, ContentBatchImageMedia, List[ContentBatchUnionMember2]]


# Optional log-probability settings. `top_k` is presumably the number of top
# candidate tokens to return log-probabilities for — TODO confirm with API docs.
Logprobs = TypedDict("Logprobs", {"top_k": int}, total=False)
49 changes: 46 additions & 3 deletions src/llama_stack_client/types/inference/embedding_create_params.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,16 +3,59 @@
from __future__ import annotations

from typing import List, Union
from typing_extensions import Required, Annotated, TypedDict
from typing_extensions import Required, Annotated, TypeAlias, TypedDict

from ..._utils import PropertyInfo

__all__ = ["EmbeddingCreateParams"]
# Public API of this generated params module: the request TypedDict plus the
# union/alias types that make up its `contents` field.
__all__ = [
    "EmbeddingCreateParams",
    "Content",
    "ContentImageMedia",
    "ContentImageMediaImage",
    "ContentImageMediaImageThisClassRepresentsAnImageObjectToCreate",
    "ContentUnionMember2",
    "ContentUnionMember2ImageMedia",
    "ContentUnionMember2ImageMediaImage",
    "ContentUnionMember2ImageMediaImageThisClassRepresentsAnImageObjectToCreate",
]


class EmbeddingCreateParams(TypedDict, total=False):
    """Request parameters for the embeddings-create endpoint."""

    # NOTE(review): the next two lines are the before/after of a rendered diff
    # hunk; the real generated file keeps only the `List[Content]` form.
    contents: Required[List[Union[str, List[str]]]]
    contents: Required[List[Content]]

    # Identifier of the embedding model to run.
    model: Required[str]

    # Aliased to "X-LlamaStack-ProviderData" — presumably serialized as a
    # provider-data request header; TODO confirm against the client transport.
    x_llama_stack_provider_data: Annotated[str, PropertyInfo(alias="X-LlamaStack-ProviderData")]


# Optional metadata for an image object being created: its encoding format and
# a free-text description of that format (both keys may be omitted).
ContentImageMediaImageThisClassRepresentsAnImageObjectToCreate = TypedDict(
    "ContentImageMediaImageThisClassRepresentsAnImageObjectToCreate",
    {"format": str, "format_description": str},
    total=False,
)


ContentImageMediaImage: TypeAlias = Union[ContentImageMediaImageThisClassRepresentsAnImageObjectToCreate, str]


class ContentImageMedia(TypedDict, total=False):
    """Wrapper marking a contents entry as image media."""

    # The image payload; `Required` keeps this key mandatory even though the
    # TypedDict as a whole is declared total=False.
    image: Required[ContentImageMediaImage]


# Optional metadata for an image object being created — the list-element
# variant generated alongside ContentImageMediaImage...Create; both keys may be
# omitted.
ContentUnionMember2ImageMediaImageThisClassRepresentsAnImageObjectToCreate = TypedDict(
    "ContentUnionMember2ImageMediaImageThisClassRepresentsAnImageObjectToCreate",
    {"format": str, "format_description": str},
    total=False,
)


# Mirrors ContentImageMediaImage for list elements: structured image metadata
# or a plain string.
ContentUnionMember2ImageMediaImage: TypeAlias = Union[
    ContentUnionMember2ImageMediaImageThisClassRepresentsAnImageObjectToCreate, str
]


class ContentUnionMember2ImageMedia(TypedDict, total=False):
    """Image-media wrapper for an element inside a list-valued contents entry."""

    # The image payload; `Required` overrides the class-level total=False.
    image: Required[ContentUnionMember2ImageMediaImage]


# One element of a list-valued contents entry: plain text or image media.
ContentUnionMember2: TypeAlias = Union[str, ContentUnionMember2ImageMedia]

# One entry of `contents`: a string, a single image, or a mixed list of both.
Content: TypeAlias = Union[str, ContentImageMedia, List[ContentUnionMember2]]
Original file line number Diff line number Diff line change
Expand Up @@ -34,7 +34,7 @@ class InferenceChatCompletionParamsBase(TypedDict, total=False):

tool_choice: Literal["auto", "required"]

tool_prompt_format: Literal["json", "function_tag"]
tool_prompt_format: Literal["json", "function_tag", "python_list"]
"""
`json` -- Refers to the json format for calling tools. The json format takes the
form like { "type": "function", "function" : { "name": "function_name",
Expand Down
Loading

0 comments on commit 39e0b2b

Please sign in to comment.