Commit
Set azure_openai model provider value
Yun-Kim committed Sep 18, 2024
1 parent 7a2e802 commit 2e30eb8
Showing 3 changed files with 9 additions and 7 deletions.
ddtrace/llmobs/_integrations/openai.py (2 additions, 1 deletion)
@@ -153,7 +153,8 @@ def llmobs_set_tags(
         span.set_tag_str(SPAN_KIND, span_kind)
         model_name = span.get_tag("openai.response.model") or span.get_tag("openai.request.model")
         span.set_tag_str(MODEL_NAME, model_name or "")
-        span.set_tag_str(MODEL_PROVIDER, "openai")
+        model_provider = "azure_openai" if self._is_azure_openai(span) else "openai"
+        span.set_tag_str(MODEL_PROVIDER, model_provider)
         if operation == "completion":
             self._llmobs_set_meta_tags_from_completion(resp, err, kwargs, streamed_completions, span)
         elif operation == "chat":
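The helper ``self._is_azure_openai(span)`` referenced above is not part of this diff. As a hedged illustration only (not the actual ddtrace implementation), such a check could key off the ``openai.request.client`` tag that this integration already sets:

def _is_azure_openai(self, span):
    # Hypothetical sketch: report Azure OpenAI when the integration has
    # tagged the request client as AzureOpenAI; the real helper may inspect
    # the client object or its base URL instead.
    client = span.get_tag("openai.request.client") or ""
    return client.lower() == "azureopenai"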
releasenotes/notes/feat-openai-azure-23cf45cfa854a5e5.yaml (3 additions, 2 deletions)
@@ -4,6 +4,7 @@ features:
     openai: The OpenAI integration now includes a new ``openai.request.client`` tag with the possible values ``OpenAI/AzureOpenAI``
     to help differentiate whether the request was made to Azure OpenAI or OpenAI.
   - |
-    LLM Observability: LLM Observability spans generated by the OpenAI integration now have updated span names.
+    LLM Observability: LLM Observability spans generated by the OpenAI integration now have updated span name and ``model_provider`` values.
     Span names are now prefixed with the OpenAI client name (possible values: ``OpenAI/AzureOpenAI``)
-    instead of the default ``openai`` prefix to better differentiate whether the request was made to Azure OpenAI or OpenAI.
+    instead of the default ``openai`` prefix to better differentiate whether the request was made to Azure OpenAI or OpenAI.
+    The ``model_provider`` field also now corresponds to ``openai`` or ``azure_openai`` based on the OpenAI client.
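As a usage illustration of the release note above (a sketch, not part of this commit): with ddtrace's OpenAI integration and LLM Observability enabled, a request made through an ``AzureOpenAI`` client should now emit a span whose ``model_provider`` is ``azure_openai``, while the same request through ``OpenAI`` reports ``openai``. The endpoint, key, and deployment name below are placeholders.

import openai

# Assumes ddtrace instrumentation is enabled (for example via ddtrace-run)
# with LLM Observability configured; credentials below are placeholders.
client = openai.AzureOpenAI(
    azure_endpoint="https://example-resource.openai.azure.com",
    api_key="<azure-openai-api-key>",
    api_version="2024-02-01",
)

# Traced spans for this call should carry model_provider="azure_openai".
client.chat.completions.create(
    model="gpt-35-turbo",  # Azure deployment name (placeholder)
    messages=[{"role": "user", "content": "Hello"}],
)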
tests/contrib/openai/test_openai_llmobs.py (4 additions, 4 deletions)
@@ -360,7 +360,7 @@ def test_completion_azure(
             _expected_llmobs_llm_span_event(
                 span,
                 model_name=resp.model,
-                model_provider="openai",
+                model_provider="azure_openai",
                 input_messages=[{"content": prompt}],
                 output_messages=[{"content": expected_output}],
                 metadata={"temperature": 0, "max_tokens": 20, "n": 1, "user": "ddtrace-test"},
@@ -390,7 +390,7 @@ async def test_completion_azure_async(
             _expected_llmobs_llm_span_event(
                 span,
                 model_name=resp.model,
-                model_provider="openai",
+                model_provider="azure_openai",
                 input_messages=[{"content": prompt}],
                 output_messages=[{"content": expected_output}],
                 metadata={"temperature": 0, "max_tokens": 20, "n": 1, "user": "ddtrace-test"},
@@ -479,7 +479,7 @@ def test_chat_completion_azure(
             _expected_llmobs_llm_span_event(
                 span,
                 model_name=resp.model,
-                model_provider="openai",
+                model_provider="azure_openai",
                 input_messages=input_messages,
                 output_messages=[{"role": "assistant", "content": expected_output}],
                 metadata={"temperature": 0, "max_tokens": 20, "n": 1, "user": "ddtrace-test"},
@@ -509,7 +509,7 @@ async def test_chat_completion_azure_async(
             _expected_llmobs_llm_span_event(
                 span,
                 model_name=resp.model,
-                model_provider="openai",
+                model_provider="azure_openai",
                 input_messages=input_messages,
                 output_messages=[{"role": "assistant", "content": expected_output}],
                 metadata={"temperature": 0, "max_tokens": 20, "n": 1, "user": "ddtrace-test"},
