Fixed OpenAI tests (#3738)
antonpirker authored Dec 20, 2024
1 parent 64d1930 commit 9e64b1d
Showing 1 changed file with 9 additions and 6 deletions.

tests/integrations/openai/test_openai.py
@@ -83,8 +83,8 @@ def test_nonstreaming_chat_completion(
     assert span["op"] == "ai.chat_completions.create.openai"

     if send_default_pii and include_prompts:
-        assert "hello" in span["data"]["ai.input_messages"]["content"]
-        assert "the model response" in span["data"]["ai.responses"]["content"]
+        assert '"content": "hello"' in span["data"]["ai.input_messages"]
+        assert '"content": "the model response"' in span["data"]["ai.responses"]
     else:
         assert "ai.input_messages" not in span["data"]
         assert "ai.responses" not in span["data"]
@@ -125,8 +125,8 @@ async def test_nonstreaming_chat_completion_async(
     assert span["op"] == "ai.chat_completions.create.openai"

     if send_default_pii and include_prompts:
-        assert "hello" in span["data"]["ai.input_messages"]["content"]
-        assert "the model response" in span["data"]["ai.responses"]["content"]
+        assert '"content": "hello"' in span["data"]["ai.input_messages"]
+        assert '"content": "the model response"' in span["data"]["ai.responses"]
     else:
         assert "ai.input_messages" not in span["data"]
         assert "ai.responses" not in span["data"]
@@ -218,7 +218,7 @@ def test_streaming_chat_completion(
     assert span["op"] == "ai.chat_completions.create.openai"

     if send_default_pii and include_prompts:
-        assert "hello" in span["data"]["ai.input_messages"]["content"]
+        assert '"content": "hello"' in span["data"]["ai.input_messages"]
         assert "hello world" in span["data"]["ai.responses"]
     else:
         assert "ai.input_messages" not in span["data"]
@@ -314,7 +314,7 @@ async def test_streaming_chat_completion_async(
     assert span["op"] == "ai.chat_completions.create.openai"

     if send_default_pii and include_prompts:
-        assert "hello" in span["data"]["ai.input_messages"]["content"]
+        assert '"content": "hello"' in span["data"]["ai.input_messages"]
         assert "hello world" in span["data"]["ai.responses"]
     else:
         assert "ai.input_messages" not in span["data"]
@@ -330,6 +330,7 @@ async def test_streaming_chat_completion_async(
         pass # if tiktoken is not installed, we can't guarantee token usage will be calculated properly


+@pytest.mark.forked
 def test_bad_chat_completion(sentry_init, capture_events):
     sentry_init(integrations=[OpenAIIntegration()], traces_sample_rate=1.0)
     events = capture_events()
@@ -460,6 +461,7 @@ async def test_embeddings_create_async(
     assert span["measurements"]["ai_total_tokens_used"]["value"] == 30


+@pytest.mark.forked
 @pytest.mark.parametrize(
     "send_default_pii, include_prompts",
     [(True, True), (True, False), (False, True), (False, False)],
@@ -487,6 +489,7 @@ def test_embeddings_create_raises_error(
     assert event["level"] == "error"


+@pytest.mark.forked
 @pytest.mark.asyncio
 @pytest.mark.parametrize(
     "send_default_pii, include_prompts",
