Prompt Driver Debug Logs (#1250)
collindutter authored Oct 10, 2024
1 parent 766d365 commit 3fc39a4
Showing 17 changed files with 264 additions and 34 deletions.
11 changes: 10 additions & 1 deletion CHANGELOG.md
@@ -7,7 +7,16 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0

## Unreleased

## [0.33.0] - 2024-10-09
### Added

- `griptape.configs.logging.JsonFormatter` for formatting logs as JSON.
- Request/response debug logging to all Prompt Drivers.

### Changed

- `_DefaultsConfig.logging_config` and `Defaults.drivers_config` are now lazily instantiated.

## \[0.33.0\] - 2024-10-09

## Added

7 changes: 5 additions & 2 deletions MIGRATION.md
@@ -15,6 +15,7 @@ DataframeLoader().load(df)
```

#### After

```python
# Convert the dataframe to csv bytes and parse it
CsvLoader().parse(bytes(df.to_csv(line_terminator='\r\n', index=False), encoding='utf-8'))
@@ -25,12 +26,14 @@ CsvLoader().parse(bytes(df.to_csv(line_terminator='\r\n', index=False), encoding
### `TextLoader`, `PdfLoader`, `ImageLoader`, and `AudioLoader` now take a `str | PathLike` instead of `bytes`.

#### Before

```python
PdfLoader().load(Path("attention.pdf").read_bytes())
PdfLoader().load_collection([Path("attention.pdf").read_bytes(), Path("CoT.pdf").read_bytes()])
```

#### After

```python
PdfLoader().load("attention.pdf")
PdfLoader().load_collection([Path("attention.pdf"), "CoT.pdf"])
@@ -47,7 +50,7 @@ You can now pass the file path directly to the Loader.
PdfLoader().load(load_file("attention.pdf").read_bytes())
PdfLoader().load_collection(list(load_files(["attention.pdf", "CoT.pdf"]).values()))
```

```python
PdfLoader().load("attention.pdf")
PdfLoader().load_collection(["attention.pdf", "CoT.pdf"])
@@ -69,6 +72,7 @@ vector_store.upsert_text_artifacts(
```

#### After

```python
artifact = PdfLoader().load("attention.pdf")
chunks = Chunker().chunk(artifact)
@@ -79,7 +83,6 @@ vector_store.upsert_text_artifacts(
)
```


### Removed `torch` extra from `transformers` dependency

The `torch` extra has been removed from the `transformers` dependency. If you require `torch`, install it separately.
76 changes: 76 additions & 0 deletions docs/griptape-framework/structures/configs.md
@@ -97,6 +97,82 @@ Griptape provides a predefined [LoggingConfig](../../reference/griptape/configs/
--8<-- "docs/griptape-framework/structures/src/logging_config.py"
```

#### Debug Logs

You can enable debug logs to view more granular information such as request/response payloads.

```python
import logging

from griptape.configs import Defaults
from griptape.configs.defaults_config import LoggingConfig
from griptape.configs.logging import JsonFormatter
from griptape.drivers import OpenAiChatPromptDriver
from griptape.structures import Agent
from griptape.tools import CalculatorTool

logger = logging.getLogger(Defaults.logging_config.logger_name)
logger.setLevel(logging.DEBUG)
logger.handlers[0].setFormatter(JsonFormatter())

agent = Agent()

agent.run("Hello world!")
```

```
[10/09/24 15:30:04] INFO PromptTask 75ef1747a5824bc8ac838f3081aeb57d
Input: Hello world!
DEBUG {
"model": "gpt-4o",
"temperature": 0.1,
"user": "",
"seed": null,
"messages": [
{
"role": "user",
"content": "Hello world!"
}
]
}
[10/09/24 15:30:05] DEBUG {
"id": "chatcmpl-AGZTwg4T4YikR2KjF3AMIRxlIfcKa",
"choices": [
{
"finish_reason": "stop",
"index": 0,
"logprobs": null,
"message": {
"content": "Hello! How can I assist you today?",
"refusal": null,
"role": "assistant",
"function_call": null,
"tool_calls": null
}
}
],
"created": 1728513004,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion",
"service_tier": null,
"system_fingerprint": "fp_2f406b9113",
"usage": {
"completion_tokens": 9,
"prompt_tokens": 10,
"total_tokens": 19,
"prompt_tokens_details": {
"cached_tokens": 0
},
"completion_tokens_details": {
"reasoning_tokens": 0
}
}
}
INFO PromptTask 75ef1747a5824bc8ac838f3081aeb57d
Output: Hello! How can I assist you today?
```
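
As a variation on the example above, here is a minimal sketch of routing the same debug output to a file instead of the console. The `griptape_debug.log` path and the standard-library `logging.FileHandler` are illustrative additions, not part of this commit:

```python
import logging

from griptape.configs import Defaults
from griptape.configs.logging import JsonFormatter

# Send Griptape's debug logs to a file, formatted as JSON.
logger = logging.getLogger(Defaults.logging_config.logger_name)
logger.setLevel(logging.DEBUG)

file_handler = logging.FileHandler("griptape_debug.log")  # illustrative path
file_handler.setFormatter(JsonFormatter())
logger.addHandler(file_handler)
```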

### Loading/Saving Configs

```python
18 changes: 14 additions & 4 deletions griptape/configs/defaults_config.py
@@ -2,12 +2,12 @@

from typing import TYPE_CHECKING

from attrs import Factory, define, field
from attrs import define, field

from griptape.mixins.singleton_mixin import SingletonMixin
from griptape.utils.decorators import lazy_property

from .base_config import BaseConfig
from .drivers.openai_drivers_config import OpenAiDriversConfig
from .logging.logging_config import LoggingConfig

if TYPE_CHECKING:
@@ -16,8 +16,18 @@

@define(kw_only=True)
class _DefaultsConfig(BaseConfig, SingletonMixin):
logging_config: LoggingConfig = field(default=Factory(lambda: LoggingConfig()))
drivers_config: BaseDriversConfig = field(default=Factory(lambda: OpenAiDriversConfig()))
_logging_config: LoggingConfig = field(default=None)
_drivers_config: BaseDriversConfig = field(default=None)

@lazy_property()
def logging_config(self) -> LoggingConfig:
return LoggingConfig()

@lazy_property()
def drivers_config(self) -> BaseDriversConfig:
from griptape.configs.drivers.openai_drivers_config import OpenAiDriversConfig

return OpenAiDriversConfig()


Defaults = _DefaultsConfig()
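
The switch from `Factory` defaults to `lazy_property` means neither config is constructed until it is first accessed, which also defers importing the OpenAI drivers config at module load time. A rough sketch of the general pattern, using a plain cached property rather than the actual `lazy_property` decorator from `griptape.utils.decorators`:

```python
class LazyDefaults:
    """Illustration of lazy instantiation: the attribute is built on first access."""

    def __init__(self) -> None:
        self._drivers_config = None

    @property
    def drivers_config(self):
        if self._drivers_config is None:
            # Import deferred until the config is actually needed.
            from griptape.configs.drivers.openai_drivers_config import OpenAiDriversConfig

            self._drivers_config = OpenAiDriversConfig()
        return self._drivers_config
```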
3 changes: 2 additions & 1 deletion griptape/configs/logging/__init__.py
@@ -1,5 +1,6 @@
from .logging_config import LoggingConfig
from .truncate_logging_filter import TruncateLoggingFilter
from .newline_logging_filter import NewlineLoggingFilter
from .json_formatter import JsonFormatter

__all__ = ["LoggingConfig", "TruncateLoggingFilter", "NewlineLoggingFilter"]
__all__ = ["LoggingConfig", "TruncateLoggingFilter", "NewlineLoggingFilter", "JsonFormatter"]
19 changes: 19 additions & 0 deletions griptape/configs/logging/json_formatter.py
@@ -0,0 +1,19 @@
import json
import logging
from typing import Any

from attrs import define, field


@define
class JsonFormatter(logging.Formatter):
indent: int = field(default=2, kw_only=True)

def __attrs_pre_init__(self) -> None:
super().__init__()

def format(self, record: Any) -> str:
if isinstance(record.msg, dict):
record.msg = json.dumps(record.msg, indent=self.indent)

return super().format(record)
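
A short, hypothetical usage example of the formatter above: dict log records are serialized as indented JSON, while string messages pass through unchanged. The logger name and handler setup here are illustrative:

```python
import logging

from griptape.configs.logging import JsonFormatter

handler = logging.StreamHandler()
handler.setFormatter(JsonFormatter(indent=4))

example_logger = logging.getLogger("json_formatter_example")  # illustrative name
example_logger.addHandler(handler)
example_logger.setLevel(logging.DEBUG)

# Dict messages are pretty-printed as JSON by JsonFormatter.format().
example_logger.debug({"model": "gpt-4o", "temperature": 0.1})
# Non-dict messages are formatted normally.
example_logger.debug("plain string message")
```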
14 changes: 12 additions & 2 deletions griptape/drivers/prompt/amazon_bedrock_prompt_driver.py
@@ -1,5 +1,6 @@
from __future__ import annotations

import logging
from typing import TYPE_CHECKING, Any

from attrs import Factory, define, field
@@ -28,6 +29,7 @@
ToolAction,
observable,
)
from griptape.configs import Defaults
from griptape.drivers import BasePromptDriver
from griptape.tokenizers import AmazonBedrockTokenizer, BaseTokenizer
from griptape.utils import import_optional_dependency
@@ -41,6 +43,8 @@
from griptape.common import PromptStack
from griptape.tools import BaseTool

logger = logging.getLogger(Defaults.logging_config.logger_name)


@define
class AmazonBedrockPromptDriver(BasePromptDriver):
@@ -60,7 +64,10 @@ def client(self) -> Any:

@observable
def try_run(self, prompt_stack: PromptStack) -> Message:
response = self.client.converse(**self._base_params(prompt_stack))
params = self._base_params(prompt_stack)
logger.debug(params)
response = self.client.converse(**params)
logger.debug(response)

usage = response["usage"]
output_message = response["output"]["message"]
@@ -73,11 +80,14 @@ def try_run(self, prompt_stack: PromptStack) -> Message:

@observable
def try_stream(self, prompt_stack: PromptStack) -> Iterator[DeltaMessage]:
response = self.client.converse_stream(**self._base_params(prompt_stack))
params = self._base_params(prompt_stack)
logger.debug(params)
response = self.client.converse_stream(**params)

stream = response.get("stream")
if stream is not None:
for event in stream:
logger.debug(event)
if "contentBlockDelta" in event or "contentBlockStart" in event:
yield DeltaMessage(content=self.__to_prompt_stack_delta_message_content(event))
elif "metadata" in event:
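Each prompt driver touched by this commit follows the same request/response logging pattern: build the params, `logger.debug` them, call the provider, then `logger.debug` the raw response (or each streamed event). A generic sketch of that pattern, with `provider_call` standing in for any client method (a hypothetical helper, not part of the commit):

```python
import logging
from typing import Any, Callable

from griptape.configs import Defaults

logger = logging.getLogger(Defaults.logging_config.logger_name)


def call_with_debug_logs(provider_call: Callable[..., Any], **params: Any) -> Any:
    """Hypothetical helper: log the request payload, invoke the provider, log the raw response."""
    logger.debug(params)
    response = provider_call(**params)
    logger.debug(response)
    return response
```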
griptape/drivers/prompt/amazon_sagemaker_jumpstart_prompt_driver.py
@@ -1,12 +1,14 @@
from __future__ import annotations

import json
import logging
from typing import TYPE_CHECKING, Any, Optional

from attrs import Attribute, Factory, define, field

from griptape.artifacts import TextArtifact
from griptape.common import DeltaMessage, Message, PromptStack, TextMessageContent, observable
from griptape.configs import Defaults
from griptape.drivers import BasePromptDriver
from griptape.tokenizers import HuggingFaceTokenizer
from griptape.utils import import_optional_dependency
@@ -19,6 +21,8 @@

from griptape.common import PromptStack

logger = logging.getLogger(Defaults.logging_config.logger_name)


@define
class AmazonSageMakerJumpstartPromptDriver(BasePromptDriver):
@@ -52,6 +56,7 @@ def try_run(self, prompt_stack: PromptStack) -> Message:
"inputs": self.prompt_stack_to_string(prompt_stack),
"parameters": {**self._base_params(prompt_stack)},
}
logger.debug(payload)

response = self.client.invoke_endpoint(
EndpointName=self.endpoint,
@@ -66,6 +71,7 @@ def try_run(self, prompt_stack: PromptStack) -> Message:
)

decoded_body = json.loads(response["Body"].read().decode("utf8"))
logger.debug(decoded_body)

if isinstance(decoded_body, list):
if decoded_body:
16 changes: 14 additions & 2 deletions griptape/drivers/prompt/anthropic_prompt_driver.py
@@ -1,5 +1,6 @@
from __future__ import annotations

import logging
from typing import TYPE_CHECKING, Optional

from attrs import Factory, define, field
@@ -29,6 +30,7 @@
ToolAction,
observable,
)
from griptape.configs import Defaults
from griptape.drivers import BasePromptDriver
from griptape.tokenizers import AnthropicTokenizer, BaseTokenizer
from griptape.utils import import_optional_dependency
@@ -43,6 +45,9 @@
from griptape.tools.base_tool import BaseTool


logger = logging.getLogger(Defaults.logging_config.logger_name)


@define
class AnthropicPromptDriver(BasePromptDriver):
"""Anthropic Prompt Driver.
@@ -72,7 +77,11 @@ def client(self) -> Client:

@observable
def try_run(self, prompt_stack: PromptStack) -> Message:
response = self.client.messages.create(**self._base_params(prompt_stack))
params = self._base_params(prompt_stack)
logger.debug(params)
response = self.client.messages.create(**params)

logger.debug(response.model_dump())

return Message(
content=[self.__to_prompt_stack_message_content(content) for content in response.content],
@@ -82,9 +91,12 @@ def try_run(self, prompt_stack: PromptStack) -> Message:

@observable
def try_stream(self, prompt_stack: PromptStack) -> Iterator[DeltaMessage]:
events = self.client.messages.create(**self._base_params(prompt_stack), stream=True)
params = {**self._base_params(prompt_stack), "stream": True}
logger.debug(params)
events = self.client.messages.create(**params)

for event in events:
logger.debug(event)
if event.type == "content_block_delta" or event.type == "content_block_start":
yield DeltaMessage(content=self.__to_prompt_stack_delta_message_content(event))
elif event.type == "message_start":