diff --git a/CHANGELOG.md b/CHANGELOG.md index 2effc9b00..9eceb30e1 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -7,7 +7,16 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 ## Unreleased -## [0.33.0] - 2024-10-09 +### Added + +- `griptape.configs.logging.JsonFormatter` for formatting logs as JSON. +- Request/response debug logging to all Prompt Drivers. + +### Changed + +- `_DefaultsConfig.logging_config` and `Defaults.drivers_config` are now lazily instantiated. + +## \[0.33.0\] - 2024-10-09 ## Added diff --git a/MIGRATION.md b/MIGRATION.md index 22f352387..2877de049 100644 --- a/MIGRATION.md +++ b/MIGRATION.md @@ -15,6 +15,7 @@ DataframeLoader().load(df) ``` #### After + ```python # Convert the dataframe to csv bytes and parse it CsvLoader().parse(bytes(df.to_csv(line_terminator='\r\n', index=False), encoding='utf-8')) @@ -25,12 +26,14 @@ CsvLoader().parse(bytes(df.to_csv(line_terminator='\r\n', index=False), encoding ### `TextLoader`, `PdfLoader`, `ImageLoader`, and `AudioLoader` now take a `str | PathLike` instead of `bytes`. #### Before + ```python PdfLoader().load(Path("attention.pdf").read_bytes()) PdfLoader().load_collection([Path("attention.pdf").read_bytes(), Path("CoT.pdf").read_bytes()]) ``` #### After + ```python PdfLoader().load("attention.pdf") PdfLoader().load_collection([Path("attention.pdf"), "CoT.pdf"]) @@ -47,7 +50,7 @@ You can now pass the file path directly to the Loader. 
PdfLoader().load(load_file("attention.pdf").read_bytes()) PdfLoader().load_collection(list(load_files(["attention.pdf", "CoT.pdf"]).values())) ``` - + ```python PdfLoader().load("attention.pdf") PdfLoader().load_collection(["attention.pdf", "CoT.pdf"]) @@ -69,6 +72,7 @@ vector_store.upsert_text_artifacts( ``` #### After + ```python artifact = PdfLoader().load("attention.pdf") chunks = Chunker().chunk(artifact) @@ -79,7 +83,6 @@ vector_store.upsert_text_artifacts( ) ``` - ### Removed `torch` extra from `transformers` dependency The `torch` extra has been removed from the `transformers` dependency. If you require `torch`, install it separately. diff --git a/docs/griptape-framework/structures/configs.md b/docs/griptape-framework/structures/configs.md index ed7327f86..98c58985d 100644 --- a/docs/griptape-framework/structures/configs.md +++ b/docs/griptape-framework/structures/configs.md @@ -97,6 +97,82 @@ Griptape provides a predefined [LoggingConfig](../../reference/griptape/configs/ --8<-- "docs/griptape-framework/structures/src/logging_config.py" ``` +#### Debug Logs + +You can enable debug logs to view more granular information such as request/response payloads. + +```python +import logging + +from griptape.configs import Defaults +from griptape.configs.defaults_config import LoggingConfig +from griptape.configs.logging import JsonFormatter +from griptape.drivers import OpenAiChatPromptDriver +from griptape.structures import Agent +from griptape.tools import CalculatorTool + +logger = logging.getLogger(Defaults.logging_config.logger_name) +logger.setLevel(logging.DEBUG) +logger.handlers[0].setFormatter(JsonFormatter()) + +agent = Agent() + +agent.run("Hello world!") +``` + +``` +[10/09/24 15:30:04] INFO PromptTask 75ef1747a5824bc8ac838f3081aeb57d + Input: Hello world! + DEBUG { + "model": "gpt-4o", + "temperature": 0.1, + "user": "", + "seed": null, + "messages": [ + { + "role": "user", + "content": "Hello world!" 
+ } + ] + } +[10/09/24 15:30:05] DEBUG { + "id": "chatcmpl-AGZTwg4T4YikR2KjF3AMIRxlIfcKa", + "choices": [ + { + "finish_reason": "stop", + "index": 0, + "logprobs": null, + "message": { + "content": "Hello! How can I assist you today?", + "refusal": null, + "role": "assistant", + "function_call": null, + "tool_calls": null + } + } + ], + "created": 1728513004, + "model": "gpt-4o-2024-08-06", + "object": "chat.completion", + "service_tier": null, + "system_fingerprint": "fp_2f406b9113", + "usage": { + "completion_tokens": 9, + "prompt_tokens": 10, + "total_tokens": 19, + "prompt_tokens_details": { + "cached_tokens": 0 + }, + "completion_tokens_details": { + "reasoning_tokens": 0 + } + } + } + INFO PromptTask 75ef1747a5824bc8ac838f3081aeb57d + Output: Hello! How can I assist you today? + +``` + ### Loading/Saving Configs ```python diff --git a/griptape/configs/defaults_config.py b/griptape/configs/defaults_config.py index b81f50cdc..71c0bceae 100644 --- a/griptape/configs/defaults_config.py +++ b/griptape/configs/defaults_config.py @@ -2,12 +2,12 @@ from typing import TYPE_CHECKING -from attrs import Factory, define, field +from attrs import define, field from griptape.mixins.singleton_mixin import SingletonMixin +from griptape.utils.decorators import lazy_property from .base_config import BaseConfig -from .drivers.openai_drivers_config import OpenAiDriversConfig from .logging.logging_config import LoggingConfig if TYPE_CHECKING: @@ -16,8 +16,18 @@ @define(kw_only=True) class _DefaultsConfig(BaseConfig, SingletonMixin): - logging_config: LoggingConfig = field(default=Factory(lambda: LoggingConfig())) - drivers_config: BaseDriversConfig = field(default=Factory(lambda: OpenAiDriversConfig())) + _logging_config: LoggingConfig = field(default=None) + _drivers_config: BaseDriversConfig = field(default=None) + + @lazy_property() + def logging_config(self) -> LoggingConfig: + return LoggingConfig() + + @lazy_property() + def drivers_config(self) -> BaseDriversConfig: + from 
griptape.configs.drivers.openai_drivers_config import OpenAiDriversConfig + + return OpenAiDriversConfig() Defaults = _DefaultsConfig() diff --git a/griptape/configs/logging/__init__.py b/griptape/configs/logging/__init__.py index de7726060..418708d75 100644 --- a/griptape/configs/logging/__init__.py +++ b/griptape/configs/logging/__init__.py @@ -1,5 +1,6 @@ from .logging_config import LoggingConfig from .truncate_logging_filter import TruncateLoggingFilter from .newline_logging_filter import NewlineLoggingFilter +from .json_formatter import JsonFormatter -__all__ = ["LoggingConfig", "TruncateLoggingFilter", "NewlineLoggingFilter"] +__all__ = ["LoggingConfig", "TruncateLoggingFilter", "NewlineLoggingFilter", "JsonFormatter"] diff --git a/griptape/configs/logging/json_formatter.py b/griptape/configs/logging/json_formatter.py new file mode 100644 index 000000000..3477112a5 --- /dev/null +++ b/griptape/configs/logging/json_formatter.py @@ -0,0 +1,19 @@ +import json +import logging +from typing import Any + +from attrs import define, field + + +@define +class JsonFormatter(logging.Formatter): + indent: int = field(default=2, kw_only=True) + + def __attrs_pre_init__(self) -> None: + super().__init__() + + def format(self, record: Any) -> str: + if isinstance(record.msg, dict): + record.msg = json.dumps(record.msg, indent=self.indent) + + return super().format(record) diff --git a/griptape/drivers/prompt/amazon_bedrock_prompt_driver.py b/griptape/drivers/prompt/amazon_bedrock_prompt_driver.py index be34d2a8c..1499b84c5 100644 --- a/griptape/drivers/prompt/amazon_bedrock_prompt_driver.py +++ b/griptape/drivers/prompt/amazon_bedrock_prompt_driver.py @@ -1,5 +1,6 @@ from __future__ import annotations +import logging from typing import TYPE_CHECKING, Any from attrs import Factory, define, field @@ -28,6 +29,7 @@ ToolAction, observable, ) +from griptape.configs import Defaults from griptape.drivers import BasePromptDriver from griptape.tokenizers import 
AmazonBedrockTokenizer, BaseTokenizer from griptape.utils import import_optional_dependency @@ -41,6 +43,8 @@ from griptape.common import PromptStack from griptape.tools import BaseTool +logger = logging.getLogger(Defaults.logging_config.logger_name) + @define class AmazonBedrockPromptDriver(BasePromptDriver): @@ -60,7 +64,10 @@ def client(self) -> Any: @observable def try_run(self, prompt_stack: PromptStack) -> Message: - response = self.client.converse(**self._base_params(prompt_stack)) + params = self._base_params(prompt_stack) + logger.debug(params) + response = self.client.converse(**params) + logger.debug(response) usage = response["usage"] output_message = response["output"]["message"] @@ -73,11 +80,14 @@ def try_run(self, prompt_stack: PromptStack) -> Message: @observable def try_stream(self, prompt_stack: PromptStack) -> Iterator[DeltaMessage]: - response = self.client.converse_stream(**self._base_params(prompt_stack)) + params = self._base_params(prompt_stack) + logger.debug(params) + response = self.client.converse_stream(**params) stream = response.get("stream") if stream is not None: for event in stream: + logger.debug(event) if "contentBlockDelta" in event or "contentBlockStart" in event: yield DeltaMessage(content=self.__to_prompt_stack_delta_message_content(event)) elif "metadata" in event: diff --git a/griptape/drivers/prompt/amazon_sagemaker_jumpstart_prompt_driver.py b/griptape/drivers/prompt/amazon_sagemaker_jumpstart_prompt_driver.py index 2dcf55307..8347f1f17 100644 --- a/griptape/drivers/prompt/amazon_sagemaker_jumpstart_prompt_driver.py +++ b/griptape/drivers/prompt/amazon_sagemaker_jumpstart_prompt_driver.py @@ -1,12 +1,14 @@ from __future__ import annotations import json +import logging from typing import TYPE_CHECKING, Any, Optional from attrs import Attribute, Factory, define, field from griptape.artifacts import TextArtifact from griptape.common import DeltaMessage, Message, PromptStack, TextMessageContent, observable +from 
griptape.configs import Defaults from griptape.drivers import BasePromptDriver from griptape.tokenizers import HuggingFaceTokenizer from griptape.utils import import_optional_dependency @@ -19,6 +21,8 @@ from griptape.common import PromptStack +logger = logging.getLogger(Defaults.logging_config.logger_name) + @define class AmazonSageMakerJumpstartPromptDriver(BasePromptDriver): @@ -52,6 +56,7 @@ def try_run(self, prompt_stack: PromptStack) -> Message: "inputs": self.prompt_stack_to_string(prompt_stack), "parameters": {**self._base_params(prompt_stack)}, } + logger.debug(payload) response = self.client.invoke_endpoint( EndpointName=self.endpoint, @@ -66,6 +71,7 @@ def try_run(self, prompt_stack: PromptStack) -> Message: ) decoded_body = json.loads(response["Body"].read().decode("utf8")) + logger.debug(decoded_body) if isinstance(decoded_body, list): if decoded_body: diff --git a/griptape/drivers/prompt/anthropic_prompt_driver.py b/griptape/drivers/prompt/anthropic_prompt_driver.py index 1c7b376f8..054049fe8 100644 --- a/griptape/drivers/prompt/anthropic_prompt_driver.py +++ b/griptape/drivers/prompt/anthropic_prompt_driver.py @@ -1,5 +1,6 @@ from __future__ import annotations +import logging from typing import TYPE_CHECKING, Optional from attrs import Factory, define, field @@ -29,6 +30,7 @@ ToolAction, observable, ) +from griptape.configs import Defaults from griptape.drivers import BasePromptDriver from griptape.tokenizers import AnthropicTokenizer, BaseTokenizer from griptape.utils import import_optional_dependency @@ -43,6 +45,9 @@ from griptape.tools.base_tool import BaseTool +logger = logging.getLogger(Defaults.logging_config.logger_name) + + @define class AnthropicPromptDriver(BasePromptDriver): """Anthropic Prompt Driver. 
@@ -72,7 +77,11 @@ def client(self) -> Client: @observable def try_run(self, prompt_stack: PromptStack) -> Message: - response = self.client.messages.create(**self._base_params(prompt_stack)) + params = self._base_params(prompt_stack) + logger.debug(params) + response = self.client.messages.create(**params) + + logger.debug(response.model_dump()) return Message( content=[self.__to_prompt_stack_message_content(content) for content in response.content], @@ -82,9 +91,12 @@ def try_run(self, prompt_stack: PromptStack) -> Message: @observable def try_stream(self, prompt_stack: PromptStack) -> Iterator[DeltaMessage]: - events = self.client.messages.create(**self._base_params(prompt_stack), stream=True) + params = {**self._base_params(prompt_stack), "stream": True} + logger.debug(params) + events = self.client.messages.create(**params) for event in events: + logger.debug(event) if event.type == "content_block_delta" or event.type == "content_block_start": yield DeltaMessage(content=self.__to_prompt_stack_delta_message_content(event)) elif event.type == "message_start": diff --git a/griptape/drivers/prompt/cohere_prompt_driver.py b/griptape/drivers/prompt/cohere_prompt_driver.py index b31c78ea3..6bd8fb010 100644 --- a/griptape/drivers/prompt/cohere_prompt_driver.py +++ b/griptape/drivers/prompt/cohere_prompt_driver.py @@ -1,5 +1,6 @@ from __future__ import annotations +import logging from typing import TYPE_CHECKING, Any from attrs import Factory, define, field @@ -20,6 +21,7 @@ observable, ) from griptape.common.prompt_stack.contents.action_call_delta_message_content import ActionCallDeltaMessageContent +from griptape.configs import Defaults from griptape.drivers import BasePromptDriver from griptape.tokenizers import BaseTokenizer, CohereTokenizer from griptape.utils import import_optional_dependency @@ -33,6 +35,8 @@ from griptape.tools import BaseTool +logger = logging.getLogger(Defaults.logging_config.logger_name) + @define(kw_only=True) class 
CoherePromptDriver(BasePromptDriver): @@ -59,7 +63,11 @@ def client(self) -> Client: @observable def try_run(self, prompt_stack: PromptStack) -> Message: - result = self.client.chat(**self._base_params(prompt_stack)) + params = self._base_params(prompt_stack) + logger.debug(params) + + result = self.client.chat(**params) + logger.debug(result.model_dump()) usage = result.meta.tokens return Message( @@ -70,9 +78,12 @@ def try_run(self, prompt_stack: PromptStack) -> Message: @observable def try_stream(self, prompt_stack: PromptStack) -> Iterator[DeltaMessage]: - result = self.client.chat_stream(**self._base_params(prompt_stack)) + params = self._base_params(prompt_stack) + logger.debug(params) + result = self.client.chat_stream(**params) for event in result: + logger.debug(event.model_dump()) if event.event_type == "stream-end": usage = event.response.meta.tokens diff --git a/griptape/drivers/prompt/google_prompt_driver.py b/griptape/drivers/prompt/google_prompt_driver.py index 4afdad5c6..1634bc613 100644 --- a/griptape/drivers/prompt/google_prompt_driver.py +++ b/griptape/drivers/prompt/google_prompt_driver.py @@ -1,6 +1,7 @@ from __future__ import annotations import json +import logging from typing import TYPE_CHECKING, Optional from attrs import Factory, define, field @@ -23,6 +24,7 @@ ToolAction, observable, ) +from griptape.configs import Defaults from griptape.drivers import BasePromptDriver from griptape.tokenizers import BaseTokenizer, GoogleTokenizer from griptape.utils import import_optional_dependency, remove_key_in_dict_recursively @@ -37,6 +39,8 @@ from griptape.tools import BaseTool +logger = logging.getLogger(Defaults.logging_config.logger_name) + @define class GooglePromptDriver(BasePromptDriver): @@ -72,10 +76,10 @@ def client(self) -> GenerativeModel: @observable def try_run(self, prompt_stack: PromptStack) -> Message: messages = self.__to_google_messages(prompt_stack) - response: GenerateContentResponse = self.client.generate_content( - messages, - 
**self._base_params(prompt_stack), - ) + params = self._base_params(prompt_stack) + logger.debug((messages, params)) + response: GenerateContentResponse = self.client.generate_content(messages, **params) + logger.debug(response.to_dict()) usage_metadata = response.usage_metadata @@ -91,14 +95,16 @@ def try_run(self, prompt_stack: PromptStack) -> Message: @observable def try_stream(self, prompt_stack: PromptStack) -> Iterator[DeltaMessage]: messages = self.__to_google_messages(prompt_stack) + params = {**self._base_params(prompt_stack), "stream": True} + logger.debug((messages, params)) response: GenerateContentResponse = self.client.generate_content( messages, - **self._base_params(prompt_stack), - stream=True, + **params, ) prompt_token_count = None for chunk in response: + logger.debug(chunk.to_dict()) usage_metadata = chunk.usage_metadata content = self.__to_prompt_stack_delta_message_content(chunk.parts[0]) if chunk.parts else None diff --git a/griptape/drivers/prompt/huggingface_hub_prompt_driver.py b/griptape/drivers/prompt/huggingface_hub_prompt_driver.py index 68267f755..11ae1c145 100644 --- a/griptape/drivers/prompt/huggingface_hub_prompt_driver.py +++ b/griptape/drivers/prompt/huggingface_hub_prompt_driver.py @@ -1,10 +1,12 @@ from __future__ import annotations +import logging from typing import TYPE_CHECKING from attrs import Factory, define, field from griptape.common import DeltaMessage, Message, PromptStack, TextDeltaMessageContent, observable +from griptape.configs import Defaults from griptape.drivers import BasePromptDriver from griptape.tokenizers import HuggingFaceTokenizer from griptape.utils import import_optional_dependency @@ -15,6 +17,8 @@ from huggingface_hub import InferenceClient +logger = logging.getLogger(Defaults.logging_config.logger_name) + @define class HuggingFaceHubPromptDriver(BasePromptDriver): @@ -52,13 +56,14 @@ def client(self) -> InferenceClient: @observable def try_run(self, prompt_stack: PromptStack) -> Message: prompt 
= self.prompt_stack_to_string(prompt_stack) + full_params = {"return_full_text": False, "max_new_tokens": self.max_tokens, **self.params} + logger.debug((prompt, full_params)) response = self.client.text_generation( prompt, - return_full_text=False, - max_new_tokens=self.max_tokens, - **self.params, + **full_params, ) + logger.debug(response) input_tokens = len(self.__prompt_stack_to_tokens(prompt_stack)) output_tokens = len(self.tokenizer.tokenizer.encode(response)) @@ -71,19 +76,16 @@ def try_run(self, prompt_stack: PromptStack) -> Message: @observable def try_stream(self, prompt_stack: PromptStack) -> Iterator[DeltaMessage]: prompt = self.prompt_stack_to_string(prompt_stack) + full_params = {"return_full_text": False, "max_new_tokens": self.max_tokens, "stream": True, **self.params} + logger.debug((prompt, full_params)) - response = self.client.text_generation( - prompt, - return_full_text=False, - max_new_tokens=self.max_tokens, - stream=True, - **self.params, - ) + response = self.client.text_generation(prompt, **full_params) input_tokens = len(self.__prompt_stack_to_tokens(prompt_stack)) full_text = "" for token in response: + logger.debug(token) full_text += token yield DeltaMessage(content=TextDeltaMessageContent(token, index=0)) diff --git a/griptape/drivers/prompt/huggingface_pipeline_prompt_driver.py b/griptape/drivers/prompt/huggingface_pipeline_prompt_driver.py index 1978b339a..87e20b8ec 100644 --- a/griptape/drivers/prompt/huggingface_pipeline_prompt_driver.py +++ b/griptape/drivers/prompt/huggingface_pipeline_prompt_driver.py @@ -1,11 +1,13 @@ from __future__ import annotations +import logging from typing import TYPE_CHECKING from attrs import Factory, define, field from griptape.artifacts import TextArtifact from griptape.common import DeltaMessage, Message, PromptStack, TextMessageContent, observable +from griptape.configs import Defaults from griptape.drivers import BasePromptDriver from griptape.tokenizers import HuggingFaceTokenizer from 
griptape.utils import import_optional_dependency @@ -16,6 +18,8 @@ from transformers import TextGenerationPipeline +logger = logging.getLogger(Defaults.logging_config.logger_name) + @define class HuggingFacePipelinePromptDriver(BasePromptDriver): @@ -52,6 +56,12 @@ def pipeline(self) -> TextGenerationPipeline: @observable def try_run(self, prompt_stack: PromptStack) -> Message: messages = self._prompt_stack_to_messages(prompt_stack) + logger.debug( + ( + messages, + {"max_new_tokens": self.max_tokens, "temperature": self.temperature, "do_sample": True, **self.params}, + ) + ) result = self.pipeline( messages, @@ -60,6 +70,7 @@ def try_run(self, prompt_stack: PromptStack) -> Message: do_sample=True, **self.params, ) + logger.debug(result) if isinstance(result, list): if len(result) == 1: diff --git a/griptape/drivers/prompt/ollama_prompt_driver.py b/griptape/drivers/prompt/ollama_prompt_driver.py index 5f9e32e2f..01db026ff 100644 --- a/griptape/drivers/prompt/ollama_prompt_driver.py +++ b/griptape/drivers/prompt/ollama_prompt_driver.py @@ -1,5 +1,6 @@ from __future__ import annotations +import logging from collections.abc import Iterator from typing import TYPE_CHECKING, Any, Optional @@ -19,11 +20,14 @@ ToolAction, observable, ) +from griptape.configs import Defaults from griptape.drivers import BasePromptDriver from griptape.tokenizers import SimpleTokenizer from griptape.utils import import_optional_dependency from griptape.utils.decorators import lazy_property +logger = logging.getLogger(Defaults.logging_config.logger_name) + if TYPE_CHECKING: from ollama import Client @@ -72,7 +76,10 @@ def client(self) -> Client: @observable def try_run(self, prompt_stack: PromptStack) -> Message: - response = self.client.chat(**self._base_params(prompt_stack)) + params = self._base_params(prompt_stack) + logger.debug(params) + response = self.client.chat(**params) + logger.debug(response) if isinstance(response, dict): return Message( @@ -84,10 +91,13 @@ def try_run(self, 
prompt_stack: PromptStack) -> Message: @observable def try_stream(self, prompt_stack: PromptStack) -> Iterator[DeltaMessage]: - stream = self.client.chat(**self._base_params(prompt_stack), stream=True) + params = {**self._base_params(prompt_stack), "stream": True} + logger.debug(params) + stream = self.client.chat(**params) if isinstance(stream, Iterator): for chunk in stream: + logger.debug(chunk) yield DeltaMessage(content=TextDeltaMessageContent(chunk["message"]["content"])) else: raise Exception("invalid model response") diff --git a/griptape/drivers/prompt/openai_chat_prompt_driver.py b/griptape/drivers/prompt/openai_chat_prompt_driver.py index ec10ab72e..75a604805 100644 --- a/griptape/drivers/prompt/openai_chat_prompt_driver.py +++ b/griptape/drivers/prompt/openai_chat_prompt_driver.py @@ -1,6 +1,7 @@ from __future__ import annotations import json +import logging from typing import TYPE_CHECKING, Optional import openai @@ -23,6 +24,7 @@ ToolAction, observable, ) +from griptape.configs import Defaults from griptape.drivers import BasePromptDriver from griptape.tokenizers import BaseTokenizer, OpenAiTokenizer from griptape.utils.decorators import lazy_property @@ -36,6 +38,9 @@ from griptape.tools import BaseTool +logger = logging.getLogger(Defaults.logging_config.logger_name) + + @define class OpenAiChatPromptDriver(BasePromptDriver): """OpenAI Chat Prompt Driver. 
@@ -95,8 +100,11 @@ def client(self) -> openai.OpenAI: @observable def try_run(self, prompt_stack: PromptStack) -> Message: - result = self.client.chat.completions.create(**self._base_params(prompt_stack)) + params = self._base_params(prompt_stack) + logger.debug(params) + result = self.client.chat.completions.create(**params) + logger.debug(result.model_dump()) if len(result.choices) == 1: message = result.choices[0].message @@ -113,9 +121,12 @@ def try_run(self, prompt_stack: PromptStack) -> Message: @observable def try_stream(self, prompt_stack: PromptStack) -> Iterator[DeltaMessage]: - result = self.client.chat.completions.create(**self._base_params(prompt_stack), stream=True) + params = self._base_params(prompt_stack) + logger.debug({"stream": True, **params}) + result = self.client.chat.completions.create(**params, stream=True) for chunk in result: + logger.debug(chunk.model_dump()) if chunk.usage is not None: yield DeltaMessage( usage=DeltaMessage.Usage( diff --git a/tests/unit/configs/logging/test_json_formatter.py b/tests/unit/configs/logging/test_json_formatter.py new file mode 100644 index 000000000..184e0ded5 --- /dev/null +++ b/tests/unit/configs/logging/test_json_formatter.py @@ -0,0 +1,22 @@ +import logging + +from griptape.configs.logging import JsonFormatter + + +class TestJsonFormatter: + def test_init(self): + formatter = JsonFormatter() + assert formatter + + def test_format(self): + formatter = JsonFormatter() + record = logging.LogRecord( + name="name", + level=logging.INFO, + pathname="pathname", + lineno=1, + msg={"key": "value"}, + args=None, + exc_info=None, + ) + assert formatter.format(record) == '{\n "key": "value"\n}' diff --git a/tests/unit/configs/logging/test_logging_config.py b/tests/unit/configs/logging/test_logging_config.py new file mode 100644 index 000000000..174d461e6 --- /dev/null +++ b/tests/unit/configs/logging/test_logging_config.py @@ -0,0 +1,11 @@ +import logging + +from griptape.configs import Defaults + + +class 
TestLoggingConfig: + def test_init(self): + logger = logging.getLogger(Defaults.logging_config.logger_name) + assert logger.level == logging.INFO + assert logger.propagate is False + assert len(logger.handlers) == 1