Skip to content

Commit

Permalink
Litellm dev 12 24 2024 p2 (BerriAI#7400)
Browse files Browse the repository at this point in the history
* fix(utils.py): default custom_llm_provider=None for 'supports_response_schema'

Closes BerriAI#7397

* refactor(langfuse/): call the Langfuse logger inside a CustomLogger-compatible Langfuse class; refactor the Langfuse logger to use verbose_logger.debug instead of print_verbose

* refactor(litellm_pre_call_utils.py): move config based team callbacks inside dynamic team callback logic

enables simpler unit testing for config-based team callbacks

* fix(proxy/_types.py): handle teamcallbackmetadata - none values

Drop None values if present; if all values are None, fall back to the default dict to avoid downstream errors.

* test(test_proxy_utils.py): add unit test preventing future regressions — asserts that team_id in config state is not popped off across calls

Fixes BerriAI#6787

* fix(langfuse_prompt_management.py): add success + failure logging event support

* fix: fix linting error

* test: fix test

* test: fix test

* test: override o1 prompt caching - openai currently not working

* test: fix test
  • Loading branch information
krrishdholakia authored Dec 25, 2024
1 parent fb9ab10 commit 2e86a48
Show file tree
Hide file tree
Showing 12 changed files with 227 additions and 62 deletions.
19 changes: 8 additions & 11 deletions litellm/integrations/langfuse/langfuse.py
Original file line number Diff line number Diff line change
Expand Up @@ -148,12 +148,7 @@ def add_metadata_from_header(litellm_params: dict, metadata: dict) -> dict:

return metadata

# def log_error(kwargs, response_obj, start_time, end_time):
# generation = trace.generation(
# level ="ERROR" # can be any of DEBUG, DEFAULT, WARNING or ERROR
# status_message='error' # can be any string (e.g. stringified stack trace or error body)
# )
def log_event( # noqa: PLR0915
def _old_log_event( # noqa: PLR0915
self,
kwargs,
response_obj,
Expand All @@ -167,7 +162,7 @@ def log_event( # noqa: PLR0915
# Method definition

try:
print_verbose(
verbose_logger.debug(
f"Langfuse Logging - Enters logging function for model {kwargs}"
)

Expand Down Expand Up @@ -260,7 +255,9 @@ def log_event( # noqa: PLR0915
):
input = prompt
output = response_obj.get("response", "")
print_verbose(f"OUTPUT IN LANGFUSE: {output}; original: {response_obj}")
verbose_logger.debug(
f"OUTPUT IN LANGFUSE: {output}; original: {response_obj}"
)
trace_id = None
generation_id = None
if self._is_langfuse_v2():
Expand Down Expand Up @@ -291,7 +288,7 @@ def log_event( # noqa: PLR0915
input,
response_obj,
)
print_verbose(
verbose_logger.debug(
f"Langfuse Layer Logging - final response object: {response_obj}"
)
verbose_logger.info("Langfuse Layer Logging - logging success")
Expand Down Expand Up @@ -444,7 +441,7 @@ def _log_langfuse_v2( # noqa: PLR0915
) -> tuple:
import langfuse

print_verbose("Langfuse Layer Logging - logging to langfuse v2")
verbose_logger.debug("Langfuse Layer Logging - logging to langfuse v2")

try:
metadata = self._prepare_metadata(metadata)
Expand Down Expand Up @@ -577,7 +574,7 @@ def _log_langfuse_v2( # noqa: PLR0915
trace_params["metadata"] = {"metadata_passed_to_litellm": metadata}

cost = kwargs.get("response_cost", None)
print_verbose(f"trace: {cost}")
verbose_logger.debug(f"trace: {cost}")

clean_metadata["litellm_response_cost"] = cost
if standard_logging_object is not None:
Expand Down
34 changes: 32 additions & 2 deletions litellm/integrations/langfuse/langfuse_prompt_management.py
Original file line number Diff line number Diff line change
Expand Up @@ -14,7 +14,9 @@
from litellm.integrations.custom_logger import CustomLogger
from litellm.proxy._types import UserAPIKeyAuth
from litellm.types.llms.openai import AllMessageValues
from litellm.types.utils import StandardCallbackDynamicParams
from litellm.types.utils import StandardCallbackDynamicParams, StandardLoggingPayload

from .langfuse import LangFuseLogger

if TYPE_CHECKING:
from langfuse import Langfuse
Expand Down Expand Up @@ -92,7 +94,7 @@ def langfuse_client_init(
return client


class LangfusePromptManagement(CustomLogger):
class LangfusePromptManagement(LangFuseLogger, CustomLogger):
def __init__(
self,
langfuse_public_key=None,
Expand Down Expand Up @@ -248,3 +250,31 @@ def get_chat_completion_prompt(
model = self._get_model_from_prompt(langfuse_prompt_client, model)

return model, messages, non_default_params

async def async_log_success_event(self, kwargs, response_obj, start_time, end_time):
self._old_log_event(
kwargs=kwargs,
response_obj=response_obj,
start_time=start_time,
end_time=end_time,
user_id=kwargs.get("user", None),
print_verbose=None,
)

async def async_log_failure_event(self, kwargs, response_obj, start_time, end_time):
standard_logging_object = cast(
Optional[StandardLoggingPayload],
kwargs.get("standard_logging_object", None),
)
if standard_logging_object is None:
return
self._old_log_event(
start_time=start_time,
end_time=end_time,
response_obj=None,
user_id=kwargs.get("user", None),
print_verbose=None,
status_message=standard_logging_object["error_str"],
level="ERROR",
kwargs=kwargs,
)
4 changes: 2 additions & 2 deletions litellm/litellm_core_utils/litellm_logging.py
Original file line number Diff line number Diff line change
Expand Up @@ -1202,7 +1202,7 @@ def success_handler( # noqa: PLR0915
in_memory_dynamic_logger_cache=in_memory_dynamic_logger_cache,
)
if langfuse_logger_to_use is not None:
_response = langfuse_logger_to_use.log_event(
_response = langfuse_logger_to_use._old_log_event(
kwargs=kwargs,
response_obj=result,
start_time=start_time,
Expand Down Expand Up @@ -1925,7 +1925,7 @@ def failure_handler( # noqa: PLR0915
standard_callback_dynamic_params=self.standard_callback_dynamic_params,
in_memory_dynamic_logger_cache=in_memory_dynamic_logger_cache,
)
_response = langfuse_logger_to_use.log_event(
_response = langfuse_logger_to_use._old_log_event(
start_time=start_time,
end_time=end_time,
response_obj=None,
Expand Down
1 change: 0 additions & 1 deletion litellm/proxy/_experimental/out/404.html

This file was deleted.

1 change: 0 additions & 1 deletion litellm/proxy/_experimental/out/model_hub.html

This file was deleted.

Loading

0 comments on commit 2e86a48

Please sign in to comment.