Skip to content

Commit 89b1b3e

Browse files
Authored merge commit: Merge branch 'main' into nimar/lfe-6153-generalized-graphs-python
2 parents: ef49cde + 2bfdff2; merge commit: 89b1b3e

4 files changed

Lines changed: 50 additions & 12 deletions

File tree

langfuse/langchain/CallbackHandler.py

Lines changed: 27 additions & 8 deletions
Original file line numberDiff line numberDiff line change
@@ -240,7 +240,7 @@ def _register_langfuse_prompt(
240240
If parent_run_id is None, we are at the root of a trace and should not attempt to register the prompt, as there will be no LLM invocation following it.
241241
Otherwise it would have been traced in with a parent run consisting of the prompt template formatting and the LLM invocation.
242242
"""
243-
if not parent_run_id:
243+
if not parent_run_id or not run_id:
244244
return
245245

246246
langfuse_prompt = metadata and metadata.get("langfuse_prompt", None)
@@ -255,7 +255,7 @@ def _register_langfuse_prompt(
255255
self.prompt_to_parent_run_map[run_id] = registered_prompt
256256

257257
def _deregister_langfuse_prompt(self, run_id: Optional[UUID]) -> None:
258-
if run_id in self.prompt_to_parent_run_map:
258+
if run_id is not None and run_id in self.prompt_to_parent_run_map:
259259
del self.prompt_to_parent_run_map[run_id]
260260

261261
def on_agent_action(
@@ -610,7 +610,14 @@ def __on_llm_action(
610610
content = {
611611
"name": self.get_langchain_run_name(serialized, **kwargs),
612612
"input": prompts,
613-
"metadata": self.__join_tags_and_metadata(tags, metadata),
613+
"metadata": self.__join_tags_and_metadata(
614+
tags,
615+
metadata,
616+
# If llm is run isolated and outside chain, keep trace attributes
617+
keep_langfuse_trace_attributes=True
618+
if parent_run_id is None
619+
else False,
620+
),
614621
"model": model_name,
615622
"model_parameters": self._parse_model_parameters(kwargs),
616623
"prompt": registered_prompt,
@@ -763,16 +770,19 @@ def __join_tags_and_metadata(
763770
self,
764771
tags: Optional[List[str]] = None,
765772
metadata: Optional[Dict[str, Any]] = None,
766-
trace_metadata: Optional[Dict[str, Any]] = None,
773+
keep_langfuse_trace_attributes: bool = False,
767774
) -> Optional[Dict[str, Any]]:
768775
final_dict = {}
769776
if tags is not None and len(tags) > 0:
770777
final_dict["tags"] = tags
771778
if metadata is not None:
772779
final_dict.update(metadata)
773-
if trace_metadata is not None:
774-
final_dict.update(trace_metadata)
775-
return _strip_langfuse_keys_from_dict(final_dict) if final_dict != {} else None
780+
781+
return (
782+
_strip_langfuse_keys_from_dict(final_dict, keep_langfuse_trace_attributes)
783+
if final_dict != {}
784+
else None
785+
)
776786

777787
def _convert_message_to_dict(self, message: BaseMessage) -> Dict[str, Any]:
778788
# assistant message
@@ -1027,12 +1037,17 @@ def _parse_model_name_from_metadata(metadata: Optional[Dict[str, Any]]) -> Any:
10271037
return metadata.get("ls_model_name", None)
10281038

10291039

1030-
def _strip_langfuse_keys_from_dict(metadata: Optional[Dict[str, Any]]) -> Any:
1040+
def _strip_langfuse_keys_from_dict(
1041+
metadata: Optional[Dict[str, Any]], keep_langfuse_trace_attributes: bool
1042+
) -> Any:
10311043
if metadata is None or not isinstance(metadata, dict):
10321044
return metadata
10331045

10341046
langfuse_metadata_keys = [
10351047
"langfuse_prompt",
1048+
]
1049+
1050+
langfuse_trace_attribute_keys = [
10361051
"langfuse_session_id",
10371052
"langfuse_user_id",
10381053
"langfuse_tags",
@@ -1043,4 +1058,8 @@ def _strip_langfuse_keys_from_dict(metadata: Optional[Dict[str, Any]]) -> Any:
10431058
for key in langfuse_metadata_keys:
10441059
metadata_copy.pop(key, None)
10451060

1061+
if not keep_langfuse_trace_attributes:
1062+
for key in langfuse_trace_attribute_keys:
1063+
metadata_copy.pop(key, None)
1064+
10461065
return metadata_copy

langfuse/openai.py

Lines changed: 21 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -23,7 +23,7 @@
2323
from dataclasses import dataclass
2424
from datetime import datetime
2525
from inspect import isclass
26-
from typing import Optional, cast, Any
26+
from typing import Any, Optional, cast
2727

2828
from openai._types import NotGiven
2929
from packaging.version import Version
@@ -161,6 +161,22 @@ class OpenAiDefinition:
161161
sync=False,
162162
min_version="1.66.0",
163163
),
164+
OpenAiDefinition(
165+
module="openai.resources.responses",
166+
object="Responses",
167+
method="parse",
168+
type="chat",
169+
sync=True,
170+
min_version="1.66.0",
171+
),
172+
OpenAiDefinition(
173+
module="openai.resources.responses",
174+
object="AsyncResponses",
175+
method="parse",
176+
type="chat",
177+
sync=False,
178+
min_version="1.66.0",
179+
),
164180
]
165181

166182

@@ -570,7 +586,10 @@ def _extract_streamed_openai_response(resource: Any, chunks: Any) -> Any:
570586
)
571587
curr["arguments"] += getattr(tool_call_chunk, "arguments", "")
572588

573-
elif delta.get("tool_calls", None) is not None and len(delta.get("tool_calls")) > 0:
589+
elif (
590+
delta.get("tool_calls", None) is not None
591+
and len(delta.get("tool_calls")) > 0
592+
):
574593
curr = completion["tool_calls"]
575594
tool_call_chunk = getattr(
576595
delta.get("tool_calls", None)[0], "function", None

langfuse/version.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,3 +1,3 @@
11
"""@private"""
22

3-
__version__ = "3.2.3"
3+
__version__ = "3.2.4"

pyproject.toml

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,7 +1,7 @@
11
[tool.poetry]
22
name = "langfuse"
33

4-
version = "3.2.3"
4+
version = "3.2.4"
55
description = "A client library for accessing langfuse"
66
authors = ["langfuse <developers@langfuse.com>"]
77
license = "MIT"

0 commit comments

Comments (0)