feat: add langfuse integration #83
Changes to the POML tracing module:

```diff
@@ -25,23 +25,27 @@
 _weave_enabled: bool = False
 _agentops_enabled: bool = False
 _mlflow_enabled: bool = False
+_langfuse_enabled: bool = False
 _trace_log: List[Dict[str, Any]] = []
 _trace_dir: Optional[Path] = None

-Backend = Literal["local", "weave", "agentops", "mlflow"]
+Backend = Literal["local", "weave", "agentops", "mlflow", "langfuse"]
 OutputFormat = Literal["raw", "dict", "openai_chat", "langchain", "pydantic"]


 def set_trace(
-    enabled: bool | List[Backend] | Backend = True, /, *, trace_dir: Optional[str | Path] = None
+    enabled: bool | List[Backend] | Backend = True,
+    /,
+    *,
+    trace_dir: Optional[str | Path] = None,
 ) -> Optional[Path]:
     """Enable or disable tracing of ``poml`` calls with optional backend integrations.

     Args:
         enabled: Controls which tracing backends to enable. Can be:
             - True: Enable local tracing only (equivalent to ["local"])
             - False: Disable all tracing (equivalent to [])
-            - str: Enable a single backend ("local", "weave", "agentops", "mlflow")
+            - str: Enable a single backend ("local", "weave", "agentops", "mlflow", "langfuse")
             - List[str]: Enable multiple backends. "local" is auto-enabled if any backends are specified.
         trace_dir: Optional directory for local trace files. If provided when local
             tracing is enabled, a subdirectory named by the current timestamp
@@ -57,6 +61,7 @@ def set_trace(
         - "weave": Log to Weights & Biases Weave (requires local tracing)
         - "agentops": Log to AgentOps (requires local tracing)
         - "mlflow": Log to MLflow (requires local tracing)
+        - "langfuse": Log to Langfuse (requires local tracing)
     """

     if enabled is True:
@@ -67,7 +72,7 @@ def set_trace(
     if isinstance(enabled, str):
         enabled = [enabled]

-    global _trace_enabled, _trace_dir, _weave_enabled, _agentops_enabled, _mlflow_enabled
+    global _trace_enabled, _trace_dir, _weave_enabled, _agentops_enabled, _mlflow_enabled, _langsmith_enabled, _langfuse_enabled
     if enabled or "local" in enabled:
         # When enabled is non-empty, we always enable local tracing.
         _trace_enabled = True
@@ -104,6 +109,11 @@ def set_trace(
     else:
         _mlflow_enabled = False

+    if "langfuse" in enabled:
+        _langfuse_enabled = True
+    else:
+        _langfuse_enabled = False
+
     return _trace_dir
```
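For orientation, a minimal usage sketch of the updated `set_trace`, mirroring the example later in this PR (the `"logs"` directory name is just an illustration):

```python
import poml

# Enable the Langfuse backend; per the docstring, "local" tracing is
# auto-enabled whenever any backend is specified. Returns the trace directory.
trace_dir = poml.set_trace("langfuse", trace_dir="logs")

# Turn all tracing off again.
poml.set_trace(False)
```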
@@ -225,7 +235,9 @@ def _poml_response_to_openai_chat(messages: List[PomlMessage]) -> List[Dict[str, | |||||
| contents.append( | ||||||
| { | ||||||
| "type": "image_url", | ||||||
| "image_url": {"url": f"data:{content_part.type};base64,{content_part.base64}"}, | ||||||
| "image_url": { | ||||||
| "url": f"data:{content_part.type};base64,{content_part.base64}" | ||||||
| }, | ||||||
| } | ||||||
| ) | ||||||
| else: | ||||||
|
|
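The rewrapped expression builds an OpenAI-style image content part from a base64 attachment; for a PNG it would look roughly like this (values are illustrative, not from the PR):

```python
# Hypothetical output for one PNG attachment:
image_part = {
    "type": "image_url",
    "image_url": {"url": "data:image/png;base64,iVBORw0KGgoAAAANSUhEUg..."},
}
```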
@@ -242,7 +254,9 @@ def _poml_response_to_langchain(messages: List[PomlMessage]) -> List[Dict[str, A | |||||
| langchain_messages = [] | ||||||
| for msg in messages: | ||||||
| if isinstance(msg.content, str): | ||||||
| langchain_messages.append({"type": msg.speaker, "data": {"content": msg.content}}) | ||||||
| langchain_messages.append( | ||||||
| {"type": msg.speaker, "data": {"content": msg.content}} | ||||||
| ) | ||||||
| elif isinstance(msg.content, list): | ||||||
| content_parts = [] | ||||||
| for content_part in msg.content: | ||||||
|
|
@@ -259,7 +273,9 @@ def _poml_response_to_langchain(messages: List[PomlMessage]) -> List[Dict[str, A | |||||
| ) | ||||||
| else: | ||||||
| raise ValueError(f"Unexpected content part: {content_part}") | ||||||
| langchain_messages.append({"type": msg.speaker, "data": {"content": content_parts}}) | ||||||
| langchain_messages.append( | ||||||
| {"type": msg.speaker, "data": {"content": content_parts}} | ||||||
| ) | ||||||
| else: | ||||||
| raise ValueError(f"Unexpected content type: {type(msg.content)}") | ||||||
| return langchain_messages | ||||||
|
|
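For comparison, the dicts emitted by `_poml_response_to_langchain` follow LangChain's serialized-message shape; illustrative values (not from the PR):

```python
# A plain-text message and a multimodal one, as this converter would emit them:
text_message = {"type": "human", "data": {"content": "Hello!"}}
multimodal_message = {"type": "human", "data": {"content": [{"type": "text", "text": "Hello!"}]}}
```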
@@ -434,24 +450,24 @@ def poml( | |||||
| # Do nothing | ||||||
| pass | ||||||
| else: | ||||||
| result = json.loads(result) | ||||||
| if isinstance(result, dict) and "messages" in result: | ||||||
| result_dict = json.loads(result) | ||||||
| if isinstance(result_dict, dict) and "messages" in result_dict: | ||||||
| # The new versions will always return a dict with "messages" key. | ||||||
| result = result["messages"] | ||||||
| result_dict = result_dict["messages"] | ||||||
| if format != "dict": | ||||||
| # Continue to validate the format. | ||||||
| if chat: | ||||||
| pydantic_result = [PomlMessage(**item) for item in result] | ||||||
| pydantic_result = [PomlMessage(**item) for item in result_dict] | ||||||
| else: | ||||||
| # TODO: Make it a RichContent object | ||||||
| pydantic_result = [PomlMessage(speaker="human", content=result)] | ||||||
| pydantic_result = [PomlMessage(speaker="human", content=result_dict)] # type: ignore | ||||||
|
|
||||||
| if format == "pydantic": | ||||||
| return pydantic_result | ||||||
| result = pydantic_result | ||||||
| elif format == "openai_chat": | ||||||
| return _poml_response_to_openai_chat(pydantic_result) | ||||||
| result = _poml_response_to_openai_chat(pydantic_result) | ||||||
| elif format == "langchain": | ||||||
| return _poml_response_to_langchain(pydantic_result) | ||||||
| result = _poml_response_to_langchain(pydantic_result) | ||||||
| else: | ||||||
| raise ValueError(f"Unknown output format: {format}") | ||||||
|
|
||||||
|
|
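Note on this hunk: the early `return` statements become assignments to `result`, so execution now falls through to the backend-logging blocks below and converted outputs reach Weave/AgentOps/MLflow/Langfuse for every format rather than only `raw`. The new `result_dict` name keeps the parsed JSON separate from the final converted `result`.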
@@ -461,7 +477,9 @@ def poml( | |||||
| trace_prefix = _latest_trace_prefix() | ||||||
| current_version = _current_trace_version() | ||||||
| if trace_prefix is None or current_version is None: | ||||||
| raise RuntimeError("Weave tracing requires local tracing to be enabled.") | ||||||
| raise RuntimeError( | ||||||
| "Weave tracing requires local tracing to be enabled." | ||||||
| ) | ||||||
| poml_content = _read_latest_traced_file(".poml") | ||||||
| context_content = _read_latest_traced_file(".context.json") | ||||||
| stylesheet_content = _read_latest_traced_file(".stylesheet.json") | ||||||
|
|
@@ -480,7 +498,9 @@ def poml( | |||||
| trace_prefix = _latest_trace_prefix() | ||||||
| current_version = _current_trace_version() | ||||||
| if trace_prefix is None or current_version is None: | ||||||
| raise RuntimeError("AgentOps tracing requires local tracing to be enabled.") | ||||||
| raise RuntimeError( | ||||||
| "AgentOps tracing requires local tracing to be enabled." | ||||||
| ) | ||||||
| poml_content = _read_latest_traced_file(".poml") | ||||||
| context_content = _read_latest_traced_file(".context.json") | ||||||
| stylesheet_content = _read_latest_traced_file(".stylesheet.json") | ||||||
|
|
@@ -498,7 +518,9 @@ def poml( | |||||
| trace_prefix = _latest_trace_prefix() | ||||||
| current_version = _current_trace_version() | ||||||
| if trace_prefix is None or current_version is None: | ||||||
| raise RuntimeError("MLflow tracing requires local tracing to be enabled.") | ||||||
| raise RuntimeError( | ||||||
| "MLflow tracing requires local tracing to be enabled." | ||||||
| ) | ||||||
| poml_content = _read_latest_traced_file(".poml") | ||||||
| context_content = _read_latest_traced_file(".context.json") | ||||||
| stylesheet_content = _read_latest_traced_file(".stylesheet.json") | ||||||
|
|
@@ -510,8 +532,28 @@ def poml( | |||||
| result, | ||||||
| ) | ||||||
|
|
||||||
| if _langfuse_enabled: | ||||||
| from .integration import langfuse | ||||||
|
|
||||||
| trace_prefix = _latest_trace_prefix() | ||||||
| current_version = _current_trace_version() | ||||||
| if trace_prefix is None or current_version is None: | ||||||
| raise RuntimeError( | ||||||
| "Langfuse tracing requires local tracing to be enabled." | ||||||
| ) | ||||||
| poml_content = _read_latest_traced_file(".poml") | ||||||
| context_content = _read_latest_traced_file(".context.json") | ||||||
| stylesheet_content = _read_latest_traced_file(".stylesheet.json") | ||||||
| langfuse.log_poml_call( | ||||||
| trace_prefix.name, | ||||||
| poml_content or str(markup), | ||||||
| json.loads(context_content) if context_content else None, | ||||||
| json.loads(stylesheet_content) if stylesheet_content else None, | ||||||
| result, | ||||||
| ) | ||||||
|
|
||||||
| if trace_record is not None: | ||||||
| trace_record["result"] = result | ||||||
| trace_record["result"] = result_dict | ||||||
|
Suggested change on the last line:

```diff
-        trace_record["result"] = result_dict
+        trace_record["result"] = result if format == "raw" else result_dict
```
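If the `pass` branch above corresponds to `format == "raw"`, this suggestion guards that path: there `result_dict` is never assigned, so storing it in the trace record would raise a `NameError`; falling back to `result` records the raw string instead.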
New module, `integration/langfuse.py` (path inferred from the `from .integration import langfuse` import above):

```python
from __future__ import annotations

from typing import Any
from langfuse import get_client, observe


def log_poml_call(
    name: str, prompt: str, context: dict | None, stylesheet: dict | None, result: Any
) -> Any:
    """Log the entire poml call to Langfuse."""
    client = get_client()

    @observe(name=name)
    def poml(prompt, context, stylesheet):
        client.update_current_generation(prompt=prompt_client)
        return result

    prompt_client = client.create_prompt(
        name=name,
        type="text",
        prompt=prompt
    )
```
A suggested change reorders the body so `prompt_client` is defined before the decorated closure that captures it:

```python
    prompt_client = client.create_prompt(
        name=name,
        type="text",
        prompt=prompt
    )
    @observe(name=name)
    def poml(prompt, context, stylesheet):
        client.update_current_generation(prompt=prompt_client)
        return result
```
Copilot AI commented on Aug 14, 2025:
The function does not return the result of the poml call, but the function signature suggests it should return `Any`. Add `return` before the function call.

```diff
-    poml(prompt=prompt, context=context, stylesheet=stylesheet)
+    return poml(prompt=prompt, context=context, stylesheet=stylesheet)
```
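Applying both suggestions yields roughly the following module — a sketch, not the committed code. One caveat the review does not raise: `@observe` creates a span by default, so `update_current_generation` may additionally need `as_type="generation"` to take effect.

```python
from __future__ import annotations

from typing import Any

from langfuse import get_client, observe


def log_poml_call(
    name: str, prompt: str, context: dict | None, stylesheet: dict | None, result: Any
) -> Any:
    """Log the entire poml call to Langfuse."""
    client = get_client()

    # Register the rendered prompt before the closure that references it.
    prompt_client = client.create_prompt(name=name, type="text", prompt=prompt)

    @observe(name=name)  # may need as_type="generation" for the update below
    def poml(prompt, context, stylesheet):
        # Attach the prompt object to the current observation.
        client.update_current_generation(prompt=prompt_client)
        return result

    # Return the observed result so callers get the value back.
    return poml(prompt=prompt, context=context, stylesheet=stylesheet)
```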
New example: calling OpenAI through Langfuse's drop-in wrapper (file name not shown in this view):

```python
import os
from langfuse.openai import openai

client = openai.OpenAI(
    base_url=os.environ["OPENAI_API_BASE"],
    api_key=os.environ["OPENAI_API_KEY"],
)

completion = client.chat.completions.create(
    name="test-chat",
    model="gpt-4.1-mini",
    messages=[
        {
            "role": "system",
            "content": "You are a very accurate calculator. You output only the result of the calculation.",
        },
        {"role": "user", "content": "1 + 1 = "},
    ],
    metadata={"someMetadataKey": "someValue"},
)

print(completion)
```
New example: rendering a POML prompt with Langfuse tracing enabled (file name not shown in this view):

```python
import os
import poml
from langfuse.openai import openai

client = openai.OpenAI(
    base_url=os.environ["OPENAI_API_BASE"],
    api_key=os.environ["OPENAI_API_KEY"],
)

poml.set_trace("langfuse", trace_dir="logs")

messages = poml.poml("example_poml.poml", context={"code_path": "example_agentops_original.py"}, format="openai_chat")

response = client.chat.completions.create(
    model="gpt-4o-mini",
    messages=messages,
)

print(response)
```
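Per the `set_trace` docstring earlier in this diff, passing `"langfuse"` auto-enables local tracing, which the Langfuse logging block in `poml()` requires; `trace_dir="logs"` chooses where those local trace files land.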
A further review comment flags the `global` statement in `set_trace`: "The variable `_langsmith_enabled` is referenced in the global declaration but is not defined anywhere in the visible code. This will cause a NameError when the function is called."
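A minimal fix, assuming the name is simply a leftover (strictly, a bare `global` declaration fails only when the name is read, but the stray entry should be removed either way):

```diff
-    global _trace_enabled, _trace_dir, _weave_enabled, _agentops_enabled, _mlflow_enabled, _langsmith_enabled, _langfuse_enabled
+    global _trace_enabled, _trace_dir, _weave_enabled, _agentops_enabled, _mlflow_enabled, _langfuse_enabled
```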