feat: adds decorator implementation (#425)
* feat: adds decorator implementation

* fix tests

* adds async and generation support

* adds trycatch around wrapper operations

* adds langfuse singleton

* adds get_current_observation_id

* removes unused imports

* refactor: exception handling

* docs: make pep compliant

* chore: v2.20.0a0

* fix: missing output if result is None

* chore: renames methods to private

* rename update methods

* renames to observe and langfuse_context

* adds scoring to decorators

* adds public option to trace params

* chore: v2.20.0a1

* chore: v2.20.0a2

* chore: v2.20.0a3

* add LangfuseDecorator to init to add it to docs

* fix: make llama-index optional in decorators implementation

* chore: v2.20.0a4

* fix: handle function args that are circular refs

* feat: adds capture_io param

* fix: remove wrong comments

* chore: v2.20.0a5

* fix: handle tuples in serialization

* chore: add pytest vscode support

* remove self argument of instance methods from logging

* split capture_io

* adds support for returned generators

* chore: update precommit hook to use ruff lint

* adds public option to llamaindex trace params

* refactor: async gen handling

* adds decorator support for nested openai integration

* add reset method to langfuse singleton

* remove print statements

* remove log_warning argument

* chore: v2.20.2a3

* fixes resetting of observation params on multiple update calls

* fix: allow observation scoring on trace level

* fixes score test

* feat: create wrapper trace if top level is generation

---------

Co-authored-by: Max Deichmann <[email protected]>
hassiebp and maxdeichmann authored Mar 20, 2024
1 parent 3880103 commit e92f305
Showing 17 changed files with 2,040 additions and 105 deletions.
14 changes: 11 additions & 3 deletions .pre-commit-config.yaml
@@ -1,5 +1,13 @@
 repos:
-  - repo: https://github.com/psf/black
-    rev: 23.3.0
+  - repo: https://github.com/astral-sh/ruff-pre-commit
+    rev: v0.3.2
     hooks:
-      - id: black
+      # Run the linter and fix
+      - id: ruff
+        types_or: [ python, pyi, jupyter ]
+        args: [ --fix, --config=ci.ruff.toml ]
+
+      # Run the formatter.
+      - id: ruff-format
+        types_or: [ python, pyi, jupyter ]

9 changes: 6 additions & 3 deletions .vscode/settings.json
@@ -1,7 +1,7 @@
{
"[python]": {
"editor.formatOnSave": true,
"editor.defaultFormatter": "charliermarsh.ruff",
"editor.defaultFormatter": "charliermarsh.ruff"
// "editor.codeActionsOnSave": {
// "source.fixAll": "explicit",
// "source.organizeImports": "explicit"
@@ -13,5 +13,8 @@
},
"prettier.documentSelectors": [
"**/*.{cjs,mjs,ts,tsx,astro,md,mdx,json,yaml,yml}"
-  ]
-}
+  ],
+  "python.testing.pytestArgs": ["tests"],
+  "python.testing.unittestEnabled": false,
+  "python.testing.pytestEnabled": true
+}
30 changes: 13 additions & 17 deletions langfuse/client.py
Expand Up @@ -5,7 +5,7 @@
import uuid
import httpx
from enum import Enum
-from typing import Any, Literal, Optional
+from typing import Any, Optional

from langfuse.api.resources.ingestion.types.create_event_body import CreateEventBody
from langfuse.api.resources.ingestion.types.create_generation_body import (
@@ -47,6 +47,7 @@
from langfuse.model import Dataset, MapValue, Observation, TraceWithFullDetails
from langfuse.request import LangfuseClient
from langfuse.task_manager import TaskManager
+from langfuse.types import SpanLevel
from langfuse.utils import _convert_usage_input, _create_prompt_context, _get_timestamp

from .version import __version__ as version
@@ -97,7 +98,7 @@ def __init__(
flush_interval: float = 0.5,
max_retries: int = 3,
timeout: int = 10, # seconds
-sdk_integration: str = "default",
+sdk_integration: Optional[str] = "default",
httpx_client: Optional[httpx.Client] = None,
):
"""Initialize the Langfuse client.
@@ -783,7 +784,7 @@ def span(
start_time: typing.Optional[dt.datetime] = None,
end_time: typing.Optional[dt.datetime] = None,
metadata: typing.Optional[typing.Any] = None,
-level: typing.Optional[Literal["DEBUG", "DEFAULT", "WARNING", "ERROR"]] = None,
+level: typing.Optional[SpanLevel] = None,
status_message: typing.Optional[str] = None,
input: typing.Optional[typing.Any] = None,
output: typing.Optional[typing.Any] = None,
@@ -889,7 +890,7 @@ def event(
metadata: typing.Optional[typing.Any] = None,
input: typing.Optional[typing.Any] = None,
output: typing.Optional[typing.Any] = None,
-level: typing.Optional[Literal["DEBUG", "DEFAULT", "WARNING", "ERROR"]] = None,
+level: typing.Optional[SpanLevel] = None,
status_message: typing.Optional[str] = None,
version: typing.Optional[str] = None,
**kwargs,
@@ -986,7 +987,7 @@ def generation(
end_time: typing.Optional[dt.datetime] = None,
completion_start_time: typing.Optional[dt.datetime] = None,
metadata: typing.Optional[typing.Any] = None,
-level: typing.Optional[Literal["DEBUG", "DEFAULT", "WARNING", "ERROR"]] = None,
+level: typing.Optional[SpanLevel] = None,
status_message: typing.Optional[str] = None,
version: typing.Optional[str] = None,
model: typing.Optional[str] = None,
@@ -1246,7 +1247,7 @@ def generation(
start_time: typing.Optional[dt.datetime] = None,
end_time: typing.Optional[dt.datetime] = None,
metadata: typing.Optional[typing.Any] = None,
-level: typing.Optional[Literal["DEBUG", "DEFAULT", "WARNING", "ERROR"]] = None,
+level: typing.Optional[SpanLevel] = None,
status_message: typing.Optional[str] = None,
version: typing.Optional[str] = None,
completion_start_time: typing.Optional[dt.datetime] = None,
@@ -1359,7 +1360,7 @@ def span(
metadata: typing.Optional[typing.Any] = None,
input: typing.Optional[typing.Any] = None,
output: typing.Optional[typing.Any] = None,
-level: typing.Optional[Literal["DEBUG", "DEFAULT", "WARNING", "ERROR"]] = None,
+level: typing.Optional[SpanLevel] = None,
status_message: typing.Optional[str] = None,
version: typing.Optional[str] = None,
**kwargs,
@@ -1524,7 +1525,7 @@ def event(
metadata: typing.Optional[typing.Any] = None,
input: typing.Optional[typing.Any] = None,
output: typing.Optional[typing.Any] = None,
-level: typing.Optional[Literal["DEBUG", "DEFAULT", "WARNING", "ERROR"]] = None,
+level: typing.Optional[SpanLevel] = None,
status_message: typing.Optional[str] = None,
version: typing.Optional[str] = None,
**kwargs,
@@ -1638,7 +1639,7 @@ def update(
end_time: typing.Optional[dt.datetime] = None,
completion_start_time: typing.Optional[dt.datetime] = None,
metadata: typing.Optional[typing.Any] = None,
-level: typing.Optional[Literal["DEBUG", "DEFAULT", "WARNING", "ERROR"]] = None,
+level: typing.Optional[SpanLevel] = None,
status_message: typing.Optional[str] = None,
version: typing.Optional[str] = None,
model: typing.Optional[str] = None,
@@ -1739,7 +1740,7 @@ def end(
end_time: typing.Optional[dt.datetime] = None,
completion_start_time: typing.Optional[dt.datetime] = None,
metadata: typing.Optional[typing.Any] = None,
-level: typing.Optional[Literal["DEBUG", "DEFAULT", "WARNING", "ERROR"]] = None,
+level: typing.Optional[SpanLevel] = None,
status_message: typing.Optional[str] = None,
version: typing.Optional[str] = None,
model: typing.Optional[str] = None,
@@ -1841,7 +1842,7 @@ def update(
metadata: typing.Optional[typing.Any] = None,
input: typing.Optional[typing.Any] = None,
output: typing.Optional[typing.Any] = None,
-level: typing.Optional[Literal["DEBUG", "DEFAULT", "WARNING", "ERROR"]] = None,
+level: typing.Optional[SpanLevel] = None,
status_message: typing.Optional[str] = None,
version: typing.Optional[str] = None,
**kwargs,
@@ -1924,7 +1925,7 @@ def end(
metadata: typing.Optional[typing.Any] = None,
input: typing.Optional[typing.Any] = None,
output: typing.Optional[typing.Any] = None,
-level: typing.Optional[Literal["DEBUG", "DEFAULT", "WARNING", "ERROR"]] = None,
+level: typing.Optional[SpanLevel] = None,
status_message: typing.Optional[str] = None,
version: typing.Optional[str] = None,
**kwargs,
@@ -1998,11 +1999,6 @@ def get_langchain_handler(self):

return CallbackHandler(stateful_client=self)

-def get_llama_index_handler(self):
-    from langfuse.llama_index import LlamaIndexCallbackHandler
-
-    return LlamaIndexCallbackHandler(stateful_client=self)
-

class StatefulTraceClient(StatefulClient):
"""Class for handling stateful operations of traces in the Langfuse system. Inherits from StatefulClient.
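
Throughout `langfuse/client.py`, the inline `Literal["DEBUG", "DEFAULT", "WARNING", "ERROR"]` annotation is replaced by the `SpanLevel` alias imported from `langfuse.types`. The alias itself is defined in `langfuse/types`, which is not shown on this page; a minimal sketch of what it presumably looks like, inferred from the annotations it replaces, is:

```python
# Assumed definition of the SpanLevel alias referenced by the client.py diff above.
# The real definition lives in langfuse/types and is not shown in this commit,
# so treat this as a sketch rather than the verbatim source.
from typing import Literal

SpanLevel = Literal["DEBUG", "DEFAULT", "WARNING", "ERROR"]
```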
3 changes: 3 additions & 0 deletions langfuse/decorators/__init__.py
@@ -0,0 +1,3 @@
+from .langfuse_decorator import langfuse_context, observe, LangfuseDecorator
+
+__all__ = ["langfuse_context", "observe", "LangfuseDecorator"]
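
The commit messages above describe the new decorator API exported here: an `observe` decorator that wraps sync, async, and generator functions, and a `langfuse_context` object for updating and scoring the currently active observation. A minimal usage sketch follows; the keyword arguments and `langfuse_context` method names are inferred from the commit messages rather than verified against the full diff, and Langfuse credentials are assumed to be provided via environment variables.

```python
# Hedged sketch of the decorator API added in this commit. Parameter and method
# names (as_type, update_current_observation, score_current_trace) are assumptions
# based on the commit messages, not the diff shown above. Expects LANGFUSE_PUBLIC_KEY,
# LANGFUSE_SECRET_KEY, and LANGFUSE_HOST in the environment.
from langfuse.decorators import langfuse_context, observe


@observe()  # wraps the function in a span and captures its input/output
def fetch_context(query: str) -> str:
    return f"context for {query}"


@observe(as_type="generation")  # presumably marks this observation as a generation
def generate_answer(query: str) -> str:
    context = fetch_context(query)  # nested call becomes a child observation
    answer = f"answer based on {context}"
    # Update and score the current observation/trace via langfuse_context
    # (assumed names based on "rename update methods" / "adds scoring to decorators").
    langfuse_context.update_current_observation(output=answer)
    langfuse_context.score_current_trace(name="relevance", value=0.9)
    return answer


if __name__ == "__main__":
    print(generate_answer("What does the observe decorator do?"))
```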