Commit 463145c

linting
1 parent 754cdab commit 463145c

4 files changed (+20 −22 lines)


packages/core/ldai/client.py

Lines changed: 2 additions & 2 deletions
@@ -160,7 +160,7 @@ async def create_judge(
         try:
             # Overwrite reserved variables to ensure they remain as placeholders for judge evaluation
             extended_variables = dict(variables) if variables else {}
-
+
             # Warn if reserved variables are provided
             if variables:
                 if 'message_history' in variables:
@@ -171,7 +171,7 @@ async def create_judge(
                     self._logger.warning(
                         'Variable "response_to_evaluate" is reserved for judge evaluation and will be overwritten'
                     )
-
+
             extended_variables['message_history'] = '{{message_history}}'
             extended_variables['response_to_evaluate'] = '{{response_to_evaluate}}'
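
A minimal sketch of the overwrite behavior these two hunks surround; the variable names come from the context lines above, while the sample caller values are made up for illustration:

# Caller-supplied variables; 'message_history' here is made up to show the overwrite.
variables = {'message_history': 'caller value', 'topic': 'billing'}

# Same pattern as the context lines: copy the dict, then force the reserved keys
# back to their placeholder templates so judge evaluation can fill them in later.
extended_variables = dict(variables) if variables else {}
extended_variables['message_history'] = '{{message_history}}'
extended_variables['response_to_evaluate'] = '{{response_to_evaluate}}'

assert extended_variables == {
    'topic': 'billing',
    'message_history': '{{message_history}}',
    'response_to_evaluate': '{{response_to_evaluate}}',
}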

packages/core/ldai/providers/ai_provider.py

Lines changed: 1 addition & 1 deletion
@@ -22,7 +22,7 @@ class AIProvider(ABC):
     def __init__(self):
         """
         Initialize the AI provider.
-
+
         Creates a logger for this provider instance.
         """
         self._logger = logging.getLogger(f'{__name__}.{self.__class__.__name__}')
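
A small sketch of what the logger line above evaluates to; ExampleProvider is a hypothetical class used only for illustration, not part of this commit:

import logging

class ExampleProvider:  # hypothetical stand-in for a concrete AIProvider subclass
    def __init__(self):
        # Same pattern as the hunk above: module name plus the concrete class name.
        self._logger = logging.getLogger(f'{__name__}.{self.__class__.__name__}')

p = ExampleProvider()
# Inside ldai/providers/ai_provider.py this would yield a logger named
# 'ldai.providers.ai_provider.ExampleProvider'; run standalone it is '__main__.ExampleProvider'.
print(p._logger.name)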

packages/langchain/ldai/providers/langchain/__init__.py

Lines changed: 0 additions & 1 deletion
@@ -3,4 +3,3 @@
 from ldai.providers.langchain.langchain_provider import LangChainProvider
 
 __all__ = ['LangChainProvider']
-

packages/langchain/ldai/providers/langchain/langchain_provider.py

Lines changed: 17 additions & 18 deletions
@@ -16,14 +16,14 @@
 class LangChainProvider(AIProvider):
     """
     LangChain implementation of AIProvider.
-
+
     This provider integrates LangChain models with LaunchDarkly's tracking capabilities.
     """
 
     def __init__(self, llm: BaseChatModel):
         """
         Initialize the LangChain provider.
-
+
         :param llm: LangChain BaseChatModel instance
         """
         super().__init__()
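
A usage sketch for the constructor above: wrapping an existing LangChain chat model. The ChatOpenAI import and model name are assumptions for illustration only; any BaseChatModel should fit the signature shown:

from langchain_openai import ChatOpenAI  # assumed provider package, not referenced in this diff

from ldai.providers.langchain import LangChainProvider

llm = ChatOpenAI(model='gpt-4o-mini')  # assumed model name
provider = LangChainProvider(llm)
assert provider.get_chat_model() is llm
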
@@ -33,7 +33,7 @@ def __init__(self, llm: BaseChatModel):
     async def create(ai_config: AIConfigKind) -> 'LangChainProvider':
         """
         Static factory method to create a LangChain AIProvider from an AI configuration.
-
+
         :param ai_config: The LaunchDarkly AI configuration
         :return: Configured LangChainProvider instance
         """
@@ -43,7 +43,7 @@ async def create(ai_config: AIConfigKind) -> 'LangChainProvider':
     async def invoke_model(self, messages: List[LDMessage]) -> ChatResponse:
         """
         Invoke the LangChain model with an array of messages.
-
+
         :param messages: Array of LDMessage objects representing the conversation
         :return: ChatResponse containing the model's response
         """
@@ -92,7 +92,7 @@ async def invoke_structured_model(
     ) -> StructuredResponse:
         """
         Invoke the LangChain model with structured output support.
-
+
         :param messages: Array of LDMessage objects representing the conversation
         :param response_structure: Dictionary of output configurations keyed by output name
         :return: StructuredResponse containing the structured data
@@ -143,7 +143,7 @@ async def invoke_structured_model(
     def get_chat_model(self) -> BaseChatModel:
         """
         Get the underlying LangChain model instance.
-
+
         :return: The LangChain BaseChatModel instance
         """
         return self._llm
@@ -152,10 +152,10 @@ def get_chat_model(self) -> BaseChatModel:
     def map_provider(ld_provider_name: str) -> str:
         """
         Map LaunchDarkly provider names to LangChain provider names.
-
+
         This method enables seamless integration between LaunchDarkly's standardized
         provider naming and LangChain's naming conventions.
-
+
         :param ld_provider_name: LaunchDarkly provider name
         :return: LangChain provider name
         """
@@ -171,10 +171,10 @@ def map_provider(ld_provider_name: str) -> str:
     def get_ai_metrics_from_response(response: AIMessage) -> LDAIMetrics:
         """
         Get AI metrics from a LangChain provider response.
-
+
         This method extracts token usage information and success status from LangChain responses
         and returns a LaunchDarkly LDAIMetrics object.
-
+
         :param response: The response from the LangChain model
         :return: LDAIMetrics with success status and token usage
         """
@@ -196,10 +196,10 @@ def get_ai_metrics_from_response(response: AIMessage) -> LDAIMetrics:
     def convert_messages_to_langchain(messages: List[LDMessage]) -> List[BaseMessage]:
         """
         Convert LaunchDarkly messages to LangChain messages.
-
+
         This helper method enables developers to work directly with LangChain message types
         while maintaining compatibility with LaunchDarkly's standardized message format.
-
+
         :param messages: List of LDMessage objects
         :return: List of LangChain message objects
         """
@@ -219,10 +219,10 @@ def convert_messages_to_langchain(messages: List[LDMessage]) -> List[BaseMessage]:
     async def create_langchain_model(ai_config: AIConfigKind) -> BaseChatModel:
         """
         Create a LangChain model from an AI configuration.
-
+
         This public helper method enables developers to initialize their own LangChain models
         using LaunchDarkly AI configurations.
-
+
         :param ai_config: The LaunchDarkly AI configuration
         :return: A configured LangChain BaseChatModel
         """
@@ -242,15 +242,15 @@ async def create_langchain_model(ai_config: AIConfigKind) -> BaseChatModel:
         except ImportError:
             # Fallback for older versions or different import path
             from langchain.chat_models.universal import init_chat_model
-
+
         # Map provider name
         langchain_provider = LangChainProvider.map_provider(provider)
-
+
         # Create model configuration
         model_kwargs = {**parameters}
         if langchain_provider:
             model_kwargs['model_provider'] = langchain_provider
-
+
         # Initialize the chat model (init_chat_model may be async or sync)
         result = init_chat_model(model_name, **model_kwargs)
         # Handle both sync and async initialization
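
A sketch of the same init_chat_model pattern in isolation, including the sync/async handling hinted at by the comments above; the model name, provider, and parameters are placeholders, not values from this commit:

import inspect

from langchain.chat_models import init_chat_model  # primary import path; the hunk shows a fallback

async def build_chat_model():
    model_kwargs = {'temperature': 0.2}        # placeholder parameters
    model_kwargs['model_provider'] = 'openai'  # placeholder provider name
    result = init_chat_model('gpt-4o-mini', **model_kwargs)  # placeholder model name
    # init_chat_model is normally synchronous; awaiting only when a coroutine comes back
    # mirrors the "may be async or sync" comment in the hunk above.
    if inspect.isawaitable(result):
        result = await result
    return result
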
@@ -262,4 +262,3 @@ async def create_langchain_model(ai_config: AIConfigKind) -> BaseChatModel:
             'langchain package is required for LangChainProvider. '
             'Install it with: pip install langchain langchain-core'
         ) from e
-
