forked from sinaptik-ai/pandas-ai
-
Notifications
You must be signed in to change notification settings - Fork 0
/
Copy pathtest_langchain_llm.py
101 lines (76 loc) · 3.21 KB
/
test_langchain_llm.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
"""Unit tests for the base LLM class"""
import pytest
from langchain_community.chat_models import ChatOpenAI
from langchain_community.llms import OpenAI
from langchain_core.messages import AIMessage
from langchain_core.outputs import (
ChatGeneration,
GenerationChunk,
LLMResult,
)
from pandasai_langchain.langchain import LangchainLLM
from pandasai.core.prompts.base import BasePrompt
from pandasai.llm.base import LLM
class TestLangchainLLM:
    """Unit tests for the LangChain wrapper LLM class."""

    @pytest.fixture
    def langchain_llm(self):
        """A completion-style LangChain LLM with a stubbed ``generate()``."""

        class FakeOpenAI(OpenAI):
            # Annotated (like the chat fixture below) so the pydantic-based
            # LangChain model treats this as a field override, not an
            # untyped class attribute.
            openai_api_key: str = "fake_key"

            def generate(self, prompts, stop=None, run_manager=None, **kwargs):
                # Canned result — no network call is ever made.
                generation = GenerationChunk(text="Custom response")
                return LLMResult(generations=[[generation]])

        return FakeOpenAI()

    @pytest.fixture
    def langchain_chat_llm(self):
        """A chat-style LangChain LLM with a stubbed ``generate()``."""

        class FakeChatOpenAI(ChatOpenAI):
            openai_api_key: str = "fake_key"

            def generate(self, prompts, stop=None, run_manager=None, **kwargs):
                generation = ChatGeneration(
                    message=AIMessage(content="Custom response")
                )
                return LLMResult(generations=[[generation]])

        return FakeChatOpenAI()

    @pytest.fixture
    def prompt(self):
        """A minimal BasePrompt with a fixed template."""

        class MockBasePrompt(BasePrompt):
            template: str = "Hello"

        return MockBasePrompt()

    def test_langchain_llm_type(self, langchain_llm):
        """The wrapper reports 'langchain_' + the wrapped model's type."""
        langchain_wrapper = LangchainLLM(langchain_llm)
        assert langchain_wrapper.type == "langchain_openai"

    def test_langchain_model_call(self, langchain_llm, prompt):
        """call() on a completion model returns the stubbed generation text."""
        langchain_wrapper = LangchainLLM(langchain_llm)
        assert (
            langchain_wrapper.call(instruction=prompt, suffix="!") == "Custom response"
        )

    def test_langchain_chat_call(self, langchain_chat_llm, prompt):
        """call() on a chat model returns the stubbed message content."""
        langchain_wrapper = LangchainLLM(langchain_chat_llm)
        assert (
            langchain_wrapper.call(instruction=prompt, suffix="!") == "Custom response"
        )

    def test_agent_integration(self):
        """An Agent configured with a LangChain-style LLM exposes the wrapped type."""
        from unittest.mock import MagicMock, PropertyMock

        from pandasai.agent import Agent

        class FakeChatOpenAI(LLM):
            openai_api_key: str = "fake_key"

            @property
            def type(self) -> str:
                return "langchain_openai"

            def call(self, prompts, stop=None, run_manager=None, **kwargs):
                generation = ChatGeneration(
                    message=AIMessage(content="Custom response")
                )
                return LLMResult(generations=[[generation]])

        mock_langchain_llm = FakeChatOpenAI()
        # Expose `_llm_type` on the class via a PropertyMock so attribute
        # access goes through the descriptor protocol, as it would on a
        # real LangChain model. The class is test-local, so no leakage.
        type_property = PropertyMock(return_value="openai")
        type(mock_langchain_llm)._llm_type = type_property
        mock_langchain_llm.openai_api_key = "fake_key"
        # Instance-level override shadows the class-level `call` above so
        # the agent's call path receives a plain string.
        mock_langchain_llm.call = lambda instruction, suffix: "Custom response"

        agent = Agent(
            dfs=[MagicMock()],
            config={"llm": mock_langchain_llm},
            vectorstore=MagicMock(),
        )

        # Debug print removed — the assertion is the check.
        assert agent._state.config.llm.type == "langchain_openai"