-
Notifications
You must be signed in to change notification settings - Fork 1
/
Copy pathbot.py
116 lines (99 loc) · 4.82 KB
/
bot.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
import plotly
import streamlit as st
from langchain_core.messages import AIMessage, HumanMessage
import asyncio
from utils import get_version
from graph_agent import GraphEcoToxFredAgent
from astream_events_handler import invoke_our_graph
# --- Page-level setup --------------------------------------------------------

# Shown in the app's "About" menu entry.
about_text = f"""
**EcoToxFred v{get_version()}** — a Neo4j-backed Chatbot discussing environmental monitoring and hazard data.
Contact:
- Jana Schor [email protected]
- Patrick Scheibe [email protected]
EcoToxFred may occasionally provide incorrect or incomplete information.
Always carefully verify the returned results before relying on them.
"""

st.set_page_config(
    page_title="EcoToxFred",
    page_icon="figures/assistant.png",
    layout='centered',
    menu_items={"about": about_text},
)

# Canned questions offered as one sidebar button each.
example_questions = [
    "EcoToxFred, what is your expertise? Structure your response in bullet points.",
    "What is Diuron and where has it been measured?",
    "What is Triclosan? Has it been measured in European freshwater?",
    "Show the ratioTU distribution for algae along the Danube (2010–2015).",
    "Find the 10 most frequent multiple risk drivers.",
    "For Citalopram, provide the name of the sampling site and the measurement time point as a table?"
]

# One-time session initialisation: build the LLM agent and seed the chat
# history. Guarded by the "initialized" flag so reruns keep existing state.
if "initialized" not in st.session_state:
    initial_state = {
        "initialized": True,
        "chat_agent": GraphEcoToxFredAgent(),
        "messages": [AIMessage(content="Hi, I'm EcoToxFred! How can I help you?")],
        "figure_numbers": 0,
        "example_question": None,
    }
    for state_key, state_value in initial_state.items():
        st.session_state[state_key] = state_value
def generate_response(query: str):
    """
    Generate the assistant's response to a user query.

    Appends the query to the session's message history, renders it in the
    chat UI, runs the LangGraph agent asynchronously, and appends/streams
    the agent's answer. On failure, an apologetic AIMessage is appended
    instead so the conversation stays consistent.

    :param query: The user's input message that will be processed and sent
        to the chat assistant.
    """
    st.session_state.messages.append(HumanMessage(content=query))
    st.chat_message("user", avatar="figures/user.png").write(query)
    with st.chat_message("assistant", avatar="figures/assistant.png"):
        # Placeholder container: streamed tokens and intermediate graph
        # events are rendered here as they arrive.
        placeholder = st.container()
        try:
            response = asyncio.run(invoke_our_graph(
                st.session_state.chat_agent,
                st.session_state.messages,
                placeholder))
            st.session_state.messages.append(response)
        except Exception as e:
            # NOTE(review): this catches *every* failure, not only OpenAI
            # API errors, yet reports all of them as API errors — consider
            # narrowing the exception type.
            print(f'[OpenAI API] {e}')
            st.session_state.messages.append(AIMessage(
                content=f"There was an OpenAI API connection error: {e}. "
                        f"This may happen if you hit OpenAI API's rate limit "
                        f"(the number of requests sent in a specific time interval). "
                        f"Please try again in a few seconds."))
def handle_example_question(example_question):
    """Button callback: queue *example_question* so the next rerun answers it."""
    st.session_state["example_question"] = example_question
# Sidebar: branding, a short description, and one button per example question.
with st.sidebar:
    st.image("figures/UFZ_MPG_Logo.svg")
    st.header(f"EcoToxFred (v{get_version()})", divider=True)
    st.markdown(
        "A Chatbot for discussing environmental monitoring and hazard "
        "data collected in a large knowledge graph and stored in a Neo4j Graph Database."
    )
    st.header("Example Questions", divider=True)
    # Clicking a button queues the question via the on_click callback; the
    # question is then consumed on the next script rerun.
    for position, question_text in enumerate(example_questions):
        st.button(
            question_text,
            key=f"example_question_{position}",
            on_click=handle_example_question,
            args=(question_text,),
        )
# Display messages in Session State.
# https://docs.streamlit.io/develop/api-reference/chat/st.chat_message
# Messages are stored as AIMessage/HumanMessage because that is what
# LangGraph consumes directly.
for msg in st.session_state.messages:
    if isinstance(msg, AIMessage):
        with st.chat_message("assistant", avatar="figures/assistant.png"):
            # Pydantic's `model_extra` is None when the message carries no
            # extra fields, so guard before the membership test (the previous
            # `.keys()` call would raise AttributeError in that case).
            if msg.model_extra and "artifact" in msg.model_extra:
                st.session_state.figure_numbers += 1
                fig = plotly.io.from_json(msg.artifact)
                st.plotly_chart(
                    fig,
                    key=f"plotly_chart_{st.session_state.figure_numbers:04d}",
                    use_container_width=True,
                    config={'displayModeBar': False})
            st.write(msg.content)
    elif isinstance(msg, HumanMessage):
        st.chat_message("user", avatar="figures/user.png").write(msg.content)
# Replay a queued example question (set by a sidebar button click), then
# clear the queue so it fires only once.
queued_question = st.session_state.example_question
if queued_question:
    generate_response(queued_question)
    st.session_state.example_question = None

# Free-form question typed into the chat box at the bottom of the page.
typed_question = st.chat_input()
if typed_question:
    generate_response(typed_question)