-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy pathapp.py
More file actions
126 lines (89 loc) · 3.58 KB
/
app.py
File metadata and controls
126 lines (89 loc) · 3.58 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
from typing import List
import streamlit as st
from dotenv import load_dotenv
from langchain.callbacks import StreamlitCallbackHandler
from langchain.schema import ChatMessage, Document
from workshop_oai_qa.chain import AssistantMessage
from workshop_oai_qa.resources import conversation_chain
from workshop_oai_qa.utils import role_from_message
# Page configuration — must be the first Streamlit command executed on each run.
st.set_page_config(
    page_title='Luminis Azure OpenAI QA Workshop',
    layout='centered',
    # Restore the last-known sidebar state across reruns; default to expanded.
    initial_sidebar_state=st.session_state.get('sidebar_state', 'expanded')
)
# Persist the desired sidebar state so subsequent reruns keep it expanded.
st.session_state.sidebar_state = 'expanded'
def main():
    """Render the app: title, seed chat history, sidebar, and chat window."""
    st.title('Luminis Azure OpenAI QA Workshop')

    # Seed the conversation with a greeting on the very first run only.
    # (Fixed: redundant `.keys()` removed — `in` on session_state tests keys.)
    if "messages" not in st.session_state:
        st.session_state.messages = [ChatMessage(role='assistant', content='How may I help you?')]

    with st.sidebar:
        sidebar_content = st.empty()
        with sidebar_content.container():
            # Fixed user-facing typo: "it's" (it is) -> possessive "its".
            st.markdown('Click on a citation to view its content.')

    chat_window_container = st.container()

    # Stash the layout slots so click callbacks can render into them later.
    st.session_state.sidebar_content = sidebar_content
    st.session_state.chat_window_container = chat_window_container

    chat_window()
def chat_window():
    """Redraw the full message history and wait for new user input."""
    with st.session_state.chat_window_container:
        # One chat bubble per stored message, in order.
        for idx, msg in enumerate(st.session_state.messages):
            with st.chat_message(role_from_message(msg)):
                if not isinstance(msg, AssistantMessage):
                    st.write(msg.content)
                else:
                    # Assistant messages carry extra UI: follow-ups + citations.
                    st.write(msg.formatted_content)
                    followup_block(msg.follow_ups, id=idx)
                    citations_block(msg.citations, id=idx)

        # Prompt box at the bottom of the chat window.
        if prompt := st.chat_input(disabled=False):
            on_chat_input(prompt)
def followup_block(follow_ups: List[str], id=None):
    """Render one secondary button per suggested follow-up question.

    `id` is the index of the owning message, used to keep widget keys unique.
    """
    for index, suggestion in enumerate(follow_ups):
        widget_key = f'followup-{id}-{index}'
        clicked = st.button(suggestion, type='secondary', key=widget_key)
        if clicked:
            on_followup_click(suggestion)
def citations_block(citations: List[Document], id=None):
    """Render an expandable list of source citations as clickable buttons.

    Args:
        citations: Source documents backing the assistant's answer.
        id: Index of the owning message, used to keep widget keys unique.
    """
    if not citations:
        return
    with st.expander(f"{len(citations)} references"):
        # Fixed: number citations from 1 for readers (original started at 0).
        for j, citation in enumerate(citations, start=1):
            if st.button(
                f'🔗 {j}. {citation.metadata["source"]}',
                key=f'citation-{id}-{j}',
            ):
                on_citation_click(citation)
def on_followup_click(follow_up):
    """Treat a clicked follow-up suggestion exactly like typed chat input."""
    container = st.session_state.chat_window_container
    with container:
        on_chat_input(follow_up)
def on_citation_click(citation):
    """Show the clicked citation's full text in the sidebar."""
    sidebar = st.session_state.sidebar_content
    with sidebar.container():
        # Citation content may contain HTML markup, so allow it through.
        st.markdown(citation.page_content, unsafe_allow_html=True)
def on_chat_input(prompt):
    """Append the user's prompt, generate a reply, and rerun to redraw."""
    user_message = ChatMessage(role='user', content=prompt)
    st.session_state.messages.append(user_message)
    with st.chat_message("user"):
        st.write(prompt)

    # Stream the assistant's reply into its own chat bubble, then store it.
    with st.chat_message("assistant"):
        reply = on_generate_response(st.session_state.messages)
        st.session_state.messages.append(reply)

    # Force a rerun so the refreshed history is rendered from scratch.
    st.rerun()
def on_generate_response(messages) -> AssistantMessage:
    """Run the conversation chain on the latest message and return its reply.

    The last entry of `messages` is the new user prompt; everything before it
    is passed as conversation history.
    """
    placeholder = st.empty()
    callback = StreamlitCallbackHandler(placeholder.container(), expand_new_thoughts=False)
    chain = conversation_chain()
    # NOTE(review): 'callbacks' is passed inside the input dict here — confirm
    # the project chain expects it there rather than as a call keyword argument
    # (LangChain's usual convention is `chain(inputs, callbacks=[...])`).
    result = chain({
        'input': messages[-1].content,
        'callbacks': [callback],
        'history': messages[:-1],
    })
    return result['reply']
if __name__ == '__main__':
    # Load environment variables (e.g. Azure OpenAI credentials) from a .env
    # file before starting the app.
    load_dotenv()
    main()