forked from onlyphantom/llm-python
-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy path14_streamlit.py
More file actions
43 lines (40 loc) · 1.79 KB
/
14_streamlit.py
File metadata and controls
43 lines (40 loc) · 1.79 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
import os
from dotenv import load_dotenv
from langchain_openai import ChatOpenAI
from langchain_core.prompts import ChatPromptTemplate
from langchain.agents import create_agent
from langchain_core.messages import HumanMessage
from langchain_community.tools import DuckDuckGoSearchRun
from langchain_community.callbacks.streamlit import StreamlitCallbackHandler
import streamlit as st
# Load environment variables (expects OPENAI_API_KEY in a local .env file).
load_dotenv()

# Streaming chat model so the Streamlit callback handler can render output live.
llm = ChatOpenAI(temperature=0, streaming=True, openai_api_key=os.getenv("OPENAI_API_KEY"))

# Web-search tool the agent may call for up-to-date information.
search_tool = DuckDuckGoSearchRun()
tools = [search_tool]

# NOTE(review): the previous ChatPromptTemplate (with {agent_scratchpad} /
# {chat_history} placeholders) belonged to the legacy AgentExecutor API and was
# never passed to create_agent — dead code. It also shadowed the `prompt`
# chat-input variable assigned below. The system instruction is now supplied
# directly via create_agent's system_prompt parameter instead.
agent = create_agent(
    llm,
    tools,
    system_prompt="You are a helpful assistant. Use the search tool to find current information when needed.",
)
# try: "what are the names of the kids of the 44th president of america"
# try: "top 3 largest shareholders of nvidia"
if user_input := st.chat_input():
    # Echo the user's message in the chat transcript.
    st.chat_message("user").write(user_input)
    with st.chat_message("assistant"):
        thinking_placeholder = st.empty()
        thinking_placeholder.write("🧠 thinking...")
        # Callback handler that renders the agent's intermediate tool calls
        # inside this assistant message's container.
        st_callback = StreamlitCallbackHandler(st.container())
        # BUG FIX: callbacks must be passed inside the `config` dict — a
        # LangGraph agent's .invoke() does not accept a bare `callbacks=`
        # kwarg, so the Streamlit handler was never attached.
        response = agent.invoke(
            {"messages": [HumanMessage(content=user_input)]},
            config={"callbacks": [st_callback]},
        )
        thinking_placeholder.empty()  # Clear the thinking message
        # Extract and display the final answer: the last message with
        # non-empty content (assumed to be the agent's reply).
        if "messages" in response:
            final_answer = next(
                (
                    msg.content
                    for msg in reversed(response["messages"])
                    if hasattr(msg, "content") and msg.content and msg.content.strip()
                ),
                None,
            )
            if final_answer is not None:
                st.write(final_answer)
        else:
            # Unexpected response shape — fall back to a raw dump.
            st.write(str(response))