Commit abfef4d5 by Hyder

added requirements

parent 929dee7d
@@ -29,10 +29,14 @@ class Basic:
        if user_query:
            utils.display_msg(user_query, 'user')
            with st.chat_message("assistant"):
-               st_cb = StreamHandler(st.empty())
-               response = chain.run(user_query, callbacks=[st_cb])
-               st.session_state.messages.append(
-                   {"role": "assistant", "content": response})
+               try:
+                   st_cb = StreamHandler(st.empty())
+                   response = chain.run(user_query, callbacks=[st_cb])
+                   st.session_state.messages.append(
+                       {"role": "assistant", "content": response})
+               except Exception as e:
+                   print(e)

if __name__ == "__main__":
    ...
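The StreamHandler used above is defined elsewhere in this repo and is not part of this commit. For context, a minimal sketch of what such a token-streaming callback typically looks like; the names and behaviour below are assumptions, not taken from the diff:

# Assumed sketch of a Streamlit token-streaming callback; the actual
# StreamHandler lives elsewhere in the repo and may differ.
from langchain.callbacks.base import BaseCallbackHandler

class StreamHandler(BaseCallbackHandler):
    def __init__(self, container, initial_text=""):
        self.container = container      # e.g. st.empty()
        self.text = initial_text

    def on_llm_new_token(self, token: str, **kwargs) -> None:
        # Accumulate streamed tokens and re-render them in the placeholder.
        self.text += token
        self.container.markdown(self.text)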
@@ -47,11 +47,15 @@ class ChatbotTools:
        if user_query:
            utils.display_msg(user_query, 'user')
            with st.chat_message("assistant"):
-               st_cb = StreamlitCallbackHandler(st.container())
-               response = agent.run(user_query, callbacks=[st_cb])
-               st.session_state.messages.append(
-                   {"role": "assistant", "content": response})
-               st.write(response)
+               try:
+                   st_cb = StreamlitCallbackHandler(st.container())
+                   response = agent.run(user_query, callbacks=[st_cb])
+                   st.session_state.messages.append(
+                       {"role": "assistant", "content": response})
+                   st.write(response)
+               except Exception as e:
+                   print(e)

if __name__ == "__main__":
    ...
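The agent behind agent.run(...) is built elsewhere in this file and is not shown in the diff. A hedged sketch of a comparable setup, using the StreamlitCallbackHandler seen above and the duckduckgo-search dependency pinned in the new requirements file; the tool choice, model, and parameters are assumptions, not taken from this commit:

# Assumed sketch: a ReAct-style agent with a DuckDuckGo search tool,
# streaming its intermediate steps through StreamlitCallbackHandler.
import streamlit as st
from langchain.agents import AgentType, initialize_agent
from langchain.callbacks import StreamlitCallbackHandler
from langchain.chat_models import ChatOpenAI
from langchain.tools import DuckDuckGoSearchRun

llm = ChatOpenAI(model_name="gpt-3.5-turbo", streaming=True)
agent = initialize_agent(
    tools=[DuckDuckGoSearchRun(name="Search")],
    llm=llm,
    agent=AgentType.ZERO_SHOT_REACT_DESCRIPTION,
    verbose=True,
)

st_cb = StreamlitCallbackHandler(st.container())
response = agent.run("What is the latest Streamlit release?", callbacks=[st_cb])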
@@ -8,7 +8,7 @@ from langchain.document_loaders import PyPDFLoader
from langchain.memory import ConversationBufferMemory
from langchain.chains import ConversationalRetrievalChain
from langchain.embeddings import OpenAIEmbeddings
-from langchain.vectorstores import Chroma
+from langchain.vectorstores import FAISS
from langchain.text_splitter import RecursiveCharacterTextSplitter

st.set_page_config(page_title="ChatPDF", page_icon="📄")

@@ -51,7 +51,7 @@ class CustomDataChatbot:
        # Create embeddings and store in vectordb
        embeddings = OpenAIEmbeddings()
-       vectordb = Chroma.from_documents(splits, embeddings)
+       vectordb = FAISS.from_documents(splits, embeddings)

        # Define retriever
        retriever = vectordb.as_retriever()

@@ -86,10 +86,13 @@ class CustomDataChatbot:
            utils.display_msg(user_query, 'user')
            with st.chat_message("assistant"):
-               st_cb = StreamHandler(st.empty())
-               response = qa_chain.run(user_query, callbacks=[st_cb])
-               st.session_state.messages.append(
-                   {"role": "assistant", "content": response})
+               try:
+                   st_cb = StreamHandler(st.empty())
+                   response = qa_chain.run(user_query, callbacks=[st_cb])
+                   st.session_state.messages.append(
+                       {"role": "assistant", "content": response})
+               except Exception as e:
+                   print(e)

if __name__ == "__main__":
    ...
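Taken together, the ChatPDF changes swap Chroma for an in-memory FAISS index while keeping the rest of the retrieval pipeline intact. A condensed sketch of how those pieces fit together under the pinned langchain==0.0.312 API; here splits stands in for the chunked PDF documents produced upstream, and the model settings are assumptions:

# Assumed condensed sketch of the FAISS-backed retrieval chain; `splits` is
# the list of Documents produced by RecursiveCharacterTextSplitter upstream.
from langchain.chains import ConversationalRetrievalChain
from langchain.chat_models import ChatOpenAI
from langchain.embeddings import OpenAIEmbeddings
from langchain.memory import ConversationBufferMemory
from langchain.vectorstores import FAISS

embeddings = OpenAIEmbeddings()
vectordb = FAISS.from_documents(splits, embeddings)   # in-memory index, no external server
retriever = vectordb.as_retriever()

memory = ConversationBufferMemory(memory_key="chat_history", return_messages=True)
llm = ChatOpenAI(model_name="gpt-3.5-turbo", streaming=True)
qa_chain = ConversationalRetrievalChain.from_llm(
    llm=llm, retriever=retriever, memory=memory, verbose=True
)

response = qa_chain.run("Summarise the uploaded PDF")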
langchain==0.0.312
PyPDF2==3.0.1
python-dotenv==1.0.0
streamlit==1.27.2
openai==0.27.6
altair==4
Pillow==9.5.0
tiktoken==0.5.1
duckduckgo-search==3.9.3
pypdf==3.16.4
faiss-cpu==1.7.4
chromadb
\ No newline at end of file