Commit abfef4d5 by Hyder

added requirements

parent 929dee7d
@@ -29,10 +29,14 @@ class Basic:
         if user_query:
             utils.display_msg(user_query, 'user')
             with st.chat_message("assistant"):
-                st_cb = StreamHandler(st.empty())
-                response = chain.run(user_query, callbacks=[st_cb])
-                st.session_state.messages.append(
-                    {"role": "assistant", "content": response})
+                try:
+                    st_cb = StreamHandler(st.empty())
+                    response = chain.run(user_query, callbacks=[st_cb])
+                    st.session_state.messages.append(
+                        {"role": "assistant", "content": response})
+                except Exception as e:
+                    print(e)
 
 if __name__ == "__main__":
...
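Note: StreamHandler is the project's custom streaming callback and its definition is not part of this diff. A minimal sketch of what such a handler typically looks like, assuming it streams tokens into a Streamlit placeholder (class body and names here are an illustration, not code from this commit):

    from langchain.callbacks.base import BaseCallbackHandler

    class StreamHandler(BaseCallbackHandler):
        """Streams LLM tokens into a Streamlit container as they arrive."""

        def __init__(self, container, initial_text=""):
            self.container = container  # e.g. st.empty()
            self.text = initial_text

        def on_llm_new_token(self, token: str, **kwargs) -> None:
            # Append each new token and re-render the accumulated text
            self.text += token
            self.container.markdown(self.text)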
@@ -47,11 +47,15 @@ class ChatbotTools:
         if user_query:
             utils.display_msg(user_query, 'user')
             with st.chat_message("assistant"):
-                st_cb = StreamlitCallbackHandler(st.container())
-                response = agent.run(user_query, callbacks=[st_cb])
-                st.session_state.messages.append(
-                    {"role": "assistant", "content": response})
-                st.write(response)
+                try:
+                    st_cb = StreamlitCallbackHandler(st.container())
+                    response = agent.run(user_query, callbacks=[st_cb])
+                    st.session_state.messages.append(
+                        {"role": "assistant", "content": response})
+                    st.write(response)
+                except Exception as e:
+                    print(e)
 
 if __name__ == "__main__":
...
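For context, StreamlitCallbackHandler comes from LangChain's Streamlit integration and renders the agent's intermediate steps inside the given container. A rough sketch of how such an agent is typically wired, assuming a DuckDuckGo search tool and a gpt-3.5-turbo model (both are assumptions based on the pins below, not code from this commit):

    import streamlit as st
    from langchain.agents import AgentType, initialize_agent
    from langchain.callbacks import StreamlitCallbackHandler
    from langchain.chat_models import ChatOpenAI
    from langchain.tools import DuckDuckGoSearchRun

    # Requires OPENAI_API_KEY in the environment; duckduckgo-search is pinned below.
    llm = ChatOpenAI(model_name="gpt-3.5-turbo", streaming=True)
    tools = [DuckDuckGoSearchRun()]
    agent = initialize_agent(tools, llm, agent=AgentType.ZERO_SHOT_REACT_DESCRIPTION)

    # The callback draws the agent's tool calls and thoughts into the container.
    st_cb = StreamlitCallbackHandler(st.container())
    response = agent.run("What is LangChain?", callbacks=[st_cb])
    st.write(response)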
@@ -8,7 +8,7 @@ from langchain.document_loaders import PyPDFLoader
 from langchain.memory import ConversationBufferMemory
 from langchain.chains import ConversationalRetrievalChain
 from langchain.embeddings import OpenAIEmbeddings
-from langchain.vectorstores import Chroma
+from langchain.vectorstores import FAISS
 from langchain.text_splitter import RecursiveCharacterTextSplitter
 
 st.set_page_config(page_title="ChatPDF", page_icon="📄")
@@ -51,7 +51,7 @@ class CustomDataChatbot:
         # Create embeddings and store in vectordb
         embeddings = OpenAIEmbeddings()
-        vectordb = Chroma.from_documents(splits, embeddings)
+        vectordb = FAISS.from_documents(splits, embeddings)
 
         # Define retriever
         retriever = vectordb.as_retriever()
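The switch from Chroma to FAISS keeps the vector index fully in memory and matches the faiss-cpu pin added below. A condensed sketch of how this retrieval chain fits together under the imports above; the file path, chunk sizes, and model name are placeholder assumptions, not values from this commit:

    from langchain.chains import ConversationalRetrievalChain
    from langchain.chat_models import ChatOpenAI
    from langchain.document_loaders import PyPDFLoader
    from langchain.embeddings import OpenAIEmbeddings
    from langchain.memory import ConversationBufferMemory
    from langchain.text_splitter import RecursiveCharacterTextSplitter
    from langchain.vectorstores import FAISS

    # Requires OPENAI_API_KEY in the environment; "example.pdf" is a hypothetical upload.
    docs = PyPDFLoader("example.pdf").load()
    splits = RecursiveCharacterTextSplitter(chunk_size=1500, chunk_overlap=200).split_documents(docs)

    # Build the in-memory FAISS index and expose it as a retriever
    vectordb = FAISS.from_documents(splits, OpenAIEmbeddings())
    retriever = vectordb.as_retriever()

    memory = ConversationBufferMemory(memory_key="chat_history", return_messages=True)
    qa_chain = ConversationalRetrievalChain.from_llm(
        ChatOpenAI(model_name="gpt-3.5-turbo", streaming=True),
        retriever=retriever,
        memory=memory,
    )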
@@ -84,12 +84,15 @@ class CustomDataChatbot:
             qa_chain = self.setup_qa_chain(uploaded_files)
             utils.display_msg(user_query, 'user')
             with st.chat_message("assistant"):
-                st_cb = StreamHandler(st.empty())
-                response = qa_chain.run(user_query, callbacks=[st_cb])
-                st.session_state.messages.append(
-                    {"role": "assistant", "content": response})
+                try:
+                    st_cb = StreamHandler(st.empty())
+                    response = qa_chain.run(user_query, callbacks=[st_cb])
+                    st.session_state.messages.append(
+                        {"role": "assistant", "content": response})
+                except Exception as e:
+                    print(e)
 
 if __name__ == "__main__":
...
langchain==0.0.312
PyPDF2==3.0.1
python-dotenv==1.0.0
streamlit==1.27.2
openai==0.27.6
altair==4
Pillow==9.5.0
tiktoken==0.5.1
duckduckgo-search==3.9.3
pypdf==3.16.4
faiss-cpu==1.7.4
chromadb
\ No newline at end of file
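The pinned dependencies above can be installed in one step, assuming the new file is saved as requirements.txt at the project root:

    pip install -r requirements.txt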