Commit abfef4d5 by Hyder

added requirements

parent 929dee7d
@@ -29,10 +29,14 @@ class Basic:
         if user_query:
             utils.display_msg(user_query, 'user')
             with st.chat_message("assistant"):
-                st_cb = StreamHandler(st.empty())
-                response = chain.run(user_query, callbacks=[st_cb])
-                st.session_state.messages.append(
-                    {"role": "assistant", "content": response})
+                try:
+                    st_cb = StreamHandler(st.empty())
+                    response = chain.run(user_query, callbacks=[st_cb])
+                    st.session_state.messages.append(
+                        {"role": "assistant", "content": response})
+                except Exception as e:
+                    print(e)
 
 if __name__ == "__main__":
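The `StreamHandler` callback wrapped in the new try/except above streams tokens into the chat bubble as the model generates them. It is defined elsewhere in the repo; a minimal sketch of what such a handler typically looks like (class layout and defaults here are assumptions, not the repo's exact code):

```python
from langchain.callbacks.base import BaseCallbackHandler

class StreamHandler(BaseCallbackHandler):
    """Streams LLM tokens into a Streamlit placeholder as they arrive."""

    def __init__(self, container, initial_text: str = ""):
        self.container = container  # e.g. st.empty()
        self.text = initial_text

    def on_llm_new_token(self, token: str, **kwargs) -> None:
        # Append each new token and re-render the accumulated text
        self.text += token
        self.container.markdown(self.text)
```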
@@ -47,11 +47,15 @@ class ChatbotTools:
         if user_query:
             utils.display_msg(user_query, 'user')
             with st.chat_message("assistant"):
-                st_cb = StreamlitCallbackHandler(st.container())
-                response = agent.run(user_query, callbacks=[st_cb])
-                st.session_state.messages.append(
-                    {"role": "assistant", "content": response})
-                st.write(response)
+                try:
+                    st_cb = StreamlitCallbackHandler(st.container())
+                    response = agent.run(user_query, callbacks=[st_cb])
+                    st.session_state.messages.append(
+                        {"role": "assistant", "content": response})
+                    st.write(response)
+                except Exception as e:
+                    print(e)
 
 if __name__ == "__main__":
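The `agent` this hunk runs is built elsewhere in the same file. A plausible sketch of that setup, assuming a DuckDuckGo search tool (the `duckduckgo-search` pin appears in the new requirements.txt below); the function name, model, and parameters are illustrative only:

```python
from langchain.agents import AgentType, initialize_agent
from langchain.chat_models import ChatOpenAI
from langchain.tools import DuckDuckGoSearchRun

def setup_agent():
    llm = ChatOpenAI(model_name="gpt-3.5-turbo", temperature=0, streaming=True)
    tools = [DuckDuckGoSearchRun(name="Search")]  # web search via duckduckgo-search
    return initialize_agent(
        tools=tools,
        llm=llm,
        agent=AgentType.ZERO_SHOT_REACT_DESCRIPTION,
        handle_parsing_errors=True,  # keep the agent alive on malformed tool output
        verbose=True,
    )
```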
@@ -8,7 +8,7 @@ from langchain.document_loaders import PyPDFLoader
 from langchain.memory import ConversationBufferMemory
 from langchain.chains import ConversationalRetrievalChain
 from langchain.embeddings import OpenAIEmbeddings
-from langchain.vectorstores import Chroma
+from langchain.vectorstores import FAISS
 from langchain.text_splitter import RecursiveCharacterTextSplitter
 
 st.set_page_config(page_title="ChatPDF", page_icon="📄")
@@ -51,7 +51,7 @@ class CustomDataChatbot:
         # Create embeddings and store in vectordb
         embeddings = OpenAIEmbeddings()
-        vectordb = Chroma.from_documents(splits, embeddings)
+        vectordb = FAISS.from_documents(splits, embeddings)
 
         # Define retriever
         retriever = vectordb.as_retriever()
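For context on the Chroma → FAISS swap above: FAISS builds the index in process memory for the session, so no chromadb backend is needed at runtime. A rough sketch of how `setup_qa_chain` presumably wires these imports together (the file path, chunk sizes, and model name are assumptions):

```python
from langchain.chains import ConversationalRetrievalChain
from langchain.chat_models import ChatOpenAI
from langchain.document_loaders import PyPDFLoader
from langchain.embeddings import OpenAIEmbeddings
from langchain.memory import ConversationBufferMemory
from langchain.text_splitter import RecursiveCharacterTextSplitter
from langchain.vectorstores import FAISS

file_path = "uploaded.pdf"  # placeholder for the PDF saved from the uploader
docs = PyPDFLoader(file_path).load()
splits = RecursiveCharacterTextSplitter(
    chunk_size=1500, chunk_overlap=200
).split_documents(docs)

# In-memory FAISS index built from the chunk embeddings
vectordb = FAISS.from_documents(splits, OpenAIEmbeddings())
retriever = vectordb.as_retriever()

memory = ConversationBufferMemory(memory_key="chat_history", return_messages=True)
qa_chain = ConversationalRetrievalChain.from_llm(
    llm=ChatOpenAI(model_name="gpt-3.5-turbo", streaming=True),
    retriever=retriever,
    memory=memory,
    verbose=False,
)
```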
@@ -84,12 +84,15 @@ class CustomDataChatbot:
             qa_chain = self.setup_qa_chain(uploaded_files)
             utils.display_msg(user_query, 'user')
             with st.chat_message("assistant"):
-                st_cb = StreamHandler(st.empty())
-                response = qa_chain.run(user_query, callbacks=[st_cb])
-                st.session_state.messages.append(
-                    {"role": "assistant", "content": response})
+                try:
+                    st_cb = StreamHandler(st.empty())
+                    response = qa_chain.run(user_query, callbacks=[st_cb])
+                    st.session_state.messages.append(
+                        {"role": "assistant", "content": response})
+                except Exception as e:
+                    print(e)
 
 if __name__ == "__main__":
requirements.txt
langchain==0.0.312
PyPDF2==3.0.1
python-dotenv==1.0.0
streamlit==1.27.2
openai==0.27.6
altair==4
Pillow==9.5.0
tiktoken==0.5.1
duckduckgo-search==3.9.3
pypdf==3.16.4
faiss-cpu==1.7.4
chromadb
\ No newline at end of file
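The `python-dotenv` and `openai` pins suggest the app reads its API key from a local `.env` file. A minimal, assumed bootstrap for running any of the pages locally (file and variable names are the conventional ones, not confirmed by this commit):

```python
import os
from dotenv import load_dotenv

load_dotenv()  # picks up OPENAI_API_KEY from a .env file next to the app
if not os.getenv("OPENAI_API_KEY"):
    raise RuntimeError("Set OPENAI_API_KEY (e.g. in .env) before launching the Streamlit app")
```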