I want to build a RAG app with LangChain and Streamlit. Can you help me?
Drop files here
or click to upload
import streamlit as st
from langchain.embeddings import OpenAIEmbeddings
from langchain.vectorstores import Chroma
from langchain.text_splitter import CharacterTextSplitter
from langchain.chat_models import ChatOpenAI
from langchain.chains import ConversationalRetrievalChain
from langchain.memory import ConversationBufferMemory
import os
from dotenv import load_dotenv
# Load environment variables (expects OPENAI_API_KEY in a local .env file,
# which the OpenAI-backed LangChain components read implicitly).
load_dotenv()
# Set page config — st.set_page_config must be the first Streamlit call in the script.
st.set_page_config(page_title="RAG Chat Assistant", layout="wide")
st.title("RAG Chat Assistant")
# Initialize session state so values survive Streamlit's top-to-bottom reruns:
#   messages     — chat transcript as {"role", "content"} dicts
#   conversation — the ConversationalRetrievalChain (built after upload)
#   db           — the Chroma vector store (built after upload)
for _key, _default in (("messages", []), ("conversation", None), ("db", None)):
    if _key not in st.session_state:
        st.session_state[_key] = _default
# File uploader — Streamlit reruns this whole script on every interaction, so
# guard with a session-state marker to avoid re-embedding the same document
# (and paying for OpenAI embedding calls) on every rerun.
uploaded_file = st.file_uploader("Upload a text document", type=["txt"])
if uploaded_file and st.session_state.get("processed_file") != uploaded_file.name:
    # Read and decode the uploaded document (decode() assumes UTF-8 — the
    # bytes.decode default; TODO confirm uploads are UTF-8 text).
    document_text = uploaded_file.read().decode()

    # Split the text into overlapping chunks so retrieval returns
    # self-contained passages.
    text_splitter = CharacterTextSplitter(chunk_size=1000, chunk_overlap=200)
    texts = text_splitter.split_text(document_text)

    # Embed the chunks and store them in an in-memory Chroma vector database.
    embeddings = OpenAIEmbeddings()
    st.session_state.db = Chroma.from_texts(texts, embeddings)

    # Build the conversational chain: the memory keeps prior turns under
    # "chat_history", which ConversationalRetrievalChain expects.
    memory = ConversationBufferMemory(
        memory_key="chat_history",
        return_messages=True,
    )
    st.session_state.conversation = ConversationalRetrievalChain.from_llm(
        llm=ChatOpenAI(temperature=0),  # temperature=0 for deterministic answers
        retriever=st.session_state.db.as_retriever(),
        memory=memory,
    )

    # Remember which file we processed so reruns skip the work above.
    st.session_state.processed_file = uploaded_file.name
    st.success("Document processed and ready for questions!")
# Replay the stored transcript on every rerun so the chat history persists
# across interactions.
for message in st.session_state.messages:
    with st.chat_message(message["role"]):
        st.write(message["content"])
# Chat input — the walrus binds the submitted question (falsy when nothing
# was submitted this rerun).
if prompt := st.chat_input("Ask a question about your document"):
    if not st.session_state.db:
        # No vector store yet: nothing to retrieve from.
        st.error("Please upload a document first!")
    else:
        # Record and echo the user's turn.
        st.session_state.messages.append({"role": "user", "content": prompt})
        with st.chat_message("user"):
            st.write(prompt)

        # Query the retrieval chain; the chain's memory supplies the
        # chat history, so only the new question is passed in.
        with st.chat_message("assistant"):
            with st.spinner("Thinking..."):
                response = st.session_state.conversation.run(prompt)
                st.write(response)
        st.session_state.messages.append({"role": "assistant", "content": response})
# Sidebar usage instructions. The markdown body is kept at column 0:
# indenting it four spaces would make Markdown render it as a code block.
with st.sidebar:
    st.markdown("""
## How to use this app
1. Upload a text document using the file uploader
2. Wait for the document to be processed
3. Ask questions about the document in the chat
4. The AI will respond using information from your document

Note: This app requires an OpenAI API key set in your environment variables as OPENAI_API_KEY
""")
Hi! I can help you with any questions about Streamlit and Python. What would you like to know?