import os
import re
import streamlit as st
from langchain_community.document_loaders import PDFPlumberLoader
from langchain_text_splitters import RecursiveCharacterTextSplitter
from langchain_core.vectorstores import InMemoryVectorStore
from langchain_ollama import OllamaEmbeddings
from langchain_core.prompts import ChatPromptTemplate
from langchain_ollama.llms import OllamaLLM

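# Model name served by a local Ollama instance; pull it first with
# `ollama pull deepseek-r1:8b`. The same model is reused for embeddings
# below; that works, but a dedicated embedding model such as
# `nomic-embed-text` is a common choice for better retrieval quality.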
LLM = "deepseek-r1:8b"

# Prompt template for answering questions
template = """
You are an assistant for question-answering tasks. Use the following pieces of retrieved context to answer the question. If you don't know the answer, just say that you don't know. Use three sentences maximum and keep the answer concise.
Question: {question}
Context: {context}
Answer:
"""
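# The {question} and {context} placeholders are filled in by
# ChatPromptTemplate.from_template() inside answer_question() below.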

# Directory to save uploaded PDFs
pdfs_directory = "chat-with-pdf/pdfs/"

# Ensure the directory exists
os.makedirs(pdfs_directory, exist_ok=True)

# Initialize embeddings and model
embeddings = OllamaEmbeddings(model=LLM)
model = OllamaLLM(model=LLM)

# Initialize vector store
vector_store = None
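# NOTE: Streamlit reruns this script on every interaction, so the PDF is
# re-embedded and the store rebuilt for each question; caching the store in
# st.session_state is a common way to avoid the repeated work.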


def upload_pdf(file):
    """Save the uploaded PDF to the specified directory."""
    try:
        file_path = os.path.join(pdfs_directory, file.name)
        with open(file_path, "wb") as f:
            f.write(file.getbuffer())
        return file_path
    except Exception as e:
        st.error(f"Error saving file: {e}")
        return None


def load_pdf(file_path):
    """Load the content of the PDF using PDFPlumberLoader."""
    try:
        loader = PDFPlumberLoader(file_path)
        return loader.load()
    except Exception as e:
        st.error(f"Error loading PDF: {e}")
        return None


def split_text(documents):
    """Split the documents into smaller chunks for indexing."""
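    # ~1000-character chunks with 200 characters of overlap keep sentences
    # that straddle a boundary retrievable; add_start_index records each
    # chunk's offset into the source document in its metadata.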
    text_splitter = RecursiveCharacterTextSplitter(
        chunk_size=1000, chunk_overlap=200, add_start_index=True
    )
    return text_splitter.split_documents(documents)


def index_docs(documents):
    """Index the documents in the vector store."""
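    # Build a fresh in-memory index, replacing any previous one; embedding
    # vectors are computed by the local Ollama model configured above.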
    global vector_store
    vector_store = InMemoryVectorStore(embeddings)
    vector_store.add_documents(documents)


def retrieve_docs(query):
    """Retrieve relevant documents based on the query."""
    # Guard against questions asked before a PDF has been indexed.
    if vector_store is None:
        return []
    # similarity_search returns the top 4 matches by default.
    return vector_store.similarity_search(query)


def answer_question(question, documents):
    """Generate an answer to the question using the retrieved documents."""
    context = "\n\n".join([doc.page_content for doc in documents])
    prompt = ChatPromptTemplate.from_template(template)
    chain = prompt | model
    return chain.invoke({"question": question, "context": context})


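# deepseek-r1 models typically wrap their chain of thought in
# <think>...</think> tags ahead of the final answer. This small helper is an
# addition, assuming that output format, that strips the reasoning block so
# only the answer itself reaches the chat window.
def clean_answer(raw_answer):
    """Remove any <think>...</think> reasoning block from the model output."""
    return re.sub(r"<think>.*?</think>", "", raw_answer, flags=re.DOTALL).strip()

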
# Streamlit UI
st.title("Chat with Your PDF")
uploaded_file = st.file_uploader(
    "Upload a PDF file to get started", type="pdf", accept_multiple_files=False
)

if uploaded_file:
    # Save the uploaded PDF
    file_path = upload_pdf(uploaded_file)

    if file_path:
        st.success(f"File uploaded successfully: {uploaded_file.name}")

        # Load and process the PDF
        with st.spinner("Processing PDF..."):
            documents = load_pdf(file_path)
            if documents:
                chunked_documents = split_text(documents)
                index_docs(chunked_documents)
                st.success("PDF indexed successfully! Ask your questions below.")

        # Chat input
        question = st.chat_input("Ask a question about the uploaded PDF:")

        if question:
            st.chat_message("user").write(question)

            with st.spinner("Retrieving relevant information..."):
                related_documents = retrieve_docs(question)
                if related_documents:
                    answer = answer_question(question, related_documents)
                    st.chat_message("assistant").write(clean_answer(answer))
                else:
                    st.chat_message("assistant").write("No relevant information found.")
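
# To run the app locally (assuming this file is saved as app.py):
#   streamlit run app.py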