From d46dedbbfaabb624128c08cd9ea5ee4bbc3b1a3e Mon Sep 17 00:00:00 2001
From: suoko
Date: Sat, 28 Oct 2023 08:59:30 +0200
Subject: [PATCH] Update main.py with gradio.ChatInterface

---
 examples/langchain-docscanner/main.py | 27 +++++++++++++++++++++------
 1 file changed, 21 insertions(+), 6 deletions(-)

diff --git a/examples/langchain-docscanner/main.py b/examples/langchain-docscanner/main.py
index 6cb152cc..919233ae 100644
--- a/examples/langchain-docscanner/main.py
+++ b/examples/langchain-docscanner/main.py
@@ -29,34 +29,49 @@ for file in os.listdir(docsUrl):
         pdf_path = docsUrl + "/" + file
         loader = PyPDFLoader(pdf_path)
         documents.extend(loader.load())
+        print("Found " + pdf_path)
     elif file.endswith('.docx') or file.endswith('.doc'):
         doc_path = docsUrl + "/" + file
         loader = Docx2txtLoader(doc_path)
         documents.extend(loader.load())
+        print("Found " + doc_path)
     elif file.endswith('.txt') or file.endswith('.kt') or file.endswith('.json'):
         text_path = docsUrl + "/" + file
         loader = TextLoader(text_path)
         documents.extend(loader.load())
+        print("Found " + text_path)
     elif file.endswith('.html') or file.endswith('.htm'):
-        text_path = docsUrl + "/" + file
-        loader = UnstructuredHTMLLoader(text_path)
+        htm_path = docsUrl + "/" + file
+        loader = UnstructuredHTMLLoader(htm_path)
         documents.extend(loader.load())
+        print("Found " + htm_path)
 
-text_splitter = CharacterTextSplitter(chunk_size=3500, chunk_overlap=20)
+
+text_splitter = CharacterTextSplitter(chunk_size=2000, chunk_overlap=20)
 all_splits = text_splitter.split_documents(documents)
 
 from langchain.embeddings import GPT4AllEmbeddings
 from langchain.vectorstores import Chroma
 vectorstore = Chroma.from_documents(documents=all_splits, embedding=GPT4AllEmbeddings())
 
-def greet(question):
+
+
+
+
+
+def AI_response(question, history):
     docs = vectorstore.similarity_search(question)
     len(docs)
     qachain=RetrievalQA.from_chain_type(ollama, retriever=vectorstore.as_retriever())
-    reply=qachain({"query": question})
+    #reply=qachain()
+    #reply=str(qachain({"query": question}))
+    reply=str(qachain.run(question))
     return reply
 
-demo = gr.Interface(fn=greet, inputs="text", outputs="text")
+
+
+demo = gr.ChatInterface(AI_response, title="Put your files in folder " + docsUrl)
 
 if __name__ == "__main__":
     demo.launch(server_name="0.0.0.0", server_port=7860)
+
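
The patch above swaps the single-field gr.Interface for gr.ChatInterface, whose callback receives both the new message and the running chat history and returns a string reply, which is why greet(question) becomes AI_response(question, history). The following is a minimal, self-contained sketch of that callback shape, assuming only the gradio package; echo_bot and its canned reply are illustrative placeholders and are not part of this patch.

import gradio as gr

def echo_bot(message, history):
    # gr.ChatInterface calls fn(message, history); history holds the prior turns.
    # A real handler (like AI_response in the patch) would run retrieval and the
    # LLM chain here instead of echoing the input back.
    return "You said: " + message

# ChatInterface renders a chat box in place of the separate input/output
# fields that gr.Interface used; title is a plain display string.
demo = gr.ChatInterface(echo_bot, title="Echo demo")

if __name__ == "__main__":
    demo.launch(server_name="0.0.0.0", server_port=7860)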