From 2aca3958baccde359e99b714925c524390129685 Mon Sep 17 00:00:00 2001
From: Lisheng Guan
Date: Tue, 2 Apr 2024 13:34:42 +0800
Subject: [PATCH 1/2] Update ingest.py

---
 examples/langchain-python-rag-privategpt/ingest.py | 8 ++++----
 1 file changed, 4 insertions(+), 4 deletions(-)

diff --git a/examples/langchain-python-rag-privategpt/ingest.py b/examples/langchain-python-rag-privategpt/ingest.py
index 35324775..66fe21dc 100755
--- a/examples/langchain-python-rag-privategpt/ingest.py
+++ b/examples/langchain-python-rag-privategpt/ingest.py
@@ -5,7 +5,7 @@ from typing import List
 from multiprocessing import Pool
 from tqdm import tqdm
 
-from langchain.document_loaders import (
+from langchain_community.document_loaders import (
     CSVLoader,
     EverNoteLoader,
     PyMuPDFLoader,
@@ -20,9 +20,9 @@ from langchain.document_loaders import (
 )
 
 from langchain.text_splitter import RecursiveCharacterTextSplitter
-from langchain.vectorstores import Chroma
-from langchain.embeddings import HuggingFaceEmbeddings
-from langchain.docstore.document import Document
+from langchain_community.vectorstores import Chroma
+from langchain_community.embeddings import HuggingFaceEmbeddings
+from langchain_community.docstore.document import Document
 from constants import CHROMA_SETTINGS

From 3794baa1098c83ea56077dddec9c969f3dc9ff14 Mon Sep 17 00:00:00 2001
From: Lisheng Guan
Date: Tue, 2 Apr 2024 13:35:19 +0800
Subject: [PATCH 2/2] Update privateGPT.py

---
 examples/langchain-python-rag-privategpt/privateGPT.py | 8 ++++----
 1 file changed, 4 insertions(+), 4 deletions(-)

diff --git a/examples/langchain-python-rag-privategpt/privateGPT.py b/examples/langchain-python-rag-privategpt/privateGPT.py
index 7d97a567..8d47bc79 100755
--- a/examples/langchain-python-rag-privategpt/privateGPT.py
+++ b/examples/langchain-python-rag-privategpt/privateGPT.py
@@ -1,9 +1,9 @@
 #!/usr/bin/env python3
 from langchain.chains import RetrievalQA
-from langchain.embeddings import HuggingFaceEmbeddings
+from langchain_community.embeddings import HuggingFaceEmbeddings
 from langchain.callbacks.streaming_stdout import StreamingStdOutCallbackHandler
-from langchain.vectorstores import Chroma
-from langchain.llms import Ollama
+from langchain_community.vectorstores import Chroma
+from langchain_community.llms import Ollama
 import chromadb
 import os
 import argparse
@@ -30,7 +30,7 @@ def main():
     # activate/deactivate the streaming StdOut callback for LLMs
     callbacks = [] if args.mute_stream else [StreamingStdOutCallbackHandler()]
 
-    llm = Ollama(model=model, callbacks=callbacks)
+    llm = Ollama(base_url='http://localhost:11434', model=model, callbacks=callbacks)
 
     qa = RetrievalQA.from_chain_type(llm=llm, chain_type="stuff", retriever=retriever, return_source_documents= not args.hide_source)
     # Interactive questions and answers