Merge 3794baa1098c83ea56077dddec9c969f3dc9ff14 into d7eb05b9361febead29a74e71ddffc2ebeff5302

This commit is contained in:
Lisheng Guan 2024-11-14 13:58:10 +08:00 committed by GitHub
commit b418500f10
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
2 changed files with 8 additions and 8 deletions

View File

@@ -5,7 +5,7 @@ from typing import List
from multiprocessing import Pool
from tqdm import tqdm
from langchain.document_loaders import (
from langchain_community.document_loaders import (
CSVLoader,
EverNoteLoader,
PyMuPDFLoader,
@@ -20,9 +20,9 @@ from langchain.document_loaders import (
)
from langchain.text_splitter import RecursiveCharacterTextSplitter
from langchain.vectorstores import Chroma
from langchain.embeddings import HuggingFaceEmbeddings
from langchain.docstore.document import Document
from langchain_community.vectorstores import Chroma
from langchain_community.embeddings import HuggingFaceEmbeddings
from langchain_community.docstore.document import Document
from constants import CHROMA_SETTINGS

View File

@@ -1,9 +1,9 @@
#!/usr/bin/env python3
from langchain.chains import RetrievalQA
from langchain.embeddings import HuggingFaceEmbeddings
from langchain_community.embeddings import HuggingFaceEmbeddings
from langchain.callbacks.streaming_stdout import StreamingStdOutCallbackHandler
from langchain.vectorstores import Chroma
from langchain.llms import Ollama
from langchain_community.vectorstores import Chroma
from langchain_community.llms import Ollama
import chromadb
import os
import argparse
@@ -30,7 +30,7 @@ def main():
# activate/deactivate the streaming StdOut callback for LLMs
callbacks = [] if args.mute_stream else [StreamingStdOutCallbackHandler()]
llm = Ollama(model=model, callbacks=callbacks)
llm = Ollama(base_url='http://localhost:11434', model=model, callbacks=callbacks)
qa = RetrievalQA.from_chain_type(llm=llm, chain_type="stuff", retriever=retriever, return_source_documents= not args.hide_source)
# Interactive questions and answers