# Load the external knowledge base (vector database).
# Configure the embedding model: a bge-large-zh embedding served by a
# local Ollama instance. NOTE(review): the base_url points at a LAN host
# (192.168.3.155) — confirm it is reachable from where this script runs.
from llama_index.embeddings.ollama import OllamaEmbedding
from llama_index.core import Settings

Settings.embed_model = OllamaEmbedding(
    model_name="quentinz/bge-large-zh-v1.5:latest",
    base_url='http://192.168.3.155:11434',
)
# Reconnect to the vector database that was persisted earlier.
# Requires: pip install llama-index-vector-stores-chroma
import chromadb
from llama_index.core import VectorStoreIndex
from llama_index.vector_stores.chroma import ChromaVectorStore
from llama_index.core import StorageContext
# Initialize a persistent Chroma client, saving its data under ./chroma_db.
db = chromadb.PersistentClient(path="./chroma_db")

# Create the collection (or fetch it if it already exists from a prior run).
chroma_collection = db.get_or_create_collection("quickstart")

# Assign Chroma as the vector_store in the storage context so llama-index
# reads/writes embeddings through this collection.
vector_store = ChromaVectorStore(chroma_collection=chroma_collection)
storage_context = StorageContext.from_defaults(vector_store=vector_store)
# Components for assembling a retrieval-augmented query engine:
# retriever + response synthesizer + keyword-based node post-processing.
from llama_index.core import VectorStoreIndex, get_response_synthesizer
from llama_index.core.retrievers import VectorIndexRetriever
from llama_index.core.query_engine import RetrieverQueryEngine
from llama_index.core.postprocessor import KeywordNodePostprocessor