自己制作网站,房产网站建设机构,wordpress汇聚素材网,wordpress 调用所有目录
安装
llama_index 搜索引擎
用 DeepSeek API 替换本地 Ollama 模型
源代码：安装 pip install llama_index llama_index 搜索引擎
llama_index框架构建搜索引擎_llamaindex使用正则表达式拆分文档-CSDN博客
用 DeepSeek API 替换本地 Ollama 模型
https://zhuanlan.zhihu.com/p/842132629
目录
安装
llama_index 搜索引擎
用 DeepSeek API 替换本地 Ollama 模型
源代码 安装 pip install llama_index llama_index 搜索引擎
llama_index框架构建搜索引擎_llamaindex使用正则表达式拆分文档-CSDN博客
用 DeepSeek API 替换本地 Ollama 模型
https://zhuanlan.zhihu.com/p/842132629 源代码
# %%
# Build a llama_index search engine backed by Chroma, using the DeepSeek API
# (OpenAI-compatible endpoint) instead of a local Ollama model.
from llama_index.core import VectorStoreIndex, SimpleDirectoryReader
from llama_index.vector_stores.chroma import ChromaVectorStore
from llama_index.core import StorageContext, Settings
from llama_index.embeddings.huggingface import HuggingFaceEmbedding
from llama_index.llms.openai import OpenAI
from IPython.display import Markdown, display

import chromadb

# %%
import os

import openai

# DeepSeek exposes an OpenAI-compatible API, so the stock OpenAI client works
# once it is pointed at DeepSeek's base URL.
# Load the key from the environment rather than hard-coding a secret.
openai.api_key = os.environ.get("DEEPSEEK_API_KEY", "sk-...")
DEEPSEEK_BASE_URL = "https://api.deepseek.com/v1"

# Original set `openai.api_base` but then read `openai.base_url` — pass the
# URL explicitly so the two attribute spellings can't drift apart.
llm = OpenAI(
    model="deepseek-chat",
    api_key=openai.api_key,
    base_url=DEEPSEEK_BASE_URL,
)

# Register the DeepSeek-backed LLM as the global default for llama_index
# (reuse the instance instead of constructing an identical second one).
Settings.llm = llm
# %%
# Configure the Jina embedding model used to vectorize documents and queries.
import os

# Read the key from the environment; fall back to a placeholder so the
# notebook cell still shows where to plug a key in.
jinaai_api_key = os.environ.get("JINAAI_API_KEY", "jina_...")
os.environ["JINAAI_API_KEY"] = jinaai_api_key

from llama_index.embeddings.jinaai import JinaEmbedding

text_embed_model = JinaEmbedding(
    api_key=jinaai_api_key,
    model="jina-embeddings-v3",
    # choose "retrieval.passage" to get passage (document-side) embeddings
    task="retrieval.passage",
)
# %%
# NOTE(review): the original tutorial first created an in-memory collection,
# but it is immediately shadowed by the persistent one below and never used —
# kept here commented out for reference to the source article.
# chroma_client = chromadb.EphemeralClient()
# chroma_collection = chroma_client.create_collection("quickstart")

# %%
# Use the Jina model defined above as the index's embedding function.
embed_model = text_embed_model

# Load documents from disk.
documents = SimpleDirectoryReader("./data/paul_graham/").load_data()

# Save to disk: persist vectors in a local Chroma database so the index
# can be reloaded without re-embedding.
db = chromadb.PersistentClient(path="./chroma_db")
chroma_collection = db.get_or_create_collection("quickstart")
vector_store = ChromaVectorStore(chroma_collection=chroma_collection)
storage_context = StorageContext.from_defaults(vector_store=vector_store)
index = VectorStoreIndex.from_documents(
    documents,
    storage_context=storage_context,
    embed_model=embed_model,
)

# Load from disk: rebuild the index object from the persisted vector store
# (no documents needed — embeddings already live in Chroma).
db2 = chromadb.PersistentClient(path="./chroma_db")
chroma_collection = db2.get_or_create_collection("quickstart")
vector_store = ChromaVectorStore(chroma_collection=chroma_collection)
index = VectorStoreIndex.from_vector_store(
    vector_store,
    embed_model=embed_model,
)

# Query data from the persisted index.
query_engine = index.as_query_engine()
response = query_engine.query("What did the author do growing up?")
print("response:", response)