"""Build or load a persistent llama-index ``VectorStoreIndex`` (v0.10.x)
using local HuggingFace embeddings.

Fixes relative to the original snippet:
- ``import os`` was missing and the persist directory (``Persis_Dir``) was
  never defined.
- ``StorageContext`` (the class) was shadowed by an instance of itself.
- ``VectorStoreIndex(...)`` takes no ``llm`` kwarg in 0.10; the LLM is
  configured globally via ``Settings`` (or on the query engine).
- Wrapping langchain's ``HuggingFaceInferenceAPIEmbeddings`` in
  ``LangchainEmbedding`` is what raises
  ``ValueError: "HuggingFaceEmbeddings" object has no field "callback_manager"``.
  Use llama-index's native ``HuggingFaceEmbedding`` for local embeddings.
- The ``else`` branch was ``pass``: a persisted index was never loaded, and
  a freshly built one was never persisted.
"""
import os

from llama_index.core import (
    Settings,
    SimpleDirectoryReader,
    StorageContext,
    VectorStoreIndex,
    load_index_from_storage,
)
from llama_index.core.node_parser import SimpleNodeParser
from llama_index.embeddings.huggingface import HuggingFaceEmbedding
from llama_index.llms.huggingface import HuggingFaceInferenceAPI

PERSIST_DIR = "storage"  # replaces the undefined `Persis_Dir`

# Local in-process embeddings -- the native llama-index wrapper avoids the
# langchain `callback_manager` pydantic-field error entirely.
Settings.embed_model = HuggingFaceEmbedding(model_name="BAAI/bge-small-en-v1.5")
# NOTE(review): pick your own inference model / auth token. The LLM is set
# globally here, NOT passed to the VectorStoreIndex constructor.
Settings.llm = HuggingFaceInferenceAPI(model_name="HuggingFaceH4/zephyr-7b-beta")

if not os.path.exists(PERSIST_DIR):
    # First run: parse documents into nodes, build the index, persist it.
    documents = SimpleDirectoryReader("data").load_data()
    nodes = SimpleNodeParser().get_nodes_from_documents(documents)
    storage_context = StorageContext.from_defaults()  # do not shadow the class
    index = VectorStoreIndex(nodes, storage_context=storage_context)
    index.storage_context.persist(persist_dir=PERSIST_DIR)
else:
    # Subsequent runs: load the persisted index instead of rebuilding.
    storage_context = StorageContext.from_defaults(persist_dir=PERSIST_DIR)
    index = load_index_from_storage(storage_context)

query_engine = index.as_query_engine()