reader = SimpleDirectoryReader(input_dir="cases/")
documents = reader.load_data()
# Insert documents into the vector index.
# Each document has the metadata of its city attached.
for city, wiki_doc in zip(cities, wiki_docs):
    nodes = node_parser.get_nodes_from_documents([wiki_doc])
    # add metadata to each node
    for node in nodes:
        node.extra_info = {"title": city}
    vector_index.insert_nodes(nodes)
python3 main.py
dict_keys(['city_stats'])
[('Toronto', 2930000, 'Canada'), ('Tokyo', 13960000, 'Japan'), ('Berlin', 3645000, 'Germany')]
Traceback (most recent call last):
  File "/home/bi-ai/ai/txt-to-sql/main.py", line 107, in <module>
    node.extra_info = {"title": city}
  File "pydantic/main.py", line 357, in pydantic.main.BaseModel.__setattr__
ValueError: "TextNode" object has no field "extra_info"
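One way past this error, as a minimal sketch: it assumes a llama_index release in which the old extra_info attribute was folded into the metadata dict on TextNode, so the city title is attached via node.metadata instead.

# Attach the city as node metadata instead of the removed extra_info attribute.
for city, wiki_doc in zip(cities, wiki_docs):
    nodes = node_parser.get_nodes_from_documents([wiki_doc])
    for node in nodes:
        node.metadata = {"title": city}  # was: node.extra_info = {"title": city}
    vector_index.insert_nodes(nodes)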
Traceback (most recent call last):
  File "/home/bi/ai/llama_docs_bot/3_eval_baseline/main.py", line 110, in <module>
    response = query_engine.query(query)
  File "/usr/local/lib/python3.10/dist-packages/llama_index/indices/query/base.py", line 23, in query
    response = self._query(str_or_query_bundle)
  File "/usr/local/lib/python3.10/dist-packages/llama_index/query_engine/sub_question_query_engine.py", line 126, in _query
    sub_questions = self._question_gen.generate(self._metadatas, query_bundle)
  File "/usr/local/lib/python3.10/dist-packages/llama_index/question_gen/openai_generator.py", line 77, in generate
    question_list = self._program(query_str=query_str, tools_str=tools_str)
  File "/usr/local/lib/python3.10/dist-packages/llama_index/program/openai_program.py", line 101, in __call__
    chat_response = self._llm.chat(
  File "/usr/local/lib/python3.10/dist-packages/llama_index/llms/base.py", line 134, in wrapped_llm_chat
    CBEventType.LLM, payload={EventPayload.MESSAGES: args[0]}
IndexError: tuple index out of range
class MetadataExtractor(BaseExtractor):
    """Metadata extractor."""

    ...other code...

    in_place: bool = Field(
        default=True, description="Whether to process nodes in place."
    )
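For illustration only, a sketch of how that in_place field might be set when building the extractor. The import paths, the extractors list, and the SimpleNodeParser.from_defaults call follow the 0.8-era API and should be treated as assumptions here.

from llama_index.node_parser import SimpleNodeParser
from llama_index.node_parser.extractors import MetadataExtractor, TitleExtractor

# Assumed constructor arguments; in_place=False asks the extractor not to
# mutate the input nodes (per the Field description above).
metadata_extractor = MetadataExtractor(
    extractors=[TitleExtractor(nodes=5)],
    in_place=False,
)
node_parser = SimpleNodeParser.from_defaults(metadata_extractor=metadata_extractor)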
Traceback (most recent call last):
  File "/home/bi-ai/ai/bottoms-up-embeddings/main.py", line 40, in <module>
    embed_model=InstructorEmbeddings(embed_batch_size=2), chunk_size=512
TypeError: Can't instantiate abstract class InstructorEmbeddings with abstract methods _aget_query_embedding, class_name
def class_name(self) -> str:
    return "InstructorEmbeddings"

async def _aget_query_embedding(self, query: str) -> List[float]:
    return self._get_query_embedding(query)
Traceback (most recent call last):
  File "/home/bi-ai/ai/bottoms-up-embeddings/main.py", line 46, in <module>
    embed_model=InstructorEmbeddings(embed_batch_size=2), chunk_size=512
  File "/home/bi-ai/ai/bottoms-up-embeddings/main.py", line 19, in __init__
    self._model = INSTRUCTOR(instructor_model_name)
  File "pydantic/main.py", line 357, in pydantic.main.BaseModel.__setattr__
ValueError: "InstructorEmbeddings" object has no field "_model"
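A sketch of one way around this, assuming BaseEmbedding is a pydantic model: declare _model as a pydantic private attribute so assignment inside __init__ is allowed. The import paths and the instructor model name below are assumptions, not the article's original code.

from typing import Any

from InstructorEmbedding import INSTRUCTOR
from pydantic import PrivateAttr

from llama_index.embeddings.base import BaseEmbedding


class InstructorEmbeddings(BaseEmbedding):
    # Private attribute: pydantic will not treat _model as a declared field,
    # so setting it in __init__ no longer raises "object has no field".
    _model: Any = PrivateAttr()

    def __init__(self, instructor_model_name: str = "hkunlp/instructor-large", **kwargs: Any) -> None:
        super().__init__(**kwargs)
        self._model = INSTRUCTOR(instructor_model_name)

    # class_name / _get_query_embedding / _aget_query_embedding from the
    # earlier snippet omitted here for brevity.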
# Provide URI to constructor, or use environment variable
import pymongo
from llama_index.vector_stores.mongodb import MongoDBAtlasVectorSearch
from llama_index.indices.vector_store.base import VectorStoreIndex
from llama_index.storage.storage_context import StorageContext
from llama_index.readers.file.base import SimpleDirectoryReader

mongo_uri = "mongodb+srv://<username>:<password>@<host>/?retryWrites=true&w=majority"
mongodb_client = pymongo.MongoClient(mongo_uri)
store = MongoDBAtlasVectorSearch(mongodb_client)
storage_context = StorageContext.from_defaults(vector_store=store)

silva_docs = SimpleDirectoryReader(input_files=["data/Anderson_Silva.pdf"]).load_data()
index = VectorStoreIndex.from_documents(silva_docs, storage_context=storage_context)

response = index.as_query_engine().query("When was Anderson Silva born?")
print(f"<b>{response}</b>")
python3 main.py
<b>None</b>
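One thing worth checking when the query comes back as None is whether the store points at the database, collection, and Atlas vector search index that actually hold the embedded documents. A sketch, where the names are placeholders and the keyword arguments are assumptions about the MongoDBAtlasVectorSearch constructor:

# Hypothetical names; they must match a database/collection that exists in
# Atlas and a vector search index defined on that collection.
store = MongoDBAtlasVectorSearch(
    mongodb_client,
    db_name="default_db",
    collection_name="default_collection",
    index_name="default",
)
storage_context = StorageContext.from_defaults(vector_store=store)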
bi@bi:~/ai/quiz-maker-be$ python3 main.py
Traceback (most recent call last):
  File "/home/bi/ai/quiz-maker-be/main.py", line 5, in <module>
    from llama_index import SimpleDirectoryReader
  File "/home/bi/.local/lib/python3.10/site-packages/llama_index/__init__.py", line 12, in <module>
    from llama_index.data_structs.struct_type import IndexStructType
  File "/home/bi/.local/lib/python3.10/site-packages/llama_index/data_structs/__init__.py", line 3, in <module>
    from llama_index.data_structs.data_structs import (
  File "/home/bi/.local/lib/python3.10/site-packages/llama_index/data_structs/data_structs.py", line 14, in <module>
    from llama_index.schema import BaseNode, TextNode
  File "/home/bi/.local/lib/python3.10/site-packages/llama_index/schema.py", line 9, in <module>
    from llama_index.bridge.langchain import Document as LCDocument
  File "/home/bi/.local/lib/python3.10/site-packages/llama_index/bridge/langchain.py", line 21, in <module>
    from langchain.embeddings import HuggingFaceEmbeddings, HuggingFaceBgeEmbeddings
ImportError: cannot import name 'HuggingFaceBgeEmbeddings' from 'langchain.embeddings' (/home/bi/.local/lib/python3.10/site-packages/langchain/embeddings/__init__.py)