` (llama_env) meaning@Ottis llamaindex_project % pip list | grep llama-index llama-index 0.12.2 llama-index-agent-openai 0.4.0 llama-index-cli 0.4.0 llama-index-core 0.12.2 llama-index-embeddings-openai 0.3.1 llama-index-indices-managed-llama-cloud 0.6.3 llama-index-legacy 0.9.48.post4 llama-index-llms-openai 0.3.2 llama-index-multi-modal-llms-openai 0.3.0 llama-index-program-openai 0.3.1 llama-index-question-gen-openai 0.3.0 llama-index-readers-file 0.4.0 llama-index-readers-llama-parse 0.4.0 (llama_env) meaning@Ottis llamaindex_project % nano query_documents.py (llama_env) meaning@Ottis llamaindex_project % python query_documents.py Traceback (most recent call last): File "/Users/meaning/llamaindex_project/query_documents.py", line 2, in <module> from llama_index import SimpleDirectoryReader, VectorStoreIndex ImportError: cannot import name 'SimpleDirectoryReader' from 'llama_index' (unknown location)
` import requests from llama_index import SimpleDirectoryReader, VectorStoreIndex from llama_index.llms.ollama import Ollama # Configure Ollama as the backend LLM ollama = Ollama( model="llama3:8b-instruct-q6_K", # Specify the model base_url="http://127.0.0.1:11434" # Local URL for Ollama ) # Load the documents documents_path = "/Users/meaning/Downloads/PES" documents = SimpleDirectoryReader(documents_path).load_data() # Create an index with Ollama index = VectorStoreIndex.from_documents(documents, llm=ollama) # Query the index query_engine = index.as_query_engine() response = query_engine.query("What are the key points mentioned in my documents?") print(f"Ollama response: {response}")
` (llama_env) meaning@Ottis llamaindex_project % python query_documents.py Traceback (most recent call last): File "/Users/meaning/llamaindex_project/query_documents.py", line 2, in <module> from llama_index import SimpleDirectoryReader, VectorStoreIndex ImportError: cannot import name 'SimpleDirectoryReader' from 'llama_index' (unknown location)
from llama_index.core import SimpleDirectoryReader, VectorStoreIndex