```python
# Imports for the pre-0.10 llama_index layout (the one the traceback below points at).
from llama_index.llms import Ollama
from llama_index.memory import ChatMemoryBuffer
from llama_index.storage.chat_store import SimpleChatStore
from llama_index.chat_engine import SimpleChatEngine

llm = Ollama(model="mistral")
# response = llm.complete("Who is Laurie Voss?")
# print(response)

chat_store = SimpleChatStore()
chat_memory = ChatMemoryBuffer.from_defaults(
    token_limit=3000,
    chat_store=chat_store,
    chat_store_key="user1",
)

chat_engine = SimpleChatEngine.from_defaults(llm=llm)
```
Running this inside the `combat-ai` container fails with:

```
Traceback (most recent call last):
combat-ai |   File "/usr/local/lib/python3.11/site-packages/llama_index/llms/utils.py", line 29, in resolve_llm
combat-ai |     validate_openai_api_key(llm.api_key)
combat-ai |   File "/usr/local/lib/python3.11/site-packages/llama_index/llms/openai_utils.py", line 383, in validate_openai_api_key
combat-ai |     raise ValueError(MISSING_API_KEY_ERROR_MESSAGE)
combat-ai | ValueError: No API key found for OpenAI.
combat-ai | Please set either the OPENAI_API_KEY environment variable or openai.api_key prior to initialization.
combat-ai | API keys can be found or created at https://platform.openai.com/account/api-keys
combat-ai |
combat-ai |
combat-ai | During handling of the above exception, another exception occurred:
combat-ai |
combat-ai | Traceback (most recent call last):
combat-ai |   File "/app/app.py", line 29, in <module>
combat-ai |     chat_engine = SimpleChatEngine.from_defaults(llm=llm)
combat-ai |                   ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
```
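The site-packages paths in that traceback (`llama_index/llms/utils.py`, `llama_index/llms/openai_utils.py`) belong to the old single-package layout of `llama_index` (pre-0.10), and they show `resolve_llm` falling back to a default OpenAI LLM instead of using the Ollama instance handed to `from_defaults`. Because the remedies differ between the 0.9.x and 0.10+ layouts, it helps to confirm first which version is actually installed. A minimal check (it assumes the `llama-index` distribution itself is installed; adjust the name if the environment only has `llama-index-core`):

```python
# Print the installed llama-index version: 0.9.x configures LLMs through
# ServiceContext, while 0.10+ uses Settings plus per-integration packages.
from importlib.metadata import version

print(version("llama-index"))
```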
The next attempt also passes the memory buffer into `from_defaults`:

```python
chat_engine = SimpleChatEngine.from_defaults(llm=llm, memory=chat_memory)
```
After that, trying to set the Ollama instance as the default LLM through `Settings`:

```python
from llama_index.core import Settings

Settings.llm = llm  # Ollama instance
```
That import fails:

```
ImportError: cannot import name 'Settings' from 'llama_index.core' (/usr/local/lib/python3.11/site-packages/llama_index/core/__init__.py)
```
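`Settings` was only introduced with the llama-index 0.10 refactor, so this import cannot succeed against the 0.9.x files visible in the first traceback. If upgrading is an option, the 0.10+ equivalent of the original snippet would look roughly like the sketch below; the `llama_index.core.*` module paths and the separate `llama-index-llms-ollama` package are the 0.10+ layout, i.e. they describe a newer install than the one producing these errors:

```python
# Rough sketch for llama-index >= 0.10.
# Assumes: pip install -U llama-index llama-index-llms-ollama
from llama_index.core import Settings
from llama_index.core.chat_engine import SimpleChatEngine
from llama_index.core.memory import ChatMemoryBuffer
from llama_index.core.storage.chat_store import SimpleChatStore
from llama_index.llms.ollama import Ollama

# Make Ollama the global default so nothing resolves to OpenAI.
Settings.llm = Ollama(model="mistral")

chat_store = SimpleChatStore()
chat_memory = ChatMemoryBuffer.from_defaults(
    token_limit=3000,
    chat_store=chat_store,
    chat_store_key="user1",
)

# With Settings.llm set, from_defaults should pick up the Ollama LLM.
chat_engine = SimpleChatEngine.from_defaults(memory=chat_memory)
```

`SimpleChatEngine` itself does not use embeddings, so no embedding model should need to be configured for this particular snippet.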
Another attempt sets a global service context built with `ServiceContext.from_defaults`:

```python
from llama_index import ServiceContext
from llama_index import set_global_service_context

service_context = ServiceContext.from_defaults(
    llm=llm,
    embed_model="BAAI/bge-small-en-v1.5",
)
set_global_service_context(service_context)
```
This attempt fails at the `Ollama` import instead:

```
ImportError: cannot import name 'Ollama' from 'llama_index.llms' (unknown location)
```
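That error is the mirror image of the previous one: `from llama_index.llms import Ollama` is the pre-0.10 path, and the `(unknown location)` part usually means `llama_index.llms` resolved to a namespace package or an otherwise broken install, which suggests the environment has since been partly moved to the new layout even though the first traceback still showed the old `llms/utils.py` files. Whichever version ends up installed, the import paths have to match it. For the 0.9.x line that the original traceback came from, a sketch of the same global-default idea (assuming a clean 0.9.x install and `sentence-transformers` available for the local embedding model):

```python
# Sketch for llama-index 0.9.x (the layout the first traceback came from).
from llama_index import ServiceContext, set_global_service_context
from llama_index.chat_engine import SimpleChatEngine
from llama_index.llms import Ollama

llm = Ollama(model="mistral")

# "local:..." keeps the embedding model off OpenAI as well.
service_context = ServiceContext.from_defaults(
    llm=llm,
    embed_model="local:BAAI/bge-small-en-v1.5",
)
set_global_service_context(service_context)

# With the global service context set, the engine should default to Ollama.
chat_engine = SimpleChatEngine.from_defaults()
```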