User: Hey An error occurred: 'list' object has no attribute 'query_str' (.venv) gavsgav@buntu-white:~/AthenaIndex/FlaskAPP$ /home/gavsgav/AthenaIndex/.venv/bin/python /home/gavsgav/AthenaIndex/StreamTest.py User: Hey An error occurred: name 'chat_engine' is not defined (.venv) gavsgav@buntu-white:~/AthenaIndex/FlaskAPP$ /home/gavsgav/AthenaIndex/.venv/bin/python /home/gavsgav/AthenaIndex/StreamTest.py Traceback (most recent call last): File "/home/gavsgav/AthenaIndex/StreamTest.py", line 28, in <module> chat_engine = index.as_chat_engine(chat_prompt=message, service_context=service_context, chat_mode="condense_plus_context", memory=memory, verbose=False) NameError: name 'message' is not defined (.venv) gavsgav@buntu-white:~/AthenaIndex/FlaskAPP$ /home/gavsgav/AthenaIndex/.venv/bin/python /home/gavsgav/AthenaIndex/StreamTest.py User: Hey An error occurred: 'list' object has no attribute 'query_str' (.venv) gavsgav@buntu-white:~/AthenaIndex/FlaskAPP$ /home/gavsgav/AthenaIndex/.venv/bin/python /home/gavsgav/AthenaIndex/StreamTest.py User: Hey An error occurred: 'AgentChatResponse' object has no attribute 'format' (.venv) gavsgav@buntu-white:~/AthenaIndex/FlaskAPP$ /home/gavsgav/AthenaIndex/.venv/bin/python /home/gavsgav/AthenaIndex/StreamTest.py User: Hey. An error occurred: 'AgentChatResponse' object has no attribute 'format'
import os import sys from llama_index import VectorStoreIndex, ServiceContext from llama_index.prompts import PromptTemplate from llama_index.llms import Ollama, ChatMessage, MessageRole from llama_index.chat_engine.condense_question import ( CondenseQuestionChatEngine, ) from llama_index import SimpleDirectoryReader from pathlib import Path from llama_index.embeddings import OptimumEmbedding, HuggingFaceEmbedding OptimumEmbedding.create_and_save_optimum_model( "BAAI/bge-small-en-v1.5", "./bge_onnx" ) embed_model = OptimumEmbedding(folder_name="./bge_onnx") promt_path = Path("~/AthenaIndex/prompts/").expanduser() documents = SimpleDirectoryReader(promt_path).load_data() index = VectorStoreIndex.from_documents(documents) service_context = ServiceContext.from_defaults(embed_model=embed_model, llm = Ollama(model="dolphin2.2-mistral:7b-q8_0", base_url="http://10.252.0.216:11434")) file_path = Path("~/AthenaIndex/personas/Athena.txt").expanduser() persona = open(file_path, "r").read() while True: text_input = input("User: ") if text_input == "exit": break messages = [ ChatMessage( role="system", content=persona, metadata={"persona": os.path.basename(file_path)} ), ChatMessage(role=MessageRole.USER, content=text_input), ] chat_engine = index.as_chat_engine(chat_mode="context", service_context=service_context) resp = chat_engine.stream_chat(messages) print(resp)
stream_chat()
but not streaming the resultresp = chat_engine.stream_chat(messages) for token in resp.response_gen: print(token, end="")
resp = chat_engine.stream_chat(messages[0].content) for token in resp.response_gen: print(token, end="")
[0].content
For it to work but now the Agent is talking to itself πresp = chat_engine.stream_chat([messages[0]])
resp = chat_engine.stream_chat("hello")
chat_engine.chat_history
)resp = chat_engine.stream_chat("Hello", chat_history=chat_history)