>>> from llama_index.agent.openai import OpenAIAgent
>>> agent = OpenAIAgent.from_tools([])
>>> resp = agent.chat("My name is logan")
>>> print(str(resp))
Nice to meet you, Logan! How can I assist you today?
>>> resp = agent.chat("I forgot my name, what was it?")
>>> print(str(resp))
Your name is Logan. Is there anything else I can help you with?
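The agent keeps that history in its memory by default. If you want to cap how much history is retained, a rough sketch (assuming you want to configure the default ChatMemoryBuffer) would be:

from llama_index.core.memory import ChatMemoryBuffer
from llama_index.agent.openai import OpenAIAgent

# keep roughly the last 3000 tokens of conversation (the limit is just an example)
memory = ChatMemoryBuffer.from_defaults(token_limit=3000)
agent = OpenAIAgent.from_tools([], memory=memory)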
You can also pass a chat_history directly into the .chat() call to override anything in the memory:

chat_history = agent.chat_history
agent.chat("Hello!", chat_history=chat_history)
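If you want to start the agent off with an existing conversation instead, a sketch (assuming you build the messages yourself with ChatMessage) looks like:

from llama_index.core.llms import ChatMessage
from llama_index.agent.openai import OpenAIAgent

# seed the agent with a prior exchange before the first .chat() call
history = [
    ChatMessage(role="user", content="My name is Logan"),
    ChatMessage(role="assistant", content="Nice to meet you, Logan!"),
]
agent = OpenAIAgent.from_tools([], chat_history=history)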
import streamlit as st

# cache the agent so it (and its chat memory) survives Streamlit reruns
@st.cache_resource
def initialize_agent():
    agent = get_agent()
    return agent

agent = initialize_agent()
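From there, a minimal chat loop could look something like this (just a sketch; it assumes get_agent() returns an OpenAIAgent and uses Streamlit's chat widgets):

# render a simple chat UI on top of the cached agent
if prompt := st.chat_input("Ask something"):
    with st.chat_message("user"):
        st.write(prompt)

    response = agent.chat(prompt)

    with st.chat_message("assistant"):
        st.write(str(response))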
AttributeError: 'StreamingAgentChatResponse' object has no attribute '_is_function_not_none_thread_event'
Can you run

pip install -U llama-index-agent-openai llama-index-core llama-index-llms-openai

just to be sure? (You might have to restart anything you have running too.)

agent = OpenAIAgent.from_tools(
    [vector_query_tool, *googletools],
    llm=llm,
    verbose=True,
    system_prompt=get_system_prompt(),
)
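For reference, that AttributeError shows up when consuming a stream_chat() response; a sketch of how that is usually consumed once the packages are aligned (assuming the agent defined above):

# typical way to iterate a StreamingAgentChatResponse
streaming_response = agent.stream_chat("What's my name again?")
for token in streaming_response.response_gen:
    print(token, end="", flush=True)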