from llama_index.core.chat_engine import CondenseQuestionChatEngine

# Wrap an existing query engine in a chat engine that condenses the
# conversation history plus the new message into a standalone question.
chat_engine = CondenseQuestionChatEngine.from_defaults(query_engine, ...)

from llama_index.core.tools import QueryEngineTool
from llama_index.agent.openai import OpenAIAgent

# Expose the query engine as a tool and hand it to an OpenAI-powered agent,
# which decides when to call it.
tool = QueryEngineTool.from_defaults(query_engine, name="...", description="...")
agent = OpenAIAgent.from_tools([tool], ...)
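For context, here is a minimal end-to-end sketch of both approaches. It assumes a plain VectorStoreIndex built from a local ./data directory via SimpleDirectoryReader; the tool name docs_query, the description, and the example questions are placeholders, and it presumes the llama-index-agent-openai package is installed and an OpenAI API key is configured.

from llama_index.core import VectorStoreIndex, SimpleDirectoryReader
from llama_index.core.chat_engine import CondenseQuestionChatEngine
from llama_index.core.tools import QueryEngineTool
from llama_index.agent.openai import OpenAIAgent

# Build a query engine over local documents ("./data" is a placeholder path).
documents = SimpleDirectoryReader("./data").load_data()
index = VectorStoreIndex.from_documents(documents)
query_engine = index.as_query_engine()

# Option 1: a chat engine that rewrites each follow-up into a standalone
# question before sending it to the query engine.
chat_engine = CondenseQuestionChatEngine.from_defaults(query_engine=query_engine)
print(chat_engine.chat("What does the document say about pricing?"))

# Option 2: expose the query engine as a tool for an OpenAI agent, which
# chooses when to invoke it during the conversation.
tool = QueryEngineTool.from_defaults(
    query_engine,
    name="docs_query",  # placeholder tool name
    description="Answers questions about the local document set.",  # placeholder
)
agent = OpenAIAgent.from_tools([tool], verbose=True)
print(agent.chat("Summarize the documents."))

The practical difference: the chat engine always routes every message through the query engine, while the agent treats it as one tool among potentially many and only calls it when the model judges it relevant.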