def get_query_pipeline_router():
    """Build a sequential QueryPipeline: condense question -> LLM -> router.

    The pipeline first rewrites the (chat_history, question) pair into a
    standalone question, then routes it to either the TEG query engine or the
    solid-dessicants query engine via an LLM multi-selector.

    Returns:
        QueryPipeline: the assembled chain, ready for ``.run(...)``.
    """
    # Condense-question prompt: collapses the conversation plus the follow-up
    # into a single self-contained question for retrieval.
    standalone_question = """\
Given the following conversation between a user and an AI assistant and a follow up question from user, rephrase the follow up question to be a standalone question.

Chat History:
{chat_history}
Follow Up Input: {question}
Standalone question: """
    standalone_question_prompt = PromptTemplate(standalone_question)
    text_qa_template = PromptTemplate(CHAT_SYSTEM_PROMPT)

    llm = OpenAI(model="gpt-3.5-turbo-0125")
    # Streaming variant kept for callers that wire it into a streaming chain.
    llm_s = llm.as_query_component(streaming=True)

    # One query engine (wrapped in a single-element chain) per knowledge base.
    teg_query_engine = get_index(dir=STORAGE_DIR_DEHYDRATION_TEG).as_query_engine(
        text_qa_template=text_qa_template
    )
    teg_chain = QueryPipeline(chain=[teg_query_engine])

    dessicants_query_engine = get_index(
        dir=STORAGE_DIR_DEHYDRATION_SOLID_DESSICANTS
    ).as_query_engine(text_qa_template=text_qa_template)
    dessicants_chain = QueryPipeline(chain=[dessicants_query_engine])

    # The selector picks a chain based on these natural-language descriptions.
    router_c = RouterComponent(
        selector=LLMMultiSelector.from_defaults(),
        choices=[
            "Useful for retrieving information about TEG (Glycol) systems. Useful keywords: TEG, Contactor tower, absorber tower, TEG Reboiler, TEG Regenerator, Regeneration Tower, HTU packed towres",
            "Useful for retrieving information about solid dessicants. Useful keywords: Gels, Alumina, Molecular Sieves, Bed, MTZ, Calcium Chloride, Membrane Permeation, Bed Regeneration",
        ],
        components=[teg_chain, dessicants_chain],
        verbose=True,
    )

    qp = QueryPipeline(chain=[standalone_question_prompt, llm, router_c], verbose=True)
    return qp


# BUG FIX: the factory must be *called* before ``.run`` — the original
# ``get_query_pipeline_router.run(...)`` accessed ``.run`` on the function object.
response = get_query_pipeline_router().run("Here goes the question")
# Explicit-DAG form of the same pipeline: modules are named, then wired with
# add_link so the prompt receives both the question and the chat history.
#
# BUG FIXES vs. original:
#   * ``pipline`` was assigned but ``pipeline`` was used (NameError) — name unified.
#   * ``add_line`` is not a QueryPipeline method — corrected to ``add_link``.
#   * second add_link call was missing the comma after "prompt" (SyntaxError).
pipeline = QueryPipeline(
    modules={
        "input": InputComponent(),
        "prompt": standalone_question_prompt,
        "llm": llm,
        "router": router_c,
    }
)
# Fan the two input fields out into the prompt template's slots.
pipeline.add_link("input", "prompt", src_key="question", dest_key="question")
pipeline.add_link("input", "prompt", src_key="chat_history", dest_key="chat_history")
# Linear tail: rendered prompt -> LLM -> router.
pipeline.add_link("prompt", "llm")
pipeline.add_link("llm", "router")

pipeline.run(question="question", chat_history="...")
class CustomAgentWorker(CustomSimpleAgentWorker):
    """Agent worker that answers by delegating to a RouterQueryEngine.

    Each step routes the task input to one of the supplied query-engine tools
    and finishes as soon as a response is produced (or once ``max_iterations``
    steps have run, as a safety valve).
    """

    prompt_str: str = Field(default=DEFAULT_PROMPT_STR)
    # Upper bound on steps; enforced in _run_step (the original never used it).
    max_iterations: int = Field(default=10)

    _router_query_engine: RouterQueryEngine = PrivateAttr()

    def __init__(self, tools: List[BaseTool], **kwargs: Any) -> None:
        """Init params.

        Raises:
            ValueError: if any tool is not a QueryEngineTool — the router
                can only dispatch to query engines.
        """
        # validate that all tools are query engine tools
        for tool in tools:
            if not isinstance(tool, QueryEngineTool):
                raise ValueError(
                    f"Tool {tool.metadata.name} is not a query engine tool."
                )
        self._router_query_engine = RouterQueryEngine(
            selector=PydanticSingleSelector.from_defaults(),
            query_engine_tools=tools,
            verbose=kwargs.get("verbose", False),
        )
        super().__init__(
            tools=tools,
            **kwargs,
        )

    def _initialize_state(self, task: Task, **kwargs: Any) -> Dict[str, Any]:
        """Initialize per-task state: a step counter and a reasoning log."""
        return {"count": 0, "current_reasoning": []}

    def _run_step(
        self, state: Dict[str, Any], task: Task, input: Optional[str] = None
    ) -> Tuple[AgentChatResponse, bool]:
        """Run step.

        Returns:
            Tuple of (agent_response, is_done)
        """
        # Use the refined input if a previous step stored one, else the task's.
        if "new_input" not in state:
            new_input = task.input
        else:
            new_input = state["new_input"]

        # BUG FIX: the original returned None (implicitly) whenever the router
        # produced a falsy response, violating the declared return type; it also
        # never incremented the counter, so max_iterations had no effect.
        state["count"] += 1

        # first run router query engine
        response = self._router_query_engine.query(new_input)

        # Done when we got a non-empty response, or once the step budget is spent.
        is_done = bool(response) or state["count"] >= self.max_iterations

        # return response
        return AgentChatResponse(response=str(response)), is_done
# Drive the custom worker with the stock AgentRunner loop.
from llama_index.core.agent import AgentRunner

# NOTE(review): relies on ``agent_worker`` being defined elsewhere in this
# file/session before this line executes — confirm ordering.
agent = AgentRunner(agent_worker)
# Build the custom worker from the query-engine tools and wrap it in a runner.
agent_worker = CustomAgentWorker.from_tools(
    tools=query_engine_tools,
    llm=llm,
    verbose=True,
    callback_manager=callback_manager,
)
agent = AgentRunner(agent_worker)
# NOTE(review): ``lastMessage`` is camelCase (non-PEP8) and is defined outside
# this chunk — presumably the latest user message; ``messages`` presumably the
# prior chat history passed positionally as ``chat_history`` — TODO confirm
# against AgentRunner.chat's signature.
response: AgentChatResponse = agent.chat(lastMessage.content, messages)
# NOTE(review): in recent LlamaIndex versions ``Task.memory`` is an attribute
# (a memory buffer object), not a method — calling it would raise TypeError.
# Confirm whether this should be ``task.memory`` (or e.g. ``task.memory.get()``).
task.memory()