Hello, I was wondering if there is a way to create only the tool metadata, vectorize it, and store it somewhere (not in memory), and then only create the agent when it is actually called? Here is what I currently do:
import os

# Legacy (pre-0.10) llama_index imports, matching the APIs used below.
from llama_index import VectorStoreIndex
from llama_index.agent import FnRetrieverOpenAIAgent, OpenAIAgent
from llama_index.llms import ChatMessage, OpenAI
from llama_index.objects import ObjectIndex, SimpleToolNodeMapping
from llama_index.tools import QueryEngineTool, ToolMetadata

agents = {}
all_tools = []

for idx, route in enumerate(routes, start=1):
    print(f"Processing route {idx}/{len(routes)}")
    # Seed the per-route agent with the full route description as prior chat history.
    history = []
    history.append(
        ChatMessage(
            role="assistant",
            content=(
                "Here are all the details of the route. "
                f"The route is defined in the context of the {route.higher_context} context. "
                f"The route is {route.route} and the method is {route.method}. "
                f"The possible responses are {route.responses}. "
                f"The request body is {route.requestBody}. "
                f"The parameters are {route.parameters}. "
                f"The tags are {route.tags}. "
                f"The additional properties are {route.additional_properties}."
            ),
        )
    )
    function_llm = OpenAI(model="gpt-3.5-turbo")
    agent = OpenAIAgent.from_tools(
        None,
        llm=function_llm,
        verbose=True,
        system_prompt=PROMPT1,
        chat_history=history,
    )
    agents[route.title()] = agent
    # Wrap the per-route agent as a tool so the top-level agent can retrieve it.
    tool_summary = SUMMARY
    doc_tool = QueryEngineTool(
        query_engine=agents[route.title()],
        metadata=ToolMetadata(
            name=f"tool_{route.title()}",
            description=tool_summary,
        ),
    )
    all_tools.append(doc_tool)
tool_mapping = SimpleToolNodeMapping.from_objects(all_tools)
if not os.path.exists("index"):
    obj_index = ObjectIndex.from_objects(
        all_tools,
        tool_mapping,
        VectorStoreIndex,
    )
    obj_index.persist("index")
else:
    obj_index = ObjectIndex.from_persist_dir("index", tool_mapping)
top_agent = FnRetrieverOpenAIAgent.from_retriever(
    obj_index.as_retriever(similarity_top_k=3),
    system_prompt=PROMPT,
    verbose=True,
    llm=OpenAI(model="gpt-4"),
)
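
Roughly, this is what I'm imagining. It is just a sketch of the idea, not working code: `build_agent_for_route`, `get_tools_for_query`, `routes_by_title`, and the "metadata_index" directory are names I made up, not an existing llama_index API. The point is that only the vectorized summaries get persisted, and the per-route agents are built on demand when a summary is retrieved.

# Hypothetical sketch: persist only the vectorized tool summaries, and build
# the heavy per-route agents lazily, once their summaries are retrieved.
from llama_index import Document, StorageContext, VectorStoreIndex, load_index_from_storage
from llama_index.tools import QueryEngineTool, ToolMetadata

routes_by_title = {route.title(): route for route in routes}

if not os.path.exists("metadata_index"):
    # Index lightweight documents that hold only the per-route summaries.
    docs = [
        Document(text=SUMMARY, metadata={"route_title": route.title()})
        for route in routes
    ]
    metadata_index = VectorStoreIndex.from_documents(docs)
    metadata_index.storage_context.persist("metadata_index")
else:
    storage = StorageContext.from_defaults(persist_dir="metadata_index")
    metadata_index = load_index_from_storage(storage)


def build_agent_for_route(route):
    # Made-up factory: would contain the same agent-building logic as the loop above.
    ...


def get_tools_for_query(query: str, top_k: int = 3):
    # Retrieve the most relevant summaries, then build agents only for those routes.
    retriever = metadata_index.as_retriever(similarity_top_k=top_k)
    tools = []
    for hit in retriever.retrieve(query):
        route = routes_by_title[hit.node.metadata["route_title"]]
        agent = build_agent_for_route(route)  # created only now, not upfront
        tools.append(
            QueryEngineTool(
                query_engine=agent,
                metadata=ToolMetadata(
                    name=f"tool_{route.title()}",
                    description=hit.node.get_content(),
                ),
            )
        )
    return tools

The tools returned by `get_tools_for_query` would then be handed to a fresh `OpenAIAgent.from_tools(...)` per query instead of going through `FnRetrieverOpenAIAgent`. Is something along these lines possible, or is there a built-in way to do it?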