
Agents for locally hosted models?

Do we have something similar for locally hosted models?

Plain Text
from llama_index.agent.openai import OpenAIAgent
from llama_index.llms.openai import OpenAI

# import and define tools
...

# initialize llm
llm = OpenAI(model="gpt-3.5-turbo-0613")

# initialize openai agent
agent = OpenAIAgent.from_tools(tools, llm=llm, verbose=True)
2 comments
Check out the example code here: https://docs.llamaindex.ai/en/stable/understanding/putting_it_all_together/agents/#agents
Just replace the LLM with your own, and there you have it: an agent with a locally hosted LLM! A sketch is below.
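For example, here is a minimal sketch with a locally hosted model served by Ollama. It assumes the llama-index-llms-ollama package is installed and an Ollama server is running locally; the model name is just an example. ReActAgent is used instead of OpenAIAgent because the latter relies on OpenAI's function-calling API.

Plain Text
from llama_index.core.agent import ReActAgent
from llama_index.llms.ollama import Ollama

# import and define tools
...

# initialize a local llm served by Ollama (model name is an example)
llm = Ollama(model="llama2", request_timeout=120.0)

# ReActAgent works with any LLM, unlike OpenAIAgent, which requires
# OpenAI's function-calling models
agent = ReActAgent.from_tools(tools, llm=llm, verbose=True)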
Plain Text
from pathlib import Path

from llama_index.core.query_engine import RouterQueryEngine, SubQuestionQueryEngine
from llama_index.core.selectors import LLMSingleSelector
from llama_index.core.tools import QueryEngineTool, ToolMetadata

# process_pdf_files and processVectorStore_and_createQueryEngine are my own
# helpers: the first loads the PDFs in a folder into a dict of documents,
# the second builds an index per document and returns them as a dict.
def main():
    folder_name = input("Enter the name of the folder: ")
    if Path("./data").is_dir():
        docs = process_pdf_files("./data/" + folder_name)
        print(docs.keys())

        if len(docs) > 0:
            res = processVectorStore_and_createQueryEngine(docs, folder_name)

            # one tool per index, named after its key
            query_engine_tools = [
                QueryEngineTool(
                    query_engine=res[key].as_query_engine(),
                    metadata=ToolMetadata(
                        name=f"{key}",
                        description=f"Provides information about {key} data",
                    ),
                )
                for key in res
            ]

            # optional: handles questions that span several documents
            sub_question_query_engine = SubQuestionQueryEngine.from_defaults(
                query_engine_tools=query_engine_tools
            )
            # response = sub_question_query_engine.query("why was Uber subject to a 4-year lock-up with respect to our shares in Aurora")
            # print(response)

            # build the router once and reuse it for every question
            query_engine = RouterQueryEngine(
                selector=LLMSingleSelector.from_defaults(),
                query_engine_tools=query_engine_tools,
            )
            while True:
                question = input("Enter your question: ")
                if question == "exit":
                    break
                response = query_engine.query(question)
                print(response)

        else:
            print("No PDF files found in the folder.")

    else:
        print("Folder not found. Please enter a valid folder name.")


main()


I discovered a hacky way of doing this. If there is any way to improve it, please let me know.
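One note that ties this back to the original question: the query engines above will still call OpenAI by default unless a local LLM (and a local embedding model) is configured globally. A minimal sketch, assuming the llama-index-llms-ollama and llama-index-embeddings-huggingface packages are installed; the model names are just examples:

Plain Text
from llama_index.core import Settings
from llama_index.embeddings.huggingface import HuggingFaceEmbedding
from llama_index.llms.ollama import Ollama

# route all LLM calls (router selection, sub-questions, answer synthesis)
# to a locally hosted model instead of OpenAI
Settings.llm = Ollama(model="llama2", request_timeout=120.0)

# embeddings must also be local, otherwise indexing still calls OpenAI
Settings.embed_model = HuggingFaceEmbedding(model_name="BAAI/bge-small-en-v1.5")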