```python
from llama_index.agent import OpenAIAgent

agent = OpenAIAgent.from_tools(query_engine_tools, verbose=True, llm=llm)
agent.chat_repl()
```

OUTPUT ERROR:

```
===== Entering Chat REPL =====
Type "exit" to exit.

Human: when parliament building inaugurated
=== Calling Function ===
Calling function: new_parmialment with args: {
  "input": "When was the parliament building inaugurated?"
}
---------------------------------------------------------------------------
AuthenticationError                       Traceback (most recent call last)
.
.
.
.
AuthenticationError: No API key provided. You can set your API key in code using 'openai.api_key = <API-KEY>', or you can set the environment variable OPENAI_API_KEY=<API-KEY>). If your API key is stored in a file, you can point the openai module at it with 'openai.api_key_path = <PATH>'. You can generate API keys in the OpenAI web interface. See https://platform.openai.com/account/api-keys for details, or email support@openai.com if you have any questions.

The above exception was the direct cause of the following exception:

RetryError                                Traceback (most recent call last)
.
.
.
.
RetryError: RetryError[<Future at 0x7fd9199b00a0 state=finished raised AuthenticationError>]
```
0.0.200
langchain==0.0.157
works for me!

```python
class Song(BaseModel):
    title: str
    length_seconds: int


prompt_template_str = """\
Generate an example album, with an artist and a list of songs. \
Using the movie {movie_name} as inspiration.\
"""

program = OpenAIPydanticProgram.from_defaults(
    output_cls=Album,
    prompt_template_str=prompt_template_str,
    verbose=True,
    llm=ChatOpenAI(
        model_name="gpt-3.5-turbo-0613",
        openai_api_key=key,
    ),
)

output = program(movie_name='The Shining')
```

OUTPUT ERROR:

```
KeyError                                  Traceback (most recent call last)
/tmp/ipykernel_19989/281202892.py in <module>
----> 1 output = program(movie_name='The Shining')
      2 print(output)

~/.local/lib/python3.10/site-packages/llama_index/program/openai_program.py in __call__(self, *args, **kwargs)
     91         formatted_prompt = self._prompt.format(**kwargs)
     92 
---> 93         openai_fn_spec = _openai_function(self._output_cls)
     94 
     95         ai_message = self._llm.predict_messages(

~/.local/lib/python3.10/site-packages/llama_index/program/openai_program.py in _openai_function(output_cls)
     18     return {
     19         "name": schema["title"],
---> 20         "description": schema["description"],
     21         "parameters": output_cls.schema(),
     22     }

KeyError: 'description'
```
```python
class Thing(BaseModel):
    """This is a thing."""

    field1: str
```
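This works because `OpenAIPydanticProgram` builds the OpenAI function spec from `output_cls.schema()`, and pydantic only emits a top-level `"description"` when the model class has a docstring; without one you get the `KeyError: 'description'` above. A hypothetical sketch applying the same fix to the album snippet (the `Album` class wasn't shown there, so its fields here are an assumption):

```python
from typing import List

from pydantic import BaseModel


class Song(BaseModel):
    """A single song on the album."""  # optional here, but becomes the schema description

    title: str
    length_seconds: int


class Album(BaseModel):
    """An album, with a name, an artist, and a list of songs."""  # required: avoids the KeyError

    name: str
    artist: str
    songs: List[Song]
```

With a docstring on `Album`, `Album.schema()["description"]` exists and the `program(movie_name='The Shining')` call should get past the `KeyError`.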
```python
import openai

openai.api_key = "blah"
```
```bash
export OPENAI_API_KEY=".."
```
```python
import openai

openai.api_key = ".."
```
```python
import os

os.environ["OPENAI_API_KEY"] = ".."
```
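If you go the environment-variable route, set it before importing anything that reads it; assigning `openai.api_key` directly generally works at any point before the first request. A minimal sketch against the original `chat_repl()` reproduction (with `query_engine_tools` and `llm` as in that snippet, and the key value a placeholder):

```python
import os

# Set the key first, before importing anything that reads it at import time.
os.environ["OPENAI_API_KEY"] = ".."

from llama_index.agent import OpenAIAgent

agent = OpenAIAgent.from_tools(query_engine_tools, verbose=True, llm=llm)
agent.chat_repl()
```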
```python
query_engine_tools = [
    QueryEngineTool(
        query_engine=query_engine,
        metadata=ToolMetadata(
            name='new_parmialment',
            description="Tells about new parliament building and inauguration ceremony of the building. "
            "Use a detailed plain text question as input to the tool."
        )
    )
]

from llama_index.agent import OpenAIAgent
from llama_index.selectors.llm_selectors import LLMSingleSelector

agent = OpenAIAgent.from_tools(
    [query_plan_tool],
    max_function_calls=10,
    llm=llm,
    verbose=True
)
response = agent.query("when parliament building inaugurated")
```

OUTPUT ERROR:

```
=== Calling Function ===
Calling function: query_plan_tool with args: {
  "nodes": [
    {
      "id": 1,
      "query_str": "When was the new parliament building inaugurated?",
      "tool_name": "new_parliament",
      "dependencies": []
    }
  ]
}
Executing node {"id": 1, "query_str": "When was the new parliament building inaugurated?", "tool_name": "new_parliament", "dependencies": []}
---------------------------------------------------------------------------
KeyError                                  Traceback (most recent call last)
.
.
.
KeyError: 'new_parliament'
```
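The `KeyError` at the end is a name mismatch: the plan the agent generates refers to `tool_name: "new_parliament"`, but the only registered tool is the misspelled `new_parmialment`, so the plan executor can't look it up. A hedged sketch of a fix, renaming the tool and rebuilding `query_plan_tool` with the `QueryPlanTool.from_defaults` pattern from the query-plan agent example (the `response_synthesizer` is assumed to already exist in your notebook):

```python
from llama_index.tools import QueryEngineTool, ToolMetadata, QueryPlanTool

query_engine_tools = [
    QueryEngineTool(
        query_engine=query_engine,  # same query engine as in the snippet above
        metadata=ToolMetadata(
            # Was 'new_parmialment'; the registered name must match the
            # tool_name the agent puts into its query plan.
            name='new_parliament',
            description=(
                "Tells about the new parliament building and the inauguration "
                "ceremony of the building. Use a detailed plain text question "
                "as input to the tool."
            ),
        ),
    )
]

# Rebuild the plan tool from the renamed query engine tool.
query_plan_tool = QueryPlanTool.from_defaults(
    query_engine_tools=query_engine_tools,
    response_synthesizer=response_synthesizer,  # assumed from earlier in the notebook
)
```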