openai_fn_spec = to_openai_tool(self._output_cls)
Not so helpful as-is. Take to_openai_tool(...)['function'] from the output and use its
name, description, and parameters fields from there. Might need some light massaging
from there to fit the promptfoo format, e.g. "items": { "$ref": "#/definitions/Contact" }
from llama_index.llms.openai_utils import to_openai_tool
to_openai_tool(pydantic_class)
def to_openai_function(pydantic_class: Type[BaseModel]) -> Dict[str, Any]: """Convert pydantic class to OpenAI function.""" schema = pydantic_class.schema() return { "name": schema["title"], "description": schema["description"], "parameters": pydantic_class.schema(), }
>>> from llama_index.llms.openai_utils import to_openai_tool
>>> from pydantic import BaseModel
>>> class Test(BaseModel):
...     """This is a test"""
...     name: str = "Logan"
...
>>> to_openai_tool(Test)
{'type': 'function', 'function': {'name': 'Test', 'description': 'This is a test', 'parameters': {'title': 'Test', 'description': 'This is a test', 'type': 'object', 'properties': {'name': {'title': 'Name', 'default': 'Logan', 'type': 'string'}}}}}
>>>
class ContactList(BaseModel): """ Provides a list of contacts. Return empty list [] if not confident. It Is important to not hallicunate any of the data. email, phone, first name, last name. """ contacts: Optional[List[Contact]] = Field( description="A list of contact information", default="" )
>>> to_openai_tool(Test2)
{'type': 'function', 'function': {'name': 'Test2', 'description': 'Nested thing', 'parameters': {'title': 'Test2', 'description': 'Nested thing', 'type': 'object', 'properties': {'prev': {'title': 'Prev', 'type': 'array', 'items': {'$ref': '#/definitions/Test'}}, 'name_2': {'title': 'Name 2', 'default': 'bmax', 'type': 'string'}}, 'definitions': {'Test': {'title': 'Test', 'description': 'This is a test', 'type': 'object', 'properties': {'name': {'title': 'Name', 'default': 'Logan', 'type': 'string'}}}}}}}
>>> from llama_index.llms import OpenAI
>>> llm = OpenAI()
>>> openai_fn_spec = to_openai_tool(Test2)
>>> from llama_index.llms import OpenAI, ChatMessage
>>> chat_response = llm.chat(messages=[ChatMessage(content="Hello!", role="user")], tools=[openai_fn_spec])
>>>