Hi, I am trying to extract a Pydantic model and am having success with "gpt-4o" and "gpt-4o-mini", but I cannot get it working with "o1-preview" and "o1-mini".
Can anyone help with how to perform structured data extraction with o1-mini and o1-preview, please?
from typing import List

from pydantic import BaseModel

from llama_index.core import Document, VectorStoreIndex
from llama_index.llms.openai import OpenAI


class Biography(BaseModel):
    """Data model for a biography."""
    name: str
    best_known_for: List[str]
    extra_info: str


documents = [Document(
    text="My name is John Dewberry, I am known for my back flips and I live under a mushroom in Alberta"
)]
index = VectorStoreIndex.from_documents(documents)

llms = ["gpt-4o", "gpt-4o-mini", "o1-preview", "o1-mini"]
for llm_name in llms:
    llm = OpenAI(model=llm_name, temperature=0.1)
    query_engine = index.as_query_engine(
        output_cls=Biography, response_mode="compact", llm=llm
    )
    response = query_engine.query("Who is John DewBerry?")
    # For the o1 models this fails with:
    # BadRequestError: Error code: 400 - {'error': {'message': "Unsupported parameter: 'tool_choice' is not supported with this model."
    print(f"{llm_name}: {response.response.model_dump_json()}")