from typing import Optional, Union

# Import paths below assume the llama_index package layout (an assumption
# for this excerpt); the full module may already import these elsewhere.
from llama_index.llms.base import LLM
from llama_index.llms.openai import OpenAI

# A selector string (e.g. "default", "local") or a concrete LLM instance.
LLMType = Union[str, LLM]


def resolve_llm(llm: Optional[LLMType] = None) -> LLM:
    """Resolve an LLM from a selector string or an LLM instance."""
if llm == "default":
# return default OpenAI model. If it fails, return LlamaCPP
try:
llm = OpenAI()
except ValueError as e:
llm = "local"
print(
"******\n"
"Could not load OpenAI model. Using default LlamaCPP=llama2-13b-chat. "
"If you intended to use OpenAI, please check your OPENAI_API_KEY.\n"
"Original error:\n"
f"{e!s}"
"\n******"
)