from llama_index import ServiceContext
from llama_index.llms import Anthropic

# Build a ServiceContext that pairs Anthropic's Claude 2 LLM with a local BGE
# embedding model; `callback_manager` is assumed to be defined earlier.
gpt_4_context = ServiceContext.from_defaults(
    llm=Anthropic(model="claude-2", temperature=0.3),
    embed_model="local:BAAI/bge-base-en-v1.5",
    callback_manager=callback_manager,
)
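
As a minimal sketch of how this context might be used (the `./data` directory and the query string are hypothetical, and loading documents with SimpleDirectoryReader is an assumption), the ServiceContext can be passed to an index so that both embedding and querying run through the models configured above:

```python
from llama_index import SimpleDirectoryReader, VectorStoreIndex

# Hypothetical: load whatever documents you want to index from a local folder.
documents = SimpleDirectoryReader("./data").load_data()

# The service_context wires in Claude 2 for generation and BGE for embeddings.
index = VectorStoreIndex.from_documents(documents, service_context=gpt_4_context)
query_engine = index.as_query_engine()
print(query_engine.query("What does this document cover?"))
```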