Wrap a vLLM model created through LangChain with LangChainLLM, then register it as the global service context so LlamaIndex components pick it up by default:

from llama_index.llms import LangChainLLM

llm = LangChainLLM(<create vllm from langchain>)

from llama_index import ServiceContext, set_global_service_context

ctx = ServiceContext.from_defaults(llm=llm)
set_global_service_context(ctx)
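As a concrete sketch of the placeholder above: this assumes LangChain's community VLLM wrapper and a hypothetical model id, so adjust the import path and model name to whatever your installed versions expose.

from langchain.llms import VLLM  # lives in langchain_community.llms in newer LangChain releases
from llama_index import ServiceContext, set_global_service_context
from llama_index.llms import LangChainLLM

# Hypothetical model id; use any model vLLM can serve on your hardware.
langchain_vllm = VLLM(
    model="mistralai/Mistral-7B-Instruct-v0.1",
    max_new_tokens=256,
    temperature=0.1,
)

llm = LangChainLLM(llm=langchain_vllm)       # LlamaIndex-compatible wrapper around the LangChain LLM
ctx = ServiceContext.from_defaults(llm=llm)  # make it the default for all queries
set_global_service_context(ctx)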
Alternatively, you can subclass LlamaIndex's CustomLLM object class to wrap a model directly, without going through LangChain.
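A minimal sketch of that route, assuming the pre-0.10 llama_index custom-LLM interface; MyLLM, its field values, and the canned response text are hypothetical placeholders for a real backend call.

from typing import Any

from llama_index.llms import (
    CustomLLM,
    CompletionResponse,
    CompletionResponseGen,
    LLMMetadata,
)
from llama_index.llms.base import llm_completion_callback


class MyLLM(CustomLLM):
    """Hypothetical wrapper around an arbitrary local model."""

    context_window: int = 4096
    num_output: int = 256
    model_name: str = "my-local-model"  # hypothetical name

    @property
    def metadata(self) -> LLMMetadata:
        # Advertise the model's limits to LlamaIndex.
        return LLMMetadata(
            context_window=self.context_window,
            num_output=self.num_output,
            model_name=self.model_name,
        )

    @llm_completion_callback()
    def complete(self, prompt: str, **kwargs: Any) -> CompletionResponse:
        # Call your real backend here; a fixed string keeps the sketch runnable.
        return CompletionResponse(text="hello from my model")

    @llm_completion_callback()
    def stream_complete(self, prompt: str, **kwargs: Any) -> CompletionResponseGen:
        # Yield incremental chunks; here we emit a single one.
        yield CompletionResponse(text="hello from my model", delta="hello from my model")

An instance can then be passed to ServiceContext.from_defaults(llm=MyLLM()) exactly like the LangChain-wrapped model above.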