    # Configure the prompt helper and the LLM predictor (gpt-3.5-turbo via LangChain's ChatOpenAI).
    prompt_helper = PromptHelper(max_input_size, num_outputs, max_chunk_overlap,
                                 chunk_size_limit=chunk_size_limit)
    llm_predictor = LLMPredictor(llm=ChatOpenAI(temperature=0.7,
                                                model_name="gpt-3.5-turbo",
                                                max_tokens=num_outputs))
    sc = ServiceContext.from_defaults(llm_predictor=llm_predictor,
                                      prompt_helper=prompt_helper)

    # download_loader returns the loader class, so keep the reference it returns.
    SimpleDirectoryReader = download_loader('SimpleDirectoryReader')
    documents = SimpleDirectoryReader(input_files=[file_path]).load_data()

    # Build the vector index over the loaded documents.
    index = GPTVectorStoreIndex.from_documents(documents, service_context=sc)

    # save: persist the index to disk so it can be reused on later runs
    index.storage_context.persist(persist_dir="./storage")

    # load: rebuild the index object from the persisted storage
    storage_context = StorageContext.from_defaults(persist_dir="./storage")
    index = load_index_from_storage(storage_context, service_context=sc)
    return index


index = build_index(file_path=file_path)
query_engine = index.as_query_engine(similarity_top_k=5)
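With the query engine in place, a minimal usage sketch looks like the following; the question string is only an illustrative placeholder, and an OpenAI API key is assumed to be available in the environment:

# Ask the engine a question; the top 5 most similar chunks are retrieved as context.
response = query_engine.query("What are the main topics covered in this document?")
print(response)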