Also, unrelated to the above, your code still has some minor errors; here is a cleaned-up version:
from llama_index import (GPTVectorStoreIndex, LLMPredictor, PromptHelper, ServiceContext,
                         SimpleDirectoryReader, StorageContext, load_index_from_storage)
from langchain.chat_models import ChatOpenAI

def construct_index(directory_path):
    max_input_size = 4096
    num_outputs = 512
    max_chunk_overlap = 0.08
    chunk_size_limit = 600
    # Assuming the locals above were meant to be passed to PromptHelper; the exact parameter
    # names (context_window / num_output / chunk_overlap_ratio) depend on your llama_index version.
    prompt_helper = PromptHelper(max_input_size, num_outputs, max_chunk_overlap,
                                 chunk_size_limit=chunk_size_limit)
    llm_predictor = LLMPredictor(llm=ChatOpenAI(
        temperature=0.7, model_name="gpt-3.5-turbo", max_tokens=num_outputs))
    documents = SimpleDirectoryReader(directory_path).load_data()
    service_context = ServiceContext.from_defaults(
        llm_predictor=llm_predictor, chunk_size=chunk_size_limit, prompt_helper=prompt_helper)
    index = GPTVectorStoreIndex.from_documents(documents, service_context=service_context)
    # Persist so chatbot() can reload the index without re-embedding the documents.
    index.storage_context.persist(persist_dir="./storage")
    return index

def chatbot(input_text):
    # Reload the persisted index from ./storage and query it with a compact response mode.
    index = load_index_from_storage(StorageContext.from_defaults(persist_dir="./storage"))
    response = index.as_query_engine(response_mode="compact").query(input_text)
    return response.response
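
For completeness, a minimal sketch of how the two functions would be wired together, assuming your documents live in a hypothetical ./docs folder and OPENAI_API_KEY is already set in your environment:

# Build and persist the index once (re-run only when the documents change),
# then query the persisted index as often as needed.
construct_index("./docs")
print(chatbot("What topics do these documents cover?"))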