def build_index(file_path):
    """Build a vector index over ``file_path``, persist it to ``./storage``,
    and return the index reloaded from that storage directory.

    Relies on module-level configuration (``max_input_size``, ``num_outputs``,
    ``max_chunk_overlap``, ``chunk_size_limit``) being defined elsewhere in
    this file — TODO confirm they are set before this is called.

    Parameters
    ----------
    file_path : str
        Path to the document to index.

    Returns
    -------
    The index loaded back from ``./storage``, configured with the same
    service context used to build it.
    """
    prompt_helper = PromptHelper(
        max_input_size,
        num_outputs,
        max_chunk_overlap,
        chunk_size_limit=chunk_size_limit,
    )
    llm_predictor = LLMPredictor(
        llm=ChatOpenAI(
            temperature=0.7,
            model_name="gpt-3.5-turbo",
            max_tokens=num_outputs,
        )
    )
    sc = ServiceContext.from_defaults(
        llm_predictor=llm_predictor,
        prompt_helper=prompt_helper,
    )

    download_loader('SimpleDirectoryReader')
    documents = SimpleDirectoryReader(input_files=[file_path]).load_data()
    index = GPTVectorStoreIndex.from_documents(documents, service_context=sc)

    # save — the original had one variant that skipped this persist call and
    # then tried to load from ./storage anyway, which would read stale or
    # missing data; persisting before loading is required.
    index.storage_context.persist(persist_dir="./storage")

    # load — pass service_context so the reloaded index reuses the same LLM
    # and prompt settings it was built with (two of the pasted variants
    # omitted this argument).
    storage_context = StorageContext.from_defaults(persist_dir="./storage")
    index = load_index_from_storage(storage_context, service_context=sc)
    return index


# NOTE(review): file_path is presumably defined earlier in the file — confirm.
index = build_index(file_path=file_path)
query_engine = index.as_query_engine(similarity_top_k=5)
# Ask a follow-up question with conversational context (few-shot history).
# Capture the response — the original expression discarded the return value,
# wasting the API call and its tokens.
response = openai.ChatCompletion.create(
    model="gpt-3.5-turbo",
    messages=[
        {"role": "system", "content": "You are a helpful assistant."},
        {"role": "user", "content": "Who won the world series in 2020?"},
        {"role": "assistant", "content": "The Los Angeles Dodgers won the World Series in 2020."},
        {"role": "user", "content": "Where was it played?"},
    ],
)
def chatbot(prompt):
    """Route ``prompt`` through the module-level query engine.

    The prompt is tagged with a persona and a language code before being
    submitted, producing ``"superhero:en:<prompt>"``.
    """
    persona = "superhero"
    lang_code = "en"
    tagged_prompt = ":".join((persona, lang_code, prompt))
    return query_engine.query(tagged_prompt)