from llama_index import VectorStoreIndex, SimpleDirectoryReader

# Load every document in the Knowledge directory and build an in-memory index.
documents = SimpleDirectoryReader('Knowledge').load_data()
index = VectorStoreIndex.from_documents(documents)
query_engine = index.as_query_engine()

# A query engine is not callable directly; use its .query() method.
response = query_engine.query('What are these documents about?')
print(response)
from llama_index import (
    LLMPredictor,
    ServiceContext,
    SimpleDirectoryReader,
    StorageContext,
    VectorStoreIndex,
    load_index_from_storage,
)
from llama_index.llms import OpenAI

def train(directory):
    # Build an index from the documents in `directory` and persist it to disk.
    documents = SimpleDirectoryReader(directory).load_data()
    index = VectorStoreIndex.from_documents(documents)
    index.storage_context.persist()  # defaults to ./storage

# Index the Knowledge directory once, then reload it instead of re-embedding.
train('Knowledge')

# ServiceContext expects an LLMPredictor wrapping the LLM, not the raw OpenAI object.
service_context = ServiceContext.from_defaults(llm_predictor=LLMPredictor(llm=OpenAI()))
storage_context = StorageContext.from_defaults(persist_dir='./storage')
index = load_index_from_storage(storage_context, service_context=service_context)
query_engine = index.as_query_engine()
response = query_engine.query('Write me a dialogue in the style of the documents I have given you.')
print(response)
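Note that ServiceContext and LLMPredictor were removed in llama-index 0.10, so the snippet above only runs on older releases. Below is a minimal sketch of the same persist-and-reload flow against the current llama_index.core package, assuming llama-index >= 0.10; the directory and storage path are carried over from the example above:

from llama_index.core import (
    SimpleDirectoryReader,
    StorageContext,
    VectorStoreIndex,
    load_index_from_storage,
)

# Build the index and write it to ./storage.
documents = SimpleDirectoryReader('Knowledge').load_data()
index = VectorStoreIndex.from_documents(documents)
index.storage_context.persist(persist_dir='./storage')

# Later: reload the persisted index instead of re-embedding the documents.
storage_context = StorageContext.from_defaults(persist_dir='./storage')
index = load_index_from_storage(storage_context)
query_engine = index.as_query_engine()
print(query_engine.query('What are these documents about?'))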
pip install llama-index
from llama_index.core import VectorStoreIndex, SimpleDirectoryReader

documents = SimpleDirectoryReader("data").load_data()
index = VectorStoreIndex.from_documents(documents)
query_engine = index.as_query_engine()
response = query_engine.query("What did the author do growing up?")
print(response)
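To pin the model the query engine uses on current releases, the global Settings object replaces the old ServiceContext. A minimal sketch, assuming llama-index >= 0.10 with the llama-index-llms-openai package installed and OPENAI_API_KEY set in the environment; the model name is illustrative:

from llama_index.core import Settings
from llama_index.llms.openai import OpenAI

# Applies to every index and query engine created after this point.
Settings.llm = OpenAI(model="gpt-3.5-turbo", temperature=0)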