import faiss
from llama_index import (
    ServiceContext,
    SimpleDirectoryReader,
    StorageContext,
    VectorStoreIndex,
)
from llama_index.node_parser import SimpleNodeParser
from llama_index.text_splitter import TokenTextSplitter
from llama_index.vector_stores import FaissVectorStore

# Split documents on the custom "*$$$*" delimiter, falling back to
# whitespace/newlines. (An empty-string backup separator is invalid --
# str.split("") raises ValueError -- so it is replaced with " " here.)
text_splitter = TokenTextSplitter(
    separator="*$$$*",
    chunk_size=1024,
    chunk_overlap=20,
    backup_separators=[" ", "\n"],
)
node_parser = SimpleNodeParser.from_defaults(text_splitter=text_splitter)
service_context = ServiceContext.from_defaults(node_parser=node_parser)

# Load the raw documents from ./data.
documents = SimpleDirectoryReader("./data").load_data()

# Flat L2 FAISS index over 1536-dimensional vectors, the dimensionality
# of OpenAI's text-embedding-ada-002 (the default embedding model).
d = 1536
faiss_index = faiss.IndexFlatL2(d)
vector_store = FaissVectorStore(faiss_index=faiss_index)
storage_context = StorageContext.from_defaults(vector_store=vector_store)

# Build the vector index and persist it to ./indexes.
index = VectorStoreIndex.from_documents(
    documents=documents,
    storage_context=storage_context,
    service_context=service_context,
    show_progress=True,
)
index.storage_context.persist(persist_dir="./indexes")
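# A minimal sketch of reloading the persisted index and querying it,
# assuming the same ./indexes directory and the same legacy (pre-0.10)
# llama_index API used above; the query string is only a placeholder.
from llama_index import load_index_from_storage

vector_store = FaissVectorStore.from_persist_dir("./indexes")
storage_context = StorageContext.from_defaults(
    vector_store=vector_store,
    persist_dir="./indexes",
)
index = load_index_from_storage(
    storage_context=storage_context,
    service_context=service_context,
)

query_engine = index.as_query_engine()
response = query_engine.query("What does the document say about chunking?")
print(response)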