GeneralCTR
Hi, has anyone else come across this error:
No existing llama_index.storage.kvstore.simple_kvstore found at ./index.json/docstore.json.

I am importing these with from llama_index import StorageContext, load_index_from_storage. Any ideas?
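
For reference, a minimal sketch of the persist-then-load flow this error usually points to, assuming the legacy (pre-0.10) llama_index API that these top-level imports come from; ./documents and ./storage are placeholder paths:

from llama_index import (GPTListIndex, SimpleDirectoryReader,
                         StorageContext, load_index_from_storage)

# build the index and write it to disk (this creates docstore.json among other files)
documents = SimpleDirectoryReader("./documents").load_data()
index = GPTListIndex.from_documents(documents)
index.storage_context.persist(persist_dir="./storage")

# later: point StorageContext at the same directory and reload
storage_context = StorageContext.from_defaults(persist_dir="./storage")
index = load_index_from_storage(storage_context)

The error generally appears when nothing has actually been persisted at the directory passed to StorageContext.from_defaults, so the docstore.json it expects is missing.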
3 comments
Hi all, when using the following code:

from llama_index import (SimpleDirectoryReader, GPTListIndex, readers,
                         LLMPredictor, PromptHelper, ServiceContext)

from llama_index import StorageContext, load_index_from_storage
from langchain import OpenAI

import os
import pickle
from dotenv import load_dotenv

load_dotenv()
os.environ['OPENAI_API_KEY'] = os.getenv("OPENAI_API_KEY")
if not os.environ.get('OPENAI_API_KEY'):
    raise ValueError("OPENAI_API_KEY is not set in the .env file.")

documents = SimpleDirectoryReader("./documents/Book1.pdf").load_data()
index = GPTListIndex.from_documents(documents)
index.storage_context.persist_dir = "./my_index"


storage_context = StorageContext.from_defaults(persist_dir="./index.json")


max_input_size = 4096
num_outputs = 2000
max_chunk_overlap = 20
chunk_size_limit = 600

prompt_helper = PromptHelper(
    max_input_size, num_outputs, max_chunk_overlap, chunk_size_limit=chunk_size_limit)
llm_predictor = LLMPredictor(llm=OpenAI(
    temperature=0, model_name="gpt-3.5-turbo-0301", max_tokens=num_outputs))

service_context = ServiceContext.from_defaults(
    llm_predictor=llm_predictor, prompt_helper=prompt_helper)

# load index

index = load_index_from_storage(storage_context)

query_engine = index.as_query_engine(
    service_context=service_context, verbose=True, response_mode="compact")

I get the following error:

No existing llama_index.storage.kvstore.simple_kvstore found at ./index.json/docstore.json.
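
For reference, the load here reads from ./index.json while the index was never explicitly persisted: assigning to index.storage_context.persist_dir does not appear to write anything to disk, and the directory it names (./my_index) does not match the one passed to StorageContext.from_defaults (./index.json) in any case. A minimal sketch of pairing persist and load on the same directory (the path is a placeholder):

# persist explicitly, then load from the same directory
index.storage_context.persist(persist_dir="./my_index")

storage_context = StorageContext.from_defaults(persist_dir="./my_index")
index = load_index_from_storage(storage_context)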
2 comments