Hey guys, I have this code where I use the chat engine I set up earlier, based on the documentation here:
https://docs.llamaindex.ai/en/stable/examples/docstore/MongoDocstoreDemo.html

It answers correctly using the storage_context, but after I restart my server it still answers, only it no longer answers from the context and just uses the system_prompt. I've tried a lot of things but still can't figure out why the storage_context doesn't fully work after the restart, even though the index seems to load correctly. (For reference, I've also put a sketch of how the index was originally built below the code.)
def initialize_confluence_chat_engine(self, team_id):
    try:
        team_document = user_collection.find_one({'team_id': team_id})
        if not team_document or 'confluence_index_id' not in team_document:
            raise ValueError("Confluence settings have not been configured for this team.")

        confluence_index_id = team_document['confluence_index_id']
        storage_context = storage_context_manager.get_storage_context(team_id, mongo_uri, mongo_db_name)
        confluence_space_index_from_storage = load_index_from_storage(storage_context, index_id=confluence_index_id)

        space_names = team_document.get('space_names', '')
        chat_engine = confluence_space_index_from_storage.as_chat_engine(
            chat_mode="context",
            service_context=service_context,
            refine_template=DEFAULT_REFINE_PROMPT,
            system_prompt=f"""Your name is Tiko. You're set up to assist with the Confluence spaces: {space_names}...""",
            response_synthesizer=response_synthesizer,
            similarity_top_k=5
        )
        return chat_engine
    except Exception as e:
        logging.error(f"An error occurred while initializing the Confluence chat engine: {e}")
        traceback.print_exc()
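
For reference, here is roughly how the index gets built and persisted on the first run, following the MongoDocstoreDemo. This is a simplified sketch: `documents` stands in for the loaded Confluence pages, and my real storage_context_manager just wraps the same MongoDocumentStore / MongoIndexStore setup (pre-0.10 llama_index imports).

# Simplified first-run build (assumptions: `documents` holds the Confluence
# pages; same mongo_uri / mongo_db_name as in the method above).
from llama_index import StorageContext, VectorStoreIndex
from llama_index.storage.docstore import MongoDocumentStore
from llama_index.storage.index_store import MongoIndexStore

storage_context = StorageContext.from_defaults(
    docstore=MongoDocumentStore.from_uri(uri=mongo_uri, db_name=mongo_db_name),
    index_store=MongoIndexStore.from_uri(uri=mongo_uri, db_name=mongo_db_name),
)

# Build the index; docstore and index store records are written to MongoDB.
confluence_space_index = VectorStoreIndex.from_documents(
    documents,
    storage_context=storage_context,
    service_context=service_context,
)

# Save the index_id per team so it can be passed to load_index_from_storage later.
user_collection.update_one(
    {'team_id': team_id},
    {'$set': {'confluence_index_id': confluence_space_index.index_id}},
)

The main assumption here is that the docstore and index store are backed by MongoDB, like in the demo, so the nodes and index metadata should survive a restart.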