# Query config for a "simple_dict" (vector) index: default query mode,
# retrieving the top 3 most similar nodes. (Trailing comma is valid Python,
# not JSON — this is a Python dict literal.)
{ "index_struct_type": "simple_dict", "query_mode": "default", "query_kwargs": { "similarity_top_k": 3, }, }
# Compose the per-document indices (index_arr) into a single queryable graph,
# with GPTListIndex as the root index type and one summary per child index.
# NOTE(review): index_arr / summaries are defined outside this snippet — their
# lengths are presumably aligned one-to-one; verify against the caller.
graph = ComposableGraph.from_indices( GPTListIndex, index_arr, index_summaries=summaries, service_context=self.service_context )
# Build a simple vector index over `documents` using the shared service context.
# NOTE(review): the returned index is discarded here — confirm it is assigned
# or persisted in the surrounding (not shown) code.
GPTSimpleVectorIndex.from_documents(documents, service_context=self.service_context)
[ { "node": { "doc_hash": "5246e5461ab428ab6238789afd48826009d87ccf39a43750885918911215471d", "doc_id": "ab561760-1429-4f8a-b1b5-08611391a1a3", "embedding": null, "extra_info": null, "node_info": null, "relationships": {}, "text": "This seems to be the correct answer" }, "score": null }, { "node": { "doc_hash": "ea8067f36510cf58342c3b80cb01c8be736745819d21a65c974274e56ca18dc2", "doc_id": "29cddf2b-cbf8-4ed9-9dd2-6a2c77168754", "embedding": null, "extra_info": null, "node_info": null, "relationships": {}, "text": "Sorry, there is still no information provided in the given context about the question" }, "score": null } ]
Original answer still stands as the new context does not provide any information
from langchain.prompts.chat import (
    AIMessagePromptTemplate,
    ChatPromptTemplate,
    HumanMessagePromptTemplate,
)

from llama_index.prompts.prompts import RefinePrompt

# Refine Prompt, framed as a chat exchange: the user's original query, the
# assistant's existing answer as its own prior turn, then a follow-up message
# asking the model to update that answer with newly retrieved context
# (only if the new context actually adds information).
CHAT_REFINE_PROMPT_TMPL_MSGS = [
    HumanMessagePromptTemplate.from_template("{query_str}"),
    AIMessagePromptTemplate.from_template("{existing_answer}"),
    HumanMessagePromptTemplate.from_template(
        "I have more context below which can be used "
        "(only if needed) to update your previous answer.\n"
        "------------\n"
        "{context_msg}\n"
        "------------\n"
        "Given the new context, update the previous answer to better "
        # BUG FIX: trailing space added — the adjacent literals previously
        # concatenated into "...my previous query.If the previous answer...",
        # fusing two sentences in the prompt sent to the model.
        "answer my previous query. "
        "If the previous answer remains the same, repeat it verbatim. "
        "Never reference the new context or my previous query directly.",
    ),
]
CHAT_REFINE_PROMPT_LC = ChatPromptTemplate.from_messages(CHAT_REFINE_PROMPT_TMPL_MSGS)
CHAT_REFINE_PROMPT = RefinePrompt.from_langchain_prompt(CHAT_REFINE_PROMPT_LC)

...

# Per-index query configs: the vector ("simple_dict") index retrieves the
# top-3 similar nodes; both it and the list index use the chat-style refine
# prompt defined above in place of the default refine template.
query_configs = [
    {
        "index_struct_type": "simple_dict",
        "query_mode": "default",
        "query_kwargs": {
            "similarity_top_k": 3,
            "refine_template": CHAT_REFINE_PROMPT,
        },
    },
    {
        "index_struct_type": "list",
        "query_mode": "default",
        "query_kwargs": {"refine_template": CHAT_REFINE_PROMPT},
    },
]