# Text-QA prompt for the synthesizer: the retrieved context is injected at
# {context_str} and the user's question at {query_str} (LlamaIndex fills both).
custom_qa_prompt = (
    "We have provided context information below. \n"
    "---------------------\n"
    "{context_str}\n"
    "---------------------\n"
    "Given this information, please answer the question: {query_str}\n"
)
custom_qa_template = PromptTemplate(custom_qa_prompt)

# Build the response synthesizer with the LLM looked up for the selected
# model key, answering via the custom QA template above.
# NOTE(review): assumes LLM_INSTANCES maps model identifiers to ready LLM
# objects — confirm against where LLM_INSTANCES is populated.
response_synthesizer = get_response_synthesizer(
    llm=LLM_INSTANCES[model],
    text_qa_template=custom_qa_template,
)