# Configure a custom text-QA prompt and attach it to the index's query engine.
from llama_index.core import PromptTemplate

# Prompt layout: a dashed-rule-framed context block, then the question,
# then a tone instruction. {context_str} and {query_str} are the two
# placeholders PromptTemplate fills in at query time.
template = (
    "We have provided context information below. \n"
    "---------------------\n"
    "{context_str}\n"
    "---------------------\n"
    "Given this information, please answer the question: {query_str}\n"
    "ANSWER in salesman style"
)

qa_template = PromptTemplate(template)

# Override the default text-QA prompt with the salesman-style template.
# NOTE(review): `index` is assumed to be defined elsewhere in this file.
query_engine = index.as_query_engine(text_qa_template=qa_template)