document = Document(script, "script")
CHAT_REFINE_PROMPT_TMPL_MSGS = [
# optionally add a system prompt
# SystemMessagePromptTemplate.from_template("Some system message"),
HumanMessagePromptTemplate.from_template("{query_str}"),
AIMessagePromptTemplate.from_template("{existing_answer}"),
HumanMessagePromptTemplate.from_template(
"We have the opportunity to refine the above answer "
"(only if needed) with some more context below.\n"
"------------\n"
"{context_msg}\n"
"------------\n"
"Given the new context, refine the original answer to better "
"answer the question. "
"If the context isn't useful, output the original answer again.",
),
]
CHAT_REFINE_PROMPT_LC = ChatPromptTemplate.from_messages(CHAT_REFINE_PROMPT_TMPL_MSGS)
CHAT_REFINE_PROMPT = Prompt.from_langchain_prompt(CHAT_REFINE_PROMPT_LC)
CHAT_QA_PROMPT_TMPL_MSGS = [
# optionally add a system prompt
# SystemMessagePromptTemplate.from_template("My system message"),
HumanMessagePromptTemplate.from_template(
"Context information is below. \n"
"------------\n"
"{context_msg}\n"
"------------\n"
"Given the context information and not prior knowledge, "
"answer the question: {query_str}\n"
),
]
CHAT_QA_PROMPT_LC = ChatPromptTemplate.from_messages(CHAT_QA_PROMPT_TMPL_MSGS)
CHAT_QA_PROMPT = Prompt.from_langchain_prompt(CHAT_QA_PROMPT_LC)
article_llm = LLMPredictor(llm=ChatOpenAI(temperature=0, model_name="gpt-3.5-turbo"))
article_service_context = ServiceContext.from_defaults(llm_predictor=article_llm, chunk_size=1024)
query_engine = GPTVectorStoreIndex.from_documents([document]).as_query_engine(text_qa_template=CHAT_QA_PROMPT, refine_template=CHAT_REFINE_PROMPT)
response = query_engine.query(script_prompt)
print(f"Script: {response}")
return response
I'm almost definitely using it wrong, I get the input from another function using a SubQuestionQueryEngine, and then pass the output response to this function to refine, but it gives me a keyerror about context_msg, which means it's just missing but I can't find any examples online