from llama_index import ServiceContext

# Build a ServiceContext with a custom system prompt. The system prompt is
# prepended to every request the index sends to the LLM.
# NOTE(review): the original snippet passed a bare `...` after the keyword
# argument, which is a syntax error — replaced with a placeholder comment.
service_context = ServiceContext.from_defaults(
    system_prompt="Define System prompt here!!!",
    # ... add any other ServiceContext keyword arguments here ...
)
doc = Document(text="hi",doc_id="UNIQUE_ID_HERE") # set a unique id at creation; it can also be changed later via doc.doc_id = "Change_ID"
The `system_prompt` is an extra instruction that is sent to the LLM together with your query text (alongside any other instructions you may add) every time you call the `.query()` method. It remains the same until you change it and call `.query()` again.
def custom_query(self, query_str: str):
    """Retrieve context relevant to *query_str* and ask the LLM to answer from it.

    Args:
        query_str: The user's natural-language question.

    Returns:
        The LLM's answer as a string.
    """
    # Fetch the nodes most relevant to the query via the configured retriever.
    nodes = self.retriever.retrieve(query_str)
    # Concatenate the retrieved node contents into one context string.
    context_str = "\n\n".join([n.node.get_content() for n in nodes])
    # Fill the QA prompt template and request a completion from the LLM.
    # NOTE(review): `qa_prompt` is assumed to be a module-level PromptTemplate
    # defined elsewhere in the file — confirm.
    response = self.llm.complete(
        qa_prompt.format(context_str=context_str, query_str=query_str)
    )
    # The original one-liner computed `response` but never returned it; the
    # canonical CustomQueryEngine example returns the stringified completion.
    return str(response)