Hi, I'm using Azure OpenAI with Azure Cognitive Search as my vector database for retrieving context. I'd like to know why I'm getting this error with my code:
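(For reference, the snippet is missing its imports; against the pre-0.10, ServiceContext-based LlamaIndex API they would look roughly like this. Module paths moved between releases, so adjust to your installed version; api_key, api_base, api_type, api_version and vector_store are assumed to be defined earlier.)

from llama_index import (
    PromptHelper,
    ServiceContext,
    SimpleDirectoryReader,
    StorageContext,
    SummaryIndex,
    VectorStoreIndex,
)
from llama_index.embeddings import OpenAIEmbedding
from llama_index.llms import AzureOpenAI
from llama_index.prompts import PromptTemplate
from llama_index.query_engine import RouterQueryEngine
from llama_index.selectors import LLMMultiSelector
from llama_index.tools import QueryEngineTool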
documents = SimpleDirectoryReader("files").load_data()
llm = AzureOpenAI(
    model="gpt-35-turbo-16k",
    engine="gpt-35-turbo-16k",
    api_key=api_key,
    api_base=api_base,
    api_type=api_type,
    api_version=api_version,
)
# You need to deploy your own embedding model as well as your own chat completion model
embed_model = OpenAIEmbedding(
    model="text-embedding-ada-002",
    deployment_name="text-embedding-ada-002",
    api_key=api_key,
    api_base=api_base,
    api_type=api_type,
    api_version=api_version,
)
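Side note: the api_base/api_type keywords belong to the legacy openai 0.x client. If your environment rejects them (openai >= 1.x), the Azure-specific classes are the usual replacement. A minimal sketch, assuming llama-index 0.9.x; the engine/deployment_name values must match your Azure deployment names:

from llama_index.llms import AzureOpenAI
from llama_index.embeddings import AzureOpenAIEmbedding

llm = AzureOpenAI(
    model="gpt-35-turbo-16k",
    engine="gpt-35-turbo-16k",  # your Azure deployment name
    api_key=api_key,
    azure_endpoint=api_base,    # e.g. https://<resource>.openai.azure.com/
    api_version=api_version,
)
embed_model = AzureOpenAIEmbedding(
    model="text-embedding-ada-002",
    deployment_name="text-embedding-ada-002",  # your Azure deployment name
    api_key=api_key,
    azure_endpoint=api_base,
    api_version=api_version,
)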
prompt_helper = PromptHelper(context_window=16384, num_output=2048)
# vector_store (the Cognitive Search store) is assumed to be created earlier
storage_context = StorageContext.from_defaults(vector_store=vector_store)
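Since vector_store itself isn't shown, here is roughly how the Cognitive Search store is wired up in llama-index 0.9.x with the azure-search-documents SDK; the endpoint, search_api_key, index name, and field keys below are placeholder assumptions, not your actual configuration:

from azure.core.credentials import AzureKeyCredential
from azure.search.documents.indexes import SearchIndexClient
from llama_index.vector_stores import CognitiveSearchVectorStore
from llama_index.vector_stores.cogsearch import IndexManagement

index_client = SearchIndexClient(
    endpoint="https://<search-service>.search.windows.net",  # placeholder
    credential=AzureKeyCredential(search_api_key),           # placeholder variable
)
vector_store = CognitiveSearchVectorStore(
    search_or_index_client=index_client,
    index_name="my-index",  # placeholder
    index_management=IndexManagement.CREATE_IF_NOT_EXISTS,
    id_field_key="id",
    chunk_field_key="chunk",
    embedding_field_key="embedding",
    metadata_string_field_key="metadata",
    doc_id_field_key="doc_id",
)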
service_context = ServiceContext.from_defaults(
    embed_model=embed_model,
    prompt_helper=prompt_helper,
    llm=llm,
)
summary_text = (
    "Des informations contextuelles provenant de plusieurs sources sont présentées ci-dessous.\n"
    "---------------------\n"
    "{context_str}\n"
    "---------------------\n"
    "Étant donné les informations provenant de sources multiples et sans connaissances préalables, "
    "répondre à la requête.\n"
    "Requête : {query_str}\n"
    "Réponse : "
)
# SummaryIndex takes no summary_text or response_mode keyword arguments;
# passing them to the constructor is the likely source of the error.
# Build the index from the documents and supply the prompt and response
# mode at query time instead (see as_query_engine below).
index1 = SummaryIndex.from_documents(
    documents,
    service_context=service_context,
    storage_context=storage_context,
)
index2 = VectorStoreIndex.from_documents(
    documents,
    service_context=service_context,
    storage_context=storage_context,
)
# response_mode and summary_template are forwarded by as_query_engine
# to the tree_summarize response synthesizer
list_query_engine = index1.as_query_engine(
    response_mode="tree_summarize",
    summary_template=PromptTemplate(summary_text),
)
vector_query_engine = index2.as_query_engine(similarity_top_k=5)
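If you'd rather not rely on as_query_engine forwarding summary_template through to the synthesizer, you can build it explicitly; a sketch against the same 0.9.x API:

from llama_index import get_response_synthesizer
from llama_index.query_engine import RetrieverQueryEngine

summary_synth = get_response_synthesizer(
    response_mode="tree_summarize",
    summary_template=PromptTemplate(summary_text),
    service_context=service_context,
)
list_query_engine = RetrieverQueryEngine(
    retriever=index1.as_retriever(),
    response_synthesizer=summary_synth,
)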
list_tool = QueryEngineTool.from_defaults(
    query_engine=list_query_engine,
    description="Utile pour les questions de synthèse liées à la source de données",
)
vector_tool = QueryEngineTool.from_defaults(
    query_engine=vector_query_engine,
    description="Utile pour retrouver un contexte spécifique lié à la source de données",
)
# initialize router query engine (LLM multi-selector: may route to one or both tools)
query_engine = RouterQueryEngine(
    selector=LLMMultiSelector.from_defaults(),
    query_engine_tools=[
        list_tool,
        vector_tool,
    ],
    service_context=service_context,
)
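If each query should instead be routed to exactly one engine (the multi-selector can pick both tools and merge their answers), swap in the single selector:

from llama_index.selectors import LLMSingleSelector

query_engine = RouterQueryEngine(
    selector=LLMSingleSelector.from_defaults(),  # picks exactly one tool per query
    query_engine_tools=[list_tool, vector_tool],
    service_context=service_context,
)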
#query_engine=index1.as_query_engine()
resp = query_engine.query(
    "Fais un résumé complet des transcriptions d'appels, en français, à partir "
    "du contexte, avec une ou plusieurs conversations entre un assuré et un ou "
    "plusieurs opérateurs d'assistance d'assurance"
)
print(resp)