Hi, I'm having an issue when trying to use a vector index with the SubQuestionQueryEngine - it always errors out :/ Here's my setup:
import os
import openai
os.environ['OPENAI_API_KEY'] = "sk-***"
openai.api_key = os.environ["OPENAI_API_KEY"]
from llama_index import Document, SummaryIndex, VectorStoreIndex, SimpleDirectoryReader
from llama_index.tools import QueryEngineTool, ToolMetadata
from llama_index.query_engine import SubQuestionQueryEngine
from llama_index.callbacks import CallbackManager, LlamaDebugHandler
from llama_index import ServiceContext
from llama_index.question_gen.llm_generators import LLMQuestionGenerator
from qdrant_client import QdrantClient
from llama_index.vector_stores.qdrant import QdrantVectorStore
# Using the LlamaDebugHandler to print the trace of the sub questions
# captured by the SUB_QUESTION callback event type
llama_debug = LlamaDebugHandler(print_trace_on_end=True)
callback_manager = CallbackManager([llama_debug])
service_context = ServiceContext.from_defaults(
    callback_manager=callback_manager
)
vector_index = VectorStoreIndex.from_documents(
    documents=[
        Document(text="we have the color green for trees")
    ],
    service_context=service_context
)
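# Side note: the QdrantClient / QdrantVectorStore imports above aren't actually
# used in this snippet, so the index is purely in-memory. If the goal is to back
# the vector index with Qdrant, a rough sketch (assuming the legacy llama_index
# StorageContext API and an in-memory Qdrant client - the collection name is
# just illustrative) would be:
#
#     client = QdrantClient(location=":memory:")
#     vector_store = QdrantVectorStore(client=client, collection_name="trees")
#     storage_context = StorageContext.from_defaults(vector_store=vector_store)
#     vector_index = VectorStoreIndex.from_documents(
#         documents=[Document(text="we have the color green for trees")],
#         storage_context=storage_context,
#         service_context=service_context,
#     )
#
# (StorageContext would also need to be imported from llama_index.)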
vector_query_engine = vector_index.as_query_engine()
# setup base query engine as tool
query_engine_tools = [
    QueryEngineTool(
        query_engine=vector_query_engine,
        metadata=ToolMetadata(
            name="tree_colors",
            description="Everything around trees",
        ),
    ),
]
query_engine = SubQuestionQueryEngine.from_defaults(
    query_engine_tools=query_engine_tools,
    service_context=service_context,
    question_gen=LLMQuestionGenerator.from_defaults(
        service_context=service_context
    )
)
response = query_engine.query(
    "What is the color for trees?"
)
print(response)
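For completeness, this is roughly how the captured sub-questions could be printed from the debug handler once the query goes through - a minimal sketch based on the SubQuestionQueryEngine docs example, assuming the legacy llama_index.callbacks.schema API (CBEventType, EventPayload):

from llama_index.callbacks.schema import CBEventType, EventPayload

# iterate through the sub_question items captured by the SUB_QUESTION event
for i, (start_event, end_event) in enumerate(
    llama_debug.get_event_pairs(CBEventType.SUB_QUESTION)
):
    qa_pair = end_event.payload[EventPayload.SUB_QUESTION]
    print("Sub Question " + str(i) + ": " + qa_pair.sub_q.sub_question.strip())
    # answer can be None if a sub-question failed, so guard before stripping
    print("Answer: " + (qa_pair.answer or "").strip())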