ERROR:root:2 validation errors for LLMChatStartEvent messages.0 Input should be a valid dictionary or instance of ChatMessage [type=model_type, input_value=ChatMessage(role=<Message... additional_kwargs=None), input_type=ChatMessage]
messages = [ ChatMessage( role=MessageRole.SYSTEM, content=PromptTemplate(( "Você é responsável por reformular e criar uma nova questão a partir de uma questão existente.\n" + "A questão criada deverá manter a mesma qualidade e relevância da questão original.\n" + "É necessário reforumular o conteúdo para apresentar variações em estilo, complexidade e contexto.\n" + "A questão original é a seguinte:\n" + "{question}\n" + "As alternativas são:\n" + "{alternatives}\n" # "Os comentários do professor são:\n" + # "{comments}" )).format( question=parsed_question_text, alternatives=alternative_text, comments=question_comments ) ), ChatMessage( role=MessageRole.USER, content=PromptTemplate(( "Reformule a questão original e crie uma nova questão.\n" + "Retorne com a nova questão e as alternativas." )) ) ]
redis_client: TtlRedis = TtlRedis( host=os.getenv("REDIS_CLUSTER_ADDRESS", "localhost"), password=os.getenv("REDIS_CLUSTER_PASSWORD", None), port=os.getenv("REDIS_CLUSTER_PORT", 6379), ttl=86400, ) # Initialize Redis vector store vector_store = RedisVectorStore( redis_client=redis_client, schema=await create_schema(user_id, application_id) ) index = VectorStoreIndex.from_vector_store(vector_store) # Check if the index exists; create if it doesn't if not vector_store.index_exists(): # try loading from s3 since it is not in redis
{"error":"Wrong input: Vector inserting error: expected dim: 1536, got 1024"}
email_docs = process_emails_sync(filtered_unprocessed_emails, user) docstore = MongoDocumentStore.from_uri(uri=LLAMAINDEX_MONGODB_STORAGE_SRV) parser = SentenceSplitter() nodes = parser.get_nodes_from_documents(my_docs) docstore.add_documents(nodes) Settings.llm = OpenAI(model=ModelType.OPENAI_GPT_4_o_MINI.value) Settings.embed_model = OpenAIEmbedding(api_key=OPENAI_API_KEY) client = qdrant_client.QdrantClient(url=QDRANT_API_URL, api_key=QDRANT_API_TOKEN) vector_store = QdrantVectorStore(client=client, collection_name=LLAMAINDEX_QDRANT_COLLECTION_NAME) index_store = MongoIndexStore.from_uri(uri=LLAMAINDEX_MONGODB_STORAGE_SRV) storage_context = StorageContext.from_defaults(vector_store=vector_store, index_store=index_store, docstore=docstore) index = VectorStoreIndex(nodes, storage_context=storage_context, show_progress=True) index.storage_context.persist()
index_id
because a new index is created every time I run the code above. How to pass the index_id to the store so it updates existing index? Please note that I am already using doc_id
correctly to ensure upserting of documents. VectorStoreIndex(nodes, storage_context=storage_context, show_progress=True, index_id="<index_id>")
but that approach didn't work.try: # Load appropriate data based on query type json_data = load_marketing_data() if is_logo_query else load_json_data() result = await w.run( query=user_message.content, list_of_dict=json_data, llm=llm, table_name=DEFAULT_TABLE_NAME, timeout=10 )
input()
or a websocket and then resume operations. But my use case needs to be asynhronous.summary_index = DocumentSummaryIndex.from_documents(
documents=documents,
transformations=[splitter],
response_synthesizer = response_synthesizer,
extractors=[sentiment_extractor],
)
class SentimentExtractor(BaseExtractor):
def __init__(self, me, str):
print("here")
self.me = me
self.str = str
async def aextract(self, nodes: Sequence) -> List[Dict]:
metadata_list = []
for node in nodes:
generated_sentiment = {"sentiment": "Positive"} # Replace with actual LLM call
metadata_list.append(generated_sentiment)
return metadata_list
sentiment_extractor = SentimentExtractor(me = "zsdfsadf", str="hello")
ValueError: "SentimentExtractor" object has no field "me"