
Here is my LlamaIndex version, which does not work:

Python
## LlamaIndex version of the above code
import os

from llama_index.core import Settings, VectorStoreIndex
from llama_index.core.query_engine import RetrieverQueryEngine
from llama_index.embeddings.huggingface import HuggingFaceEmbedding
from llama_index.llms.azure_openai import AzureOpenAI
from llama_index.vector_stores.neo4jvector import Neo4jVectorStore
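
# Connection settings for Neo4j and Azure OpenAI. The environment variable names
# below are placeholders added so the example is self-contained; in the original
# post these values are defined elsewhere.
url = os.environ["NEO4J_URI"]
username = os.environ["NEO4J_USERNAME"]
password = os.environ["NEO4J_PASSWORD"]
AZURE_API_KEY = os.environ["AZURE_OPENAI_API_KEY"]
AZURE_OPENAI_ENDPOINT = os.environ["AZURE_OPENAI_ENDPOINT"]
AZURE_API_VERSION = os.environ["AZURE_OPENAI_API_VERSION"]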

# 384-dimension embedding model; the dimension must match embedding_dimension below
embed_model = HuggingFaceEmbedding(model_name="sentence-transformers/all-MiniLM-L6-v2")

# Collapse all retrieved chunks of a Document into one text block and expose the
# document URL or file name as `source` metadata
retrieval_query = """
MATCH (node)-[:PART_OF]->(d:Document)
WITH d, apoc.text.join(collect(node.text), "\n----\n") AS text, avg(score) AS score
RETURN text, score,
       {source: COALESCE(CASE WHEN d.url CONTAINS "None" THEN d.fileName ELSE d.url END, d.fileName)} AS metadata
"""


# Connect to the existing "vector" index in Neo4j and apply the custom retrieval query
neo_db = Neo4jVectorStore(
    embedding=embed_model,
    embedding_dimension=384,
    url=url,
    username=username,
    password=password,
    database="neo4j",
    index_name="vector",
    retrieval_query=retrieval_query,
    text_node_property="text",
    # keyword_index_name="Term"
)

#index = VectorStoreIndex(
#    vector_store=neo_db,
#    embed_model=embed_model
#)

# Pass embed_model explicitly; otherwise LlamaIndex falls back to its default
# (OpenAI) embedding model when embedding queries, which will not match the
# 384-dimension index
index = VectorStoreIndex.from_vector_store(neo_db, embed_model=embed_model)
retriever = index.as_retriever()

# Azure OpenAI chat deployment used to synthesize answers
llm = AzureOpenAI(
    deployment_name="gpt35",
    temperature=0,
    api_key=AZURE_API_KEY,
    azure_endpoint=AZURE_OPENAI_ENDPOINT,
    api_version=AZURE_API_VERSION,
)

Settings.llm = llm

#query_engine = RetrieverQueryEngine(
#    retriever=retriever,
#)

query_engine = index.as_query_engine()

response = query_engine.query("What is a bias audit?")
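
# Hypothetical follow-up (not part of the original post): print the answer and the
# `source` metadata returned by the custom retrieval_query, assuming the query call
# above succeeds
print(response)
for node_with_score in response.source_nodes:
    print(node_with_score.score, node_with_score.node.metadata.get("source"))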
1 comment
Probably helpful to state what about your LlamaIndex code doesn't work; it seems fine at first glance.