>>> from llama_index.postprocessor.tei_rerank import TextEmbeddingInference as TEIR
>>> from llama_index.core.schema import TextNode, NodeWithScore
>>> nodes = [
...     NodeWithScore(score=1.0, node=TextNode(text="dog")),
...     NodeWithScore(score=1.0, node=TextNode(text="cat")),
...     NodeWithScore(score=1.0, node=TextNode(text="cow")),
... ]
>>> reranker = TEIR(top_n=2, base_url="http://127.0.0.1:8081")
>>> reranker.postprocess_nodes(nodes, query_str="dog dog")[0].text
'dog'
try:
    logger.debug("Setting up reranker")
    logger.debug(config.RERANKER_BASE_URL)
    reranker = TEIR(top_n=10, base_url=config.RERANKER_BASE_URL)
except Exception as e:
    logger.error(f"Reranker error: {e}")
query_engine = CitationQueryEngine.from_args(
    index,
    node_postprocessors=[reranker],
    similarity_top_k=similarity_top_k,
    llm=llm,
)
inputs must have less than 512 tokens. Given: 866
inputs must have less than 512 tokens. Given: 694
inputs must have less than 512 tokens. Given: 866
inputs must have less than 512 tokens. Given: 866
inputs must have less than 512 tokens. Given: 866
inputs must have less than 512 tokens. Given: 694
inputs must have less than 512 tokens. Given: 866
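These errors come back from the TEI server itself: the rerank model accepts at most 512 tokens per input, and each query-plus-chunk pair the reranker sends is longer than that. Below is a minimal sketch of one possible mitigation, assuming the index is built from a `documents` list elsewhere in the code: split documents into smaller chunks at indexing time so each node stays well under the model's limit. The chunk_size of 256 is an arbitrary assumption, not a recommended value.

# Sketch only: re-chunk the corpus so each node fits the reranker's 512-token cap.
# `documents` is assumed to be the same document list used to build the index above.
from llama_index.core import VectorStoreIndex
from llama_index.core.node_parser import SentenceSplitter

splitter = SentenceSplitter(chunk_size=256, chunk_overlap=20)  # assumed value, well under 512
index = VectorStoreIndex.from_documents(documents, transformations=[splitter])

Alternatively, if truncating long chunks is acceptable, the text-embeddings-inference server can be started with its --auto-truncate option so over-long inputs are cut to the model's maximum length instead of being rejected.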