Find answers from the community

Updated last year

Hello folks, how are you doing??

Hello folks, how are you doing??
I am using FnRetrieverOpenAIAgent to get information about different query engines; the problem is that, when I enable verbose mode, I see that it makes a summary of the text that I am retrieving.
And I don't like how this summary is done. Is there a way to use only the retrieved context directly, without OpenAI making a summary first?
I tried ContextRetrieverOpenAIAgent, but since it also has the retriever as a required argument, it does the same thing.
e
4 comments
Here is my code so I can give you some more context:
Plain Text
class ChatBot:
    """Chat agent that answers questions using per-project query-engine tools.

    Builds a ``ContextRetrieverOpenAIAgent`` over the tools returned by
    ``get_query_engines(project_id)``, retrieving the most relevant tools
    per question via an ``ObjectIndex``, with token counting and a bounded
    chat-memory buffer.
    """

    def __init__(
        self,
        user_id: str,
        project_id: str,
        llm: "OpenAI | None" = None,
        chat_history: "List[ChatMessage] | None" = None,
        system_prompt: str = "",
    ) -> None:
        """Initialize the agent for one user/project pair.

        Args:
            user_id: Identifier of the chatting user (stored, not otherwise
                used in this block).
            project_id: Project whose collections become query-engine tools.
            llm: LLM to drive the agent. Defaults to a fresh streaming
                gpt-3.5-turbo client created per instance.
            chat_history: Prior messages to seed the memory buffer with.
                Defaults to an empty, per-instance list.
            system_prompt: System prompt forwarded to the agent.
        """
        # NOTE(review): the original signature used mutable/shared defaults —
        # a single OpenAI client built at class-definition time and one []
        # shared by every instance. Both are now created per instance.
        if llm is None:
            llm = OpenAI(temperature=0.1, model="gpt-3.5-turbo", streaming=True)
        if chat_history is None:
            chat_history = []

        self._user_id = user_id
        self.project_id = project_id
        self.system_prompt = system_prompt

        # Bounded memory so the conversation stays within the token budget.
        self._memory = ChatMemoryBuffer.from_defaults(
            token_limit=2000, chat_history=chat_history
        )
        self._queries_engines = get_query_engines(
            project_id=project_id,
        )

        # Index the tools so the agent can retrieve the most relevant ones
        # for each incoming question.
        tool_mapping = SimpleToolNodeMapping.from_objects(self._queries_engines)
        self.obj_index = ObjectIndex.from_objects(
            self._queries_engines,
            tool_mapping,
            VectorStoreIndex,
        )

        # Count tokens with the same tokenizer the target model uses.
        self.token_counter = TokenCountingHandler(
            tokenizer=tiktoken.encoding_for_model("gpt-3.5-turbo").encode,
        )
        self.callback_manager = CallbackManager([self.token_counter])
        self.llm = llm

        self._agent = ContextRetrieverOpenAIAgent.from_tools_and_retriever(
            tools=self._queries_engines,
            retriever=self.obj_index.as_retriever(similarity_top_k=5),
            llm=self.llm,
            callback_manager=self.callback_manager,
            memory=self._memory,
            system_prompt=system_prompt,
            verbose=True,
        )

        self._chat_history = chat_history
The get_query_engines code is:
Plain Text
def get_query_engines(project_id):
    """Build one QueryEngineTool per non-deleted collection of *project_id*.

    Each collection gets a Chroma-backed vector index, a top-7 retriever,
    and a query-engine tool named after the collection (restricted to the
    character set OpenAI accepts for tool names).
    """
    # OpenAI rejects query-engine tool names with anything outside [a-zA-Z0-9_-].
    invalid_chars = re.compile("[^a-zA-Z0-9_-]")
    tools = []
    for collection in get_collections(project_id):
        if collection["deleted"]:
            continue
        collection_name = sanitize_collection_name(
            f"{project_id}-{collection['name']}"
        )
        chroma_collection = chroma_client.get_or_create_collection(
            collection_name, embedding_function=openai_ef
        )
        vector_store = ChromaVectorStore(chroma_collection=chroma_collection)
        index = VectorStoreIndex.from_vector_store(
            vector_store=vector_store,
            verbose=True,
        )
        retriever = VectorIndexRetriever(index=index, similarity_top_k=7)
        query_engine = RetrieverQueryEngine(retriever=retriever)
        tool = QueryEngineTool.from_defaults(
            query_engine=query_engine,
            name=invalid_chars.sub("", collection["name"]),
            description=collection["description"],
        )
        print(collection["description"])
        tools.append(tool)
    return tools
I tried with OpenAIAgent and I get the same summary.
I just want a tool that passes the question, the prompt, and the context straight to the OpenAI API.
Add a reply
Sign up and join the conversation on Discord