# Build a token-limited chat memory from an existing history.
# NOTE(review): `chat_history` and `history_limit` are defined outside this
# fragment — presumably a list of ChatMessage objects and an int token budget;
# confirm against the surrounding code.
chatmemory = ChatMemoryBuffer.from_defaults(chat_history=chat_history, token_limit=history_limit)
# Inspect how many messages are held in the underlying chat store.
# `get_all()` would return the same full history; `get()` applies the token
# limit and may return fewer messages (see the REPL example below).
# This bare expression only displays a value in a REPL/notebook; in a script
# it has no effect unless wrapped in print().
len(chatmemory.chat_store.store['chat_history'])
>>> from llama_index.core.memory import ChatMemoryBuffer
>>> from llama_index.core.llms import ChatMessage
>>> memory = ChatMemoryBuffer.from_defaults(token_limit=100)
>>> message = ChatMessage(role="user", content="a "*10)
>>> for _ in range(20):
...     memory.put(message)
...
>>> len(memory.get())
9
>>> len(memory.get_all())
20
>>>