Traceback (most recent call last):
File "/home/anton/dev/tmp/litest/test.py", line 1, in <module>
from llama_index.llms import LlamaCPP
File "/home/anton/dev/tmp/litest/.venv/lib/python3.11/site-packages/llama_index/init.py", line 20, in <module>
from llama_index.indices.keyword_table import (
File "/home/anton/dev/tmp/litest/.venv/lib/python3.11/site-packages/llama_index/indices/init.py", line 4, in <module>
from llama_index.indices.keyword_table.base import (
File "/home/anton/dev/tmp/litest/.venv/lib/python3.11/site-packages/llama_index/indices/keyword_table/init.py", line 4, in <module>
from llama_index.indices.keyword_table.base import (
File "/home/anton/dev/tmp/litest/.venv/lib/python3.11/site-packages/llama_index/indices/keyword_table/base.py", line 18, in <module>
from llama_index.indices.base import BaseIndex
File "/home/anton/dev/tmp/litest/.venv/lib/python3.11/site-packages/llama_index/indices/base.py", line 6, in <module>
from llama_index.chat_engine.types import BaseChatEngine, ChatMode
File "/home/anton/dev/tmp/litest/.venv/lib/python3.11/site-packages/llama_index/chat_engine/init.py", line 1, in <module>
from llama_index.chat_engine.condense_question import CondenseQuestionChatEngine
File "/home/anton/dev/tmp/litest/.venv/lib/python3.11/site-packages/llama_index/chat_engine/condense_question.py", line 5, in <module>
from llama_index.chat_engine.types import (
File "/home/anton/dev/tmp/litest/.venv/lib/python3.11/site-packages/llama_index/chat_engine/types.py", line 11, in <module>
from llama_index.memory import BaseMemory
File "/home/anton/dev/tmp/litest/.venv/lib/python3.11/site-packages/llama_index/memory/init.py", line 1, in <module>
from llama_index.memory.chat_memory_buffer import ChatMemoryBuffer
File "/home/anton/dev/tmp/litest/.venv/lib/python3.11/site-packages/llama_index/memory/chat_memory_buffer.py", line 13, in <module>
class ChatMemoryBuffer(BaseMemory):
File "/home/anton/dev/tmp/litest/.venv/lib/python3.11/site-packages/llama_index/memory/chat_memory_buffer.py", line 19, in ChatMemoryBuffer
default_factory=cast(Callable[[], Any], GlobalsHelper().tokenizer),
^^^^^^^^^^^^^^^^^^^^^^^^^
File "/home/anton/dev/tmp/litest/.venv/lib/python3.11/site-packages/llama_index/utils.py", line 50, in tokenizer
enc = tiktoken.get_encoding("gpt2")
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/home/anton/dev/tmp/litest/.venv/lib/python3.11/site-packages/tiktoken/registry.py", line 63, in get_encoding
enc = Encoding(**constructor())
^^^^^^^^^^^^^
File "/home/anton/dev/tmp/litest/.venv/lib/python3.11/site-packages/tiktoken_ext/openai_public.py", line 11, in gpt2
mergeable_ranks = data_gym_to_mergeable_bpe_ranks(
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/home/anton/dev/tmp/litest/.venv/lib/python3.11/site-packages/tiktoken/load.py", line 92, in data_gym_to_mergeable_bpe_ranks
encoder_json = json.loads(read_file_cached(encoder_json_file))
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/home/anton/.asdf/installs/python/3.11.4/lib/python3.11/json/init.py", line 346, in loads
return _default_decoder.decode(s)
^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/home/anton/.asdf/installs/python/3.11.4/lib/python3.11/json/decoder.py", line 337, in decode
obj, end = self.raw_decode(s, idx=_w(s, 0).end())
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/home/anton/.asdf/installs/python/3.11.4/lib/python3.11/json/decoder.py", line 353, in raw_decode
obj, end = self.scan_once(s, idx)
^^^^^^^^^^^^^^^^^^^^^^
json.decoder.JSONDecodeError: Expecting ':' delimiter: line 1 column 48690 (char 48689)
Hmm, it looks like the cached tokenizer file is corrupted? Or, actually, I have no idea — something to do with tiktoken.
Thanks. Yeah, I am not using Windows, and this was a very strange message — it looked to me as though it must have been downloading something from the web that was no longer available or had been changed/updated. Actually, rebooting my machine appears to have solved the problem (maybe a logout/login would also have worked). Even on Linux in 2023, even for things that seem utterly unrelated, your first reaction should be to reboot...
It was still happening with a completely fresh venv, though... so I feel guilty, but not too guilty! What on earth could be causing that?