KeyError: 'Could not automatically map gpt-4o to a tokeniser. Please use `tiktoken.get_encoding` to explicitly get the tokeniser you expect.'
# Build the token-counting callback for `model_name`.
#
# `tiktoken.encoding_for_model` raises KeyError for models released after the
# installed tiktoken version (e.g. "gpt-4o" on older releases — see the error
# above). Per that message, fall back to an explicit `tiktoken.get_encoding`:
# the gpt-4o family uses "o200k_base"; earlier GPT-3.5/GPT-4 models use
# "cl100k_base".
try:
    _encoding = tiktoken.encoding_for_model(model_name)
except KeyError:
    # NOTE(review): heuristic model→encoding fallback; upgrade tiktoken to
    # get the authoritative mapping.
    _fallback = "o200k_base" if "4o" in model_name else "cl100k_base"
    _encoding = tiktoken.get_encoding(_fallback)

token_counter = TokenCountingHandler(tokenizer=_encoding.encode, verbose=False)