And for the traceback, I get this:
Traceback (most recent call last):
File "/Users/shanayajain/leximgpt-AI/Title21_DocQA/test.py", line 59, in <module>
answer_generator = service.get_answer()
^^^^^^^^^^^^^^^^^^^^
File "/Users/shanayajain/leximgpt-AI/Title21_DocQA/test.py", line 51, in get_answer
streaming_response = chat_engine.stream_chat("Tell me a joke.")
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/Users/shanayajain/leximgpt-AI/.venv/lib/python3.11/site-packages/llama_index/core/callbacks/utils.py", line 41, in wrapper
return func(self, *args, **kwargs)
^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/Users/shanayajain/leximgpt-AI/.venv/lib/python3.11/site-packages/llama_index/core/agent/runner/base.py", line 623, in stream_chat
chat_response = self._chat(
^^^^^^^^^^^
File "/Users/shanayajain/leximgpt-AI/.venv/lib/python3.11/site-packages/llama_index/core/agent/runner/base.py", line 520, in _chat
cur_step_output = self._run_step(
^^^^^^^^^^^^^^^
File "/Users/shanayajain/leximgpt-AI/.venv/lib/python3.11/site-packages/llama_index/core/agent/runner/base.py", line 374, in _run_step
cur_step_output = self.agent_worker.stream_step(step, task, **kwargs)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/Users/shanayajain/leximgpt-AI/.venv/lib/python3.11/site-packages/llama_index/core/callbacks/utils.py", line 41, in wrapper
return func(self, *args, **kwargs)
^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/Users/shanayajain/leximgpt-AI/.venv/lib/python3.11/site-packages/llama_index/core/agent/react/step.py", line 622, in stream_step
return self._run_step_stream(step, task)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/Users/shanayajain/leximgpt-AI/.venv/lib/python3.11/site-packages/llama_index/core/agent/react/step.py", line 504, in _run_step_stream
for latest_chunk in chat_stream:
File "/Users/shanayajain/leximgpt-AI/.venv/lib/python3.11/site-packages/llama_index/core/llms/callbacks.py", line 99, in wrapped_gen
for x in f_return_val:
File "/Users/shanayajain/leximgpt-AI/.venv/lib/python3.11/site-packages/llama_index/llms/vertex/base.py", line 255, in gen
for r in response:
File "/Users/shanayajain/leximgpt-AI/.venv/lib/python3.11/site-packages/vertexai/generative_models/_generative_models.py", line 934, in _send_message_streaming
for chunk in stream:
File "/Users/shanayajain/leximgpt-AI/.venv/lib/python3.11/site-packages/vertexai/generative_models/_generative_models.py", line 505, in _generate_content_streaming
response_stream = self._prediction_client.stream_generate_content(
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/Users/shanayajain/leximgpt-AI/.venv/lib/python3.11/site-packages/google/cloud/aiplatform_v1beta1/services/prediction_service/client.py", line 2207, in stream_generate_content
response = rpc(
^^^^
File "/Users/shanayajain/leximgpt-AI/.venv/lib/python3.11/site-packages/google/api_core/gapic_v1/method.py", line 131, in __call__
return wrapped_func(*args, **kwargs)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/Users/shanayajain/leximgpt-AI/.venv/lib/python3.11/site-packages/google/api_core/grpc_helpers.py", line 174, in error_remapped_callable
raise exceptions.from_grpc_error(exc) from exc
google.api_core.exceptions.ServiceUnavailable: 503 Getting metadata from plugin failed with error: 'str' object has no attribute 'before_request'