Hello all! I'm trying to use Llama with Vertex AI, but I'm getting this error:
TypeError: BaseLLM.predict() got an unexpected keyword argument 'context_str'
Any help?
This is my code:
def query_google_llm(chat, query):
    """Send *query* through the given chat session and return the reply text.

    The reply text is also printed, which the surrounding code uses for
    debugging output.
    """
    reply = chat.send_message(query)
    answer = reply.text
    print(answer)
    return answer
# Module-level chat session shared by PaLM._call below.
# NOTE(review): build_google_llm is not defined in this snippet — presumably
# it returns a Vertex AI chat session object with a send_message method;
# confirm against the rest of the file.
chat = build_google_llm()
class PaLM(LLM):
    """Custom LangChain LLM wrapper that delegates to a Vertex AI chat session.

    Each prompt is forwarded to the module-level ``chat`` session via
    ``query_google_llm``. ``_call`` accepts and ignores extra keyword
    arguments (e.g. template variables such as ``context_str``) so that
    callers passing additional kwargs do not raise
    ``TypeError: ... got an unexpected keyword argument``.
    NOTE(review): the reported traceback names ``BaseLLM.predict()``, so the
    error may also originate in how this LLM is invoked upstream — verify
    the calling code passes template variables through the prompt, not as
    kwargs.
    """

    model_name: str = "Bard"
    # Usage counters; declared but not currently updated by _call.
    total_tokens_used: int = 0
    last_token_usage: int = 0

    def _call(
        self,
        prompt: str,
        stop: Optional[List[str]] = None,
        **kwargs: Any,
    ) -> str:
        """Send *prompt* to the shared chat session and return the reply text.

        ``stop`` is accepted for interface compatibility but not applied —
        the underlying chat call here does not take stop sequences. Extra
        keyword arguments are tolerated and ignored (see class docstring).
        """
        print("prompt: ", prompt)
        response = query_google_llm(chat, prompt)
        print("response: ", response)
        return response

    @property
    def _identifying_params(self) -> Mapping[str, Any]:
        """Parameters identifying this LLM instance to LangChain."""
        return {"name_of_model": self.model_name}

    @property
    def _llm_type(self) -> str:
        """LangChain LLM type tag for this custom wrapper."""
        return "custom"