pip install llama-index-llms-ollama

from llama_index.llms.ollama import Ollama
from openai import OpenAI

# Ollama exposes an OpenAI-compatible API under /v1, so the stock OpenAI
# client can talk to the local server directly.
client = OpenAI(api_key="ollama", base_url="http://localhost:11434/v1")

completion = client.chat.completions.create(
    model="tinyllama",
    messages=[
        {"role": "system", "content": "You are a helpful assistant."},
        {"role": "user", "content": "how old does cat get at max?"},
    ],
)
print(completion.choices[0].message.content)
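That snippet succeeds because the stock OpenAI client posts to {base_url}/chat/completions, which lands on Ollama's OpenAI-compatible route at http://localhost:11434/v1/chat/completions. The 404 below comes from a different code path that does not use that route.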
raise HTTPStatusError(message, request=request, response=self)
httpx.HTTPStatusError: Client error '404 Not Found' for url 'http://localhost:11434/v1/api/chat'
For more information check: https://developer.mozilla.org/en-US/docs/Web/HTTP/Status/404
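The path in that 404 is the clue: the LlamaIndex Ollama class speaks Ollama's native API and appends /api/chat to whatever base_url it is given, so passing the OpenAI-style http://localhost:11434/v1 produces the nonexistent /v1/api/chat route. Dropping the /v1 suffix (or omitting base_url, which defaults to http://localhost:11434) fixes the request. A minimal sketch, assuming a local Ollama server with tinyllama already pulled:

from llama_index.llms.ollama import Ollama

# Point at the server root; the integration adds /api/chat on its own.
llm = Ollama(model="tinyllama", request_timeout=60.0, base_url="http://localhost:11434")
print(llm.complete("How long do cats live?"))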
import os

import pandas as pd
from llama_index.core.query_engine import PandasQueryEngine
from llama_index.llms.ollama import Ollama

from prompts import new_prompt, instruction_str

# The Ollama class uses the native Ollama API and appends /api/chat itself,
# so the base URL must be the server root, without the /v1 suffix.
llm = Ollama(model="tinyllama", request_timeout=60.0, base_url="http://localhost:11434")

# Path to the Quran data file (CSV)
quran_path = os.path.join("data", "quran.csv")

# Load the CSV into a DataFrame
quran_df = pd.read_csv(quran_path)

quran_query_engine = PandasQueryEngine(
    df=quran_df, verbose=True, instruction_str=instruction_str, llm=llm
)
quran_query_engine.update_prompts({"pandas_prompt": new_prompt})

response = quran_query_engine.query("how many total hasanat in the whole quran")
print(response)
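Under the hood, PandasQueryEngine has the LLM translate the question into a single pandas expression (guided by instruction_str and new_prompt) and then evaluates that expression against quran_df; verbose=True prints the generated code. Assuming the CSV carried a per-row hasanat count in a column literally named hasanat (a hypothetical name; the real schema may differ), the generated expression would amount to something like:

import pandas as pd

quran_df = pd.read_csv("data/quran.csv")

# "hasanat" is a hypothetical column name used only for illustration.
total_hasanat = quran_df["hasanat"].sum()
print(total_hasanat)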