from fastapi import FastAPI
from fastapi.responses import StreamingResponse
from llama_index import SimpleDirectoryReader, VectorStoreIndex  # in llama-index >= 0.10 these live under llama_index.core

# Load the sample documents and build an in-memory vector index over them.
documents = SimpleDirectoryReader("./docs/examples/data/paul_graham/").load_data()
index = VectorStoreIndex.from_documents(documents)

# Expose the index as a chat engine that can answer questions about the documents.
agent = index.as_chat_engine()

app = FastAPI()
app.agent = agent  # keep the chat engine on the app object so route handlers can reach it


@app.get("/chat/")
async def chat(message: str) -> StreamingResponse:
    # Start an async streaming chat and hand the token generator to StreamingResponse,
    # so the client receives the answer incrementally instead of all at once.
    response = await app.agent.astream_chat(message)
    response_gen = response.async_response_gen()
    return StreamingResponse(response_gen, media_type="text/plain")


@app.get("/")
async def root():
    return {"message": "Hello World"}


if __name__ == "__main__":
    import uvicorn

    uvicorn.run(app, loop="asyncio", host="127.0.0.1", port=8000)
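
# A quick way to try the streaming endpoint once the server is running; this is
# just an illustrative request (the sample question assumes the Paul Graham essay
# data loaded above), not part of the app itself:
#
#   curl -N "http://127.0.0.1:8000/chat/?message=What%20did%20the%20author%20do%20growing%20up%3F"
#
# The -N flag disables curl's output buffering so streamed chunks are printed as
# soon as they arrive.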