# Load environment variables before importing modules that may read them at import time.
from config import load_envs

load_envs()

import os

from flask import Flask, request, jsonify
from flask_cors import CORS
from llama_index.prompts import PromptTemplate
from llama_index.chat_engine import CondenseQuestionChatEngine

from index_manager import initialize_index, get_service_context
from chat_history_parser import retrieve_chat_history
from mongodb.db import insert_message_in_chat

app = Flask(__name__)
cors = CORS(app)
app.config['CORS_HEADERS'] = 'Content-Type'

# Build the query engine once at startup and reuse it across requests.
query_engine = initialize_index()

# Prompt used to condense the chat history and the follow-up message into a
# single standalone question before querying the index.
custom_prompt = PromptTemplate("""\
Given a conversation (between Human and Assistant), a context, a history, and a follow-up message from Human, \
rewrite the message as a standalone question that captures all relevant context from the conversation. \
Always reply in Italian and do not provide answers outside the context or the chat history.

<Chat History>
{chat_history}

<Follow Up Message>
{question}

<Standalone question>
""")


@app.route("/chat/<chatId>/answer", methods=["GET"])
def query_index(chatId):
    query_text = request.args.get("text")
    if query_text is None:
        return "No text found, please include a ?text=example parameter in the URL", 400

    service_context = get_service_context()
    history = retrieve_chat_history(chatId)

    # Condense the follow-up question against the stored history, then answer it
    # with the shared query engine.
    chat_engine = CondenseQuestionChatEngine.from_defaults(
        query_engine=query_engine,
        condense_question_prompt=custom_prompt,
        chat_history=history,
        service_context=service_context,
        verbose=True,
    )
    response = chat_engine.chat(query_text)

    # Persist both sides of the exchange so the next request sees the full history.
    insert_message_in_chat(chatId, query_text, 'user')
    insert_message_in_chat(chatId, str(response), 'assistant')

    return jsonify(response=str(response)), 200


if __name__ == "__main__":
    app.config['MONGO_URI'] = os.getenv("MONGODB_URI")
    app.run(host="0.0.0.0", port=5601)
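
# Usage sketch (assumptions: the server is running locally on the port configured
# above, MONGODB_URI is set, and "abc123" is a hypothetical chat id that already
# exists in MongoDB). The endpoint returns JSON of the form {"response": "..."}.
#
#   curl "http://localhost:5601/chat/abc123/answer?text=example+question"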