mirror of
https://github.com/hoshikawa2/rfp_response_automation.git
synced 2026-03-03 16:09:35 +00:00
83 lines
2.2 KiB
Python
83 lines
2.2 KiB
Python
import json
|
|
from flask import Blueprint, request, jsonify
|
|
from modules.core.security import requires_app_auth
|
|
from oci_genai_llm_graphrag_rerank_rfp import answer_question, search_active_chunks
|
|
from modules.core.audit import audit_log
|
|
from .service import start_chat_job
|
|
from .store import CHAT_JOBS, CHAT_LOCK
|
|
|
|
chat_bp = Blueprint("chat", __name__)
|
|
|
|
def parse_llm_json(raw: str) -> dict:
    """Parse a raw LLM response into a dict, stripping Markdown code fences.

    Returns a fallback error payload (with the original output preserved
    under ``raw_output``) when the input is not a string, is not valid
    JSON, or does not decode to a JSON object.

    Args:
        raw: The raw text returned by the LLM.

    Returns:
        The decoded JSON object, or an error dict with keys
        ``answer`` / ``justification`` / ``raw_output``.
    """
    if not isinstance(raw, str):
        return {"answer": "ERROR", "justification": "LLM output is not a string", "raw_output": str(raw)}

    # LLMs frequently wrap JSON in ```json ... ``` fences; remove them first.
    cleaned = raw.replace("```json", "").replace("```", "").strip()

    try:
        parsed = json.loads(cleaned)
    except Exception:
        return {"answer": "ERROR", "justification": "LLM returned invalid JSON", "raw_output": raw}

    # json.loads may yield a list/number/string; the annotation (and the
    # callers that jsonify this value as "result") expect an object.
    if not isinstance(parsed, dict):
        return {"answer": "ERROR", "justification": "LLM returned invalid JSON", "raw_output": raw}

    return parsed
|
|
|
|
@chat_bp.route("/chat", methods=["POST"])
@requires_app_auth
def chat():
    """Synchronous Q&A endpoint: run the LLM over the question and return parsed JSON.

    Expects a JSON body with a non-empty ``question`` field; responds 400
    otherwise. Writes an audit-log entry for every answered question.
    """
    payload = request.get_json(force=True) or {}
    question = (payload.get("question") or "").strip()

    if not question:
        return jsonify({"error": "Empty question"}), 400

    parsed_answer = parse_llm_json(answer_question(question))
    audit_log("RFP_QUESTION", f"question={question}")

    # (optional) keep the old evidence/full_text behavior if desired:
    # docs = search_active_chunks(question)

    return jsonify({"question": question, "result": parsed_answer})
|
|
|
|
@chat_bp.post("/chat/start")
def start():
    """Start an asynchronous chat job; returns the job id for later polling.

    NOTE(review): unlike /chat, this route has no @requires_app_auth —
    confirm whether unauthenticated job creation is intentional.
    """
    data = request.get_json(silent=True) or {}
    question = (data.get("question") or "").strip()

    if not question:
        # Previously request.json["question"] raised KeyError -> 500 when
        # the field was missing; reject bad input cleanly instead.
        return jsonify({"error": "Empty question"}), 400

    job_id = start_chat_job(question)

    return jsonify({"job_id": job_id})
|
|
|
|
@chat_bp.get("/chat/<job_id>/status")
def status(job_id):
    """Report the current status of an async chat job, or 404 if unknown."""
    # Snapshot the job under the lock; release it before building the response.
    with CHAT_LOCK:
        job = CHAT_JOBS.get(job_id)

    if job is None:
        return jsonify({"error": "not found"}), 404

    return jsonify({"status": job["status"]})
|
|
|
|
@chat_bp.get("/chat/<job_id>/result")
def result(job_id):
    """Return the parsed result (and any error) of an async chat job.

    Responds 404 when the job id is unknown.
    """
    # Snapshot the job under the lock; release it before building the response.
    with CHAT_LOCK:
        job = CHAT_JOBS.get(job_id)

    if job is None:
        return jsonify({"error": "not found"}), 404

    payload = {
        "result": parse_llm_json(job["result"]),
        "error": job["error"],
    }
    return jsonify(payload)
|
|
|
|
@chat_bp.get("/chat/<job_id>/logs")
def logs(job_id):
    """Return the accumulated log lines for an async chat job.

    Fixed: an unknown job_id previously returned None from CHAT_JOBS.get
    and then raised TypeError on job["logs"] (HTTP 500); now responds 404,
    matching the status/result endpoints.
    """
    with CHAT_LOCK:
        job = CHAT_JOBS.get(job_id)

    if job is None:
        return jsonify({"error": "not found"}), 404

    return jsonify({"logs": job["logs"]})
|
|
|