# Mirror of https://github.com/hoshikawa2/rfp_response_automation.git
# Synced 2026-03-03 16:09:35 +00:00
import json
import threading
from datetime import datetime, timezone
from pathlib import Path

from flask import jsonify

from rfp_process import process_excel_rfp

from modules.excel.queue_manager import enqueue_excel_job
from modules.users.email_service import send_completion_email

from .storage import upload_file, generate_download_url
from .store import EXCEL_JOBS, EXCEL_LOCK
|
# How background jobs are dispatched: "THREAD" spawns an in-process daemon
# thread; any other value routes the job through the queue manager.
EXECUTION_METHOD = "QUEUE"  # THREAD OR QUEUE

# Root directory for per-job working folders; created eagerly at import time
# so job handlers can assume it exists.
UPLOAD_FOLDER = Path("uploads")
UPLOAD_FOLDER.mkdir(exist_ok=True)
|
|
|
|
|
|
def make_job_logger(job_id: str):
    """Build a per-job logger that appends messages to the in-memory job record.

    Simple logger in the style of the architect module: instead of a real
    logging backend, each message is stored in the job's ``logs`` list
    inside the shared ``EXCEL_JOBS`` table, guarded by ``EXCEL_LOCK``.
    Messages logged for an unknown ``job_id`` are silently dropped.
    """
    def append_log(message) -> None:
        text = str(message)
        with EXCEL_LOCK:
            entry = EXCEL_JOBS.get(job_id)
            if entry is None:
                # Job record was never created (or already removed) — no-op.
                return
            entry.setdefault("logs", []).append(text)

    return append_log
|
|
|
|
|
|
def start_excel_job(job_id: str, input_path: Path, email: str, auth_user: str, auth_pass: str, api_url: str):
    """Kick off asynchronous processing of an uploaded RFP Excel file.

    Creates the per-job working directory, records PROCESSING state (both in
    the shared in-memory ``EXCEL_JOBS`` table and in a per-job ``status.json``
    file), then runs the pipeline either on a daemon thread or through the
    queue manager, depending on the module-level ``EXECUTION_METHOD``.

    Args:
        job_id: Unique identifier for this job; also used as the object-storage prefix.
        input_path: Path to the uploaded input spreadsheet.
        email: Address that receives the completion/error notification.
        auth_user: API username forwarded to the processing pipeline.
        auth_pass: API password forwarded to the processing pipeline.
        api_url: Endpoint the pipeline calls to generate answers.
    """
    job_dir = UPLOAD_FOLDER / job_id
    job_dir.mkdir(parents=True, exist_ok=True)

    output_path = job_dir / "result.xlsx"
    status_file = job_dir / "status.json"
    object_name = f"{job_id}/result.xlsx"

    logger = make_job_logger(job_id)

    def write_status(state: str, detail: str | None = None):
        """Persist job state to status.json and mirror it into EXCEL_JOBS."""
        payload = {
            "status": state,
            # Timezone-aware UTC timestamp; datetime.utcnow() is deprecated.
            "updated_at": datetime.now(timezone.utc).isoformat(),
        }
        if detail:
            payload["detail"] = detail

        status_file.write_text(
            json.dumps(payload, ensure_ascii=False, indent=2),
            encoding="utf-8"
        )

        with EXCEL_LOCK:
            job = EXCEL_JOBS.get(job_id)
            if job is not None:
                job["status"] = state
                if detail:
                    job["detail"] = detail

    # Ensure the in-memory job structure exists before any status/log writes.
    with EXCEL_LOCK:
        EXCEL_JOBS.setdefault(job_id, {})
        EXCEL_JOBS[job_id].setdefault("logs", [])
        EXCEL_JOBS[job_id]["status"] = "PROCESSING"

    write_status("PROCESSING")
    logger(f"Starting Excel job {job_id}")
    logger(f"Input: {input_path}")
    logger(f"Output: {output_path}")

    def background():
        """Run the full pipeline; on failure record ERROR and notify by email."""
        # Defined up-front so the error-notification path below can always
        # reference it; previously an early failure (in process_excel_rfp or
        # upload_file) left download_url unbound and the error email raised
        # NameError instead of being sent.
        download_url = None
        try:
            # Main processing step: fills the output spreadsheet with answers.
            process_excel_rfp(
                input_excel=input_path,
                output_excel=output_path,
                api_url=api_url,
                auth_user=auth_user,
                auth_pass=auth_pass,
            )
            logger("Excel processing DONE")

            # Only mark the job DONE after the result is uploaded and a
            # download link exists; writing DONE earlier let clients observe
            # a "finished" job with no downloadable file (and the status
            # could later flip DONE -> ERROR if the upload failed).
            upload_file(output_path, object_name)
            download_url = generate_download_url(object_name)
            write_status("DONE", download_url)

            # Notify the requester; in dev mode the service may return the
            # message/link instead of actually sending it.
            dev_message = send_completion_email(email, download_url, job_id)
            if dev_message:
                logger(f"DEV email message/link: {dev_message}")

        except Exception as e:
            logger(f"ERROR: {e}")
            write_status("ERROR", str(e))

            # Best-effort error notification; download_url is None when the
            # failure happened before the result was uploaded.
            try:
                dev_message = send_completion_email(
                    email=email,
                    download_url=download_url,
                    job_id=job_id,
                    status="ERROR",
                    detail=str(e)
                )
                if dev_message:
                    logger(f"DEV email error message/link: {dev_message}")
            except Exception as mail_err:
                logger(f"EMAIL ERROR: {mail_err}")

    if EXECUTION_METHOD == "THREAD":
        threading.Thread(target=background, daemon=True).start()
    else:
        enqueue_excel_job(job_id, background)
|