| from __future__ import annotations |
|
|
| import os |
| import time |
| from pathlib import Path |
|
|
| from flask import Flask, jsonify, request, send_from_directory |
|
|
| from generate_question import ( |
| APP_TITLE, |
| QUESTION_LIMIT, |
| QuestionGenerator, |
| format_questions, |
| normalize_text, |
| parse_question_count, |
| resolve_model_dir, |
| ) |
|
|
# Directory names under the project root that must never be treated as model
# candidates when scanning for available models (tooling, source, and
# virtual-environment folders).
IGNORED_MODEL_DIR_NAMES = {
    ".git",
    ".vscode",
    "__pycache__",
    "backend",
    "frontend",
    "venv",
}
|
|
|
|
def project_root() -> Path:
    """Return the absolute project root (one level above this file's directory)."""
    return Path(__file__).resolve().parent.parent
|
|
|
|
def build_generator(
    model_dir: str | Path | None = None,
    prefer_nested_model: bool = True,
) -> QuestionGenerator:
    """Construct a QuestionGenerator for the app.

    The model directory comes from the explicit argument when given, otherwise
    from the HVU_MODEL_DIR environment variable, otherwise from the default
    "t5-viet-qg-finetuned" folder under the project root. Relative paths are
    anchored at the project root.
    """
    root = project_root()
    if model_dir is not None:
        chosen_dir = Path(model_dir).expanduser()
    else:
        fallback = str(root / "t5-viet-qg-finetuned")
        chosen_dir = Path(os.getenv("HVU_MODEL_DIR", fallback)).expanduser()
    if not chosen_dir.is_absolute():
        chosen_dir = root / chosen_dir

    # Every tuning knob is read from an HVU_* environment variable with a default.
    return QuestionGenerator(
        model_dir=str(chosen_dir),
        task_prefix=os.getenv("HVU_TASK_PREFIX", "sinh câu hỏi"),
        max_source_length=int(os.getenv("HVU_MAX_SOURCE_LENGTH", "512")),
        max_new_tokens=int(os.getenv("HVU_MAX_NEW_TOKENS", "64")),
        device=os.getenv("HVU_DEVICE", "auto"),
        cpu_threads=_read_optional_int(os.getenv("HVU_CPU_THREADS")),
        gpu_dtype=os.getenv("HVU_GPU_DTYPE", "auto"),
        prefer_nested_model=prefer_nested_model,
    )
|
|
|
|
| def _read_optional_int(value: str | None) -> int | None: |
| if value in (None, ""): |
| return None |
| return int(value) |
|
|
|
|
| def _humanize_model_segment(value: str) -> str: |
| normalized = value.replace("_", "-") |
| parts: list[str] = [] |
| for part in normalized.split("-"): |
| lowered = part.lower() |
| if not lowered: |
| continue |
| if lowered in {"t5", "qg", "qa", "hvu"}: |
| parts.append(lowered.upper()) |
| elif lowered == "seq2seq": |
| parts.append("Seq2Seq") |
| elif lowered == "checkpoint": |
| parts.append("Checkpoint") |
| elif part.isdigit(): |
| parts.append(part) |
| else: |
| parts.append(part.capitalize()) |
| return "-".join(parts) or "Model" |
|
|
|
|
def _display_model_name(meta: dict[str, object]) -> str:
    """Derive a human-friendly model name from generator metadata.

    Prefers "model_root" over "model_dir"; falls back to the literal "model".
    """
    source = meta.get("model_root") or meta.get("model_dir") or "model"
    directory_name = Path(str(source)).name
    return _humanize_model_segment(directory_name)
|
|
|
|
| def _model_label(relative_path: str | Path) -> str: |
| path = Path(relative_path) |
| return path.name or "model" |
|
|
|
|
def _iter_model_candidates(root: Path):
    """Yield candidate model directories found directly under *root*.

    Each yielded item is a dict with a "path" (the directory holding a
    config.json) and a "prefer_nested_model" flag. Hidden directories and names
    in IGNORED_MODEL_DIR_NAMES are skipped. Known training-output subfolders
    ("best-model", "final-model") are also probed one level down.
    """
    children = sorted(root.iterdir(), key=lambda entry: entry.name.lower())
    for child in children:
        if not child.is_dir():
            continue
        if child.name.startswith(".") or child.name in IGNORED_MODEL_DIR_NAMES:
            continue

        # A directory carrying its own config.json is directly loadable.
        if (child / "config.json").exists():
            yield {"path": child, "prefer_nested_model": False}

        # Training runs may keep the usable weights in a nested subdirectory.
        for nested_name in ("best-model", "final-model"):
            nested_dir = child / nested_name
            if nested_dir.is_dir() and (nested_dir / "config.json").exists():
                yield {"path": nested_dir, "prefer_nested_model": False}
|
|
|
|
def _discover_available_models(
    root: Path,
    active_generator: QuestionGenerator | None = None,
) -> list[dict[str, str]]:
    """Scan *root* for loadable models and return them as selection entries.

    Each entry carries: "id" (root-relative POSIX path, used by the API),
    "label" (display name), "model_root"/"model_dir" (absolute paths), and
    "prefer_nested_model". If *active_generator* points at a model that the
    scan did not find (e.g. it lives outside *root*), it is appended so the
    UI can still show the current selection.
    """
    models: list[dict[str, str]] = []
    seen_model_roots: set[str] = set()
    root = root.resolve()

    for candidate_info in _iter_model_candidates(root):
        candidate = candidate_info["path"]
        prefer_nested_model = bool(candidate_info["prefer_nested_model"])
        # Deduplicate by resolved path (symlinks/relative aliases collapse).
        model_key = str(candidate.resolve())
        if model_key in seen_model_roots:
            continue

        # Skip candidates that escape the project root (relative_to raises).
        try:
            relative_candidate = candidate.resolve().relative_to(root)
        except ValueError:
            continue

        seen_model_roots.add(model_key)
        models.append(
            {
                "id": relative_candidate.as_posix(),
                "label": _model_label(relative_candidate),
                "model_root": str(candidate.resolve()),
                # model_dir is the directory actually passed to the loader;
                # resolve_model_dir may differ from the candidate root.
                "model_dir": str(resolve_model_dir(candidate, prefer_nested_model=False).resolve()),
                "prefer_nested_model": prefer_nested_model,
            }
        )

    if active_generator is not None:
        current_root = active_generator.model_root.resolve()
        current_dir = active_generator.model_dir.resolve()
        # Match on either root or dir so nested layouts are still recognized.
        exists = any(
            Path(item["model_root"]).resolve() == current_root
            or Path(item["model_dir"]).resolve() == current_dir
            for item in models
        )
        if not exists:
            models.append(
                {
                    "id": current_root.as_posix(),
                    "label": _display_model_name(active_generator.metadata()),
                    "model_root": str(current_root),
                    "model_dir": str(current_dir),
                    "prefer_nested_model": False,
                }
            )

    return models
|
|
|
|
def _selected_model_id(
    app: Flask,
    models: list[dict[str, str]],
    active_generator: QuestionGenerator | None = None,
) -> str:
    """Resolve which model id should be reported as currently selected.

    Precedence: the explicitly pinned SELECTED_MODEL_ID (if still listed),
    then a match on the active generator's model_dir, then on its model_root,
    finally the first listed model (or "" when none exist).
    """
    pinned = str(app.config.get("SELECTED_MODEL_ID") or "").strip()
    if pinned:
        for entry in models:
            if entry["id"] == pinned:
                return pinned

    generator = active_generator if active_generator is not None else _generator(app)
    active_root = generator.model_root.resolve()
    active_dir = generator.model_dir.resolve()

    # Exact model_dir match is the most specific signal.
    dir_match = next(
        (entry["id"] for entry in models if Path(entry["model_dir"]).resolve() == active_dir),
        None,
    )
    if dir_match is not None:
        return dir_match

    root_match = next(
        (entry["id"] for entry in models if Path(entry["model_root"]).resolve() == active_root),
        None,
    )
    if root_match is not None:
        return root_match

    return models[0]["id"] if models else ""
|
|
|
|
def _switch_generator(app: Flask, model_id: str) -> QuestionGenerator:
    """Activate the model identified by *model_id*, rebuilding only if it changed.

    Raises:
        ValueError: when *model_id* does not correspond to a discovered model.
    """
    candidates = _discover_available_models(app.config["PROJECT_ROOT"], _generator(app))
    chosen = None
    for entry in candidates:
        if entry["id"] == model_id:
            chosen = entry
            break
    if chosen is None:
        raise ValueError("Model được chọn không hợp lệ hoặc chưa tồn tại trong thư mục dự án.")

    # Avoid an expensive reload when the requested model is already active.
    if _selected_model_id(app, candidates) != model_id:
        app.config["GENERATOR"] = build_generator(
            chosen["model_root"],
            prefer_nested_model=bool(chosen.get("prefer_nested_model")),
        )

    app.config["SELECTED_MODEL_ID"] = model_id
    return _generator(app)
|
|
|
|
def _info_payload(app: Flask, active_generator: QuestionGenerator | None = None) -> dict[str, object]:
    """Build the JSON payload describing the active model and all selectable models."""
    generator = active_generator if active_generator is not None else _generator(app)
    meta = generator.metadata()
    models = _discover_available_models(app.config["PROJECT_ROOT"], generator)
    selected_id = _selected_model_id(app, models, generator)

    # Use the listed label for the selected id; otherwise fall back to a
    # humanized name derived from the generator metadata.
    model_name = _display_model_name(meta)
    for entry in models:
        if entry["id"] == selected_id:
            model_name = entry["label"]
            break

    return {
        "ok": True,
        "title": APP_TITLE,
        "model_name": model_name,
        "selected_model_id": selected_id,
        "available_models": [{"id": entry["id"], "label": entry["label"]} for entry in models],
        "meta": meta,
    }
|
|
|
|
def create_app(generator: QuestionGenerator | None = None) -> Flask:
    """Build the Flask app serving the frontend and the question-generation API.

    Args:
        generator: Optional pre-built QuestionGenerator; when omitted one is
            constructed from environment variables via build_generator().

    Returns:
        A configured Flask application with static-file and /api routes.
    """
    root = project_root()
    frontend_root = root / "frontend"

    app = Flask(__name__, static_folder=None)
    app.json.ensure_ascii = False  # keep Vietnamese text readable in JSON responses
    app.config["GENERATOR"] = generator or build_generator()
    app.config["PROJECT_ROOT"] = root
    app.config["FRONTEND_ROOT"] = frontend_root
    app.config["SELECTED_MODEL_ID"] = ""

    @app.get("/")
    def index():
        return send_from_directory(app.config["FRONTEND_ROOT"], "index.html")

    @app.get("/frontend/<path:filename>")
    def frontend_file(filename: str):
        return send_from_directory(app.config["FRONTEND_ROOT"], filename)

    @app.get("/assets/<path:filename>")
    def asset_file(filename: str):
        return send_from_directory(app.config["PROJECT_ROOT"], filename)

    @app.get("/api/info")
    def info():
        return jsonify(_info_payload(app))

    @app.post("/api/model")
    def set_model():
        """Switch the active model; 404 when the requested id is unknown."""
        payload = request.get_json(silent=True) or {}
        model_id = str(payload.get("model_id") or "").strip()
        if not model_id:
            return jsonify({"ok": False, "error": "Vui lòng chọn model trước khi chuyển."}), 400

        try:
            active_generator = _switch_generator(app, model_id)
        except ValueError as exc:
            return jsonify({"ok": False, "error": str(exc)}), 404

        return jsonify(_info_payload(app, active_generator))

    @app.post("/api/generate")
    def generate():
        """Generate questions from a text passage, optionally switching models first."""
        payload = request.get_json(silent=True) or {}
        requested_model_id = str(payload.get("model_id") or "").strip()

        # Optionally hot-swap the model for this (and subsequent) requests.
        if requested_model_id:
            try:
                active_generator = _switch_generator(app, requested_model_id)
            except ValueError as exc:
                return jsonify({"ok": False, "error": str(exc)}), 400
        else:
            active_generator = _generator(app)

        text = normalize_text(payload.get("text"))
        if not text:
            return jsonify({"ok": False, "error": "Vui lòng nhập đoạn văn bản trước khi sinh câu hỏi."}), 400

        raw_count = payload.get("num_questions")
        if raw_count in (None, ""):
            # Default to the configured limit rather than a hard-coded 100 so the
            # default cannot drift out of the valid range if QUESTION_LIMIT changes.
            count = QUESTION_LIMIT
        else:
            try:
                count = int(raw_count)
            except (TypeError, ValueError):
                # Report the actual configured limit, matching the range check below.
                return (
                    jsonify(
                        {
                            "ok": False,
                            "error": f"Số câu hỏi phải là số nguyên trong khoảng 1 đến {QUESTION_LIMIT}.",
                        }
                    ),
                    400,
                )

        if count < 1 or count > QUESTION_LIMIT:
            return jsonify({"ok": False, "error": f"Số câu hỏi phải nằm trong khoảng 1 đến {QUESTION_LIMIT}."}), 400

        started = time.perf_counter()
        try:
            questions = active_generator.generate(text, parse_question_count(count))
        except Exception as exc:  # boundary handler: surface model errors as HTTP 500
            return jsonify({"ok": False, "error": str(exc)}), 500

        elapsed_ms = round((time.perf_counter() - started) * 1000, 2)
        info_payload = _info_payload(app, active_generator)
        return jsonify(
            {
                "ok": True,
                "text": text,
                "num_questions": count,
                "questions": questions,
                "formatted": format_questions(questions),
                "elapsed_ms": elapsed_ms,
                "model_name": info_payload["model_name"],
                "selected_model_id": info_payload["selected_model_id"],
                "meta": active_generator.metadata(),
            }
        )

    return app
|
|
|
|
| def _generator(app: Flask) -> QuestionGenerator: |
| generator: QuestionGenerator = app.config["GENERATOR"] |
| return generator |
|
|