| from __future__ import annotations |
|
|
| import argparse |
| import base64 |
| import fnmatch |
| import importlib.util |
| import json |
| import os |
| import shutil |
| import subprocess |
| import sys |
| import textwrap |
| import zlib |
| from dataclasses import dataclass |
| from pathlib import Path |
|
|
# Directory that contains this script; all tool-local paths are resolved against it.
SCRIPT_ROOT = Path(__file__).resolve().parent
# True on Windows ("nt"); used to pick the platform-specific venv layout below.
IS_WINDOWS = os.name == "nt"
# Dedicated virtual environment for this tool, kept alongside the script so it
# does not interfere with the user's own Python environment.
TOOL_VENV_DIR = SCRIPT_ROOT / ".hvu_qa_tool_venv"
# Interpreter inside that venv: Windows uses Scripts/python.exe, POSIX uses bin/python.
TOOL_VENV_PYTHON = TOOL_VENV_DIR / ("Scripts/python.exe" if IS_WINDOWS else "bin/python")
|
|
# Hugging Face dataset repository (and revision) that hosts the project files.
HF_DATASET_REPO_ID = "DANGDOCAO/GeneratingQuestions"
HF_DATASET_REVISION = "main"
# Directory layout inside that repo: project root, fine-tuned model, best checkpoint.
HF_PROJECT_SUBDIR = "HVU_QA"
HF_MODEL_SUBDIR = f"{HF_PROJECT_SUBDIR}/t5-viet-qg-finetuned"
HF_BEST_MODEL_SUBDIR = f"{HF_MODEL_SUBDIR}/best-model"
|
|
# huggingface_hub spec is kept separate so it can be referenced on its own
# (it is also a member of RUNTIME_REQUIREMENTS below).
HF_HUB_REQUIREMENT = "huggingface_hub>=0.23.0,<1.0.0"
# Upper-bounded requirement specs for the tool's runtime environment.
# Each pin allows minor/patch updates but blocks the next major release.
RUNTIME_REQUIREMENTS = [
    "accelerate>=1.1.0,<2.0.0",
    "datasets>=2.19.0,<4.0.0",
    "Flask>=3.0.0,<4.0.0",
    HF_HUB_REQUIREMENT,
    "numpy>=1.26.0,<3.0.0",
    "safetensors>=0.4.3,<1.0.0",
    "sentencepiece>=0.2.0,<1.0.0",
    "torch>=2.2.0,<3.0.0",
    "transformers>=4.41.0,<5.0.0",
]
# Relative file paths whose presence identifies a local checkout of the project.
LOCAL_PROJECT_MARKERS = [
    "main.py",
    "backend/app.py",
    "frontend/index.html",
    "generate_question.py",
]
# Maps each pip distribution name to the module name used to import it
# (they differ for e.g. Flask -> flask). Presumably used to check whether
# dependencies are importable (importlib.util is imported above) — confirm
# against the call site outside this chunk.
DEPENDENCY_IMPORTS = {
    "accelerate": "accelerate",
    "datasets": "datasets",
    "Flask": "flask",
    "numpy": "numpy",
    "torch": "torch",
    "transformers": "transformers",
    "sentencepiece": "sentencepiece",
    "safetensors": "safetensors",
    "huggingface_hub": "huggingface_hub",
}
# Glob patterns for model-directory files to skip: intermediate training
# checkpoints and training run metadata that are not needed for inference.
# NOTE(review): whether these act as download or upload ignore patterns
# depends on the call site, which is outside this chunk — confirm there.
MODEL_IGNORE_PATTERNS = [
    f"{HF_MODEL_SUBDIR}/checkpoint-*/**",
    f"{HF_MODEL_SUBDIR}/all_results.json",
    f"{HF_MODEL_SUBDIR}/eval_results.json",
    f"{HF_MODEL_SUBDIR}/train_results.json",
    f"{HF_MODEL_SUBDIR}/trainer_state.json",
    f"{HF_MODEL_SUBDIR}/training_summary.json",
    f"{HF_MODEL_SUBDIR}/training_args.bin",
    f"{HF_BEST_MODEL_SUBDIR}/training_args.bin",
]
# Text source files that make up the runnable application bundle
# (backend, frontend, and entry-point scripts), as project-relative paths.
RUNTIME_SOURCE_TEXT_FILES = (
    "requirements.txt",
    "main.py",
    "backend/__init__.py",
    "backend/app.py",
    "generate_question.py",
    "frontend/index.html",
    "frontend/style.css",
    "frontend/app.js",
)
# Binary/asset files that are optional additions to the bundle.
RUNTIME_OPTIONAL_ASSET_FILES = ("HVU.png",)
# Full set of files in the runtime bundle: sources first, then optional assets.
RUNTIME_BUNDLE_FILES = RUNTIME_SOURCE_TEXT_FILES + RUNTIME_OPTIONAL_ASSET_FILES
# Maps each bundle file's path inside the HF repo (prefixed with the project
# subdirectory) to its local relative path, with any backslashes in the local
# path normalized to forward slashes.
HF_RUNTIME_REPO_FILE_MAP = dict(
    (f"{HF_PROJECT_SUBDIR}/{bundle_file}", bundle_file.replace("\\", "/"))
    for bundle_file in RUNTIME_BUNDLE_FILES
)
| EMBEDDED_RUNTIME_TEXT_PAYLOAD_B64 = """ |
| eNrtvQ2PI0dyIPpXSq2Vl5RINslms9mc6ZFHo5FmTvO1063xrqcbVDVZ3awdsoqqKvZMe9zAGYuHxcFYeAXjvYPhZ9zKgrC3Xgve |
| 9e7B2GkYBq4F/Y++X/IiIj8qMyuzSM6M1vvuvLakZlZ+REZGRkZERkQ+X0uCT+dhEkyDKEsb2bNsrb/2pvdwHmXhNPDe8bLED6Mw |
| OvZGwSyIRkE0DIPUO4oT79ajjwffu97Yj9707l2++Le5N7r4HdS79+j2+7evex8++LjmjS++OPWGFz8LvcPLF19EXhYnw7H39WcX |
| L6DiyeX534bejY/fv+4NL89/7nvTi89PvWwcxN74m1/BRxz08sUvI284vvgqGsOny/NfDHnlB6d72BkM7w+HwSRI/Cy4ttNqtBrN |
| 2tV2o9lo7kcjP/PTIEuv7bQbrW380GEfPpj46ZNrOxv4SxaO58fHMNEjfxgMxvPDazvNRnsDv7fY92g+nZ3iEO0ulm6w0tQ/CrIg |
| SuMkxRadxoZskAJKAV/BLAyGAXWn9EaoQMDaSmeA7CgF3E4D7K3T6NBsNtnHtdraFNaiMTuFJTpK4qk3GBzNs3kSDAZeOJ3FSeb5 |
| URRnfhbGUbof7Ue8NE7ln9k4CfwRTFOWPA0OD5P4aRok2IL6PfSHT2CtRadDaJIFA382wxrwH29HKatUsRSQHRx5Az8dHMbxpHLi |
| T+ZB30uzxPtz714cBTWgnyN/Psn6HlaoevVr9Ed/P/Lgf+GRR028MKXqvBj/lwQwxUg0Z+W8jJo0YJAQoGhM4qdBUql6gAEvjLzn |
| +2vN/bWat7925E/SgP0Zxey/8dHR/tqZAngMxD3geBhMYGpJZRynGU2h5iEa+tBpRnArAALcOJycd5w2jpEcTir7a7g/7j+4eW/w |
| 3sP7f7J78+H+WrXm7SXzoFqYHfud+Qk0HuC4gOH9tVZ7C1e+0dpfw4GonE+Myvn0+n2YihfAHKkK62ueTKAPmOQ4y2b99fXnSudn |
| /ec4obP9NT6uoInGHuz5pAIUXvMm/vRw5PcV8mggjirQcbUKOPeTTKw8wDYYRP4U6XAHAR8MkFAHA4CNjcCnZGLn1v3dPTYHZapV |
| 1oQobwdxXkDqg/sPebPNZhOQUOVNRsHh/BjaOFfj/ZvvffwhLcMHSBK8mWvt2bLzSkDojWQe0Yed/OsO/qvGRt6hf9e8eRoMkmAS |
| +6Mg2REDweblu2p9MACGmg0GykZu4Kay7rbBwJ9MEK/e4/015dPagdYngvdqfAFWnu/+mZ+NJ+GhaPoAfkrOcISMU3whLlrzfpjG |
| QAKnNQ9PkgAxA4xvNMD6gxGcLENgdaeyh+MgIl49oLoAkOitwhB9/cGDwd7tvTs3a+z39z6+ubt3+/69wZ3bd2/viULe9kPWWZzw |
| cmSdfia7TnlxhMWT8M+CQRY8y3jhzE/SHIrBMIYjrybYSxpPToLBNB4FE5wDlBOt3/7w3v2HN98f3L3//s07g/dvPxzcu3735i6s |
| znPWcn+tcRxmQJzy50k6hF6UksFgdjr0h+MA94cs5QuplAC28PhQi06AlOmnwrlmSfxDQPEgiWPYkMiecMH6Gp/EkgoQRTiBQasN |
| Pj1gmYADPPYftw7yDg/n4WQ0OBaY5csiUSGYOvbJeTtMn1g8x2sSHME+igCvwYihkPF8qIbcD3GJYBbWUMAME4Gq+rzYpxTO+aHs |
| FaGBipWcmdI85bdqI3g286MR7MdE9MB5dt4ezhtk4Ah/XoN4KXVmchC58Mh/ABEVgnYd1ibbrJ+EQVb/9Lh+FEaA9ygYIWuyQVHV |
| Do/irBphOvAPYZHmGaySclpYEcBBKH7bjzQqKCBcwZxss4OTKnZVreV1QaZ6MsBVDp/tmPjZu7770eDBw5sf3P4+49BpCILb8OLv |
| 5x4Ib38VIvNVRvWfDdJ4noDANQmi42y8Y2P3d69/f7B7/+OHN24O7ty89+HeLc77W23Er9FdFDwdZDFspdTZ172bfzLYu//RzXu7 |
| rKNux+hnFJyEw2CneHo8un3jJmvjz7NYn8twNh+wgzTdGeB/4FhBbPuTgQ2QGw8AWbce3rz+/q4x+jF0NMpOZ0UAQKQevL/3gwdO |
| GCxbb8dSVpNUKMWfIsAFCY42LXzhPwuCW+RVmKAHYsVaUcRRthgvkKOogIznUz9CTs1ILw2OUTFRoCEw4L99g7WPYCcweTAJZhOQ |
| 4QFpA4aoei5UgNSS9r1JmGaPoY8DPFYP5NFBn3EieaeNdDYJswrrQ5kTyZo0JrYRoqfGY3Br82pKQyIVYOxhNA+06qJHJuBlmwz0 |
| T4/5f3323/HJHIQ9oz+aFYoQcFpUeD+NOfwEkKoqV1OGITktDT5twz9STLN2uL+2C9V2sVppZ3CiDZ/MYljVRf3dUGqaXRI6w3QU |
| wjGqsb5CP/ijqvPsRdUbQ38WZrSyEjOcGGmFGz8EoKhmWvWAHvbX7iIVopwsCXQUAkn4p5w+UeqtTIMMBOVROCSiqnnxIZ5cBwal |
| Jv5Tqg40Q0cLMlpsiXsckML6Q14OSMHBC9+Qo69xuKYMLjhdsEttJq4tJMZXdxurMfEPg0kFRFYQEEHqQflPPeWNeeBnMQetjY5Q |
| LCHgNICVsUOQszkAQzgfQ1DWg5TO0z4btp9vzOEYZBLcGqBmAxujWg3sAHBSAXH+SXC6wxQWj0EvRxdbUyUlvjmpUzxqqRNPDMOa |
| kYKTPg1hkkAXHO/5d4TFIQw6N7sGQIVNCSUHqHEUHjdQjIZxQFwA7pQWaP80DKA+sAacGuwwBgzyBAuHx++keZypwyImeS0xB5jc |
| IZTUeSvSlkM4AkSBCQVrjvo/B1/pT6+JSKZvOYZhlb0K72H5iVsnz3pZavb5vh3GJ1DTP/FDoPgJ3x9pJRc7GeHx49EfEmVLQbhf |
| FKAK8i9tFTpdcm6A58xBX5GixQFkVFHOojQAhTRnCNAgDeSBBX+Kk4aLyrQbpFAvVpz2jdhYcKQfxbjgJdtORbz8hEutdfJYLMJB |
| qdwB7UgFLza2rddB1RRFYUfjXIFHyh7yKdqEeWyADMJE3eLdmCWnRi3J1qxoUPQnWTGLGQqV8+jZMJhl3iMUSW4miVRvyiAxgW/4 |
| o1FFTs/EUX6iah0/L26f/bVwhJuiOK+Gnw5mcRo+q6hyZN6OjgZsaj8qZD/21uqZ1ncuZklTOvJYy4JOnvdVswq+zPKyaBwH+3BL |
| zeJ/Z/lPueWAGE2uoSqY6v6aJ6h5D/gONls1csypG9tszfQ/R2PUIy1tGZfFZtGpQTl0pAN/mD7Wl+5AwSLKeirweg8w32IntIiu |
| PkhNVbtAtoXNcTMzMtcQbZzhbDbG1irbHY4dou4SdXrl+8PcIxbZsLg4INbhvYSdIJ37RoFpUTtl0yhYdrcqO0Etjc70oqphZRBr |
| Jg9ew5gQihWB5elzE+Liw/FVz+RcfA2egUY3DDMOFxog2TmDJlQukTCpe/fmnZs39qSAd/t9KX2jtMJvHeTGt/SL8g7uMr4bkLoO |
| kPYtVYtUb9Ggi+0E7gtcp8gVEHLFqgez5bC/PCt6GTaUyyb6dHXxfHUmUjjBmYlBwfzLj72YCy4/urZLHjcPJF0IQSZl1kckMnUT |
| gSYyHOvrJ3aPJzZWbh9x2VZN2RcvS9yCcb4jAMoHD+//J9gOg4f37+8BvDWTmKwmWug/Cp5lFcKnhvkCJDB/c5+IeYGCRwYoudeM |
| QSx3hn4IOMxlrwrX5L2vP8Ob5S+GoL9cnv8k8p6ML34bHaNt8ouZN7k8/7E3ji9f/At+/+ZXvpddnv91BP9+8XkIYmIMNTMo96aX |
| 518OvdHl+a+9i8+jBlkx9C0hQEcEFxkg4KtWwEA+vUInb+S4UCapLc+HN+/dfHh97/5Dwp3DiG83I5t0bvB8m2xFor3eC2ea1sNE |
| O+okrjT4LcwW5yGmrVs4TDaW2xRAxxjM/FO8fNO2yMoHB22jgkmn/+rsFs9/K7eUcsHr36rmYPbrlJWo1dlpLvuIzZ8vvWCrXGI6 |
| WJkjFIBTCctlmquq9m6FjBQxcH8tfoIyD7+fyouzMJsE+MW8llQFLjJ+QJ38l1apADRJZ2ah1sTEBbZ4/FwIqCpWaqoAaqD3rBS/ |
| B/pMAFE0B/gv/6BYTxRvjxW3EO2/xTd74rJzoJg0yEaU34IqbAMqUL8V4XaAUiJopMPBUTzBW3fluMCdgRamRhCleCXup8MwxA7I |
| KSRaxEi1DW1y1aqlvbH5+FwsFT94eP/e3s177ys1NSwszySFrID1/xgbMFa8Lq3ruIQhLOSzikWqtFzWV0oBRZKj3hrjbDpRDj91 |
| bDGV9atkkMWrZ1yqaxpMcr74uSLqMDHmtQAqurSC6KfoG1YKIFV5TdAVuHI5cLNwHU8zYw2PYtsScg+MSuH8q+p9gyYrOxdHc947 |
| TpVKtSF4b0jHzL0DIRzgiJUUJ5DtkEsTbo/nZ6YVj44UVK54L9p9BvIvl0KlqPgWyccy9eeChTO1FfoMUPDDkv21R/PQm1z8M4hv |
| XOxjcmOWkJvhEKTAEL7MTy/Pf4TS3BkInJ1ms9xGaJEBrFJ6Lp6XGQeB0rDwpeeIOIb2VQZ6RwV9EY1YTnI31QjvHY1wROHroBte |
| WRdKVqUgjYaKPRpYLq7tautbHKCqd7fkar/KijdLL0JtcynK0BIfILSh7KY5TRn4x6Lc8U7ZrfjhtezUaHz54qsZKGygj30eeSdf |
| /ygSTsTqtjX8S+y7F68+yb+L7uvVeUTzae4tpt1GI+XIZm7nBmbKZ123FiyDndBE65Df0dJvOwVV9k5njIJqCjVVX4mOQKq4PP9M |
| QaE3A8x/gcvwMy/FT9Hx/PTiHyKuAD9BBfkL+KMFa3P54t8inLYd7UKdpQlehQZ4I0U/rhlufa80gyPnFKLLF7+eOuF+rsNwZp8E |
| XQTTlSe6SDZmQXLElkh3+CiurSQrm7YnWSY5JFp9ESuMEIqnxk36DxkYGRvxvDdh733q97337txsNluv4xDZ1LEQTPxZihwuJYF2 |
| Ho0qFQs+vLrAV9V7GwmjWfPa6qZSzh5kQkucRc6DbIlbL6tSpyh3xMT6nuIUqlcwuEPfU51E9apaNdP/VK/KnFSzgHQ502O1Iv+y |
| 31flC4Gt818ll2hCPVWR/Vj/eGBtblVcjV4sdeydCQWzxPCxxA0bpwHuHM2tPjaraKkZtFSH3XHrgxoMshNyxC64Nb+EO7afHBMj |
| kAXkGWFx1k7yKumpNbbD7chNX7LTGYb08A/XI3LPjlPQkU9C4JYNUAV4vAWckeQreftPbz7cHTy4/vD6nTs379zevauFVlTdrR/8 |
| AHB349YA43wG0Pj+jcGN+/c+YM2ZaywZJbibUtrH7cqVSra+ZMwdTMMUzvrjgQxDOh0Q46oQB7wbj+aT4F6cfYC8iR2LRqgGtyws |
| 8IUWS5zHRKnWCDNWSkRQTIM09Y8D8tLP3W0AdeOQIqPIanwSXp7/OPIenGbjOOp7zwFw8iU6kx7evNVtZKYzILMg8casEZmgoQ0s |
| dyN4FgznGSItb3kgjccaiBaXGg4qfMqKt6OPbTv3BsZv4Zn5L5keBAbi2K+BitBsHo372iyU2Zgwe3WgzHAGjASOisnEq4MCoAJ9 |
| VujooHADzOz7u6cgdU9vPiMHzX0gF+7Ax6dYrXpE7IBnRkz5GS02DMZgSQMUSgp5+JXcHPMspruDD+KEO0LeuVuj4j30MwYBGTgA |
| P5otZKjL+otpmRO+tDeSfefuxW+98cU/gqhryLtEWZfnfwfLcAwLNEOCVHywqXHBBXs/0iUfIbpyVoflD67v7lJcAyeQSrOxXfPg |
| X5s1Hs7VAklJcOwKhqnh1y0o79K3TfVbjz5CB/BHu+Z1qyKegm1wQ9MgFQKZkulAKTw1PbHQKLCQtpKrgOSuW1U6n0chsGTWM5pC |
| Vfff3F0Lf/Vz5yuXw1U8z2bzzO1ALGytbKCcbJmLdEGnwmqKgMN8npgTs82jGL/jnTL+lwe5EbDm5U4QkeOQ7jKUgy8cI4TvtYJb |
| VkEJLrHJpdwXGxZICeoLSYfZFE7iHCZdKqbeRlzVUUdfWr2RPdiiAaf+swpQ3pS58KboHqjTuUoWBbnLShs58XH11lxYSZMfsYtE |
| 2mv5JaOqkVz8jl0zNsShIdsqrAv5JdlWzxrecxwMuCGjLCysSQILQDRlCgRBBFtOnVzRV8oavlNbGKtjxhPl94TiJHVE25iuAnyE |
| 3OMQN47SGx6uuiOq8VHzSuU7Tu/t7cIYaid0wW7zU6TbdgWUmlfs6MDiUYmrkNfQXQhyLzfd9zVvvLz7K6cS2VQ9AD8IjcMG6YdT |
| Ynbxj1OMmH7x5al6aT2VJ0l+5w1yRT7/M032YgyAwlHMGAw6PBv0yR2LYcTmcioCBoTi545+BXcEKkXW6pJGRn3zAu2e6mi2sAaW |
| bbSNShtto5tFNQ4L4BzaRjs8KqlypvAOiRrOuTkSypwW8sWEtfqvwExhxZTwoOeE4jNptZJskDp+nA/Jg/uGoBelTp2INCkemVrR |
| Is6OapY4MS0McMcZ/2YPG2ONgcXkkkpZVJg8XVptZ7SXqNPtFAK5xGgiYsoetNVXYpv0gEYtKKuksyUCH0mENfQSgeXGSzNXowce |
| Feg+A2rLBoqZ/StLiPaw/JcJh7mC6Mlhllna5MvJG+QFRu3c5s7WmEQC/MOopywwunDnv4x6cn3xylf8bdSxe7nbHF70doIKOcNk |
| 8GqEZjZghKYw2NLqmVBD7J8FoJZPAzTiwL+GT3A5ZRaAO1CQX6Pw+EBGTQx6dP05Ug8BNkPLLQ6Jz5b1ssq3SisKKJvNC5FknMep |
| w1Z4zfK+5iO/0Jm4sqDOsAbF3Qo3CWsgCuPRPDuKcC17j24ovv7s4gtxv8gjSLFPL4LDF1g4JTdRtXnheza8+A0lQWloUyiZLE2l |
| aBe1VsM5OucnPA0F+mzrTSe/udzKmZ975AlCF+54qJQpxQ2i4zeci6HNQpzhhSGUfZrz4OKyjv3Uz1A5lFMnp4V0gMf1IJ3PZhRH |
| hreHKJjpGNLrVEquV/RDf9F81BoySC4Xr/T56cvBREU0ISaM9PIlMU4S+1pYce1q1Mjxa9uCorJ6dhYxxOaNLgVoROf1KlxFQ/3P |
| 7KNarRZ7MZeSIsp5nyFayeKZZOhrtoUqgmI0qyhaoxUsNHNUq3b8aRjUaY7nc0hrkt4ZqdlrSRIFnXQ6t8TdWao3WN2GPwFeOsiO |
| NtpczlgeqihaZqQoKhvD2eYwiIbjqZ88kU1ygqb7nkUkLJ17RfwMIdA48uzhNfZFwmhO8+BzEPcrjm0f35AIxNloHq5VWwsumxgN |
| iHHY6ls4RtUEhQeWPfWT41QNX74enaJd6/mZHTVFJmFj5+YAj0FDQCrhktXagZiMIWk52iL54a6cBtPBHI27rAeVrpwykWaqbZCL |
| FhrXMa8ZsHee70IRnqsshc+Rn3IfFUv/QqiyWoeXG+Ptt9U5OgeBqVQUvLvrBaAO6lKbvFmTO01fZNWXnN3K6YSprnTxXJE7g6QI |
| XbqEueNQqni+nEQid1lBwH9Dnkea2FI4Uo2L4AUuvK54K0MhqzqaKKFW5vqaDcwJcSfg4kwLLbW1Ua5QHdVN3PNwRq2s0IiyVY0k |
| TAX2V2iAgopcuNwAYllTa1POBfqMgiRxMVJLFbqSqGVSkZLcgw3I7hH1NChsGFXZk/AdGwKJ2uxMF7lk4LIfpaCrpARGjTk4saxw |
| k3AaKmnhzIsEty8V/msV16nHByqTU6yO1osI4gt4D4JZGUDYwQA3INPKu1d3HjfeePegur+fvvPn+/vRO+r2OSJxnSUsRPvU44L/ |
| VwLTVLOkJAG/btEGYxiqakYvpYeD4oG74V3d8SZBVBHDy1sc/LDRtPlMSRSIywzRtlqMJh1OfODn5MO1/JwSNMunbz+uXekf4B+U |
| 70mMYZM01YmwEbVptJsO6bQ4FdbadGOkjBU2ezNJNnFCBg6clRjW8FHTbOXKHRo1fdwnAbiFef8CXlatHlTJas46Z3ZyHOBAd62L |
| njA1n2lL6nVbPqaeogOvLPpexT9MKzga/sxxVfdazSoDg67HqkVNl435uE/770DbtcxDfwbn7yzjGxYD7fM9yzazLZOQMgDdWhsG |
| r7M+7Jjo+PL8n7whOpKN+95z3vUZfAHt//MZRoNBMRvjTHHKZ8wnjI6CBAmIzEQVxYN2kPrT2SRYBHGNOSARy8F9Np2hCReEvL5H |
| GiaUgWYz47+cTEmwW02S1VR2La7OUORdOqXNMsKC7lgsnbwVI/dLxeDAHMNmc3K10AGr2ERbbY05rgSazGOALeiAJ2jdgdMxK7gX |
| ZCAhD8kZZ8fiKoZmSJ6czG7bVEPb3MIau1mVMxQhc9wLUB/y7bcZOiyQKOnNLFZTo8Eo5nRlm5dCPTvK32YtpKcd+rfxBU9Rjt0U |
| JRgg63TH5h8XxVBvFvhw8B4n/nSQwsLubBTWaRaAoIaXzMAD/Ul2iql8a9bQf3ln6z7+kKUTSgYhsK5QXGynDhfN4gmtkyEsJyZx |
| rMgu4Sx4Es4G6SwYhv5ErAgpC9XC+SZGsRwAciqC9+dJMfN2DVSqeSaid9GOQKwYeZSocfauzSD4WHaG6Mh7jjSfiNzfkLKQAduV |
| 40qefA2O4YPlmdlhgMsc+FBtFY72H/zq/3B+hSwFaSfdQaNch9NGdRnGg9ILYz65V5L0XfaTyekgBSaG96I2nP0Hi/rfi0VpQUkF |
| tS3nONJjycZ1CL9kqFQz77Dtu6xKJ/iejfVo2QlK415M5pOHsjijBhRTDLuAVvFptRcW1F3JqXAzbrQ53ry3AWVV7x3vMf/+uN/q |
| NQ8ki3/cb6s/D6zbo8yNrswFz9g9itTCBBWuktdYhAJsAZCDyXfKK7g2mhFSbNKIVMCXUC2I3PAvRs/4qaheDkhv9KPjoMKGtWmH |
| WJGNQaHYbDSHRpgEU/7kwg7HeZ2InmGtam9EV56iHeibLnWz4Oz3uE9jHNirM/lRBk8hm+0xiiDU1pRB3/baVQdw5uZmJMd1HuNE |
| rGlj1iyrXC2ZGnOhlOyhcMtsQVu5N2UBIW7vSvN/urelgGlBI87g+FojW2PsqrxVyarqeyanQUbnYkVxTCypFgw0K1HjMlR4mAT+ |
| k+I2KtKHKkYWiEQOY6OGZalg6dVfYtVXWu3VV5ljrXCCFpdbuID6owGJZYzns0seEWHSuOdPg3TmD4OimyvWbBinlgi7EZ/0ymwY |
| jKgoNiE/JqMWWnEBOoILxLcYnU5A1J1nR/WelBJQ6D9NG2kGXxshXmKe2kLfqVc4QO9ZwkWHly9+Hpke833ytjXCzUUMvxwQAaxo |
| EcVqcnzCYcKy7UuUXk+O5xjA8IA+9pVHBiimyF6tMgrSIYxB0vz+2q7p3X95/stCCCzGXDCFZ2/TQ1+7OjnbNdQs1/hcB9DrwOej |
| YQbrunJnIT23d5yZ68v7Up2/lN4Uj75FwBT8waAb8msk45bocLPVXqIjxU/M2ku3s6ATI9LP1ker2VzQibhvAelnHIeoljyWboLi |
| vir3BzhQ14Dnci/vX7vRsIGopD1x9aFcvqw8PouXXGm4fNOv2JDxtQFz0zdQKuDYX2O+0xomeWS6kYs64c8bsV2MR0fFdD1AJiUz |
| Z4kd3mAyNn5Tcv0JpwrOLwDWGt3wyUt33a0ir9ZQ6rhZnxqnv9yjDcRg1ddKbG81MO6dF5Q+xMB6LLEdGNq9rG/V7rk/HFUq3ECq |
| LydQDaXA+i4CVZI/F7x/oHre8qAUpsDZTkheQw3dXi1mm0DTmElVErqIetac3kUMsk2d1BzNFwYYv1RgcR6XWxKQe6Yf99rWZC4g |
| bB+qgToJOlNR3qXRfDpLRfqImqemYdrhMegY3BJlO+3iXY94UgZ7y4OOlYkeLP0GFdv0FKwrkxPlaYzW+mtX33j//g18XsPDgmv7 |
| 0VX8rzfxaYuehPtr17Cnq2MgnGusz6uUzW04RlJA5vPx3ge4j7117TuCRT0ET9GnEL6TOBthi6fhCHYb2xN1+oHoCLPQn9TToT8J |
| dlr4zlfeI7kzXLv16GPve9dBFF86MvDqOmupACaQzeFTRJH8YjiH9BZmR8Qn+D4TcU7uwfgbfXuJKP36p+Tyit6xQ+8WvRP46Jtf |
| ffN5dCzDoeQMJ2H0BFNV75AvAwAQBUNEGnCEox32oFnaX18/ijHC9TiOjyeBPwuBbcRTFVGrdcPyh/E+hkmcpnESHoNWoPUnsLIE |
| KOvDNG2/e+RPw8npznvBO49AxAJEv/MgiftPj8fZH3eazSub8E8X/tmCf3rN5h/xDHY76VN/lq8Bm0SanU6CdBwEWRnG1FoCzDwZ |
| F31tAGQqotiqe2ky1OqyxGlQcYRM9drVdVaP7YF1uQmuHsajU9HVKDzxKMwEke4fB3WAZDLhG4dq+Gk4CmQd/HHoJ+gVNFJ/Xsv5 |
| gNYnr1DP4plWCcGYZ1lsuIKJZiBVzKHN8TGKIYbGOuLf96yf6bzZX+OdGx/9JPTrlGwPqty9PP9vInno159d/IZyhfqwSw4v/sHe |
| koV0BAgAD6O3VML9l8QTZfI32I7Ua1/Tm14F3S7CJcP/vPqnq+sMAeqyrMO6XFMVUutCDQWw2gLLKbA5jsMR8H+U3Sj0Xx865RmS |
| 1Z7rQz8ZGRWh6kyvxNMgXruh5Py6uj4zW1G1vCXlsyjQrYYis3I4RNpYPBfRxcmxh2fBe/EzqNf0ml67A/+PgZ7hBEkpQl86W1Oc |
| o69Gzmi3vUiErba34XUam95Wo32y3ejegYJ2a7LV2Kx3Gu1HUIolG386hWFPeo0O/tfjX7Fxq9XoiPYmPeYZcRKQ73ZQj6E8tTfi |
| CeaPKatcBw4VDP0ZNCIb6RK10TllyersDN1fazW69prrtmVYh3Uorq9tXxAV4kpzboGEtEsFyB3DFP3cRrYh2INa177+qY/cgFIS |
| jZAlpBefD8eMIBuNxtV1Xs8CDI1iUuw6kayxQ/k+uaa7w7q3D2XMnKeOraTtZV5xHM4W7whWdxQ7N7cLwzOGXCYJ7VI3UJuh7kl4 |
| ef4jSs8APBUzamTe4eX5XxLuCvuZMyYXZ2AARjHmsFPXU4zIroDTyxf/mt8ARyC8zDxk8BObSYmSY/x87mUhsPoCSC+xNOMwxfyR |
| S6yNqInHcZDYlsfBFO8A/oAG08vzr4o4lMcpQ9AQpJrkFhsJkGacide+f/Eb33JCOBfDBj/eBIn14GV3qOhasYulEerPs3GcOLBo |
| kxc8s6ldZuBSA6uz56hSKjksKQEUZQA2plUEKAgBBqr5lJyUUlZ9xSPuFQ+50mMuP+habR/OKui5Bf806z2P/WL/1/vTaX3L6w2b |
| 9Y3GRqvjbTRaG51619uqd9Mtr93o9uBvr+s6WVY85VY951Y+6SxnXc9Vd92+Itbzzr5FBTE4RJDCLmEe9tf24FjzjoE/f2FjKa6h |
| HKXasaJtxz8geWva81pNortOvfP/Q3mp963IS8phUML6BXfWNQSDwXlsfZfibMcJZp9Tu3EeITb9xdXtUQyyguNoZcdjPDtNQlDp |
| 74RER//zF1672e56aKOpex/t3bi3jJSinmr2WlfXSXlWgL6KNi0J79M4eUJ3err+zDi4rAWqs6liG/MO0RgXZqfFMzOcHstak/g4 |
| BmxzqwHPpQ1TbsyiY9ycZIy/A5WWsgIp9ohSmOqIbNtajFvXlhnn6jpULC7ktY/Gse99dHn+OxAw55cvvsJb7iGFZ0fH0M2Pl1tB |
| vYQZSYJE15JNIWUcJHEd9lAwKVsTqmWd+9Vx2848LVETggRANHma4EOA/ErHcjM+4oPuMd5erIEWYmq+s2J6skKiZc2K8vI9OWwa |
| 6wZ+iqtkkyYLy5QE6XyScVmSoYcV3WAlbBbhCfL5WTwJSb3gzMsxBFqxZf/DeDqL8ZKKdy7N8R9ArdLdijcYwl7BrSx0ZbNrNWEY |
| 5o6TMJ37k8kpP0dZsinZxR5Rx7V7rlTMNk0UTdTQzE8C3yo1qz2bFbghuqxKEj9FuFvFLxQXNmbvP9j9AVj6aNAaLd2yBIQjQ6C+ |
| ui7mUn5UiNWr+0N+R1N+YtFlkVw0bQcYqYwdIo0qHLHOhGJntHeo23Z4sgAfjQaIQNvAhTgGaWJmAvj1T0Eb/4s5pbr4L9GY5Yhe |
| AHCuUOojCu1I6P74zkga3MBvRUVTB+NDlDOnS44uUGYVGetOHFktn6p9h27CjDlRViYxJXHpRlN6xL+wLaZ9AjCaV9dZfy+NvzBa |
| AX97sCOOXwf63nk59Dn1DmJoFuzBBDh/MIv5PCULoxVAUd8iVbgkP1P2o31cH07maebSlrVNeILeCXVmXfLCtB5MZyhBsXnQR2Fh |
| sh4V15bbpxwshtS0fKs5wg01eF3WCYVfU8XbuCbvlVZeYO+wnPWcRXOPJtAbz3+C8tbFb0Lv07nvZQnJASN8luPHmb3HUmq1TXVl |
| Q8ZrMGYsNGhoRo1Nf6Ox6eE/zJABf9Xhn5N6R/nQ8pr1La950rmuV6YO/tRtRHgJw8bLGDdeysCxmpHDbehYBeFdwFyr1dj0N+GP |
| TYFaKPKatBhbJxvT+obXHHf/A6cuA1LJtUmpMcB+rgknG+NoE8WCBwl2k84Pp2G24NiS3DOLp3XKdJB4yt/1dLo6Ryj0O4yTUj7u |
| bBgnh2Hm5X/WfeymWI+ughKyuPNBXsNYh7/HsYavZazyj1r3nOVLyVjXKF+n2FJmYSpTPVEFLNc7JxiXEx0LCwHbDbzwgc1q4CdZ |
| OJwEhQ7oYkn8OJ6Ho6D8ZkltR7cA5ZdLovqTcPiEBKZb9EgPHOijyxe/jLwIfSGsN00z+5Dshu7aHqhvMd72/c1Cv+gvInERd3Lx |
| s3jZq8F4UhifsMOvpIrwTkKhEHNvj9EFZf8rAINQeBe/5RFdAA+0tPXG/RNIFJ/QjSNlBZSTzd3Y3X0gRF9GQD702I2F1Fk5Iuj8 |
| R1iYu3I9uzz/yptc/Kutc1BJzHtmTl/m3dtSZMcie9Jvl/AeXXyFT/N+6U2B8OYvQXJ8PZSVnDCTIHXIUTgcX/x3TtVeBNQZgqBK |
| Lj+IzeiYEj1Ol6VB2/QZrjSjNisquRd9pbWhxxS+3aXZS3CPIJYuPg9dDjkmFxe98Dxm9UN/dIxB3XUMtAnJdKwyRR7R/R7WsjgQ |
| OIyEpT4DFiLRhmSaHTeW8QHJSQEI47PQ43nxkB8Q8fxkiFv6y3meh1tuRhvBWImDFsvJokqaJPFTtzZ7TTySjVMYod3crZYyjqLh |
| gVrfoweFVJ8XctPgDGjJy7+XnsCe4h4iOdtKk3ifHFC0WeTU8/ucChnyWJzPE3zOjJjay6yIYmBaAfRy/uK+tbq6jpdSwiFUdgIi |
| FfMTvbrOvatVP2zpl7rWX+tTBmxykq/XD4/73ptH3aPO0eEVVkKCUN9Ljg/9Sntzs+bl/2o2er2qWq2exkeZrItvkXV6su52W9RF |
| fYrX2m7XvN4mvmJCb5a0tDp1hj571basykLN32xvtDc6G0oZh+bNbrfb6fnqh+k8C0bwZXtre+Nwk3/xh8MA49Tf3Bx2N4ItrVRC |
| 8mZnozMamh/zWSOQ+PBKe6OFQDa3BZBP/WQKzYNmt7k15GXH8QShOGoebm72eFk69kfx0zquaR/tHr3ZM6/bgX9R91uA0y4M0cL4 |
| cBVdvBmDBO0S0ILaMqi2AHWAtdYWg0quWuKPwnlafwYLvNGcPdNLJzDfdscsnQLIrZ5ZmsLkWqwupXV7mxHUYfysnoZ/hnn34e8E |
| lT8oEpWQMGv7ERIqqz4NIzjm8Gq3jy/kvCUqKjX85JgwQ8OjH3iduXzjw4rvBR73+vYeJDwIbDc4jgPv49uUT8yPANIgCY+o9RCN |
| Bn3vxE8qjC4YVvD4OCY9X74hNMLogGP8L4ZLDcMET3c/81qbb+E/Nfva96o19sIRe+nKa7ffEhEe7i570GVPdtlud6BPfOEH/4AF |
| N7ps9WSXuGH8JO+y1WuOguMakFcP9vPQa76Ff7eD4GibcFuVyCXVC3ohQzD8lzlBwh/iLoahHpGNWRzGgMBMNG7kXueChQhXZDKA |
| 9AF6Tiv8N+U967SB2mre0J8MKwDMyVOv7m0AzVbZEqh0IOuMRZ0rKiG0oHsPo+molPvz9z0U46gE/6hjTPsEjRyw5PMpZglhi66B |
| WsVhMdIe9ljrKCkQgxPDdta41amKT50ucsJWzjXbfJ5sS8AkYA5pPAlHC9gs30Jsz/Vpg1+R24w4gJxZzkZY0/gkSI4mWIHZXOTs |
| Rkk8qx+FkwwBOZzMkwrubtaISI1SsPTtiEQ+3Es9vARR92pDIDYGmdFbTCMb3WbOOkRjVnfmj0bEPiTPWX5JOluI905+AvXkkmxs |
| G5+6VR3FnAnJhWFoxfGqOqUdTQIGF/5RZ2+RE8IYihgR+jOdPTaUwIoi6+sKplscxJ+Ex1Gdv52Eh0+QUPkP52kWHp0K7x7lGxtP |
| ichg43HMb4rdKcaWBYI2mzbKsy7Gm0dHR+XkiMeTgb4wopNe7le6VbZMgyNxk487nCcpMu9ZHMoKKrXKl+WQQhmB1hS48lIbhvpj |
| 3C0MT7Ij3idS/w8q9ZbYIupcm4wb4UlqPXfZcV1cEFK/1FWRp65YlQ1tUeQybG9v29Zhc7Tp94blOOkIpMQzfxhmp0pZyU42oe5H |
| 2Rgd1SejSqtahjBkKV6Cb3AGlc4m7NHqy43S5qNwqBl5vkQ/G6XQ1lVw6zq8DSPUhvVjOXiUPW/AS8G8crc33fyZU3c9OKGXOj28 |
| 9rqyAOzCsisLLHeCRgqyNIdqOb5uxYScaasw03azKYRMc2byBLfPrKmjnywDIKA0FI9z/Kk4DeFP4SUiZNHlDluhpyzkYgY85nnV |
| se/ZdnP1c6yNh9Wm7RzbxE/sX+xTRz/HuIxLlVubLdSeUA7p0PORra4xDearpAvaXJvI5W1M1tZHoSQJpnnpU77GGFmJhZMAsxPX |
| 0T+S0AHy8AavX5C7mT6mwKLEhBVPx83VT0e2F8Us5BI1nYvUMk9BtyBgOQG1WeAFuM7cmwZzlwUaYphuaT8t+YQt2OJxU+p4XJEy |
| T/R4njE9XDIVdSaKkqGC9uZmb7PTaxaXfbPZzAHKA51KmWNPlYfymCejzZKL23R0hmlUG0oQUlGNtLTJFR5J7dttK7l3OblL/Gxt |
| trdGZq/5wPoO6og+aVWl5tvoLrlPZPCXseAGgckCg8o3BWGUcaKNTeJEDAq0YlRrOoXa5CD8v00pBel6cSufgBp3VdzowKWcO90p |
| bOsdc1/s568oTJOXd/0wyJ4GQWThKIyc6ocxaNJTnbANSNx81tWC39c/t8jki/drkZXYqbcgTeugoN27XM5pSkw8My04qmwjj3nO |
| g4We1bFgDAft9wHWwych7JlhEk8mUi3kpN5bvl09G8+nhyoeF4jRNtptF2mXXogt5bf52WGukvPUMcEoWgS4kJKLG2jxaxWOxkV2 |
| EFwQtHZYiFrbHcBqEgYzMSL6BswiONLILTcOraahtXpWFU0tzqUZ5YN1MZj2VjNKG2FaZ+94LKvXFeQnk4/1rBogWV47y/O+onxg |
| Kn+yYGn5wNCmixyOM6wsngndUgeK4lyYzbtWKJbaqhz2EF92KuvEdqBuOg6/zubiY7bjd7rdwDGihE+dZadUhF141IoRyM10SUl/ |
| he28VV0EhlVGYLApSpCVvbULgq+EzMXO2yvrKDSZzgb+a8vQUZgZru20tal7rL2Bxu5NtJn02vku0yaqmrFQXuD7pnnFxoRzYf/K |
| S52gKkv7FiUIO1d1HMsaHvpH8RCEwJMwDQ8FVqRs39Z1XfM061RVVaAeHx2lQZbvFbvRT4NBFbAKa/HaTyHDqKnDcM3rT/w0YwYe |
| K0QW7BXZ70bB9uZCR3v501rb3m9uHW5t+t2X49xIeqYhqPxEldBpxc6j1IIedMfWJJyeqWBYKIN1gMcuMxa50K4cxNzgxhhL1YFb |
| NF20NzSjvutw5Le2VTvBFJe+Yy59ZxXOvsCGg2C3e1Ybzia3yNBFEX2qvubz3pBR0EDdllbqVhvYdqu1hcahbn4/7ESZSQ4ug4ZO |
| DhjXqJjsWJQvU8950SwAdhfV0/kUDu7T4gdMocZUJ8c3ikNxqVUqHDZd3K7f99zmrI4uO6i7Wig89acJMjM/On0Kpwjbsfi6U52y |
| C/dZkuE6lrBPY1g5GiMQCQpN8HGll+e2BcuIHgO+YkdtZ0ePmc36wBALOZfSWrD1WvLwdphsW70O0iqKFexaXBxky9xSOW9rHXO3 |
| GuYVtstzXNp4L/tkV2nEN6teI77aWLcdobnSoxW/fq1HuN0sRul255X0I31/Z0l4fOwUNJxCnyGaLyP3Lavflho1F8puxrSueW8v |
| llj0Nq8i9/Xscp9lg4sx1WtqO7Owy8Rkp8uNB0vLxU0nKBgq64CFyUYt+D/TziRqlAsJFo3WOCW2OuWXHrqKtrH5LZ8GHCXDcXCS |
| GLKMS0b7d5JCF4mbxkwKEqfppsALLH0oLK8MTeVi5wrCJD/QVDtHORW1HHYPTi3SxNHc8Ltbvx8C4iLXKlcUmzq03aPu9nb39wOt |
| 7i+ocYD81hI5f87+hbmZTEG54sZOO1Zoihhb3G2j3d6Slt+lrrIswK7C3imD8UqCmXtwkooTEDxeUT/XVPNS+7ATCuXKQ+eom694 |
| jfxmz+91Dtsr0lBBXyicJdou6LlslW7I5T4OOr2t3re8M7g29bxA65YjXWheNpusYQ99s3vY7fRG3xbwMoGS64KuxKWusGAruO1u |
| Nt8CEdThtttqu912V/MA3c4toD3D27CaHyMsP1Tx9rMownRQqtsgvua+GHU7AuqirWoccErDDEKRC8p1AyiGdF3Nq0c1ZrHSTvbh |
| xJ/OKr0eugO3midP0RLRFOqI9AEuqxQf/hD9H47waXCctR9GNhlFuLiSvyvTRyoL1ZG2slBaSqyXxIXKqvX+xviyoVFGxhGZicob |
| t/G3GmTrsnWYXYt6z8SWybGSb3+GZDzZgQsAChqdTcR0mxVY7yBayxtM6haLSbvdbm5Y2QTyGMdsinxLAN7sEeAgSfWIRBqdtgvw |
| 9hJ8e9Tpdo/yfcPY3qE/8aOhcvMks4kZl0JNvlN7zpuWRdumabkaMAzgOW08Nz3sm+jv3wIGJbbJ8gCYfY/bbvqxcizL2nQZUW0w |
| itp2ElT7lSjKqdeVkZeRnY2vYyz0mSQAWQi0CauilAv4+cITVGRFlf+xuYDaTTHke+/RQxQVtARvVjXmxfzzmxY3T3ZA5b6eW5ua |
| KUnVyvDTcH4YDkHv/rMwSGAgumpjYVVANeK4Y6MqnbkMYlZE9vuHAYzID3d5Hu2v7a9d0VHsH4LkPc9Y92FEtoh67y2vvrnYP4lB |
| 2mq18SBW5soPb9gFXBqwHdFVb6NXWqGx1at6ne3yOi2os9l9yzJ6t/sWFladC/79Sr21sfkWrPiT4Cn+6EkV1CAXhyuwFfOoAuNr |
| O6dWj1yXhy2nupaF4prVnBMZzu5KkFnh7FwIn04iBpx+FE59blNAoz16OAYROQgU6RfJFkel6+MqDZ+CfpeNJQyKb7B+bHalVNUr |
| OIgWeFzzSgl3KDMrmuEUuU1U8tJ8Y3Gjng12xJ1m81PQpFSrhxFiwzC2GP3MZyNoKawWFpi0VkdBUO7PqQl6vBF7VkpxkpK4bQvJ |
| y+ok3Vnacr/dLY2lokjJFgVfcbq0m6U7ZbTcVBylixNTwgpwd/LIAhfk5CKxXYRcd4PA68DWNvpro0+F4QahpNXh8VRUxE7czTw2 |
| k0oFXqyG4G5VrXpMxOf0e3PSPRcK+OkrQNFFeOvHl7jHtDqwOmR7uuZEMlMGxzlWrbisp1MLOjfyiAXrcYVXBIpxlZRuAZbjtmWT |
| BV963p/V6T0sxvCK8DCkWGDqaFq9SIakq1UYNWni3HtbMf5r8ZWlNZfwGnbr2nC64T818o2H//E41KOgExzChsMf24f+Nnzo0g8W |
| fC0jVLV9yg5Y5l3sBrsJQpODV8CMankn7i42OtVShxnHPpDr2dZXh2Vmer5Y6Oka156NTSPsQNnRpWvzNJxMgCXhI9mKRd4CVd13 |
| msObWtSTmsOKNeEwtzoFYuc9bLVdPQyX7KG+2St2IfJXlSJ04TYQofOLt0GrXb4NDMf3ko56Lrrc6FTta2ObMbEY1yikFHGlgbGm |
| Mmb0fYUZvdp2Hm2NjkiYxh+9Q39LjzHXycc2K+E7Xzax3/u8RodBG/7utPHHVrDdhB/OeQ3dqyVgTcppTZnjoqPjFZlv93Db67Af |
| lHzCyAiAoX4gH/IkgCghqqJjP4qzSmPsp3UYLTmtehrNoo3KSCqIfeGZRh0VeY8iwaofQV0PUdxH5ZNpfcAwjvCtxuB1AHq4AqCH |
| ZYAeCkDb3wqcwxXgHJbBOWRwthvt141QlD6WgjIXU0wQ8Ut9Np+kgddiSgucpfgosQQRXzgIksjPCopJfmPmuEFgpjtu9DH8NIzQ |
| HZsOQ/0rXuWKOrhhiylsrxKszb2e8SzYULwDq6u5K+hxgW6/5GVc3uzODE6kKNpPJ/Le8drl+s/Gpgw/1Sf55mGnM9xqLj3ORvk4 |
| 5IreJt1pe1MfaPuoG7S2lx6o4xyIvFApuQdfNWOgjc3ucHvLTU7vsPHsdjKbCi6SVT5f7Nmu015zqYseY5R3POu4alSFaTIxSssu |
| 2O16uQ4IvUuQu47yYvmwsurMqScGWrhHO4v26Kttra3ey3mTOqfvJMGuwTjayzuwlKHTtauQ4tua3aV8V3HKIf/h33cYpuZEhjcZ |
| dTWWkmsI8tKi5N51qYwotrB+VxR1t9fd2PZfzgSyKK5uVX9/BTFq3pPFZ9TWCo73yiAoFcDPUBgTiwO1u4LSNjYsNP1m+6jnb3as |
| K1vwG9sy/ca2iuc722cKg8Fw7ZrKsNPUP7byGYMHG7dZPftIxXmXG4u1y6sZyEna9ZW5i0tNtALMFJfodKJG/5tGb22e9PoFUntA |
| GrsYzA3EJCy9R9PDVvk1/EZ7iS2ko8ZA+NamBi29zzEl1rEUxO9IsFc6uNrVsl4dxzuHsSKqVr39tUb5zRjTe5u5nVEyvoW8bEVx |
| YHXJ0+Ev2yvzl9VQlqdIkP6pbZCefLMeT0L7ckkiOqYkgFb/lWQjMXwRbFMcW7G/16kWl3X+qqpsWd+vrH7aO/+2tEaFq+dCzMao |
| 23JU1QzyWn4cNX2BlkDerIuScZlPiMP/Uk9MqP5stJWCdo+yKqIiIvIXWjyw1GTUxpWcjAszJfjSe7kFXnFNfuHlCHzTI0bK7/JQ |
| qWttdrgcIhIYvcxd3qKMXUVcKSKsFl5i5gqyRB+5ZkOhziTAb2s3k4ujlSypB8iYjrOnA9XRrl1dJmjjJeI1ZIaogr1FxyRLTV4z |
| cSskruKLAC4XN71Dm+f6a4zn01wJLaThyu/jchvu6s7zveaW7/eMEJa2DKu0Oo0bKf49F62u7OPtoISC+6ITAHVFyvyuymeQo7TM |
| g3jZPpaLaegU+jXpsCDvWwRpQ1aWIGbB7MoyKXSkA+JRx9/adpKPE9ClRW/BvNhfm81/NzZmkdcNrJVMdoF8rXZUXeIiv6te5EuB |
| UUQ6t81I59VtvVY3qdUF7sUhWpo9qlvODe2mXgu+1TCnZRR/y+sa5T5EqmsDb5RLKqVxnmVJ+xyU2n3VsOFep7pifGh3tYQfrz9n |
| Um1BWLGC9NXy4C6MHF6aL2x33ZHDbZdYpzhIqXSTJzHSShflMCp2UZrCSPLsrU57q2XvRQ75UgF2W6Otnt8t7Cv9kZbi/VTzZe6n |
| Vrd9G6EWr2YKbyumcOs8Df9WiwVxayULonMQzdjweo3vzjGDJImTpa/Q2qV3aPr7J941p3z0B6LR2KBeOvmgtbEMfnS0LViEXvIk |
| efUEFG4EwBz6R2GipzbSL9w65RhA3lNzfCqLWy7t0BbM2X3lYM7DXvewXTq0kxt3yxPKFXj0S8Y0ilsHNbe0y8JjmFYcnLf9sk8b |
| 9NQjc0mrip4S2rSotBVjtvLS/dLJH0oiNg3FMgmy4VjZxD1F25Vr13a45K+QzsMMkcn7V3Ofs298FOWDDRnEpZ/BDhgFlqy1rS1V |
| hD0J07k/mZzyl06X8VlsmVdWjqgETnJ18d2aqGY4CWeoAA4zNBfy/y+LidJxWyQE/U0YUyC3yh5aCvhO03qbWsxGDeozbeyV0lN3 |
| DjvdLfvVUNuGRFW4lkRREIlt8+/3SfEaxxPphi95WNDrHeaimvZS+XN7QInDL0cKf0fSQGRKiVeMyLw8+I/e2KkWlA3yQbQC1xDP |
| qJelcTIbqNmFlpF7CgPm4o5DiFGfE9fTtbWLBjKtTKZxa5dzEfN0xzRtLZLrNjaV6IcV/LlYNrd2IV2vTBN+tOn7zZcTtssivBco |
| kL/vXFX8m9P3QF1apmySh2EfcOIfToJR9XXpnhvVZR3v7NFBesK8EtVTnRH5P4LwGkRKjFUx1Md44ktGXby88G/Lxt4VwJvjuaCX |
| i1CzTGsepfPZLE4ycQLm0YqNza5GirCgwHrgXApGV0rDzNQxirkZ3fmMSh9HKHZacBERZ5ft4Tkp5hGfK3pX2DcraI2r3Z7Y09R0 |
| X8aLTpHfGMj14QTAcKZO4z5TzPpZ8o6aESwupmjJ1sVGZShPX9nniwRK22iOexGy+7qeDrE67Tgtq3o+N+G65Xc72xsLg8oZGM6k |
| N3abyKaRgmDL5zdQ2tSyYDYrvGlQgN1+yUMpRPhtcbeNl8W45NX8hCz1n+hp/nQITNmpXDh9y/3nVk85vdES/LBws9zpOp53+3bM |
| 7IUcEq/TjpvH9Ft89/JFeKXzc/nDTnXT1YYXYxbOg05bw8koOPLnk8xOSFa7QquxYdfsjT7s6Zy6pRLh8jmdTZFOT9nObuv810BD |
| PcU3ygioKMytLU8O7YGkotLM1rOZI2PRe0fNFR+G4dmJtXdinL4ZygN5G4y4WttNNbmoQKp4UNBxlWnVeF+rPK07N7zGbe1a4te3 |
| i3sirYEV0UpAuwmBvpHFnJ/6yqOvZpNXyvTRspOkepe2DB06cnOY2bw6vbecdatOpHjOoHstvN4R2GkLsLfFJht5P0AZeek3HGt5 |
| 8hbzWUfH3HIvu1eMLdwC0tngP4bbow7wI6XWtxPY3eouF9gNdO9Cf9u5+MU4NpUOjKwl9lQEnPE5iETNMlZM7+UwclsFj0WB6fLI |
| /OMnwelR4k+D1MwawsZMYp4MoZCIxpm5qCsYEQ2AHhSFHlqlPTRlcxNE09mzAKPSocyd1HZAU6jbarTKR9Y8b8vGNkPny0fntTe2 |
| tOolEBwuDYEMvV8Kgnq711sShOHSIOSx+8thodkuA0HNAER9NN+y9LVMXiUOC/Kh8h5KOmDQTQPgjV5FMb7yfGusX5snsFMxS4JZ |
| 4GcVEC91X18NZvM+V+2S9YSug+tevaW1stswSu9qSm5rRK8260LRqFIwHJR0pZkMyrrSjQO5j6Lor2hxtyevc3jh5HgzTAlFmBwz |
| sAvuxTgNiz3GmsW8hOS2mwrFmQ+t25Co3VU1mydjUa4F/DhCRhZ6j3KElD4CX3PAulzX+vPw5oPtBvQyFND5fKvolaVhZaCZSR1z |
| wxwzk0nP3MI7hvKL6LaQdcywuCld8XJXT1ZuovUl9RSzL+XDkjhegtGoxKm3WeCRvDoQdg/j2hKj5h4uLj/iIpt8SdyW7NGtrrJH |
| LWlfBclp2TZLAXaN1O0oI6mZhQ3b8kaBumReXskzuj05ZcEvZJFoZgPbkv1zg2X/3Gp0WP7Plsz/6cggWrZ9VP7VK+xEwQJEDw5O |
| XOC7JlJnSXAUJOhnNpoPg1F9Ggu5Gn9zFL9Ni/e20H/FL/9IORGVmCU8eLw3wilenPj85lpX7+w12Bum9cNg7J+EqIzTe2lGNTaB |
| tdoaiGV4PI7W/dms8cN0rb8G52Waebt79x9e//Dm4KObP/B2QDkfn8wHn/oD/rLh4KRF2jqr+72Pb+7u3b5/b3Dj/sf39gZ3bt+9 |
| vbcLrZ4j4vroRABER+dHTRj00OhwJjvYvX73wZ2bg917tx88uElNHyOQHCfhCK0Dk7mf1Y9DP64DSusjP6yP4+H+msijiRIfVrsz |
| v3zxVeZ9GF58Hnujy/Mvh97Xn12++Dz0xpfnP8kbwIqdhMFTbPK9+SnWOf/LaOydXJ7/hTf85nMvvTz/b96xo5eaNxx/86tvPgcV |
| L0su/hHaff3Zxc9iL4MqsXdy8TPv0/kp9BRByfmvofLl+c8bOayY7hTGvbFoGG948RsvGoeX5z+eImRfYnf/N3X3i6E3jqFmRpD/ |
| DQCiQFDzouNxePEPkTeEmnPvyTj2BeDwny9mXnbx+RBgvDz/bIhN/o1gno0JAhpoKDq9PP9rnOU4iHFOEk/w/ec+tLj4fOZNCOUN |
| b/F8vv7sm1/B+DAm/P41TAGnFlmQ5R1fvvinCEH52xBR/PlwLBFxHF6++CLkiK95h/ArpmG+mGLjF19m3oSGMZEyvPgtFD0Z+yGU |
| Q19/AZhhAJT2gah5AjjKEGEvvjDxzmBEkvl07kesTuRNLv4VAIaWEXbzt0NcEvzr7wQqsNdnF1/gr78Jc9pI58fHAZyioxsoTva9 |
| TSo/qxW2w2Fcpx0xwQ0RR8fFnfAe9MzXxrvjxwJg2w64h0B4ozmbzl9wIhGLP8kb1+RaIfxAZV//wpcEAwQhJ6m0KRL+LVf3gLaf |
| IQn9mgjkr3z4D8If4Zqf/5O9ew4J/wCNvyLKMzqmaZ3gag9xkCltrSzB9SSyqAHeaF4Ttq9rJomsgITp5fl/Db1D2H8N76Nx6CHT |
| 4hRUgtghbklshPuQCHx+8fcRIeLnMMj8FD8BJf/TkO0TLELIYAdwDnT54jc0Eb6rhuOLr7T9jtBmyZy2M26/K7AXaEto21LBepST |
| BeGMdt5Uzn4CbcKShQcZmu+OBZzDSf0dF/UT6fuYVgb/NY5RUoEzeurbNgI/Eq4D+uKLnyFKadbQAJbqxef2LfGDi3+YA6wvfj7n |
| 3AFm8mOlaQ2n8M/wd3R8ef5L30tgWqGXxBzLGsdiU1Z4N6028oe/j+zHArKSmqMFAOHjWvgmJ4JRgUSmF79F5kjLAYVshpzcNag0 |
| puc7kXPFw5XiW+qQkRZbvZzV1YC8z3/H6SbDz79TSP1HOqoIcwA2I1NkjF//KCKu+y9sRz3DPUwMFEicOOuUOLIFxob3iO1pGBqp |
| 98VXs1re/oRxE8kQlLmV4YrvPzg7XlAhO8YuvhqOa6IQh/rFVOMDGjr5hxrfI9iWI4ShjlHEcNFK46pN6CDHasucEgdSprr+8d6t |
| +w8HDx7e/+D2nZt2mWoE4jieH+q2ifwp7ZqvfwpszLuB2/mnsEB5BXxSDSvshigvEXTaca5ASe9EYd09OAplbZjbb6aGCIB08hfA |
| pn6mnIWjAETZcMZkXWM8vgOyBJoD9dChjqsw5gTCeh2zPQtijnoMzaOQNtpHKBZ9pBJu3btBFAI8HVrKBsHUDycMX4AuxNbmRrvZ |
| bP3xMX5AVZBXtXAqGAOZEyxfPTpWBFaB5lsI8LG3BwB8GXn3jjUh9Q8A1bkEqPYhxMXPQoWsM7kcysExHMevfRkIp4hSxCgsxOYy |
| C4HMJIDDIoR/jf3COuztNgD7uG3/r8jbg4Pu3yLv1sXPCkvxIZ6BKFAQdsYk3cHP0eWLX1qXo6w+IIcd68CHIsLUEmtT2iGTLQUe |
| cZXEsSsrLjXmy68NQzNieez/MSiNDdCAGyeRsi7IogSTQqtrgJoiuQAx3bLv4YXZLfajwm7qUkqTGoxuw1JGc24UZEnV+97jA/rp |
| nwAIeBd9Nx4FkzQvJ59jKrw9YvfMVB6mu0/DbDgOo2P62PeO/An3QhXjXadHsLRRw5QVPkDL2v1ZEOXtzpSZkVV5V50ejCe8INWh |
| wvSO8PpUi5NgGB9HQtEXg4/CFK0b96NdMswoAMPvhwGlB9K7P/TTYI9LFnziR2HkT/bQjkB0paIEHRXCqfUbvpZ+E92v77L8JvKT |
| OmtAG0Zkp2LO8GO+xx6e94B5zvFj4zjIbrJ6753eHlX21/Ja+2t8vZnV9oYw3bvb6jVF+yku6C4tYunAeTXRcgQSIFs4tGO7m6r1 |
| 9FEXNlWqiZac9JESyloq1UTL4STwk1ti57ibqvVEW2Yvu4GxuyUt81qiHbv8crfgVrQ4+QAqyiWlbGa7aMIvXc+8mt6SEfGihlhL |
| tKMteBujMd5jjiclrc26Wh+Ll1SpJlqKVF1cRHO31SpaWz9CV7Wlu6DaOTkPE3TnWAiFVlG0phQLy7TWKorWwpi6GPt6TdGevUC4 |
| mH2o9fS2S/APraLeetFuzGvJzRjPTsnSf4eik0p2o1pRMoEgifeY0lrCAkQl0YrfYNBxVNZQrSd3Fl2DLJpnXssY8yGLRn4Pg5GX |
| GFqtbvTEts6iLV6obPRCx/g9EugWdiLrGn28T3x9yU7yyjZIFu6aQmXWCztQD+M4S7PEn1Wq9PtoHrG0usoHLjedjnDzfE9lAhV2 |
| YZKeRsNddkJWSCao8mi5aBQkUsRSyphsk2pldzja2Z0Z/+RnmT8c36T3pXjRGAZ6KA8LUQ+lL4DulqBbBbIbYheIuvMsfkiBfHs8 |
| bo5/wORsjySHVnq4oxA1Lz5Ch4Hb0VFckW5SEnV2TDEsYnf6ByHMNDT22iDH4WLfOkJYn7KHXMJp+KMRVWJSX5DQ2RwOn4As5UG7 |
| nWtCbVHX7g1JRHRXPgRhjMTGBn/CMs0lIbpAB0LiF9aMeLycCi3jPwlOR/HTiCCgF8NUMMIjXtiAat7ODl4G3QQpdAY07/3RH3kv |
| AZno2kmfdD+lAC/xqEoxK2AShGOFfLJkLschBaTB5Sq0kRxoH3K9A76hFK59ZQqI2grfC4a+9J1l32+O/VKcsyL0lU+5sHRMJudX |
| mTseW0XYV8CIAJNxClQNHTwe+Zlf56PUw9EBLNGVfO3fYO2rMIlsnkQcOidS1ZEp7T8MrCG5AerHqFJBBx+CFf9ohCOiLO5BiQCl |
| ACNvcXukwcMfEyiAU1gvqtjg3vTFNaPPYuEktnMxkm102Q+99vjnfy5+UfJr+CndlkvYl4sAWOkeQeRc/lz8Ll19H7fSK9IA5Uam |
| +SEZKGWs/pKEoaw9LYOxqNjhTfbxitqAjWGvv0ff9OqA4r0YTxBqEk5G+DcivsKHrvEuNZjzZg64ebLpHc9HH3aS6KD+JJwdxrhJ |
| lfZqt6xVoUs+lZwpwgqitCxyWucI5TVhwtezLAnhZ4ASZhLyt9BxidEme/EFCG0x3u38Ypa3fgpbKn6KrfdAwIphO2r8zwJIEkzj |
| k8AOy7LQ7BYBOcP4wmbTRcvK5rLQMgXOF5i3c1dZupeC6rvfIqfkaZqWZpTc0kWtoG/D12EFdsi6MLghK1yWOy9idaw34nXKIPfm |
| 00PAYJk8hKzQ6gbS4A4fVe/qDj6hqh39urzFB9cvN7DjTU0qKGW0tskdgYSSOonG1PyXFysyEugKcmlxCMXMZO2dXl4rdM/oJoIZ |
| cNslEg/sxui4Yu1ZrgMeSdUG1JxWNFJRu4JqWs874pTWrKVYjRWbxlJlISXlKSuUUmWqWVHGcSFI1eDfXUWuY6AZBlnA0xuOT9rB |
| q+s6Lqi+bXbCRlmGndCUaa0Z5BWDP/jcWO2aj2biWU0P0Tfq6+EHda/lglQzJ/37Q/qOG1I0hdoATOeH0zArE8wYUaDHAfz3fTZU |
| JZegkQSW2HpIx9wgD6M+mofehK7a8S757+iuhy7kiE94zzAVVnJ5/tch3cZ95U3QN6uhnvzmZs6ByS80GsqVxVLAjC7Pf4k36eEw |
| ifl1+98OvSfj0EvxonF48fdzvLn6q2VAyaU/3Fiuo0znfpyTzqffky8t7HhLkgbnuOUUoi8bCZlLISYijwXv68/QkSPyTtAR4pCc |
| KpbG0qIDr3xRNZwYB7Qb7Az9AdCz4jMFKvj2D5F38VVGs/oy81p2OPW9vRyobAF5fsWbXKuUgOL97tR/ApJRRcT1Se+fdAzL94TE |
| 9pq32Va+Y4n4IV/gkJeGDGR/lgaju6lyAccuZNHqZxQOmbFPRWj+DSaMl4n4PXjqvY8RTHBAx7d373PyyuFK+V3D/hqfrrxiBYBs |
| 12+EKEMNzpXcRqOhFtU0JB649dByMXGx0itOq+wOC4U07AO4gs9z1OHqgp47gz8CqXqRIQ/obt2fhevihgA5qmyI94tw5NGl7oP7 |
| u3sKqtCajlGXgMnn6FPF7Pv1vVM0W2F9fzaDM4Rcrdd/mOKlA3ca4BpQPDrte/9p9/49wB8uUXh0WlEG5pd9A6Q9h+A0B6SCcE8p |
| cfJmGt2RA8J8OlDoz0pAePLIH2f5buH7wj/FG3OJOIHJBs5LWQ3OneTn+AmC+QZvDj+rKmqBYOKnRK9061sR1VgKLhIzP+L+VGP0 |
| 8TKYFLCDF0MvuvjZqcYGznJgdBuRMjLiVCVTUIcUTCza2MKRTsBrIFxBtajxqQXfyt6XE2dFg6laT/ACUQmf6+OkMGDfEFXaVzjy |
| RyEaqXgFpTfORAqAvduYBNFxNiax3U4gCo/RcCfLlbo5jxnFeP2kztvFYlQms4y1zWJapRo1T7Kk3CCIQdCmDlyYfPWgkcKWDSjY |
| sVk1xpEsT/3ZmPozpd+KqlwbFOa9y0mxT8NX8/4dNl2XUc/BTs88YDbDMQiCiGFVD14WfGWDuKehcpZ3PY1h4f8A99i4ZpbnFEHg |
| aSRhIwz61RCP4hAzuIG+iuT5/AwkmFMv8R2yi9H1mfqTIV/WWIhTcmWZKGeJcuI47hP0W5uiysCud2RyCuKvCez7R/zs43KjvAWS |
| 4iWFXME5aNQEafkDelyoovRThcW562fjBvpjD/Uv/XJ1RBky9Y8CMRj1hukc2XztXUz9Zxy3rLr/zCHYTtHB2ZgRY7HqlEEP4IkO |
| duDv2+g8dOQPg/fm6WmloCnZ5Gtp0JAzYd3bGz1ikj3wc36cL26vq7wyV8mOAjqaOCQeQQR2IuRKiYq6uOdrO+41sVKluBYLUzRc |
| cIJ0XbYxU1Thqq3m8dZXXLeRNhOvyFaL7Tl+eTdGP7rvlbUvlmpH6ekNpSt90uYlr7oFd2dBMBw/zH3irs9CwDW3exe+Iur5t6fB |
| 4ZMwK9Tg1EkCka1zyaA1tVc68MHYxF6u5IYHAv3jsEJ3EexHnvZQaFHVWs5i81sgqfKcKVAN/ejjNHgPhLA0SFS8vA7Q7ifhcRit |
| BpUynOKcCMMxEaOAxYoVr1eMrnTIUUuQl/sCbLJnFsilaH3lbpaIPPsAVdVQJHsX4qgL38Cqy9cUOLYbs6J782QtX/0FxhbDuq+d |
| dMUOKeWBYaY2+I1Zw45KZdmtqHwNpK8ohsrAwol1ocnnZpRLB0p7w+NV12BVXJv+r86amiOuseWUappbrrua6VurD6vvha9/6mMg |
| Djljk/2IRcf9BM1JF78JQcLL8WtfuwYttzB72+XSJaeqgjaeT/0IpIVHvGnCpsQ7Lqz+WZEINaLmwyL7SLiMwejctc3lNiGtdgHZ |
| lrI3c8bvxfEk8KMCSAVmZi43Z2cC3kJ7C7ZfE6mWWW8sG2uRB4PgJMpyF9bnXU8QJ4YsnjqJ0+vLiqphWCFcJ9nGMwvzWun4UYUK |
| /RRDS4etiXSFywGZ+ESO+2snYf3RPY5ztQL6PoXRPJ6n6pmm1uDLx9x/nLVANrzOXyQNTwKs1uLkpNbCycBug6/abYhjcyhD/X5Y |
| VXQ8DmBxvfEFaIVIBRiXEvNFl9zAOi/m+4ITs7qlubZsyeUk+2MSsIvVD8o2m6h0273TRAqLxKtgbUqbJu/7GPC3sewK/3RV+5Ry |
| w4b4+g4sb9U0i2bqsOL6V+3kMTU+eNw8eLehVLbfA4sLClmv6nFaDQwzoW0Mpsr6E81OaMejP0PDBNtPN8bz6EnFUq+mzE4xEQLz |
| SgNzCNsq2Acp1HQMY7sOKcKozLWUSxdhcVev2ZBWNYl0AbO3TlXZhgyU9xP/KJPLX7Y9QaL4cspuBL3o8sW/zb0p5oeIOJNesFOZ |
| VXjHcRcOx2fjqZ9EMGhRV6OmfVQRGc0xeUG7UVM+cA9U/5CES+aC6mQEuAtcLK5qX9ly/uZiKOUdaJKRMnUuH+mzdmI4GpkcnluB |
| xvF8MnqfzVs/3jVkaJehvO8/C0Zcrl6NfstoU791HfvpQ3MsIU/pQHBOla+88wATR5PqBqUhAWiiMK6y3GXCUSkehLhUM/CXc5US |
| 1yTt9CGMWklMft3D7D74iRv+lBPHOPxMklOmaoxT0uiK1kQOzmV2aXEl3rw6ygvzJVfG4fjiv2vRwqqwyMSEi9/yW/LGckC84dzv |
| JcDwiywusY5EnDTPiuGAr+HdQolG96RYDosWCn+9CuXrV3jL+eKr6OrFo0ldn1qOSZM7IiNWmWQxJKOwlw8p2fEQ/9ZUgdzU/Z6f |
| KnZprC9FKauxn/rNG1DXFuFLqqxGS+HeZUBwxVIfywWPLLai3nTUrD/e308rB99Zb2TocKb3hGatT77zXC88Uwuox7NPQF0r1vMs |
| FYvRNqUGARX9VIJhVEg0+Pe7DYzLV/zrBT6UmiQH3I6AoMIRkd5Nbl7QdXyodJccj77+jHRTLWcK38NcD52xpFk8x0FGopDmiMOs |
| LtpeP3PDdi/OrrOnbDhkFD5gVNoNhvMkzE5N4Je0ActZOi2QumOmxMl75HGEacx8THPz5UwmM2FuWpgZgOc2G1GioWy5GX+AmcBL |
| 5stC5lKjZnHNhG/BxT9S/ooXX55SVg9MonR4ef6XUn1UQSo0JscEp6lMcVFwsPISei6KcfjvG/FINVDJMj555XUjBTtKBVhmRE9d |
| q/gHQxO4d/5laEXl8uSizNafj0LMQDOD8YMliUCDZKpsbKKKIZHIlCf8WQKEKMiexskTE8mRfxIeYwQ7CN4YolvELSZaUciRwRAf |
| HWGuWE5OLOVXRFlSSFODwazulzbPvNe8zgZi97SlEnnAVICl9AOo/MvhmOU3s5n0OAe9+B0U3BgnMex0vjo3R3h8Y/60H01RBef5 |
| lWreowf3QFg8hZM9CZ4CmbuRUrJyaISbY1Zg3CvytsFGRfbJjjG9UQYjf2El6IxSwByz/FLLEHMU11OSNEoIGXVtQchOUZKGP758 |
| 8XO57b75PFqK070GHue+BXuujMvvVsOUTq+ALoBZdBzefg/9yTgGgYPXmsTMza6BhXisW65+lVayljYivYTHwUNiFbETfUqDRD9 |
| 0j8JM3QhbCgVq4WavLNWe6vRhP9rWTrjVfr9ko+P4esBiCvcSUKf4YIbxOfaohoEi6vHubJJtDODaBNK0kS0O1uwM7mj8RQT9Nko |
| wclUDGDvMTigt1+bCtync984FMgv/b/YE9fd2tt7sCsglAvGZzHFvKKX579glwZfshswHHCcZbO0v74uGuJv+Jm3t0ytYB5TZdE0 |
| Q0+OZawiuTXArXmZHhPLGBxyCJYxtDgsDsUZC+1qKhSrrGBgMK5H0nhyIqk013E0t6+ClqMHVrFcDYbTjtHzFWczi4OLeHUW7YVv |
| GB1VV+6JX8lzVOzsmJeUK/TFnD6xM8fl/ZXysDPFhcjhvrCgAztY8knRl4fM3rHyqCethcPlYkHfus9QxDwltEDTUlsg3jm+z5Q3 |
| y+HH7xpX4lB8REt+B8s+U7fLMJ4wB+Fb7GXoHW+jqVgMhFPV3TCSFXpdpcLYT/MdsiiwRfgDX6N85C4Wk2ank6AxFsOBGm8AeTZ7 |
| 9olq1YBGEjjpUGjtmXKMs7r0wqybzxWBkL6H+Xg1dfrvWpDVN/FblbAbg1LqKNd+VzzblAF5MCQf6lphKd/xOkV60JLUiTfXTg3T |
| vH9CG1qRhCa70AJ4FWaDuZ0F04qSaN0I7D3RjJb8wJWpJszgGHIhhbEoZoF+VVgXaqfXk8Q/xbBI/C93O82tSY8PrhiaAqshQDDd |
| qZUruqJjMXrxC0dlQHAhQEdzWaeK8jdbkAWBMmfVqoSr4EGuz1P3J6+imVgvUraT7hWjy/Ec9wXPFdNDXLxwWlz0tLjoNTPKRHOO |
| z/0kWRyhHFNJc+MgvvKAmhCa5o5CqwWSLB9GsnwQCcqCeoZ4mavXSB9a0GoxlOd0l94eIYTwQUr9m3jMjp4yspj/RQ/q0UzePDwO |
| P92n5JyY6UaoYzyBsFChfThfUkq+TFFDyiQUT+UTl7gku7Xq5/j+HoDSKLr+KHkGjR4t7vt5dJ+isLPbYD6CkLDRRhUk3gfAYZ80 |
| NM+PO2p+rpw7TPO0XMtiSIkSGCkJufbXbpAapCKCQ6e0mCrJt5qy9BDTjolMmHcoUEHpRmnOKsZRYAuHSJVEZRYsauo4J2kHJWPO |
| 518qqydC+XTvOGPXq3H1Ux5Tr4UqYALzeJ7mFGuh4yvSLdQSXWw64ZLhijVH+oFjX4RrLkFnwsdAj0MmTy8zhZdONArJWHdqWdYM |
| gZd3G+xZRFoYJxwyyW5OZYv3pNJlvlvU7lQStE+AnTi8ukabOqiyS40szdhQlSzpfYWczJhl0ogEp/c8Jpfnv/ZJI/9JxDOSH6KF |
| TkxDRruveLSIHZwfA84wzVcJ0rSHaCpxmZwQ8tjJsz+c445xCGNVbCGTr+OUMw4xg0m4z7PqK50oy850lQNEj2FciS0YsxbsIe9R |
| 5joxOiXPumJdOcFn+LYGT7PtOLdeA0dZmadYTzx9Gb6NQ69koY0Tzgzec55HpsO5+0g6s3gAWDeNemYWhUBTS2KbWNYb0NRSvEF3 |
| ffP6QpJkY8AUJnjUFzL5iA5EKO1AsC8tbJjK8htxbq+r80Wsuga6Q+dfYSyzO4Zn9dab9ZkLCUUU2UmPrdG75ldWDCh5TlqhgYya |
| R5urb4P9jCHRzseWQaF7R7PvuWOHg/8pdJIP7+2UaBFlTMgyEVVOKV0h6QexjM4g7qrZdXvlFcLTlxb2tF7ozeuRoIdPvvNcmfMZ |
| vqLyhZdevvgfINHS8xd0fUxvFOE1oxkz/IkgoE+41xhJLHqfDY+tDPT6r6rr1ovPZ+y9JHHZgBcJE7pdMzMW4NstP5+DoI4vN31y |
| JQ95WCSiKnBYxEl9KW7E0xnUf6UVeWUh07JUZErdzZeE205vUB4hhkb9zFBODFd3GKx9yjsqk1idlOOkHQYQ5TPCRy7odSOWhUJ5 |
| 7ic6Bi7+ieipX+jptZFKzRFcbmEpdHch+LJQZxF8dlw+waeHcu+FQhphJd1bGEVBcmvv7h09HIHZDkuooaqL8oLpMgChLxlizMxx |
| PGM1yPYx1cglU72lLTeMUcOQGCUm7FImT5dHt2M3xuFkVNG7q5Y1Uy5TlGiXYtAXhW4wRLAFiI/sW8nAWrwytmIdS0wBldkyYhuG |
| WJ3JKiiKFdScLSSINJ4GKxxUC9QK11ErZnlWkhVR5p+0WivKxT7bnpOp/ZRgQWu2QdLljC+KTP6GJRmgkWvT2u2ycfWONIVmoL3W |
| tf1qQwT8u3q0dSgC91mQfnnaRIOUigPIvHZFjNl4lX0/LmpsPGrGLiRY3WJgBMBI1ILiKlXRidt840ezDYzgKAond/3kyXxGSR14 |
| VzwWnLV5X6kkoegXZ+jJQ+iqn2ThcBIolyC4mjviMYk6XiXEKNsoI2rXy7z/M+ZowlVNJY8kvZEO/X3neUBJ2m9l00lFTr+qtFMS |
| ylxlmSTVTCxWuOpAtcfHQaIO7nnZ6SyAmrwP7ZOe4XIpsMjWoW4XaiXzUDCs6A2uqW2vjsITF/TxbH/tmp5+5moKozjq45tf0MAG |
| M36qnl1dx9Yr9DgcBycJ2bGU7BhQCw+pAmjY1Qk+9BU8fS9+BrWaXtNrd+D/of1RSMscUQKnQkNoOvOFPqb/D/E57XmtptfB/6t3 |
| dNTn9pUkfoLLOpwnwFPhWJrESVnVOno+ApqgSYKevQvr/jAOo6Uq08vbULPV6NnqrRfRtg54M1eluFRX14FU9BIcMTq2rjj5pOGK |
| syp6M/eK86fgHGTEv1oo6eo6209K0Xeeq1xJ5k26us6ZCq/6CU9V2kAEO5Jn2B/2sB2WeU5t17mX19BYtZnnmli1yFYNrJrzRM56 |
| JPZ4du0hPTdlshZPT8Bt8hOeQJrxE4k466KKRNeYTM6yqrSi1hb8iVhjxbTV+qQU9cVnS2xoV1/r0VIAW2soJ7jt3QD2tQwSVbWV |
| iq1ygUPXdbk5MldvlTo2u2WuokLFJn+Qjquh1qa8nVQuqZKuO+aao9KDeKbyEB+9xnedP2fvvbKnx43XJ88w3+CZgne2iAXsaw8W |
| RdzuW6ilPEfkrJO/NuSsorwlVN4NeykIfZTcySzKZmJoGXI9rixsaH8+gGv5NSan5GvlqS521VV6p1cSdCMDd84TFgUuFOUGBrMi |
| H1ZUFD/7JpB5SjP3shoIy+nP2k4utU2bw3Jrq3z1jWb5VnOPxnKQWROj5RvQwonUXOIyebjCjUxpm+mLiriti9WyixLCtIve3o7z |
| g9IvLCam3IVF9FWRvZjM3eLIXCKwK1cBaIBC/5xUvJbsec+FdXp/7ZGPHtYX/0yelOxJPo4A5YlWeUGpNPz6p99gdufL878sNsWX |
| VmUbYJxCMqAHVWHGj5V+bmJZsQtW9eyALh2YcfBAjwQUZ61bNMbLVPXMdNdEHGkS53eeS7SpQhEd+LoMlpsaPimIbOUDgij+FMUB |
| PK7NU5/MFiwd45ldFC4Rz6hz7mp6rdgtfXHI+Ut1fcIeY7zmhNjRdVE2BblCy5xZ1ZCdCxyKXJh3kf/tDtPkVwXcxpyzAW6VZnx1 |
| OB/5aJeU7v/s+FXNrh8++Ni78fH71xsyMY6z6o0HH5cF2ukW8xUBE2CYUMCYliGNXK4KE9Rz1Ro2VOsjXaoU/F2NsMUjW9yD/Vpu |
| /p2wKCtEjgjUARkmZvKMw/Cs+WywFf7uYsuKC1Jtnpwd6f6eRmYaborDqaGFpGA8VLID220YRk4NlSmYWoF8nQxGkRc/NPSZS0vI |
| HzQjhsHhMzmEvoFlm+GyOnqJhi7VVtBaPeb/LH6VK++kupOefrfV9trNYafRgX579XZjG/7dbWzutrpQtOXB5y1Q4rehvOO1Nhqb |
| w6bXanQbW94G/BcKG607m167NdlotOvwYdhqtKBhu9FpdL2NRq/R/VOAxqHqF9Xvbl5YVODXiwq3oYTbVHAr+uE8iyyYtqhxtKrK |
| W2xf/1R5SYFCm5AjWpV2q5KHz3n49IgW0PB0VtH9kqtWdr1Ydf9E+PJwnl2qHo7jp+qjgMYNUEGr0/wjbNUsj4+dhGl4SC/Iuh/S |
| NDiy8VThElDJe5dyoFRNwoBryYZjP2WP1q3YDmNYZsAqSBFwt7TarpdAmf58xlTLzGIu8iuBLU4qLXO465wqIYwCHhfjI7drz+Qm |
| ZlXrwkNI/1kPowm9eKxvO4Ed2F4zuWu+81xN+s2FdnWSQtT5ZPGh9xooyDJv56wXzu8TB7mIBOcF+cO+rNYXRK1q1+oE5yCKb29z |
| LVrtUpTxBghqGKR6jKpN++HrdRTgdZxCcrwDJvnwl0ZB9FGHogAO9qlalLvLJG1HL6oG+qnyQpHuhsbeV1CiWPiDBXmR6XQmPgiN |
| Ng930eW6T65OwuKxisQK5aYxUwmTzeYp92Tg3ILAYR+4VC4NLTw76PV7H3rfvzz/R+/Oxd+RHGhpo5pu7lye/z+3lYpc5H9X+9nI |
| 4o9noHTd8FOWjXl/7frHe/c5q9Zx8d4kHj5ZBmDFAUU52S0kJFuU3EJl8bROfi2Jp/wtmeFygqamaWIvwxizZlxb5hIKq8fJYZh5 |
| +Z91VOMt9Uhsp0sq0fVLj3D4rY8wfIURLPc/BYYexfjm0DWuuhpeQBTBYXm8i/xdGw31MDNHU9yS3FsgbwxUuPRhw/pT3+tY8YEO |
| dkwpZN/3zEg5/ZmRT67GExM42YLA01gRdB9Prn2iv/vxXSfu716efzlUFWQ1IcLkm1/N1eghZSJMjWZpLf/Xf/5/d/V5/q///Hd0 |
| afAjrmizjBg48e/abWf8ks0Ekh1OpJgKzVO8oGwaqsQLOfrtlIWlFExsS2gqgf7aj01VsV1q5Vx8uQYiAy0bTTzQWgikhLImgKDg |
| u9A5O2tPG/KlJSKkBSPKytj5NOWdfiKdIgzTV36dF/D0JjqiebGOa8uC8Hp1DFcxGf14o1Cb4pDrpJZC7Uc5Y+DWG0xseHV9vKF3 |
| ZJg86OFtadKwmji097rLqM0zn/LGi1aCUhw+ZOjc0d50NrgxBo/PTm/DccplreqZzkwL99Umfy3ucAGDjYGVvLeu6wlABGx5/t0W |
| XJK/XPMb+SFB/DXx/93XWwQw/T5XPGf8JHed2Zas6MJQFJxNOKzi/ZK+MuXCluE2Q0a4bW+7sbV5vd1ob3r0L+y75bVa+PdWY3Pc |
| LX7bbmxSqxPLty38u9Ub14vftr3WJraqo2FODXZa4Iyz2AVnCcebgr1vK/+6XoqhFuHhpF5EE0yqDaVeB9BUmG8Ti72uDU1NKgZs |
| 3Nr+w0REbt78xH7jeANoFi+J2cbEMBe2FfXEl/z1Q03JL4kf4X1V89CW/BG9hY3ypKJc75R3Ipr3OANTJCTkHNpMaaadymWsWs0/ |
| r2ZPK1G9iunTXkr8tqRsc0rapdN7FVHaMv3XqejjrEzhy5yLRe2vsRcHmPb/nef8+QGvddbw2E3JmaL270fKky3aVdrvUzLP7+yM |
| kHg8G/biG5NwdhijbUt5fFmJW2YRvnlax6Go33iawIRpo2ZKJm8+R2nENhKCWN6vxYxAZQEBGc8apL6Gir+l73uWv8PCvxhO5Mgc |
| 4mjCfW0K/bBEO7M4FS+a7K8dhc/QvmWrF8/8YZidUrWmUkV/YU6NLcDG5pCkZ1fMB2nT+XAY0K2g7C14FgxvxNOpH6FVEdesAD+3 |
| HZpvQYvejHc0zbw4apDomf19ofxpSPlGn3mRIZ4ZLk+pxdqy5/VER1eWep4PWvMh7B2UW1e19FvSuqp0gpltyrvA1z78kHKVqKbd |
| /CJIfwyz7PXG6oqRGYUXIjUpzoU6BWADecohaAYNW10drVCqh3GYIlyOtyzdRKK9/Yj1rzjDXApVnSGqQpG+yp78MaKacRLQ6FaQ |
| xHt42laMZOajQM0hNhbV1KzimKdY8+fl4WDzyYS7VvIzEKs2UJ+A/SNP+sLRTr2KxkbP1IPunSZqXpHfS30L1WsunlZpilzgbjAK |
| fahZmSXBUZBg/dF8GIzq0xgR1ffY7yoCSw2CPJDLGFVejipD2u9SOAAJy8J/PQqnlKDig8RH7zjVU2KpIWyP0qpvW0/9Z3fUc91M |
| QJ+vVOni5A0U0lL6LiSUV71mtATw+SvMsrlX9zbEiGcgehVFYtNcVXxlF0gs4M9wUd4vhccg/PIx3Xv+vQrWxQRq2F+lWlUk2Ucg |
| E/oUGCplWoGwp2rv2lUGLNz7Pm586jeLsYbIN0ZyIjQ2ipXmp/gWQzKiDmT/0ILVkV/xNBefEHYGBmCupYeZo0OsCBDl8FBy3AAn |
| y4eXL4/VvOfeGER0tH+166PwOMQEht40jEBk0ApF2gW6KWYTlmj75NbFb6cgHJ2C9CeHP/tE1nehJZ+b/tHoGJMsmh2rnF+dpNKN |
| NkkYxJwjMJOxUXYa+ISKCI6SJByKWRfPAlTOEvTI0TkntteoBBfqA+BVP4AP5jEwFH1ggnCDv33yP38BU8buzrxbjz6GVf5o78a9 |
| 4rZQ7DfqjtC1ECYgio3NXUiSYDaB8+n6ZAKI+iMmEv4R7KwrjipXeRV8QNpe4xqvceyo8d39te9ShU/nsbOT7/JO3tzYvmJzbxFp |
| APMLbs5Oh8npLItRgEHTV3zkaeWNBATHePrxx7ff54qb6LGou7ka5lpYztdQ16l/5zkudgP2JXCvOs9PydpR9kG+DK1ulbO+dlW8 |
| drF29v8B83HiaQ== |
| """ |
|
|
|
|
@dataclass(frozen=True)
class RuntimeContext:
    """Resolved filesystem layout and run mode for one launcher invocation.

    Frozen so the context can be shared freely without risk of mutation.
    """

    # Root directory the launcher operates in.
    root: Path
    # Entry-point script used to start the application (presumably main.py,
    # per LOCAL_PROJECT_MARKERS — confirm against the code that builds this).
    main_file: Path
    # requirements.txt used for dependency installation/verification.
    requirements_file: Path
    # Directory holding the fine-tuned model (mirrors HF_MODEL_SUBDIR).
    local_model_dir: Path
    # Directory holding the "best-model" variant (mirrors HF_BEST_MODEL_SUBDIR).
    local_best_model_dir: Path
    # True when running the generated standalone runtime rather than a full
    # local project checkout — NOTE(review): inferred from naming; confirm.
    standalone_mode: bool
|
|
|
|
def print_step(message: str) -> None:
    """Print a launcher status line, tolerating consoles with narrow encodings."""
    line = f"[HVU_QA_tool] {message}"
    try:
        print(line)
    except UnicodeEncodeError:
        # Some consoles (notably legacy Windows code pages) cannot encode the
        # message; re-encode with backslash escapes so the step stays visible.
        codec = getattr(sys.stdout, "encoding", None) or "utf-8"
        escaped = line.encode(codec, errors="backslashreplace")
        print(escaped.decode(codec, errors="ignore"))
|
|
|
|
def module_exists(module_name: str) -> bool:
    """Return True when *module_name* is importable in this interpreter."""
    spec = importlib.util.find_spec(module_name)
    return spec is not None
|
|
|
|
| def run_command( |
| command: list[str], |
| *, |
| cwd: Path | None = None, |
| env: dict[str, str] | None = None, |
| ) -> None: |
| subprocess.check_call(command, cwd=str(cwd) if cwd else None, env=env) |
|
|
|
|
def is_running_in_virtualenv() -> bool:
    """Detect whether this interpreter is running inside a virtual environment.

    True when sys.prefix differs from the base installation prefix (venv/virtualenv
    relocates the prefix) or when the VIRTUAL_ENV environment variable is set.
    """
    base = getattr(sys, "base_prefix", sys.prefix)
    if sys.prefix != base:
        return True
    return bool(os.getenv("VIRTUAL_ENV"))
|
|
|
|
def format_bytes(size: int) -> str:
    """Render a byte count as a human-readable string.

    Sizes under 1024 are shown as whole bytes ("512 B"); larger sizes use one
    decimal place in the first unit they fit ("1.5 MB"). Values of 1024 TB or
    more remain expressed in TB.

    Fix: the original ended with an unreachable ``return f"{size} B"`` — the
    loop always returned on the final unit. The dead branch is removed and TB
    is handled explicitly after the loop, preserving the exact output.
    """
    value = float(size)
    for unit in ("B", "KB", "MB", "GB"):
        if value < 1024:
            # Whole bytes read better without a fractional part.
            return f"{int(value)} {unit}" if unit == "B" else f"{value:.1f} {unit}"
        value /= 1024
    # Anything that survived four divisions is reported in terabytes.
    return f"{value:.1f} TB"
|
|
|
|
def render_progress_bar(current: int, total: int, width: int = 28) -> str:
    """Build a textual progress bar such as "[####----------]  14.3%"."""
    if total <= 0:
        # Unknown/empty total: show an empty default-width bar instead of
        # dividing by zero.
        return "[----------------------------] 0.0%"

    fraction = min(max(current / total, 0.0), 1.0)
    filled_cells = int(fraction * width)
    cells = "#" * filled_cells + "-" * (width - filled_cells)
    return f"[{cells}] {fraction * 100:5.1f}%"
|
|
|
|
def matches_any_pattern(path: str, patterns: list[str]) -> bool:
    """Check whether *path* matches any glob in *patterns*.

    Backslashes are normalized to forward slashes first so Windows-style
    paths compare against the POSIX-style patterns used throughout the file.
    """
    candidate = path.replace("\\", "/")
    for pattern in patterns:
        if fnmatch.fnmatch(candidate, pattern):
            return True
    return False
|
|
|
|
def build_allow_patterns(best_model_only: bool, include_runtime_bundle: bool = False) -> list[str]:
    """Compute hub allow-patterns for the model transfer.

    Selects either the best-model subtree or the full model subtree, and
    optionally appends the standalone runtime bundle paths.
    """
    subdir = HF_BEST_MODEL_SUBDIR if best_model_only else HF_MODEL_SUBDIR
    patterns = [f"{subdir}/**"]
    if include_runtime_bundle:
        # HF_RUNTIME_REPO_FILE_MAP maps repo-relative paths to file contents;
        # its keys are the runtime files to include.
        patterns += list(HF_RUNTIME_REPO_FILE_MAP.keys())
    return patterns
|
|
|
|
def has_local_project(root: Path) -> bool:
    """Return True when every marker file of a full project checkout exists under *root*."""
    for marker in LOCAL_PROJECT_MARKERS:
        if not (root / marker).exists():
            return False
    return True
|
|
|
|
def build_runtime_requirements_text() -> str:
    """Render the requirements.txt content written into the standalone runtime.

    Output is a header comment block followed by the pinned runtime
    requirements, ending with a trailing newline.
    """
    header = [
        "# Runtime dependencies for standalone HVU_QA launcher.",
        "# Nếu dùng GPU NVIDIA, hãy cài đúng bản torch theo CUDA của máy nếu cần.",
    ]
    return "\n".join(header + RUNTIME_REQUIREMENTS + [""])
|
|
|
|
| def build_runtime_file_map() -> dict[str, str]: |
| requirements_text = build_runtime_requirements_text() |
| return { |
| "requirements.txt": requirements_text, |
| "main.py": textwrap.dedent( |
| """ |
| from __future__ import annotations |
| |
| import os |
| import threading |
| import webbrowser |
| |
| from backend import create_app |
| |
| app = create_app() |
| |
| |
| def _as_bool(value: str | None, default: bool) -> bool: |
| if value is None: |
| return default |
| return value.strip().lower() not in {"0", "false", "no", "off"} |
| |
| |
| def _open_browser_later(host: str, port: int) -> None: |
| if not _as_bool(os.getenv("HVU_OPEN_BROWSER"), True): |
| return |
| target_host = "127.0.0.1" if host in {"0.0.0.0", "::"} else host |
| url = f"http://{target_host}:{port}" |
| threading.Timer(1.2, lambda: webbrowser.open(url)).start() |
| |
| |
| if __name__ == "__main__": |
| host = os.getenv("HVU_HOST", "127.0.0.1") |
| port = int(os.getenv("HVU_PORT", "5000")) |
| debug = _as_bool(os.getenv("HVU_DEBUG"), False) |
| _open_browser_later(host, port) |
| app.run(host=host, port=port, debug=debug, use_reloader=False) |
| """ |
| ).strip() |
| + "\n", |
| "backend/__init__.py": 'from .app import create_app\n\n__all__ = ["create_app"]\n', |
| "backend/app.py": textwrap.dedent( |
| """ |
| from __future__ import annotations |
| |
| import os |
| import time |
| from pathlib import Path |
| |
| from flask import Flask, jsonify, request, send_from_directory |
| |
| from generate_question import ( |
| APP_TITLE, |
| QUESTION_LIMIT, |
| QuestionGenerator, |
| format_questions, |
| normalize_text, |
| parse_question_count, |
| resolve_model_dir, |
| ) |
| |
| IGNORED_MODEL_DIR_NAMES = { |
| ".git", |
| ".vscode", |
| "__pycache__", |
| "backend", |
| "frontend", |
| "venv", |
| ".hvu_qa_tool_venv", |
| "HVU_QA_runtime", |
| } |
| |
| |
| def project_root() -> Path: |
| return Path(__file__).resolve().parents[1] |
| |
| |
| def _read_optional_int(value: str | None) -> int | None: |
| if value in (None, ""): |
| return None |
| return int(value) |
| |
| |
| def build_generator( |
| model_dir: str | Path | None = None, |
| prefer_nested_model: bool = True, |
| ) -> QuestionGenerator: |
| root = project_root() |
| selected_model_dir = ( |
| Path(model_dir).expanduser() |
| if model_dir is not None |
| else Path(os.getenv("HVU_MODEL_DIR", str(root / "t5-viet-qg-finetuned"))).expanduser() |
| ) |
| if not selected_model_dir.is_absolute(): |
| selected_model_dir = root / selected_model_dir |
| |
| return QuestionGenerator( |
| model_dir=str(selected_model_dir), |
| task_prefix=os.getenv("HVU_TASK_PREFIX", "sinh câu hỏi"), |
| max_source_length=int(os.getenv("HVU_MAX_SOURCE_LENGTH", "512")), |
| max_new_tokens=int(os.getenv("HVU_MAX_NEW_TOKENS", "64")), |
| device=os.getenv("HVU_DEVICE", "auto"), |
| cpu_threads=_read_optional_int(os.getenv("HVU_CPU_THREADS")), |
| gpu_dtype=os.getenv("HVU_GPU_DTYPE", "auto"), |
| prefer_nested_model=prefer_nested_model, |
| ) |
| |
| |
| def _model_label(relative_path: str | Path) -> str: |
| path = Path(relative_path) |
| return path.name or "model" |
| |
| |
| def _iter_model_candidates(root: Path): |
| for child in sorted(root.iterdir(), key=lambda path: path.name.lower()): |
| if not child.is_dir() or child.name.startswith(".") or child.name in IGNORED_MODEL_DIR_NAMES: |
| continue |
| |
| if (child / "config.json").exists(): |
| yield {"path": child, "prefer_nested_model": False} |
| |
| for nested_name in ("best-model", "final-model"): |
| nested = child / nested_name |
| if nested.is_dir() and (nested / "config.json").exists(): |
| yield {"path": nested, "prefer_nested_model": False} |
| |
| |
| def _discover_available_models( |
| root: Path, |
| active_generator: QuestionGenerator | None = None, |
| ) -> list[dict[str, str]]: |
| models: list[dict[str, str]] = [] |
| seen_roots: set[str] = set() |
| root = root.resolve() |
| |
| for candidate_info in _iter_model_candidates(root): |
| candidate = candidate_info["path"] |
| model_key = str(candidate.resolve()) |
| if model_key in seen_roots: |
| continue |
| |
| try: |
| relative_candidate = candidate.resolve().relative_to(root) |
| except ValueError: |
| continue |
| |
| seen_roots.add(model_key) |
| models.append( |
| { |
| "id": relative_candidate.as_posix(), |
| "label": _model_label(relative_candidate), |
| "model_root": str(candidate.resolve()), |
| "model_dir": str(resolve_model_dir(candidate, prefer_nested_model=False).resolve()), |
| "prefer_nested_model": bool(candidate_info["prefer_nested_model"]), |
| } |
| ) |
| |
| if active_generator is not None: |
| current_root = active_generator.model_root.resolve() |
| current_dir = active_generator.model_dir.resolve() |
| exists = any( |
| Path(item["model_root"]).resolve() == current_root |
| or Path(item["model_dir"]).resolve() == current_dir |
| for item in models |
| ) |
| if not exists: |
| models.append( |
| { |
| "id": current_root.as_posix(), |
| "label": current_root.name, |
| "model_root": str(current_root), |
| "model_dir": str(current_dir), |
| "prefer_nested_model": False, |
| } |
| ) |
| |
| return models |
| |
| |
| def _selected_model_id( |
| app: Flask, |
| models: list[dict[str, str]], |
| active_generator: QuestionGenerator | None = None, |
| ) -> str: |
| explicit_selection = str(app.config.get("SELECTED_MODEL_ID") or "").strip() |
| if explicit_selection and any(item["id"] == explicit_selection for item in models): |
| return explicit_selection |
| |
| active_generator = active_generator or _generator(app) |
| current_root = active_generator.model_root.resolve() |
| current_dir = active_generator.model_dir.resolve() |
| |
| for item in models: |
| if Path(item["model_dir"]).resolve() == current_dir: |
| return item["id"] |
| |
| for item in models: |
| if Path(item["model_root"]).resolve() == current_root: |
| return item["id"] |
| |
| return models[0]["id"] if models else "" |
| |
| |
| def _switch_generator(app: Flask, model_id: str) -> QuestionGenerator: |
| available_models = _discover_available_models(app.config["PROJECT_ROOT"], _generator(app)) |
| selected_model = next((item for item in available_models if item["id"] == model_id), None) |
| if selected_model is None: |
| raise ValueError("Model được chọn không hợp lệ hoặc chưa tồn tại trong thư mục runtime.") |
| |
| current_model_id = _selected_model_id(app, available_models) |
| if current_model_id != model_id: |
| app.config["GENERATOR"] = build_generator( |
| selected_model["model_root"], |
| prefer_nested_model=bool(selected_model.get("prefer_nested_model")), |
| ) |
| |
| app.config["SELECTED_MODEL_ID"] = model_id |
| return _generator(app) |
| |
| |
| def _info_payload(app: Flask, active_generator: QuestionGenerator | None = None) -> dict[str, object]: |
| active_generator = active_generator or _generator(app) |
| available_models = _discover_available_models(app.config["PROJECT_ROOT"], active_generator) |
| selected_model_id = _selected_model_id(app, available_models, active_generator) |
| model_name = next( |
| (item["label"] for item in available_models if item["id"] == selected_model_id), |
| Path(active_generator.model_dir).name, |
| ) |
| return { |
| "ok": True, |
| "title": APP_TITLE, |
| "model_name": model_name, |
| "selected_model_id": selected_model_id, |
| "available_models": [{"id": item["id"], "label": item["label"]} for item in available_models], |
| "meta": active_generator.metadata(), |
| } |
| |
| |
| def create_app(generator: QuestionGenerator | None = None) -> Flask: |
| root = project_root() |
| frontend_root = root / "frontend" |
| |
| app = Flask(__name__, static_folder=None) |
| app.json.ensure_ascii = False |
| app.config["GENERATOR"] = generator or build_generator() |
| app.config["PROJECT_ROOT"] = root |
| app.config["FRONTEND_ROOT"] = frontend_root |
| app.config["SELECTED_MODEL_ID"] = "" |
| |
| @app.get("/") |
| def index(): |
| return send_from_directory(app.config["FRONTEND_ROOT"], "index.html") |
| |
| @app.get("/frontend/<path:filename>") |
| def frontend_file(filename: str): |
| return send_from_directory(app.config["FRONTEND_ROOT"], filename) |
| |
| @app.get("/api/info") |
| def info(): |
| return jsonify(_info_payload(app)) |
| |
| @app.post("/api/model") |
| def set_model(): |
| payload = request.get_json(silent=True) or {} |
| model_id = str(payload.get("model_id") or "").strip() |
| if not model_id: |
| return jsonify({"ok": False, "error": "Vui lòng chọn model trước khi chuyển."}), 400 |
| |
| try: |
| active_generator = _switch_generator(app, model_id) |
| except ValueError as exc: |
| return jsonify({"ok": False, "error": str(exc)}), 404 |
| |
| return jsonify(_info_payload(app, active_generator)) |
| |
| @app.post("/api/generate") |
| def generate(): |
| payload = request.get_json(silent=True) or {} |
| requested_model_id = str(payload.get("model_id") or "").strip() |
| |
| if requested_model_id: |
| try: |
| active_generator = _switch_generator(app, requested_model_id) |
| except ValueError as exc: |
| return jsonify({"ok": False, "error": str(exc)}), 400 |
| else: |
| active_generator = _generator(app) |
| |
| text = normalize_text(payload.get("text")) |
| if not text: |
| return jsonify({"ok": False, "error": "Vui lòng nhập đoạn văn bản trước khi sinh câu hỏi."}), 400 |
| |
| raw_count = payload.get("num_questions") |
| if raw_count in (None, ""): |
| count = 5 |
| else: |
| try: |
| count = int(raw_count) |
| except (TypeError, ValueError): |
| return jsonify({"ok": False, "error": "Số câu hỏi phải là số nguyên trong khoảng 1 đến 100."}), 400 |
| |
| if count < 1 or count > QUESTION_LIMIT: |
| return jsonify({"ok": False, "error": f"Số câu hỏi phải nằm trong khoảng 1 đến {QUESTION_LIMIT}."}), 400 |
| |
| started = time.perf_counter() |
| try: |
| questions = active_generator.generate(text, parse_question_count(count)) |
| except Exception as exc: # noqa: BLE001 |
| return jsonify({"ok": False, "error": str(exc)}), 500 |
| |
| elapsed_ms = round((time.perf_counter() - started) * 1000, 2) |
| info_payload = _info_payload(app, active_generator) |
| return jsonify( |
| { |
| "ok": True, |
| "text": text, |
| "num_questions": count, |
| "questions": questions, |
| "formatted": format_questions(questions), |
| "elapsed_ms": elapsed_ms, |
| "model_name": info_payload["model_name"], |
| "selected_model_id": info_payload["selected_model_id"], |
| "meta": active_generator.metadata(), |
| } |
| ) |
| |
| return app |
| |
| |
| def _generator(app: Flask) -> QuestionGenerator: |
| generator: QuestionGenerator = app.config["GENERATOR"] |
| return generator |
| """ |
| ).strip() |
| + "\n", |
| "generate_question.py": textwrap.dedent( |
| """ |
| from __future__ import annotations |
| |
| import argparse |
| import json |
| import os |
| import re |
| import sys |
| import threading |
| from pathlib import Path |
| from typing import Any |
| |
| os.environ.setdefault("TOKENIZERS_PARALLELISM", "false") |
| os.environ.setdefault("PYTORCH_CUDA_ALLOC_CONF", "expandable_segments:True") |
| |
| |
| def raise_missing_dependency_error(exc: ModuleNotFoundError) -> None: |
| root = Path(__file__).resolve().parent |
| requirements = root / "requirements.txt" |
| message = [ |
| f"Thiếu thư viện Python: {exc.name}", |
| f"Interpreter hiện tại: {sys.executable}", |
| ] |
| if requirements.exists(): |
| message.extend( |
| [ |
| "Cài đặt dependencies bằng lệnh:", |
| f"{sys.executable} -m pip install -r {requirements}", |
| ] |
| ) |
| raise SystemExit("\\n".join(message)) from exc |
| |
| |
| try: |
| import torch |
| from transformers import AutoModelForSeq2SeqLM, AutoTokenizer |
| except ModuleNotFoundError as exc: |
| raise_missing_dependency_error(exc) |
| |
| |
| APP_TITLE = "HVU_QA Tool - Sinh câu hỏi" |
| TASK_PREFIX = "sinh câu hỏi" |
| QUESTION_LIMIT = 100 |
| GENERATION_PASSES = ( |
| (0.9, 0.95, 1, 4), |
| (1.0, 0.97, 1, 5), |
| (1.08, 0.99, 2, 6), |
| ) |
| |
| |
| def normalize_text(text: Any) -> str: |
| return " ".join(str(text or "").split()) |
| |
| |
| def unique_text(items: list[str]) -> list[str]: |
| seen: set[str] = set() |
| output: list[str] = [] |
| for item in items: |
| value = normalize_text(item) |
| key = value.lower() |
| if key and key not in seen: |
| seen.add(key) |
| output.append(value) |
| return output |
| |
| |
| def parse_question_count(value: Any, default: int = 5) -> int: |
| try: |
| parsed = int(value) |
| except (TypeError, ValueError): |
| parsed = default |
| return max(1, min(parsed, QUESTION_LIMIT)) |
| |
| |
| def format_questions(items: list[str]) -> str: |
| if not items: |
| return "Không sinh được câu hỏi phù hợp." |
| return "\\n".join(f"{index}. {item}" for index, item in enumerate(items, 1)) |
| |
| |
| def resolve_model_dir(model_dir: str | Path, prefer_nested_model: bool = True) -> Path: |
| model_root = Path(model_dir).expanduser().resolve() |
| nested_candidates = [model_root / "best-model", model_root / "final-model"] |
| candidates = [*nested_candidates, model_root] if prefer_nested_model else [model_root, *nested_candidates] |
| for candidate in candidates: |
| if candidate.is_dir() and (candidate / "config.json").exists(): |
| return candidate |
| raise FileNotFoundError(f"Không tìm thấy thư mục mô hình hợp lệ: {model_root}") |
| |
| |
| def parse_dtype(value: str) -> torch.dtype: |
| normalized = value.strip().lower() |
| mapping = { |
| "float16": torch.float16, |
| "fp16": torch.float16, |
| "float32": torch.float32, |
| "fp32": torch.float32, |
| "bfloat16": torch.bfloat16, |
| "bf16": torch.bfloat16, |
| } |
| if normalized not in mapping: |
| raise ValueError(f"Không hỗ trợ gpu_dtype={value}") |
| return mapping[normalized] |
| |
| |
| class QuestionGenerator: |
| def __init__( |
| self, |
| model_dir: str | Path = "t5-viet-qg-finetuned", |
| task_prefix: str = TASK_PREFIX, |
| max_source_length: int = 512, |
| max_new_tokens: int = 64, |
| device: str = "auto", |
| cpu_threads: int | None = None, |
| gpu_dtype: str = "auto", |
| prefer_nested_model: bool = True, |
| ) -> None: |
| self.model_root = Path(model_dir).expanduser().resolve() |
| self.model_dir = resolve_model_dir(model_dir, prefer_nested_model=prefer_nested_model) |
| self.task_prefix = task_prefix |
| self.max_source_length = max_source_length |
| self.max_new_tokens = max_new_tokens |
| self.requested_device = device |
| self.cpu_threads = cpu_threads |
| self.gpu_dtype = gpu_dtype |
| self.device: torch.device | None = None |
| self.dtype: torch.dtype | None = None |
| self.tokenizer = None |
| self.model = None |
| self._load_lock = threading.Lock() |
| |
| def _resolve_device(self) -> torch.device: |
| requested = self.requested_device.lower() |
| if requested == "cpu": |
| return torch.device("cpu") |
| if requested == "cuda": |
| if not torch.cuda.is_available(): |
| raise RuntimeError("Bạn đã chọn device=cuda nhưng máy hiện tại không có CUDA.") |
| return torch.device("cuda") |
| return torch.device("cuda" if torch.cuda.is_available() else "cpu") |
| |
| def _resolve_dtype(self) -> torch.dtype: |
| if self.device is None or self.device.type != "cuda": |
| return torch.float32 |
| if self.gpu_dtype == "auto": |
| if hasattr(torch.cuda, "is_bf16_supported") and torch.cuda.is_bf16_supported(): |
| return torch.bfloat16 |
| return torch.float16 |
| return parse_dtype(self.gpu_dtype) |
| |
| def _configure_runtime(self) -> None: |
| if self.device is None: |
| return |
| if self.device.type == "cpu": |
| if self.cpu_threads: |
| torch.set_num_threads(max(1, int(self.cpu_threads))) |
| if hasattr(torch, "set_num_interop_threads"): |
| torch.set_num_interop_threads(max(1, min(int(self.cpu_threads), 4))) |
| return |
| |
| if hasattr(torch.backends, "cuda") and hasattr(torch.backends.cuda, "matmul"): |
| torch.backends.cuda.matmul.allow_tf32 = True |
| if hasattr(torch.backends, "cudnn"): |
| torch.backends.cudnn.allow_tf32 = True |
| torch.backends.cudnn.benchmark = True |
| |
| def load(self) -> None: |
| if self.model is not None and self.tokenizer is not None: |
| return |
| |
| with self._load_lock: |
| if self.model is not None and self.tokenizer is not None: |
| return |
| |
| self.device = self._resolve_device() |
| self.dtype = self._resolve_dtype() |
| self._configure_runtime() |
| |
| model_kwargs: dict[str, Any] = {} |
| if self.device.type == "cuda": |
| model_kwargs["torch_dtype"] = self.dtype |
| model_kwargs["low_cpu_mem_usage"] = True |
| |
| self.tokenizer = AutoTokenizer.from_pretrained(str(self.model_dir), use_fast=True) |
| self.model = AutoModelForSeq2SeqLM.from_pretrained(str(self.model_dir), **model_kwargs) |
| self.model.to(self.device) |
| self.model.eval() |
| |
| def metadata(self) -> dict[str, Any]: |
| active_device = self.device.type if self.device is not None else None |
| predicted_device = "cuda" if torch.cuda.is_available() and self.requested_device != "cpu" else "cpu" |
| return { |
| "title": APP_TITLE, |
| "model_root": str(self.model_root), |
| "model_dir": str(self.model_dir), |
| "requested_device": self.requested_device, |
| "active_device": active_device, |
| "predicted_device": predicted_device, |
| "loaded": self.model is not None, |
| "gpu_available": torch.cuda.is_available(), |
| "gpu_dtype": None if self.dtype is None else str(self.dtype).replace("torch.", ""), |
| "cpu_threads": torch.get_num_threads(), |
| } |
| |
| def _candidate_answers(self, text: str, limit: int) -> list[str]: |
| text = normalize_text(text) |
| if not text: |
| return [] |
| |
| candidates: list[str] = [] |
| split_pattern = r"(?<=[.!?])\\s+|\\n+" |
| for sentence in [normalize_text(part) for part in re.split(split_pattern, text) if normalize_text(part)]: |
| if 3 <= len(sentence.split()) <= 30: |
| candidates.append(sentence) |
| for clause in (normalize_text(part) for part in re.split(r"\\s*[,;:]\\s*", sentence)): |
| if 3 <= len(clause.split()) <= 20: |
| candidates.append(clause) |
| |
| if not candidates: |
| words = text.split() |
| candidates = [" ".join(words[: min(12, len(words))])] if words else [text] |
| |
| ranked = sorted(unique_text(candidates), key=lambda item: (abs(len(item.split()) - 10), len(item))) |
| return ranked[:limit] |
| |
| def _build_prompt(self, context: str, answer: str) -> str: |
| return f"{self.task_prefix}:\\nngữ cảnh: {context}\\nđáp án: {answer}" |
| |
| @torch.inference_mode() |
| def _sample(self, context: str, answer: str, count: int, temperature: float, top_p: float) -> list[str]: |
| if self.tokenizer is None or self.model is None or self.device is None: |
| raise RuntimeError("Model chưa được load.") |
| |
| inputs = self.tokenizer( |
| self._build_prompt(context, answer), |
| return_tensors="pt", |
| truncation=True, |
| max_length=self.max_source_length, |
| ).to(self.device) |
| outputs = self.model.generate( |
| **inputs, |
| max_new_tokens=self.max_new_tokens, |
| do_sample=True, |
| temperature=temperature, |
| top_p=top_p, |
| num_return_sequences=max(1, min(count, 6)), |
| no_repeat_ngram_size=3, |
| repetition_penalty=1.1, |
| ) |
| questions: list[str] = [] |
| for token_ids in outputs: |
| question = normalize_text(self.tokenizer.decode(token_ids, skip_special_tokens=True)) |
| if question: |
| questions.append(question if question.endswith("?") else f"{question}?") |
| return [question for question in unique_text(questions) if len(question.split()) >= 3] |
| |
| def generate(self, text: str, num_questions: int = 5) -> list[str]: |
| clean_text = normalize_text(text) |
| requested_count = parse_question_count(num_questions) |
| if not clean_text: |
| return [] |
| |
| self.load() |
| answers = self._candidate_answers(clean_text, limit=max(requested_count * 3, 8)) |
| questions: list[str] = [] |
| |
| for temperature, top_p, candidate_step, sample_count in GENERATION_PASSES: |
| for index, answer in enumerate(answers): |
| generated = self._sample( |
| clean_text, |
| answer, |
| count=min(sample_count + requested_count, requested_count + 2), |
| temperature=temperature, |
| top_p=top_p, |
| ) |
| questions.extend(generated) |
| questions = unique_text(questions) |
| if len(questions) >= requested_count: |
| return questions[:requested_count] |
| if candidate_step and (index + 1) % candidate_step == 0 and len(questions) >= requested_count: |
| return questions[:requested_count] |
| |
| return questions[:requested_count] |
| |
| |
| def _read_text_from_args(args: argparse.Namespace) -> str: |
| if args.text: |
| return normalize_text(args.text) |
| if args.input_file: |
| return normalize_text(Path(args.input_file).read_text(encoding="utf-8")) |
| raise SystemExit("Vui lòng truyền --text hoặc --input_file.") |
| |
| |
| def build_parser() -> argparse.ArgumentParser: |
| parser = argparse.ArgumentParser(description="Sinh câu hỏi từ một đoạn văn bản bằng model T5 tiếng Việt.") |
| parser.add_argument("--text", help="Đoạn văn bản đầu vào.") |
| parser.add_argument("--input_file", help="Đọc đoạn văn bản từ file UTF-8.") |
| parser.add_argument("--num_questions", type=int, default=5, help="Số câu hỏi cần sinh.") |
| parser.add_argument("--model_dir", default=os.getenv("HVU_MODEL_DIR", "t5-viet-qg-finetuned")) |
| parser.add_argument("--task_prefix", default=os.getenv("HVU_TASK_PREFIX", TASK_PREFIX)) |
| parser.add_argument("--device", default=os.getenv("HVU_DEVICE", "auto"), choices=["auto", "cpu", "cuda"]) |
| parser.add_argument("--cpu_threads", type=int, default=None) |
| parser.add_argument("--gpu_dtype", default=os.getenv("HVU_GPU_DTYPE", "auto")) |
| parser.add_argument("--max_source_length", type=int, default=int(os.getenv("HVU_MAX_SOURCE_LENGTH", "512"))) |
| parser.add_argument("--max_new_tokens", type=int, default=int(os.getenv("HVU_MAX_NEW_TOKENS", "64"))) |
| parser.add_argument("--output_format", choices=["text", "json"], default="text") |
| return parser |
| |
| |
| def main() -> int: |
| if hasattr(sys.stdout, "reconfigure"): |
| sys.stdout.reconfigure(encoding="utf-8") |
| if hasattr(sys.stderr, "reconfigure"): |
| sys.stderr.reconfigure(encoding="utf-8") |
| |
| args = build_parser().parse_args() |
| text = _read_text_from_args(args) |
| generator = QuestionGenerator( |
| model_dir=args.model_dir, |
| task_prefix=args.task_prefix, |
| max_source_length=args.max_source_length, |
| max_new_tokens=args.max_new_tokens, |
| device=args.device, |
| cpu_threads=args.cpu_threads, |
| gpu_dtype=args.gpu_dtype, |
| ) |
| questions = generator.generate(text, args.num_questions) |
| payload = { |
| "ok": True, |
| "text": text, |
| "num_questions": parse_question_count(args.num_questions), |
| "questions": questions, |
| "formatted": format_questions(questions), |
| "meta": generator.metadata(), |
| } |
| |
| if args.output_format == "json": |
| print(json.dumps(payload, ensure_ascii=False, indent=2)) |
| else: |
| print(payload["formatted"]) |
| return 0 |
| |
| |
| if __name__ == "__main__": |
| raise SystemExit(main()) |
| """ |
| ).strip() |
| + "\n", |
| "frontend/index.html": textwrap.dedent( |
| """ |
| <!doctype html> |
| <html lang="vi"> |
| <head> |
| <meta charset="utf-8"> |
| <meta name="viewport" content="width=device-width, initial-scale=1"> |
| <title>HVU_QA Tool</title> |
| <link rel="stylesheet" href="/frontend/style.css"> |
| </head> |
| <body> |
| <div class="page-shell"> |
| <header class="hero"> |
| <span class="hero-badge">HVU_QA Tool</span> |
| <h1>Sinh câu hỏi từ văn bản</h1> |
| <p>Launcher nhẹ dành cho người dùng cuối. Chỉ cần một file tool để dựng runtime, tải model và chạy ứng dụng.</p> |
| </header> |
| |
| <div class="layout"> |
| <aside class="sidebar"> |
| <section class="panel"> |
| <div class="panel-heading"> |
| <h2>Trạng thái model</h2> |
| <span id="readyBadge" class="badge badge-soft">Đang tải</span> |
| </div> |
| |
| <label class="field-label" for="modelSelect">Model đang dùng</label> |
| <select id="modelSelect" class="select-field"></select> |
| |
| <dl class="status-list"> |
| <div> |
| <dt>Tên hiển thị</dt> |
| <dd id="modelName">-</dd> |
| </div> |
| <div> |
| <dt>Thiết bị</dt> |
| <dd id="deviceStatus">-</dd> |
| </div> |
| <div> |
| <dt>Trạng thái nạp</dt> |
| <dd id="loadedStatus">-</dd> |
| </div> |
| </dl> |
| </section> |
| |
| <section class="panel"> |
| <div class="panel-heading"> |
| <h2>Ví dụ mẫu</h2> |
| </div> |
| <p class="panel-hint">Bấm vào một văn bản luật mẫu để chèn nhanh nội dung thử nghiệm.</p> |
| <div id="sampleList" class="sample-list"></div> |
| </section> |
| </aside> |
| |
| <main class="main-panel"> |
| <section class="composer panel"> |
| <label class="field-label" for="sourceText">Đoạn văn bản đầu vào</label> |
| <textarea id="sourceText" class="text-input" placeholder="Nhập đoạn văn bản ..."></textarea> |
| |
| <div class="composer-footer"> |
| <div class="count-field"> |
| <span class="field-label">Số câu hỏi</span> |
| <div class="count-controls"> |
| <button id="decreaseCount" type="button" class="count-button">-</button> |
| <input id="questionCount" class="count-input" type="number" min="1" max="100" value="5"> |
| <button id="increaseCount" type="button" class="count-button">+</button> |
| </div> |
| </div> |
| |
| <button id="generateButton" type="button" class="primary-button"> |
| <span id="generateButtonText">Sinh câu hỏi</span> |
| </button> |
| </div> |
| |
| <p id="formMessage" class="form-message"></p> |
| </section> |
| |
| <section id="resultPanel" class="result-panel panel"> |
| <div id="resultPlaceholder" class="result-placeholder"> |
| Nhập văn bản và nhấn <strong>Sinh câu hỏi</strong> để xem kết quả. |
| </div> |
| |
| <div id="resultContent" class="result-content hidden"> |
| <div class="result-header"> |
| <div> |
| <h2>Kết quả sinh câu hỏi</h2> |
| <p id="resultStats" class="result-stats"></p> |
| </div> |
| <button id="copyButton" type="button" class="secondary-button">Sao chép</button> |
| </div> |
| |
| <ol id="resultList" class="result-list"></ol> |
| <pre id="formattedOutput" class="formatted-output"></pre> |
| </div> |
| </section> |
| </main> |
| </div> |
| </div> |
| |
| <script src="/frontend/app.js"></script> |
| </body> |
| </html> |
| """ |
| ).strip() |
| + "\n", |
| "frontend/app.js": textwrap.dedent( |
| """ |
| const sampleTexts = [ |
| { |
| title: 'Luật Giáo dục đại học', |
| text: 'Cơ sở giáo dục đại học có nhiệm vụ tổ chức đào tạo, nghiên cứu khoa học, chuyển giao công nghệ và phục vụ cộng đồng theo quy định của pháp luật.' |
| }, |
| { |
| title: 'Bộ luật Lao động', |
| text: 'Người lao động là người làm việc cho người sử dụng lao động theo thỏa thuận, được trả lương và chịu sự quản lý, điều hành, giám sát của người sử dụng lao động.' |
| }, |
| { |
| title: 'Luật An toàn thông tin mạng', |
| text: 'An toàn thông tin mạng là sự bảo vệ thông tin, hệ thống thông tin trên mạng khỏi bị truy nhập, sử dụng, tiết lộ, gián đoạn, sửa đổi hoặc phá hoại trái phép.' |
| } |
| ]; |
| |
| const state = { |
| info: null, |
| loading: false, |
| count: 5, |
| lastFormatted: '' |
| }; |
| |
| const elements = { |
| modelSelect: document.getElementById('modelSelect'), |
| readyBadge: document.getElementById('readyBadge'), |
| modelName: document.getElementById('modelName'), |
| deviceStatus: document.getElementById('deviceStatus'), |
| loadedStatus: document.getElementById('loadedStatus'), |
| sampleList: document.getElementById('sampleList'), |
| sourceText: document.getElementById('sourceText'), |
| decreaseCount: document.getElementById('decreaseCount'), |
| increaseCount: document.getElementById('increaseCount'), |
| questionCount: document.getElementById('questionCount'), |
| generateButton: document.getElementById('generateButton'), |
| generateButtonText: document.getElementById('generateButtonText'), |
| formMessage: document.getElementById('formMessage'), |
| resultPanel: document.getElementById('resultPanel'), |
| resultPlaceholder: document.getElementById('resultPlaceholder'), |
| resultContent: document.getElementById('resultContent'), |
| resultStats: document.getElementById('resultStats'), |
| resultList: document.getElementById('resultList'), |
| formattedOutput: document.getElementById('formattedOutput'), |
| copyButton: document.getElementById('copyButton') |
| }; |
| |
| function normalizeCount(value) { |
| const parsed = Number.parseInt(value, 10); |
| if (Number.isNaN(parsed)) { |
| return 1; |
| } |
| return Math.max(1, Math.min(100, parsed)); |
| } |
| |
| function setCount(value) { |
| state.count = normalizeCount(value); |
| elements.questionCount.value = String(state.count); |
| } |
| |
| function setMessage(text, tone = 'muted') { |
| elements.formMessage.textContent = text || ''; |
| elements.formMessage.dataset.tone = tone; |
| } |
| |
| function setLoading(loading) { |
| state.loading = loading; |
| elements.generateButton.disabled = loading; |
| elements.modelSelect.disabled = loading; |
| elements.generateButtonText.textContent = loading ? 'Đang xử lý...' : 'Sinh câu hỏi'; |
| elements.readyBadge.textContent = loading ? 'Đang chạy' : 'Sẵn sàng'; |
| elements.readyBadge.classList.toggle('badge-busy', loading); |
| } |
| |
| async function fetchJson(url, options = {}) { |
| const response = await fetch(url, options); |
| const payload = await response.json().catch(() => ({})); |
| if (!response.ok || payload.ok === false) { |
| throw new Error(payload.error || `Yêu cầu thất bại (${response.status})`); |
| } |
| return payload; |
| } |
| |
| function renderSamples() { |
| elements.sampleList.innerHTML = ''; |
| sampleTexts.forEach((sample) => { |
| const button = document.createElement('button'); |
| button.type = 'button'; |
| button.className = 'sample-card'; |
| button.innerHTML = `<strong>${sample.title}</strong><span>${sample.text}</span>`; |
| button.addEventListener('click', () => { |
| elements.sourceText.value = sample.text; |
| setMessage(`Đã chèn mẫu: ${sample.title}`, 'muted'); |
| elements.sourceText.focus(); |
| }); |
| elements.sampleList.appendChild(button); |
| }); |
| } |
| |
| function renderInfo(info) { |
| state.info = info; |
| const models = Array.isArray(info.available_models) ? info.available_models : []; |
| const selectedId = info.selected_model_id || models[0]?.id || ''; |
| |
| elements.modelSelect.innerHTML = ''; |
| if (!models.length) { |
| const option = document.createElement('option'); |
| option.value = ''; |
| option.textContent = 'Không có model khả dụng'; |
| elements.modelSelect.appendChild(option); |
| } else { |
| models.forEach((model) => { |
| const option = document.createElement('option'); |
| option.value = model.id; |
| option.textContent = model.label; |
| elements.modelSelect.appendChild(option); |
| }); |
| elements.modelSelect.value = selectedId; |
| } |
| |
| const meta = info.meta || {}; |
| elements.modelName.textContent = info.model_name || '-'; |
| elements.deviceStatus.textContent = meta.active_device |
| ? meta.active_device.toUpperCase() |
| : (meta.predicted_device ? `Dự đoán: ${String(meta.predicted_device).toUpperCase()}` : '-'); |
| elements.loadedStatus.textContent = meta.loaded ? 'Đã nạp' : 'Chưa nạp'; |
| elements.readyBadge.textContent = 'Sẵn sàng'; |
| elements.readyBadge.classList.remove('badge-busy'); |
| } |
| |
| function renderResult(result) { |
| const questions = Array.isArray(result.questions) ? result.questions : []; |
| elements.resultPlaceholder.classList.add('hidden'); |
| elements.resultContent.classList.remove('hidden'); |
| elements.resultList.innerHTML = ''; |
| |
| questions.forEach((question) => { |
| const item = document.createElement('li'); |
| item.textContent = question; |
| elements.resultList.appendChild(item); |
| }); |
| |
| state.lastFormatted = result.formatted || ''; |
| elements.formattedOutput.textContent = state.lastFormatted; |
| elements.resultStats.textContent = `${questions.length} câu hỏi • ${result.model_name || 'Không rõ model'} • ${result.elapsed_ms || 0} ms`; |
| } |
| |
| async function loadInfo() { |
| const info = await fetchJson('/api/info'); |
| renderInfo(info); |
| setMessage('Sẵn sàng để sinh câu hỏi.', 'muted'); |
| } |
| |
| async function changeModel() { |
| const modelId = elements.modelSelect.value; |
| if (!modelId) { |
| return; |
| } |
| setLoading(true); |
| setMessage('Đang chuyển model...', 'muted'); |
| try { |
| const info = await fetchJson('/api/model', { |
| method: 'POST', |
| headers: { 'Content-Type': 'application/json' }, |
| body: JSON.stringify({ model_id: modelId }) |
| }); |
| renderInfo(info); |
| setMessage(`Đã chuyển sang model: ${info.model_name}`, 'muted'); |
| } catch (error) { |
| setMessage(error.message, 'error'); |
| } finally { |
| setLoading(false); |
| } |
| } |
| |
| async function generateQuestions() { |
| const text = elements.sourceText.value.trim(); |
| if (!text) { |
| setMessage('Vui lòng nhập đoạn văn bản trước khi sinh câu hỏi.', 'error'); |
| elements.sourceText.focus(); |
| return; |
| } |
| |
| setLoading(true); |
| setMessage('Đang sinh câu hỏi từ nội dung đã nhập...', 'muted'); |
| |
| try { |
| const payload = await fetchJson('/api/generate', { |
| method: 'POST', |
| headers: { 'Content-Type': 'application/json' }, |
| body: JSON.stringify({ |
| text, |
| num_questions: state.count, |
| model_id: elements.modelSelect.value || undefined |
| }) |
| }); |
| renderResult(payload); |
| setMessage(`Đã sinh xong ${payload.questions.length} câu hỏi.`, 'muted'); |
| } catch (error) { |
| setMessage(error.message, 'error'); |
| } finally { |
| setLoading(false); |
| } |
| } |
| |
| async function copyOutput() { |
| if (!state.lastFormatted) { |
| setMessage('Chưa có nội dung để sao chép.', 'error'); |
| return; |
| } |
| |
| try { |
| await navigator.clipboard.writeText(state.lastFormatted); |
| setMessage('Đã sao chép kết quả vào clipboard.', 'muted'); |
| } catch (error) { |
| setMessage('Không thể sao chép tự động. Hãy sao chép thủ công.', 'error'); |
| } |
| } |
| |
| function bindEvents() { |
| elements.decreaseCount.addEventListener('click', () => setCount(state.count - 1)); |
| elements.increaseCount.addEventListener('click', () => setCount(state.count + 1)); |
| elements.questionCount.addEventListener('change', (event) => setCount(event.target.value)); |
| elements.modelSelect.addEventListener('change', changeModel); |
| elements.generateButton.addEventListener('click', generateQuestions); |
| elements.copyButton.addEventListener('click', copyOutput); |
| } |
| |
| async function init() { |
| renderSamples(); |
| setCount(5); |
| bindEvents(); |
| try { |
| await loadInfo(); |
| } catch (error) { |
| setMessage(error.message || 'Không thể kết nối backend.', 'error'); |
| elements.readyBadge.textContent = 'Lỗi'; |
| } |
| } |
| |
| document.addEventListener('DOMContentLoaded', init); |
| """ |
| ).strip() |
| + "\n", |
| "frontend/style.css": textwrap.dedent( |
| """ |
| :root { |
| --bg-start: #f8f5ff; |
| --bg-end: #eef4ff; |
| --panel: rgba(255, 255, 255, 0.82); |
| --border: rgba(103, 102, 181, 0.18); |
| --text: #23244d; |
| --muted: #6c6d9a; |
| --primary-start: #6b73ff; |
| --primary-end: #d96ba2; |
| --shadow: 0 22px 60px rgba(52, 56, 121, 0.14); |
| } |
| |
| * { |
| box-sizing: border-box; |
| } |
| |
| body { |
| margin: 0; |
| min-height: 100vh; |
| font-family: "Be Vietnam Pro", "Segoe UI", sans-serif; |
| color: var(--text); |
| background: |
| radial-gradient(circle at top left, rgba(123, 135, 255, 0.14), transparent 28%), |
| radial-gradient(circle at bottom right, rgba(217, 107, 162, 0.18), transparent 25%), |
| linear-gradient(135deg, var(--bg-start), var(--bg-end)); |
| } |
| |
| button, |
| input, |
| textarea, |
| select { |
| font: inherit; |
| } |
| |
| .page-shell { |
| width: min(1200px, calc(100% - 32px)); |
| margin: 24px auto; |
| } |
| |
| .hero { |
| padding: 32px; |
| border: 1px solid var(--border); |
| border-radius: 28px; |
| background: var(--panel); |
| box-shadow: var(--shadow); |
| backdrop-filter: blur(18px); |
| } |
| |
| .hero-badge { |
| display: inline-flex; |
| padding: 8px 14px; |
| border-radius: 999px; |
| background: rgba(107, 115, 255, 0.12); |
| color: #5058d9; |
| font-size: 13px; |
| font-weight: 700; |
| letter-spacing: 0.04em; |
| text-transform: uppercase; |
| } |
| |
| .hero h1 { |
| margin: 18px 0 10px; |
| font-size: clamp(34px, 5vw, 56px); |
| line-height: 1.04; |
| } |
| |
| .hero p { |
| margin: 0; |
| max-width: 760px; |
| color: var(--muted); |
| font-size: 18px; |
| line-height: 1.65; |
| } |
| |
| .layout { |
| display: grid; |
| grid-template-columns: 320px minmax(0, 1fr); |
| gap: 20px; |
| margin-top: 20px; |
| } |
| |
| .panel { |
| border: 1px solid var(--border); |
| border-radius: 24px; |
| background: var(--panel); |
| box-shadow: var(--shadow); |
| backdrop-filter: blur(18px); |
| } |
| |
| .sidebar, |
| .main-panel { |
| display: grid; |
| gap: 20px; |
| align-content: start; |
| } |
| |
| .panel-heading { |
| display: flex; |
| align-items: center; |
| justify-content: space-between; |
| gap: 12px; |
| margin-bottom: 16px; |
| } |
| |
| .panel h2 { |
| margin: 0; |
| font-size: 18px; |
| } |
| |
| .sidebar .panel, |
| .composer, |
| .result-panel { |
| padding: 22px; |
| } |
| |
| .badge { |
| display: inline-flex; |
| align-items: center; |
| justify-content: center; |
| min-width: 92px; |
| padding: 8px 12px; |
| border-radius: 999px; |
| font-size: 13px; |
| font-weight: 700; |
| } |
| |
| .badge-soft { |
| background: rgba(39, 179, 112, 0.14); |
| color: #218b59; |
| } |
| |
| .badge-busy { |
| background: rgba(238, 160, 59, 0.16); |
| color: #b86a00; |
| } |
| |
| .field-label { |
| display: inline-block; |
| margin-bottom: 10px; |
| color: var(--muted); |
| font-size: 13px; |
| font-weight: 700; |
| letter-spacing: 0.02em; |
| } |
| |
| .select-field, |
| .text-input, |
| .count-input { |
| width: 100%; |
| border: 1px solid rgba(103, 102, 181, 0.14); |
| border-radius: 18px; |
| background: rgba(255, 255, 255, 0.92); |
| color: var(--text); |
| } |
| |
| .select-field { |
| min-height: 52px; |
| padding: 0 16px; |
| } |
| |
| .status-list { |
| display: grid; |
| gap: 14px; |
| margin: 18px 0 0; |
| } |
| |
| .status-list div { |
| padding: 14px 16px; |
| border-radius: 18px; |
| background: rgba(104, 109, 208, 0.07); |
| } |
| |
| .status-list dt { |
| margin: 0 0 6px; |
| color: var(--muted); |
| font-size: 12px; |
| font-weight: 700; |
| text-transform: uppercase; |
| letter-spacing: 0.04em; |
| } |
| |
| .status-list dd { |
| margin: 0; |
| font-size: 15px; |
| font-weight: 600; |
| word-break: break-word; |
| } |
| |
| .panel-hint { |
| margin: 0 0 14px; |
| color: var(--muted); |
| line-height: 1.6; |
| } |
| |
| .sample-list { |
| display: grid; |
| gap: 12px; |
| } |
| |
| .sample-card { |
| display: grid; |
| gap: 8px; |
| width: 100%; |
| padding: 16px; |
| border: 1px solid rgba(103, 102, 181, 0.14); |
| border-radius: 18px; |
| background: rgba(255, 255, 255, 0.92); |
| text-align: left; |
| color: var(--text); |
| cursor: pointer; |
| transition: transform 0.18s ease, border-color 0.18s ease, box-shadow 0.18s ease; |
| } |
| |
| .sample-card:hover { |
| transform: translateY(-2px); |
| border-color: rgba(86, 98, 218, 0.32); |
| box-shadow: 0 16px 30px rgba(61, 70, 154, 0.12); |
| } |
| |
| .sample-card span { |
| color: var(--muted); |
| line-height: 1.55; |
| } |
| |
| .text-input { |
| min-height: 250px; |
| padding: 18px 20px; |
| resize: vertical; |
| line-height: 1.7; |
| } |
| |
| .composer-footer { |
| display: flex; |
| align-items: end; |
| justify-content: space-between; |
| gap: 18px; |
| margin-top: 18px; |
| } |
| |
| .count-field { |
| min-width: 230px; |
| } |
| |
| .count-controls { |
| display: grid; |
| grid-template-columns: 48px 92px 48px; |
| gap: 10px; |
| align-items: center; |
| } |
| |
| .count-button, |
| .secondary-button { |
| min-height: 48px; |
| border: 1px solid rgba(103, 102, 181, 0.16); |
| border-radius: 16px; |
| background: rgba(255, 255, 255, 0.92); |
| color: var(--text); |
| cursor: pointer; |
| } |
| |
| .count-button { |
| font-size: 22px; |
| font-weight: 700; |
| } |
| |
| .count-input { |
| min-height: 48px; |
| padding: 0 12px; |
| text-align: center; |
| font-weight: 700; |
| } |
| |
| .primary-button { |
| min-width: 220px; |
| min-height: 56px; |
| padding: 0 24px; |
| border: none; |
| border-radius: 18px; |
| background: linear-gradient(135deg, var(--primary-start), var(--primary-end)); |
| color: white; |
| font-size: 16px; |
| font-weight: 800; |
| cursor: pointer; |
| box-shadow: 0 18px 34px rgba(95, 105, 220, 0.24); |
| } |
| |
| .primary-button:disabled, |
| .secondary-button:disabled { |
| cursor: not-allowed; |
| opacity: 0.7; |
| } |
| |
| .form-message { |
| min-height: 22px; |
| margin: 14px 0 0; |
| color: var(--muted); |
| } |
| |
| .form-message[data-tone="error"] { |
| color: #c33b5f; |
| } |
| |
| .result-panel { |
| min-height: 320px; |
| } |
| |
| .result-placeholder { |
| display: grid; |
| place-items: center; |
| min-height: 260px; |
| padding: 24px; |
| border: 1px dashed rgba(103, 102, 181, 0.24); |
| border-radius: 20px; |
| color: var(--muted); |
| text-align: center; |
| line-height: 1.7; |
| } |
| |
| .result-content.hidden, |
| .result-placeholder.hidden { |
| display: none; |
| } |
| |
| .result-header { |
| display: flex; |
| align-items: start; |
| justify-content: space-between; |
| gap: 16px; |
| margin-bottom: 18px; |
| } |
| |
| .result-header h2 { |
| margin: 0 0 8px; |
| } |
| |
| .result-stats { |
| margin: 0; |
| color: var(--muted); |
| } |
| |
| .result-list { |
| margin: 0; |
| padding-left: 20px; |
| display: grid; |
| gap: 12px; |
| line-height: 1.65; |
| } |
| |
| .formatted-output { |
| margin: 20px 0 0; |
| padding: 18px; |
| border-radius: 18px; |
| background: rgba(104, 109, 208, 0.07); |
| white-space: pre-wrap; |
| word-break: break-word; |
| line-height: 1.65; |
| } |
| |
| @media (max-width: 980px) { |
| .layout { |
| grid-template-columns: 1fr; |
| } |
| } |
| |
| @media (max-width: 640px) { |
| .page-shell { |
| width: min(100% - 16px, 1000px); |
| margin: 16px auto; |
| } |
| |
| .hero, |
| .sidebar .panel, |
| .composer, |
| .result-panel { |
| padding: 18px; |
| } |
| |
| .composer-footer, |
| .result-header { |
| flex-direction: column; |
| align-items: stretch; |
| } |
| |
| .count-field, |
| .primary-button, |
| .secondary-button { |
| width: 100%; |
| } |
| } |
| """ |
| ).strip() |
| + "\n", |
| } |
|
|
|
|
# Keep a handle to the inline runtime-file builder before the name is reused by
# the orchestrating ``build_runtime_file_map`` defined further down this file.
# NOTE(review): this assumes an earlier ``build_runtime_file_map`` (the inline
# fallback whose embedded payloads end just above) is defined before this line
# — confirm, otherwise this raises NameError at import time.
INLINE_RUNTIME_FILE_MAP_FACTORY = build_runtime_file_map
|
|
|
|
def build_runtime_requirements_text() -> str:
    """Return requirements.txt content, preferring the file shipped next to the script."""
    local_requirements = SCRIPT_ROOT / "requirements.txt"
    if local_requirements.exists():
        return local_requirements.read_text(encoding="utf-8")

    # No on-disk file: synthesize one from the pinned runtime requirement list.
    header = [
        "# Runtime dependencies for standalone HVU_QA launcher.",
        "# Nếu dùng GPU NVIDIA, hãy cài đúng bản torch theo CUDA của máy nếu cần.",
    ]
    # The trailing empty entry produces a final newline after the join.
    return "\n".join(header + list(RUNTIME_REQUIREMENTS) + [""])
|
|
|
|
def build_runtime_source_file_map() -> dict[str, str] | None:
    """Collect runtime text files from the script directory.

    Returns a mapping of relative path -> file content, or ``None`` as soon as
    any expected file is missing (one absent file invalidates the local bundle).
    """
    collected: dict[str, str] = {}
    for rel_path in RUNTIME_SOURCE_TEXT_FILES:
        candidate = SCRIPT_ROOT / rel_path
        if not candidate.exists():
            return None
        collected[rel_path] = candidate.read_text(encoding="utf-8")
    return collected
|
|
|
|
def load_embedded_runtime_file_map() -> dict[str, str]:
    """Decode the embedded base64+zlib runtime bundle into a path -> content map.

    Any decoding or validation failure falls back to
    ``INLINE_RUNTIME_FILE_MAP_FACTORY()`` so the launcher always has a usable
    runtime. Fix applied: the caught exception was bound as ``exc`` but never
    used; the unused binding is removed.
    """
    # The payload may be stored with arbitrary whitespace/line breaks.
    payload = "".join(EMBEDDED_RUNTIME_TEXT_PAYLOAD_B64.split())
    if not payload:
        return INLINE_RUNTIME_FILE_MAP_FACTORY()

    try:
        # Re-pad to a multiple of 4 so b64decode accepts padding-stripped payloads.
        raw_bytes = base64.b64decode(payload + ("=" * (-len(payload) % 4)))
        decoded = json.loads(zlib.decompress(raw_bytes).decode("utf-8"))
    except Exception:  # deliberate broad catch: any corruption means "use the fallback"
        print_step(
            "Không đọc được runtime nhúng từ payload base64. "
            "Đang chuyển sang fallback inline an toàn."
        )
        return INLINE_RUNTIME_FILE_MAP_FACTORY()

    if not isinstance(decoded, dict):
        print_step("Runtime nhúng từ payload không hợp lệ. Đang dùng fallback inline an toàn.")
        return INLINE_RUNTIME_FILE_MAP_FACTORY()

    runtime_files: dict[str, str] = {}
    for relative_path, content in decoded.items():
        # Reject the whole payload if any entry is not a str -> str pair.
        if not isinstance(relative_path, str) or not isinstance(content, str):
            print_step("Runtime nhúng chứa dữ liệu không hợp lệ. Đang dùng fallback inline an toàn.")
            return INLINE_RUNTIME_FILE_MAP_FACTORY()
        runtime_files[relative_path] = content

    # Guarantee a requirements.txt so downstream install steps always have one.
    if "requirements.txt" not in runtime_files:
        runtime_files["requirements.txt"] = build_runtime_requirements_text()

    return runtime_files
|
|
|
|
def build_runtime_file_map() -> dict[str, str]:
    """Prefer runtime files read from disk; fall back to the embedded payload."""
    local_map = build_runtime_source_file_map()
    # A None (or empty) local map means the source tree is incomplete.
    return local_map if local_map else load_embedded_runtime_file_map()
|
|
|
|
def sync_text_file(destination_file: Path, content: str, force_write: bool) -> bool:
    """Write *content* to *destination_file*; return ``True`` when bytes hit disk.

    Unless *force_write* is set, an existing file with identical text is left
    untouched so timestamps (and downstream "reused" counters) stay stable.
    """
    destination_file.parent.mkdir(parents=True, exist_ok=True)
    if not force_write and destination_file.exists():
        if destination_file.read_text(encoding="utf-8") == content:
            return False
    destination_file.write_text(content, encoding="utf-8")
    return True
|
|
|
|
def sync_binary_file(destination_file: Path, source_file: Path, force_write: bool) -> bool:
    """Copy *source_file* bytes to *destination_file*; return ``True`` if written.

    Skips the write when the destination already holds identical bytes and
    *force_write* is not set.
    """
    destination_file.parent.mkdir(parents=True, exist_ok=True)
    payload = source_file.read_bytes()

    already_in_sync = (
        not force_write
        and destination_file.exists()
        and destination_file.read_bytes() == payload
    )
    if already_in_sync:
        return False

    destination_file.write_bytes(payload)
    return True
|
|
|
|
def materialize_runtime_assets(runtime_root: Path, force_refresh: bool) -> tuple[int, int]:
    """Copy optional binary assets into *runtime_root*.

    Returns ``(written, reused)`` counts. Assets missing from the script
    directory are silently skipped — they are optional by definition.
    """
    written = 0
    untouched = 0

    for rel_path in RUNTIME_OPTIONAL_ASSET_FILES:
        origin = SCRIPT_ROOT / rel_path
        if not origin.exists():
            continue

        if sync_binary_file(runtime_root / rel_path, origin, force_write=force_refresh):
            written += 1
        else:
            untouched += 1

    return written, untouched
|
|
|
|
def materialize_standalone_runtime(runtime_root: Path, force_refresh: bool) -> None:
    """Lay out the standalone runtime tree under *runtime_root* and report progress."""
    created = 0
    reused = 0

    # Text files first (code / web assets from the runtime file map) ...
    for rel_path, file_text in build_runtime_file_map().items():
        if sync_text_file(runtime_root / rel_path, file_text, force_write=force_refresh):
            created += 1
        else:
            reused += 1

    # ... then any optional binary assets shipped alongside the script.
    extra_created, extra_reused = materialize_runtime_assets(runtime_root, force_refresh=force_refresh)
    created += extra_created
    reused += extra_reused

    print_step(
        f"Đã chuẩn bị runtime standalone tại {runtime_root}. "
        f"File mới/cập nhật: {created}, file giữ nguyên: {reused}."
    )
|
|
|
|
def resolve_runtime_context(args: argparse.Namespace) -> RuntimeContext:
    """Decide whether to run from the local project tree or a standalone runtime dir.

    In standalone mode the runtime directory is created/refreshed on the spot.
    """
    if has_local_project(SCRIPT_ROOT) and not args.force_standalone_runtime:
        runtime_root = SCRIPT_ROOT
        standalone_mode = False
    else:
        standalone_mode = True
        candidate = Path(args.runtime_dir).expanduser()
        # Relative --runtime-dir values are anchored at the script directory.
        if not candidate.is_absolute():
            candidate = SCRIPT_ROOT / candidate
        runtime_root = candidate.resolve()
        materialize_standalone_runtime(runtime_root, force_refresh=args.force_runtime_refresh)

    model_dir = runtime_root / "t5-viet-qg-finetuned"
    context = RuntimeContext(
        root=runtime_root,
        main_file=runtime_root / "main.py",
        requirements_file=runtime_root / "requirements.txt",
        local_model_dir=model_dir,
        local_best_model_dir=model_dir / "best-model",
        standalone_mode=standalone_mode,
    )
    print_step(f"Runtime mode: {'standalone' if standalone_mode else 'full project'}")
    print_step(f"Runtime root: {context.root}")
    return context
|
|
|
|
def maybe_bootstrap_tool_venv(args: argparse.Namespace) -> int | None:
    """Relaunch this script inside the launcher's private venv when needed.

    Returns the child process exit code after a relaunch, or ``None`` when
    execution should simply continue in the current interpreter.
    """
    # Already inside some virtualenv, or explicitly disabled: nothing to do.
    if args.no_venv or is_running_in_virtualenv():
        return None

    if not TOOL_VENV_PYTHON.exists():
        print_step("Không phát hiện virtualenv hiện tại. Đang tạo môi trường riêng cho launcher...")
        run_command([sys.executable, "-m", "venv", str(TOOL_VENV_DIR)], cwd=SCRIPT_ROOT)
        run_command([str(TOOL_VENV_PYTHON), "-m", "pip", "install", "--upgrade", "pip"], cwd=SCRIPT_ROOT)

    child_env = os.environ.copy()
    # Marker visible to the relaunched process; presumably checked elsewhere — TODO confirm.
    child_env["HVU_QA_TOOL_BOOTSTRAPPED"] = "1"
    child_command = [str(TOOL_VENV_PYTHON), str(Path(__file__).resolve()), *sys.argv[1:]]

    print_step("Đang chuyển sang môi trường Python riêng của launcher...")
    return subprocess.call(child_command, cwd=str(SCRIPT_ROOT), env=child_env)
|
|
|
|
def ensure_huggingface_hub(skip_install: bool, context: RuntimeContext) -> None:
    """Ensure ``huggingface_hub`` is importable, installing it unless disallowed.

    Raises RuntimeError with an actionable pip hint when *skip_install* is set.
    """
    if module_exists("huggingface_hub"):
        return

    if skip_install:
        # Point the user at the most specific install command available.
        if context.requirements_file.exists():
            install_hint = f"{sys.executable} -m pip install -r {context.requirements_file}"
        else:
            install_hint = f"{sys.executable} -m pip install {HF_HUB_REQUIREMENT}"
        raise RuntimeError(
            "Thiếu huggingface_hub. Hãy chạy "
            f"`{install_hint}` hoặc bỏ `--skip-install`."
        )

    print_step("Thiếu huggingface_hub. Đang cài tự động...")
    if context.requirements_file.exists():
        run_command([sys.executable, "-m", "pip", "install", "-r", str(context.requirements_file)], cwd=context.root)
    else:
        run_command([sys.executable, "-m", "pip", "install", HF_HUB_REQUIREMENT], cwd=context.root)
|
|
|
|
def find_missing_dependencies() -> list[str]:
    """Return pip package names from DEPENDENCY_IMPORTS whose module cannot be imported.

    Idiom fix: the manual append loop is replaced by a comprehension; insertion
    order of DEPENDENCY_IMPORTS is preserved, matching the original behavior.
    """
    return [
        package_name
        for package_name, module_name in DEPENDENCY_IMPORTS.items()
        if not module_exists(module_name)
    ]
|
|
|
|
def ensure_runtime_dependencies(skip_install: bool, context: RuntimeContext) -> None:
    """Verify every runtime dependency is importable, installing what's missing.

    Raises RuntimeError with a pip hint when *skip_install* forbids installing.
    """
    missing = find_missing_dependencies()
    if not missing:
        print_step("Môi trường Python đã có đủ dependency cần thiết.")
        return

    missing_label = ", ".join(missing)
    has_requirements = context.requirements_file.exists()

    if skip_install:
        if has_requirements:
            install_hint = f"{sys.executable} -m pip install -r {context.requirements_file}"
        else:
            install_hint = f"{sys.executable} -m pip install {' '.join(RUNTIME_REQUIREMENTS)}"
        raise RuntimeError(
            f"Thiếu dependency: {missing_label}. "
            f"Hãy chạy `{install_hint}` hoặc bỏ `--skip-install`."
        )

    if has_requirements:
        print_step(f"Đang cài dependency còn thiếu: {missing_label}")
        run_command([sys.executable, "-m", "pip", "install", "-r", str(context.requirements_file)], cwd=context.root)
        return

    print_step(f"Đang cài dependency runtime còn thiếu: {missing_label}")
    run_command([sys.executable, "-m", "pip", "install", *RUNTIME_REQUIREMENTS], cwd=context.root)
|
|
|
|
def select_repo_files(
    repo_files: list[str],
    best_model_only: bool,
    include_runtime_bundle: bool,
) -> list[str]:
    """Filter repo paths down to the allowed scope, dropping ignored model artifacts.

    Paths are normalized to forward slashes and returned sorted.
    """
    allow_patterns = build_allow_patterns(best_model_only, include_runtime_bundle=include_runtime_bundle)

    def _wanted(path: str) -> bool:
        # Must match an allow pattern AND not be an explicitly ignored artifact.
        return matches_any_pattern(path, allow_patterns) and not matches_any_pattern(path, MODEL_IGNORE_PATTERNS)

    normalized = (repo_file.replace("\\", "/") for repo_file in repo_files)
    return sorted(path for path in normalized if _wanted(path))
|
|
|
|
def get_target_destination(context: RuntimeContext, repo_file: str) -> Path:
    """Map a repo-relative file path to its destination under the runtime root."""
    normalized = repo_file.replace("\\", "/")

    mapped = HF_RUNTIME_REPO_FILE_MAP.get(normalized)
    if mapped is not None:
        # Runtime bundle files land at an explicitly mapped location.
        return context.root / Path(mapped)

    # Everything else is a model artifact placed relative to the model subdir.
    return context.local_model_dir / Path(normalized).relative_to(HF_MODEL_SUBDIR)
|
|
|
|
def is_runtime_repo_file(repo_file: str) -> bool:
    """Return ``True`` when the repo path belongs to the runtime web bundle."""
    normalized = repo_file.replace("\\", "/")
    return normalized in HF_RUNTIME_REPO_FILE_MAP
|
|
|
|
def resolve_repo_files(
    repo_id: str,
    revision: str,
    best_model_only: bool,
    include_runtime_bundle: bool,
) -> list[dict[str, int | str | None]]:
    """List the dataset-repo files to download as ``{"path", "size"}`` dicts.

    Raises FileNotFoundError when nothing in the repo matches the requested scope.
    """
    from huggingface_hub import HfApi

    tree = HfApi().list_repo_tree(repo_id=repo_id, repo_type="dataset", revision=revision, recursive=True)

    file_paths: list[str] = []
    size_by_path: dict[str, int | None] = {}
    for node in tree:
        node_path = str(getattr(node, "path", "")).replace("\\", "/")
        node_size = getattr(node, "size", None)
        # Directories carry no size; skip them along with malformed entries.
        if not node_path or node_path.endswith("/") or node_size is None:
            continue
        file_paths.append(node_path)
        size_by_path[node_path] = node_size

    selected_paths = select_repo_files(
        file_paths,
        best_model_only=best_model_only,
        include_runtime_bundle=include_runtime_bundle,
    )
    if not selected_paths:
        scope = "best-model" if best_model_only else "model"
        if include_runtime_bundle:
            scope = f"runtime web + {scope}"
        raise FileNotFoundError(
            f"Không tìm thấy file {scope} hợp lệ trong repo {repo_id}@{revision}. "
            "Hãy kiểm tra lại cấu trúc repo trên Hugging Face."
        )

    return [{"path": path, "size": size_by_path.get(path)} for path in selected_paths]
|
|
|
|
def sync_single_file(
    source_file: Path,
    destination_file: Path,
    force_copy: bool,
    *,
    verify_content: bool = False,
) -> tuple[bool, int]:
    """Copy *source_file* to *destination_file* unless it already matches.

    Returns ``(copied, size_in_bytes)``. Without *verify_content* the match
    check is size-only (cheap, but tolerates same-size content drift); with
    *verify_content* the full bytes are compared before skipping the copy.
    """
    destination_file.parent.mkdir(parents=True, exist_ok=True)
    byte_count = source_file.stat().st_size

    size_matches = (
        not force_copy
        and destination_file.exists()
        and destination_file.stat().st_size == byte_count
    )
    if size_matches and (not verify_content or destination_file.read_bytes() == source_file.read_bytes()):
        return False, byte_count

    shutil.copy2(source_file, destination_file)
    return True, byte_count
|
|
|
|
def download_and_sync_model(
    context: RuntimeContext,
    repo_id: str,
    revision: str,
    force_download: bool,
    best_model_only: bool,
    include_runtime_bundle: bool,
) -> tuple[int, int, int, int]:
    """Download the selected repo files and mirror them into the local tree.

    Each file is fetched through the Hub cache, then synced into its target
    location with progress reporting after every file.

    Returns:
        ``(copied_files, skipped_files, copied_bytes, skipped_bytes)``.
    """
    from huggingface_hub import hf_hub_download

    manifest = resolve_repo_files(
        repo_id=repo_id,
        revision=revision,
        best_model_only=best_model_only,
        include_runtime_bundle=include_runtime_bundle,
    )
    total_files = len(manifest)
    total_bytes = sum(int(entry["size"] or 0) for entry in manifest)

    copied_files = skipped_files = 0
    copied_bytes = skipped_bytes = processed_bytes = 0

    base_scope = "best-model" if best_model_only else "toàn bộ model"
    download_scope = f"runtime web + {base_scope}" if include_runtime_bundle else base_scope

    print_step(f"Tìm thấy {total_files} file cần đồng bộ cho {download_scope}.")

    for position, entry in enumerate(manifest, start=1):
        repo_file = str(entry["path"])
        target = get_target_destination(context, repo_file)
        label = target.relative_to(context.root).as_posix()
        print_step(f"[{position}/{total_files}] Đang tải {label}")

        cached = hf_hub_download(
            repo_id=repo_id,
            repo_type="dataset",
            revision=revision,
            filename=repo_file,
            force_download=force_download,
            local_files_only=False,
        )

        # Runtime bundle files get a content check; size alone decides for model files.
        copied, size = sync_single_file(
            Path(cached),
            target,
            force_copy=force_download,
            verify_content=is_runtime_repo_file(repo_file),
        )
        if copied:
            copied_files += 1
            copied_bytes += size
            print_step(f" Đã đồng bộ {label} ({format_bytes(size)})")
        else:
            skipped_files += 1
            skipped_bytes += size
            print_step(f" Giữ nguyên {label} ({format_bytes(size)})")

        processed_bytes += size
        # Listed sizes can undercount actual files; never let the bar exceed 100%.
        total_bytes = max(total_bytes, processed_bytes)

        print_step(
            f" Tổng tiến độ {render_progress_bar(processed_bytes, total_bytes)} "
            f"({format_bytes(processed_bytes)}/{format_bytes(total_bytes)})"
        )

    return copied_files, skipped_files, copied_bytes, skipped_bytes
|
|
|
|
def required_model_files(context: RuntimeContext, best_model_only: bool) -> list[Path]:
    """Return the file paths that must exist for the local model to be usable."""
    model_dir = context.local_best_model_dir if best_model_only else context.local_model_dir
    filenames = (
        "config.json",
        "generation_config.json",
        "model.safetensors",
        "tokenizer_config.json",
        "special_tokens_map.json",
        "spiece.model",
    )
    return [model_dir / name for name in filenames]
|
|
|
|
def validate_local_model_dir(context: RuntimeContext, best_model_only: bool) -> None:
    """Raise FileNotFoundError if any required model file is missing locally."""
    missing: list[str] = []
    for required in required_model_files(context, best_model_only):
        if not required.exists():
            missing.append(str(required.relative_to(context.root)))
    if missing:
        raise FileNotFoundError(
            "Model chưa đầy đủ sau khi tải về. Thiếu các file: " + ", ".join(missing)
        )
|
|
|
|
def prepare_model(
    context: RuntimeContext,
    repo_id: str,
    revision: str,
    force_download: bool,
    skip_download: bool,
    best_model_only: bool,
) -> None:
    """Ensure the model files exist locally, downloading from the Hub if needed.

    With ``skip_download`` the function only validates what is already on disk;
    otherwise it syncs from the Hub first and validates afterwards.
    """
    if skip_download:
        print_step("Bỏ qua bước tải model theo yêu cầu `--skip-download`.")
        validate_local_model_dir(context, best_model_only=best_model_only)
        return

    stats = download_and_sync_model(
        context=context,
        repo_id=repo_id,
        revision=revision,
        force_download=force_download,
        best_model_only=best_model_only,
        include_runtime_bundle=context.standalone_mode,
    )
    copied_files, skipped_files, copied_bytes, skipped_bytes = stats
    validate_local_model_dir(context, best_model_only=best_model_only)

    scope = "best-model" if best_model_only else "toàn bộ model"
    if context.standalone_mode:
        scope = f"runtime web + {scope}"
    print_step(
        f"Đồng bộ {scope} xong. "
        f"File mới/cập nhật: {copied_files} ({format_bytes(copied_bytes)}), "
        f"file giữ nguyên: {skipped_files} ({format_bytes(skipped_bytes)})."
    )
|
|
|
|
def build_runtime_env(context: RuntimeContext, args: argparse.Namespace) -> dict[str, str]:
    """Build the child-process environment from CLI args and the runtime context.

    Only explicitly-provided CLI options override the inherited environment;
    the model directory is always pinned to the resolved local path.
    """
    env = dict(os.environ)

    overrides = {
        "HVU_HOST": args.host or None,
        "HVU_PORT": str(args.port) if args.port is not None else None,
        "HVU_DEVICE": args.device or None,
        "HVU_DEBUG": "1" if args.debug else None,
        "HVU_OPEN_BROWSER": "0" if args.no_browser else None,
    }
    for key, value in overrides.items():
        if value is not None:
            env[key] = value

    env["HVU_MODEL_DIR"] = str(context.local_model_dir)
    return env
|
|
|
|
def launch_app(context: RuntimeContext, args: argparse.Namespace) -> int:
    """Run the web app as a child process and return its exit code.

    Raises:
        FileNotFoundError: when the app entry file is missing.
    """
    if not context.main_file.exists():
        raise FileNotFoundError(f"Không tìm thấy file chạy ứng dụng: {context.main_file}")

    env = build_runtime_env(context, args)

    print_step("Đang chạy ứng dụng web...")
    host = env.get("HVU_HOST", "127.0.0.1")
    port = env.get("HVU_PORT", "5000")
    print_step(f"Mở trình duyệt tại http://{host}:{port}")
    return subprocess.call(
        [sys.executable, str(context.main_file)],
        cwd=str(context.root),
        env=env,
    )
|
|
|
|
def build_parser() -> argparse.ArgumentParser:
    """Create the CLI argument parser for the launcher."""
    parser = argparse.ArgumentParser(
        description=(
            "Launcher cho HVU_QA: có thể chạy full project nếu đang đứng trong repo, "
            "hoặc tự dựng runtime standalone khi chỉ có file HVU_QA_tool.py."
        ),
    )
    # Value-taking options first.
    parser.add_argument("--repo-id", default=HF_DATASET_REPO_ID, help="Repo dataset trên Hugging Face.")
    parser.add_argument("--revision", default=HF_DATASET_REVISION, help="Revision trên Hugging Face.")
    parser.add_argument("--host", default=None, help="Host chạy Flask. Mặc định dùng HVU_HOST hoặc 127.0.0.1.")
    parser.add_argument("--port", type=int, default=None, help="Port chạy Flask. Mặc định dùng HVU_PORT hoặc 5000.")
    parser.add_argument(
        "--device",
        choices=["auto", "cpu", "cuda"],
        default=None,
        help="Thiết bị chạy model. Mặc định dùng HVU_DEVICE hoặc auto.",
    )
    # Boolean switches, declared as (flag, help) pairs; order matters for --help output.
    for flag, help_text in [
        ("--debug", "Bật Flask debug."),
        ("--no-browser", "Không tự mở trình duyệt."),
        ("--no-venv", "Không tự tạo virtualenv riêng cho launcher."),
        ("--force-download", "Tải lại model và ghi đè file local."),
        ("--best-model-only", "Chỉ tải thư mục best-model. Lệnh này chỉ dùng được khi repo thật sự có best-model."),
        ("--skip-download", "Bỏ qua bước tải model từ Hugging Face."),
        ("--skip-install", "Không tự cài dependency còn thiếu."),
        ("--skip-run", "Chỉ chuẩn bị môi trường và model, không chạy app."),
    ]:
        parser.add_argument(flag, action="store_true", help=help_text)
    parser.add_argument(
        "--runtime-dir",
        default="HVU_QA_runtime",
        help="Thư mục runtime standalone sẽ được tạo nếu không có full project hoặc khi ép standalone.",
    )
    for flag, help_text in [
        ("--force-standalone-runtime", "Luôn dựng runtime standalone, kể cả khi đang đứng trong full project."),
        ("--force-runtime-refresh", "Ghi đè lại các file runtime standalone được nhúng sẵn trong launcher."),
        ("--prepare-runtime-only", "Chỉ dựng runtime standalone hoặc kiểm tra full project hiện tại, không cài dependency, không tải model."),
    ]:
        parser.add_argument(flag, action="store_true", help=help_text)
    return parser
|
|
|
|
def main() -> int:
    """Launcher entry point: prepare environment and model, then run the app.

    Returns:
        Process exit code (0 on success, or the app's exit code).
    """
    # Force UTF-8 so the Vietnamese status messages render on any console.
    for stream in (sys.stdout, sys.stderr):
        if hasattr(stream, "reconfigure"):
            stream.reconfigure(encoding="utf-8")

    args = build_parser().parse_args()

    # When bootstrapping re-executes inside the tool venv, propagate its exit code.
    bootstrap_exit_code = maybe_bootstrap_tool_venv(args)
    if bootstrap_exit_code is not None:
        return bootstrap_exit_code

    print_step("Bắt đầu chuẩn bị dự án HVU_QA...")
    context = resolve_runtime_context(args)

    if args.prepare_runtime_only:
        print_step("Đã chuẩn bị xong runtime. Bỏ qua các bước tiếp theo theo `--prepare-runtime-only`.")
        return 0

    ensure_huggingface_hub(skip_install=args.skip_install, context=context)
    prepare_model(
        context=context,
        repo_id=args.repo_id,
        revision=args.revision,
        force_download=args.force_download,
        skip_download=args.skip_download,
        best_model_only=args.best_model_only,
    )
    ensure_runtime_dependencies(skip_install=args.skip_install, context=context)

    if args.skip_run:
        print_step("Đã chuẩn bị xong model và dependency. Bỏ qua chạy app theo `--skip-run`.")
        return 0

    return launch_app(context, args)
|
|
|
|
if __name__ == "__main__":
    # sys.exit raises SystemExit with main()'s return code, same as the
    # explicit `raise SystemExit(main())` form.
    sys.exit(main())
|
|