{
  "config": {
    "model_name": "Qwen/Qwen2.5-3B-Instruct",
    "model_dtype": "float16",
    "model_sha": "main"
  },
  "results": {
    "KyrgyzMMLU": {
      "metric_name": 0.34
    },
    "KyrgyzRC": {
      "metric_name": 0.732
    },
    "WinoGrande": {
      "metric_name": 0.513
    },
    "BoolQ": {
      "metric_name": 0.574
    },
    "HellaSwag": {
      "metric_name": 0.237
    },
    "GSM8K": {
      "metric_name": 0.095
    },
    "TruthfulQA": {
      "metric_name": 0.344
    }
  }
}