{
  "_name_or_path": "/storage6/aa_fujimoto/sft/zoo_3exp_v2_2epoch_5000",
  "adapter_configs": [
    {
      "alpha_pattern": {},
      "auto_mapping": {
        "base_model_class": "LlamaForCausalLM",
        "parent_library": "transformers.models.llama.modeling_llama"
      },
      "base_model_name_or_path": "team-sanai/llama2_7B_pretrain",
      "bias": "none",
      "fan_in_fan_out": false,
      "inference_mode": true,
      "init_lora_weights": true,
      "layer_replication": null,
      "layers_pattern": null,
      "layers_to_transform": null,
      "loftq_config": {},
      "lora_alpha": 32,
      "lora_dropout": 0.1,
      "megatron_config": null,
      "megatron_core": "megatron.core",
      "modules_to_save": null,
      "peft_type": "LORA",
      "r": 560,
      "rank_pattern": {},
      "revision": null,
| "target_modules": "{'down_proj', 'up_proj', 'gate_proj'}", |
| "task_type": null, |
| "use_dora": false, |
| "use_rslora": false |
| }, |
| { |
| "alpha_pattern": {}, |
| "auto_mapping": { |
| "base_model_class": "LlamaForCausalLM", |
| "parent_library": "transformers.models.llama.modeling_llama" |
| }, |
| "base_model_name_or_path": "team-sanai/llama2_7B_pretrain", |
| "bias": "none", |
| "fan_in_fan_out": false, |
| "inference_mode": true, |
| "init_lora_weights": true, |
| "layer_replication": null, |
| "layers_pattern": null, |
| "layers_to_transform": null, |
| "loftq_config": {}, |
| "lora_alpha": 32, |
| "lora_dropout": 0.1, |
| "megatron_config": null, |
| "megatron_core": "megatron.core", |
| "modules_to_save": null, |
| "peft_type": "LORA", |
| "r": 560, |
| "rank_pattern": {}, |
| "revision": null, |
| "target_modules": "{'down_proj', 'up_proj', 'gate_proj'}", |
| "task_type": null, |
| "use_dora": false, |
| "use_rslora": false |
| }, |
| { |
| "alpha_pattern": {}, |
| "auto_mapping": { |
| "base_model_class": "LlamaForCausalLM", |
| "parent_library": "transformers.models.llama.modeling_llama" |
| }, |
| "base_model_name_or_path": "team-sanai/llama2_7B_pretrain", |
| "bias": "none", |
| "fan_in_fan_out": false, |
| "inference_mode": true, |
| "init_lora_weights": true, |
| "layer_replication": null, |
| "layers_pattern": null, |
| "layers_to_transform": null, |
| "loftq_config": {}, |
| "lora_alpha": 32, |
| "lora_dropout": 0.1, |
| "megatron_config": null, |
| "megatron_core": "megatron.core", |
| "modules_to_save": null, |
| "peft_type": "LORA", |
| "r": 560, |
| "rank_pattern": {}, |
| "revision": null, |
| "target_modules": "{'down_proj', 'up_proj', 'gate_proj'}", |
| "task_type": null, |
| "use_dora": false, |
| "use_rslora": false |
| } |
| ], |
| "architectures": [ |
| "LlamaForCausalLM" |
| ], |
| "attention_bias": false, |
| "attention_dropout": 0.0, |
| "auto_map": { |
| "AutoModelForCausalLM": "modeling_llama.LlamaForCausalLM" |
| }, |
| "bos_token_id": 1, |
| "eos_token_id": 2, |
| "hidden_act": "silu", |
| "hidden_size": 4096, |
| "initializer_range": 0.009, |
| "intermediate_size": 11008, |
| "max_position_embeddings": 4096, |
| "max_sequence_length": 4096, |
| "model_type": "llama", |
| "num_attention_heads": 32, |
| "num_experts": 3, |
| "num_experts_per_tok": 2, |
| "num_hidden_layers": 32, |
| "num_key_value_heads": 32, |
| "pad_token_id": 0, |
| "pretraining_tp": 1, |
| "rms_norm_eps": 1e-05, |
| "rope_scaling": null, |
| "rope_theta": 10000.0, |
| "router_layers": [ |
| "down_proj", |
| "up_proj", |
| "gate_proj" |
| ], |
| "router_layers_index": [ |
| 0, |
| 1, |
| 2, |
| 3, |
| 4, |
| 5, |
| 6, |
| 7, |
| 8, |
| 9, |
| 10, |
| 11, |
| 12, |
| 13, |
| 14, |
| 15, |
| 16, |
| 17, |
| 18, |
| 19, |
| 20, |
| 21, |
| 22, |
| 23, |
| 24, |
| 25, |
| 26, |
| 27, |
| 28, |
| 29, |
| 30, |
| 31 |
| ], |
| "tie_word_embeddings": false, |
| "torch_dtype": "float16", |
| "transformers_version": "4.40.1", |
| "use_cache": true, |
| "vocab_size": 32000 |
| } |
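This config describes a mixture-of-LoRA-experts variant of the `team-sanai/llama2_7B_pretrain` base model: the three LoRA adapter configs in `adapter_configs` act as experts, a router covers the `down_proj`/`up_proj`/`gate_proj` MLP projections in all 32 layers (`router_layers`, `router_layers_index`), and each token activates 2 of the 3 experts (`num_experts_per_tok` out of `num_experts`). Because `auto_map` routes `AutoModelForCausalLM` to the custom `modeling_llama.LlamaForCausalLM` shipped with the checkpoint, loading goes through `trust_remote_code`. A minimal loading sketch, assuming this `config.json` sits in a checkpoint directory together with that modeling code and tokenizer files (the path below is a placeholder):

```python
# Minimal loading sketch; the checkpoint path is a placeholder and the
# tokenizer is assumed to ship alongside this config.json.
import torch
from transformers import AutoModelForCausalLM, AutoTokenizer

checkpoint = "/path/to/this/checkpoint"  # placeholder, not a real path

# "auto_map" points AutoModelForCausalLM at the checkpoint's own
# modeling_llama.LlamaForCausalLM, so trust_remote_code=True is needed
# for the custom MoE-of-LoRA class to be used instead of stock Llama.
model = AutoModelForCausalLM.from_pretrained(
    checkpoint,
    torch_dtype=torch.float16,  # matches "torch_dtype": "float16"
    trust_remote_code=True,
)
tokenizer = AutoTokenizer.from_pretrained(checkpoint)

inputs = tokenizer("Hello", return_tensors="pt")
output_ids = model.generate(**inputs, max_new_tokens=32)
print(tokenizer.decode(output_ids[0], skip_special_tokens=True))
```

One notable design choice: `r: 560` is far larger than typical LoRA ranks (8 to 64). Since `use_rslora` is false, the standard LoRA scaling `lora_alpha / r` applies, so each adapter's update is scaled by 32 / 560 ≈ 0.057.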