cnxup committed on
Commit
fa8b46b
·
verified ·
1 Parent(s): 7dfebfb

Upload config.json

Browse files
Files changed (1) hide show
  1. config.json +105 -0
config.json ADDED
@@ -0,0 +1,105 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
{
  "architectures": [
    "LlavaNextForConditionalGeneration"
  ],
  "hidden_size": 4096,
  "ignore_index": -100,
  "image_grid_pinpoints": [
    [
      336,
      672
    ],
    [
      672,
      336
    ],
    [
      672,
      672
    ],
    [
      1008,
      336
    ],
    [
      336,
      1008
    ]
  ],
  "image_seq_length": 576,
  "image_token_index": 128256,
  "keys_to_ignore_at_inference": [
    "past_key_values"
  ],
  "mha2mla": {
    "is_baseline": false,
    "is_gqa2mha2mla": false,
    "low_rank": 128,
    "multimodal_rope_section_for_mla": null,
    "partial_rope_version": "mkl",
    "peft_train": "v2",
    "qk_tensor_path": "mkl/llava_next/llava_next-ranks.pth",
    "rope_dim_for_mla": 32,
    "stage1_path": "cnxup/LLaVA-NeXT-8B-MLA-stage1-rope32",
    "svd_init_method": "joint",
    "svd_init_weight_path": "LLaVA-NeXT-8B-rope32-d_kv_128.pt",
    "svd_split_modal": true,
    "uniform_start_point": 0
  },
  "model_type": "llava_next",
  "multimodal_projector_bias": true,
  "pad_token_id": 128257,
  "projector_hidden_act": "gelu",
  "text_config": {
    "_name_or_path": "meta-llama/Meta-Llama-3-8B-Instruct",
    "architectures": [
      "LlamaForCausalLM"
    ],
    "attention_bias": false,
    "attention_dropout": 0.0,
    "bos_token_id": 128000,
    "eos_token_id": 128009,
    "head_dim": 128,
    "hidden_act": "silu",
    "hidden_size": 4096,
    "initializer_range": 0.02,
    "intermediate_size": 14336,
    "max_position_embeddings": 8192,
    "mlp_bias": false,
    "model_type": "llama",
    "num_attention_heads": 32,
    "num_hidden_layers": 32,
    "num_key_value_heads": 8,
    "pretraining_tp": 1,
    "rms_norm_eps": 1e-05,
    "rope_scaling": null,
    "rope_theta": 500000.0,
    "torch_dtype": "bfloat16",
    "use_cache": true,
    "vocab_size": 128320
  },
  "tie_word_embeddings": false,
  "torch_dtype": "bfloat16",
  "transformers_version": "4.51.0",
  "use_image_newline_parameter": true,
  "vision_config": {
    "attention_dropout": 0.0,
    "hidden_act": "quick_gelu",
    "hidden_size": 1024,
    "image_size": 336,
    "initializer_factor": 1.0,
    "initializer_range": 0.02,
    "intermediate_size": 4096,
    "layer_norm_eps": 1e-05,
    "model_type": "clip_vision_model",
    "num_attention_heads": 16,
    "num_channels": 3,
    "num_hidden_layers": 24,
    "patch_size": 14,
    "projection_dim": 768,
    "torch_dtype": "bfloat16",
    "vocab_size": 32000
  },
  "vision_feature_layer": -2,
  "vision_feature_select_strategy": "default"
}