{
  "activation_function": "gelu_new",
  "architectures": [
    "LlavaGPTForCausalLM"
  ],
  "attention_bias": false,
  "attention_dropout": 0.0,
  "attn_pdrop": 0.1,
  "bos_token_id": 1,
  "detect_loss": false,
  "embd_pdrop": 0.1,
  "eos_token_id": 2,
  "freeze_mm_mlp_adapter": false,
  "hidden_act": "silu",
  "image_aspect_ratio": "pad",
  "initializer_range": 0.02,
  "intermediate_size": 3072,
  "is_decoder": true,
  "layer_norm_epsilon": 1e-05,
  "mm_projector_lr": null,
  "mm_use_im_patch_token": false,
  "mm_use_im_start_end": false,
  "mm_vision_tower": "dino",
  "model_type": "gpt2",
  "n_embd": 768,
  "n_head": 12,
  "n_inner": null,
  "n_layer": 12,
  "n_positions": 1024,
  "num_key_value_heads": 12,
  "reorder_and_upcast_attn": false,
  "resid_pdrop": 0.1,
  "rms_norm_eps": 1e-05,
  "rope_scaling": null,
  "rope_theta": 10000.0,
  "scale_attn_by_inverse_layer_idx": false,
  "scale_attn_weights": true,
  "summary_activation": null,
  "summary_first_dropout": 0.1,
  "summary_proj_to_labels": true,
  "summary_type": "cls_index",
  "summary_use_proj": true,
  "tie_word_embeddings": false,
  "tokenizer_class": "LlamaTokenizer",
  "tokenizer_model_max_length": 2048,
  "tokenizer_padding_side": "right",
  "torch_dtype": "float32",
  "transformers_version": "4.38.0",
  "tune_mm_mlp_adapter": false,
  "use_cache": false,
  "vision_tower_type": "dino",
  "vocab_size": 25005
}