{
"_entry_class": "MultiModelCacheEntry",
"_model_id": "PixArt-alpha/PixArt-XL-2-512x512",
"_task": null,
"text_encoder": {
"architectures": [
"T5EncoderModel"
],
"classifier_dropout": 0.0,
"d_ff": 10240,
"d_kv": 64,
"d_model": 4096,
"decoder_start_token_id": 0,
"dense_act_fn": "gelu_new",
"dropout_rate": 0.1,
"feed_forward_proj": "gated-gelu",
"initializer_factor": 1.0,
"is_encoder_decoder": true,
"is_gated_act": true,
"layer_norm_epsilon": 1e-06,
"model_type": "t5",
"neuron": {
"auto_cast": null,
"auto_cast_type": null,
"compiler_type": "neuronx-cc",
"compiler_version": "2.17.194.0+d312836f",
"dynamic_batch_size": false,
"inline_weights_to_neff": false,
"optlevel": "2",
"output_attentions": false,
"output_hidden_states": false,
"static_batch_size": 1,
"static_sequence_length": 120,
"task": "feature-extraction",
"tensor_parallel_size": 1
},
"num_decoder_layers": 24,
"num_heads": 64,
"num_layers": 24,
"output_past": true,
"relative_attention_max_distance": 128,
"relative_attention_num_buckets": 32,
"tie_word_embeddings": false,
"use_cache": true,
"vocab_size": 32128
},
"transformer": {
"_class_name": "PixArtTransformer2DModel",
"activation_fn": "gelu-approximate",
"attention_bias": true,
"attention_head_dim": 72,
"attention_type": "default",
"caption_channels": 4096,
"cross_attention_dim": 1152,
"double_self_attention": false,
"dropout": 0.0,
"in_channels": 4,
"interpolation_scale": null,
"neuron": {
"auto_cast": null,
"auto_cast_type": null,
"compiler_type": "neuronx-cc",
"compiler_version": "2.17.194.0+d312836f",
"dynamic_batch_size": false,
"inline_weights_to_neff": false,
"optlevel": "2",
"output_attentions": false,
"output_hidden_states": false,
"static_batch_size": 1,
"static_encoder_hidden_size": 4096,
"static_height": 64,
"static_num_channels": 4,
"static_patch_size": 2,
"static_sequence_length": 120,
"static_vae_scale_factor": 8,
"static_width": 64,
"task": "semantic-segmentation",
"tensor_parallel_size": 1
},
"norm_elementwise_affine": false,
"norm_eps": 1e-06,
"norm_num_groups": 32,
"norm_type": "ada_norm_single",
"num_attention_heads": 16,
"num_embeds_ada_norm": 1000,
"num_layers": 28,
"num_vector_embeds": null,
"only_cross_attention": false,
"out_channels": 8,
"patch_size": 2,
"upcast_attention": false,
"use_additional_conditions": null,
"use_linear_projection": false
}
}