{ "_entry_class": "SingleModelCacheEntry", "_model_id": "HuggingFaceTB/SmolLM3-3B", "_task": "text-generation", "architectures": [ "SmolLM3ForCausalLM" ], "attention_bias": false, "attention_dropout": 0.0, "hidden_act": "silu", "hidden_size": 2048, "initializer_range": 0.02, "intermediate_size": 11008, "layer_types": [ "full_attention", "full_attention", "full_attention", "full_attention", "full_attention", "full_attention", "full_attention", "full_attention", "full_attention", "full_attention", "full_attention", "full_attention", "full_attention", "full_attention", "full_attention", "full_attention", "full_attention", "full_attention", "full_attention", "full_attention", "full_attention", "full_attention", "full_attention", "full_attention", "full_attention", "full_attention", "full_attention", "full_attention", "full_attention", "full_attention", "full_attention", "full_attention", "full_attention", "full_attention", "full_attention", "full_attention" ], "max_position_embeddings": 65536, "max_window_layers": 28, "mlp_bias": false, "model_type": "smollm3", "neuron": { "_serialized_key": "NxDNeuronConfig", "batch_size": 4, "capacity_factor": null, "cc_pipeline_tiling_factor": 2, "checkpoint_id": "HuggingFaceTB/SmolLM3-3B", "checkpoint_revision": "1c00fc78bd9cf90108046bc433cb34992480f1c1", "continuous_batching": true, "enable_bucketing": false, "ep_degree": 1, "fused_qkv": true, "glu_mlp": true, "local_ranks_size": 2, "logical_nc_config": 1, "max_batch_size": 4, "max_context_length": 4096, "max_topk": 256, "n_active_tokens": 4096, "neuronxcc_version": "2.19.8089.0+8ab9f450", "on_device_sampling": true, "optimum_neuron_version": "0.3.1.dev5", "output_logits": false, "pp_degree": 1, "sequence_length": 4096, "speculation_length": 0, "start_rank_id": 0, "target": null, "torch_dtype": "bfloat16", "tp_degree": 2 }, "no_rope_layer_interval": 4, "no_rope_layers": [ 1, 1, 1, 0, 1, 1, 1, 0, 1, 1, 1, 0, 1, 1, 1, 0, 1, 1, 1, 0, 1, 1, 1, 0, 1, 1, 1, 0, 1, 1, 1, 0, 1, 1, 1, 0 ], "num_attention_heads": 16, "num_hidden_layers": 36, "num_key_value_heads": 4, "pretraining_tp": 2, "rms_norm_eps": 1e-06, "rope_scaling": null, "rope_theta": 5000000.0, "sliding_window": null, "use_cache": false, "use_sliding_window": false, "vocab_size": 128256 }