{
"add_skip_keys": false,
"dequantize_fp32": false,
"dynamic_loss_threshold": 0.01,
"group_size": 0,
"is_integer": true,
"is_training": false,
"modules_dtype_dict": {
"int5": [
"single_transformer_blocks.19.attn.to_qkv_mlp_proj.weight",
"transformer_blocks.0.attn.to_k.weight"
],
"uint4": [
"single_transformer_blocks.0.attn.to_out.weight",
"single_transformer_blocks.0.attn.to_qkv_mlp_proj.weight",
"single_transformer_blocks.1.attn.to_out.weight",
"single_transformer_blocks.1.attn.to_qkv_mlp_proj.weight",
"single_transformer_blocks.10.attn.to_out.weight",
"single_transformer_blocks.10.attn.to_qkv_mlp_proj.weight",
"single_transformer_blocks.11.attn.to_out.weight",
"single_transformer_blocks.11.attn.to_qkv_mlp_proj.weight",
"single_transformer_blocks.12.attn.to_out.weight",
"single_transformer_blocks.12.attn.to_qkv_mlp_proj.weight",
"single_transformer_blocks.13.attn.to_out.weight",
"single_transformer_blocks.13.attn.to_qkv_mlp_proj.weight",
"single_transformer_blocks.14.attn.to_out.weight",
"single_transformer_blocks.14.attn.to_qkv_mlp_proj.weight",
"single_transformer_blocks.15.attn.to_out.weight",
"single_transformer_blocks.15.attn.to_qkv_mlp_proj.weight",
"single_transformer_blocks.16.attn.to_out.weight",
"single_transformer_blocks.16.attn.to_qkv_mlp_proj.weight",
"single_transformer_blocks.17.attn.to_out.weight",
"single_transformer_blocks.17.attn.to_qkv_mlp_proj.weight",
"single_transformer_blocks.18.attn.to_out.weight",
"single_transformer_blocks.18.attn.to_qkv_mlp_proj.weight",
"single_transformer_blocks.19.attn.to_out.weight",
"single_transformer_blocks.2.attn.to_out.weight",
"single_transformer_blocks.2.attn.to_qkv_mlp_proj.weight",
"single_transformer_blocks.3.attn.to_out.weight",
"single_transformer_blocks.3.attn.to_qkv_mlp_proj.weight",
"single_transformer_blocks.4.attn.to_out.weight",
"single_transformer_blocks.4.attn.to_qkv_mlp_proj.weight",
"single_transformer_blocks.5.attn.to_out.weight",
"single_transformer_blocks.5.attn.to_qkv_mlp_proj.weight",
"single_transformer_blocks.6.attn.to_out.weight",
"single_transformer_blocks.6.attn.to_qkv_mlp_proj.weight",
"single_transformer_blocks.7.attn.to_out.weight",
"single_transformer_blocks.7.attn.to_qkv_mlp_proj.weight",
"single_transformer_blocks.8.attn.to_out.weight",
"single_transformer_blocks.8.attn.to_qkv_mlp_proj.weight",
"single_transformer_blocks.9.attn.to_out.weight",
"single_transformer_blocks.9.attn.to_qkv_mlp_proj.weight",
"transformer_blocks.0.attn.add_k_proj.weight",
"transformer_blocks.0.attn.add_q_proj.weight",
"transformer_blocks.0.attn.add_v_proj.weight",
"transformer_blocks.0.attn.to_add_out.weight",
"transformer_blocks.0.attn.to_out.0.weight",
"transformer_blocks.0.attn.to_q.weight",
"transformer_blocks.0.attn.to_v.weight",
"transformer_blocks.0.ff.linear_in.weight",
"transformer_blocks.0.ff.linear_out.weight",
"transformer_blocks.0.ff_context.linear_in.weight",
"transformer_blocks.0.ff_context.linear_out.weight",
"transformer_blocks.1.attn.add_k_proj.weight",
"transformer_blocks.1.attn.add_q_proj.weight",
"transformer_blocks.1.attn.add_v_proj.weight",
"transformer_blocks.1.attn.to_add_out.weight",
"transformer_blocks.1.attn.to_k.weight",
"transformer_blocks.1.attn.to_out.0.weight",
"transformer_blocks.1.attn.to_q.weight",
"transformer_blocks.1.attn.to_v.weight",
"transformer_blocks.1.ff.linear_in.weight",
"transformer_blocks.1.ff.linear_out.weight",
"transformer_blocks.1.ff_context.linear_in.weight",
"transformer_blocks.1.ff_context.linear_out.weight",
"transformer_blocks.2.attn.add_k_proj.weight",
"transformer_blocks.2.attn.add_q_proj.weight",
"transformer_blocks.2.attn.add_v_proj.weight",
"transformer_blocks.2.attn.to_add_out.weight",
"transformer_blocks.2.attn.to_k.weight",
"transformer_blocks.2.attn.to_out.0.weight",
"transformer_blocks.2.attn.to_q.weight",
"transformer_blocks.2.attn.to_v.weight",
"transformer_blocks.2.ff.linear_in.weight",
"transformer_blocks.2.ff.linear_out.weight",
"transformer_blocks.2.ff_context.linear_in.weight",
"transformer_blocks.2.ff_context.linear_out.weight",
"transformer_blocks.3.attn.add_k_proj.weight",
"transformer_blocks.3.attn.add_q_proj.weight",
"transformer_blocks.3.attn.add_v_proj.weight",
"transformer_blocks.3.attn.to_add_out.weight",
"transformer_blocks.3.attn.to_k.weight",
"transformer_blocks.3.attn.to_out.0.weight",
"transformer_blocks.3.attn.to_q.weight",
"transformer_blocks.3.attn.to_v.weight",
"transformer_blocks.3.ff.linear_in.weight",
"transformer_blocks.3.ff.linear_out.weight",
"transformer_blocks.3.ff_context.linear_in.weight",
"transformer_blocks.3.ff_context.linear_out.weight",
"transformer_blocks.4.attn.add_k_proj.weight",
"transformer_blocks.4.attn.add_q_proj.weight",
"transformer_blocks.4.attn.add_v_proj.weight",
"transformer_blocks.4.attn.to_add_out.weight",
"transformer_blocks.4.attn.to_k.weight",
"transformer_blocks.4.attn.to_out.0.weight",
"transformer_blocks.4.attn.to_q.weight",
"transformer_blocks.4.attn.to_v.weight",
"transformer_blocks.4.ff.linear_in.weight",
"transformer_blocks.4.ff.linear_out.weight",
"transformer_blocks.4.ff_context.linear_in.weight",
"transformer_blocks.4.ff_context.linear_out.weight"
]
},
"modules_to_not_convert": [
"double_stream_modulation_img",
"time_guidance_embed",
".proj_out",
"norm_out",
"x_embedder",
"double_stream_modulation_txt",
"single_stream_modulation",
"context_embedder"
],
"non_blocking": false,
"quant_conv": false,
"quant_method": "sdnq",
"quantization_device": null,
"quantized_matmul_dtype": null,
"return_device": null,
"sdnq_version": "0.1.4",
"svd_rank": 32,
"svd_steps": 32,
"use_dynamic_quantization": true,
"use_grad_ckpt": true,
"use_quantized_matmul": false,
"use_quantized_matmul_conv": false,
"use_static_quantization": true,
"use_stochastic_rounding": false,
"use_svd": false,
"weights_dtype": "uint4"
}