Chua, Vui Seng committed on
Commit: 51b709b
1 Parent(s): 7b5d626
Add collaterals
- .gitattributes +8 -0
- README.md +113 -0
- XP_layer_wise_sparsity_global_rate_26.51.csv +200 -0
- XP_layer_wise_sparsity_global_rate_26.51.md +201 -0
- XP_linear_layer_sparsity_20M_params_57.92_sparsity.csv +73 -0
- XP_linear_layer_sparsity_20M_params_57.92_sparsity.md +74 -0
- XP_onnx_sparsity.csv +77 -0
- XP_onnx_sparsity.md +78 -0
- all_results.json +5 -0
- bert-base-squadv1-block-pruning-hybrid-filled-lt-nncf-57.92sparse-qat-lt.onnx +3 -0
- checkpoint-21750/config.json +115 -0
- checkpoint-21750/optimizer.pt +3 -0
- checkpoint-21750/pytorch_model.bin +3 -0
- checkpoint-21750/rng_state.pth +3 -0
- checkpoint-21750/scheduler.pt +3 -0
- checkpoint-21750/special_tokens_map.json +1 -0
- checkpoint-21750/tokenizer.json +0 -0
- checkpoint-21750/tokenizer_config.json +1 -0
- checkpoint-21750/trainer_state.json +3 -0
- checkpoint-21750/training_args.bin +3 -0
- checkpoint-21750/vocab.txt +0 -0
- compressed_graph.dot +0 -0
- config.json +115 -0
- eval_XP_results.json +5 -0
- eval_nbest_predictions.json +3 -0
- eval_predictions.json +0 -0
- nncf_bert_squad_sparsity.json +72 -0
- original_graph.dot +0 -0
- pytorch_model.bin +3 -0
- special_tokens_map.json +1 -0
- tokenizer.json +0 -0
- tokenizer_config.json +1 -0
- train_results.json +8 -0
- trainer_state.json +3 -0
- training_args.bin +3 -0
- vocab.txt +0 -0
.gitattributes
CHANGED
@@ -25,3 +25,11 @@ saved_model/**/* filter=lfs diff=lfs merge=lfs -text
 *.zip filter=lfs diff=lfs merge=lfs -text
 *.zstandard filter=lfs diff=lfs merge=lfs -text
 *tfevents* filter=lfs diff=lfs merge=lfs -text
+bert-base-squadv1-block-pruning-hybrid-filled-lt-nncf-57.92sparse-qat-lt.onnx filter=lfs diff=lfs merge=lfs -text
+pytorch_model.bin filter=lfs diff=lfs merge=lfs -text
+trainer_state.json filter=lfs diff=lfs merge=lfs -text
+sparsified_model/pytorch_model.bin filter=lfs diff=lfs merge=lfs -text
+eval_nbest_predictions.json filter=lfs diff=lfs merge=lfs -text
+checkpoint-21750/pytorch_model.bin filter=lfs diff=lfs merge=lfs -text
+checkpoint-21750/trainer_state.json filter=lfs diff=lfs merge=lfs -text
+checkpoint-21750/optimizer.pt filter=lfs diff=lfs merge=lfs -text
README.md
ADDED
@@ -0,0 +1,113 @@
This model is a downstream optimization of [```vuiseng9/bert-base-squadv1-block-pruning-hybrid-filled-lt```](https://huggingface.co/vuiseng9/bert-base-squadv1-block-pruning-hybrid-filled-lt) using [OpenVINO/NNCF](https://github.com/openvinotoolkit/nncf). The applied optimizations are:
1. Magnitude sparsification at 57.92% upon initialization, so that sparsity over all linear layers of bert-base is at 90%. Parameters are ranked globally by their absolute norm. Only the linear layers of self-attention and the FFNN are targeted.
2. NNCF Quantization-Aware Training: symmetric 8-bit quantization for both weights and activations on all learnable layers.
3. Custom distillation with the large model ```bert-large-uncased-whole-word-masking-finetuned-squad``` as the teacher.

```
eval_exact_match = 80.4541
eval_f1 = 87.6832
eval_samples = 10784
```

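The sparsification and quantization above are driven by the NNCF configuration `nncf_bert_squad_sparsity.json` shipped in this repo. The snippet below is a minimal sketch of how such a config is applied with the stock NNCF PyTorch API; the actual integration lives inside the forked `run_qa.py` (via `--nncf_config`), which also registers initialization dataloaders, so treat this as illustrative rather than the exact training code.

```python
# Minimal sketch (not the exact run_qa.py integration): wrap the base model with NNCF
# so that the "magnitude_sparsity" and "quantization" sections of the JSON config take effect.
from nncf import NNCFConfig
from nncf.torch import create_compressed_model
from transformers import AutoModelForQuestionAnswering

model = AutoModelForQuestionAnswering.from_pretrained(
    "vuiseng9/bert-base-squadv1-block-pruning-hybrid"
)
nncf_config = NNCFConfig.from_json("nncf_bert_squad_sparsity.json")

# compression_ctrl exposes sparsity/quantization statistics and ONNX export helpers;
# compressed_model is the nn.Module that is then fine-tuned (QAT + distillation).
compression_ctrl, compressed_model = create_compressed_model(model, nncf_config)
```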
# Setup
```bash
# OpenVINO/NNCF
git clone https://github.com/vuiseng9/nncf && cd nncf
git checkout tld-poc
git reset --hard 1dec7afe7a4b567c059fcf287ea2c234980fded2
python setup.py develop
pip install -r examples/torch/requirements.txt

# Huggingface nn_pruning
git clone https://github.com/vuiseng9/nn_pruning && cd nn_pruning
git checkout reproduce-evaluation
git reset --hard 2d4e196d694c465e43e5fbce6c3836d0a60e1446
pip install -e ".[dev]"

# Huggingface Transformers
git clone https://github.com/vuiseng9/transformers && cd transformers
git checkout tld-poc
git reset --hard 10a1e29d84484e48fd106f58957d9ffc89dc43c5
pip install -e .
head -n 1 examples/pytorch/question-answering/requirements.txt | xargs -i pip install {}

# Additional dependencies
pip install onnx
```

# Train

```bash
git clone https://huggingface.co/vuiseng9/bert-base-squadv1-block-pruning-hybrid-filled-lt
BASE_MODEL=/path/to/cloned_repo_above #to-revise

wget https://huggingface.co/vuiseng9/bert-base-squadv1-block-pruning-hybrid-filled-lt-nncf-57.92sparse-qat-lt/raw/main/nncf_bert_squad_sparsity.json
NNCF_CFG=/path/to/downloaded_nncf_cfg_above #to-revise

OUTROOT=/path/to/train_output_root #to-revise
WORKDIR=transformers/examples/pytorch/question-answering #to-revise
RUNID=bert-base-squadv1-block-pruning-hybrid-filled-lt-nncf-57.92sparse-qat-lt

cd $WORKDIR

OUTDIR=$OUTROOT/$RUNID
mkdir -p $OUTDIR

export CUDA_VISIBLE_DEVICES=0
NEPOCH=5

python run_qa.py \
    --model_name_or_path vuiseng9/bert-base-squadv1-block-pruning-hybrid \
    --optimize_model_before_eval \
    --optimized_checkpoint $BASE_MODEL \
    --dataset_name squad \
    --do_eval \
    --do_train \
    --evaluation_strategy steps \
    --eval_steps 250 \
    --learning_rate 3e-5 \
    --lr_scheduler_type cosine_with_restarts \
    --warmup_ratio 0.25 \
    --cosine_cycles 1 \
    --teacher bert-large-uncased-whole-word-masking-finetuned-squad \
    --teacher_ratio 0.9 \
    --num_train_epochs $NEPOCH \
    --per_device_eval_batch_size 128 \
    --per_device_train_batch_size 16 \
    --max_seq_length 384 \
    --doc_stride 128 \
    --save_steps 250 \
    --nncf_config $NNCF_CFG \
    --logging_steps 1 \
    --overwrite_output_dir \
    --run_name $RUNID \
    --output_dir $OUTDIR
```

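Once training has filled in the sparsity masks, the achieved per-layer sparsity can be audited by counting zero-valued weights, which is how reports like the `XP_*layer_wise_sparsity*` collaterals in this repo can be produced. Below is a rough sketch in plain PyTorch, assuming the masks have already been materialized in the stored weights and using a hypothetical output file name.

```python
# Sketch: per-parameter sparsity report similar to the XP_*layer_wise_sparsity*.csv collaterals.
# Assumes zeroed weights are materialized in the checkpoint (i.e. masks already applied).
import csv
import torch

state_dict = torch.load("pytorch_model.bin", map_location="cpu")

rows = []
for name, tensor in state_dict.items():
    if not torch.is_floating_point(tensor):
        continue  # skip integer buffers such as position ids
    nparam = tensor.numel()
    nnz = int(torch.count_nonzero(tensor))
    rows.append({"layer_id": name, "shape": list(tensor.shape),
                 "nparam": nparam, "nnz": nnz, "sparsity": 1.0 - nnz / nparam})

with open("layer_wise_sparsity.csv", "w", newline="") as f:  # hypothetical output path
    writer = csv.DictWriter(f, fieldnames=rows[0].keys())
    writer.writeheader()
    writer.writerows(rows)
```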
# Eval
This repo must be cloned locally.
```bash
git clone https://huggingface.co/vuiseng9/bert-base-squadv1-block-pruning-hybrid-filled-lt-nncf-57.92sparse-qat-lt
MODELROOT=/path/to/cloned_repo_above #to-revise

export CUDA_VISIBLE_DEVICES=0

OUTDIR=eval-bert-base-squadv1-block-pruning-hybrid-filled-lt-nncf-57.92sparse-qat-lt
WORKDIR=transformers/examples/pytorch/question-answering #to-revise
cd $WORKDIR
mkdir $OUTDIR

nohup python run_qa.py \
    --model_name_or_path vuiseng9/bert-base-squadv1-block-pruning-hybrid \
    --dataset_name squad \
    --optimize_model_before_eval \
    --qat_checkpoint $MODELROOT/checkpoint-21750 \
    --nncf_config $MODELROOT/nncf_bert_squad_sparsity.json \
    --to_onnx $OUTDIR/bert-base-squadv1-block-pruning-hybrid-filled-lt-nncf-57.92sparse-qat-lt.onnx \
    --do_eval \
    --per_device_eval_batch_size 128 \
    --max_seq_length 384 \
    --doc_stride 128 \
    --overwrite_output_dir \
    --output_dir $OUTDIR 2>&1 | tee $OUTDIR/run.log &
```
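The Eval command also exports an ONNX graph via `--to_onnx`. Below is a minimal sketch of querying that graph with ONNX Runtime; the input and output names and their ordering follow the usual BERT QA export convention (`input_ids`, `attention_mask`, `token_type_ids`; start/end logits) and are assumptions to verify against `session.get_inputs()` / `get_outputs()` for this particular export.

```python
# Sketch: run the exported ONNX model with ONNX Runtime.
# Input/output names and ordering are assumptions; verify them on the actual graph.
import numpy as np
import onnxruntime as ort
from transformers import AutoTokenizer

onnx_path = "bert-base-squadv1-block-pruning-hybrid-filled-lt-nncf-57.92sparse-qat-lt.onnx"
tokenizer = AutoTokenizer.from_pretrained("vuiseng9/bert-base-squadv1-block-pruning-hybrid")

question = "What dataset is the model fine-tuned on?"
context = "The model is fine-tuned on SQuAD v1.1 with quantization-aware training and distillation."
enc = tokenizer(question, context, max_length=384, padding="max_length",
                truncation=True, return_tensors="np")

session = ort.InferenceSession(onnx_path)
print([i.name for i in session.get_inputs()])  # check the assumed input names

start_logits, end_logits = session.run(None, {
    "input_ids": enc["input_ids"].astype(np.int64),
    "attention_mask": enc["attention_mask"].astype(np.int64),
    "token_type_ids": enc["token_type_ids"].astype(np.int64),
})
start, end = int(np.argmax(start_logits)), int(np.argmax(end_logits))
print(tokenizer.decode(enc["input_ids"][0][start:end + 1]))
```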
XP_layer_wise_sparsity_global_rate_26.51.csv
ADDED
@@ -0,0 +1,200 @@
| 1 |
+
,layer_id,layer_type,param_type,shape,nparam,nnz,sparsity
|
| 2 |
+
0,nncf_module.bert.embeddings.word_embeddings,NNCFEmbedding,weight,"[30522, 768]",23440896,23440896,0.0
|
| 3 |
+
1,nncf_module.bert.embeddings.position_embeddings,NNCFEmbedding,weight,"[512, 768]",393216,393216,0.0
|
| 4 |
+
2,nncf_module.bert.embeddings.token_type_embeddings,NNCFEmbedding,weight,"[2, 768]",1536,1536,0.0
|
| 5 |
+
3,nncf_module.bert.embeddings.LayerNorm,LayerNorm,weight,[768],768,768,0.0
|
| 6 |
+
4,nncf_module.bert.embeddings.LayerNorm,LayerNorm,bias,[768],768,768,0.0
|
| 7 |
+
5,nncf_module.bert.encoder.layer.0.attention.self.query,NNCFLinear,weight,"[320, 768]",245760,93507,0.6195189952850342
|
| 8 |
+
6,nncf_module.bert.encoder.layer.0.attention.self.query,NNCFLinear,bias,[320],320,320,0.0
|
| 9 |
+
7,nncf_module.bert.encoder.layer.0.attention.self.key,NNCFLinear,weight,"[320, 768]",245760,98224,0.6003254652023315
|
| 10 |
+
8,nncf_module.bert.encoder.layer.0.attention.self.key,NNCFLinear,bias,[320],320,320,0.0
|
| 11 |
+
9,nncf_module.bert.encoder.layer.0.attention.self.value,NNCFLinear,weight,"[320, 768]",245760,113596,0.5377767086029053
|
| 12 |
+
10,nncf_module.bert.encoder.layer.0.attention.self.value,NNCFLinear,bias,[320],320,320,0.0
|
| 13 |
+
11,nncf_module.bert.encoder.layer.0.attention.output.dense,NNCFLinear,weight,"[768, 320]",245760,117320,0.5226236581802368
|
| 14 |
+
12,nncf_module.bert.encoder.layer.0.attention.output.dense,NNCFLinear,bias,[768],768,768,0.0
|
| 15 |
+
13,nncf_module.bert.encoder.layer.0.attention.output.LayerNorm,LayerNorm,weight,[768],768,768,0.0
|
| 16 |
+
14,nncf_module.bert.encoder.layer.0.attention.output.LayerNorm,LayerNorm,bias,[768],768,768,0.0
|
| 17 |
+
15,nncf_module.bert.encoder.layer.0.intermediate.dense,NNCFLinear,weight,"[185, 768]",142080,97047,0.31695520877838135
|
| 18 |
+
16,nncf_module.bert.encoder.layer.0.intermediate.dense,NNCFLinear,bias,[185],185,185,0.0
|
| 19 |
+
17,nncf_module.bert.encoder.layer.0.output.dense,NNCFLinear,weight,"[768, 185]",142080,94629,0.33397382497787476
|
| 20 |
+
18,nncf_module.bert.encoder.layer.0.output.dense,NNCFLinear,bias,[768],768,768,0.0
|
| 21 |
+
19,nncf_module.bert.encoder.layer.0.output.LayerNorm,LayerNorm,weight,[768],768,768,0.0
|
| 22 |
+
20,nncf_module.bert.encoder.layer.0.output.LayerNorm,LayerNorm,bias,[768],768,768,0.0
|
| 23 |
+
21,nncf_module.bert.encoder.layer.1.attention.self.query,NNCFLinear,weight,"[320, 768]",245760,118358,0.5184000730514526
|
| 24 |
+
22,nncf_module.bert.encoder.layer.1.attention.self.query,NNCFLinear,bias,[320],320,320,0.0
|
| 25 |
+
23,nncf_module.bert.encoder.layer.1.attention.self.key,NNCFLinear,weight,"[320, 768]",245760,118132,0.5193196535110474
|
| 26 |
+
24,nncf_module.bert.encoder.layer.1.attention.self.key,NNCFLinear,bias,[320],320,320,0.0
|
| 27 |
+
25,nncf_module.bert.encoder.layer.1.attention.self.value,NNCFLinear,weight,"[320, 768]",245760,107518,0.5625081062316895
|
| 28 |
+
26,nncf_module.bert.encoder.layer.1.attention.self.value,NNCFLinear,bias,[320],320,320,0.0
|
| 29 |
+
27,nncf_module.bert.encoder.layer.1.attention.output.dense,NNCFLinear,weight,"[768, 320]",245760,111172,0.5476399660110474
|
| 30 |
+
28,nncf_module.bert.encoder.layer.1.attention.output.dense,NNCFLinear,bias,[768],768,768,0.0
|
| 31 |
+
29,nncf_module.bert.encoder.layer.1.attention.output.LayerNorm,LayerNorm,weight,[768],768,768,0.0
|
| 32 |
+
30,nncf_module.bert.encoder.layer.1.attention.output.LayerNorm,LayerNorm,bias,[768],768,768,0.0
|
| 33 |
+
31,nncf_module.bert.encoder.layer.1.intermediate.dense,NNCFLinear,weight,"[315, 768]",241920,148727,0.3852223753929138
|
| 34 |
+
32,nncf_module.bert.encoder.layer.1.intermediate.dense,NNCFLinear,bias,[315],315,315,0.0
|
| 35 |
+
33,nncf_module.bert.encoder.layer.1.output.dense,NNCFLinear,weight,"[768, 315]",241920,143174,0.4081762433052063
|
| 36 |
+
34,nncf_module.bert.encoder.layer.1.output.dense,NNCFLinear,bias,[768],768,768,0.0
|
| 37 |
+
35,nncf_module.bert.encoder.layer.1.output.LayerNorm,LayerNorm,weight,[768],768,768,0.0
|
| 38 |
+
36,nncf_module.bert.encoder.layer.1.output.LayerNorm,LayerNorm,bias,[768],768,768,0.0
|
| 39 |
+
37,nncf_module.bert.encoder.layer.2.attention.self.query,NNCFLinear,weight,"[576, 768]",442368,162784,0.6320167779922485
|
| 40 |
+
38,nncf_module.bert.encoder.layer.2.attention.self.query,NNCFLinear,bias,[576],576,576,0.0
|
| 41 |
+
39,nncf_module.bert.encoder.layer.2.attention.self.key,NNCFLinear,weight,"[576, 768]",442368,164797,0.6274662613868713
|
| 42 |
+
40,nncf_module.bert.encoder.layer.2.attention.self.key,NNCFLinear,bias,[576],576,576,0.0
|
| 43 |
+
41,nncf_module.bert.encoder.layer.2.attention.self.value,NNCFLinear,weight,"[576, 768]",442368,135705,0.6932305097579956
|
| 44 |
+
42,nncf_module.bert.encoder.layer.2.attention.self.value,NNCFLinear,bias,[576],576,576,0.0
|
| 45 |
+
43,nncf_module.bert.encoder.layer.2.attention.output.dense,NNCFLinear,weight,"[768, 576]",442368,138448,0.6870298385620117
|
| 46 |
+
44,nncf_module.bert.encoder.layer.2.attention.output.dense,NNCFLinear,bias,[768],768,768,0.0
|
| 47 |
+
45,nncf_module.bert.encoder.layer.2.attention.output.LayerNorm,LayerNorm,weight,[768],768,768,0.0
|
| 48 |
+
46,nncf_module.bert.encoder.layer.2.attention.output.LayerNorm,LayerNorm,bias,[768],768,768,0.0
|
| 49 |
+
47,nncf_module.bert.encoder.layer.2.intermediate.dense,NNCFLinear,weight,"[339, 768]",260352,154043,0.40832793712615967
|
| 50 |
+
48,nncf_module.bert.encoder.layer.2.intermediate.dense,NNCFLinear,bias,[339],339,339,0.0
|
| 51 |
+
49,nncf_module.bert.encoder.layer.2.output.dense,NNCFLinear,weight,"[768, 339]",260352,150923,0.42031168937683105
|
| 52 |
+
50,nncf_module.bert.encoder.layer.2.output.dense,NNCFLinear,bias,[768],768,768,0.0
|
| 53 |
+
51,nncf_module.bert.encoder.layer.2.output.LayerNorm,LayerNorm,weight,[768],768,768,0.0
|
| 54 |
+
52,nncf_module.bert.encoder.layer.2.output.LayerNorm,LayerNorm,bias,[768],768,768,0.0
|
| 55 |
+
53,nncf_module.bert.encoder.layer.3.attention.self.query,NNCFLinear,weight,"[576, 768]",442368,170686,0.6141538619995117
|
| 56 |
+
54,nncf_module.bert.encoder.layer.3.attention.self.query,NNCFLinear,bias,[576],576,576,0.0
|
| 57 |
+
55,nncf_module.bert.encoder.layer.3.attention.self.key,NNCFLinear,weight,"[576, 768]",442368,178480,0.5965349674224854
|
| 58 |
+
56,nncf_module.bert.encoder.layer.3.attention.self.key,NNCFLinear,bias,[576],576,576,0.0
|
| 59 |
+
57,nncf_module.bert.encoder.layer.3.attention.self.value,NNCFLinear,weight,"[576, 768]",442368,172200,0.6107313632965088
|
| 60 |
+
58,nncf_module.bert.encoder.layer.3.attention.self.value,NNCFLinear,bias,[576],576,576,0.0
|
| 61 |
+
59,nncf_module.bert.encoder.layer.3.attention.output.dense,NNCFLinear,weight,"[768, 576]",442368,169194,0.6175265908241272
|
| 62 |
+
60,nncf_module.bert.encoder.layer.3.attention.output.dense,NNCFLinear,bias,[768],768,768,0.0
|
| 63 |
+
61,nncf_module.bert.encoder.layer.3.attention.output.LayerNorm,LayerNorm,weight,[768],768,768,0.0
|
| 64 |
+
62,nncf_module.bert.encoder.layer.3.attention.output.LayerNorm,LayerNorm,bias,[768],768,768,0.0
|
| 65 |
+
63,nncf_module.bert.encoder.layer.3.intermediate.dense,NNCFLinear,weight,"[368, 768]",282624,163234,0.4224340319633484
|
| 66 |
+
64,nncf_module.bert.encoder.layer.3.intermediate.dense,NNCFLinear,bias,[368],368,368,0.0
|
| 67 |
+
65,nncf_module.bert.encoder.layer.3.output.dense,NNCFLinear,weight,"[768, 368]",282624,157364,0.4432036876678467
|
| 68 |
+
66,nncf_module.bert.encoder.layer.3.output.dense,NNCFLinear,bias,[768],768,768,0.0
|
| 69 |
+
67,nncf_module.bert.encoder.layer.3.output.LayerNorm,LayerNorm,weight,[768],768,768,0.0
|
| 70 |
+
68,nncf_module.bert.encoder.layer.3.output.LayerNorm,LayerNorm,bias,[768],768,768,0.0
|
| 71 |
+
69,nncf_module.bert.encoder.layer.4.attention.self.query,NNCFLinear,weight,"[576, 768]",442368,176138,0.6018292307853699
|
| 72 |
+
70,nncf_module.bert.encoder.layer.4.attention.self.query,NNCFLinear,bias,[576],576,576,0.0
|
| 73 |
+
71,nncf_module.bert.encoder.layer.4.attention.self.key,NNCFLinear,weight,"[576, 768]",442368,177351,0.5990871787071228
|
| 74 |
+
72,nncf_module.bert.encoder.layer.4.attention.self.key,NNCFLinear,bias,[576],576,576,0.0
|
| 75 |
+
73,nncf_module.bert.encoder.layer.4.attention.self.value,NNCFLinear,weight,"[576, 768]",442368,164248,0.6287072896957397
|
| 76 |
+
74,nncf_module.bert.encoder.layer.4.attention.self.value,NNCFLinear,bias,[576],576,576,0.0
|
| 77 |
+
75,nncf_module.bert.encoder.layer.4.attention.output.dense,NNCFLinear,weight,"[768, 576]",442368,159471,0.6395059823989868
|
| 78 |
+
76,nncf_module.bert.encoder.layer.4.attention.output.dense,NNCFLinear,bias,[768],768,768,0.0
|
| 79 |
+
77,nncf_module.bert.encoder.layer.4.attention.output.LayerNorm,LayerNorm,weight,[768],768,768,0.0
|
| 80 |
+
78,nncf_module.bert.encoder.layer.4.attention.output.LayerNorm,LayerNorm,bias,[768],768,768,0.0
|
| 81 |
+
79,nncf_module.bert.encoder.layer.4.intermediate.dense,NNCFLinear,weight,"[386, 768]",296448,167744,0.4341537356376648
|
| 82 |
+
80,nncf_module.bert.encoder.layer.4.intermediate.dense,NNCFLinear,bias,[386],386,386,0.0
|
| 83 |
+
81,nncf_module.bert.encoder.layer.4.output.dense,NNCFLinear,weight,"[768, 386]",296448,159961,0.46040791273117065
|
| 84 |
+
82,nncf_module.bert.encoder.layer.4.output.dense,NNCFLinear,bias,[768],768,768,0.0
|
| 85 |
+
83,nncf_module.bert.encoder.layer.4.output.LayerNorm,LayerNorm,weight,[768],768,768,0.0
|
| 86 |
+
84,nncf_module.bert.encoder.layer.4.output.LayerNorm,LayerNorm,bias,[768],768,768,0.0
|
| 87 |
+
85,nncf_module.bert.encoder.layer.5.attention.self.query,NNCFLinear,weight,"[384, 768]",294912,114241,0.6126267910003662
|
| 88 |
+
86,nncf_module.bert.encoder.layer.5.attention.self.query,NNCFLinear,bias,[384],384,384,0.0
|
| 89 |
+
87,nncf_module.bert.encoder.layer.5.attention.self.key,NNCFLinear,weight,"[384, 768]",294912,132821,0.5496249794960022
|
| 90 |
+
88,nncf_module.bert.encoder.layer.5.attention.self.key,NNCFLinear,bias,[384],384,384,0.0
|
| 91 |
+
89,nncf_module.bert.encoder.layer.5.attention.self.value,NNCFLinear,weight,"[384, 768]",294912,135092,0.5419243574142456
|
| 92 |
+
90,nncf_module.bert.encoder.layer.5.attention.self.value,NNCFLinear,bias,[384],384,384,0.0
|
| 93 |
+
91,nncf_module.bert.encoder.layer.5.attention.output.dense,NNCFLinear,weight,"[768, 384]",294912,132344,0.5512424111366272
|
| 94 |
+
92,nncf_module.bert.encoder.layer.5.attention.output.dense,NNCFLinear,bias,[768],768,768,0.0
|
| 95 |
+
93,nncf_module.bert.encoder.layer.5.attention.output.LayerNorm,LayerNorm,weight,[768],768,768,0.0
|
| 96 |
+
94,nncf_module.bert.encoder.layer.5.attention.output.LayerNorm,LayerNorm,bias,[768],768,768,0.0
|
| 97 |
+
95,nncf_module.bert.encoder.layer.5.intermediate.dense,NNCFLinear,weight,"[336, 768]",258048,153822,0.4039015769958496
|
| 98 |
+
96,nncf_module.bert.encoder.layer.5.intermediate.dense,NNCFLinear,bias,[336],336,336,0.0
|
| 99 |
+
97,nncf_module.bert.encoder.layer.5.output.dense,NNCFLinear,weight,"[768, 336]",258048,145684,0.435438334941864
|
| 100 |
+
98,nncf_module.bert.encoder.layer.5.output.dense,NNCFLinear,bias,[768],768,768,0.0
|
| 101 |
+
99,nncf_module.bert.encoder.layer.5.output.LayerNorm,LayerNorm,weight,[768],768,768,0.0
|
| 102 |
+
100,nncf_module.bert.encoder.layer.5.output.LayerNorm,LayerNorm,bias,[768],768,768,0.0
|
| 103 |
+
101,nncf_module.bert.encoder.layer.6.attention.self.query,NNCFLinear,weight,"[448, 768]",344064,131784,0.6169782280921936
|
| 104 |
+
102,nncf_module.bert.encoder.layer.6.attention.self.query,NNCFLinear,bias,[448],448,448,0.0
|
| 105 |
+
103,nncf_module.bert.encoder.layer.6.attention.self.key,NNCFLinear,weight,"[448, 768]",344064,144539,0.5799066424369812
|
| 106 |
+
104,nncf_module.bert.encoder.layer.6.attention.self.key,NNCFLinear,bias,[448],448,448,0.0
|
| 107 |
+
105,nncf_module.bert.encoder.layer.6.attention.self.value,NNCFLinear,weight,"[448, 768]",344064,131107,0.6189458966255188
|
| 108 |
+
106,nncf_module.bert.encoder.layer.6.attention.self.value,NNCFLinear,bias,[448],448,448,0.0
|
| 109 |
+
107,nncf_module.bert.encoder.layer.6.attention.output.dense,NNCFLinear,weight,"[768, 448]",344064,126145,0.633367657661438
|
| 110 |
+
108,nncf_module.bert.encoder.layer.6.attention.output.dense,NNCFLinear,bias,[768],768,768,0.0
|
| 111 |
+
109,nncf_module.bert.encoder.layer.6.attention.output.LayerNorm,LayerNorm,weight,[768],768,768,0.0
|
| 112 |
+
110,nncf_module.bert.encoder.layer.6.attention.output.LayerNorm,LayerNorm,bias,[768],768,768,0.0
|
| 113 |
+
111,nncf_module.bert.encoder.layer.6.intermediate.dense,NNCFLinear,weight,"[280, 768]",215040,135219,0.3711913824081421
|
| 114 |
+
112,nncf_module.bert.encoder.layer.6.intermediate.dense,NNCFLinear,bias,[280],280,280,0.0
|
| 115 |
+
113,nncf_module.bert.encoder.layer.6.output.dense,NNCFLinear,weight,"[768, 280]",215040,131559,0.3882114887237549
|
| 116 |
+
114,nncf_module.bert.encoder.layer.6.output.dense,NNCFLinear,bias,[768],768,768,0.0
|
| 117 |
+
115,nncf_module.bert.encoder.layer.6.output.LayerNorm,LayerNorm,weight,[768],768,768,0.0
|
| 118 |
+
116,nncf_module.bert.encoder.layer.6.output.LayerNorm,LayerNorm,bias,[768],768,768,0.0
|
| 119 |
+
117,nncf_module.bert.encoder.layer.7.attention.self.query,NNCFLinear,weight,"[448, 768]",344064,132226,0.6156935691833496
|
| 120 |
+
118,nncf_module.bert.encoder.layer.7.attention.self.query,NNCFLinear,bias,[448],448,448,0.0
|
| 121 |
+
119,nncf_module.bert.encoder.layer.7.attention.self.key,NNCFLinear,weight,"[448, 768]",344064,152327,0.5572713017463684
|
| 122 |
+
120,nncf_module.bert.encoder.layer.7.attention.self.key,NNCFLinear,bias,[448],448,448,0.0
|
| 123 |
+
121,nncf_module.bert.encoder.layer.7.attention.self.value,NNCFLinear,weight,"[448, 768]",344064,141141,0.58978271484375
|
| 124 |
+
122,nncf_module.bert.encoder.layer.7.attention.self.value,NNCFLinear,bias,[448],448,448,0.0
|
| 125 |
+
123,nncf_module.bert.encoder.layer.7.attention.output.dense,NNCFLinear,weight,"[768, 448]",344064,135857,0.6051403284072876
|
| 126 |
+
124,nncf_module.bert.encoder.layer.7.attention.output.dense,NNCFLinear,bias,[768],768,768,0.0
|
| 127 |
+
125,nncf_module.bert.encoder.layer.7.attention.output.LayerNorm,LayerNorm,weight,[768],768,768,0.0
|
| 128 |
+
126,nncf_module.bert.encoder.layer.7.attention.output.LayerNorm,LayerNorm,bias,[768],768,768,0.0
|
| 129 |
+
127,nncf_module.bert.encoder.layer.7.intermediate.dense,NNCFLinear,weight,"[211, 768]",162048,109376,0.32503950595855713
|
| 130 |
+
128,nncf_module.bert.encoder.layer.7.intermediate.dense,NNCFLinear,bias,[211],211,211,0.0
|
| 131 |
+
129,nncf_module.bert.encoder.layer.7.output.dense,NNCFLinear,weight,"[768, 211]",162048,107132,0.33888721466064453
|
| 132 |
+
130,nncf_module.bert.encoder.layer.7.output.dense,NNCFLinear,bias,[768],768,768,0.0
|
| 133 |
+
131,nncf_module.bert.encoder.layer.7.output.LayerNorm,LayerNorm,weight,[768],768,768,0.0
|
| 134 |
+
132,nncf_module.bert.encoder.layer.7.output.LayerNorm,LayerNorm,bias,[768],768,768,0.0
|
| 135 |
+
133,nncf_module.bert.encoder.layer.8.attention.self.query,NNCFLinear,weight,"[448, 768]",344064,129219,0.6244332790374756
|
| 136 |
+
134,nncf_module.bert.encoder.layer.8.attention.self.query,NNCFLinear,bias,[448],448,448,0.0
|
| 137 |
+
135,nncf_module.bert.encoder.layer.8.attention.self.key,NNCFLinear,weight,"[448, 768]",344064,130088,0.6219075322151184
|
| 138 |
+
136,nncf_module.bert.encoder.layer.8.attention.self.key,NNCFLinear,bias,[448],448,448,0.0
|
| 139 |
+
137,nncf_module.bert.encoder.layer.8.attention.self.value,NNCFLinear,weight,"[448, 768]",344064,108043,0.685979962348938
|
| 140 |
+
138,nncf_module.bert.encoder.layer.8.attention.self.value,NNCFLinear,bias,[448],448,448,0.0
|
| 141 |
+
139,nncf_module.bert.encoder.layer.8.attention.output.dense,NNCFLinear,weight,"[768, 448]",344064,103567,0.6989891529083252
|
| 142 |
+
140,nncf_module.bert.encoder.layer.8.attention.output.dense,NNCFLinear,bias,[768],768,768,0.0
|
| 143 |
+
141,nncf_module.bert.encoder.layer.8.attention.output.LayerNorm,LayerNorm,weight,[768],768,768,0.0
|
| 144 |
+
142,nncf_module.bert.encoder.layer.8.attention.output.LayerNorm,LayerNorm,bias,[768],768,768,0.0
|
| 145 |
+
143,nncf_module.bert.encoder.layer.8.intermediate.dense,NNCFLinear,weight,"[108, 768]",82944,63183,0.23824506998062134
|
| 146 |
+
144,nncf_module.bert.encoder.layer.8.intermediate.dense,NNCFLinear,bias,[108],108,108,0.0
|
| 147 |
+
145,nncf_module.bert.encoder.layer.8.output.dense,NNCFLinear,weight,"[768, 108]",82944,62633,0.24487602710723877
|
| 148 |
+
146,nncf_module.bert.encoder.layer.8.output.dense,NNCFLinear,bias,[768],768,768,0.0
|
| 149 |
+
147,nncf_module.bert.encoder.layer.8.output.LayerNorm,LayerNorm,weight,[768],768,768,0.0
|
| 150 |
+
148,nncf_module.bert.encoder.layer.8.output.LayerNorm,LayerNorm,bias,[768],768,768,0.0
|
| 151 |
+
149,nncf_module.bert.encoder.layer.9.attention.self.query,NNCFLinear,weight,"[320, 768]",245760,107216,0.5637369155883789
|
| 152 |
+
150,nncf_module.bert.encoder.layer.9.attention.self.query,NNCFLinear,bias,[320],320,320,0.0
|
| 153 |
+
151,nncf_module.bert.encoder.layer.9.attention.self.key,NNCFLinear,weight,"[320, 768]",245760,101848,0.5855793952941895
|
| 154 |
+
152,nncf_module.bert.encoder.layer.9.attention.self.key,NNCFLinear,bias,[320],320,320,0.0
|
| 155 |
+
153,nncf_module.bert.encoder.layer.9.attention.self.value,NNCFLinear,weight,"[320, 768]",245760,52063,0.7881550788879395
|
| 156 |
+
154,nncf_module.bert.encoder.layer.9.attention.self.value,NNCFLinear,bias,[320],320,320,0.0
|
| 157 |
+
155,nncf_module.bert.encoder.layer.9.attention.output.dense,NNCFLinear,weight,"[768, 320]",245760,53127,0.7838256359100342
|
| 158 |
+
156,nncf_module.bert.encoder.layer.9.attention.output.dense,NNCFLinear,bias,[768],768,768,0.0
|
| 159 |
+
157,nncf_module.bert.encoder.layer.9.attention.output.LayerNorm,LayerNorm,weight,[768],768,768,0.0
|
| 160 |
+
158,nncf_module.bert.encoder.layer.9.attention.output.LayerNorm,LayerNorm,bias,[768],768,768,0.0
|
| 161 |
+
159,nncf_module.bert.encoder.layer.9.intermediate.dense,NNCFLinear,weight,"[53, 768]",40704,33339,0.1809404492378235
|
| 162 |
+
160,nncf_module.bert.encoder.layer.9.intermediate.dense,NNCFLinear,bias,[53],53,53,0.0
|
| 163 |
+
161,nncf_module.bert.encoder.layer.9.output.dense,NNCFLinear,weight,"[768, 53]",40704,32340,0.20548349618911743
|
| 164 |
+
162,nncf_module.bert.encoder.layer.9.output.dense,NNCFLinear,bias,[768],768,768,0.0
|
| 165 |
+
163,nncf_module.bert.encoder.layer.9.output.LayerNorm,LayerNorm,weight,[768],768,768,0.0
|
| 166 |
+
164,nncf_module.bert.encoder.layer.9.output.LayerNorm,LayerNorm,bias,[768],768,768,0.0
|
| 167 |
+
165,nncf_module.bert.encoder.layer.10.attention.self.query,NNCFLinear,weight,"[384, 768]",294912,112357,0.6190151572227478
|
| 168 |
+
166,nncf_module.bert.encoder.layer.10.attention.self.query,NNCFLinear,bias,[384],384,384,0.0
|
| 169 |
+
167,nncf_module.bert.encoder.layer.10.attention.self.key,NNCFLinear,weight,"[384, 768]",294912,109640,0.6282280683517456
|
| 170 |
+
168,nncf_module.bert.encoder.layer.10.attention.self.key,NNCFLinear,bias,[384],384,384,0.0
|
| 171 |
+
169,nncf_module.bert.encoder.layer.10.attention.self.value,NNCFLinear,weight,"[384, 768]",294912,61630,0.7910224199295044
|
| 172 |
+
170,nncf_module.bert.encoder.layer.10.attention.self.value,NNCFLinear,bias,[384],384,384,0.0
|
| 173 |
+
171,nncf_module.bert.encoder.layer.10.attention.output.dense,NNCFLinear,weight,"[768, 384]",294912,63912,0.7832844853401184
|
| 174 |
+
172,nncf_module.bert.encoder.layer.10.attention.output.dense,NNCFLinear,bias,[768],768,768,0.0
|
| 175 |
+
173,nncf_module.bert.encoder.layer.10.attention.output.LayerNorm,LayerNorm,weight,[768],768,768,0.0
|
| 176 |
+
174,nncf_module.bert.encoder.layer.10.attention.output.LayerNorm,LayerNorm,bias,[768],768,768,0.0
|
| 177 |
+
175,nncf_module.bert.encoder.layer.10.intermediate.dense,NNCFLinear,weight,"[86, 768]",66048,50252,0.23915940523147583
|
| 178 |
+
176,nncf_module.bert.encoder.layer.10.intermediate.dense,NNCFLinear,bias,[86],86,86,0.0
|
| 179 |
+
177,nncf_module.bert.encoder.layer.10.output.dense,NNCFLinear,weight,"[768, 86]",66048,49494,0.25063592195510864
|
| 180 |
+
178,nncf_module.bert.encoder.layer.10.output.dense,NNCFLinear,bias,[768],768,768,0.0
|
| 181 |
+
179,nncf_module.bert.encoder.layer.10.output.LayerNorm,LayerNorm,weight,[768],768,768,0.0
|
| 182 |
+
180,nncf_module.bert.encoder.layer.10.output.LayerNorm,LayerNorm,bias,[768],768,768,0.0
|
| 183 |
+
181,nncf_module.bert.encoder.layer.11.attention.self.query,NNCFLinear,weight,"[384, 768]",294912,88056,0.701416015625
|
| 184 |
+
182,nncf_module.bert.encoder.layer.11.attention.self.query,NNCFLinear,bias,[384],384,384,0.0
|
| 185 |
+
183,nncf_module.bert.encoder.layer.11.attention.self.key,NNCFLinear,weight,"[384, 768]",294912,85229,0.7110019326210022
|
| 186 |
+
184,nncf_module.bert.encoder.layer.11.attention.self.key,NNCFLinear,bias,[384],384,384,0.0
|
| 187 |
+
185,nncf_module.bert.encoder.layer.11.attention.self.value,NNCFLinear,weight,"[384, 768]",294912,47126,0.8402031660079956
|
| 188 |
+
186,nncf_module.bert.encoder.layer.11.attention.self.value,NNCFLinear,bias,[384],384,384,0.0
|
| 189 |
+
187,nncf_module.bert.encoder.layer.11.attention.output.dense,NNCFLinear,weight,"[768, 384]",294912,49010,0.8338148593902588
|
| 190 |
+
188,nncf_module.bert.encoder.layer.11.attention.output.dense,NNCFLinear,bias,[768],768,768,0.0
|
| 191 |
+
189,nncf_module.bert.encoder.layer.11.attention.output.LayerNorm,LayerNorm,weight,[768],768,768,0.0
|
| 192 |
+
190,nncf_module.bert.encoder.layer.11.attention.output.LayerNorm,LayerNorm,bias,[768],768,768,0.0
|
| 193 |
+
191,nncf_module.bert.encoder.layer.11.intermediate.dense,NNCFLinear,weight,"[105, 768]",80640,62069,0.2302951216697693
|
| 194 |
+
192,nncf_module.bert.encoder.layer.11.intermediate.dense,NNCFLinear,bias,[105],105,105,0.0
|
| 195 |
+
193,nncf_module.bert.encoder.layer.11.output.dense,NNCFLinear,weight,"[768, 105]",80640,61476,0.23764878511428833
|
| 196 |
+
194,nncf_module.bert.encoder.layer.11.output.dense,NNCFLinear,bias,[768],768,768,0.0
|
| 197 |
+
195,nncf_module.bert.encoder.layer.11.output.LayerNorm,LayerNorm,weight,[768],768,768,0.0
|
| 198 |
+
196,nncf_module.bert.encoder.layer.11.output.LayerNorm,LayerNorm,bias,[768],768,768,0.0
|
| 199 |
+
197,nncf_module.qa_outputs,NNCFLinear,weight,"[2, 768]",1536,1536,0.0
|
| 200 |
+
198,nncf_module.qa_outputs,NNCFLinear,bias,[2],2,2,0.0
|
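The rows above report sparsity per parameter tensor; the global rate in the file name can be reproduced by aggregating the `nnz` and `nparam` columns, for example with pandas:

```python
# Sketch: aggregate the layer-wise report into the global sparsity rate (~26.51% expected).
import pandas as pd

df = pd.read_csv("XP_layer_wise_sparsity_global_rate_26.51.csv", index_col=0)
global_sparsity = 1.0 - df["nnz"].sum() / df["nparam"].sum()
print(f"Global sparsity over all reported parameters: {global_sparsity:.2%}")
```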
XP_layer_wise_sparsity_global_rate_26.51.md
ADDED
@@ -0,0 +1,201 @@
| 1 |
+
| | layer_id | layer_type | param_type | shape | nparam | nnz | sparsity |
|
| 2 |
+
|----:|:-------------------------------------------------------------|:--------------|:-------------|:-------------|---------:|---------:|-----------:|
|
| 3 |
+
| 0 | nncf_module.bert.embeddings.word_embeddings | NNCFEmbedding | weight | [30522, 768] | 23440896 | 23440896 | 0 |
|
| 4 |
+
| 1 | nncf_module.bert.embeddings.position_embeddings | NNCFEmbedding | weight | [512, 768] | 393216 | 393216 | 0 |
|
| 5 |
+
| 2 | nncf_module.bert.embeddings.token_type_embeddings | NNCFEmbedding | weight | [2, 768] | 1536 | 1536 | 0 |
|
| 6 |
+
| 3 | nncf_module.bert.embeddings.LayerNorm | LayerNorm | weight | [768] | 768 | 768 | 0 |
|
| 7 |
+
| 4 | nncf_module.bert.embeddings.LayerNorm | LayerNorm | bias | [768] | 768 | 768 | 0 |
|
| 8 |
+
| 5 | nncf_module.bert.encoder.layer.0.attention.self.query | NNCFLinear | weight | [320, 768] | 245760 | 93507 | 0.619519 |
|
| 9 |
+
| 6 | nncf_module.bert.encoder.layer.0.attention.self.query | NNCFLinear | bias | [320] | 320 | 320 | 0 |
|
| 10 |
+
| 7 | nncf_module.bert.encoder.layer.0.attention.self.key | NNCFLinear | weight | [320, 768] | 245760 | 98224 | 0.600325 |
|
| 11 |
+
| 8 | nncf_module.bert.encoder.layer.0.attention.self.key | NNCFLinear | bias | [320] | 320 | 320 | 0 |
|
| 12 |
+
| 9 | nncf_module.bert.encoder.layer.0.attention.self.value | NNCFLinear | weight | [320, 768] | 245760 | 113596 | 0.537777 |
|
| 13 |
+
| 10 | nncf_module.bert.encoder.layer.0.attention.self.value | NNCFLinear | bias | [320] | 320 | 320 | 0 |
|
| 14 |
+
| 11 | nncf_module.bert.encoder.layer.0.attention.output.dense | NNCFLinear | weight | [768, 320] | 245760 | 117320 | 0.522624 |
|
| 15 |
+
| 12 | nncf_module.bert.encoder.layer.0.attention.output.dense | NNCFLinear | bias | [768] | 768 | 768 | 0 |
|
| 16 |
+
| 13 | nncf_module.bert.encoder.layer.0.attention.output.LayerNorm | LayerNorm | weight | [768] | 768 | 768 | 0 |
|
| 17 |
+
| 14 | nncf_module.bert.encoder.layer.0.attention.output.LayerNorm | LayerNorm | bias | [768] | 768 | 768 | 0 |
|
| 18 |
+
| 15 | nncf_module.bert.encoder.layer.0.intermediate.dense | NNCFLinear | weight | [185, 768] | 142080 | 97047 | 0.316955 |
|
| 19 |
+
| 16 | nncf_module.bert.encoder.layer.0.intermediate.dense | NNCFLinear | bias | [185] | 185 | 185 | 0 |
|
| 20 |
+
| 17 | nncf_module.bert.encoder.layer.0.output.dense | NNCFLinear | weight | [768, 185] | 142080 | 94629 | 0.333974 |
|
| 21 |
+
| 18 | nncf_module.bert.encoder.layer.0.output.dense | NNCFLinear | bias | [768] | 768 | 768 | 0 |
|
| 22 |
+
| 19 | nncf_module.bert.encoder.layer.0.output.LayerNorm | LayerNorm | weight | [768] | 768 | 768 | 0 |
|
| 23 |
+
| 20 | nncf_module.bert.encoder.layer.0.output.LayerNorm | LayerNorm | bias | [768] | 768 | 768 | 0 |
|
| 24 |
+
| 21 | nncf_module.bert.encoder.layer.1.attention.self.query | NNCFLinear | weight | [320, 768] | 245760 | 118358 | 0.5184 |
|
| 25 |
+
| 22 | nncf_module.bert.encoder.layer.1.attention.self.query | NNCFLinear | bias | [320] | 320 | 320 | 0 |
|
| 26 |
+
| 23 | nncf_module.bert.encoder.layer.1.attention.self.key | NNCFLinear | weight | [320, 768] | 245760 | 118132 | 0.51932 |
|
| 27 |
+
| 24 | nncf_module.bert.encoder.layer.1.attention.self.key | NNCFLinear | bias | [320] | 320 | 320 | 0 |
|
| 28 |
+
| 25 | nncf_module.bert.encoder.layer.1.attention.self.value | NNCFLinear | weight | [320, 768] | 245760 | 107518 | 0.562508 |
|
| 29 |
+
| 26 | nncf_module.bert.encoder.layer.1.attention.self.value | NNCFLinear | bias | [320] | 320 | 320 | 0 |
|
| 30 |
+
| 27 | nncf_module.bert.encoder.layer.1.attention.output.dense | NNCFLinear | weight | [768, 320] | 245760 | 111172 | 0.54764 |
|
| 31 |
+
| 28 | nncf_module.bert.encoder.layer.1.attention.output.dense | NNCFLinear | bias | [768] | 768 | 768 | 0 |
|
| 32 |
+
| 29 | nncf_module.bert.encoder.layer.1.attention.output.LayerNorm | LayerNorm | weight | [768] | 768 | 768 | 0 |
|
| 33 |
+
| 30 | nncf_module.bert.encoder.layer.1.attention.output.LayerNorm | LayerNorm | bias | [768] | 768 | 768 | 0 |
|
| 34 |
+
| 31 | nncf_module.bert.encoder.layer.1.intermediate.dense | NNCFLinear | weight | [315, 768] | 241920 | 148727 | 0.385222 |
|
| 35 |
+
| 32 | nncf_module.bert.encoder.layer.1.intermediate.dense | NNCFLinear | bias | [315] | 315 | 315 | 0 |
|
| 36 |
+
| 33 | nncf_module.bert.encoder.layer.1.output.dense | NNCFLinear | weight | [768, 315] | 241920 | 143174 | 0.408176 |
|
| 37 |
+
| 34 | nncf_module.bert.encoder.layer.1.output.dense | NNCFLinear | bias | [768] | 768 | 768 | 0 |
|
| 38 |
+
| 35 | nncf_module.bert.encoder.layer.1.output.LayerNorm | LayerNorm | weight | [768] | 768 | 768 | 0 |
|
| 39 |
+
| 36 | nncf_module.bert.encoder.layer.1.output.LayerNorm | LayerNorm | bias | [768] | 768 | 768 | 0 |
|
| 40 |
+
| 37 | nncf_module.bert.encoder.layer.2.attention.self.query | NNCFLinear | weight | [576, 768] | 442368 | 162784 | 0.632017 |
|
| 41 |
+
| 38 | nncf_module.bert.encoder.layer.2.attention.self.query | NNCFLinear | bias | [576] | 576 | 576 | 0 |
|
| 42 |
+
| 39 | nncf_module.bert.encoder.layer.2.attention.self.key | NNCFLinear | weight | [576, 768] | 442368 | 164797 | 0.627466 |
|
| 43 |
+
| 40 | nncf_module.bert.encoder.layer.2.attention.self.key | NNCFLinear | bias | [576] | 576 | 576 | 0 |
|
| 44 |
+
| 41 | nncf_module.bert.encoder.layer.2.attention.self.value | NNCFLinear | weight | [576, 768] | 442368 | 135705 | 0.693231 |
|
| 45 |
+
| 42 | nncf_module.bert.encoder.layer.2.attention.self.value | NNCFLinear | bias | [576] | 576 | 576 | 0 |
|
| 46 |
+
| 43 | nncf_module.bert.encoder.layer.2.attention.output.dense | NNCFLinear | weight | [768, 576] | 442368 | 138448 | 0.68703 |
|
| 47 |
+
| 44 | nncf_module.bert.encoder.layer.2.attention.output.dense | NNCFLinear | bias | [768] | 768 | 768 | 0 |
|
| 48 |
+
| 45 | nncf_module.bert.encoder.layer.2.attention.output.LayerNorm | LayerNorm | weight | [768] | 768 | 768 | 0 |
|
| 49 |
+
| 46 | nncf_module.bert.encoder.layer.2.attention.output.LayerNorm | LayerNorm | bias | [768] | 768 | 768 | 0 |
|
| 50 |
+
| 47 | nncf_module.bert.encoder.layer.2.intermediate.dense | NNCFLinear | weight | [339, 768] | 260352 | 154043 | 0.408328 |
|
| 51 |
+
| 48 | nncf_module.bert.encoder.layer.2.intermediate.dense | NNCFLinear | bias | [339] | 339 | 339 | 0 |
|
| 52 |
+
| 49 | nncf_module.bert.encoder.layer.2.output.dense | NNCFLinear | weight | [768, 339] | 260352 | 150923 | 0.420312 |
|
| 53 |
+
| 50 | nncf_module.bert.encoder.layer.2.output.dense | NNCFLinear | bias | [768] | 768 | 768 | 0 |
|
| 54 |
+
| 51 | nncf_module.bert.encoder.layer.2.output.LayerNorm | LayerNorm | weight | [768] | 768 | 768 | 0 |
|
| 55 |
+
| 52 | nncf_module.bert.encoder.layer.2.output.LayerNorm | LayerNorm | bias | [768] | 768 | 768 | 0 |
|
| 56 |
+
| 53 | nncf_module.bert.encoder.layer.3.attention.self.query | NNCFLinear | weight | [576, 768] | 442368 | 170686 | 0.614154 |
|
| 57 |
+
| 54 | nncf_module.bert.encoder.layer.3.attention.self.query | NNCFLinear | bias | [576] | 576 | 576 | 0 |
|
| 58 |
+
| 55 | nncf_module.bert.encoder.layer.3.attention.self.key | NNCFLinear | weight | [576, 768] | 442368 | 178480 | 0.596535 |
|
| 59 |
+
| 56 | nncf_module.bert.encoder.layer.3.attention.self.key | NNCFLinear | bias | [576] | 576 | 576 | 0 |
|
| 60 |
+
| 57 | nncf_module.bert.encoder.layer.3.attention.self.value | NNCFLinear | weight | [576, 768] | 442368 | 172200 | 0.610731 |
|
| 61 |
+
| 58 | nncf_module.bert.encoder.layer.3.attention.self.value | NNCFLinear | bias | [576] | 576 | 576 | 0 |
|
| 62 |
+
| 59 | nncf_module.bert.encoder.layer.3.attention.output.dense | NNCFLinear | weight | [768, 576] | 442368 | 169194 | 0.617527 |
|
| 63 |
+
| 60 | nncf_module.bert.encoder.layer.3.attention.output.dense | NNCFLinear | bias | [768] | 768 | 768 | 0 |
|
| 64 |
+
| 61 | nncf_module.bert.encoder.layer.3.attention.output.LayerNorm | LayerNorm | weight | [768] | 768 | 768 | 0 |
|
| 65 |
+
| 62 | nncf_module.bert.encoder.layer.3.attention.output.LayerNorm | LayerNorm | bias | [768] | 768 | 768 | 0 |
|
| 66 |
+
| 63 | nncf_module.bert.encoder.layer.3.intermediate.dense | NNCFLinear | weight | [368, 768] | 282624 | 163234 | 0.422434 |
|
| 67 |
+
| 64 | nncf_module.bert.encoder.layer.3.intermediate.dense | NNCFLinear | bias | [368] | 368 | 368 | 0 |
|
| 68 |
+
| 65 | nncf_module.bert.encoder.layer.3.output.dense | NNCFLinear | weight | [768, 368] | 282624 | 157364 | 0.443204 |
|
| 69 |
+
| 66 | nncf_module.bert.encoder.layer.3.output.dense | NNCFLinear | bias | [768] | 768 | 768 | 0 |
|
| 70 |
+
| 67 | nncf_module.bert.encoder.layer.3.output.LayerNorm | LayerNorm | weight | [768] | 768 | 768 | 0 |
|
| 71 |
+
| 68 | nncf_module.bert.encoder.layer.3.output.LayerNorm | LayerNorm | bias | [768] | 768 | 768 | 0 |
|
| 72 |
+
| 69 | nncf_module.bert.encoder.layer.4.attention.self.query | NNCFLinear | weight | [576, 768] | 442368 | 176138 | 0.601829 |
|
| 73 |
+
| 70 | nncf_module.bert.encoder.layer.4.attention.self.query | NNCFLinear | bias | [576] | 576 | 576 | 0 |
|
| 74 |
+
| 71 | nncf_module.bert.encoder.layer.4.attention.self.key | NNCFLinear | weight | [576, 768] | 442368 | 177351 | 0.599087 |
|
| 75 |
+
| 72 | nncf_module.bert.encoder.layer.4.attention.self.key | NNCFLinear | bias | [576] | 576 | 576 | 0 |
|
| 76 |
+
| 73 | nncf_module.bert.encoder.layer.4.attention.self.value | NNCFLinear | weight | [576, 768] | 442368 | 164248 | 0.628707 |
|
| 77 |
+
| 74 | nncf_module.bert.encoder.layer.4.attention.self.value | NNCFLinear | bias | [576] | 576 | 576 | 0 |
|
| 78 |
+
| 75 | nncf_module.bert.encoder.layer.4.attention.output.dense | NNCFLinear | weight | [768, 576] | 442368 | 159471 | 0.639506 |
|
| 79 |
+
| 76 | nncf_module.bert.encoder.layer.4.attention.output.dense | NNCFLinear | bias | [768] | 768 | 768 | 0 |
|
| 80 |
+
| 77 | nncf_module.bert.encoder.layer.4.attention.output.LayerNorm | LayerNorm | weight | [768] | 768 | 768 | 0 |
|
| 81 |
+
| 78 | nncf_module.bert.encoder.layer.4.attention.output.LayerNorm | LayerNorm | bias | [768] | 768 | 768 | 0 |
|
| 82 |
+
| 79 | nncf_module.bert.encoder.layer.4.intermediate.dense | NNCFLinear | weight | [386, 768] | 296448 | 167744 | 0.434154 |
|
| 83 |
+
| 80 | nncf_module.bert.encoder.layer.4.intermediate.dense | NNCFLinear | bias | [386] | 386 | 386 | 0 |
|
| 84 |
+
| 81 | nncf_module.bert.encoder.layer.4.output.dense | NNCFLinear | weight | [768, 386] | 296448 | 159961 | 0.460408 |
|
| 85 |
+
| 82 | nncf_module.bert.encoder.layer.4.output.dense | NNCFLinear | bias | [768] | 768 | 768 | 0 |
|
| 86 |
+
| 83 | nncf_module.bert.encoder.layer.4.output.LayerNorm | LayerNorm | weight | [768] | 768 | 768 | 0 |
|
| 87 |
+
| 84 | nncf_module.bert.encoder.layer.4.output.LayerNorm | LayerNorm | bias | [768] | 768 | 768 | 0 |
|
| 88 |
+
| 85 | nncf_module.bert.encoder.layer.5.attention.self.query | NNCFLinear | weight | [384, 768] | 294912 | 114241 | 0.612627 |
|
| 89 |
+
| 86 | nncf_module.bert.encoder.layer.5.attention.self.query | NNCFLinear | bias | [384] | 384 | 384 | 0 |
|
| 90 |
+
| 87 | nncf_module.bert.encoder.layer.5.attention.self.key | NNCFLinear | weight | [384, 768] | 294912 | 132821 | 0.549625 |
|
| 91 |
+
| 88 | nncf_module.bert.encoder.layer.5.attention.self.key | NNCFLinear | bias | [384] | 384 | 384 | 0 |
|
| 92 |
+
| 89 | nncf_module.bert.encoder.layer.5.attention.self.value | NNCFLinear | weight | [384, 768] | 294912 | 135092 | 0.541924 |
|
| 93 |
+
| 90 | nncf_module.bert.encoder.layer.5.attention.self.value | NNCFLinear | bias | [384] | 384 | 384 | 0 |
|
| 94 |
+
| 91 | nncf_module.bert.encoder.layer.5.attention.output.dense | NNCFLinear | weight | [768, 384] | 294912 | 132344 | 0.551242 |
|
| 95 |
+
| 92 | nncf_module.bert.encoder.layer.5.attention.output.dense | NNCFLinear | bias | [768] | 768 | 768 | 0 |
|
| 96 |
+
| 93 | nncf_module.bert.encoder.layer.5.attention.output.LayerNorm | LayerNorm | weight | [768] | 768 | 768 | 0 |
|
| 97 |
+
| 94 | nncf_module.bert.encoder.layer.5.attention.output.LayerNorm | LayerNorm | bias | [768] | 768 | 768 | 0 |
|
| 98 |
+
| 95 | nncf_module.bert.encoder.layer.5.intermediate.dense | NNCFLinear | weight | [336, 768] | 258048 | 153822 | 0.403902 |
|
| 99 |
+
| 96 | nncf_module.bert.encoder.layer.5.intermediate.dense | NNCFLinear | bias | [336] | 336 | 336 | 0 |
|
| 100 |
+
| 97 | nncf_module.bert.encoder.layer.5.output.dense | NNCFLinear | weight | [768, 336] | 258048 | 145684 | 0.435438 |
|
| 101 |
+
| 98 | nncf_module.bert.encoder.layer.5.output.dense | NNCFLinear | bias | [768] | 768 | 768 | 0 |
|
| 102 |
+
| 99 | nncf_module.bert.encoder.layer.5.output.LayerNorm | LayerNorm | weight | [768] | 768 | 768 | 0 |
|
| 103 |
+
| 100 | nncf_module.bert.encoder.layer.5.output.LayerNorm | LayerNorm | bias | [768] | 768 | 768 | 0 |
|
| 104 |
+
| 101 | nncf_module.bert.encoder.layer.6.attention.self.query | NNCFLinear | weight | [448, 768] | 344064 | 131784 | 0.616978 |
|
| 105 |
+
| 102 | nncf_module.bert.encoder.layer.6.attention.self.query | NNCFLinear | bias | [448] | 448 | 448 | 0 |
|
| 106 |
+
| 103 | nncf_module.bert.encoder.layer.6.attention.self.key | NNCFLinear | weight | [448, 768] | 344064 | 144539 | 0.579907 |
|
| 107 |
+
| 104 | nncf_module.bert.encoder.layer.6.attention.self.key | NNCFLinear | bias | [448] | 448 | 448 | 0 |
|
| 108 |
+
| 105 | nncf_module.bert.encoder.layer.6.attention.self.value | NNCFLinear | weight | [448, 768] | 344064 | 131107 | 0.618946 |
|
| 109 |
+
| 106 | nncf_module.bert.encoder.layer.6.attention.self.value | NNCFLinear | bias | [448] | 448 | 448 | 0 |
|
| 110 |
+
| 107 | nncf_module.bert.encoder.layer.6.attention.output.dense | NNCFLinear | weight | [768, 448] | 344064 | 126145 | 0.633368 |
|
| 111 |
+
| 108 | nncf_module.bert.encoder.layer.6.attention.output.dense | NNCFLinear | bias | [768] | 768 | 768 | 0 |
|
| 112 |
+
| 109 | nncf_module.bert.encoder.layer.6.attention.output.LayerNorm | LayerNorm | weight | [768] | 768 | 768 | 0 |
|
| 113 |
+
| 110 | nncf_module.bert.encoder.layer.6.attention.output.LayerNorm | LayerNorm | bias | [768] | 768 | 768 | 0 |
|
| 114 |
+
| 111 | nncf_module.bert.encoder.layer.6.intermediate.dense | NNCFLinear | weight | [280, 768] | 215040 | 135219 | 0.371191 |
|
| 115 |
+
| 112 | nncf_module.bert.encoder.layer.6.intermediate.dense | NNCFLinear | bias | [280] | 280 | 280 | 0 |
|
| 116 |
+
| 113 | nncf_module.bert.encoder.layer.6.output.dense | NNCFLinear | weight | [768, 280] | 215040 | 131559 | 0.388211 |
|
| 117 |
+
| 114 | nncf_module.bert.encoder.layer.6.output.dense | NNCFLinear | bias | [768] | 768 | 768 | 0 |
|
| 118 |
+
| 115 | nncf_module.bert.encoder.layer.6.output.LayerNorm | LayerNorm | weight | [768] | 768 | 768 | 0 |
|
| 119 |
+
| 116 | nncf_module.bert.encoder.layer.6.output.LayerNorm | LayerNorm | bias | [768] | 768 | 768 | 0 |
|
| 120 |
+
| 117 | nncf_module.bert.encoder.layer.7.attention.self.query | NNCFLinear | weight | [448, 768] | 344064 | 132226 | 0.615694 |
|
| 121 |
+
| 118 | nncf_module.bert.encoder.layer.7.attention.self.query | NNCFLinear | bias | [448] | 448 | 448 | 0 |
|
| 122 |
+
| 119 | nncf_module.bert.encoder.layer.7.attention.self.key | NNCFLinear | weight | [448, 768] | 344064 | 152327 | 0.557271 |
|
| 123 |
+
| 120 | nncf_module.bert.encoder.layer.7.attention.self.key | NNCFLinear | bias | [448] | 448 | 448 | 0 |
|
| 124 |
+
| 121 | nncf_module.bert.encoder.layer.7.attention.self.value | NNCFLinear | weight | [448, 768] | 344064 | 141141 | 0.589783 |
|
| 125 |
+
| 122 | nncf_module.bert.encoder.layer.7.attention.self.value | NNCFLinear | bias | [448] | 448 | 448 | 0 |
|
| 126 |
+
| 123 | nncf_module.bert.encoder.layer.7.attention.output.dense | NNCFLinear | weight | [768, 448] | 344064 | 135857 | 0.60514 |
|
| 127 |
+
| 124 | nncf_module.bert.encoder.layer.7.attention.output.dense | NNCFLinear | bias | [768] | 768 | 768 | 0 |
|
| 128 |
+
| 125 | nncf_module.bert.encoder.layer.7.attention.output.LayerNorm | LayerNorm | weight | [768] | 768 | 768 | 0 |
|
| 129 |
+
| 126 | nncf_module.bert.encoder.layer.7.attention.output.LayerNorm | LayerNorm | bias | [768] | 768 | 768 | 0 |
|
| 130 |
+
| 127 | nncf_module.bert.encoder.layer.7.intermediate.dense | NNCFLinear | weight | [211, 768] | 162048 | 109376 | 0.32504 |
|
| 131 |
+
| 128 | nncf_module.bert.encoder.layer.7.intermediate.dense | NNCFLinear | bias | [211] | 211 | 211 | 0 |
|
| 132 |
+
| 129 | nncf_module.bert.encoder.layer.7.output.dense | NNCFLinear | weight | [768, 211] | 162048 | 107132 | 0.338887 |
|
| 133 |
+
| 130 | nncf_module.bert.encoder.layer.7.output.dense | NNCFLinear | bias | [768] | 768 | 768 | 0 |
|
| 134 |
+
| 131 | nncf_module.bert.encoder.layer.7.output.LayerNorm | LayerNorm | weight | [768] | 768 | 768 | 0 |
|
| 135 |
+
| 132 | nncf_module.bert.encoder.layer.7.output.LayerNorm | LayerNorm | bias | [768] | 768 | 768 | 0 |
|
| 136 |
+
| 133 | nncf_module.bert.encoder.layer.8.attention.self.query | NNCFLinear | weight | [448, 768] | 344064 | 129219 | 0.624433 |
|
| 137 |
+
| 134 | nncf_module.bert.encoder.layer.8.attention.self.query | NNCFLinear | bias | [448] | 448 | 448 | 0 |
|
| 138 |
+
| 135 | nncf_module.bert.encoder.layer.8.attention.self.key | NNCFLinear | weight | [448, 768] | 344064 | 130088 | 0.621908 |
|
| 139 |
+
| 136 | nncf_module.bert.encoder.layer.8.attention.self.key | NNCFLinear | bias | [448] | 448 | 448 | 0 |
|
| 140 |
+
| 137 | nncf_module.bert.encoder.layer.8.attention.self.value | NNCFLinear | weight | [448, 768] | 344064 | 108043 | 0.68598 |
|
| 141 |
+
| 138 | nncf_module.bert.encoder.layer.8.attention.self.value | NNCFLinear | bias | [448] | 448 | 448 | 0 |
|
| 142 |
+
| 139 | nncf_module.bert.encoder.layer.8.attention.output.dense | NNCFLinear | weight | [768, 448] | 344064 | 103567 | 0.698989 |
|
| 143 |
+
| 140 | nncf_module.bert.encoder.layer.8.attention.output.dense | NNCFLinear | bias | [768] | 768 | 768 | 0 |
|
| 144 |
+
| 141 | nncf_module.bert.encoder.layer.8.attention.output.LayerNorm | LayerNorm | weight | [768] | 768 | 768 | 0 |
|
| 145 |
+
| 142 | nncf_module.bert.encoder.layer.8.attention.output.LayerNorm | LayerNorm | bias | [768] | 768 | 768 | 0 |
|
| 146 |
+
| 143 | nncf_module.bert.encoder.layer.8.intermediate.dense | NNCFLinear | weight | [108, 768] | 82944 | 63183 | 0.238245 |
|
| 147 |
+
| 144 | nncf_module.bert.encoder.layer.8.intermediate.dense | NNCFLinear | bias | [108] | 108 | 108 | 0 |
|
| 148 |
+
| 145 | nncf_module.bert.encoder.layer.8.output.dense | NNCFLinear | weight | [768, 108] | 82944 | 62633 | 0.244876 |
|
| 149 |
+
| 146 | nncf_module.bert.encoder.layer.8.output.dense | NNCFLinear | bias | [768] | 768 | 768 | 0 |
|
| 150 |
+
| 147 | nncf_module.bert.encoder.layer.8.output.LayerNorm | LayerNorm | weight | [768] | 768 | 768 | 0 |
|
| 151 |
+
| 148 | nncf_module.bert.encoder.layer.8.output.LayerNorm | LayerNorm | bias | [768] | 768 | 768 | 0 |
|
| 152 |
+
| 149 | nncf_module.bert.encoder.layer.9.attention.self.query | NNCFLinear | weight | [320, 768] | 245760 | 107216 | 0.563737 |
|
| 153 |
+
| 150 | nncf_module.bert.encoder.layer.9.attention.self.query | NNCFLinear | bias | [320] | 320 | 320 | 0 |
|
| 154 |
+
| 151 | nncf_module.bert.encoder.layer.9.attention.self.key | NNCFLinear | weight | [320, 768] | 245760 | 101848 | 0.585579 |
|
| 155 |
+
| 152 | nncf_module.bert.encoder.layer.9.attention.self.key | NNCFLinear | bias | [320] | 320 | 320 | 0 |
|
| 156 |
+
| 153 | nncf_module.bert.encoder.layer.9.attention.self.value | NNCFLinear | weight | [320, 768] | 245760 | 52063 | 0.788155 |
|
| 157 |
+
| 154 | nncf_module.bert.encoder.layer.9.attention.self.value | NNCFLinear | bias | [320] | 320 | 320 | 0 |
|
| 158 |
+
| 155 | nncf_module.bert.encoder.layer.9.attention.output.dense | NNCFLinear | weight | [768, 320] | 245760 | 53127 | 0.783826 |
|
| 159 |
+
| 156 | nncf_module.bert.encoder.layer.9.attention.output.dense | NNCFLinear | bias | [768] | 768 | 768 | 0 |
|
| 160 |
+
| 157 | nncf_module.bert.encoder.layer.9.attention.output.LayerNorm | LayerNorm | weight | [768] | 768 | 768 | 0 |
| 158 | nncf_module.bert.encoder.layer.9.attention.output.LayerNorm | LayerNorm | bias | [768] | 768 | 768 | 0 |
| 159 | nncf_module.bert.encoder.layer.9.intermediate.dense | NNCFLinear | weight | [53, 768] | 40704 | 33339 | 0.18094 |
| 160 | nncf_module.bert.encoder.layer.9.intermediate.dense | NNCFLinear | bias | [53] | 53 | 53 | 0 |
| 161 | nncf_module.bert.encoder.layer.9.output.dense | NNCFLinear | weight | [768, 53] | 40704 | 32340 | 0.205483 |
| 162 | nncf_module.bert.encoder.layer.9.output.dense | NNCFLinear | bias | [768] | 768 | 768 | 0 |
| 163 | nncf_module.bert.encoder.layer.9.output.LayerNorm | LayerNorm | weight | [768] | 768 | 768 | 0 |
| 164 | nncf_module.bert.encoder.layer.9.output.LayerNorm | LayerNorm | bias | [768] | 768 | 768 | 0 |
| 165 | nncf_module.bert.encoder.layer.10.attention.self.query | NNCFLinear | weight | [384, 768] | 294912 | 112357 | 0.619015 |
| 166 | nncf_module.bert.encoder.layer.10.attention.self.query | NNCFLinear | bias | [384] | 384 | 384 | 0 |
| 167 | nncf_module.bert.encoder.layer.10.attention.self.key | NNCFLinear | weight | [384, 768] | 294912 | 109640 | 0.628228 |
| 168 | nncf_module.bert.encoder.layer.10.attention.self.key | NNCFLinear | bias | [384] | 384 | 384 | 0 |
| 169 | nncf_module.bert.encoder.layer.10.attention.self.value | NNCFLinear | weight | [384, 768] | 294912 | 61630 | 0.791022 |
| 170 | nncf_module.bert.encoder.layer.10.attention.self.value | NNCFLinear | bias | [384] | 384 | 384 | 0 |
| 171 | nncf_module.bert.encoder.layer.10.attention.output.dense | NNCFLinear | weight | [768, 384] | 294912 | 63912 | 0.783284 |
| 172 | nncf_module.bert.encoder.layer.10.attention.output.dense | NNCFLinear | bias | [768] | 768 | 768 | 0 |
| 173 | nncf_module.bert.encoder.layer.10.attention.output.LayerNorm | LayerNorm | weight | [768] | 768 | 768 | 0 |
| 174 | nncf_module.bert.encoder.layer.10.attention.output.LayerNorm | LayerNorm | bias | [768] | 768 | 768 | 0 |
| 175 | nncf_module.bert.encoder.layer.10.intermediate.dense | NNCFLinear | weight | [86, 768] | 66048 | 50252 | 0.239159 |
| 176 | nncf_module.bert.encoder.layer.10.intermediate.dense | NNCFLinear | bias | [86] | 86 | 86 | 0 |
| 177 | nncf_module.bert.encoder.layer.10.output.dense | NNCFLinear | weight | [768, 86] | 66048 | 49494 | 0.250636 |
| 178 | nncf_module.bert.encoder.layer.10.output.dense | NNCFLinear | bias | [768] | 768 | 768 | 0 |
| 179 | nncf_module.bert.encoder.layer.10.output.LayerNorm | LayerNorm | weight | [768] | 768 | 768 | 0 |
| 180 | nncf_module.bert.encoder.layer.10.output.LayerNorm | LayerNorm | bias | [768] | 768 | 768 | 0 |
| 181 | nncf_module.bert.encoder.layer.11.attention.self.query | NNCFLinear | weight | [384, 768] | 294912 | 88056 | 0.701416 |
| 182 | nncf_module.bert.encoder.layer.11.attention.self.query | NNCFLinear | bias | [384] | 384 | 384 | 0 |
| 183 | nncf_module.bert.encoder.layer.11.attention.self.key | NNCFLinear | weight | [384, 768] | 294912 | 85229 | 0.711002 |
| 184 | nncf_module.bert.encoder.layer.11.attention.self.key | NNCFLinear | bias | [384] | 384 | 384 | 0 |
| 185 | nncf_module.bert.encoder.layer.11.attention.self.value | NNCFLinear | weight | [384, 768] | 294912 | 47126 | 0.840203 |
| 186 | nncf_module.bert.encoder.layer.11.attention.self.value | NNCFLinear | bias | [384] | 384 | 384 | 0 |
| 187 | nncf_module.bert.encoder.layer.11.attention.output.dense | NNCFLinear | weight | [768, 384] | 294912 | 49010 | 0.833815 |
| 188 | nncf_module.bert.encoder.layer.11.attention.output.dense | NNCFLinear | bias | [768] | 768 | 768 | 0 |
| 189 | nncf_module.bert.encoder.layer.11.attention.output.LayerNorm | LayerNorm | weight | [768] | 768 | 768 | 0 |
| 190 | nncf_module.bert.encoder.layer.11.attention.output.LayerNorm | LayerNorm | bias | [768] | 768 | 768 | 0 |
| 191 | nncf_module.bert.encoder.layer.11.intermediate.dense | NNCFLinear | weight | [105, 768] | 80640 | 62069 | 0.230295 |
| 192 | nncf_module.bert.encoder.layer.11.intermediate.dense | NNCFLinear | bias | [105] | 105 | 105 | 0 |
| 193 | nncf_module.bert.encoder.layer.11.output.dense | NNCFLinear | weight | [768, 105] | 80640 | 61476 | 0.237649 |
| 194 | nncf_module.bert.encoder.layer.11.output.dense | NNCFLinear | bias | [768] | 768 | 768 | 0 |
| 195 | nncf_module.bert.encoder.layer.11.output.LayerNorm | LayerNorm | weight | [768] | 768 | 768 | 0 |
| 196 | nncf_module.bert.encoder.layer.11.output.LayerNorm | LayerNorm | bias | [768] | 768 | 768 | 0 |
| 197 | nncf_module.qa_outputs | NNCFLinear | weight | [2, 768] | 1536 | 1536 | 0 |
| 198 | nncf_module.qa_outputs | NNCFLinear | bias | [2] | 2 | 2 | 0 |
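The layer-wise sparsity tables above are generated collateral; a table of this kind can be reproduced from the shipped checkpoint with a short script along the following lines (a sketch only, assuming a pandas/tabulate install and the file names used in this repo, not the exact tooling behind this commit):

```python
import pandas as pd
import torch

# Sketch: tabulate per-parameter sparsity of the sparsified checkpoint.
# "pytorch_model.bin" is the checkpoint shipped in this repo; the columns
# mirror the layer-wise sparsity collateral above.
state_dict = torch.load("pytorch_model.bin", map_location="cpu")

rows = []
for name, tensor in state_dict.items():
    if not torch.is_floating_point(tensor):
        continue  # skip integer buffers such as position ids
    nparam = tensor.numel()
    nnz = int(torch.count_nonzero(tensor))
    rows.append({
        "layer_id": name,
        "shape": list(tensor.shape),
        "nparam": nparam,
        "nnz": nnz,
        "sparsity": 1.0 - nnz / nparam,
    })

df = pd.DataFrame(rows)
df.to_csv("layer_wise_sparsity.csv")
# to_markdown() requires the `tabulate` package
with open("layer_wise_sparsity.md", "w") as f:
    f.write(df.to_markdown())
```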
XP_linear_layer_sparsity_20M_params_57.92_sparsity.csv
ADDED
|
@@ -0,0 +1,73 @@
| 1 |
+
,layer_id,layer_type,param_type,shape,nparam,nnz,sparsity
|
| 2 |
+
5,nncf_module.bert.encoder.layer.0.attention.self.query,NNCFLinear,weight,"[320, 768]",245760,93507,0.6195189952850342
|
| 3 |
+
7,nncf_module.bert.encoder.layer.0.attention.self.key,NNCFLinear,weight,"[320, 768]",245760,98224,0.6003254652023315
|
| 4 |
+
9,nncf_module.bert.encoder.layer.0.attention.self.value,NNCFLinear,weight,"[320, 768]",245760,113596,0.5377767086029053
|
| 5 |
+
11,nncf_module.bert.encoder.layer.0.attention.output.dense,NNCFLinear,weight,"[768, 320]",245760,117320,0.5226236581802368
|
| 6 |
+
15,nncf_module.bert.encoder.layer.0.intermediate.dense,NNCFLinear,weight,"[185, 768]",142080,97047,0.31695520877838135
|
| 7 |
+
17,nncf_module.bert.encoder.layer.0.output.dense,NNCFLinear,weight,"[768, 185]",142080,94629,0.33397382497787476
|
| 8 |
+
21,nncf_module.bert.encoder.layer.1.attention.self.query,NNCFLinear,weight,"[320, 768]",245760,118358,0.5184000730514526
|
| 9 |
+
23,nncf_module.bert.encoder.layer.1.attention.self.key,NNCFLinear,weight,"[320, 768]",245760,118132,0.5193196535110474
|
| 10 |
+
25,nncf_module.bert.encoder.layer.1.attention.self.value,NNCFLinear,weight,"[320, 768]",245760,107518,0.5625081062316895
|
| 11 |
+
27,nncf_module.bert.encoder.layer.1.attention.output.dense,NNCFLinear,weight,"[768, 320]",245760,111172,0.5476399660110474
|
| 12 |
+
31,nncf_module.bert.encoder.layer.1.intermediate.dense,NNCFLinear,weight,"[315, 768]",241920,148727,0.3852223753929138
|
| 13 |
+
33,nncf_module.bert.encoder.layer.1.output.dense,NNCFLinear,weight,"[768, 315]",241920,143174,0.4081762433052063
|
| 14 |
+
37,nncf_module.bert.encoder.layer.2.attention.self.query,NNCFLinear,weight,"[576, 768]",442368,162784,0.6320167779922485
|
| 15 |
+
39,nncf_module.bert.encoder.layer.2.attention.self.key,NNCFLinear,weight,"[576, 768]",442368,164797,0.6274662613868713
|
| 16 |
+
41,nncf_module.bert.encoder.layer.2.attention.self.value,NNCFLinear,weight,"[576, 768]",442368,135705,0.6932305097579956
|
| 17 |
+
43,nncf_module.bert.encoder.layer.2.attention.output.dense,NNCFLinear,weight,"[768, 576]",442368,138448,0.6870298385620117
|
| 18 |
+
47,nncf_module.bert.encoder.layer.2.intermediate.dense,NNCFLinear,weight,"[339, 768]",260352,154043,0.40832793712615967
|
| 19 |
+
49,nncf_module.bert.encoder.layer.2.output.dense,NNCFLinear,weight,"[768, 339]",260352,150923,0.42031168937683105
|
| 20 |
+
53,nncf_module.bert.encoder.layer.3.attention.self.query,NNCFLinear,weight,"[576, 768]",442368,170686,0.6141538619995117
|
| 21 |
+
55,nncf_module.bert.encoder.layer.3.attention.self.key,NNCFLinear,weight,"[576, 768]",442368,178480,0.5965349674224854
|
| 22 |
+
57,nncf_module.bert.encoder.layer.3.attention.self.value,NNCFLinear,weight,"[576, 768]",442368,172200,0.6107313632965088
|
| 23 |
+
59,nncf_module.bert.encoder.layer.3.attention.output.dense,NNCFLinear,weight,"[768, 576]",442368,169194,0.6175265908241272
|
| 24 |
+
63,nncf_module.bert.encoder.layer.3.intermediate.dense,NNCFLinear,weight,"[368, 768]",282624,163234,0.4224340319633484
|
| 25 |
+
65,nncf_module.bert.encoder.layer.3.output.dense,NNCFLinear,weight,"[768, 368]",282624,157364,0.4432036876678467
|
| 26 |
+
69,nncf_module.bert.encoder.layer.4.attention.self.query,NNCFLinear,weight,"[576, 768]",442368,176138,0.6018292307853699
|
| 27 |
+
71,nncf_module.bert.encoder.layer.4.attention.self.key,NNCFLinear,weight,"[576, 768]",442368,177351,0.5990871787071228
|
| 28 |
+
73,nncf_module.bert.encoder.layer.4.attention.self.value,NNCFLinear,weight,"[576, 768]",442368,164248,0.6287072896957397
|
| 29 |
+
75,nncf_module.bert.encoder.layer.4.attention.output.dense,NNCFLinear,weight,"[768, 576]",442368,159471,0.6395059823989868
|
| 30 |
+
79,nncf_module.bert.encoder.layer.4.intermediate.dense,NNCFLinear,weight,"[386, 768]",296448,167744,0.4341537356376648
|
| 31 |
+
81,nncf_module.bert.encoder.layer.4.output.dense,NNCFLinear,weight,"[768, 386]",296448,159961,0.46040791273117065
|
| 32 |
+
85,nncf_module.bert.encoder.layer.5.attention.self.query,NNCFLinear,weight,"[384, 768]",294912,114241,0.6126267910003662
|
| 33 |
+
87,nncf_module.bert.encoder.layer.5.attention.self.key,NNCFLinear,weight,"[384, 768]",294912,132821,0.5496249794960022
|
| 34 |
+
89,nncf_module.bert.encoder.layer.5.attention.self.value,NNCFLinear,weight,"[384, 768]",294912,135092,0.5419243574142456
|
| 35 |
+
91,nncf_module.bert.encoder.layer.5.attention.output.dense,NNCFLinear,weight,"[768, 384]",294912,132344,0.5512424111366272
|
| 36 |
+
95,nncf_module.bert.encoder.layer.5.intermediate.dense,NNCFLinear,weight,"[336, 768]",258048,153822,0.4039015769958496
|
| 37 |
+
97,nncf_module.bert.encoder.layer.5.output.dense,NNCFLinear,weight,"[768, 336]",258048,145684,0.435438334941864
|
| 38 |
+
101,nncf_module.bert.encoder.layer.6.attention.self.query,NNCFLinear,weight,"[448, 768]",344064,131784,0.6169782280921936
|
| 39 |
+
103,nncf_module.bert.encoder.layer.6.attention.self.key,NNCFLinear,weight,"[448, 768]",344064,144539,0.5799066424369812
|
| 40 |
+
105,nncf_module.bert.encoder.layer.6.attention.self.value,NNCFLinear,weight,"[448, 768]",344064,131107,0.6189458966255188
|
| 41 |
+
107,nncf_module.bert.encoder.layer.6.attention.output.dense,NNCFLinear,weight,"[768, 448]",344064,126145,0.633367657661438
|
| 42 |
+
111,nncf_module.bert.encoder.layer.6.intermediate.dense,NNCFLinear,weight,"[280, 768]",215040,135219,0.3711913824081421
|
| 43 |
+
113,nncf_module.bert.encoder.layer.6.output.dense,NNCFLinear,weight,"[768, 280]",215040,131559,0.3882114887237549
|
| 44 |
+
117,nncf_module.bert.encoder.layer.7.attention.self.query,NNCFLinear,weight,"[448, 768]",344064,132226,0.6156935691833496
|
| 45 |
+
119,nncf_module.bert.encoder.layer.7.attention.self.key,NNCFLinear,weight,"[448, 768]",344064,152327,0.5572713017463684
|
| 46 |
+
121,nncf_module.bert.encoder.layer.7.attention.self.value,NNCFLinear,weight,"[448, 768]",344064,141141,0.58978271484375
|
| 47 |
+
123,nncf_module.bert.encoder.layer.7.attention.output.dense,NNCFLinear,weight,"[768, 448]",344064,135857,0.6051403284072876
|
| 48 |
+
127,nncf_module.bert.encoder.layer.7.intermediate.dense,NNCFLinear,weight,"[211, 768]",162048,109376,0.32503950595855713
|
| 49 |
+
129,nncf_module.bert.encoder.layer.7.output.dense,NNCFLinear,weight,"[768, 211]",162048,107132,0.33888721466064453
|
| 50 |
+
133,nncf_module.bert.encoder.layer.8.attention.self.query,NNCFLinear,weight,"[448, 768]",344064,129219,0.6244332790374756
|
| 51 |
+
135,nncf_module.bert.encoder.layer.8.attention.self.key,NNCFLinear,weight,"[448, 768]",344064,130088,0.6219075322151184
|
| 52 |
+
137,nncf_module.bert.encoder.layer.8.attention.self.value,NNCFLinear,weight,"[448, 768]",344064,108043,0.685979962348938
|
| 53 |
+
139,nncf_module.bert.encoder.layer.8.attention.output.dense,NNCFLinear,weight,"[768, 448]",344064,103567,0.6989891529083252
|
| 54 |
+
143,nncf_module.bert.encoder.layer.8.intermediate.dense,NNCFLinear,weight,"[108, 768]",82944,63183,0.23824506998062134
|
| 55 |
+
145,nncf_module.bert.encoder.layer.8.output.dense,NNCFLinear,weight,"[768, 108]",82944,62633,0.24487602710723877
|
| 56 |
+
149,nncf_module.bert.encoder.layer.9.attention.self.query,NNCFLinear,weight,"[320, 768]",245760,107216,0.5637369155883789
|
| 57 |
+
151,nncf_module.bert.encoder.layer.9.attention.self.key,NNCFLinear,weight,"[320, 768]",245760,101848,0.5855793952941895
|
| 58 |
+
153,nncf_module.bert.encoder.layer.9.attention.self.value,NNCFLinear,weight,"[320, 768]",245760,52063,0.7881550788879395
|
| 59 |
+
155,nncf_module.bert.encoder.layer.9.attention.output.dense,NNCFLinear,weight,"[768, 320]",245760,53127,0.7838256359100342
|
| 60 |
+
159,nncf_module.bert.encoder.layer.9.intermediate.dense,NNCFLinear,weight,"[53, 768]",40704,33339,0.1809404492378235
|
| 61 |
+
161,nncf_module.bert.encoder.layer.9.output.dense,NNCFLinear,weight,"[768, 53]",40704,32340,0.20548349618911743
|
| 62 |
+
165,nncf_module.bert.encoder.layer.10.attention.self.query,NNCFLinear,weight,"[384, 768]",294912,112357,0.6190151572227478
|
| 63 |
+
167,nncf_module.bert.encoder.layer.10.attention.self.key,NNCFLinear,weight,"[384, 768]",294912,109640,0.6282280683517456
|
| 64 |
+
169,nncf_module.bert.encoder.layer.10.attention.self.value,NNCFLinear,weight,"[384, 768]",294912,61630,0.7910224199295044
|
| 65 |
+
171,nncf_module.bert.encoder.layer.10.attention.output.dense,NNCFLinear,weight,"[768, 384]",294912,63912,0.7832844853401184
|
| 66 |
+
175,nncf_module.bert.encoder.layer.10.intermediate.dense,NNCFLinear,weight,"[86, 768]",66048,50252,0.23915940523147583
|
| 67 |
+
177,nncf_module.bert.encoder.layer.10.output.dense,NNCFLinear,weight,"[768, 86]",66048,49494,0.25063592195510864
|
| 68 |
+
181,nncf_module.bert.encoder.layer.11.attention.self.query,NNCFLinear,weight,"[384, 768]",294912,88056,0.701416015625
|
| 69 |
+
183,nncf_module.bert.encoder.layer.11.attention.self.key,NNCFLinear,weight,"[384, 768]",294912,85229,0.7110019326210022
|
| 70 |
+
185,nncf_module.bert.encoder.layer.11.attention.self.value,NNCFLinear,weight,"[384, 768]",294912,47126,0.8402031660079956
|
| 71 |
+
187,nncf_module.bert.encoder.layer.11.attention.output.dense,NNCFLinear,weight,"[768, 384]",294912,49010,0.8338148593902588
|
| 72 |
+
191,nncf_module.bert.encoder.layer.11.intermediate.dense,NNCFLinear,weight,"[105, 768]",80640,62069,0.2302951216697693
|
| 73 |
+
193,nncf_module.bert.encoder.layer.11.output.dense,NNCFLinear,weight,"[768, 105]",80640,61476,0.23764878511428833
|
XP_linear_layer_sparsity_20M_params_57.92_sparsity.md
ADDED
|
@@ -0,0 +1,74 @@
| 1 |
+
| | layer_id | layer_type | param_type | shape | nparam | nnz | sparsity |
|
| 2 |
+
|----:|:---------------------------------------------------------|:-------------|:-------------|:-----------|---------:|-------:|-----------:|
|
| 3 |
+
| 5 | nncf_module.bert.encoder.layer.0.attention.self.query | NNCFLinear | weight | [320, 768] | 245760 | 93507 | 0.619519 |
|
| 4 |
+
| 7 | nncf_module.bert.encoder.layer.0.attention.self.key | NNCFLinear | weight | [320, 768] | 245760 | 98224 | 0.600325 |
|
| 5 |
+
| 9 | nncf_module.bert.encoder.layer.0.attention.self.value | NNCFLinear | weight | [320, 768] | 245760 | 113596 | 0.537777 |
|
| 6 |
+
| 11 | nncf_module.bert.encoder.layer.0.attention.output.dense | NNCFLinear | weight | [768, 320] | 245760 | 117320 | 0.522624 |
|
| 7 |
+
| 15 | nncf_module.bert.encoder.layer.0.intermediate.dense | NNCFLinear | weight | [185, 768] | 142080 | 97047 | 0.316955 |
|
| 8 |
+
| 17 | nncf_module.bert.encoder.layer.0.output.dense | NNCFLinear | weight | [768, 185] | 142080 | 94629 | 0.333974 |
|
| 9 |
+
| 21 | nncf_module.bert.encoder.layer.1.attention.self.query | NNCFLinear | weight | [320, 768] | 245760 | 118358 | 0.5184 |
|
| 10 |
+
| 23 | nncf_module.bert.encoder.layer.1.attention.self.key | NNCFLinear | weight | [320, 768] | 245760 | 118132 | 0.51932 |
|
| 11 |
+
| 25 | nncf_module.bert.encoder.layer.1.attention.self.value | NNCFLinear | weight | [320, 768] | 245760 | 107518 | 0.562508 |
|
| 12 |
+
| 27 | nncf_module.bert.encoder.layer.1.attention.output.dense | NNCFLinear | weight | [768, 320] | 245760 | 111172 | 0.54764 |
|
| 13 |
+
| 31 | nncf_module.bert.encoder.layer.1.intermediate.dense | NNCFLinear | weight | [315, 768] | 241920 | 148727 | 0.385222 |
|
| 14 |
+
| 33 | nncf_module.bert.encoder.layer.1.output.dense | NNCFLinear | weight | [768, 315] | 241920 | 143174 | 0.408176 |
|
| 15 |
+
| 37 | nncf_module.bert.encoder.layer.2.attention.self.query | NNCFLinear | weight | [576, 768] | 442368 | 162784 | 0.632017 |
|
| 16 |
+
| 39 | nncf_module.bert.encoder.layer.2.attention.self.key | NNCFLinear | weight | [576, 768] | 442368 | 164797 | 0.627466 |
|
| 17 |
+
| 41 | nncf_module.bert.encoder.layer.2.attention.self.value | NNCFLinear | weight | [576, 768] | 442368 | 135705 | 0.693231 |
|
| 18 |
+
| 43 | nncf_module.bert.encoder.layer.2.attention.output.dense | NNCFLinear | weight | [768, 576] | 442368 | 138448 | 0.68703 |
|
| 19 |
+
| 47 | nncf_module.bert.encoder.layer.2.intermediate.dense | NNCFLinear | weight | [339, 768] | 260352 | 154043 | 0.408328 |
|
| 20 |
+
| 49 | nncf_module.bert.encoder.layer.2.output.dense | NNCFLinear | weight | [768, 339] | 260352 | 150923 | 0.420312 |
|
| 21 |
+
| 53 | nncf_module.bert.encoder.layer.3.attention.self.query | NNCFLinear | weight | [576, 768] | 442368 | 170686 | 0.614154 |
|
| 22 |
+
| 55 | nncf_module.bert.encoder.layer.3.attention.self.key | NNCFLinear | weight | [576, 768] | 442368 | 178480 | 0.596535 |
|
| 23 |
+
| 57 | nncf_module.bert.encoder.layer.3.attention.self.value | NNCFLinear | weight | [576, 768] | 442368 | 172200 | 0.610731 |
|
| 24 |
+
| 59 | nncf_module.bert.encoder.layer.3.attention.output.dense | NNCFLinear | weight | [768, 576] | 442368 | 169194 | 0.617527 |
|
| 25 |
+
| 63 | nncf_module.bert.encoder.layer.3.intermediate.dense | NNCFLinear | weight | [368, 768] | 282624 | 163234 | 0.422434 |
|
| 26 |
+
| 65 | nncf_module.bert.encoder.layer.3.output.dense | NNCFLinear | weight | [768, 368] | 282624 | 157364 | 0.443204 |
|
| 27 |
+
| 69 | nncf_module.bert.encoder.layer.4.attention.self.query | NNCFLinear | weight | [576, 768] | 442368 | 176138 | 0.601829 |
|
| 28 |
+
| 71 | nncf_module.bert.encoder.layer.4.attention.self.key | NNCFLinear | weight | [576, 768] | 442368 | 177351 | 0.599087 |
|
| 29 |
+
| 73 | nncf_module.bert.encoder.layer.4.attention.self.value | NNCFLinear | weight | [576, 768] | 442368 | 164248 | 0.628707 |
|
| 30 |
+
| 75 | nncf_module.bert.encoder.layer.4.attention.output.dense | NNCFLinear | weight | [768, 576] | 442368 | 159471 | 0.639506 |
|
| 31 |
+
| 79 | nncf_module.bert.encoder.layer.4.intermediate.dense | NNCFLinear | weight | [386, 768] | 296448 | 167744 | 0.434154 |
|
| 32 |
+
| 81 | nncf_module.bert.encoder.layer.4.output.dense | NNCFLinear | weight | [768, 386] | 296448 | 159961 | 0.460408 |
|
| 33 |
+
| 85 | nncf_module.bert.encoder.layer.5.attention.self.query | NNCFLinear | weight | [384, 768] | 294912 | 114241 | 0.612627 |
|
| 34 |
+
| 87 | nncf_module.bert.encoder.layer.5.attention.self.key | NNCFLinear | weight | [384, 768] | 294912 | 132821 | 0.549625 |
|
| 35 |
+
| 89 | nncf_module.bert.encoder.layer.5.attention.self.value | NNCFLinear | weight | [384, 768] | 294912 | 135092 | 0.541924 |
|
| 36 |
+
| 91 | nncf_module.bert.encoder.layer.5.attention.output.dense | NNCFLinear | weight | [768, 384] | 294912 | 132344 | 0.551242 |
|
| 37 |
+
| 95 | nncf_module.bert.encoder.layer.5.intermediate.dense | NNCFLinear | weight | [336, 768] | 258048 | 153822 | 0.403902 |
|
| 38 |
+
| 97 | nncf_module.bert.encoder.layer.5.output.dense | NNCFLinear | weight | [768, 336] | 258048 | 145684 | 0.435438 |
|
| 39 |
+
| 101 | nncf_module.bert.encoder.layer.6.attention.self.query | NNCFLinear | weight | [448, 768] | 344064 | 131784 | 0.616978 |
|
| 40 |
+
| 103 | nncf_module.bert.encoder.layer.6.attention.self.key | NNCFLinear | weight | [448, 768] | 344064 | 144539 | 0.579907 |
|
| 41 |
+
| 105 | nncf_module.bert.encoder.layer.6.attention.self.value | NNCFLinear | weight | [448, 768] | 344064 | 131107 | 0.618946 |
|
| 42 |
+
| 107 | nncf_module.bert.encoder.layer.6.attention.output.dense | NNCFLinear | weight | [768, 448] | 344064 | 126145 | 0.633368 |
|
| 43 |
+
| 111 | nncf_module.bert.encoder.layer.6.intermediate.dense | NNCFLinear | weight | [280, 768] | 215040 | 135219 | 0.371191 |
|
| 44 |
+
| 113 | nncf_module.bert.encoder.layer.6.output.dense | NNCFLinear | weight | [768, 280] | 215040 | 131559 | 0.388211 |
|
| 45 |
+
| 117 | nncf_module.bert.encoder.layer.7.attention.self.query | NNCFLinear | weight | [448, 768] | 344064 | 132226 | 0.615694 |
|
| 46 |
+
| 119 | nncf_module.bert.encoder.layer.7.attention.self.key | NNCFLinear | weight | [448, 768] | 344064 | 152327 | 0.557271 |
|
| 47 |
+
| 121 | nncf_module.bert.encoder.layer.7.attention.self.value | NNCFLinear | weight | [448, 768] | 344064 | 141141 | 0.589783 |
|
| 48 |
+
| 123 | nncf_module.bert.encoder.layer.7.attention.output.dense | NNCFLinear | weight | [768, 448] | 344064 | 135857 | 0.60514 |
|
| 49 |
+
| 127 | nncf_module.bert.encoder.layer.7.intermediate.dense | NNCFLinear | weight | [211, 768] | 162048 | 109376 | 0.32504 |
|
| 50 |
+
| 129 | nncf_module.bert.encoder.layer.7.output.dense | NNCFLinear | weight | [768, 211] | 162048 | 107132 | 0.338887 |
|
| 51 |
+
| 133 | nncf_module.bert.encoder.layer.8.attention.self.query | NNCFLinear | weight | [448, 768] | 344064 | 129219 | 0.624433 |
|
| 52 |
+
| 135 | nncf_module.bert.encoder.layer.8.attention.self.key | NNCFLinear | weight | [448, 768] | 344064 | 130088 | 0.621908 |
|
| 53 |
+
| 137 | nncf_module.bert.encoder.layer.8.attention.self.value | NNCFLinear | weight | [448, 768] | 344064 | 108043 | 0.68598 |
|
| 54 |
+
| 139 | nncf_module.bert.encoder.layer.8.attention.output.dense | NNCFLinear | weight | [768, 448] | 344064 | 103567 | 0.698989 |
|
| 55 |
+
| 143 | nncf_module.bert.encoder.layer.8.intermediate.dense | NNCFLinear | weight | [108, 768] | 82944 | 63183 | 0.238245 |
|
| 56 |
+
| 145 | nncf_module.bert.encoder.layer.8.output.dense | NNCFLinear | weight | [768, 108] | 82944 | 62633 | 0.244876 |
|
| 57 |
+
| 149 | nncf_module.bert.encoder.layer.9.attention.self.query | NNCFLinear | weight | [320, 768] | 245760 | 107216 | 0.563737 |
|
| 58 |
+
| 151 | nncf_module.bert.encoder.layer.9.attention.self.key | NNCFLinear | weight | [320, 768] | 245760 | 101848 | 0.585579 |
|
| 59 |
+
| 153 | nncf_module.bert.encoder.layer.9.attention.self.value | NNCFLinear | weight | [320, 768] | 245760 | 52063 | 0.788155 |
|
| 60 |
+
| 155 | nncf_module.bert.encoder.layer.9.attention.output.dense | NNCFLinear | weight | [768, 320] | 245760 | 53127 | 0.783826 |
|
| 61 |
+
| 159 | nncf_module.bert.encoder.layer.9.intermediate.dense | NNCFLinear | weight | [53, 768] | 40704 | 33339 | 0.18094 |
|
| 62 |
+
| 161 | nncf_module.bert.encoder.layer.9.output.dense | NNCFLinear | weight | [768, 53] | 40704 | 32340 | 0.205483 |
|
| 63 |
+
| 165 | nncf_module.bert.encoder.layer.10.attention.self.query | NNCFLinear | weight | [384, 768] | 294912 | 112357 | 0.619015 |
|
| 64 |
+
| 167 | nncf_module.bert.encoder.layer.10.attention.self.key | NNCFLinear | weight | [384, 768] | 294912 | 109640 | 0.628228 |
|
| 65 |
+
| 169 | nncf_module.bert.encoder.layer.10.attention.self.value | NNCFLinear | weight | [384, 768] | 294912 | 61630 | 0.791022 |
|
| 66 |
+
| 171 | nncf_module.bert.encoder.layer.10.attention.output.dense | NNCFLinear | weight | [768, 384] | 294912 | 63912 | 0.783284 |
|
| 67 |
+
| 175 | nncf_module.bert.encoder.layer.10.intermediate.dense | NNCFLinear | weight | [86, 768] | 66048 | 50252 | 0.239159 |
|
| 68 |
+
| 177 | nncf_module.bert.encoder.layer.10.output.dense | NNCFLinear | weight | [768, 86] | 66048 | 49494 | 0.250636 |
|
| 69 |
+
| 181 | nncf_module.bert.encoder.layer.11.attention.self.query | NNCFLinear | weight | [384, 768] | 294912 | 88056 | 0.701416 |
|
| 70 |
+
| 183 | nncf_module.bert.encoder.layer.11.attention.self.key | NNCFLinear | weight | [384, 768] | 294912 | 85229 | 0.711002 |
|
| 71 |
+
| 185 | nncf_module.bert.encoder.layer.11.attention.self.value | NNCFLinear | weight | [384, 768] | 294912 | 47126 | 0.840203 |
|
| 72 |
+
| 187 | nncf_module.bert.encoder.layer.11.attention.output.dense | NNCFLinear | weight | [768, 384] | 294912 | 49010 | 0.833815 |
|
| 73 |
+
| 191 | nncf_module.bert.encoder.layer.11.intermediate.dense | NNCFLinear | weight | [105, 768] | 80640 | 62069 | 0.230295 |
|
| 74 |
+
| 193 | nncf_module.bert.encoder.layer.11.output.dense | NNCFLinear | weight | [768, 105] | 80640 | 61476 | 0.237649 |
|
XP_onnx_sparsity.csv
ADDED
|
@@ -0,0 +1,77 @@
| 1 |
+
,layer_id,shape,nparam,nnz,sparsity
|
| 2 |
+
0,Constant_15,"[30522, 768]",23440896,23440896,0.0
|
| 3 |
+
1,Constant_23,"[2, 768]",1536,1536,0.0
|
| 4 |
+
2,Constant_35,"[512, 768]",393216,393216,0.0
|
| 5 |
+
3,Constant_61,"[320, 768]",245760,93507,0.61951904296875
|
| 6 |
+
4,Constant_71,"[320, 768]",245760,98224,0.6003255208333333
|
| 7 |
+
5,Constant_91,"[320, 768]",245760,113596,0.5377766927083334
|
| 8 |
+
6,Constant_150,"[768, 320]",245760,117320,0.5226236979166667
|
| 9 |
+
7,Constant_178,"[185, 768]",142080,97047,0.3169552364864865
|
| 10 |
+
8,Constant_196,"[768, 185]",142080,94629,0.3339738175675676
|
| 11 |
+
9,Constant_224,"[320, 768]",245760,118358,0.5184000651041667
|
| 12 |
+
10,Constant_234,"[320, 768]",245760,118132,0.5193196614583333
|
| 13 |
+
11,Constant_254,"[320, 768]",245760,107518,0.5625081380208333
|
| 14 |
+
12,Constant_313,"[768, 320]",245760,111172,0.5476399739583333
|
| 15 |
+
13,Constant_341,"[315, 768]",241920,148727,0.3852223875661376
|
| 16 |
+
14,Constant_359,"[768, 315]",241920,143174,0.40817625661375656
|
| 17 |
+
15,Constant_387,"[576, 768]",442368,162784,0.6320167824074074
|
| 18 |
+
16,Constant_397,"[576, 768]",442368,164797,0.6274662724247686
|
| 19 |
+
17,Constant_417,"[576, 768]",442368,135705,0.6932305230034722
|
| 20 |
+
18,Constant_476,"[768, 576]",442368,138448,0.6870298032407407
|
| 21 |
+
19,Constant_504,"[339, 768]",260352,154043,0.4083279559980334
|
| 22 |
+
20,Constant_522,"[768, 339]",260352,150923,0.42031173180924286
|
| 23 |
+
21,Constant_550,"[576, 768]",442368,170686,0.6141538266782407
|
| 24 |
+
22,Constant_560,"[576, 768]",442368,178480,0.5965350115740741
|
| 25 |
+
23,Constant_580,"[576, 768]",442368,172200,0.6107313368055556
|
| 26 |
+
24,Constant_639,"[768, 576]",442368,169194,0.6175265842013888
|
| 27 |
+
25,Constant_667,"[368, 768]",282624,163234,0.4224340466485508
|
| 28 |
+
26,Constant_685,"[768, 368]",282624,157364,0.44320369112318836
|
| 29 |
+
27,Constant_713,"[576, 768]",442368,176138,0.6018292462384259
|
| 30 |
+
28,Constant_723,"[576, 768]",442368,177351,0.5990871853298612
|
| 31 |
+
29,Constant_743,"[576, 768]",442368,164248,0.6287073206018519
|
| 32 |
+
30,Constant_802,"[768, 576]",442368,159471,0.6395060221354167
|
| 33 |
+
31,Constant_830,"[386, 768]",296448,167744,0.43415371329879104
|
| 34 |
+
32,Constant_848,"[768, 386]",296448,159961,0.4604078961571675
|
| 35 |
+
33,Constant_876,"[384, 768]",294912,114241,0.6126268174913194
|
| 36 |
+
34,Constant_886,"[384, 768]",294912,132821,0.5496249728732638
|
| 37 |
+
35,Constant_906,"[384, 768]",294912,135092,0.5419243706597222
|
| 38 |
+
36,Constant_965,"[768, 384]",294912,132344,0.5512424045138888
|
| 39 |
+
37,Constant_993,"[336, 768]",258048,153822,0.40390159970238093
|
| 40 |
+
38,Constant_1011,"[768, 336]",258048,145684,0.4354383680555556
|
| 41 |
+
39,Constant_1039,"[448, 768]",344064,131784,0.6169782366071428
|
| 42 |
+
40,Constant_1049,"[448, 768]",344064,144539,0.5799066452752977
|
| 43 |
+
41,Constant_1069,"[448, 768]",344064,131107,0.6189458937872023
|
| 44 |
+
42,Constant_1128,"[768, 448]",344064,126145,0.6333676292782738
|
| 45 |
+
43,Constant_1156,"[280, 768]",215040,135219,0.37119140625
|
| 46 |
+
44,Constant_1174,"[768, 280]",215040,131559,0.3882114955357143
|
| 47 |
+
45,Constant_1202,"[448, 768]",344064,132226,0.6156935918898809
|
| 48 |
+
46,Constant_1212,"[448, 768]",344064,152327,0.5572713216145833
|
| 49 |
+
47,Constant_1232,"[448, 768]",344064,141141,0.58978271484375
|
| 50 |
+
48,Constant_1291,"[768, 448]",344064,135857,0.6051403227306548
|
| 51 |
+
49,Constant_1319,"[211, 768]",162048,109376,0.32503949447077407
|
| 52 |
+
50,Constant_1337,"[768, 211]",162048,107132,0.33888724328594
|
| 53 |
+
51,Constant_1365,"[448, 768]",344064,129219,0.6244332449776786
|
| 54 |
+
52,Constant_1375,"[448, 768]",344064,130088,0.6219075520833333
|
| 55 |
+
53,Constant_1395,"[448, 768]",344064,108043,0.6859799339657738
|
| 56 |
+
54,Constant_1454,"[768, 448]",344064,103567,0.6989891415550595
|
| 57 |
+
55,Constant_1482,"[108, 768]",82944,63183,0.2382450810185185
|
| 58 |
+
56,Constant_1500,"[768, 108]",82944,62633,0.24487606095679015
|
| 59 |
+
57,Constant_1528,"[320, 768]",245760,107216,0.5637369791666667
|
| 60 |
+
58,Constant_1538,"[320, 768]",245760,101848,0.5855794270833333
|
| 61 |
+
59,Constant_1558,"[320, 768]",245760,52063,0.7881551106770833
|
| 62 |
+
60,Constant_1617,"[768, 320]",245760,53127,0.78382568359375
|
| 63 |
+
61,Constant_1645,"[53, 768]",40704,33339,0.18094044811320753
|
| 64 |
+
62,Constant_1663,"[768, 53]",40704,32340,0.20548349056603776
|
| 65 |
+
63,Constant_1691,"[384, 768]",294912,112357,0.6190151638454862
|
| 66 |
+
64,Constant_1701,"[384, 768]",294912,109640,0.6282280815972222
|
| 67 |
+
65,Constant_1721,"[384, 768]",294912,61630,0.7910224066840278
|
| 68 |
+
66,Constant_1780,"[768, 384]",294912,63912,0.7832845052083334
|
| 69 |
+
67,Constant_1808,"[86, 768]",66048,50252,0.23915939922480622
|
| 70 |
+
68,Constant_1826,"[768, 86]",66048,49494,0.25063590116279066
|
| 71 |
+
69,Constant_1854,"[384, 768]",294912,88056,0.701416015625
|
| 72 |
+
70,Constant_1864,"[384, 768]",294912,85229,0.7110019259982638
|
| 73 |
+
71,Constant_1884,"[384, 768]",294912,47126,0.8402031792534722
|
| 74 |
+
72,Constant_1943,"[768, 384]",294912,49010,0.8338148328993056
|
| 75 |
+
73,Constant_1971,"[105, 768]",80640,62069,0.23029513888888886
|
| 76 |
+
74,Constant_1989,"[768, 105]",80640,61476,0.2376488095238095
|
| 77 |
+
75,Constant_2017,"[2, 768]",1536,1536,0.0
|
XP_onnx_sparsity.md
ADDED
|
@@ -0,0 +1,78 @@
| 1 |
+
| | layer_id | shape | nparam | nnz | sparsity |
|
| 2 |
+
|---:|:--------------|:-------------|---------:|---------:|-----------:|
|
| 3 |
+
| 0 | Constant_15 | [30522, 768] | 23440896 | 23440896 | 0 |
|
| 4 |
+
| 1 | Constant_23 | [2, 768] | 1536 | 1536 | 0 |
|
| 5 |
+
| 2 | Constant_35 | [512, 768] | 393216 | 393216 | 0 |
|
| 6 |
+
| 3 | Constant_61 | [320, 768] | 245760 | 93507 | 0.619519 |
|
| 7 |
+
| 4 | Constant_71 | [320, 768] | 245760 | 98224 | 0.600326 |
|
| 8 |
+
| 5 | Constant_91 | [320, 768] | 245760 | 113596 | 0.537777 |
|
| 9 |
+
| 6 | Constant_150 | [768, 320] | 245760 | 117320 | 0.522624 |
|
| 10 |
+
| 7 | Constant_178 | [185, 768] | 142080 | 97047 | 0.316955 |
|
| 11 |
+
| 8 | Constant_196 | [768, 185] | 142080 | 94629 | 0.333974 |
|
| 12 |
+
| 9 | Constant_224 | [320, 768] | 245760 | 118358 | 0.5184 |
|
| 13 |
+
| 10 | Constant_234 | [320, 768] | 245760 | 118132 | 0.51932 |
|
| 14 |
+
| 11 | Constant_254 | [320, 768] | 245760 | 107518 | 0.562508 |
|
| 15 |
+
| 12 | Constant_313 | [768, 320] | 245760 | 111172 | 0.54764 |
|
| 16 |
+
| 13 | Constant_341 | [315, 768] | 241920 | 148727 | 0.385222 |
|
| 17 |
+
| 14 | Constant_359 | [768, 315] | 241920 | 143174 | 0.408176 |
|
| 18 |
+
| 15 | Constant_387 | [576, 768] | 442368 | 162784 | 0.632017 |
|
| 19 |
+
| 16 | Constant_397 | [576, 768] | 442368 | 164797 | 0.627466 |
|
| 20 |
+
| 17 | Constant_417 | [576, 768] | 442368 | 135705 | 0.693231 |
|
| 21 |
+
| 18 | Constant_476 | [768, 576] | 442368 | 138448 | 0.68703 |
|
| 22 |
+
| 19 | Constant_504 | [339, 768] | 260352 | 154043 | 0.408328 |
|
| 23 |
+
| 20 | Constant_522 | [768, 339] | 260352 | 150923 | 0.420312 |
|
| 24 |
+
| 21 | Constant_550 | [576, 768] | 442368 | 170686 | 0.614154 |
|
| 25 |
+
| 22 | Constant_560 | [576, 768] | 442368 | 178480 | 0.596535 |
|
| 26 |
+
| 23 | Constant_580 | [576, 768] | 442368 | 172200 | 0.610731 |
|
| 27 |
+
| 24 | Constant_639 | [768, 576] | 442368 | 169194 | 0.617527 |
|
| 28 |
+
| 25 | Constant_667 | [368, 768] | 282624 | 163234 | 0.422434 |
|
| 29 |
+
| 26 | Constant_685 | [768, 368] | 282624 | 157364 | 0.443204 |
|
| 30 |
+
| 27 | Constant_713 | [576, 768] | 442368 | 176138 | 0.601829 |
|
| 31 |
+
| 28 | Constant_723 | [576, 768] | 442368 | 177351 | 0.599087 |
|
| 32 |
+
| 29 | Constant_743 | [576, 768] | 442368 | 164248 | 0.628707 |
|
| 33 |
+
| 30 | Constant_802 | [768, 576] | 442368 | 159471 | 0.639506 |
|
| 34 |
+
| 31 | Constant_830 | [386, 768] | 296448 | 167744 | 0.434154 |
|
| 35 |
+
| 32 | Constant_848 | [768, 386] | 296448 | 159961 | 0.460408 |
|
| 36 |
+
| 33 | Constant_876 | [384, 768] | 294912 | 114241 | 0.612627 |
|
| 37 |
+
| 34 | Constant_886 | [384, 768] | 294912 | 132821 | 0.549625 |
|
| 38 |
+
| 35 | Constant_906 | [384, 768] | 294912 | 135092 | 0.541924 |
|
| 39 |
+
| 36 | Constant_965 | [768, 384] | 294912 | 132344 | 0.551242 |
|
| 40 |
+
| 37 | Constant_993 | [336, 768] | 258048 | 153822 | 0.403902 |
|
| 41 |
+
| 38 | Constant_1011 | [768, 336] | 258048 | 145684 | 0.435438 |
|
| 42 |
+
| 39 | Constant_1039 | [448, 768] | 344064 | 131784 | 0.616978 |
|
| 43 |
+
| 40 | Constant_1049 | [448, 768] | 344064 | 144539 | 0.579907 |
|
| 44 |
+
| 41 | Constant_1069 | [448, 768] | 344064 | 131107 | 0.618946 |
|
| 45 |
+
| 42 | Constant_1128 | [768, 448] | 344064 | 126145 | 0.633368 |
|
| 46 |
+
| 43 | Constant_1156 | [280, 768] | 215040 | 135219 | 0.371191 |
|
| 47 |
+
| 44 | Constant_1174 | [768, 280] | 215040 | 131559 | 0.388211 |
|
| 48 |
+
| 45 | Constant_1202 | [448, 768] | 344064 | 132226 | 0.615694 |
|
| 49 |
+
| 46 | Constant_1212 | [448, 768] | 344064 | 152327 | 0.557271 |
|
| 50 |
+
| 47 | Constant_1232 | [448, 768] | 344064 | 141141 | 0.589783 |
|
| 51 |
+
| 48 | Constant_1291 | [768, 448] | 344064 | 135857 | 0.60514 |
|
| 52 |
+
| 49 | Constant_1319 | [211, 768] | 162048 | 109376 | 0.325039 |
|
| 53 |
+
| 50 | Constant_1337 | [768, 211] | 162048 | 107132 | 0.338887 |
|
| 54 |
+
| 51 | Constant_1365 | [448, 768] | 344064 | 129219 | 0.624433 |
|
| 55 |
+
| 52 | Constant_1375 | [448, 768] | 344064 | 130088 | 0.621908 |
|
| 56 |
+
| 53 | Constant_1395 | [448, 768] | 344064 | 108043 | 0.68598 |
|
| 57 |
+
| 54 | Constant_1454 | [768, 448] | 344064 | 103567 | 0.698989 |
|
| 58 |
+
| 55 | Constant_1482 | [108, 768] | 82944 | 63183 | 0.238245 |
|
| 59 |
+
| 56 | Constant_1500 | [768, 108] | 82944 | 62633 | 0.244876 |
|
| 60 |
+
| 57 | Constant_1528 | [320, 768] | 245760 | 107216 | 0.563737 |
|
| 61 |
+
| 58 | Constant_1538 | [320, 768] | 245760 | 101848 | 0.585579 |
|
| 62 |
+
| 59 | Constant_1558 | [320, 768] | 245760 | 52063 | 0.788155 |
|
| 63 |
+
| 60 | Constant_1617 | [768, 320] | 245760 | 53127 | 0.783826 |
|
| 64 |
+
| 61 | Constant_1645 | [53, 768] | 40704 | 33339 | 0.18094 |
|
| 65 |
+
| 62 | Constant_1663 | [768, 53] | 40704 | 32340 | 0.205483 |
|
| 66 |
+
| 63 | Constant_1691 | [384, 768] | 294912 | 112357 | 0.619015 |
|
| 67 |
+
| 64 | Constant_1701 | [384, 768] | 294912 | 109640 | 0.628228 |
|
| 68 |
+
| 65 | Constant_1721 | [384, 768] | 294912 | 61630 | 0.791022 |
|
| 69 |
+
| 66 | Constant_1780 | [768, 384] | 294912 | 63912 | 0.783285 |
|
| 70 |
+
| 67 | Constant_1808 | [86, 768] | 66048 | 50252 | 0.239159 |
|
| 71 |
+
| 68 | Constant_1826 | [768, 86] | 66048 | 49494 | 0.250636 |
|
| 72 |
+
| 69 | Constant_1854 | [384, 768] | 294912 | 88056 | 0.701416 |
|
| 73 |
+
| 70 | Constant_1864 | [384, 768] | 294912 | 85229 | 0.711002 |
|
| 74 |
+
| 71 | Constant_1884 | [384, 768] | 294912 | 47126 | 0.840203 |
|
| 75 |
+
| 72 | Constant_1943 | [768, 384] | 294912 | 49010 | 0.833815 |
|
| 76 |
+
| 73 | Constant_1971 | [105, 768] | 80640 | 62069 | 0.230295 |
|
| 77 |
+
| 74 | Constant_1989 | [768, 105] | 80640 | 61476 | 0.237649 |
|
| 78 |
+
| 75 | Constant_2017 | [2, 768] | 1536 | 1536 | 0 |
|
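The ONNX-side sparsity collateral above can be cross-checked directly against the exported model; a minimal sketch follows (it assumes the weight tensors live in the graph initializers, which can differ depending on how the model was exported):

```python
import numpy as np
import onnx
from onnx import numpy_helper

# Sketch: count non-zero entries of every 2-D weight tensor in the exported
# ONNX model, mirroring the XP_onnx_sparsity collateral above.
model = onnx.load(
    "bert-base-squadv1-block-pruning-hybrid-filled-lt-nncf-57.92sparse-qat-lt.onnx"
)

for idx, init in enumerate(model.graph.initializer):
    arr = numpy_helper.to_array(init)
    if arr.ndim != 2:
        continue  # the collateral only tracks 2-D weight matrices
    nnz = int(np.count_nonzero(arr))
    print(idx, init.name, list(arr.shape), arr.size, nnz, 1.0 - nnz / arr.size)
```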
all_results.json
ADDED
@@ -0,0 +1,5 @@
{
    "eval_exact_match": 80.45411542100284,
    "eval_f1": 87.68317120239874,
    "eval_samples": 10784
}
bert-base-squadv1-block-pruning-hybrid-filled-lt-nncf-57.92sparse-qat-lt.onnx
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:13940239ca16624231ca674113e4f739936950df8c5dd8dbd7439d55a627d723
size 176517681
checkpoint-21750/config.json
ADDED
|
@@ -0,0 +1,115 @@
| 1 |
+
{
|
| 2 |
+
"_name_or_path": "/data1/vchua/tld-poc/bert-base-squadv1-local-hybrid-compiled",
|
| 3 |
+
"architectures": [
|
| 4 |
+
"NNCFNetwork"
|
| 5 |
+
],
|
| 6 |
+
"attention_probs_dropout_prob": 0.1,
|
| 7 |
+
"classifier_dropout": null,
|
| 8 |
+
"gradient_checkpointing": false,
|
| 9 |
+
"hidden_act": "gelu",
|
| 10 |
+
"hidden_dropout_prob": 0.1,
|
| 11 |
+
"hidden_size": 768,
|
| 12 |
+
"initializer_range": 0.02,
|
| 13 |
+
"intermediate_size": 3072,
|
| 14 |
+
"layer_norm_eps": 1e-12,
|
| 15 |
+
"max_position_embeddings": 512,
|
| 16 |
+
"model_type": "bert",
|
| 17 |
+
"num_attention_heads": 12,
|
| 18 |
+
"num_hidden_layers": 12,
|
| 19 |
+
"pad_token_id": 0,
|
| 20 |
+
"position_embedding_type": "absolute",
|
| 21 |
+
"pruned_heads": {
|
| 22 |
+
"0": [
|
| 23 |
+
0,
|
| 24 |
+
2,
|
| 25 |
+
4,
|
| 26 |
+
5,
|
| 27 |
+
6,
|
| 28 |
+
7,
|
| 29 |
+
11
|
| 30 |
+
],
|
| 31 |
+
"1": [
|
| 32 |
+
0,
|
| 33 |
+
2,
|
| 34 |
+
3,
|
| 35 |
+
5,
|
| 36 |
+
6,
|
| 37 |
+
7,
|
| 38 |
+
8
|
| 39 |
+
],
|
| 40 |
+
"2": [
|
| 41 |
+
8,
|
| 42 |
+
4,
|
| 43 |
+
7
|
| 44 |
+
],
|
| 45 |
+
"3": [
|
| 46 |
+
2,
|
| 47 |
+
4,
|
| 48 |
+
6
|
| 49 |
+
],
|
| 50 |
+
"4": [
|
| 51 |
+
1,
|
| 52 |
+
2,
|
| 53 |
+
11
|
| 54 |
+
],
|
| 55 |
+
"5": [
|
| 56 |
+
1,
|
| 57 |
+
2,
|
| 58 |
+
5,
|
| 59 |
+
6,
|
| 60 |
+
7,
|
| 61 |
+
11
|
| 62 |
+
],
|
| 63 |
+
"6": [
|
| 64 |
+
0,
|
| 65 |
+
2,
|
| 66 |
+
3,
|
| 67 |
+
7,
|
| 68 |
+
10
|
| 69 |
+
],
|
| 70 |
+
"7": [
|
| 71 |
+
1,
|
| 72 |
+
3,
|
| 73 |
+
6,
|
| 74 |
+
7,
|
| 75 |
+
11
|
| 76 |
+
],
|
| 77 |
+
"8": [
|
| 78 |
+
0,
|
| 79 |
+
3,
|
| 80 |
+
4,
|
| 81 |
+
5,
|
| 82 |
+
8
|
| 83 |
+
],
|
| 84 |
+
"9": [
|
| 85 |
+
1,
|
| 86 |
+
3,
|
| 87 |
+
4,
|
| 88 |
+
5,
|
| 89 |
+
7,
|
| 90 |
+
9,
|
| 91 |
+
10
|
| 92 |
+
],
|
| 93 |
+
"10": [
|
| 94 |
+
1,
|
| 95 |
+
4,
|
| 96 |
+
5,
|
| 97 |
+
6,
|
| 98 |
+
7,
|
| 99 |
+
8
|
| 100 |
+
],
|
| 101 |
+
"11": [
|
| 102 |
+
4,
|
| 103 |
+
5,
|
| 104 |
+
7,
|
| 105 |
+
8,
|
| 106 |
+
10,
|
| 107 |
+
11
|
| 108 |
+
]
|
| 109 |
+
},
|
| 110 |
+
"torch_dtype": "float32",
|
| 111 |
+
"transformers_version": "4.9.1",
|
| 112 |
+
"type_vocab_size": 2,
|
| 113 |
+
"use_cache": true,
|
| 114 |
+
"vocab_size": 30522
|
| 115 |
+
}
|
checkpoint-21750/optimizer.pt
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:502d61b24766935d2d0b196d147d27f1f98bb63acb74c00544ff492f201d0276
size 353016429
checkpoint-21750/pytorch_model.bin
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:aa3e31744b5ff287c4d8e872b1a0c219ff07abacf5f7eb4db37f72c9c75392e5
size 257716945
checkpoint-21750/rng_state.pth
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:a94692d497d62f266652bbbc4a1a07abeff70560b00b87755fc4710f1be4ee3f
size 14503
checkpoint-21750/scheduler.pt
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:e602f6bb85fa11ee4dd91991bfc4d199ca92f7b80f61a8a792f4315172ceab0b
size 623
checkpoint-21750/special_tokens_map.json
ADDED
@@ -0,0 +1 @@
{"unk_token": "[UNK]", "sep_token": "[SEP]", "pad_token": "[PAD]", "cls_token": "[CLS]", "mask_token": "[MASK]"}
checkpoint-21750/tokenizer.json
ADDED
The diff for this file is too large to render. See raw diff.
checkpoint-21750/tokenizer_config.json
ADDED
@@ -0,0 +1 @@
{"do_lower_case": true, "unk_token": "[UNK]", "sep_token": "[SEP]", "pad_token": "[PAD]", "cls_token": "[CLS]", "mask_token": "[MASK]", "tokenize_chinese_chars": true, "strip_accents": null, "model_max_length": 512, "special_tokens_map_file": null, "name_or_path": "/data1/vchua/tld-poc/bert-base-squadv1-local-hybrid-compiled", "tokenizer_class": "BertTokenizer"}
checkpoint-21750/trainer_state.json
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:08d9f173574b7831a17f3e025503da75a134a1ea02db1d3b657468f835161f03
size 11398921
checkpoint-21750/training_args.bin
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:75b97d5ad5c00837e2ab0e008d14c563eb384f79123ee707c385a7929abfb8d9
size 3439
checkpoint-21750/vocab.txt
ADDED
The diff for this file is too large to render. See raw diff.
compressed_graph.dot
ADDED
The diff for this file is too large to render. See raw diff.
config.json
ADDED
|
@@ -0,0 +1,115 @@
| 1 |
+
{
|
| 2 |
+
"_name_or_path": "/data1/vchua/tld-poc/bert-base-squadv1-local-hybrid-compiled",
|
| 3 |
+
"architectures": [
|
| 4 |
+
"NNCFNetwork"
|
| 5 |
+
],
|
| 6 |
+
"attention_probs_dropout_prob": 0.1,
|
| 7 |
+
"classifier_dropout": null,
|
| 8 |
+
"gradient_checkpointing": false,
|
| 9 |
+
"hidden_act": "gelu",
|
| 10 |
+
"hidden_dropout_prob": 0.1,
|
| 11 |
+
"hidden_size": 768,
|
| 12 |
+
"initializer_range": 0.02,
|
| 13 |
+
"intermediate_size": 3072,
|
| 14 |
+
"layer_norm_eps": 1e-12,
|
| 15 |
+
"max_position_embeddings": 512,
|
| 16 |
+
"model_type": "bert",
|
| 17 |
+
"num_attention_heads": 12,
|
| 18 |
+
"num_hidden_layers": 12,
|
| 19 |
+
"pad_token_id": 0,
|
| 20 |
+
"position_embedding_type": "absolute",
|
| 21 |
+
"pruned_heads": {
|
| 22 |
+
"0": [
|
| 23 |
+
0,
|
| 24 |
+
2,
|
| 25 |
+
4,
|
| 26 |
+
5,
|
| 27 |
+
6,
|
| 28 |
+
7,
|
| 29 |
+
11
|
| 30 |
+
],
|
| 31 |
+
"1": [
|
| 32 |
+
0,
|
| 33 |
+
2,
|
| 34 |
+
3,
|
| 35 |
+
5,
|
| 36 |
+
6,
|
| 37 |
+
7,
|
| 38 |
+
8
|
| 39 |
+
],
|
| 40 |
+
"2": [
|
| 41 |
+
8,
|
| 42 |
+
4,
|
| 43 |
+
7
|
| 44 |
+
],
|
| 45 |
+
"3": [
|
| 46 |
+
2,
|
| 47 |
+
4,
|
| 48 |
+
6
|
| 49 |
+
],
|
| 50 |
+
"4": [
|
| 51 |
+
1,
|
| 52 |
+
2,
|
| 53 |
+
11
|
| 54 |
+
],
|
| 55 |
+
"5": [
|
| 56 |
+
1,
|
| 57 |
+
2,
|
| 58 |
+
5,
|
| 59 |
+
6,
|
| 60 |
+
7,
|
| 61 |
+
11
|
| 62 |
+
],
|
| 63 |
+
"6": [
|
| 64 |
+
0,
|
| 65 |
+
2,
|
| 66 |
+
3,
|
| 67 |
+
7,
|
| 68 |
+
10
|
| 69 |
+
],
|
| 70 |
+
"7": [
|
| 71 |
+
1,
|
| 72 |
+
3,
|
| 73 |
+
6,
|
| 74 |
+
7,
|
| 75 |
+
11
|
| 76 |
+
],
|
| 77 |
+
"8": [
|
| 78 |
+
0,
|
| 79 |
+
3,
|
| 80 |
+
4,
|
| 81 |
+
5,
|
| 82 |
+
8
|
| 83 |
+
],
|
| 84 |
+
"9": [
|
| 85 |
+
1,
|
| 86 |
+
3,
|
| 87 |
+
4,
|
| 88 |
+
5,
|
| 89 |
+
7,
|
| 90 |
+
9,
|
| 91 |
+
10
|
| 92 |
+
],
|
| 93 |
+
"10": [
|
| 94 |
+
1,
|
| 95 |
+
4,
|
| 96 |
+
5,
|
| 97 |
+
6,
|
| 98 |
+
7,
|
| 99 |
+
8
|
| 100 |
+
],
|
| 101 |
+
"11": [
|
| 102 |
+
4,
|
| 103 |
+
5,
|
| 104 |
+
7,
|
| 105 |
+
8,
|
| 106 |
+
10,
|
| 107 |
+
11
|
| 108 |
+
]
|
| 109 |
+
},
|
| 110 |
+
"torch_dtype": "float32",
|
| 111 |
+
"transformers_version": "4.9.1",
|
| 112 |
+
"type_vocab_size": 2,
|
| 113 |
+
"use_cache": true,
|
| 114 |
+
"vocab_size": 30522
|
| 115 |
+
}
|
eval_XP_results.json
ADDED
@@ -0,0 +1,5 @@
{
    "eval_exact_match": 80.45411542100284,
    "eval_f1": 87.68317120239874,
    "eval_samples": 10784
}
eval_nbest_predictions.json
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:970be197c2360cc3568b9b8f4eafc22e81ec08240c64b74983e107e59763e0c7
size 48944660
eval_predictions.json
ADDED
The diff for this file is too large to render. See raw diff.
nncf_bert_squad_sparsity.json
ADDED
@@ -0,0 +1,72 @@
{
    "input_info": [
        {
            "sample_size": [1, 384],
            "type": "long"
        },
        {
            "sample_size": [1, 384],
            "type": "long"
        },
        {
            "sample_size": [1, 384],
            "type": "long"
        }
    ],
    "compression":
    [
        // {
        //     "algorithm": "knowledge_distillation",
        //     "type": "softmax"
        // },
        {
            "algorithm": "magnitude_sparsity",
            "sparsity_init": 0.579176,
            "params": {
                "schedule": "multistep",
                "multistep_steps": [
                    2,
                    4,
                    6,
                    8
                ],
                "multistep_sparsity_levels": [
                    0.579176,
                    0.579176,
                    0.579176,
                    0.579176,
                    0.579176
                ]
            },
            "ignored_scopes": ["{re}.*NNCFEmbedding", "{re}.*qa_outputs*"]
        },
        {
            "algorithm": "quantization",
            "initializer": {
                "range": {
                    "num_init_samples": 32,
                    "type": "percentile",
                    "params":
                    {
                        "min_percentile": 0.01,
                        "max_percentile": 99.99
                    }
                },

                "batchnorm_adaptation": {
                    "num_bn_adaptation_samples": 200
                }
            },
            "activations":
            {
                "mode": "symmetric"
            },
            "weights":
            {
                "mode": "symmetric",
                "signed": true,
                "per_channel": false
            }
        }
    ]
}
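For reference, a rough sketch of how a config like the one above is consumed on the PyTorch side; the entry points follow the NNCF 2.x API, `calibration_dataloader` is a hypothetical placeholder, and this is not the exact QAT recipe used for this checkpoint:

```python
from transformers import AutoModelForQuestionAnswering
from nncf import NNCFConfig
from nncf.torch import create_compressed_model, register_default_init_args

# Load the compression recipe shown above (magnitude sparsity + INT8 quantization).
nncf_config = NNCFConfig.from_json("nncf_bert_squad_sparsity.json")

model = AutoModelForQuestionAnswering.from_pretrained(
    "vuiseng9/bert-base-squadv1-block-pruning-hybrid-filled-lt"
)

# A representative calibration dataloader (tokenized SQuAD batches shaped like
# the "input_info" entries, i.e. [1, 384] long tensors) is normally registered
# first so that the quantizer-range and BN-adaptation initializers can run:
# nncf_config = register_default_init_args(nncf_config, calibration_dataloader)

# Wrap the model: sparsity masks and fake-quantize operations are inserted here.
# The wrapped model is then fine-tuned (QAT) and finally exported, e.g. via
# compression_ctrl.export_model("model.onnx").
compression_ctrl, compressed_model = create_compressed_model(model, nncf_config)
```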
original_graph.dot
ADDED
The diff for this file is too large to render. See raw diff.
pytorch_model.bin
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:6d70d29d47abbd4ae94ca7076c3720514267903aafe74d035c2e62b6018c7058
size 257716945
special_tokens_map.json
ADDED
@@ -0,0 +1 @@
{"unk_token": "[UNK]", "sep_token": "[SEP]", "pad_token": "[PAD]", "cls_token": "[CLS]", "mask_token": "[MASK]"}
tokenizer.json
ADDED
The diff for this file is too large to render. See raw diff.
tokenizer_config.json
ADDED
@@ -0,0 +1 @@
{"do_lower_case": true, "unk_token": "[UNK]", "sep_token": "[SEP]", "pad_token": "[PAD]", "cls_token": "[CLS]", "mask_token": "[MASK]", "tokenize_chinese_chars": true, "strip_accents": null, "model_max_length": 512, "special_tokens_map_file": null, "name_or_path": "/data1/vchua/tld-poc/bert-base-squadv1-local-hybrid-compiled", "tokenizer_class": "BertTokenizer"}
train_results.json
ADDED
@@ -0,0 +1,8 @@
{
    "epoch": 5.0,
    "train_loss": 0.2873655035744389,
    "train_runtime": 48502.8217,
    "train_samples": 88524,
    "train_samples_per_second": 9.126,
    "train_steps_per_second": 0.57
}
trainer_state.json
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:e07609eac6a5d7f0612d1b0669bb02a31a536f9401b0de7b5e57fb90c2a16d09
size 14501803
training_args.bin
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:75b97d5ad5c00837e2ab0e008d14c563eb384f79123ee707c385a7929abfb8d9
size 3439
vocab.txt
ADDED
The diff for this file is too large to render. See raw diff.