Upload Phi3ForCausalLM
- config.json +4 -4
- generation_config.json +1 -1
config.json CHANGED
@@ -5,9 +5,9 @@
   "attention_bias": false,
   "attention_dropout": 0.0,
   "auto_map": {
-    "AutoConfig": "
-    "AutoModelForCausalLM": "
-    "AutoTokenizer": "
+    "AutoConfig": "configuration_phi3.Phi3Config",
+    "AutoModelForCausalLM": "modeling_phi3.Phi3ForCausalLM",
+    "AutoTokenizer": "Xenova/gpt-4o"
   },
   "bos_token_id": 199999,
   "embd_pdrop": 0.0,
@@ -182,7 +182,7 @@
   "sliding_window": 262144,
   "tie_word_embeddings": true,
   "torch_dtype": "bfloat16",
-  "transformers_version": "4.
+  "transformers_version": "4.53.0.dev0",
   "use_cache": true,
   "vocab_size": 200064
 }
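The new auto_map entries wire the repo's bundled configuration_phi3.py and modeling_phi3.py into the transformers Auto classes. Below is a minimal sketch of how these entries are resolved at load time; the repo id is a placeholder, since the actual repository is not named in this commit.

```python
# Minimal sketch of how the auto_map entries above are resolved.
from transformers import AutoConfig, AutoModelForCausalLM, AutoTokenizer

repo = "your-org/your-phi3-repo"  # hypothetical; the commit does not name the repo

# trust_remote_code=True tells transformers to import the custom classes
# named in auto_map (configuration_phi3.Phi3Config and
# modeling_phi3.Phi3ForCausalLM) from the repo's own Python files.
config = AutoConfig.from_pretrained(repo, trust_remote_code=True)
model = AutoModelForCausalLM.from_pretrained(repo, trust_remote_code=True)

# The AutoTokenizer entry is a hub repo id ("Xenova/gpt-4o") rather than a
# module path, suggesting the tokenizer files mirror that repo.
tokenizer = AutoTokenizer.from_pretrained(repo, trust_remote_code=True)
```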
generation_config.json CHANGED
@@ -6,5 +6,5 @@
     199999
   ],
   "pad_token_id": 199999,
-  "transformers_version": "4.
+  "transformers_version": "4.53.0.dev0"
 }
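For completeness, a hedged sketch of reading these generation defaults back, reusing the same placeholder repo id as above:

```python
# Minimal sketch: load the generation defaults shown above.
from transformers import GenerationConfig

gen_config = GenerationConfig.from_pretrained("your-org/your-phi3-repo")  # hypothetical repo id

# Token id 199999 doubles as an eos token and the pad token, matching
# bos_token_id 199999 in config.json.
print(gen_config.eos_token_id)  # a list whose last entry is 199999
print(gen_config.pad_token_id)  # 199999
```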