appy1234 committed on
Commit
11e6a83
·
verified ·
1 Parent(s): 89f6bfa

Upload Phi3ForCausalLM

Browse files
Files changed (2) hide show
  1. config.json +4 -4
  2. generation_config.json +1 -1
config.json CHANGED
@@ -5,9 +5,9 @@
5
  "attention_bias": false,
6
  "attention_dropout": 0.0,
7
  "auto_map": {
8
- "AutoConfig": "microsoft/Phi-4-mini-instruct--configuration_phi3.Phi3Config",
9
- "AutoModelForCausalLM": "microsoft/Phi-4-mini-instruct--modeling_phi3.Phi3ForCausalLM",
10
- "AutoTokenizer": "microsoft/Phi-4-mini-instruct--Xenova/gpt-4o"
11
  },
12
  "bos_token_id": 199999,
13
  "embd_pdrop": 0.0,
@@ -182,7 +182,7 @@
182
  "sliding_window": 262144,
183
  "tie_word_embeddings": true,
184
  "torch_dtype": "bfloat16",
185
- "transformers_version": "4.52.4",
186
  "use_cache": true,
187
  "vocab_size": 200064
188
  }
 
5
  "attention_bias": false,
6
  "attention_dropout": 0.0,
7
  "auto_map": {
8
+ "AutoConfig": "configuration_phi3.Phi3Config",
9
+ "AutoModelForCausalLM": "modeling_phi3.Phi3ForCausalLM",
10
+ "AutoTokenizer": "Xenova/gpt-4o"
11
  },
12
  "bos_token_id": 199999,
13
  "embd_pdrop": 0.0,
 
182
  "sliding_window": 262144,
183
  "tie_word_embeddings": true,
184
  "torch_dtype": "bfloat16",
185
+ "transformers_version": "4.53.0.dev0",
186
  "use_cache": true,
187
  "vocab_size": 200064
188
  }
generation_config.json CHANGED
@@ -6,5 +6,5 @@
6
  199999
7
  ],
8
  "pad_token_id": 199999,
9
- "transformers_version": "4.52.4"
10
  }
 
6
  199999
7
  ],
8
  "pad_token_id": 199999,
9
+ "transformers_version": "4.53.0.dev0"
10
  }