Jia-ao committed on
Commit
987dc04
·
verified ·
1 Parent(s): 623472f

Add LoRA adapter and tokenizer

Browse files
Files changed (3) hide show
  1. README.md +2 -2
  2. adapter_config.json +5 -5
  3. adapter_model.safetensors +2 -2
README.md CHANGED
@@ -1,9 +1,9 @@
1
  ---
2
- base_model: unsloth/qwen2.5-coder-1.5b-instruct-bnb-4bit
3
  library_name: peft
4
  pipeline_tag: text-generation
5
  tags:
6
- - base_model:adapter:unsloth/qwen2.5-coder-1.5b-instruct-bnb-4bit
7
  - lora
8
  - sft
9
  - transformers
 
1
  ---
2
+ base_model: unsloth/qwen2.5-coder-7b-instruct-bnb-4bit
3
  library_name: peft
4
  pipeline_tag: text-generation
5
  tags:
6
+ - base_model:adapter:unsloth/qwen2.5-coder-7b-instruct-bnb-4bit
7
  - lora
8
  - sft
9
  - transformers
adapter_config.json CHANGED
@@ -5,7 +5,7 @@
5
  "parent_library": "transformers.models.qwen2.modeling_qwen2",
6
  "unsloth_fixed": true
7
  },
8
- "base_model_name_or_path": "unsloth/qwen2.5-coder-1.5b-instruct-bnb-4bit",
9
  "bias": "none",
10
  "corda_config": null,
11
  "eva_config": null,
@@ -30,12 +30,12 @@
30
  "revision": null,
31
  "target_modules": [
32
  "up_proj",
33
- "gate_proj",
34
- "k_proj",
35
- "q_proj",
36
  "v_proj",
 
 
37
  "o_proj",
38
- "down_proj"
 
39
  ],
40
  "target_parameters": null,
41
  "task_type": "CAUSAL_LM",
 
5
  "parent_library": "transformers.models.qwen2.modeling_qwen2",
6
  "unsloth_fixed": true
7
  },
8
+ "base_model_name_or_path": "unsloth/qwen2.5-coder-7b-instruct-bnb-4bit",
9
  "bias": "none",
10
  "corda_config": null,
11
  "eva_config": null,
 
30
  "revision": null,
31
  "target_modules": [
32
  "up_proj",
 
 
 
33
  "v_proj",
34
+ "q_proj",
35
+ "k_proj",
36
  "o_proj",
37
+ "down_proj",
38
+ "gate_proj"
39
  ],
40
  "target_parameters": null,
41
  "task_type": "CAUSAL_LM",
adapter_model.safetensors CHANGED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:d508b0b9a36e292e3217e05da84a5d0c30640b4669a29911aa153cb177a56079
3
- size 73911112
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:d3f68959e084202d27b9bcca6a5d8705a9dd23a78471e99f2d8cabc77590826b
3
+ size 161533192