suriya7 committed
Commit d64e992 · verified · 1 Parent(s): 4eb3214

(Trained with Unsloth)

added_tokens.json CHANGED
@@ -1,7 +1,6 @@
 {
   "</tool_call>": 151658,
   "<tool_call>": 151657,
-  "<|PAD_TOKEN|>": 151665,
   "<|box_end|>": 151649,
   "<|box_start|>": 151648,
   "<|endoftext|>": 151643,
config.json CHANGED
@@ -1,10 +1,9 @@
 {
-  "_name_or_path": "unsloth/qwen2.5-0.5b-instruct-bnb-4bit",
+  "_name_or_path": "unsloth/qwen2.5-0.5b-instruct-unsloth-bnb-4bit",
   "architectures": [
     "Qwen2ForCausalLM"
   ],
   "attention_dropout": 0.0,
-  "bos_token_id": 151643,
   "eos_token_id": 151645,
   "hidden_act": "silu",
   "hidden_size": 896,
@@ -16,7 +15,7 @@
   "num_attention_heads": 14,
   "num_hidden_layers": 24,
   "num_key_value_heads": 2,
-  "pad_token_id": 151665,
+  "pad_token_id": 151654,
   "rms_norm_eps": 1e-06,
   "rope_scaling": null,
   "rope_theta": 1000000.0,
@@ -25,7 +24,7 @@
   "torch_dtype": "bfloat16",
   "transformers_version": "4.47.1",
   "unsloth_fixed": true,
-  "unsloth_version": "2024.12.12",
+  "unsloth_version": "2025.3.19",
   "use_cache": true,
   "use_sliding_window": false,
   "vocab_size": 151936
generation_config.json CHANGED
@@ -6,7 +6,7 @@
     151643
   ],
   "max_length": 32768,
-  "pad_token_id": 151665,
+  "pad_token_id": 151654,
   "repetition_penalty": 1.1,
   "temperature": 0.7,
   "top_k": 20,
model.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:faf6a6e4c2aae782dab5bd8a155d7178a2ff6dbda1c8beb33b27ddcee4c2a714
+oid sha256:13f3ce2c6607e6f4f928cf12309c34fd21f836720637233490d7fc0118147f98
 size 988097824
special_tokens_map.json CHANGED
@@ -22,7 +22,7 @@
     "single_word": false
   },
   "pad_token": {
-    "content": "<|PAD_TOKEN|>",
+    "content": "<|vision_pad|>",
     "lstrip": false,
     "normalized": false,
     "rstrip": false,
tokenizer.json CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:fab42efe8d17406525a9154b728cf9e957629a8ed7ce997770efdd71128c6a1a
-size 11422086
+oid sha256:9c5ae00e602b8860cbd784ba82a8aa14e8feecec692e7076590d014d7b7fdafa
+size 11421896
tokenizer_config.json CHANGED
@@ -177,14 +177,6 @@
       "rstrip": false,
       "single_word": false,
       "special": false
-    },
-    "151665": {
-      "content": "<|PAD_TOKEN|>",
-      "lstrip": false,
-      "normalized": false,
-      "rstrip": false,
-      "single_word": false,
-      "special": true
     }
   },
   "additional_special_tokens": [
@@ -209,7 +201,7 @@
   "errors": "replace",
   "extra_special_tokens": {},
   "model_max_length": 32768,
-  "pad_token": "<|PAD_TOKEN|>",
+  "pad_token": "<|vision_pad|>",
   "padding_side": "left",
   "split_special_tokens": false,
   "tokenizer_class": "Qwen2Tokenizer",