zuhri025 committed
Commit 1f69a10 · verified · 1 Parent(s): eb74f2c

Upload checkpoint-17000
checkpoint-17000/config.json ADDED
@@ -0,0 +1,47 @@
+ {
+   "architectures": [
+     "Qwen3ForCausalLM"
+   ],
+   "attention_bias": false,
+   "attention_dropout": 0.0,
+   "bos_token_id": 2,
+   "dtype": "float32",
+   "eos_token_id": 3,
+   "head_dim": 64,
+   "hidden_act": "silu",
+   "hidden_size": 512,
+   "initializer_range": 0.02,
+   "intermediate_size": 2048,
+   "layer_types": [
+     "full_attention",
+     "full_attention",
+     "full_attention",
+     "full_attention",
+     "full_attention",
+     "full_attention",
+     "full_attention",
+     "full_attention",
+     "full_attention",
+     "full_attention",
+     "full_attention",
+     "full_attention"
+   ],
+   "max_position_embeddings": 4096,
+   "max_window_layers": 28,
+   "model_type": "qwen3",
+   "num_attention_heads": 8,
+   "num_hidden_layers": 12,
+   "num_key_value_heads": 2,
+   "pad_token_id": 0,
+   "rms_norm_eps": 1e-06,
+   "rope_parameters": {
+     "rope_theta": 20000,
+     "rope_type": "default"
+   },
+   "sliding_window": null,
+   "tie_word_embeddings": true,
+   "transformers_version": "5.0.0",
+   "use_cache": false,
+   "use_sliding_window": false,
+   "vocab_size": 62813
+ }
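The config describes a small Qwen3-style causal LM: 12 layers, hidden size 512, 8 query heads sharing 2 KV heads (GQA), tied embeddings, and a 62,813-token vocabulary. A minimal sketch of loading it and estimating the parameter count it implies, assuming the checkpoint directory is available locally and an installed transformers release supports the qwen3 architecture:

    from transformers import AutoConfig

    cfg = AutoConfig.from_pretrained("checkpoint-17000")  # local path assumed

    # Parameter count implied by the config (GQA projections, tied
    # embeddings; norm weights are negligible and omitted):
    h, d = cfg.hidden_size, cfg.head_dim
    q_out = cfg.num_attention_heads * d            # 8 * 64 = 512
    kv_out = cfg.num_key_value_heads * d           # 2 * 64 = 128
    attn = h * q_out + 2 * h * kv_out + q_out * h  # q, k, v, o projections
    mlp = 3 * h * cfg.intermediate_size            # gate, up, down
    embed = cfg.vocab_size * h                     # shared in/out embedding
    total = embed + cfg.num_hidden_layers * (attn + mlp)
    print(f"~{total / 1e6:.1f}M params")           # ~77.8M; at float32 (4 B
                                                   # each) this matches the
                                                   # ~311 MB model.safetensors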
checkpoint-17000/generation_config.json ADDED
@@ -0,0 +1,10 @@
+ {
+   "_from_model_config": true,
+   "bos_token_id": 2,
+   "eos_token_id": 3,
+   "output_attentions": false,
+   "output_hidden_states": false,
+   "pad_token_id": 0,
+   "transformers_version": "5.0.0",
+   "use_cache": false
+ }
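generation_config.json mirrors the special-token IDs from the model config. Note that use_cache is false here, which reflects the training setup; for inference the KV cache is normally re-enabled. A minimal, hypothetical usage sketch (local checkpoint path and placeholder prompt assumed):

    from transformers import AutoModelForCausalLM, AutoTokenizer

    tok = AutoTokenizer.from_pretrained("checkpoint-17000")
    model = AutoModelForCausalLM.from_pretrained("checkpoint-17000")

    inputs = tok("Example prompt", return_tensors="pt")
    out = model.generate(**inputs, max_new_tokens=32, use_cache=True)
    print(tok.decode(out[0], skip_special_tokens=True))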
checkpoint-17000/metadata.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:6ababc2f0524f99582bd40fdb52fd0083320656f85cbf08d6adbbd161cf3870b
+ size 1331
checkpoint-17000/model.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:8216831f7da56d993e134959d39bea8349a3650efbf98b4b138c9cbbe207aee2
+ size 311165392
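The three lines above are a Git LFS pointer, not the weights themselves; the ~311 MB safetensors file is fetched on git lfs pull (or via huggingface_hub). A downloaded copy can be checked against the recorded oid, for example:

    import hashlib

    digest = hashlib.sha256()
    with open("checkpoint-17000/model.safetensors", "rb") as f:  # local path assumed
        for chunk in iter(lambda: f.read(1 << 20), b""):
            digest.update(chunk)
    print(digest.hexdigest())  # should equal the oid above (8216831f...aee2)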
checkpoint-17000/optimizer.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:6c5a89448be2df7e8462060a3d4684ff8574d4714a4a79e69e49df2feea44344
+ size 622416011
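optimizer.pt is roughly twice the size of the float32 weights, consistent with an Adam-style optimizer keeping two float32 moment buffers per parameter. Together with scaler.pt below, it is what allows training to resume from this step. A hypothetical inspection sketch:

    import torch

    state = torch.load("checkpoint-17000/optimizer.pt", map_location="cpu")
    # torch optimizer state dicts typically expose 'state' and 'param_groups'
    print(state.keys())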
checkpoint-17000/scaler.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:8f21cdd7a4a21957cab5cb5c8b52bab2d7bd6a95aa9c389f63d3a5e9ba519783
+ size 1383
checkpoint-17000/tokenizer.json ADDED
The diff for this file is too large to render. See raw diff
checkpoint-17000/tokenizer_config.json ADDED
@@ -0,0 +1,10 @@
+ {
+   "backend": "tokenizers",
+   "bos_token": "<|bos|>",
+   "eos_token": "<|eos|>",
+   "is_local": true,
+   "model_max_length": 1000000000000000019884624838656,
+   "pad_token": "<|pad|>",
+   "tokenizer_class": "TokenizersBackend",
+   "unk_token": "<|unk|>"
+ }
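The enormous model_max_length (roughly 1e30) is the transformers sentinel meaning no length limit was recorded, so a practical cap, such as the model's max_position_embeddings of 4096, is usually applied at call time. A minimal loading sketch (local path assumed):

    from transformers import AutoTokenizer

    tok = AutoTokenizer.from_pretrained("checkpoint-17000")
    print(tok.bos_token, tok.eos_token, tok.pad_token)  # <|bos|> <|eos|> <|pad|>
    ids = tok("example text", truncation=True, max_length=4096)["input_ids"]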