Banana-Bae committed
Commit 859db71 · verified · 1 parent: 3755415

Fix metadata: add num_parameters (178B) and torch_dtype (float4_e2m1)
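A quick way to confirm the two new keys landed — a minimal sketch, assuming the config.json from this revision sits in the working directory:

    import json

    with open("config.json") as f:
        cfg = json.load(f)

    # The two fields this commit adds:
    assert cfg["torch_dtype"] == "float4_e2m1"
    assert cfg["num_parameters"] == 178_000_000_000
    print(f'{cfg["num_parameters"] / 1e9:.0f}B parameters, dtype {cfg["torch_dtype"]}')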

Files changed (1):
  config.json  +176 -173
config.json CHANGED
@@ -1,177 +1,180 @@
 {
-  "architectures": [
-    "Qwen3MoeForCausalLM"
-  ],
-  "attention_bias": false,
-  "attention_dropout": 0.0,
-  "bos_token_id": 151643,
-  "decoder_sparse_step": 1,
-  "dtype": "bfloat16",
-  "eos_token_id": 151645,
-  "head_dim": 128,
-  "hidden_act": "silu",
-  "hidden_size": 4096,
-  "initializer_range": 0.02,
-  "intermediate_size": 12288,
-  "max_position_embeddings": 262144,
-  "max_window_layers": 94,
-  "merge_args": {
-    "balance_group_size": 0,
-    "dataset": "c4+math+the-stack-smol",
-    "expert_saliency": "reap",
-    "gate_softmax": false,
-    "group": "freq_logits",
-    "merge": "none",
-    "merge_size": 96,
-    "merger_bs": 1024,
-    "merger_seq_len": 512,
-    "pca_dim": 64,
-    "precompute_input": true,
-    "use_gate_output": false
-  },
-  "mlp_only_layers": [],
-  "model_type": "qwen3_moe",
-  "moe_intermediate_size": 1536,
-  "norm_topk_prob": true,
-  "num_attention_heads": 64,
-  "num_experts": 96,
-  "num_experts_per_tok": 8,
-  "num_hidden_layers": 94,
-  "num_key_value_heads": 4,
-  "output_router_logits": false,
-  "rms_norm_eps": 1e-06,
-  "rope_scaling": null,
-  "rope_theta": 5000000,
-  "router_aux_loss_coef": 0.001,
-  "sliding_window": null,
-  "tie_word_embeddings": false,
-  "transformers_version": "4.57.6",
-  "use_cache": true,
-  "use_sliding_window": false,
-  "vocab_size": 151936,
-  "quantization_config": {
-    "config_groups": {
-      "group_0": {
-        "input_activations": {
-          "dynamic": false,
-          "num_bits": 4,
-          "type": "float",
-          "group_size": 16
-        },
-        "weights": {
-          "dynamic": false,
-          "num_bits": 4,
-          "type": "float",
-          "group_size": 16
-        },
-        "targets": [
-          "Linear"
-        ]
-      }
     },
-    "ignore": [
-      "lm_head",
-      "model.layers.0.mlp.gate",
-      "model.layers.1.mlp.gate",
-      "model.layers.10.mlp.gate",
-      "model.layers.11.mlp.gate",
-      "model.layers.12.mlp.gate",
-      "model.layers.13.mlp.gate",
-      "model.layers.14.mlp.gate",
-      "model.layers.15.mlp.gate",
-      "model.layers.16.mlp.gate",
-      "model.layers.17.mlp.gate",
-      "model.layers.18.mlp.gate",
-      "model.layers.19.mlp.gate",
-      "model.layers.2.mlp.gate",
-      "model.layers.20.mlp.gate",
-      "model.layers.21.mlp.gate",
-      "model.layers.22.mlp.gate",
-      "model.layers.23.mlp.gate",
-      "model.layers.24.mlp.gate",
-      "model.layers.25.mlp.gate",
-      "model.layers.26.mlp.gate",
-      "model.layers.27.mlp.gate",
-      "model.layers.28.mlp.gate",
-      "model.layers.29.mlp.gate",
-      "model.layers.3.mlp.gate",
-      "model.layers.30.mlp.gate",
-      "model.layers.31.mlp.gate",
-      "model.layers.32.mlp.gate",
-      "model.layers.33.mlp.gate",
-      "model.layers.34.mlp.gate",
-      "model.layers.35.mlp.gate",
-      "model.layers.36.mlp.gate",
-      "model.layers.37.mlp.gate",
-      "model.layers.38.mlp.gate",
-      "model.layers.39.mlp.gate",
-      "model.layers.4.mlp.gate",
-      "model.layers.40.mlp.gate",
-      "model.layers.41.mlp.gate",
-      "model.layers.42.mlp.gate",
-      "model.layers.43.mlp.gate",
-      "model.layers.44.mlp.gate",
-      "model.layers.45.mlp.gate",
-      "model.layers.46.mlp.gate",
-      "model.layers.47.mlp.gate",
-      "model.layers.48.mlp.gate",
-      "model.layers.49.mlp.gate",
-      "model.layers.5.mlp.gate",
-      "model.layers.50.mlp.gate",
-      "model.layers.51.mlp.gate",
-      "model.layers.52.mlp.gate",
-      "model.layers.53.mlp.gate",
-      "model.layers.54.mlp.gate",
-      "model.layers.55.mlp.gate",
-      "model.layers.56.mlp.gate",
-      "model.layers.57.mlp.gate",
-      "model.layers.58.mlp.gate",
-      "model.layers.59.mlp.gate",
-      "model.layers.6.mlp.gate",
-      "model.layers.60.mlp.gate",
-      "model.layers.61.mlp.gate",
-      "model.layers.62.mlp.gate",
-      "model.layers.63.mlp.gate",
-      "model.layers.64.mlp.gate",
-      "model.layers.65.mlp.gate",
-      "model.layers.66.mlp.gate",
-      "model.layers.67.mlp.gate",
-      "model.layers.68.mlp.gate",
-      "model.layers.69.mlp.gate",
-      "model.layers.7.mlp.gate",
-      "model.layers.70.mlp.gate",
-      "model.layers.71.mlp.gate",
-      "model.layers.72.mlp.gate",
-      "model.layers.73.mlp.gate",
-      "model.layers.74.mlp.gate",
-      "model.layers.75.mlp.gate",
-      "model.layers.76.mlp.gate",
-      "model.layers.77.mlp.gate",
-      "model.layers.78.mlp.gate",
-      "model.layers.79.mlp.gate",
-      "model.layers.8.mlp.gate",
-      "model.layers.80.mlp.gate",
-      "model.layers.81.mlp.gate",
-      "model.layers.82.mlp.gate",
-      "model.layers.83.mlp.gate",
-      "model.layers.84.mlp.gate",
-      "model.layers.85.mlp.gate",
-      "model.layers.86.mlp.gate",
-      "model.layers.87.mlp.gate",
-      "model.layers.88.mlp.gate",
-      "model.layers.89.mlp.gate",
-      "model.layers.9.mlp.gate",
-      "model.layers.90.mlp.gate",
-      "model.layers.91.mlp.gate",
-      "model.layers.92.mlp.gate",
-      "model.layers.93.mlp.gate",
-      "lm_head"
-    ],
-    "quant_algo": "NVFP4",
-    "producer": {
-      "name": "modelopt",
-      "version": "0.39.0"
     },
-    "quant_method": "modelopt"
-  }
 }
 
 {
+  "architectures": [
+    "Qwen3MoeForCausalLM"
+  ],
+  "attention_bias": false,
+  "attention_dropout": 0.0,
+  "bos_token_id": 151643,
+  "decoder_sparse_step": 1,
+  "dtype": "bfloat16",
+  "eos_token_id": 151645,
+  "head_dim": 128,
+  "hidden_act": "silu",
+  "hidden_size": 4096,
+  "initializer_range": 0.02,
+  "intermediate_size": 12288,
+  "max_position_embeddings": 262144,
+  "max_window_layers": 94,
+  "merge_args": {
+    "balance_group_size": 0,
+    "dataset": "c4+math+the-stack-smol",
+    "expert_saliency": "reap",
+    "gate_softmax": false,
+    "group": "freq_logits",
+    "merge": "none",
+    "merge_size": 96,
+    "merger_bs": 1024,
+    "merger_seq_len": 512,
+    "pca_dim": 64,
+    "precompute_input": true,
+    "use_gate_output": false
+  },
+  "mlp_only_layers": [],
+  "model_type": "qwen3_moe",
+  "moe_intermediate_size": 1536,
+  "norm_topk_prob": true,
+  "num_attention_heads": 64,
+  "num_experts": 96,
+  "num_experts_per_tok": 8,
+  "num_hidden_layers": 94,
+  "num_key_value_heads": 4,
+  "output_router_logits": false,
+  "rms_norm_eps": 1e-06,
+  "rope_scaling": null,
+  "rope_theta": 5000000,
+  "router_aux_loss_coef": 0.001,
+  "sliding_window": null,
+  "tie_word_embeddings": false,
+  "transformers_version": "4.57.6",
+  "use_cache": true,
+  "use_sliding_window": false,
+  "vocab_size": 151936,
+  "quantization_config": {
+    "config_groups": {
+      "group_0": {
+        "input_activations": {
+          "dynamic": false,
+          "num_bits": 4,
+          "type": "float",
+          "group_size": 16
         },
+        "weights": {
+          "dynamic": false,
+          "num_bits": 4,
+          "type": "float",
+          "group_size": 16
         },
+        "targets": [
+          "Linear"
+        ]
+      }
+    },
+    "ignore": [
+      "lm_head",
+      "model.layers.0.mlp.gate",
+      "model.layers.1.mlp.gate",
+      "model.layers.10.mlp.gate",
+      "model.layers.11.mlp.gate",
+      "model.layers.12.mlp.gate",
+      "model.layers.13.mlp.gate",
+      "model.layers.14.mlp.gate",
+      "model.layers.15.mlp.gate",
+      "model.layers.16.mlp.gate",
+      "model.layers.17.mlp.gate",
+      "model.layers.18.mlp.gate",
+      "model.layers.19.mlp.gate",
+      "model.layers.2.mlp.gate",
+      "model.layers.20.mlp.gate",
+      "model.layers.21.mlp.gate",
+      "model.layers.22.mlp.gate",
+      "model.layers.23.mlp.gate",
+      "model.layers.24.mlp.gate",
+      "model.layers.25.mlp.gate",
+      "model.layers.26.mlp.gate",
+      "model.layers.27.mlp.gate",
+      "model.layers.28.mlp.gate",
+      "model.layers.29.mlp.gate",
+      "model.layers.3.mlp.gate",
+      "model.layers.30.mlp.gate",
+      "model.layers.31.mlp.gate",
+      "model.layers.32.mlp.gate",
+      "model.layers.33.mlp.gate",
+      "model.layers.34.mlp.gate",
+      "model.layers.35.mlp.gate",
+      "model.layers.36.mlp.gate",
+      "model.layers.37.mlp.gate",
+      "model.layers.38.mlp.gate",
+      "model.layers.39.mlp.gate",
+      "model.layers.4.mlp.gate",
+      "model.layers.40.mlp.gate",
+      "model.layers.41.mlp.gate",
+      "model.layers.42.mlp.gate",
+      "model.layers.43.mlp.gate",
+      "model.layers.44.mlp.gate",
+      "model.layers.45.mlp.gate",
+      "model.layers.46.mlp.gate",
+      "model.layers.47.mlp.gate",
+      "model.layers.48.mlp.gate",
+      "model.layers.49.mlp.gate",
+      "model.layers.5.mlp.gate",
+      "model.layers.50.mlp.gate",
+      "model.layers.51.mlp.gate",
+      "model.layers.52.mlp.gate",
+      "model.layers.53.mlp.gate",
+      "model.layers.54.mlp.gate",
+      "model.layers.55.mlp.gate",
+      "model.layers.56.mlp.gate",
+      "model.layers.57.mlp.gate",
+      "model.layers.58.mlp.gate",
+      "model.layers.59.mlp.gate",
+      "model.layers.6.mlp.gate",
+      "model.layers.60.mlp.gate",
+      "model.layers.61.mlp.gate",
+      "model.layers.62.mlp.gate",
+      "model.layers.63.mlp.gate",
+      "model.layers.64.mlp.gate",
+      "model.layers.65.mlp.gate",
+      "model.layers.66.mlp.gate",
+      "model.layers.67.mlp.gate",
+      "model.layers.68.mlp.gate",
+      "model.layers.69.mlp.gate",
+      "model.layers.7.mlp.gate",
+      "model.layers.70.mlp.gate",
+      "model.layers.71.mlp.gate",
+      "model.layers.72.mlp.gate",
+      "model.layers.73.mlp.gate",
+      "model.layers.74.mlp.gate",
+      "model.layers.75.mlp.gate",
+      "model.layers.76.mlp.gate",
+      "model.layers.77.mlp.gate",
+      "model.layers.78.mlp.gate",
+      "model.layers.79.mlp.gate",
+      "model.layers.8.mlp.gate",
+      "model.layers.80.mlp.gate",
+      "model.layers.81.mlp.gate",
+      "model.layers.82.mlp.gate",
+      "model.layers.83.mlp.gate",
+      "model.layers.84.mlp.gate",
+      "model.layers.85.mlp.gate",
+      "model.layers.86.mlp.gate",
+      "model.layers.87.mlp.gate",
+      "model.layers.88.mlp.gate",
+      "model.layers.89.mlp.gate",
+      "model.layers.9.mlp.gate",
+      "model.layers.90.mlp.gate",
+      "model.layers.91.mlp.gate",
+      "model.layers.92.mlp.gate",
+      "model.layers.93.mlp.gate",
+      "lm_head"
+    ],
+    "quant_algo": "NVFP4",
+    "producer": {
+      "name": "modelopt",
+      "version": "0.39.0"
+    },
+    "quant_method": "modelopt"
+  },
+  "torch_dtype": "float4_e2m1",
+  "num_parameters": 178000000000,
+  "model_type_description": "NVFP4 quantized MoE (178B total params, 22B active per token)"
 }
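The new num_parameters value and the "178B total, 22B active" figures in model_type_description can be cross-checked against the architecture fields above. A rough count, as a sketch only: it assumes the standard Qwen3-MoE layout with untied embeddings (per tie_word_embeddings: false) and ignores small terms such as RMSNorm weights (there are no biases, per attention_bias: false):

    # Values copied from the config above.
    hidden = 4096            # hidden_size
    layers = 94              # num_hidden_layers
    heads, kv_heads, head_dim = 64, 4, 128
    experts, topk = 96, 8    # num_experts, num_experts_per_tok
    moe_inter = 1536         # moe_intermediate_size
    vocab = 151936           # vocab_size

    attn = 2 * hidden * heads * head_dim + 2 * hidden * kv_heads * head_dim  # q/o + k/v projections
    expert = 3 * hidden * moe_inter   # gate_proj, up_proj, down_proj per expert
    router = hidden * experts         # mlp.gate, listed under "ignore" (left unquantized)
    embed = 2 * vocab * hidden        # input embeddings + lm_head (untied)

    total = layers * (attn + experts * expert + router) + embed
    active = layers * (attn + topk * expert + router) + embed
    print(f"total ~ {total / 1e9:.0f}B, active per token ~ {active / 1e9:.0f}B")
    # -> total ~ 178B, active ~ 22B, consistent with the added metadata.

As for what quantization_config describes: NVFP4 stores weights and input activations as 4-bit floats (e2m1) with a shared scale per group of 16 elements (group_size: 16). The sketch below fake-quantizes one such group to show what the format implies; it is an illustration only, not ModelOpt's implementation, and it keeps the group scale in full precision where NVFP4 actually stores FP8 scale factors:

    import numpy as np

    # The values an e2m1 float can represent: zero plus +/-{0.5, 1, 1.5, 2, 3, 4, 6}.
    E2M1 = np.array([0.0, 0.5, 1.0, 1.5, 2.0, 3.0, 4.0, 6.0])
    GRID = np.unique(np.concatenate([-E2M1, E2M1]))

    def fake_quantize_group(w):
        """Round one group of 16 weights to the e2m1 grid with a shared scale."""
        scale = max(float(np.abs(w).max()) / 6.0, 1e-12)  # map max |w| onto the largest code, 6
        codes = GRID[np.abs(w[:, None] / scale - GRID).argmin(axis=1)]
        return codes * scale

    w = np.random.randn(16).astype(np.float32)
    print(fake_quantize_group(w))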