awni committed · Commit 6e302ea · verified · Parent: 4cfae14

Add files using upload-large-folder tool

README.md CHANGED
@@ -12,7 +12,7 @@ base_model: Qwen/Qwen3-Coder-30B-A3B-Instruct
 
 This model [mlx-community/Qwen3-Coder-30B-A3B-Instruct-4bit](https://huggingface.co/mlx-community/Qwen3-Coder-30B-A3B-Instruct-4bit) was
 converted to MLX format from [Qwen/Qwen3-Coder-30B-A3B-Instruct](https://huggingface.co/Qwen/Qwen3-Coder-30B-A3B-Instruct)
-using mlx-lm version **0.26.0**.
+using mlx-lm version **0.26.3**.
 
 ## Use with mlx
 
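The README's usage section is outside this hunk; for reference, a minimal sketch of loading this 4-bit model with the mlx-lm Python API (assumptions: `pip install mlx-lm` on Apple silicon; the prompt is illustrative and not taken from the repository):

```python
# Minimal sketch: load the 4-bit MLX model and generate a response.
from mlx_lm import load, generate

model, tokenizer = load("mlx-community/Qwen3-Coder-30B-A3B-Instruct-4bit")

prompt = "Write a Python function that reverses a string."

# Apply the chat template when the tokenizer provides one.
if tokenizer.chat_template is not None:
    messages = [{"role": "user", "content": prompt}]
    prompt = tokenizer.apply_chat_template(messages, add_generation_prompt=True)

response = generate(model, tokenizer, prompt=prompt, verbose=True)
```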
config.json CHANGED
@@ -25,11 +25,395 @@
   "qkv_bias": false,
   "quantization": {
     "group_size": 64,
-    "bits": 4
+    "bits": 4,
+    "model.layers.0.mlp.gate": {
+      "group_size": 64,
+      "bits": 8
+    },
+    "model.layers.1.mlp.gate": {
+      "group_size": 64,
+      "bits": 8
+    },
+    "model.layers.2.mlp.gate": {
+      "group_size": 64,
+      "bits": 8
+    },
+    "model.layers.3.mlp.gate": {
+      "group_size": 64,
+      "bits": 8
+    },
+    "model.layers.4.mlp.gate": {
+      "group_size": 64,
+      "bits": 8
+    },
+    "model.layers.5.mlp.gate": {
+      "group_size": 64,
+      "bits": 8
+    },
+    "model.layers.6.mlp.gate": {
+      "group_size": 64,
+      "bits": 8
+    },
+    "model.layers.7.mlp.gate": {
+      "group_size": 64,
+      "bits": 8
+    },
+    "model.layers.8.mlp.gate": {
+      "group_size": 64,
+      "bits": 8
+    },
+    "model.layers.9.mlp.gate": {
+      "group_size": 64,
+      "bits": 8
+    },
+    "model.layers.10.mlp.gate": {
+      "group_size": 64,
+      "bits": 8
+    },
+    "model.layers.11.mlp.gate": {
+      "group_size": 64,
+      "bits": 8
+    },
+    "model.layers.12.mlp.gate": {
+      "group_size": 64,
+      "bits": 8
+    },
+    "model.layers.13.mlp.gate": {
+      "group_size": 64,
+      "bits": 8
+    },
+    "model.layers.14.mlp.gate": {
+      "group_size": 64,
+      "bits": 8
+    },
+    "model.layers.15.mlp.gate": {
+      "group_size": 64,
+      "bits": 8
+    },
+    "model.layers.16.mlp.gate": {
+      "group_size": 64,
+      "bits": 8
+    },
+    "model.layers.17.mlp.gate": {
+      "group_size": 64,
+      "bits": 8
+    },
+    "model.layers.18.mlp.gate": {
+      "group_size": 64,
+      "bits": 8
+    },
+    "model.layers.19.mlp.gate": {
+      "group_size": 64,
+      "bits": 8
+    },
+    "model.layers.20.mlp.gate": {
+      "group_size": 64,
+      "bits": 8
+    },
+    "model.layers.21.mlp.gate": {
+      "group_size": 64,
+      "bits": 8
+    },
+    "model.layers.22.mlp.gate": {
+      "group_size": 64,
+      "bits": 8
+    },
+    "model.layers.23.mlp.gate": {
+      "group_size": 64,
+      "bits": 8
+    },
+    "model.layers.24.mlp.gate": {
+      "group_size": 64,
+      "bits": 8
+    },
+    "model.layers.25.mlp.gate": {
+      "group_size": 64,
+      "bits": 8
+    },
+    "model.layers.26.mlp.gate": {
+      "group_size": 64,
+      "bits": 8
+    },
+    "model.layers.27.mlp.gate": {
+      "group_size": 64,
+      "bits": 8
+    },
+    "model.layers.28.mlp.gate": {
+      "group_size": 64,
+      "bits": 8
+    },
+    "model.layers.29.mlp.gate": {
+      "group_size": 64,
+      "bits": 8
+    },
+    "model.layers.30.mlp.gate": {
+      "group_size": 64,
+      "bits": 8
+    },
+    "model.layers.31.mlp.gate": {
+      "group_size": 64,
+      "bits": 8
+    },
+    "model.layers.32.mlp.gate": {
+      "group_size": 64,
+      "bits": 8
+    },
+    "model.layers.33.mlp.gate": {
+      "group_size": 64,
+      "bits": 8
+    },
+    "model.layers.34.mlp.gate": {
+      "group_size": 64,
+      "bits": 8
+    },
+    "model.layers.35.mlp.gate": {
+      "group_size": 64,
+      "bits": 8
+    },
+    "model.layers.36.mlp.gate": {
+      "group_size": 64,
+      "bits": 8
+    },
+    "model.layers.37.mlp.gate": {
+      "group_size": 64,
+      "bits": 8
+    },
+    "model.layers.38.mlp.gate": {
+      "group_size": 64,
+      "bits": 8
+    },
+    "model.layers.39.mlp.gate": {
+      "group_size": 64,
+      "bits": 8
+    },
+    "model.layers.40.mlp.gate": {
+      "group_size": 64,
+      "bits": 8
+    },
+    "model.layers.41.mlp.gate": {
+      "group_size": 64,
+      "bits": 8
+    },
+    "model.layers.42.mlp.gate": {
+      "group_size": 64,
+      "bits": 8
+    },
+    "model.layers.43.mlp.gate": {
+      "group_size": 64,
+      "bits": 8
+    },
+    "model.layers.44.mlp.gate": {
+      "group_size": 64,
+      "bits": 8
+    },
+    "model.layers.45.mlp.gate": {
+      "group_size": 64,
+      "bits": 8
+    },
+    "model.layers.46.mlp.gate": {
+      "group_size": 64,
+      "bits": 8
+    },
+    "model.layers.47.mlp.gate": {
+      "group_size": 64,
+      "bits": 8
+    }
   },
   "quantization_config": {
     "group_size": 64,
-    "bits": 4
+    "bits": 4,
+    "model.layers.0.mlp.gate": {
+      "group_size": 64,
+      "bits": 8
+    },
+    "model.layers.1.mlp.gate": {
+      "group_size": 64,
+      "bits": 8
+    },
+    "model.layers.2.mlp.gate": {
+      "group_size": 64,
+      "bits": 8
+    },
+    "model.layers.3.mlp.gate": {
+      "group_size": 64,
+      "bits": 8
+    },
+    "model.layers.4.mlp.gate": {
+      "group_size": 64,
+      "bits": 8
+    },
+    "model.layers.5.mlp.gate": {
+      "group_size": 64,
+      "bits": 8
+    },
+    "model.layers.6.mlp.gate": {
+      "group_size": 64,
+      "bits": 8
+    },
+    "model.layers.7.mlp.gate": {
+      "group_size": 64,
+      "bits": 8
+    },
+    "model.layers.8.mlp.gate": {
+      "group_size": 64,
+      "bits": 8
+    },
+    "model.layers.9.mlp.gate": {
+      "group_size": 64,
+      "bits": 8
+    },
+    "model.layers.10.mlp.gate": {
+      "group_size": 64,
+      "bits": 8
+    },
+    "model.layers.11.mlp.gate": {
+      "group_size": 64,
+      "bits": 8
+    },
+    "model.layers.12.mlp.gate": {
+      "group_size": 64,
+      "bits": 8
+    },
+    "model.layers.13.mlp.gate": {
+      "group_size": 64,
+      "bits": 8
+    },
+    "model.layers.14.mlp.gate": {
+      "group_size": 64,
+      "bits": 8
+    },
+    "model.layers.15.mlp.gate": {
+      "group_size": 64,
+      "bits": 8
+    },
+    "model.layers.16.mlp.gate": {
+      "group_size": 64,
+      "bits": 8
+    },
+    "model.layers.17.mlp.gate": {
+      "group_size": 64,
+      "bits": 8
+    },
+    "model.layers.18.mlp.gate": {
+      "group_size": 64,
+      "bits": 8
+    },
+    "model.layers.19.mlp.gate": {
+      "group_size": 64,
+      "bits": 8
+    },
+    "model.layers.20.mlp.gate": {
+      "group_size": 64,
+      "bits": 8
+    },
+    "model.layers.21.mlp.gate": {
+      "group_size": 64,
+      "bits": 8
+    },
+    "model.layers.22.mlp.gate": {
+      "group_size": 64,
+      "bits": 8
+    },
+    "model.layers.23.mlp.gate": {
+      "group_size": 64,
+      "bits": 8
+    },
+    "model.layers.24.mlp.gate": {
+      "group_size": 64,
+      "bits": 8
+    },
+    "model.layers.25.mlp.gate": {
+      "group_size": 64,
+      "bits": 8
+    },
+    "model.layers.26.mlp.gate": {
+      "group_size": 64,
+      "bits": 8
+    },
+    "model.layers.27.mlp.gate": {
+      "group_size": 64,
+      "bits": 8
+    },
+    "model.layers.28.mlp.gate": {
+      "group_size": 64,
+      "bits": 8
+    },
+    "model.layers.29.mlp.gate": {
+      "group_size": 64,
+      "bits": 8
+    },
+    "model.layers.30.mlp.gate": {
+      "group_size": 64,
+      "bits": 8
+    },
+    "model.layers.31.mlp.gate": {
+      "group_size": 64,
+      "bits": 8
+    },
+    "model.layers.32.mlp.gate": {
+      "group_size": 64,
+      "bits": 8
+    },
+    "model.layers.33.mlp.gate": {
+      "group_size": 64,
+      "bits": 8
+    },
+    "model.layers.34.mlp.gate": {
+      "group_size": 64,
+      "bits": 8
+    },
+    "model.layers.35.mlp.gate": {
+      "group_size": 64,
+      "bits": 8
+    },
+    "model.layers.36.mlp.gate": {
+      "group_size": 64,
+      "bits": 8
+    },
+    "model.layers.37.mlp.gate": {
+      "group_size": 64,
+      "bits": 8
+    },
+    "model.layers.38.mlp.gate": {
+      "group_size": 64,
+      "bits": 8
+    },
+    "model.layers.39.mlp.gate": {
+      "group_size": 64,
+      "bits": 8
+    },
+    "model.layers.40.mlp.gate": {
+      "group_size": 64,
+      "bits": 8
+    },
+    "model.layers.41.mlp.gate": {
+      "group_size": 64,
+      "bits": 8
+    },
+    "model.layers.42.mlp.gate": {
+      "group_size": 64,
+      "bits": 8
+    },
+    "model.layers.43.mlp.gate": {
+      "group_size": 64,
+      "bits": 8
+    },
+    "model.layers.44.mlp.gate": {
+      "group_size": 64,
+      "bits": 8
+    },
+    "model.layers.45.mlp.gate": {
+      "group_size": 64,
+      "bits": 8
+    },
+    "model.layers.46.mlp.gate": {
+      "group_size": 64,
+      "bits": 8
+    },
+    "model.layers.47.mlp.gate": {
+      "group_size": 64,
+      "bits": 8
+    }
   },
   "rms_norm_eps": 1e-06,
   "rope_scaling": null,
model-00001-of-00004.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:d8bf8c0a3ce56cfbf01ef0b961764e77ad4bd3aa8fcbbbb4542cb66570d4c2ff
-size 5319507334
+oid sha256:25c6d0a62e02224605774ce2e9dc9a8c95910cde33557b938e55a368da1b5212
+size 5321473414
model-00002-of-00004.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:3591d8c10a85f9527aa95354d022e110e66f5cc100003a3286c81f592ee0b0a5
-size 5364678700
+oid sha256:7007ce495b063155207c3e8b4038771d0d751655274cb6b76c42b8d9c0624412
+size 5366644780
model-00003-of-00004.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:2d2f497688e8fadd238e62ee8e3cbeee931bc9ae4528a37a5d1bcb55c089d895
-size 5274790267
+oid sha256:a14f5bfdd0cd72ee6e71e7f0bc72f6fcde4d78a2da7ee0665f8cf071ac7d2675
+size 5276887419
model-00004-of-00004.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:7742bbbbc3310be12c79c7445fa5c3eee5bd3eeaffb9ecf8047be37a413ba23b
-size 1215804237
+oid sha256:0ea3484a221d674b60c604ea20cb5e5418a2cfd9e61df72c0d7fb5b1622ff5b4
+size 1216066381
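The shard updates above replace only the Git LFS pointers (new sha256 digests and sizes). A hedged, standard-library-only sketch for checking downloaded shards against those digests; the local directory name is illustrative:

```python
# Verify downloaded safetensors shards against the LFS pointer digests above.
import hashlib
from pathlib import Path

MODEL_DIR = Path("Qwen3-Coder-30B-A3B-Instruct-4bit")  # hypothetical local path

EXPECTED = {
    "model-00001-of-00004.safetensors": "25c6d0a62e02224605774ce2e9dc9a8c95910cde33557b938e55a368da1b5212",
    "model-00002-of-00004.safetensors": "7007ce495b063155207c3e8b4038771d0d751655274cb6b76c42b8d9c0624412",
    "model-00003-of-00004.safetensors": "a14f5bfdd0cd72ee6e71e7f0bc72f6fcde4d78a2da7ee0665f8cf071ac7d2675",
    "model-00004-of-00004.safetensors": "0ea3484a221d674b60c604ea20cb5e5418a2cfd9e61df72c0d7fb5b1622ff5b4",
}


def sha256_of(path: Path, chunk_size: int = 1 << 20) -> str:
    # Stream the file so multi-GB shards do not need to fit in memory.
    digest = hashlib.sha256()
    with path.open("rb") as f:
        while chunk := f.read(chunk_size):
            digest.update(chunk)
    return digest.hexdigest()


for name, expected in EXPECTED.items():
    shard = MODEL_DIR / name
    status = "OK" if shard.exists() and sha256_of(shard) == expected else "mismatch or missing"
    print(f"{name}: {status}")
```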
model.safetensors.index.json CHANGED
@@ -1,6 +1,6 @@
 {
   "metadata": {
-    "total_size": 17174622208,
+    "total_size": 17180913664,
     "total_parameters": 30532122624
   },
   "weight_map": {