jeffcookio committed
Commit 00a4853 · verified · 1 Parent(s): f344b61

Upload config.json with huggingface_hub
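
A minimal sketch of the kind of huggingface_hub call that produces a commit like this one; the repo id below is a placeholder assumption, not taken from this page:

    from huggingface_hub import HfApi

    api = HfApi()
    # Upload the edited config.json to the model repository; each call
    # creates a commit like the one shown here.
    api.upload_file(
        path_or_fileobj="config.json",
        path_in_repo="config.json",
        repo_id="user/model",  # placeholder; the actual repo id is not shown
        commit_message="Upload config.json with huggingface_hub",
    )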

Files changed (1): config.json +10 -1
config.json CHANGED
@@ -213,6 +213,9 @@
   },
   "spatial_merge_size": 2,
   "text_config": {
+    "architectures": [
+      "MistralForCausalLM"
+    ],
     "attention_dropout": 0.0,
     "head_dim": 128,
     "hidden_act": "silu",
@@ -235,6 +238,10 @@
   "transformers_version": "4.52.4",
   "unsloth_fixed": true,
   "vision_config": {
+    "architectures": [
+      "Mistral3ForConditionalGeneration"
+    ],
+    "image_token_id": 10,
     "attention_dropout": 0.0,
     "head_dim": 64,
     "hidden_act": "silu",
@@ -248,7 +255,9 @@
   "num_hidden_layers": 24,
   "patch_size": 14,
   "rope_theta": 10000.0,
-  "torch_dtype": "bfloat16"
+  "torch_dtype": "bfloat16",
+  "mm_projector_id": "patch_merge",
+  "spatial_merge_size": 2
   },
   "vision_feature_layer": -1
 }
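
A minimal sketch of checking that the fields added in this commit are picked up when the config is loaded, assuming the repo resolves to a Mistral3-style config with `text_config` and `vision_config` sub-configs; the repo id is a placeholder:

    from transformers import AutoConfig

    # Placeholder repo id; substitute the actual model repository.
    config = AutoConfig.from_pretrained("user/model")

    # The nested fields added in this commit.
    print(config.text_config.architectures)     # expected: ["MistralForCausalLM"]
    print(config.vision_config.architectures)   # expected: ["Mistral3ForConditionalGeneration"]
    print(config.vision_config.image_token_id)  # expected: 10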