gsaon committed · commit a853995 (verified) · parent: 6ff8b9f

Upload folder using huggingface_hub

adapter_config.json CHANGED
@@ -20,8 +20,8 @@
   "rank_pattern": {},
   "revision": null,
   "target_modules": [
-    "q_proj",
-    "v_proj"
+    "v_proj",
+    "q_proj"
   ],
   "task_type": "CAUSAL_LM",
   "use_dora": false,
adapter_model.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:9833221a08ad71b2406bcbc0364ae8682878cc9013b2e9dec65b95a03808883b
+oid sha256:30de576bc1338e63d49d9bd74930aaeb0695745efe5cfd09bc62c8416c2db567
 size 68178600
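adapter_model.safetensors is tracked with Git LFS, so the diff is over the three-line pointer file rather than the binary itself: the spec version, the SHA-256 of the payload, and its byte size. The size is unchanged at 68178600 bytes; only the content hash moved, consistent with retrained adapter weights of identical shape. A standard-library sketch for checking a downloaded file against the pointer's oid:

import hashlib

# Minimal sketch: recompute the sha256 of a locally downloaded LFS
# object and compare it to the oid recorded in the pointer file above.
def sha256_of(path: str, chunk_size: int = 1 << 20) -> str:
    h = hashlib.sha256()
    with open(path, "rb") as f:
        for chunk in iter(lambda: f.read(chunk_size), b""):
            h.update(chunk)
    return h.hexdigest()

expected = "30de576bc1338e63d49d9bd74930aaeb0695745efe5cfd09bc62c8416c2db567"
assert sha256_of("adapter_model.safetensors") == expected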
config.json CHANGED
@@ -16,14 +16,13 @@
     "max_pos_emb": 512,
     "model_type": "granite_speech_encoder",
     "num_heads": 8,
-    "num_layers": 10,
-    "output_dim": 42
+    "num_layers": 16,
+    "output_dim": 256
   },
   "has_lora_adapter": true,
   "initializer_range": 0.02,
   "model_type": "granite_speech",
   "projector_config": {
-    "_attn_implementation_autoset": true,
     "attention_probs_dropout_prob": 0.1,
     "cross_attention_frequency": 1,
     "encoder_hidden_size": 1024,
@@ -74,6 +73,6 @@
     "vocab_size": 49160
   },
   "torch_dtype": "bfloat16",
-  "transformers_version": "4.52.0.dev0",
+  "transformers_version": "4.52.4",
   "window_size": 15
 }
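Two substantive changes here: the speech encoder grows from 10 to 16 layers with output_dim raised from 42 to 256, and the internal `_attn_implementation_autoset` flag is dropped while the pinned transformers version moves from the 4.52.0.dev0 pre-release to the 4.52.4 release. A sketch for spot-checking the new values in a local checkout; the `encoder_config` key name is an assumption, since the hunk starts inside that block:

import json

# Minimal sketch: read config.json and print the fields this commit
# changed. "encoder_config" is assumed to be the enclosing block's key;
# the diff above begins inside it, so the key itself is not shown.
with open("config.json") as f:
    cfg = json.load(f)

enc = cfg["encoder_config"]
print(enc["num_layers"], enc["output_dim"])  # expected: 16 256
print(cfg["transformers_version"])           # expected: 4.52.4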
generation_config.json CHANGED
@@ -3,7 +3,6 @@
   "bos_token_id": 0,
   "eos_token_id": 0,
   "pad_token_id": 0,
-  "transformers_version": "4.52.0.dev0",
-  "use_cache": true,
-  "suppress_tokens": [49159]
+  "transformers_version": "4.52.4",
+  "use_cache": true
 }
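Besides the version pin, this removes `"suppress_tokens": [49159]` from the default generation config. If that suppression is still wanted, it can be layered back on at load time; a minimal sketch, assuming the transformers library and a local checkout of this repository:

from transformers import GenerationConfig

# Minimal sketch: reload the shipped generation config and restore the
# suppress_tokens entry this commit removed. "." stands for a local
# checkout of the repository.
gen_cfg = GenerationConfig.from_pretrained(".")
gen_cfg.suppress_tokens = [49159]  # value taken from the old config

The adjusted config can then be passed per call, e.g. model.generate(..., generation_config=gen_cfg), without editing the file on disk.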
model-00001-of-00004.safetensors ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:bd355249426713a1628972b46d82d4cc1e5efc5af62b77884410d4d2b42004c7
+size 1992459128
model-00002-of-00004.safetensors ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:a6fb9326afc6776e711662437ae38957b898b38716772ad24f6ef0a3c3e06694
+size 1992453224
model-00003-of-00004.safetensors ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:9aa9c5af42824c1b0b928b98b5526fd745448a2e57a31d261753a7770b489143
+size 1998075896
model-00004-of-00004.safetensors ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:102d069fe3ff523609ca1e535fb494e0987e69f571438ab3c67e3dcd8f3cecb0
+size 35696040
model.safetensors.index.json CHANGED
The diff for this file is too large to render.
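The four new shards above are again LFS pointers, and model.safetensors.index.json is the standard safetensors index that maps every tensor name to the shard holding it, plus total-size metadata. A standard-library sketch that summarizes the sharding from a local checkout:

import json
from collections import Counter

# Minimal sketch: print the total checkpoint size and how many tensors
# each shard holds, using the standard safetensors index layout
# ("metadata"/"total_size" and "weight_map").
with open("model.safetensors.index.json") as f:
    index = json.load(f)

print("total size:", index["metadata"]["total_size"])
for shard, count in sorted(Counter(index["weight_map"].values()).items()):
    print(f"{shard}: {count} tensors")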