Upload folder using huggingface_hub
- checkpoint-12/config.json +38 -0
- checkpoint-12/pytorch_model.bin +3 -0
- checkpoint-12/random_states_0.pkl +3 -0
- checkpoint-12/scheduler.bin +3 -0
- checkpoint-14/config.json +38 -0
- checkpoint-14/pytorch_model.bin +3 -0
- checkpoint-14/random_states_0.pkl +3 -0
- checkpoint-14/scheduler.bin +3 -0
- checkpoint-16/config.json +38 -0
- checkpoint-16/pytorch_model.bin +3 -0
- checkpoint-16/random_states_0.pkl +3 -0
- checkpoint-16/scheduler.bin +3 -0
- checkpoint-18/config.json +38 -0
- checkpoint-18/pytorch_model.bin +3 -0
- checkpoint-18/random_states_0.pkl +3 -0
- checkpoint-18/scheduler.bin +3 -0
- checkpoint-20/config.json +38 -0
- checkpoint-20/pytorch_model.bin +3 -0
- checkpoint-20/random_states_0.pkl +3 -0
- checkpoint-20/scheduler.bin +3 -0
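
Per the commit title, these checkpoint folders were pushed with huggingface_hub's folder-upload API, which routes the large binaries through Git LFS automatically. A minimal sketch of how such a commit is typically produced (the local path and repo id below are placeholders, not taken from this commit):

from huggingface_hub import HfApi

api = HfApi()  # picks up the token from `huggingface-cli login`
api.upload_folder(
    folder_path="outputs",              # hypothetical local training directory
    repo_id="your-username/your-repo",  # placeholder repo id
    commit_message="Upload folder using huggingface_hub",
)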
checkpoint-12/config.json
ADDED
@@ -0,0 +1,38 @@
+{
+  "architectures": [
+    "LlamaForCausalLM"
+  ],
+  "attention_bias": false,
+  "attention_dropout": 0.0,
+  "bos_token_id": 128000,
+  "eos_token_id": [
+    128001,
+    128008,
+    128009
+  ],
+  "hidden_act": "silu",
+  "hidden_size": 4096,
+  "initializer_range": 0.02,
+  "intermediate_size": 14336,
+  "max_position_embeddings": 131072,
+  "mlp_bias": false,
+  "model_type": "llama",
+  "num_attention_heads": 32,
+  "num_hidden_layers": 32,
+  "num_key_value_heads": 8,
+  "pretraining_tp": 1,
+  "rms_norm_eps": 1e-05,
+  "rope_scaling": {
+    "factor": 8.0,
+    "low_freq_factor": 1.0,
+    "high_freq_factor": 4.0,
+    "original_max_position_embeddings": 8192,
+    "rope_type": "llama3"
+  },
+  "rope_theta": 500000.0,
+  "tie_word_embeddings": false,
+  "torch_dtype": "bfloat16",
+  "transformers_version": "4.42.3",
+  "use_cache": true,
+  "vocab_size": 128256
+}
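
The config above is a stock transformers Llama config (4096 hidden size, 32 layers, grouped-query attention with 8 KV heads, llama3 rope scaling over a 131072-token context). Because each checkpoint directory keeps config.json next to pytorch_model.bin, it should load directly with transformers; a hedged sketch, assuming the repo is checked out locally with the LFS blobs pulled:

import torch
from transformers import AutoConfig, AutoModelForCausalLM

# Reads checkpoint-12/config.json only; a cheap sanity check.
config = AutoConfig.from_pretrained("checkpoint-12")
print(config.num_hidden_layers, config.num_key_value_heads)  # 32 8

# Loads the ~32 GB pytorch_model.bin next to the config.
model = AutoModelForCausalLM.from_pretrained("checkpoint-12", torch_dtype=torch.bfloat16)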
checkpoint-12/pytorch_model.bin
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:2ee8414f12acaf4f8933f000df8702246ea0d865e54efd2fcddb75a970bf5943
+size 32121081646
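
As with all large files on the Hub, what is committed here is a Git LFS pointer, not the weights themselves: the three lines record the pointer spec version, the SHA-256 of the real blob, and its size in bytes (about 32 GB). After pulling the blob, its integrity can be checked against the pointer; a minimal sketch in plain Python:

import hashlib

# Hash the pulled blob in chunks; it is far too large to read in one go.
h = hashlib.sha256()
with open("checkpoint-12/pytorch_model.bin", "rb") as f:
    for chunk in iter(lambda: f.read(1 << 20), b""):
        h.update(chunk)

# Compare against the oid recorded in the LFS pointer above.
expected = "2ee8414f12acaf4f8933f000df8702246ea0d865e54efd2fcddb75a970bf5943"
assert h.hexdigest() == expected, "blob does not match its LFS pointer"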
checkpoint-12/random_states_0.pkl
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:bacbf9f878f31f457ca768202bf3675c164d070417ae5e6695ab64d00f7b9639
+size 14344
checkpoint-12/scheduler.bin
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:9af347201fee4751116e1a5d93fba120dfb9f0d1caf0a127bc8eb4c078207437
+size 1064
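
Each checkpoint directory carries the same trio alongside the weights: random_states_0.pkl (per-process RNG state) and scheduler.bin (LR scheduler state). This layout matches what Accelerate's save_state() writes, so resuming would normally go through Accelerator.load_state("checkpoint-12") on a prepared setup; that is an inference from the file names, not something stated in the commit. The scheduler state alone can be inspected directly, since it is a plain torch-saved state dict:

import torch

# scheduler.bin is ~1 KB: just the scheduler's state_dict (step counters, base LRs).
sched_state = torch.load("checkpoint-12/scheduler.bin", map_location="cpu")
print(sched_state)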
checkpoint-14/config.json
ADDED
@@ -0,0 +1,38 @@
+{
+  "architectures": [
+    "LlamaForCausalLM"
+  ],
+  "attention_bias": false,
+  "attention_dropout": 0.0,
+  "bos_token_id": 128000,
+  "eos_token_id": [
+    128001,
+    128008,
+    128009
+  ],
+  "hidden_act": "silu",
+  "hidden_size": 4096,
+  "initializer_range": 0.02,
+  "intermediate_size": 14336,
+  "max_position_embeddings": 131072,
+  "mlp_bias": false,
+  "model_type": "llama",
+  "num_attention_heads": 32,
+  "num_hidden_layers": 32,
+  "num_key_value_heads": 8,
+  "pretraining_tp": 1,
+  "rms_norm_eps": 1e-05,
+  "rope_scaling": {
+    "factor": 8.0,
+    "low_freq_factor": 1.0,
+    "high_freq_factor": 4.0,
+    "original_max_position_embeddings": 8192,
+    "rope_type": "llama3"
+  },
+  "rope_theta": 500000.0,
+  "tie_word_embeddings": false,
+  "torch_dtype": "bfloat16",
+  "transformers_version": "4.42.3",
+  "use_cache": true,
+  "vocab_size": 128256
+}
checkpoint-14/pytorch_model.bin
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:7893652aa6f01b7a2aee3f57dd31ad38c7179ff3b7f670826e0903b355c6c16a
+size 32121081646
checkpoint-14/random_states_0.pkl
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:8b7fc0bc4841e058b500da4bef5fbf5478ba6466daa3a086f0f09777bcfa8dcb
+size 14344
checkpoint-14/scheduler.bin
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:34f0ce6e9bfbcd302b72d41137906a6b6a15960508f1d8b6f2db4be5294b7a2e
+size 1064
checkpoint-16/config.json
ADDED
@@ -0,0 +1,38 @@
+{
+  "architectures": [
+    "LlamaForCausalLM"
+  ],
+  "attention_bias": false,
+  "attention_dropout": 0.0,
+  "bos_token_id": 128000,
+  "eos_token_id": [
+    128001,
+    128008,
+    128009
+  ],
+  "hidden_act": "silu",
+  "hidden_size": 4096,
+  "initializer_range": 0.02,
+  "intermediate_size": 14336,
+  "max_position_embeddings": 131072,
+  "mlp_bias": false,
+  "model_type": "llama",
+  "num_attention_heads": 32,
+  "num_hidden_layers": 32,
+  "num_key_value_heads": 8,
+  "pretraining_tp": 1,
+  "rms_norm_eps": 1e-05,
+  "rope_scaling": {
+    "factor": 8.0,
+    "low_freq_factor": 1.0,
+    "high_freq_factor": 4.0,
+    "original_max_position_embeddings": 8192,
+    "rope_type": "llama3"
+  },
+  "rope_theta": 500000.0,
+  "tie_word_embeddings": false,
+  "torch_dtype": "bfloat16",
+  "transformers_version": "4.42.3",
+  "use_cache": true,
+  "vocab_size": 128256
+}
checkpoint-16/pytorch_model.bin
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:b6a6d4dd4ae1c7a7db74635303b990c087dcc6d1daaa93031d2bc98356037b74
+size 32121081646
checkpoint-16/random_states_0.pkl
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:199e5be02aa57e74f522005219dcbbf4692f15e0b65173837ce2ab661749b9cf
+size 14344
checkpoint-16/scheduler.bin
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:c1f37ded0d75686e8acfacf487aad41e95d02fa0c5ee9da16a795c41cd8a7f8c
+size 1064
checkpoint-18/config.json
ADDED
@@ -0,0 +1,38 @@
+{
+  "architectures": [
+    "LlamaForCausalLM"
+  ],
+  "attention_bias": false,
+  "attention_dropout": 0.0,
+  "bos_token_id": 128000,
+  "eos_token_id": [
+    128001,
+    128008,
+    128009
+  ],
+  "hidden_act": "silu",
+  "hidden_size": 4096,
+  "initializer_range": 0.02,
+  "intermediate_size": 14336,
+  "max_position_embeddings": 131072,
+  "mlp_bias": false,
+  "model_type": "llama",
+  "num_attention_heads": 32,
+  "num_hidden_layers": 32,
+  "num_key_value_heads": 8,
+  "pretraining_tp": 1,
+  "rms_norm_eps": 1e-05,
+  "rope_scaling": {
+    "factor": 8.0,
+    "low_freq_factor": 1.0,
+    "high_freq_factor": 4.0,
+    "original_max_position_embeddings": 8192,
+    "rope_type": "llama3"
+  },
+  "rope_theta": 500000.0,
+  "tie_word_embeddings": false,
+  "torch_dtype": "bfloat16",
+  "transformers_version": "4.42.3",
+  "use_cache": true,
+  "vocab_size": 128256
+}
checkpoint-18/pytorch_model.bin
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:f9e26f6fedb06fffb096a7e82d934d2cb982aee371b8081de82b77be6fd5d31b
+size 32121081646
checkpoint-18/random_states_0.pkl
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:9da6507ac745950b4a5ccea274b4cfa74f3ce26b95a4728f90369adb3f9f7623
+size 14344
checkpoint-18/scheduler.bin
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:ca83f9abb5ebc7c370b3662a2468eaf17d3759786fa6385ba3d6397c5f2f1045
+size 1064
checkpoint-20/config.json
ADDED
@@ -0,0 +1,38 @@
+{
+  "architectures": [
+    "LlamaForCausalLM"
+  ],
+  "attention_bias": false,
+  "attention_dropout": 0.0,
+  "bos_token_id": 128000,
+  "eos_token_id": [
+    128001,
+    128008,
+    128009
+  ],
+  "hidden_act": "silu",
+  "hidden_size": 4096,
+  "initializer_range": 0.02,
+  "intermediate_size": 14336,
+  "max_position_embeddings": 131072,
+  "mlp_bias": false,
+  "model_type": "llama",
+  "num_attention_heads": 32,
+  "num_hidden_layers": 32,
+  "num_key_value_heads": 8,
+  "pretraining_tp": 1,
+  "rms_norm_eps": 1e-05,
+  "rope_scaling": {
+    "factor": 8.0,
+    "low_freq_factor": 1.0,
+    "high_freq_factor": 4.0,
+    "original_max_position_embeddings": 8192,
+    "rope_type": "llama3"
+  },
+  "rope_theta": 500000.0,
+  "tie_word_embeddings": false,
+  "torch_dtype": "bfloat16",
+  "transformers_version": "4.42.3",
+  "use_cache": true,
+  "vocab_size": 128256
+}
checkpoint-20/pytorch_model.bin
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:adcd7431cf03d503a301d171290152a760c4c4a42cd8914393e56d7a08687aa5
+size 32121081646
checkpoint-20/random_states_0.pkl
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:b849493c8c38ae3611c3c4e2362dfae3ea033ecaec63c50f2fccef2698f9e38a
+size 14344
checkpoint-20/scheduler.bin
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:40c4594deca9821e93924340834de5670e43466e3babbec2056078fb6531604d
+size 1064