End of training

Files changed:
- README.md (+4 -4)
- config.json (+2 -2)
- pytorch_model.bin (+1 -1)
- special_tokens_map.json (+35 -5)
- tokenizer_config.json (+42 -0)
- training_args.bin (+1 -1)
README.md
CHANGED
@@ -31,8 +31,8 @@ More information needed
 
 The following hyperparameters were used during training:
 - learning_rate: 5e-05
-- train_batch_size:
-- eval_batch_size:
+- train_batch_size: 16
+- eval_batch_size: 16
 - seed: 42
 - optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08
 - lr_scheduler_type: linear
@@ -40,7 +40,7 @@ The following hyperparameters were used during training:
 
 ### Framework versions
 
-- Transformers 4.
+- Transformers 4.34.1
 - Pytorch 2.0.1+cu118
 - Datasets 2.14.4
-- Tokenizers 0.
+- Tokenizers 0.14.1
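These hyperparameters map directly onto transformers' TrainingArguments. A minimal sketch of the mapping, assuming a single device (so the README's train_batch_size equals per_device_train_batch_size); output_dir is a hypothetical placeholder, not a path from this repo:

from transformers import TrainingArguments

# Mirrors the hyperparameters listed in the README above.
args = TrainingArguments(
    output_dir="kor-bert-name-ner-out",  # placeholder
    learning_rate=5e-5,
    per_device_train_batch_size=16,
    per_device_eval_batch_size=16,
    seed=42,
    lr_scheduler_type="linear",
    adam_beta1=0.9,     # the README's Adam betas and epsilon
    adam_beta2=0.999,   # are the library defaults
    adam_epsilon=1e-8,
)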
config.json
CHANGED
@@ -1,5 +1,5 @@
 {
-  "_name_or_path": "/home/evidnet/work/joon09/transformers-pytorch/finetune/ckpt/kor-bert-for-name-ner-ckpt/checkpoint-
+  "_name_or_path": "/home/evidnet/work/joon09/transformers-pytorch/finetune/ckpt/kor-bert-for-name-ner-ckpt/checkpoint-3",
   "architectures": [
     "BertForTokenClassification"
   ],
@@ -88,7 +88,7 @@
   "pooler_type": "first_token_transform",
   "position_embedding_type": "absolute",
   "torch_dtype": "float32",
-  "transformers_version": "4.
+  "transformers_version": "4.34.1",
   "type_vocab_size": 2,
   "use_cache": true,
   "vocab_size": 42000
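The architectures field is what tells AutoModel which head to instantiate; _name_or_path merely records where the weights came from at save time. A sketch of reloading the checkpoint, assuming a local clone of this repo at "." with the LFS weights pulled:

from transformers import AutoConfig, AutoModelForTokenClassification

# "." is a local clone of this repo; a hub repo id works the same way.
config = AutoConfig.from_pretrained(".")
model = AutoModelForTokenClassification.from_pretrained(".")
assert config.vocab_size == 42000  # matches config.json above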
pytorch_model.bin
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:12dfb43769df81c612eb4269b8a151bde48290438c60e072bdf0f0c0430a6ce7
 size 470983913
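Because the weights are tracked with Git LFS, the repository stores only this pointer file (spec version, sha256 digest, byte size); the diff swaps in new weights of the same 470983913-byte size. A sketch for checking a downloaded copy against the pointer's digest:

import hashlib

# Hash the downloaded weights and compare with the pointer's oid.
h = hashlib.sha256()
with open("pytorch_model.bin", "rb") as f:
    for chunk in iter(lambda: f.read(1 << 20), b""):
        h.update(chunk)
print(h.hexdigest())
# expected: 12dfb43769df81c612eb4269b8a151bde48290438c60e072bdf0f0c0430a6ce7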
special_tokens_map.json
CHANGED
@@ -1,7 +1,37 @@
 {
-  "cls_token": "[CLS]",
-  "mask_token": "[MASK]",
-  "pad_token": "[PAD]",
-  "sep_token": "[SEP]",
-  "unk_token": "[UNK]"
+  "cls_token": {
+    "content": "[CLS]",
+    "lstrip": false,
+    "normalized": false,
+    "rstrip": false,
+    "single_word": false
+  },
+  "mask_token": {
+    "content": "[MASK]",
+    "lstrip": false,
+    "normalized": false,
+    "rstrip": false,
+    "single_word": false
+  },
+  "pad_token": {
+    "content": "[PAD]",
+    "lstrip": false,
+    "normalized": false,
+    "rstrip": false,
+    "single_word": false
+  },
+  "sep_token": {
+    "content": "[SEP]",
+    "lstrip": false,
+    "normalized": false,
+    "rstrip": false,
+    "single_word": false
+  },
+  "unk_token": {
+    "content": "[UNK]",
+    "lstrip": false,
+    "normalized": false,
+    "rstrip": false,
+    "single_word": false
+  }
 }
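This change expands each bare-string special token into its full AddedToken serialization, with explicit lstrip/rstrip/normalized/single_word flags, which is the form newer tokenizers releases write. Either form round-trips through AutoTokenizer; a small sketch, again assuming a local clone at ".":

from transformers import AutoTokenizer

tok = AutoTokenizer.from_pretrained(".")  # local clone of this repo
print(tok.cls_token, tok.sep_token, tok.pad_token, tok.unk_token, tok.mask_token)
# -> [CLS] [SEP] [PAD] [UNK] [MASK]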
tokenizer_config.json
CHANGED
@@ -1,4 +1,46 @@
 {
+  "added_tokens_decoder": {
+    "0": {
+      "content": "[PAD]",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "1": {
+      "content": "[UNK]",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "2": {
+      "content": "[CLS]",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "3": {
+      "content": "[SEP]",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "4": {
+      "content": "[MASK]",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    }
+  },
   "clean_up_tokenization_spaces": true,
   "cls_token": "[CLS]",
   "do_basic_tokenize": true,
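added_tokens_decoder is a tokenizer_config.json field that transformers began serializing around 4.34 (the version recorded in this commit): it maps token ids directly to their AddedToken definitions, so ids 0-4 are pinned to [PAD], [UNK], [CLS], [SEP], [MASK] independently of the vocab file. A sketch of the correspondence, under the same local-clone assumption:

from transformers import AutoTokenizer

tok = AutoTokenizer.from_pretrained(".")
# ids 0-4 come from added_tokens_decoder above
print(tok.convert_ids_to_tokens([0, 1, 2, 3, 4]))
# -> ['[PAD]', '[UNK]', '[CLS]', '[SEP]', '[MASK]']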
training_args.bin
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:719e97850d211bf74d6f29f5b437ccac21a6ef0c323deec92d4df05a4d133dce
 size 4091
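training_args.bin is the pickled TrainingArguments object that Trainer saves next to the weights; at 4091 bytes it holds only run configuration, no tensors. A sketch for inspecting it, assuming a compatible transformers version is installed (unpickling runs arbitrary code, so only load files you trust):

import torch

args = torch.load("training_args.bin")  # pickled TrainingArguments
print(args.learning_rate, args.seed)
# -> 5e-05 42  (matching the README's hyperparameters)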