End of training
Files changed:
- README.md (+35, -73)
- adapter.ardz.safetensors (+3, -0)
- config.json (+2, -2)
- model.safetensors (+2, -2)
- training_args.bin (+1, -1)
README.md
CHANGED
@@ -9,21 +9,21 @@ metrics:
 - bleu
 - rouge
 model-index:
-- name:
+- name: kab-dz
   results: []
 ---

 <!-- This model card has been generated automatically according to the information the Trainer had access to. You
 should probably proofread and complete it, then remove this comment. -->

-#
+# kab-dz

 This model is a fine-tuned version of [facebook/mms-1b-all](https://huggingface.co/facebook/mms-1b-all) on an unknown dataset.
 It achieves the following results on the evaluation set:
-- Loss: 0.
-- Wer: 0.
-- Bleu: 0.
-- Rouge: {'rouge1': 0.
+- Loss: 0.3296
+- Wer: 0.5537
+- Bleu: {'bleu': 0.17822041427852187, 'precisions': [0.46242010138858275, 0.24001479289940827, 0.13158998741434763, 0.0734417780641005], 'brevity_penalty': 0.984798238899528, 'length_ratio': 0.9849126234668404, 'translation_length': 9074, 'reference_length': 9213}
+- Rouge: {'rouge1': 0.0, 'rouge2': 0.0, 'rougeL': 0.0, 'rougeLsum': 0.0}

 ## Model description

@@ -42,83 +42,45 @@ More information needed
 ### Training hyperparameters

 The following hyperparameters were used during training:
-- learning_rate: 0.
+- learning_rate: 0.0001
 - train_batch_size: 8
-- eval_batch_size:
+- eval_batch_size: 16
 - seed: 42
 - gradient_accumulation_steps: 4
 - total_train_batch_size: 32
 - optimizer: Use OptimizerNames.ADAMW_TORCH with betas=(0.9,0.999) and epsilon=1e-08 and optimizer_args=No additional optimizer arguments
 - lr_scheduler_type: linear
-- lr_scheduler_warmup_steps:
-- num_epochs:
+- lr_scheduler_warmup_steps: 500
+- num_epochs: 30
 - mixed_precision_training: Native AMP

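For reference, the listed values map onto transformers TrainingArguments roughly as in the sketch below. Only the values shown above come from the card; the output directory and anything not listed (logging, evaluation and save cadence, and so on) are placeholder assumptions.

```python
# Hypothetical reconstruction of the training setup from the hyperparameters above.
# Only the commented values are taken from the card; output_dir and any omitted
# settings are placeholders.
from transformers import TrainingArguments

training_args = TrainingArguments(
    output_dir="kab-dz",                # placeholder
    learning_rate=1e-4,                 # learning_rate: 0.0001
    per_device_train_batch_size=8,      # train_batch_size: 8
    per_device_eval_batch_size=16,      # eval_batch_size: 16
    seed=42,
    gradient_accumulation_steps=4,      # 8 x 4 = total_train_batch_size 32
    optim="adamw_torch",                # OptimizerNames.ADAMW_TORCH
    adam_beta1=0.9,
    adam_beta2=0.999,
    adam_epsilon=1e-08,
    lr_scheduler_type="linear",
    warmup_steps=500,                   # lr_scheduler_warmup_steps: 500
    num_train_epochs=30,                # num_epochs: 30
    fp16=True,                          # Native AMP; needs a CUDA device
)
```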
### Training results

Removed (previous results table; its header and earlier rows were truncated in the diff view):
| 0.6327 | 2.3413 | 2400 | 0.3475 | 0.4403 | 0.3480 | {'rouge1': 0.6437891592593289, 'rouge2': 0.44349428929588836, 'rougeL': 0.6434560290609752, 'rougeLsum': 0.6434450349375109} |
| 0.6226 | 2.4388 | 2500 | 0.3396 | 0.4257 | 0.3641 | {'rouge1': 0.6540925308500153, 'rouge2': 0.4547444984600552, 'rougeL': 0.6535404954649788, 'rougeLsum': 0.6534025409179112} |
| 0.6175 | 2.5363 | 2600 | 0.3420 | 0.4238 | 0.3618 | {'rouge1': 0.6570293538260654, 'rouge2': 0.45661791700993576, 'rougeL': 0.6565349834626244, 'rougeLsum': 0.6563589283917969} |
| 0.6087 | 2.6338 | 2700 | 0.3378 | 0.4340 | 0.3562 | {'rouge1': 0.6470455955080758, 'rouge2': 0.4490930911063734, 'rougeL': 0.6470109060599167, 'rougeLsum': 0.6468990937615842} |
| 0.6136 | 2.7314 | 2800 | 0.3357 | 0.4340 | 0.3569 | {'rouge1': 0.6462711100612597, 'rouge2': 0.44787292745812923, 'rougeL': 0.6457431436135709, 'rougeLsum': 0.6457443356600687} |
| 0.603 | 2.8289 | 2900 | 0.3333 | 0.4211 | 0.3665 | {'rouge1': 0.658326692310857, 'rouge2': 0.4591672202918055, 'rougeL': 0.6579324587764817, 'rougeLsum': 0.657831017441442} |
| 0.6111 | 2.9264 | 3000 | 0.3278 | 0.4115 | 0.3769 | {'rouge1': 0.6680275850005191, 'rouge2': 0.47111044939536956, 'rougeL': 0.667594152546402, 'rougeLsum': 0.6675417104884547} |
| 0.5823 | 3.0244 | 3100 | 0.3259 | 0.4138 | 0.3738 | {'rouge1': 0.6658105281688206, 'rouge2': 0.46831258779891827, 'rougeL': 0.6654463258976879, 'rougeLsum': 0.6653623068947464} |
| 0.596 | 3.1219 | 3200 | 0.3291 | 0.4075 | 0.3857 | {'rouge1': 0.6760034844772485, 'rouge2': 0.48075322253291103, 'rougeL': 0.6753352830167898, 'rougeLsum': 0.6753654994638207} |
| 0.585 | 3.2194 | 3300 | 0.3218 | 0.4066 | 0.3854 | {'rouge1': 0.6708033471134076, 'rouge2': 0.4760878515017991, 'rougeL': 0.6704396025219446, 'rougeLsum': 0.6705375169622321} |
| 0.5966 | 3.3169 | 3400 | 0.3225 | 0.4046 | 0.3855 | {'rouge1': 0.6733506484650071, 'rouge2': 0.47866589105906143, 'rougeL': 0.6725823331259932, 'rougeLsum': 0.6726322301227529} |
| 0.5927 | 3.4144 | 3500 | 0.3225 | 0.4036 | 0.3837 | {'rouge1': 0.6763429608911748, 'rouge2': 0.4799554819160149, 'rougeL': 0.6760199809645441, 'rougeLsum': 0.6759470534272627} |
| 0.5916 | 3.5119 | 3600 | 0.3211 | 0.3978 | 0.3948 | {'rouge1': 0.6835849587362359, 'rouge2': 0.49067004972462647, 'rougeL': 0.6830631826963516, 'rougeLsum': 0.683021270557737} |
| 0.5985 | 3.6095 | 3700 | 0.3193 | 0.3950 | 0.3971 | {'rouge1': 0.6838483117017133, 'rouge2': 0.49042401786291456, 'rougeL': 0.6832829603653612, 'rougeLsum': 0.6833601954138526} |
| 0.5967 | 3.7070 | 3800 | 0.3196 | 0.3944 | 0.3977 | {'rouge1': 0.6832470151375148, 'rouge2': 0.48997365042491053, 'rougeL': 0.6828971433052323, 'rougeLsum': 0.6826154295596226} |
| 0.5861 | 3.8045 | 3900 | 0.3164 | 0.3953 | 0.3966 | {'rouge1': 0.6815194009337533, 'rouge2': 0.48901293150413283, 'rougeL': 0.6811599766209577, 'rougeLsum': 0.681167364543434} |
| 0.5744 | 3.9020 | 4000 | 0.3115 | 0.3988 | 0.3949 | {'rouge1': 0.677956894381051, 'rouge2': 0.48577956453068094, 'rougeL': 0.677533258370969, 'rougeLsum': 0.6775310259371528} |
| 0.6032 | 3.9995 | 4100 | 0.3159 | 0.4112 | 0.3833 | {'rouge1': 0.666858825858121, 'rouge2': 0.47345226473180235, 'rougeL': 0.666467300377577, 'rougeLsum': 0.666485598809812} |
| 0.5642 | 4.0975 | 4200 | 0.3107 | 0.3960 | 0.3965 | {'rouge1': 0.6798209619380302, 'rouge2': 0.4877476049191384, 'rougeL': 0.6792681025423539, 'rougeLsum': 0.679441807529453} |
| 0.5772 | 4.1950 | 4300 | 0.3124 | 0.4021 | 0.3907 | {'rouge1': 0.6769968634627295, 'rouge2': 0.4855589558164317, 'rougeL': 0.6763733852000889, 'rougeLsum': 0.6764190462864809} |
| 0.5667 | 4.2925 | 4400 | 0.3090 | 0.3989 | 0.3963 | {'rouge1': 0.6784269657572182, 'rouge2': 0.48622812356075396, 'rougeL': 0.6778490035977014, 'rougeLsum': 0.6776967017467932} |
| 0.5713 | 4.3901 | 4500 | 0.3080 | 0.3925 | 0.4029 | {'rouge1': 0.6803911080488889, 'rouge2': 0.4889196805179151, 'rougeL': 0.679897767360062, 'rougeLsum': 0.6799181233054294} |
| 0.575 | 4.4876 | 4600 | 0.3069 | 0.3890 | 0.4032 | {'rouge1': 0.6866256247806055, 'rouge2': 0.4947643833678781, 'rougeL': 0.6863600226556883, 'rougeLsum': 0.6862665256307836} |
| 0.5669 | 4.5851 | 4700 | 0.3042 | 0.3886 | 0.4048 | {'rouge1': 0.6873428477118608, 'rouge2': 0.49739034530961257, 'rougeL': 0.6869537171773277, 'rougeLsum': 0.6869787755670816} |
| 0.563 | 4.6826 | 4800 | 0.3085 | 0.4161 | 0.3792 | {'rouge1': 0.6615511703414414, 'rouge2': 0.4676904378632397, 'rougeL': 0.6614414779567332, 'rougeLsum': 0.661189370181686} |
| 0.5465 | 4.7801 | 4900 | 0.3041 | 0.3921 | 0.4008 | {'rouge1': 0.6845900895016952, 'rouge2': 0.4927761748243858, 'rougeL': 0.684031365756208, 'rougeLsum': 0.6840579744316673} |
| 0.5703 | 4.8776 | 5000 | 0.3009 | 0.3863 | 0.4069 | {'rouge1': 0.6886709663424477, 'rouge2': 0.49819776939282057, 'rougeL': 0.6881154820537174, 'rougeLsum': 0.6883937877015258} |
| 0.5591 | 4.9751 | 5100 | 0.3016 | 0.3881 | 0.4052 | {'rouge1': 0.6889386752704709, 'rouge2': 0.4983154761005506, 'rougeL': 0.6887126877038267, 'rougeLsum': 0.6888475857510377} |
| 0.5447 | 5.0731 | 5200 | 0.2988 | 0.3783 | 0.4174 | {'rouge1': 0.6970646487324009, 'rouge2': 0.5084578165741072, 'rougeL': 0.6968278006524977, 'rougeLsum': 0.696619847658498} |
| 0.5569 | 5.1706 | 5300 | 0.2979 | 0.3792 | 0.4168 | {'rouge1': 0.6961276289823202, 'rouge2': 0.5070859993554945, 'rougeL': 0.6958119782278983, 'rougeLsum': 0.6956873029334603} |
| 0.55 | 5.2682 | 5400 | 0.2971 | 0.3805 | 0.4133 | {'rouge1': 0.6955416923122605, 'rouge2': 0.5065597123927292, 'rougeL': 0.6952089770716992, 'rougeLsum': 0.6951133501808373} |
| 0.5488 | 5.3657 | 5500 | 0.2977 | 0.3764 | 0.4190 | {'rouge1': 0.6997508899061681, 'rouge2': 0.511478673711731, 'rougeL': 0.6995150573688741, 'rougeLsum': 0.6993470178997533} |
| 0.5616 | 5.4632 | 5600 | 0.2965 | 0.3732 | 0.4235 | {'rouge1': 0.7013465455303209, 'rouge2': 0.5139126595456542, 'rougeL': 0.7008859589300989, 'rougeLsum': 0.7008571800466421} |
| 0.5511 | 5.5607 | 5700 | 0.2955 | 0.3755 | 0.4198 | {'rouge1': 0.6979003572654013, 'rouge2': 0.5104101490913873, 'rougeL': 0.6974357128935622, 'rougeLsum': 0.6976939197435674} |
| 0.5475 | 5.6582 | 5800 | 0.2944 | 0.3744 | 0.4224 | {'rouge1': 0.6997485561362005, 'rouge2': 0.5117237925763095, 'rougeL': 0.699315501838293, 'rougeLsum': 0.6993609881095812} |
| 0.535 | 5.7557 | 5900 | 0.2943 | 0.3745 | 0.4227 | {'rouge1': 0.7004123958977562, 'rouge2': 0.5130778231448121, 'rougeL': 0.6999992378819626, 'rougeLsum': 0.6999034701936646} |
| 0.5381 | 5.8532 | 6000 | 0.2939 | 0.3756 | 0.4204 | {'rouge1': 0.6993128402555228, 'rouge2': 0.5118520741032332, 'rougeL': 0.6990872979187841, 'rougeLsum': 0.6988728786986375} |
| 0.5542 | 5.9508 | 6100 | 0.2934 | 0.3763 | 0.4201 | {'rouge1': 0.6991211401254616, 'rouge2': 0.5113991327668065, 'rougeL': 0.6986732788231161, 'rougeLsum': 0.6986739135320972} |

Added (new results table):
| Training Loss | Epoch | Step | Validation Loss | Wer | Bleu | Rouge |
|:-------------:|:-----:|:----:|:---------------:|:------:|:-------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------:|:---------------------------------------------------------------:|
| 8.3957 | 1.0 | 121 | 6.4435 | 1.0002 | {'bleu': 0.0, 'precisions': [0.0, 0.0, 0.0, 0.0], 'brevity_penalty': 0.028413494474637858, 'length_ratio': 0.21925539997829155, 'translation_length': 2020, 'reference_length': 9213} | {'rouge1': 0.0, 'rouge2': 0.0, 'rougeL': 0.0, 'rougeLsum': 0.0} |
| 3.8246 | 2.0 | 242 | 1.7852 | 1.0036 | {'bleu': 0.0, 'precisions': [0.0019450800915331808, 0.0, 0.0, 0.0], 'brevity_penalty': 0.9475361779864253, 'length_ratio': 0.9488654869178157, 'translation_length': 8740, 'reference_length': 9211} | {'rouge1': 0.0, 'rouge2': 0.0, 'rougeL': 0.0, 'rougeLsum': 0.0} |
| 0.8242 | 3.0 | 363 | 0.5552 | 0.7259 | {'bleu': 0.05984194666820544, 'precisions': [0.2893541597429932, 0.10006199628022319, 0.036166619757951025, 0.013298734998378203], 'brevity_penalty': 0.9796059773354316, 'length_ratio': 0.9798111364376425, 'translation_length': 9027, 'reference_length': 9213} | {'rouge1': 0.0, 'rouge2': 0.0, 'rougeL': 0.0, 'rougeLsum': 0.0} |
| 0.6641 | 4.0 | 484 | 0.4531 | 0.6539 | {'bleu': 0.09740569316232665, 'precisions': [0.36318407960199006, 0.15155264134603488, 0.06640460480134774, 0.026528631510837918], 'brevity_penalty': 0.9815976322925238, 'length_ratio': 0.981764897427548, 'translation_length': 9045, 'reference_length': 9213} | {'rouge1': 0.0, 'rouge2': 0.0, 'rougeL': 0.0, 'rougeLsum': 0.0} |
| 0.5731 | 5.0 | 605 | 0.4109 | 0.6272 | {'bleu': 0.12229576623270189, 'precisions': [0.38988526233708365, 0.17741734248284466, 0.08717221828490432, 0.04121013900245298], 'brevity_penalty': 0.9740531517333079, 'length_ratio': 0.9743840225767937, 'translation_length': 8977, 'reference_length': 9213} | {'rouge1': 0.0, 'rouge2': 0.0, 'rougeL': 0.0, 'rougeLsum': 0.0} |
| 0.5674 | 6.0 | 726 | 0.3918 | 0.6109 | {'bleu': 0.1305912953348509, 'precisions': [0.40551617190961453, 0.1891891891891892, 0.09289232934553132, 0.0442966087944183], 'brevity_penalty': 0.9797167269065808, 'length_ratio': 0.9799196787148594, 'translation_length': 9028, 'reference_length': 9213} | {'rouge1': 0.0, 'rouge2': 0.0, 'rougeL': 0.0, 'rougeLsum': 0.0} |
| 0.5257 | 7.0 | 847 | 0.3782 | 0.6064 | {'bleu': 0.13275289042482177, 'precisions': [0.41021946353358457, 0.19280397022332507, 0.09367516551626989, 0.045624289657411915], 'brevity_penalty': 0.9790520492063531, 'length_ratio': 0.9792684250515575, 'translation_length': 9022, 'reference_length': 9213} | {'rouge1': 0.0, 'rouge2': 0.0, 'rougeL': 0.0, 'rougeLsum': 0.0} |
| 0.5374 | 8.0 | 968 | 0.3713 | 0.5998 | {'bleu': 0.12766441539188614, 'precisions': [0.41609475315474875, 0.1911546085232904, 0.08957952468007313, 0.04035656401944895], 'brevity_penalty': 0.9803809720556327, 'length_ratio': 0.9805709323781613, 'translation_length': 9034, 'reference_length': 9213} | {'rouge1': 0.0, 'rouge2': 0.0, 'rougeL': 0.0, 'rougeLsum': 0.0} |
| 0.5153 | 9.0 | 1089 | 0.3626 | 0.5952 | {'bleu': 0.13409888931641242, 'precisions': [0.4208892338396718, 0.1984609656199578, 0.0951240135287486, 0.044354183590576766], 'brevity_penalty': 0.9787195480653427, 'length_ratio': 0.9789427982199067, 'translation_length': 9019, 'reference_length': 9213} | {'rouge1': 0.0, 'rouge2': 0.0, 'rougeL': 0.0, 'rougeLsum': 0.0} |
| 0.5001 | 10.0 | 1210 | 0.3580 | 0.5899 | {'bleu': 0.1381005218002083, 'precisions': [0.42647221301513644, 0.2020027197428607, 0.09708193041526375, 0.04671839637892014], 'brevity_penalty': 0.9822606533452595, 'length_ratio': 0.9824161510908499, 'translation_length': 9051, 'reference_length': 9213} | {'rouge1': 0.0, 'rouge2': 0.0, 'rougeL': 0.0, 'rougeLsum': 0.0} |
| 0.482 | 11.0 | 1331 | 0.3538 | 0.5894 | {'bleu': 0.1433986930790172, 'precisions': [0.4262295081967213, 0.20443838333746592, 0.10175932441942294, 0.051760506246957654], 'brevity_penalty': 0.9797167269065808, 'length_ratio': 0.9799196787148594, 'translation_length': 9028, 'reference_length': 9213} | {'rouge1': 0.0, 'rouge2': 0.0, 'rougeL': 0.0, 'rougeLsum': 0.0} |
| 0.4755 | 12.0 | 1452 | 0.3485 | 0.5860 | {'bleu': 0.14969579657748497, 'precisions': [0.4299645390070922, 0.21161002232696602, 0.10744965497817209, 0.055853222925799646], 'brevity_penalty': 0.9792736565176406, 'length_ratio': 0.9794855096059916, 'translation_length': 9024, 'reference_length': 9213} | {'rouge1': 0.0, 'rouge2': 0.0, 'rougeL': 0.0, 'rougeLsum': 0.0} |
| 0.4663 | 13.0 | 1573 | 0.3582 | 0.5948 | {'bleu': 0.13789844298163847, 'precisions': [0.42050093787928944, 0.1963955067275645, 0.09663865546218488, 0.048410521219945137], 'brevity_penalty': 0.9835854011732358, 'length_ratio': 0.9837186584174537, 'translation_length': 9063, 'reference_length': 9213} | {'rouge1': 0.0, 'rouge2': 0.0, 'rougeL': 0.0, 'rougeLsum': 0.0} |
| 0.4862 | 14.0 | 1694 | 0.3405 | 0.5753 | {'bleu': 0.16755433246839507, 'precisions': [0.44081497065662717, 0.22419134960961706, 0.12253798536859876, 0.07054816736944534], 'brevity_penalty': 0.9800489035331547, 'length_ratio': 0.9802453055465103, 'translation_length': 9031, 'reference_length': 9213} | {'rouge1': 0.0, 'rouge2': 0.0, 'rougeL': 0.0, 'rougeLsum': 0.0} |
| 0.4745 | 15.0 | 1815 | 0.3422 | 0.5763 | {'bleu': 0.1536234645948005, 'precisions': [0.43993794326241137, 0.21719176383031505, 0.10998450922405295, 0.05762987012987013], 'brevity_penalty': 0.9792736565176406, 'length_ratio': 0.9794855096059916, 'translation_length': 9024, 'reference_length': 9213} | {'rouge1': 0.0, 'rouge2': 0.0, 'rougeL': 0.0, 'rougeLsum': 0.0} |
| 0.4736 | 16.0 | 1936 | 0.3341 | 0.5685 | {'bleu': 0.17290916107086277, 'precisions': [0.4472433985195006, 0.2301891457534924, 0.1271043771043771, 0.07337966704380151], 'brevity_penalty': 0.9822606533452595, 'length_ratio': 0.9824161510908499, 'translation_length': 9051, 'reference_length': 9213} | {'rouge1': 0.0, 'rouge2': 0.0, 'rougeL': 0.0, 'rougeLsum': 0.0} |
| 0.4583 | 17.0 | 2057 | 0.3318 | 0.5657 | {'bleu': 0.1716852104726425, 'precisions': [0.4503597122302158, 0.23287501548371115, 0.1262654668166479, 0.07098865478119935], 'brevity_penalty': 0.9804916375458205, 'length_ratio': 0.9806794746553783, 'translation_length': 9035, 'reference_length': 9213} | {'rouge1': 0.0, 'rouge2': 0.0, 'rougeL': 0.0, 'rougeLsum': 0.0} |
| 0.4551 | 18.0 | 2178 | 0.3335 | 0.5633 | {'bleu': 0.16701443400029833, 'precisions': [0.4525805028672254, 0.22921292869479398, 0.1224632610216935, 0.06529098823150088], 'brevity_penalty': 0.9841368705211414, 'length_ratio': 0.9842613698035385, 'translation_length': 9068, 'reference_length': 9213} | {'rouge1': 0.0, 'rouge2': 0.0, 'rougeL': 0.0, 'rougeLsum': 0.0} |
| 0.4481 | 19.0 | 2299 | 0.3296 | 0.5607 | {'bleu': 0.1729834332448216, 'precisions': [0.4553581282419159, 0.2354611680454377, 0.1272065004202858, 0.07020658489347967], 'brevity_penalty': 0.9833647296422493, 'length_ratio': 0.9835015738630196, 'translation_length': 9061, 'reference_length': 9213} | {'rouge1': 0.0, 'rouge2': 0.0, 'rougeL': 0.0, 'rougeLsum': 0.0} |
| 0.4514 | 20.0 | 2420 | 0.3267 | 0.5555 | {'bleu': 0.17945797616615725, 'precisions': [0.4616664817485854, 0.24146068811327787, 0.13328631875881522, 0.07628497072218608], 'brevity_penalty': 0.9780542210332569, 'length_ratio': 0.9782915445566048, 'translation_length': 9013, 'reference_length': 9213} | {'rouge1': 0.0, 'rouge2': 0.0, 'rougeL': 0.0, 'rougeLsum': 0.0} |
| 0.4522 | 21.0 | 2541 | 0.3293 | 0.5527 | {'bleu': 0.17917597769682736, 'precisions': [0.46393805309734515, 0.2416439712800198, 0.13151608823942673, 0.07546558704453442], 'brevity_penalty': 0.9810447849231894, 'length_ratio': 0.9812221860414632, 'translation_length': 9040, 'reference_length': 9213} | {'rouge1': 0.0, 'rouge2': 0.0, 'rougeL': 0.0, 'rougeLsum': 0.0} |
| 0.439 | 22.0 | 2662 | 0.3274 | 0.5484 | {'bleu': 0.1837033708040611, 'precisions': [0.4683110275412012, 0.24495605891818295, 0.1361337454341107, 0.07869170984455959], 'brevity_penalty': 0.9811553783926978, 'length_ratio': 0.9813307283186802, 'translation_length': 9041, 'reference_length': 9213} | {'rouge1': 0.0, 'rouge2': 0.0, 'rougeL': 0.0, 'rougeLsum': 0.0} |
| 0.4342 | 23.0 | 2783 | 0.3296 | 0.5537 | {'bleu': 0.17822041427852187, 'precisions': [0.46242010138858275, 0.24001479289940827, 0.13158998741434763, 0.0734417780641005], 'brevity_penalty': 0.984798238899528, 'length_ratio': 0.9849126234668404, 'translation_length': 9074, 'reference_length': 9213} | {'rouge1': 0.0, 'rouge2': 0.0, 'rougeL': 0.0, 'rougeLsum': 0.0} |
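The Bleu and Rouge columns hold dictionaries whose keys match the output format of the evaluate library's bleu and rouge metrics, and Wer is a single error-rate value. The sketch below shows how such numbers are typically computed; the example strings are invented placeholders, not the evaluation code actually used for this card.

```python
# Sketch of computing WER / BLEU / ROUGE in the shapes shown in the table above.
# The prediction/reference strings are invented placeholders.
import evaluate  # the WER metric additionally requires the jiwer package

wer = evaluate.load("wer")
bleu = evaluate.load("bleu")
rouge = evaluate.load("rouge")

predictions = ["a decoded transcription"]
references = ["the reference transcription"]

print(wer.compute(predictions=predictions, references=references))    # float, like the Wer column
print(bleu.compute(predictions=predictions, references=references))   # dict with 'bleu', 'precisions', ...
print(rouge.compute(predictions=predictions, references=references))  # dict with 'rouge1', 'rouge2', ...
```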

### Framework versions
adapter.ardz.safetensors
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:65c50b23ea98bb9ce3ef3154ae56856ca873a08591f6a1883f9b459390b50ab5
+size 8936896
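The added weight file follows the adapter.&lt;lang&gt;.safetensors naming that MMS checkpoints use for per-language adapters, which suggests the adapter code here is ardz. Below is a minimal inference sketch under that assumption; the repo id is a placeholder, and the calls assume the repository keeps the standard MMS adapter and tokenizer layout.

```python
# Hypothetical usage sketch. The repo id is a placeholder and the language code
# "ardz" is inferred from the adapter filename above, not stated in the card.
import torch
import librosa
from transformers import AutoProcessor, Wav2Vec2ForCTC

repo_id = "<namespace>/kab-dz"  # placeholder

processor = AutoProcessor.from_pretrained(repo_id)
model = Wav2Vec2ForCTC.from_pretrained(repo_id)

# MMS-style checkpoints swap per-language adapters at runtime.
processor.tokenizer.set_target_lang("ardz")
model.load_adapter("ardz")

audio, _ = librosa.load("sample.wav", sr=16_000)  # MMS models expect 16 kHz mono audio
inputs = processor(audio, sampling_rate=16_000, return_tensors="pt")

with torch.no_grad():
    logits = model(**inputs).logits

pred_ids = torch.argmax(logits, dim=-1)[0]
print(processor.decode(pred_ids))
```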
config.json
CHANGED
@@ -77,7 +77,7 @@
   "num_hidden_layers": 48,
   "num_negatives": 100,
   "output_hidden_size": 1280,
-  "pad_token_id":
+  "pad_token_id": 55,
   "proj_codevector_dim": 1024,
   "tdnn_dilation": [
     1,
@@ -103,6 +103,6 @@
   "torch_dtype": "float32",
   "transformers_version": "4.49.0",
   "use_weighted_layer_sum": false,
-  "vocab_size":
+  "vocab_size": 58,
   "xvector_output_dim": 512
 }
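The two changed values (pad_token_id: 55, vocab_size: 58) are what you would expect when a new CTC character vocabulary is attached to the base model. A small, hypothetical sanity check, assuming the repository also ships matching tokenizer files; the repo id is a placeholder:

```python
# Hypothetical sanity check; the repo id is a placeholder.
from transformers import AutoConfig, Wav2Vec2CTCTokenizer

repo_id = "<namespace>/kab-dz"
config = AutoConfig.from_pretrained(repo_id)
tokenizer = Wav2Vec2CTCTokenizer.from_pretrained(repo_id)

# The CTC head emits one logit per vocabulary entry, and the pad token
# (used as the CTC blank) must fall inside that range.
print(len(tokenizer), config.vocab_size, config.pad_token_id)  # expected to line up with 58 / 55
assert config.pad_token_id < config.vocab_size
```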
model.safetensors
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
-size
+oid sha256:eb0a136f7efce07970beea28ae31d204e05cf7bcaa3dac227d8e052067e33a1c
+size 3859029272
training_args.bin
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:deaac3c5bb95d3eaad58054e915651940eb25638a85929c5f3408035cb92f0de
 size 5368
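training_args.bin is the TrainingArguments object pickled by the Trainer, so it can be inspected directly. The sketch below assumes a transformers install compatible with the 4.49.0 that wrote it, and passes weights_only=False because the file is a pickle rather than a tensor checkpoint.

```python
# Sketch: inspecting the serialized training arguments from this commit.
# Assumes a transformers version compatible with the one that produced the file.
import torch
import transformers  # must be installed so the pickled TrainingArguments class can be resolved

args = torch.load("training_args.bin", weights_only=False)
print(type(args))
print(args.learning_rate, args.per_device_train_batch_size, args.num_train_epochs, args.warmup_steps)
```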