cantillation committed (verified)
Commit e6c0e6b · 1 Parent(s): f7e2e0d

Model save
README.md ADDED
@@ -0,0 +1,117 @@
+ ---
+ library_name: transformers
+ license: apache-2.0
+ base_model: ivrit-ai/whisper-large-v3-turbo
+ tags:
+ - generated_from_trainer
+ metrics:
+ - wer
+ model-index:
+ - name: Teamim-IvritAI-large-v3-turbo-new_WeightDecay-0.005_Augmented_date-07-05-2025
+   results: []
+ ---
+
+ <!-- This model card has been generated automatically according to the information the Trainer had access to. You
+ should probably proofread and complete it, then remove this comment. -->
+
+ # Teamim-IvritAI-large-v3-turbo-new_WeightDecay-0.005_Augmented_date-07-05-2025
+
+ This model is a fine-tuned version of [ivrit-ai/whisper-large-v3-turbo](https://huggingface.co/ivrit-ai/whisper-large-v3-turbo) on an unknown dataset.
+ It achieves the following results on the evaluation set:
+ - Loss: 5.0244
+ - Wer: 97.8059
+ - Avg Precision Exact: 0.0463
+ - Avg Recall Exact: 0.1014
+ - Avg F1 Exact: 0.0598
+ - Avg Precision Letter Shift: 0.0622
+ - Avg Recall Letter Shift: 0.1383
+ - Avg F1 Letter Shift: 0.0805
+ - Avg Precision Word Level: 0.0777
+ - Avg Recall Word Level: 0.1656
+ - Avg F1 Word Level: 0.0970
+ - Avg Precision Word Shift: 0.1542
+ - Avg Recall Word Shift: 0.3497
+ - Avg F1 Word Shift: 0.1988
+ - Precision Median Exact: 0.0227
+ - Recall Median Exact: 0.0625
+ - F1 Median Exact: 0.0357
+ - Precision Max Exact: 1.0
+ - Recall Max Exact: 1.0
+ - F1 Max Exact: 1.0
+ - Precision Min Exact: 0.0
+ - Recall Min Exact: 0.0
+ - F1 Min Exact: 0.0
+ - Precision Min Letter Shift: 0.0
+ - Recall Min Letter Shift: 0.0
+ - F1 Min Letter Shift: 0.0
+ - Precision Min Word Level: 0.0
+ - Recall Min Word Level: 0.0
+ - F1 Min Word Level: 0.0
+ - Precision Min Word Shift: 0.0
+ - Recall Min Word Shift: 0.0
+ - F1 Min Word Shift: 0.0
+
+ ## Model description
+
+ More information needed
+
+ ## Intended uses & limitations
+
+ More information needed
+
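+ A minimal inference sketch is given below, assuming the checkpoint is published as `cantillation/Teamim-IvritAI-large-v3-turbo-new_WeightDecay-0.005_Augmented_date-07-05-2025` (a repo id inferred from the committer and model name, not confirmed by this card) and that the input is Hebrew speech; adjust the model id and audio path to your setup.
+
+ ```python
+ # Hedged sketch: run the fine-tuned Whisper checkpoint through the ASR pipeline.
+ # The repo id and audio file below are assumptions/placeholders.
+ import torch
+ from transformers import pipeline
+
+ model_id = "cantillation/Teamim-IvritAI-large-v3-turbo-new_WeightDecay-0.005_Augmented_date-07-05-2025"
+
+ asr = pipeline(
+     "automatic-speech-recognition",
+     model=model_id,
+     torch_dtype=torch.float16 if torch.cuda.is_available() else torch.float32,
+     device=0 if torch.cuda.is_available() else -1,
+ )
+
+ result = asr(
+     "example.wav",  # placeholder audio file
+     generate_kwargs={"language": "he", "task": "transcribe"},  # the base model is multilingual
+     return_timestamps=True,  # needed for audio longer than 30 seconds
+ )
+ print(result["text"])
+ ```
+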
+ ## Training and evaluation data
+
+ More information needed
+
+ ## Training procedure
+
+ ### Training hyperparameters
+
+ The following hyperparameters were used during training:
+ - learning_rate: 1e-05
+ - train_batch_size: 16
+ - eval_batch_size: 2
+ - seed: 42
+ - optimizer: AdamW (torch) with betas=(0.9, 0.999) and epsilon=1e-08; no additional optimizer arguments
+ - lr_scheduler_type: linear
+ - lr_scheduler_warmup_steps: 1000
+ - training_steps: 60000
+ - mixed_precision_training: Native AMP
+
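+ For reference, the sketch below reconstructs these settings as `Seq2SeqTrainingArguments`. It is inferred from the list above (plus the weight decay encoded in the model name) rather than taken from the original training script, and anything not listed here is a placeholder or default.
+
+ ```python
+ # Hedged reconstruction of the hyperparameters listed in this card.
+ # output_dir, logging and data handling are placeholders/defaults, not documented values.
+ from transformers import Seq2SeqTrainingArguments
+
+ training_args = Seq2SeqTrainingArguments(
+     output_dir="./whisper-teamim",   # placeholder
+     learning_rate=1e-5,
+     per_device_train_batch_size=16,
+     per_device_eval_batch_size=2,
+     seed=42,
+     optim="adamw_torch",             # AdamW with betas=(0.9, 0.999), eps=1e-8
+     weight_decay=0.005,              # inferred from the model name, not the list above
+     lr_scheduler_type="linear",
+     warmup_steps=1000,
+     max_steps=60000,
+     fp16=True,                       # "Native AMP" mixed precision
+     predict_with_generate=True,      # assumption: typical for Whisper fine-tuning
+ )
+ ```
+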
+ ### Training results
+
+ | Training Loss | Epoch | Step | Validation Loss | Wer | Avg Precision Exact | Avg Recall Exact | Avg F1 Exact | Avg Precision Letter Shift | Avg Recall Letter Shift | Avg F1 Letter Shift | Avg Precision Word Level | Avg Recall Word Level | Avg F1 Word Level | Avg Precision Word Shift | Avg Recall Word Shift | Avg F1 Word Shift | Precision Median Exact | Recall Median Exact | F1 Median Exact | Precision Max Exact | Recall Max Exact | F1 Max Exact | Precision Min Exact | Recall Min Exact | F1 Min Exact | Precision Min Letter Shift | Recall Min Letter Shift | F1 Min Letter Shift | Precision Min Word Level | Recall Min Word Level | F1 Min Word Level | Precision Min Word Shift | Recall Min Word Shift | F1 Min Word Shift |
+ |:-------------:|:------:|:-----:|:---------------:|:--------:|:-------------------:|:----------------:|:------------:|:--------------------------:|:-----------------------:|:-------------------:|:------------------------:|:---------------------:|:-----------------:|:------------------------:|:---------------------:|:-----------------:|:----------------------:|:-------------------:|:---------------:|:-------------------:|:----------------:|:------------:|:-------------------:|:----------------:|:------------:|:--------------------------:|:-----------------------:|:-------------------:|:------------------------:|:---------------------:|:-----------------:|:------------------------:|:---------------------:|:-----------------:|
+ | No log | 0.0002 | 1 | 7.1581 | 109.4023 | 0.0 | 0.0 | 0.0 | 0.0 | 0.0 | 0.0 | 0.0 | 0.0 | 0.0 | 0.0 | 0.0 | 0.0 | 0.0 | 0.0 | 0.0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 |
+ | 0.0568 | 0.3754 | 2500 | 2.0970 | 96.9729 | 0.0613 | 0.0775 | 0.0671 | 0.0842 | 0.1078 | 0.0923 | 0.1037 | 0.1324 | 0.1135 | 0.2206 | 0.2995 | 0.2485 | 0.0345 | 0.05 | 0.04 | 1.0 | 1.0 | 1.0 | 0.0 | 0.0 | 0.0 | 0.0 | 0.0 | 0.0 | 0.0 | 0.0 | 0.0 | 0.0 | 0.0 | 0.0 |
+ | 0.0314 | 0.7508 | 5000 | 2.3109 | 96.4492 | 0.0709 | 0.0953 | 0.0792 | 0.0932 | 0.1273 | 0.1045 | 0.1089 | 0.1502 | 0.1225 | 0.2270 | 0.3279 | 0.2605 | 0.0357 | 0.0588 | 0.0435 | 1.0 | 1.0 | 1.0 | 0.0 | 0.0 | 0.0 | 0.0 | 0.0 | 0.0 | 0.0 | 0.0 | 0.0 | 0.0 | 0.0 | 0.0 |
+ | 0.018 | 1.1261 | 7500 | 2.5849 | 97.4587 | 0.0458 | 0.0776 | 0.0548 | 0.0637 | 0.1096 | 0.0766 | 0.0775 | 0.1319 | 0.0926 | 0.1673 | 0.2959 | 0.2038 | 0.025 | 0.0455 | 0.0331 | 1.0 | 1.0 | 1.0 | 0.0 | 0.0 | 0.0 | 0.0 | 0.0 | 0.0 | 0.0 | 0.0 | 0.0 | 0.0 | 0.0 | 0.0 |
+ | 0.0075 | 1.5015 | 10000 | 3.1851 | 97.9489 | 0.0397 | 0.0824 | 0.0507 | 0.0539 | 0.1151 | 0.0693 | 0.0664 | 0.1414 | 0.0847 | 0.1476 | 0.3210 | 0.1893 | 0.0204 | 0.05 | 0.0317 | 1.0 | 1.0 | 0.8571 | 0.0 | 0.0 | 0.0 | 0.0 | 0.0 | 0.0 | 0.0 | 0.0 | 0.0 | 0.0 | 0.0 | 0.0 |
+ | 0.0153 | 1.8769 | 12500 | 2.7215 | 96.5921 | 0.0645 | 0.0950 | 0.0740 | 0.0868 | 0.1307 | 0.1001 | 0.1080 | 0.1624 | 0.1228 | 0.2293 | 0.3653 | 0.2688 | 0.0323 | 0.0625 | 0.0417 | 1.0 | 1.0 | 1.0 | 0.0 | 0.0 | 0.0 | 0.0 | 0.0 | 0.0 | 0.0 | 0.0 | 0.0 | 0.0 | 0.0 | 0.0 |
+ | 0.0108 | 2.2523 | 15000 | 3.2672 | 97.2092 | 0.0544 | 0.1084 | 0.0685 | 0.0710 | 0.1448 | 0.0901 | 0.0848 | 0.1708 | 0.1064 | 0.1695 | 0.3629 | 0.2190 | 0.0270 | 0.0714 | 0.0392 | 1.0 | 1.0 | 1.0 | 0.0 | 0.0 | 0.0 | 0.0 | 0.0 | 0.0 | 0.0 | 0.0 | 0.0 | 0.0 | 0.0 | 0.0 |
+ | 0.0077 | 2.6276 | 17500 | 3.2673 | 97.1625 | 0.0551 | 0.1014 | 0.0684 | 0.0749 | 0.1379 | 0.0922 | 0.0895 | 0.1631 | 0.1091 | 0.1845 | 0.3500 | 0.2292 | 0.0278 | 0.0625 | 0.0392 | 1.0 | 1.0 | 1.0 | 0.0 | 0.0 | 0.0 | 0.0 | 0.0 | 0.0 | 0.0 | 0.0 | 0.0 | 0.0 | 0.0 | 0.0 |
+ | 0.0179 | 3.0030 | 20000 | 3.2682 | 97.2967 | 0.0516 | 0.0918 | 0.0634 | 0.0712 | 0.1274 | 0.0875 | 0.0869 | 0.1538 | 0.1061 | 0.1856 | 0.3430 | 0.2305 | 0.0278 | 0.0625 | 0.0385 | 1.0 | 0.8125 | 0.6667 | 0.0 | 0.0 | 0.0 | 0.0 | 0.0 | 0.0 | 0.0 | 0.0 | 0.0 | 0.0 | 0.0 | 0.0 |
+ | 0.0066 | 3.3784 | 22500 | 3.5714 | 97.3726 | 0.0454 | 0.0955 | 0.0584 | 0.0621 | 0.1316 | 0.0796 | 0.0742 | 0.1559 | 0.0944 | 0.1542 | 0.3349 | 0.1987 | 0.0227 | 0.0556 | 0.0345 | 0.8571 | 1.0 | 0.8571 | 0.0 | 0.0 | 0.0 | 0.0 | 0.0 | 0.0 | 0.0 | 0.0 | 0.0 | 0.0 | 0.0 | 0.0 |
+ | 0.0042 | 3.7538 | 25000 | 3.8114 | 97.9270 | 0.0404 | 0.0894 | 0.0525 | 0.0558 | 0.1247 | 0.0726 | 0.0677 | 0.1516 | 0.0883 | 0.1420 | 0.3347 | 0.1890 | 0.0222 | 0.0588 | 0.0345 | 1.0 | 1.0 | 1.0 | 0.0 | 0.0 | 0.0 | 0.0 | 0.0 | 0.0 | 0.0 | 0.0 | 0.0 | 0.0 | 0.0 | 0.0 |
+ | 0.0014 | 4.1291 | 27500 | 4.0215 | 96.8212 | 0.0506 | 0.1030 | 0.0654 | 0.0667 | 0.1376 | 0.0863 | 0.0801 | 0.1627 | 0.1023 | 0.1651 | 0.3500 | 0.2142 | 0.0263 | 0.0667 | 0.0385 | 1.0 | 1.0 | 1.0 | 0.0 | 0.0 | 0.0 | 0.0 | 0.0 | 0.0 | 0.0 | 0.0 | 0.0 | 0.0 | 0.0 | 0.0 |
+ | 0.0021 | 4.5045 | 30000 | 4.0509 | 98.0262 | 0.0395 | 0.0905 | 0.0523 | 0.0545 | 0.1243 | 0.0716 | 0.0666 | 0.1525 | 0.0873 | 0.1443 | 0.3387 | 0.1904 | 0.0217 | 0.0625 | 0.0345 | 1.0 | 1.0 | 1.0 | 0.0 | 0.0 | 0.0 | 0.0 | 0.0 | 0.0 | 0.0 | 0.0 | 0.0 | 0.0 | 0.0 | 0.0 |
+ | 0.0028 | 4.8799 | 32500 | 3.9264 | 98.0685 | 0.0381 | 0.0844 | 0.0492 | 0.0513 | 0.1175 | 0.0667 | 0.0632 | 0.1427 | 0.0812 | 0.1319 | 0.3158 | 0.1729 | 0.0 | 0.0 | 0.0 | 0.7778 | 1.0 | 0.8750 | 0.0 | 0.0 | 0.0 | 0.0 | 0.0 | 0.0 | 0.0 | 0.0 | 0.0 | 0.0 | 0.0 | 0.0 |
+ | 0.0027 | 5.2553 | 35000 | 4.5942 | 97.9532 | 0.0446 | 0.0991 | 0.0585 | 0.0593 | 0.1323 | 0.0770 | 0.0731 | 0.1586 | 0.0924 | 0.1465 | 0.3347 | 0.1893 | 0.0227 | 0.0667 | 0.0348 | 1.0 | 1.0 | 1.0 | 0.0 | 0.0 | 0.0 | 0.0 | 0.0 | 0.0 | 0.0 | 0.0 | 0.0 | 0.0 | 0.0 | 0.0 |
+ | 0.0019 | 5.6306 | 37500 | 4.1899 | 97.8249 | 0.0437 | 0.1073 | 0.0581 | 0.0569 | 0.1405 | 0.0753 | 0.0679 | 0.1646 | 0.0884 | 0.1375 | 0.3415 | 0.1811 | 0.0204 | 0.0588 | 0.0331 | 1.0 | 1.0 | 1.0 | 0.0 | 0.0 | 0.0 | 0.0 | 0.0 | 0.0 | 0.0 | 0.0 | 0.0 | 0.0 | 0.0 | 0.0 |
+ | 0.0011 | 6.0060 | 40000 | 4.5501 | 98.1939 | 0.0396 | 0.0917 | 0.0528 | 0.0555 | 0.1270 | 0.0728 | 0.0689 | 0.1550 | 0.0890 | 0.1460 | 0.3409 | 0.1913 | 0.0196 | 0.0588 | 0.0317 | 0.7778 | 1.0 | 0.8235 | 0.0 | 0.0 | 0.0 | 0.0 | 0.0 | 0.0 | 0.0 | 0.0 | 0.0 | 0.0 | 0.0 | 0.0 |
+ | 0.0008 | 6.3814 | 42500 | 4.3012 | 97.9882 | 0.0421 | 0.0935 | 0.0552 | 0.0577 | 0.1284 | 0.0753 | 0.0710 | 0.1548 | 0.0909 | 0.1450 | 0.3337 | 0.1898 | 0.0217 | 0.0588 | 0.0339 | 1.0 | 1.0 | 1.0 | 0.0 | 0.0 | 0.0 | 0.0 | 0.0 | 0.0 | 0.0 | 0.0 | 0.0 | 0.0 | 0.0 | 0.0 |
+ | 0.0007 | 6.7568 | 45000 | 4.2077 | 97.8409 | 0.0456 | 0.0924 | 0.0583 | 0.0623 | 0.1262 | 0.0792 | 0.0756 | 0.1512 | 0.0949 | 0.1578 | 0.3287 | 0.2014 | 0.0244 | 0.0625 | 0.0357 | 1.0 | 1.0 | 1.0 | 0.0 | 0.0 | 0.0 | 0.0 | 0.0 | 0.0 | 0.0 | 0.0 | 0.0 | 0.0 | 0.0 | 0.0 |
+ | 0.0007 | 7.1321 | 47500 | 4.5387 | 97.6308 | 0.0510 | 0.1027 | 0.0652 | 0.0679 | 0.1377 | 0.0863 | 0.0815 | 0.1632 | 0.1026 | 0.1655 | 0.3474 | 0.2120 | 0.0263 | 0.0667 | 0.0385 | 1.0 | 1.0 | 1.0 | 0.0 | 0.0 | 0.0 | 0.0 | 0.0 | 0.0 | 0.0 | 0.0 | 0.0 | 0.0 | 0.0 | 0.0 |
+ | 0.001 | 7.5075 | 50000 | 4.3632 | 98.0670 | 0.0457 | 0.0830 | 0.0565 | 0.0635 | 0.1169 | 0.0785 | 0.0792 | 0.1419 | 0.0955 | 0.1735 | 0.3260 | 0.2133 | 0.0253 | 0.0556 | 0.0357 | 1.0 | 1.0 | 0.8 | 0.0 | 0.0 | 0.0 | 0.0 | 0.0 | 0.0 | 0.0 | 0.0 | 0.0 | 0.0 | 0.0 | 0.0 |
+ | 0.0004 | 7.8829 | 52500 | 4.4452 | 97.6060 | 0.0445 | 0.0961 | 0.0580 | 0.0599 | 0.1288 | 0.0772 | 0.0723 | 0.1532 | 0.0921 | 0.1513 | 0.3325 | 0.1962 | 0.0233 | 0.0625 | 0.0351 | 1.0 | 1.0 | 1.0 | 0.0 | 0.0 | 0.0 | 0.0 | 0.0 | 0.0 | 0.0 | 0.0 | 0.0 | 0.0 | 0.0 | 0.0 |
+ | 0.0001 | 8.2583 | 55000 | 4.5731 | 97.7607 | 0.0475 | 0.1018 | 0.0614 | 0.0635 | 0.1380 | 0.0821 | 0.0787 | 0.1659 | 0.0992 | 0.1603 | 0.3549 | 0.2071 | 0.025 | 0.0667 | 0.0377 | 1.0 | 1.0 | 1.0 | 0.0 | 0.0 | 0.0 | 0.0 | 0.0 | 0.0 | 0.0 | 0.0 | 0.0 | 0.0 | 0.0 | 0.0 |
+ | 0.0001 | 8.6336 | 57500 | 4.9017 | 97.6148 | 0.0474 | 0.1006 | 0.0609 | 0.0625 | 0.1370 | 0.0811 | 0.0777 | 0.1637 | 0.0978 | 0.1550 | 0.3454 | 0.2009 | 0.025 | 0.0667 | 0.0377 | 1.0 | 1.0 | 1.0 | 0.0 | 0.0 | 0.0 | 0.0 | 0.0 | 0.0 | 0.0 | 0.0 | 0.0 | 0.0 | 0.0 | 0.0 |
+ | 0.0 | 9.0090 | 60000 | 5.0244 | 97.8059 | 0.0463 | 0.1014 | 0.0598 | 0.0622 | 0.1383 | 0.0805 | 0.0777 | 0.1656 | 0.0970 | 0.1542 | 0.3497 | 0.1988 | 0.0227 | 0.0625 | 0.0357 | 1.0 | 1.0 | 1.0 | 0.0 | 0.0 | 0.0 | 0.0 | 0.0 | 0.0 | 0.0 | 0.0 | 0.0 | 0.0 | 0.0 | 0.0 |
+
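+ The Wer column is the standard word error rate, reported here on a 0–100 scale. A minimal sketch of how such a score is typically computed with the `evaluate` library follows; the exact/letter-shift/word-level/word-shift precision-recall columns are project-specific alignment metrics and are not reproduced here.
+
+ ```python
+ # Hedged sketch: word error rate with the `evaluate` library.
+ # The reference/prediction strings are illustrative, not taken from the actual eval set.
+ import evaluate
+
+ wer_metric = evaluate.load("wer")
+
+ references = ["בראשית ברא אלהים"]   # illustrative reference transcript
+ predictions = ["בראשית ברא"]        # illustrative model output
+
+ wer = wer_metric.compute(references=references, predictions=predictions)
+ print(f"WER: {100 * wer:.2f}%")  # scaled to a percentage, as in the table above
+ ```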
+
+ ### Framework versions
+
+ - Transformers 4.49.0
+ - PyTorch 2.7.0+cu126
+ - Datasets 2.12.0
+ - Tokenizers 0.20.1
generation_config.json ADDED
@@ -0,0 +1,151 @@
+ {
+   "alignment_heads": [
+     [
+       2,
+       4
+     ],
+     [
+       2,
+       11
+     ],
+     [
+       3,
+       3
+     ],
+     [
+       3,
+       6
+     ],
+     [
+       3,
+       11
+     ],
+     [
+       3,
+       14
+     ]
+   ],
+   "attn_implementation": "sdpa",
+   "begin_suppress_tokens": [
+     220,
+     50257
+   ],
+   "bos_token_id": 50257,
+   "decoder_start_token_id": 50258,
+   "eos_token_id": 50257,
+   "is_multilingual": true,
+   "lang_to_id": {
+     "<|af|>": 50327,
+     "<|am|>": 50334,
+     "<|ar|>": 50272,
+     "<|as|>": 50350,
+     "<|az|>": 50304,
+     "<|ba|>": 50355,
+     "<|be|>": 50330,
+     "<|bg|>": 50292,
+     "<|bn|>": 50302,
+     "<|bo|>": 50347,
+     "<|br|>": 50309,
+     "<|bs|>": 50315,
+     "<|ca|>": 50270,
+     "<|cs|>": 50283,
+     "<|cy|>": 50297,
+     "<|da|>": 50285,
+     "<|de|>": 50261,
+     "<|el|>": 50281,
+     "<|en|>": 50259,
+     "<|es|>": 50262,
+     "<|et|>": 50307,
+     "<|eu|>": 50310,
+     "<|fa|>": 50300,
+     "<|fi|>": 50277,
+     "<|fo|>": 50338,
+     "<|fr|>": 50265,
+     "<|gl|>": 50319,
+     "<|gu|>": 50333,
+     "<|haw|>": 50352,
+     "<|ha|>": 50354,
+     "<|he|>": 50279,
+     "<|hi|>": 50276,
+     "<|hr|>": 50291,
+     "<|ht|>": 50339,
+     "<|hu|>": 50286,
+     "<|hy|>": 50312,
+     "<|id|>": 50275,
+     "<|is|>": 50311,
+     "<|it|>": 50274,
+     "<|ja|>": 50266,
+     "<|jw|>": 50356,
+     "<|ka|>": 50329,
+     "<|kk|>": 50316,
+     "<|km|>": 50323,
+     "<|kn|>": 50306,
+     "<|ko|>": 50264,
+     "<|la|>": 50294,
+     "<|lb|>": 50345,
+     "<|ln|>": 50353,
+     "<|lo|>": 50336,
+     "<|lt|>": 50293,
+     "<|lv|>": 50301,
+     "<|mg|>": 50349,
+     "<|mi|>": 50295,
+     "<|mk|>": 50308,
+     "<|ml|>": 50296,
+     "<|mn|>": 50314,
+     "<|mr|>": 50320,
+     "<|ms|>": 50282,
+     "<|mt|>": 50343,
+     "<|my|>": 50346,
+     "<|ne|>": 50313,
+     "<|nl|>": 50271,
+     "<|nn|>": 50342,
+     "<|no|>": 50288,
+     "<|oc|>": 50328,
+     "<|pa|>": 50321,
+     "<|pl|>": 50269,
+     "<|ps|>": 50340,
+     "<|pt|>": 50267,
+     "<|ro|>": 50284,
+     "<|ru|>": 50263,
+     "<|sa|>": 50344,
+     "<|sd|>": 50332,
+     "<|si|>": 50322,
+     "<|sk|>": 50298,
+     "<|sl|>": 50305,
+     "<|sn|>": 50324,
+     "<|so|>": 50326,
+     "<|sq|>": 50317,
+     "<|sr|>": 50303,
+     "<|su|>": 50357,
+     "<|sv|>": 50273,
+     "<|sw|>": 50318,
+     "<|ta|>": 50287,
+     "<|te|>": 50299,
+     "<|tg|>": 50331,
+     "<|th|>": 50289,
+     "<|tk|>": 50341,
+     "<|tl|>": 50348,
+     "<|tr|>": 50268,
+     "<|tt|>": 50351,
+     "<|uk|>": 50280,
+     "<|ur|>": 50290,
+     "<|uz|>": 50337,
+     "<|vi|>": 50278,
+     "<|yi|>": 50335,
+     "<|yo|>": 50325,
+     "<|yue|>": 50358,
+     "<|zh|>": 50260
+   },
+   "max_initial_timestamp_index": 50,
+   "max_length": 448,
+   "no_timestamps_token_id": 50364,
+   "pad_token_id": 50257,
+   "prev_sot_token_id": 50362,
+   "return_timestamps": false,
+   "suppress_tokens": [],
+   "task_to_id": {
+     "transcribe": 50360,
+     "translate": 50359
+   },
+   "transformers_version": "4.49.0"
+ }
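
The generation config above marks the checkpoint as multilingual and maps language and task tokens (e.g. `"<|he|>": 50279`, `"transcribe": 50360`). A hedged sketch of pinning generation to Hebrew transcription through these mappings follows; the model id and waveform are placeholders.

```python
# Hedged sketch: force Hebrew transcription via the lang_to_id / task_to_id
# mappings in the generation config above. Model id and audio are placeholders.
import numpy as np
from transformers import WhisperForConditionalGeneration, WhisperProcessor

model_id = "cantillation/Teamim-IvritAI-large-v3-turbo-new_WeightDecay-0.005_Augmented_date-07-05-2025"
processor = WhisperProcessor.from_pretrained(model_id)
model = WhisperForConditionalGeneration.from_pretrained(model_id)

waveform = np.zeros(16000, dtype=np.float32)  # placeholder: 1 s of silence at 16 kHz
inputs = processor(waveform, sampling_rate=16000, return_tensors="pt")

# `language="he"` resolves to token 50279 and `task="transcribe"` to 50360.
generated_ids = model.generate(inputs.input_features, language="he", task="transcribe")
print(processor.batch_decode(generated_ids, skip_special_tokens=True)[0])
```
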
model.safetensors CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:50370b9a896ff91deaeb194bb5278297c0fb0af4f26c2236ef08701035906f4f
+ oid sha256:f2e9c9698417b712c17d3e9790b829494ca10d57f882bcb778b546411594d1ab
  size 3235740128
runs/May07_19-18-10_hetro232/events.out.tfevents.1746634715.hetro232 CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:702969f6c8155871f4aa7509b7253b4d1057a8dd7bc328f0e9238f0c9f750301
- size 575275
+ oid sha256:708108296ab8c3a6bc41297cab89f9d5364e423a28a780739a148fecd8b6bab2
+ size 575810