Upload 19 files
Browse files- README.md +315 -3
- config.json +54 -0
- configuration_modchembert.py +84 -0
- logs_modchembert_classification_ModChemBERT-MLM-DAPT-TAFT-OPT/modchembert_deepchem_splits_run_bace_classification_20250923_084801.log +351 -0
- logs_modchembert_classification_ModChemBERT-MLM-DAPT-TAFT-OPT/modchembert_deepchem_splits_run_bbbp_epochs100_batch_size64_20250923_021951.log +355 -0
- logs_modchembert_classification_ModChemBERT-MLM-DAPT-TAFT-OPT/modchembert_deepchem_splits_run_clintox_epochs100_batch_size32_20250923_040853.log +359 -0
- logs_modchembert_classification_ModChemBERT-MLM-DAPT-TAFT-OPT/modchembert_deepchem_splits_run_hiv_epochs100_batch_size32_20250923_080632.log +329 -0
- logs_modchembert_classification_ModChemBERT-MLM-DAPT-TAFT-OPT/modchembert_deepchem_splits_run_sider_epochs100_batch_size32_20250923_034834.log +363 -0
- logs_modchembert_classification_ModChemBERT-MLM-DAPT-TAFT-OPT/modchembert_deepchem_splits_run_tox21_epochs100_batch_size32_20250923_023906.log +329 -0
- logs_modchembert_regression_ModChemBERT-MLM-DAPT-TAFT-OPT/modchembert_deepchem_splits_run_bace_regression_epochs100_batch_size32_20250923_015823.log +325 -0
- logs_modchembert_regression_ModChemBERT-MLM-DAPT-TAFT-OPT/modchembert_deepchem_splits_run_clearance_epochs100_batch_size32_20250923_022405.log +331 -0
- logs_modchembert_regression_ModChemBERT-MLM-DAPT-TAFT-OPT/modchembert_deepchem_splits_run_delaney_epochs100_batch_size64_20250923_024047.log +413 -0
- logs_modchembert_regression_ModChemBERT-MLM-DAPT-TAFT-OPT/modchembert_deepchem_splits_run_freesolv_epochs100_batch_size32_20250923_025415.log +365 -0
- logs_modchembert_regression_ModChemBERT-MLM-DAPT-TAFT-OPT/modchembert_deepchem_splits_run_lipo_epochs100_batch_size32_20250923_094951.log +365 -0
- model.safetensors +3 -0
- modeling_modchembert.py +554 -0
- special_tokens_map.json +37 -0
- tokenizer.json +2554 -0
- tokenizer_config.json +53 -0
README.md
CHANGED
@@ -1,3 +1,315 @@
|
|
1 |
-
---
|
2 |
-
license: apache-2.0
|
3 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
---
|
2 |
+
license: apache-2.0
|
3 |
+
base_model: Derify/ModChemBERT-MLM-DAPT
|
4 |
+
datasets:
|
5 |
+
- Derify/augmented_canonical_druglike_QED_Pfizer_15M
|
6 |
+
metrics:
|
7 |
+
- roc_auc
|
8 |
+
- rmse
|
9 |
+
library_name: transformers
|
10 |
+
tags:
|
11 |
+
- modernbert
|
12 |
+
- ModChemBERT
|
13 |
+
- cheminformatics
|
14 |
+
- chemical-language-model
|
15 |
+
- molecular-property-prediction
|
16 |
+
- mergekit
|
17 |
+
- merge
|
18 |
+
pipeline_tag: fill-mask
|
19 |
+
model-index:
|
20 |
+
- name: Derify/ModChemBERT-MLM
|
21 |
+
results:
|
22 |
+
- task:
|
23 |
+
type: text-classification
|
24 |
+
name: Classification (ROC AUC)
|
25 |
+
dataset:
|
26 |
+
name: BACE
|
27 |
+
type: BACE
|
28 |
+
metrics:
|
29 |
+
- type: roc_auc
|
30 |
+
value: 0.8346
|
31 |
+
- task:
|
32 |
+
type: text-classification
|
33 |
+
name: Classification (ROC AUC)
|
34 |
+
dataset:
|
35 |
+
name: BBBP
|
36 |
+
type: BBBP
|
37 |
+
metrics:
|
38 |
+
- type: roc_auc
|
39 |
+
value: 0.7573
|
40 |
+
- task:
|
41 |
+
type: text-classification
|
42 |
+
name: Classification (ROC AUC)
|
43 |
+
dataset:
|
44 |
+
name: CLINTOX
|
45 |
+
type: CLINTOX
|
46 |
+
metrics:
|
47 |
+
- type: roc_auc
|
48 |
+
value: 0.9938
|
49 |
+
- task:
|
50 |
+
type: text-classification
|
51 |
+
name: Classification (ROC AUC)
|
52 |
+
dataset:
|
53 |
+
name: HIV
|
54 |
+
type: HIV
|
55 |
+
metrics:
|
56 |
+
- type: roc_auc
|
57 |
+
value: 0.7737
|
58 |
+
- task:
|
59 |
+
type: text-classification
|
60 |
+
name: Classification (ROC AUC)
|
61 |
+
dataset:
|
62 |
+
name: SIDER
|
63 |
+
type: SIDER
|
64 |
+
metrics:
|
65 |
+
- type: roc_auc
|
66 |
+
value: 0.6600
|
67 |
+
- task:
|
68 |
+
type: text-classification
|
69 |
+
name: Classification (ROC AUC)
|
70 |
+
dataset:
|
71 |
+
name: TOX21
|
72 |
+
type: TOX21
|
73 |
+
metrics:
|
74 |
+
- type: roc_auc
|
75 |
+
value: 0.7518
|
76 |
+
- task:
|
77 |
+
type: regression
|
78 |
+
name: Regression (RMSE)
|
79 |
+
dataset:
|
80 |
+
name: BACE
|
81 |
+
type: BACE
|
82 |
+
metrics:
|
83 |
+
- type: rmse
|
84 |
+
value: 0.9665
|
85 |
+
- task:
|
86 |
+
type: regression
|
87 |
+
name: Regression (RMSE)
|
88 |
+
dataset:
|
89 |
+
name: CLEARANCE
|
90 |
+
type: CLEARANCE
|
91 |
+
metrics:
|
92 |
+
- type: rmse
|
93 |
+
value: 44.0137
|
94 |
+
- task:
|
95 |
+
type: regression
|
96 |
+
name: Regression (RMSE)
|
97 |
+
dataset:
|
98 |
+
name: ESOL
|
99 |
+
type: ESOL
|
100 |
+
metrics:
|
101 |
+
- type: rmse
|
102 |
+
value: 0.8158
|
103 |
+
- task:
|
104 |
+
type: regression
|
105 |
+
name: Regression (RMSE)
|
106 |
+
dataset:
|
107 |
+
name: FREESOLV
|
108 |
+
type: FREESOLV
|
109 |
+
metrics:
|
110 |
+
- type: rmse
|
111 |
+
value: 0.4979
|
112 |
+
- task:
|
113 |
+
type: regression
|
114 |
+
name: Regression (RMSE)
|
115 |
+
dataset:
|
116 |
+
name: LIPO
|
117 |
+
type: LIPO
|
118 |
+
metrics:
|
119 |
+
- type: rmse
|
120 |
+
value: 0.6505
|
121 |
+
---
|
122 |
+
|
123 |
+
# ModChemBERT: ModernBERT as a Chemical Language Model
|
124 |
+
ModChemBERT is a ModernBERT-based chemical language model (CLM), trained on SMILES strings for masked language modeling (MLM) and downstream molecular property prediction (classification & regression).
|
125 |
+
|
126 |
+
## Usage
|
127 |
+
### Load Model
|
128 |
+
```python
|
129 |
+
from transformers import AutoModelForMaskedLM, AutoTokenizer
|
130 |
+
|
131 |
+
model_id = "Derify/ModChemBERT"
|
132 |
+
tokenizer = AutoTokenizer.from_pretrained(model_id)
|
133 |
+
model = AutoModelForMaskedLM.from_pretrained(
|
134 |
+
model_id,
|
135 |
+
trust_remote_code=True,
|
136 |
+
dtype="float16",
|
137 |
+
device_map="auto",
|
138 |
+
)
|
139 |
+
```
|
140 |
+
|
141 |
+
### Fill-Mask Pipeline
|
142 |
+
```python
|
143 |
+
from transformers import pipeline
|
144 |
+
|
145 |
+
fill = pipeline("fill-mask", model=model, tokenizer=tokenizer)
|
146 |
+
print(fill("c1ccccc1[MASK]"))
|
147 |
+
```
|
148 |
+
|
149 |
+
## Intended Use
|
150 |
+
* Primary: Research and development for molecular property prediction, experimentation with pooling strategies, and as a foundational model for downstream applications.
|
151 |
+
* Appropriate for: Binary / multi-class classification (e.g., toxicity, activity) and single-task or multi-task regression (e.g., solubility, clearance) after fine-tuning.
|
152 |
+
* Not intended for generating novel molecules.
|
153 |
+
|
154 |
+
## Limitations
|
155 |
+
- Out-of-domain performance may degrade for very long (>128 token) SMILES, inorganic / organometallic compounds, polymers, and charged / enumerated tautomers, as these are not well represented in the training data.
|
156 |
+
- No guarantee of synthesizability, safety, or biological efficacy.
|
157 |
+
|
158 |
+
## Ethical Considerations & Responsible Use
|
159 |
+
- Potential biases arise from training corpora skewed to drug-like space.
|
160 |
+
- Do not deploy in clinical or regulatory settings without rigorous, domain-specific validation.
|
161 |
+
|
162 |
+
## Architecture
|
163 |
+
- Backbone: ModernBERT
|
164 |
+
- Hidden size: 768
|
165 |
+
- Intermediate size: 1152
|
166 |
+
- Encoder Layers: 22
|
167 |
+
- Attention heads: 12
|
168 |
+
- Max sequence length: 256 tokens (MLM primarily trained with 128-token sequences)
|
169 |
+
- Vocabulary: BPE tokenizer using [MolFormer's vocab](https://github.com/emapco/ModChemBERT/blob/main/modchembert/tokenizers/molformer/vocab.json) (2362 tokens)
|
170 |
+
|
171 |
+
## Pooling (Classifier / Regressor Head)
|
172 |
+
Kallergis et al. [1] demonstrated that the CLM embedding method prior to the prediction head can significantly impact downstream performance.
|
173 |
+
|
174 |
+
Behrendt et al. [2] noted that the last few layers contain task-specific information and that pooling methods leveraging information from multiple layers can enhance model performance. Their results further demonstrated that the `max_seq_mha` pooling method was particularly effective in low-data regimes, which is often the case for molecular property prediction tasks.
|
175 |
+
|
176 |
+
Multiple pooling strategies are supported by ModChemBERT to explore their impact on downstream performance:
|
177 |
+
- `cls`: Last layer [CLS]
|
178 |
+
- `mean`: Mean over last hidden layer
|
179 |
+
- `max_cls`: Max over last k layers of [CLS]
|
180 |
+
- `cls_mha`: MHA with [CLS] as query
|
181 |
+
- `max_seq_mha`: MHA with max pooled sequence as KV and max pooled [CLS] as query
|
182 |
+
- `sum_mean`: Sum over all layers then mean tokens
|
183 |
+
- `sum_sum`: Sum over all layers then sum tokens
|
184 |
+
- `mean_mean`: Mean over all layers then mean tokens
|
185 |
+
- `mean_sum`: Mean over all layers then sum tokens
|
186 |
+
- `max_seq_mean`: Max over last k layers then mean tokens
|
187 |
+
|
188 |
+
## Training Pipeline
|
189 |
+
<div align="center">
|
190 |
+
<img src="https://cdn-uploads.huggingface.co/production/uploads/656892962693fa22e18b5331/bxNbpgMkU8m60ypyEJoWQ.png" alt="ModChemBERT Training Pipeline" width="650"/>
|
191 |
+
</div>
|
192 |
+
|
193 |
+
### Rationale for MTR Stage
|
194 |
+
Following Sultan et al. [3], multi-task regression (physicochemical properties) biases the latent space toward ADME-related representations prior to narrow TAFT specialization. Sultan et al. observed that MLM + DAPT (MTR) outperforms MLM-only, MTR-only, and MTR + DAPT (MTR).
|
195 |
+
|
196 |
+
### Checkpoint Averaging Motivation
|
197 |
+
Inspired by ModernBERT [4], JaColBERTv2.5 [5], and Llama 3.1 [6], where results show that model merging can enhance generalization or performance while mitigating overfitting to any single fine-tune or annealing checkpoint.
|
198 |
+
|
199 |
+
## Datasets
|
200 |
+
- Pretraining: [Derify/augmented_canonical_druglike_QED_Pfizer_15M](https://huggingface.co/datasets/Derify/augmented_canonical_druglike_QED_Pfizer_15M)
|
201 |
+
- Domain Adaptive Pretraining (DAPT) & Task Adaptive Fine-tuning (TAFT): ADME + AstraZeneca datasets (10 tasks) with scaffold splits from DA4MT pipeline (see [domain-adaptation-molecular-transformers](https://github.com/emapco/ModChemBERT/tree/main/domain-adaptation-molecular-transformers))
|
202 |
+
- Benchmarking: ChemBERTa-3 [7] tasks (BACE, BBBP, TOX21, HIV, SIDER, CLINTOX for classification; ESOL, FREESOLV, LIPO, BACE, CLEARANCE for regression)
|
203 |
+
|
204 |
+
## Benchmarking
|
205 |
+
Benchmarks were conducted with the ChemBERTa-3 framework using DeepChem scaffold splits. Each task was trained for 100 epochs with 3 random seeds.
|
206 |
+
|
207 |
+
### Evaluation Methodology
|
208 |
+
- Classification Metric: ROC AUC.
|
209 |
+
- Regression Metric: RMSE.
|
210 |
+
- Aggregation: Mean ± standard deviation of the triplicate results.
|
211 |
+
- Input Constraints: SMILES truncated / filtered to ≤200 tokens, following the MolFormer paper's recommendation.
|
212 |
+
|
213 |
+
### Results
|
214 |
+
<details><summary>Click to expand</summary>
|
215 |
+
|
216 |
+
#### Classification Datasets (ROC AUC - Higher is better)
|
217 |
+
|
218 |
+
| Model | BACE↑ | BBBP↑ | CLINTOX↑ | HIV↑ | SIDER↑ | TOX21↑ | AVG† |
|
219 |
+
| ---------------------------------------------------------------------------- | ----------------- | ----------------- | --------------------- | --------------------- | --------------------- | ----------------- | ------ |
|
220 |
+
| **Tasks** | 1 | 1 | 2 | 1 | 27 | 12 | |
|
221 |
+
| [ChemBERTa-100M-MLM](https://huggingface.co/DeepChem/ChemBERTa-100M-MLM)* | 0.781 ± 0.019 | 0.700 ± 0.027 | 0.979 ± 0.022 | 0.740 ± 0.013 | 0.611 ± 0.002 | 0.718 ± 0.011 | 0.7548 |
|
222 |
+
| [c3-MoLFormer-1.1B](https://huggingface.co/DeepChem/MoLFormer-c3-1.1B)* | 0.819 ± 0.019 | 0.735 ± 0.019 | 0.839 ± 0.013 | 0.762 ± 0.005 | 0.618 ± 0.005 | 0.723 ± 0.012 | 0.7493 |
|
223 |
+
| MoLFormer-LHPC* | **0.887 ± 0.004** | **0.908 ± 0.013** | 0.993 ± 0.004 | 0.750 ± 0.003 | 0.622 ± 0.007 | **0.791 ± 0.014** | 0.8252 |
|
224 |
+
| ------------------------- | ----------------- | ----------------- | ------------------- | ------------------- | ------------------- | ----------------- | ------ |
|
225 |
+
| [MLM](https://huggingface.co/Derify/ModChemBERT-MLM) | 0.8065 ± 0.0103 | 0.7222 ± 0.0150 | 0.9709 ± 0.0227 | ***0.7800 ± 0.0133*** | 0.6419 ± 0.0113 | 0.7400 ± 0.0044 | 0.7769 |
|
226 |
+
| [MLM + DAPT](https://huggingface.co/Derify/ModChemBERT-MLM-DAPT) | 0.8224 ± 0.0156 | 0.7402 ± 0.0095 | 0.9820 ± 0.0138 | 0.7702 ± 0.0020 | 0.6303 ± 0.0039 | 0.7360 ± 0.0036 | 0.7802 |
|
227 |
+
| [MLM + TAFT](https://huggingface.co/Derify/ModChemBERT-MLM-TAFT) | 0.7924 ± 0.0155 | 0.7282 ± 0.0058 | 0.9725 ± 0.0213 | 0.7770 ± 0.0047 | 0.6542 ± 0.0128 | *0.7646 ± 0.0039* | 0.7815 |
|
228 |
+
| [MLM + DAPT + TAFT](https://huggingface.co/Derify/ModChemBERT-MLM-DAPT-TAFT) | 0.8213 ± 0.0051 | 0.7356 ± 0.0094 | 0.9664 ± 0.0202 | 0.7750 ± 0.0048 | 0.6415 ± 0.0094 | 0.7263 ± 0.0036 | 0.7777 |
|
229 |
+
| [MLM + DAPT + TAFT OPT](https://huggingface.co/Derify/ModChemBERT) | *0.8346 ± 0.0045* | *0.7573 ± 0.0120* | ***0.9938 ± 0.0017*** | 0.7737 ± 0.0034 | ***0.6600 ± 0.0061*** | 0.7518 ± 0.0047 | 0.7952 |
|
230 |
+
|
231 |
+
#### Regression Datasets (RMSE - Lower is better)
|
232 |
+
|
233 |
+
| Model | BACE↓ | CLEARANCE↓ | ESOL↓ | FREESOLV↓ | LIPO↓ | AVG‡ |
|
234 |
+
| ---------------------------------------------------------------------------- | --------------------- | ---------------------- | --------------------- | --------------------- | --------------------- | ---------------- |
|
235 |
+
| **Tasks** | 1 | 1 | 1 | 1 | 1 | |
|
236 |
+
| [ChemBERTa-100M-MLM](https://huggingface.co/DeepChem/ChemBERTa-100M-MLM)* | 1.011 ± 0.038 | 51.582 ± 3.079 | 0.920 ± 0.011 | 0.536 ± 0.016 | 0.758 ± 0.013 | 0.8063 / 10.9614 |
|
237 |
+
| [c3-MoLFormer-1.1B](https://huggingface.co/DeepChem/MoLFormer-c3-1.1B)* | 1.094 ± 0.126 | 52.058 ± 2.767 | 0.829 ± 0.019 | 0.572 ± 0.023 | 0.728 ± 0.016 | 0.8058 / 11.0562 |
|
238 |
+
| MoLFormer-LHPC* | 1.201 ± 0.100 | 45.74 ± 2.637 | 0.848 ± 0.031 | 0.683 ± 0.040 | 0.895 ± 0.080 | 0.9068 / 9.8734 |
|
239 |
+
| ------------------------- | ------------------- | -------------------- | ------------------- | ------------------- | ------------------- | ---------------- |
|
240 |
+
| [MLM](https://huggingface.co/Derify/ModChemBERT-MLM) | 1.0893 ± 0.1319 | 49.0005 ± 1.2787 | 0.8456 ± 0.0406 | 0.5491 ± 0.0134 | 0.7147 ± 0.0062 | 0.7997 / 10.4398 |
|
241 |
+
| [MLM + DAPT](https://huggingface.co/Derify/ModChemBERT-MLM-DAPT) | 0.9931 ± 0.0258 | 45.4951 ± 0.7112 | 0.9319 ± 0.0153 | 0.6049 ± 0.0666 | 0.6874 ± 0.0040 | 0.8043 / 9.7425 |
|
242 |
+
| [MLM + TAFT](https://huggingface.co/Derify/ModChemBERT-MLM-TAFT) | 1.0304 ± 0.1146 | 47.8418 ± 0.4070 | ***0.7669 ± 0.0024*** | 0.5293 ± 0.0267 | 0.6708 ± 0.0074 | 0.7493 / 10.1678 |
|
243 |
+
| [MLM + DAPT + TAFT](https://huggingface.co/Derify/ModChemBERT-MLM-DAPT-TAFT) | 0.9713 ± 0.0224 | ***42.8010 ± 3.3475*** | 0.8169 ± 0.0268 | 0.5445 ± 0.0257 | 0.6820 ± 0.0028 | 0.7537 / 9.1631 |
|
244 |
+
| [MLM + DAPT + TAFT OPT](https://huggingface.co/Derify/ModChemBERT) | ***0.9665 ± 0.0250*** | 44.0137 ± 1.1110 | 0.8158 ± 0.0115 | ***0.4979 ± 0.0158*** | ***0.6505 ± 0.0126*** | 0.7327 / 9.3889 |
|
245 |
+
|
246 |
+
**Bold** indicates the best result in the column; *italic* indicates the best result among ModChemBERT checkpoints.<br/>
|
247 |
+
\* Published results from the ChemBERTa-3 [7] paper for optimized chemical language models using DeepChem scaffold splits.<br/>
|
248 |
+
† AVG column shows the mean score across all classification tasks.<br/>
|
249 |
+
‡ AVG column shows the mean scores across all regression tasks without and with the clearance score.
|
250 |
+
|
251 |
+
</details>
|
252 |
+
|
253 |
+
## Optimized ModChemBERT Hyperparameters
|
254 |
+
|
255 |
+
<details><summary>Click to expand</summary>
|
256 |
+
|
257 |
+
### TAFT Datasets
|
258 |
+
Optimal parameters (per dataset) for the `MLM + DAPT + TAFT OPT` merged model:
|
259 |
+
|
260 |
+
| Dataset | Learning Rate | Batch Size | Warmup Ratio | Classifier Pooling | Last k Layers |
|
261 |
+
| ---------------------- | ------------- | ---------- | ------------ | ------------------ | ------------- |
|
262 |
+
| adme_microsom_stab_h | 3e-5 | 8 | 0.0 | max_seq_mean | 5 |
|
263 |
+
| adme_microsom_stab_r | 3e-5 | 16 | 0.2 | max_cls | 3 |
|
264 |
+
| adme_permeability | 3e-5 | 8 | 0.0 | max_cls | 3 |
|
265 |
+
| adme_ppb_h | 1e-5 | 32 | 0.1 | max_seq_mean | 5 |
|
266 |
+
| adme_ppb_r | 1e-5 | 32 | 0.0 | sum_mean | N/A |
|
267 |
+
| adme_solubility | 3e-5 | 32 | 0.0 | sum_mean | N/A |
|
268 |
+
| astrazeneca_CL | 3e-5 | 8 | 0.1 | max_seq_mha | 3 |
|
269 |
+
| astrazeneca_LogD74 | 1e-5 | 8 | 0.0 | max_seq_mean | 5 |
|
270 |
+
| astrazeneca_PPB | 1e-5 | 32 | 0.0 | max_cls | 3 |
|
271 |
+
| astrazeneca_Solubility | 1e-5 | 32 | 0.0 | max_seq_mean | 5 |
|
272 |
+
|
273 |
+
### Benchmarking Datasets
|
274 |
+
Optimal parameters (per dataset) for the `MLM + DAPT + TAFT OPT` merged model:
|
275 |
+
|
276 |
+
| Dataset | Batch Size | Classifier Pooling | Last k Layers | Pooling Attention Dropout | Classifier Dropout | Embedding Dropout |
|
277 |
+
| ------------------- | ---------- | ------------------ | ------------- | ------------------------- | ------------------ | ----------------- |
|
278 |
+
| bace_classification | 32 | max_seq_mha | 3 | 0.0 | 0.0 | 0.0 |
|
279 |
+
| bbbp | 64 | max_cls | 3 | 0.1 | 0.0 | 0.0 |
|
280 |
+
| clintox | 32 | max_seq_mha | 5 | 0.1 | 0.0 | 0.0 |
|
281 |
+
| hiv | 32 | max_seq_mha | 3 | 0.0 | 0.0 | 0.0 |
|
282 |
+
| sider | 32 | mean | N/A | 0.1 | 0.0 | 0.1 |
|
283 |
+
| tox21 | 32 | max_seq_mha | 5 | 0.1 | 0.0 | 0.0 |
|
284 |
+
| bace_regression | 32 | max_seq_mha | 5 | 0.1 | 0.0 | 0.0 |
|
285 |
+
| clearance | 32 | max_seq_mha | 5 | 0.1 | 0.0 | 0.0 |
|
286 |
+
| esol | 64 | sum_mean | N/A | 0.1 | 0.0 | 0.1 |
|
287 |
+
| freesolv | 32 | max_seq_mha | 5 | 0.1 | 0.0 | 0.0 |
|
288 |
+
| lipo | 32 | max_seq_mha | 3 | 0.1 | 0.1 | 0.1 |
|
289 |
+
|
290 |
+
</details>
|
291 |
+
|
292 |
+
## Hardware
|
293 |
+
Training and experiments were performed on 2 NVIDIA RTX 3090 GPUs.
|
294 |
+
|
295 |
+
## Citation
|
296 |
+
If you use ModChemBERT in your research, please cite the checkpoint and the following:
|
297 |
+
```
|
298 |
+
@software{cortes-2025-modchembert,
|
299 |
+
author = {Emmanuel Cortes},
|
300 |
+
title = {ModChemBERT: ModernBERT as a Chemical Language Model},
|
301 |
+
year = {2025},
|
302 |
+
publisher = {GitHub},
|
303 |
+
howpublished = {GitHub repository},
|
304 |
+
url = {https://github.com/emapco/ModChemBERT}
|
305 |
+
}
|
306 |
+
```
|
307 |
+
|
308 |
+
## References
|
309 |
+
1. Kallergis, Georgios, et al. "Domain adaptable language modeling of chemical compounds identifies potent pathoblockers for Pseudomonas aeruginosa." Communications Chemistry 8.1 (2025): 114.
|
310 |
+
2. Behrendt, Maike, Stefan Sylvius Wagner, and Stefan Harmeling. "MaxPoolBERT: Enhancing BERT Classification via Layer-and Token-Wise Aggregation." arXiv preprint arXiv:2505.15696 (2025).
|
311 |
+
3. Sultan, Afnan, et al. "Transformers for molecular property prediction: Domain adaptation efficiently improves performance." arXiv preprint arXiv:2503.03360 (2025).
|
312 |
+
4. Warner, Benjamin, et al. "Smarter, better, faster, longer: A modern bidirectional encoder for fast, memory efficient, and long context finetuning and inference." arXiv preprint arXiv:2412.13663 (2024).
|
313 |
+
5. Clavié, Benjamin. "JaColBERTv2.5: Optimising Multi-Vector Retrievers to Create State-of-the-Art Japanese Retrievers with Constrained Resources." Journal of Natural Language Processing 32.1 (2025): 176-218.
|
314 |
+
6. Grattafiori, Aaron, et al. "The llama 3 herd of models." arXiv preprint arXiv:2407.21783 (2024).
|
315 |
+
7. Singh, Riya, et al. "ChemBERTa-3: An Open Source Training Framework for Chemical Foundation Models." (2025).
|
config.json
ADDED
@@ -0,0 +1,54 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
{
|
2 |
+
"architectures": [
|
3 |
+
"ModChemBertForMaskedLM",
|
4 |
+
"ModChemBertForSequenceClassification"
|
5 |
+
],
|
6 |
+
"attention_bias": false,
|
7 |
+
"attention_dropout": 0.1,
|
8 |
+
"auto_map": {
|
9 |
+
"AutoConfig": "configuration_modchembert.ModChemBertConfig",
|
10 |
+
"AutoModelForMaskedLM": "modeling_modchembert.ModChemBertForMaskedLM",
|
11 |
+
"AutoModelForSequenceClassification": "modeling_modchembert.ModChemBertForSequenceClassification"
|
12 |
+
},
|
13 |
+
"bos_token_id": 0,
|
14 |
+
"classifier_activation": "gelu",
|
15 |
+
"classifier_bias": false,
|
16 |
+
"classifier_dropout": 0.0,
|
17 |
+
"classifier_pooling": "max_seq_mha",
|
18 |
+
"classifier_pooling_attention_dropout": 0.1,
|
19 |
+
"classifier_pooling_last_k": 3,
|
20 |
+
"classifier_pooling_num_attention_heads": 4,
|
21 |
+
"cls_token_id": 0,
|
22 |
+
"decoder_bias": true,
|
23 |
+
"deterministic_flash_attn": false,
|
24 |
+
"dtype": "float32",
|
25 |
+
"embedding_dropout": 0.1,
|
26 |
+
"eos_token_id": 1,
|
27 |
+
"global_attn_every_n_layers": 3,
|
28 |
+
"global_rope_theta": 160000.0,
|
29 |
+
"hidden_activation": "gelu",
|
30 |
+
"hidden_size": 768,
|
31 |
+
"initializer_cutoff_factor": 2.0,
|
32 |
+
"initializer_range": 0.02,
|
33 |
+
"intermediate_size": 1152,
|
34 |
+
"layer_norm_eps": 1e-05,
|
35 |
+
"local_attention": 8,
|
36 |
+
"local_rope_theta": 10000.0,
|
37 |
+
"max_position_embeddings": 256,
|
38 |
+
"mlp_bias": false,
|
39 |
+
"mlp_dropout": 0.1,
|
40 |
+
"model_type": "modchembert",
|
41 |
+
"norm_bias": false,
|
42 |
+
"norm_eps": 1e-05,
|
43 |
+
"num_attention_heads": 12,
|
44 |
+
"num_hidden_layers": 22,
|
45 |
+
"num_labels": 1,
|
46 |
+
"pad_token_id": 2,
|
47 |
+
"position_embedding_type": "absolute",
|
48 |
+
"repad_logits_with_grad": false,
|
49 |
+
"sep_token_id": 1,
|
50 |
+
"sparse_pred_ignore_index": -100,
|
51 |
+
"sparse_prediction": false,
|
52 |
+
"transformers_version": "4.56.1",
|
53 |
+
"vocab_size": 2362
|
54 |
+
}
|
configuration_modchembert.py
ADDED
@@ -0,0 +1,84 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
# Copyright 2025 Emmanuel Cortes, All Rights Reserved.
|
2 |
+
#
|
3 |
+
# Licensed under the Apache License, Version 2.0 (the "License");
|
4 |
+
# you may not use this file except in compliance with the License.
|
5 |
+
# You may obtain a copy of the License at
|
6 |
+
#
|
7 |
+
# http://www.apache.org/licenses/LICENSE-2.0
|
8 |
+
#
|
9 |
+
# Unless required by applicable law or agreed to in writing, software
|
10 |
+
# distributed under the License is distributed on an "AS IS" BASIS,
|
11 |
+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
12 |
+
# See the License for the specific language governing permissions and
|
13 |
+
# limitations under the License.
|
14 |
+
|
15 |
+
from typing import Literal
|
16 |
+
|
17 |
+
from transformers.models.modernbert.configuration_modernbert import ModernBertConfig
|
18 |
+
|
19 |
+
|
20 |
+
class ModChemBertConfig(ModernBertConfig):
    """
    Configuration class for ModChemBert models.

    This configuration class extends ModernBertConfig with additional parameters specific to
    chemical molecule modeling and custom pooling strategies for classification/regression tasks.
    It accepts all arguments and keyword arguments from ModernBertConfig.

    Args:
        classifier_pooling (str, optional): Pooling strategy for sequence classification.
            Available options:
            - "cls": Use CLS token representation
            - "mean": Attention-weighted average pooling
            - "sum_mean": Sum all hidden states across layers, then mean pool over sequence (ChemLM approach)
            - "sum_sum": Sum all hidden states across layers, then sum pool over sequence
            - "mean_mean": Mean all hidden states across layers, then mean pool over sequence
            - "mean_sum": Mean all hidden states across layers, then sum pool over sequence
            - "max_cls": Element-wise max pooling over last k hidden states, then take CLS token
            - "cls_mha": Multi-head attention with CLS token as query and full sequence as keys/values
            - "max_seq_mha": Max pooling over last k states + multi-head attention with CLS as query
            - "max_seq_mean": Max pooling over last k hidden states, then mean pooling over sequence
            Defaults to "max_seq_mha".
        classifier_pooling_num_attention_heads (int, optional): Number of attention heads for multi-head attention
            pooling strategies (cls_mha, max_seq_mha). Defaults to 4.
        classifier_pooling_attention_dropout (float, optional): Dropout probability for multi-head attention
            pooling strategies (cls_mha, max_seq_mha). Defaults to 0.0.
        classifier_pooling_last_k (int, optional): Number of last hidden layers to use for max pooling
            strategies (max_cls, max_seq_mha, max_seq_mean). Defaults to 8.
        *args: Variable length argument list passed to ModernBertConfig.
        **kwargs: Arbitrary keyword arguments passed to ModernBertConfig.

    Note:
        This class inherits all configuration parameters from ModernBertConfig including
        hidden_size, num_hidden_layers, num_attention_heads, intermediate_size, etc.
    """

    model_type = "modchembert"

    def __init__(
        self,
        *args,
        classifier_pooling: Literal[
            "cls",
            "mean",
            "sum_mean",
            "sum_sum",
            "mean_mean",
            "mean_sum",
            "max_cls",
            "cls_mha",
            "max_seq_mha",
            "max_seq_mean",
        ] = "max_seq_mha",
        classifier_pooling_num_attention_heads: int = 4,
        classifier_pooling_attention_dropout: float = 0.0,
        classifier_pooling_last_k: int = 8,
        **kwargs,
    ):
        # ModernBertConfig only accepts {"cls", "mean"} for classifier_pooling and raises
        # a ValueError otherwise; pass the valid placeholder "cls" to its __init__ and then
        # override the attribute below with the extended set of pooling strategies.
        super().__init__(*args, classifier_pooling="cls", **kwargs)
        # Override with custom values
        self.classifier_pooling = classifier_pooling
        self.classifier_pooling_num_attention_heads = classifier_pooling_num_attention_heads
        self.classifier_pooling_attention_dropout = classifier_pooling_attention_dropout
        self.classifier_pooling_last_k = classifier_pooling_last_k
|
logs_modchembert_classification_ModChemBERT-MLM-DAPT-TAFT-OPT/modchembert_deepchem_splits_run_bace_classification_20250923_084801.log
ADDED
@@ -0,0 +1,351 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
2025-09-23 08:48:01,476 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Running benchmark for dataset: bace_classification
|
2 |
+
2025-09-23 08:48:01,476 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - dataset: bace_classification, tasks: ['Class'], epochs: 100, learning rate: 3e-05
|
3 |
+
2025-09-23 08:48:01,481 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Starting triplicate run 1 for dataset bace_classification at 2025-09-23_08-48-01
|
4 |
+
2025-09-23 08:48:08,238 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 1/100 | Train Loss: 0.5132 | Val mean-roc_auc_score: 0.6498
|
5 |
+
2025-09-23 08:48:08,238 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Global step of best model: 38
|
6 |
+
2025-09-23 08:48:08,767 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Best model saved at epoch 1 with val mean-roc_auc_score: 0.6498
|
7 |
+
2025-09-23 08:48:13,994 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 2/100 | Train Loss: 0.3586 | Val mean-roc_auc_score: 0.6810
|
8 |
+
2025-09-23 08:48:14,195 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Global step of best model: 76
|
9 |
+
2025-09-23 08:48:14,762 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Best model saved at epoch 2 with val mean-roc_auc_score: 0.6810
|
10 |
+
2025-09-23 08:48:19,666 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 3/100 | Train Loss: 0.3214 | Val mean-roc_auc_score: 0.6796
|
11 |
+
2025-09-23 08:48:25,095 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 4/100 | Train Loss: 0.2615 | Val mean-roc_auc_score: 0.6721
|
12 |
+
2025-09-23 08:48:30,348 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 5/100 | Train Loss: 0.2237 | Val mean-roc_auc_score: 0.6854
|
13 |
+
2025-09-23 08:48:30,493 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Global step of best model: 190
|
14 |
+
2025-09-23 08:48:31,044 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Best model saved at epoch 5 with val mean-roc_auc_score: 0.6854
|
15 |
+
2025-09-23 08:48:36,328 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 6/100 | Train Loss: 0.2121 | Val mean-roc_auc_score: 0.6962
|
16 |
+
2025-09-23 08:48:36,731 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Global step of best model: 228
|
17 |
+
2025-09-23 08:48:37,268 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Best model saved at epoch 6 with val mean-roc_auc_score: 0.6962
|
18 |
+
2025-09-23 08:48:42,579 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 7/100 | Train Loss: 0.2237 | Val mean-roc_auc_score: 0.7266
|
19 |
+
2025-09-23 08:48:42,765 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Global step of best model: 266
|
20 |
+
2025-09-23 08:48:43,301 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Best model saved at epoch 7 with val mean-roc_auc_score: 0.7266
|
21 |
+
2025-09-23 08:48:48,497 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 8/100 | Train Loss: 0.1895 | Val mean-roc_auc_score: 0.6788
|
22 |
+
2025-09-23 08:48:53,591 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 9/100 | Train Loss: 0.1439 | Val mean-roc_auc_score: 0.6866
|
23 |
+
2025-09-23 08:48:58,883 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 10/100 | Train Loss: 0.1826 | Val mean-roc_auc_score: 0.6923
|
24 |
+
2025-09-23 08:49:04,042 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 11/100 | Train Loss: 0.1667 | Val mean-roc_auc_score: 0.7149
|
25 |
+
2025-09-23 08:49:09,499 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 12/100 | Train Loss: 0.1077 | Val mean-roc_auc_score: 0.7062
|
26 |
+
2025-09-23 08:49:14,845 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 13/100 | Train Loss: 0.1040 | Val mean-roc_auc_score: 0.6946
|
27 |
+
2025-09-23 08:49:20,132 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 14/100 | Train Loss: 0.1172 | Val mean-roc_auc_score: 0.6950
|
28 |
+
2025-09-23 08:49:25,354 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 15/100 | Train Loss: 0.0975 | Val mean-roc_auc_score: 0.7006
|
29 |
+
2025-09-23 08:49:30,603 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 16/100 | Train Loss: 0.0693 | Val mean-roc_auc_score: 0.7036
|
30 |
+
2025-09-23 08:49:35,899 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 17/100 | Train Loss: 0.0843 | Val mean-roc_auc_score: 0.7092
|
31 |
+
2025-09-23 08:49:41,084 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 18/100 | Train Loss: 0.0843 | Val mean-roc_auc_score: 0.7111
|
32 |
+
2025-09-23 08:49:46,227 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 19/100 | Train Loss: 0.0724 | Val mean-roc_auc_score: 0.7177
|
33 |
+
2025-09-23 08:49:51,330 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 20/100 | Train Loss: 0.0650 | Val mean-roc_auc_score: 0.7022
|
34 |
+
2025-09-23 08:49:56,431 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 21/100 | Train Loss: 0.0483 | Val mean-roc_auc_score: 0.7221
|
35 |
+
2025-09-23 08:50:01,857 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 22/100 | Train Loss: 0.0462 | Val mean-roc_auc_score: 0.7229
|
36 |
+
2025-09-23 08:50:07,042 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 23/100 | Train Loss: 0.0495 | Val mean-roc_auc_score: 0.7184
|
37 |
+
2025-09-23 08:50:12,283 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 24/100 | Train Loss: 0.0291 | Val mean-roc_auc_score: 0.7105
|
38 |
+
2025-09-23 08:50:17,476 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 25/100 | Train Loss: 0.0199 | Val mean-roc_auc_score: 0.7193
|
39 |
+
2025-09-23 08:50:22,664 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 26/100 | Train Loss: 0.0306 | Val mean-roc_auc_score: 0.6909
|
40 |
+
2025-09-23 08:50:29,118 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 27/100 | Train Loss: 0.0781 | Val mean-roc_auc_score: 0.7328
|
41 |
+
2025-09-23 08:50:29,263 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Global step of best model: 1026
|
42 |
+
2025-09-23 08:50:29,798 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Best model saved at epoch 27 with val mean-roc_auc_score: 0.7328
|
43 |
+
2025-09-23 08:50:34,992 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 28/100 | Train Loss: 0.0469 | Val mean-roc_auc_score: 0.7183
|
44 |
+
2025-09-23 08:50:40,323 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 29/100 | Train Loss: 0.0327 | Val mean-roc_auc_score: 0.7315
|
45 |
+
2025-09-23 08:50:45,614 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 30/100 | Train Loss: 0.0347 | Val mean-roc_auc_score: 0.7134
|
46 |
+
2025-09-23 08:50:51,208 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 31/100 | Train Loss: 0.0495 | Val mean-roc_auc_score: 0.7180
|
47 |
+
2025-09-23 08:50:56,626 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 32/100 | Train Loss: 0.0243 | Val mean-roc_auc_score: 0.7357
|
48 |
+
2025-09-23 08:50:56,771 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Global step of best model: 1216
|
49 |
+
2025-09-23 08:50:57,303 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Best model saved at epoch 32 with val mean-roc_auc_score: 0.7357
|
50 |
+
2025-09-23 08:51:02,461 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 33/100 | Train Loss: 0.0191 | Val mean-roc_auc_score: 0.7362
|
51 |
+
2025-09-23 08:51:02,644 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Global step of best model: 1254
|
52 |
+
2025-09-23 08:51:03,178 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Best model saved at epoch 33 with val mean-roc_auc_score: 0.7362
|
53 |
+
2025-09-23 08:51:08,656 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 34/100 | Train Loss: 0.0162 | Val mean-roc_auc_score: 0.7192
|
54 |
+
2025-09-23 08:51:13,922 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 35/100 | Train Loss: 0.0161 | Val mean-roc_auc_score: 0.7235
|
55 |
+
2025-09-23 08:51:19,215 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 36/100 | Train Loss: 0.0123 | Val mean-roc_auc_score: 0.7115
|
56 |
+
2025-09-23 08:51:24,679 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 37/100 | Train Loss: 0.0065 | Val mean-roc_auc_score: 0.7122
|
57 |
+
2025-09-23 08:51:29,945 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 38/100 | Train Loss: 0.0082 | Val mean-roc_auc_score: 0.7114
|
58 |
+
2025-09-23 08:51:35,266 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 39/100 | Train Loss: 0.0090 | Val mean-roc_auc_score: 0.7194
|
59 |
+
2025-09-23 08:51:40,471 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 40/100 | Train Loss: 0.0135 | Val mean-roc_auc_score: 0.7308
|
60 |
+
2025-09-23 08:51:45,572 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 41/100 | Train Loss: 0.0071 | Val mean-roc_auc_score: 0.7267
|
61 |
+
2025-09-23 08:51:50,993 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 42/100 | Train Loss: 0.0099 | Val mean-roc_auc_score: 0.7276
|
62 |
+
2025-09-23 08:51:56,098 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 43/100 | Train Loss: 0.0064 | Val mean-roc_auc_score: 0.7323
|
63 |
+
2025-09-23 08:52:01,231 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 44/100 | Train Loss: 0.0207 | Val mean-roc_auc_score: 0.7157
|
64 |
+
2025-09-23 08:52:06,313 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 45/100 | Train Loss: 0.0299 | Val mean-roc_auc_score: 0.6971
|
65 |
+
2025-09-23 08:52:11,543 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 46/100 | Train Loss: 0.0271 | Val mean-roc_auc_score: 0.7232
|
66 |
+
2025-09-23 08:52:17,023 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 47/100 | Train Loss: 0.0216 | Val mean-roc_auc_score: 0.7154
|
67 |
+
2025-09-23 08:52:22,247 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 48/100 | Train Loss: 0.0080 | Val mean-roc_auc_score: 0.7187
|
68 |
+
2025-09-23 08:52:27,420 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 49/100 | Train Loss: 0.0101 | Val mean-roc_auc_score: 0.6777
|
69 |
+
2025-09-23 08:52:32,302 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 50/100 | Train Loss: 0.0621 | Val mean-roc_auc_score: 0.6875
|
70 |
+
2025-09-23 08:52:37,512 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 51/100 | Train Loss: 0.0650 | Val mean-roc_auc_score: 0.6815
|
71 |
+
2025-09-23 08:52:42,968 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 52/100 | Train Loss: 0.0241 | Val mean-roc_auc_score: 0.6905
|
72 |
+
2025-09-23 08:52:49,514 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 53/100 | Train Loss: 0.0209 | Val mean-roc_auc_score: 0.6899
|
73 |
+
2025-09-23 08:52:54,781 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 54/100 | Train Loss: 0.0120 | Val mean-roc_auc_score: 0.6891
|
74 |
+
2025-09-23 08:52:59,983 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 55/100 | Train Loss: 0.0093 | Val mean-roc_auc_score: 0.6912
|
75 |
+
2025-09-23 08:53:05,288 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 56/100 | Train Loss: 0.0077 | Val mean-roc_auc_score: 0.6918
|
76 |
+
2025-09-23 08:53:10,728 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 57/100 | Train Loss: 0.0088 | Val mean-roc_auc_score: 0.6936
|
77 |
+
2025-09-23 08:53:15,644 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 58/100 | Train Loss: 0.0046 | Val mean-roc_auc_score: 0.6959
|
78 |
+
2025-09-23 08:53:20,763 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 59/100 | Train Loss: 0.0061 | Val mean-roc_auc_score: 0.6963
|
79 |
+
2025-09-23 08:53:25,996 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 60/100 | Train Loss: 0.0063 | Val mean-roc_auc_score: 0.6975
|
80 |
+
2025-09-23 08:53:31,290 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 61/100 | Train Loss: 0.0076 | Val mean-roc_auc_score: 0.6966
|
81 |
+
2025-09-23 08:53:36,792 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 62/100 | Train Loss: 0.0057 | Val mean-roc_auc_score: 0.6983
|
82 |
+
2025-09-23 08:53:42,014 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 63/100 | Train Loss: 0.0043 | Val mean-roc_auc_score: 0.7002
|
83 |
+
2025-09-23 08:53:47,449 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 64/100 | Train Loss: 0.0073 | Val mean-roc_auc_score: 0.6990
|
84 |
+
2025-09-23 08:53:52,815 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 65/100 | Train Loss: 0.0053 | Val mean-roc_auc_score: 0.6971
|
85 |
+
2025-09-23 08:53:58,097 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 66/100 | Train Loss: 0.0114 | Val mean-roc_auc_score: 0.6994
|
86 |
+
2025-09-23 08:54:03,564 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 67/100 | Train Loss: 0.0063 | Val mean-roc_auc_score: 0.7008
|
87 |
+
2025-09-23 08:54:08,786 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 68/100 | Train Loss: 0.0098 | Val mean-roc_auc_score: 0.6998
|
88 |
+
2025-09-23 08:54:14,034 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 69/100 | Train Loss: 0.0020 | Val mean-roc_auc_score: 0.6990
|
89 |
+
2025-09-23 08:54:19,271 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 70/100 | Train Loss: 0.0040 | Val mean-roc_auc_score: 0.6986
|
90 |
+
2025-09-23 08:54:24,487 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 71/100 | Train Loss: 0.0033 | Val mean-roc_auc_score: 0.6985
|
91 |
+
2025-09-23 08:54:29,955 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 72/100 | Train Loss: 0.0043 | Val mean-roc_auc_score: 0.6994
|
92 |
+
2025-09-23 08:54:35,112 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 73/100 | Train Loss: 0.0046 | Val mean-roc_auc_score: 0.6969
|
93 |
+
2025-09-23 08:54:39,952 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 74/100 | Train Loss: 0.0045 | Val mean-roc_auc_score: 0.6974
|
94 |
+
2025-09-23 08:54:45,057 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 75/100 | Train Loss: 0.0033 | Val mean-roc_auc_score: 0.6988
|
95 |
+
2025-09-23 08:54:50,160 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 76/100 | Train Loss: 0.0061 | Val mean-roc_auc_score: 0.7007
|
96 |
+
2025-09-23 08:54:55,544 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 77/100 | Train Loss: 0.0051 | Val mean-roc_auc_score: 0.7007
|
97 |
+
2025-09-23 08:55:00,705 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 78/100 | Train Loss: 0.0056 | Val mean-roc_auc_score: 0.6991
|
98 |
+
2025-09-23 08:55:07,137 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 79/100 | Train Loss: 0.0085 | Val mean-roc_auc_score: 0.7001
|
99 |
+
2025-09-23 08:55:12,421 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 80/100 | Train Loss: 0.0044 | Val mean-roc_auc_score: 0.7014
|
100 |
+
2025-09-23 08:55:17,598 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 81/100 | Train Loss: 0.0047 | Val mean-roc_auc_score: 0.7020
|
101 |
+
2025-09-23 08:55:23,012 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 82/100 | Train Loss: 0.0030 | Val mean-roc_auc_score: 0.7001
|
102 |
+
2025-09-23 08:55:28,165 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 83/100 | Train Loss: 0.0027 | Val mean-roc_auc_score: 0.7003
|
103 |
+
2025-09-23 08:55:33,218 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 84/100 | Train Loss: 0.0107 | Val mean-roc_auc_score: 0.7002
|
104 |
+
2025-09-23 08:55:38,433 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 85/100 | Train Loss: 0.0093 | Val mean-roc_auc_score: 0.7045
|
105 |
+
2025-09-23 08:55:43,652 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 86/100 | Train Loss: 0.0113 | Val mean-roc_auc_score: 0.7128
|
106 |
+
2025-09-23 08:55:49,223 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 87/100 | Train Loss: 0.0067 | Val mean-roc_auc_score: 0.7144
|
107 |
+
2025-09-23 08:55:54,480 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 88/100 | Train Loss: 0.0065 | Val mean-roc_auc_score: 0.7162
|
108 |
+
2025-09-23 08:55:59,939 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 89/100 | Train Loss: 0.0028 | Val mean-roc_auc_score: 0.7154
|
109 |
+
2025-09-23 08:56:05,043 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 90/100 | Train Loss: 0.0039 | Val mean-roc_auc_score: 0.7143
|
110 |
+
2025-09-23 08:56:10,111 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 91/100 | Train Loss: 0.0024 | Val mean-roc_auc_score: 0.7150
|
111 |
+
2025-09-23 08:56:15,490 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 92/100 | Train Loss: 0.0039 | Val mean-roc_auc_score: 0.7126
|
112 |
+
2025-09-23 08:56:20,624 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 93/100 | Train Loss: 0.0042 | Val mean-roc_auc_score: 0.7111
|
113 |
+
2025-09-23 08:56:25,715 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 94/100 | Train Loss: 0.0043 | Val mean-roc_auc_score: 0.7109
|
114 |
+
2025-09-23 08:56:31,037 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 95/100 | Train Loss: 0.0014 | Val mean-roc_auc_score: 0.7091
|
115 |
+
2025-09-23 08:56:36,265 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 96/100 | Train Loss: 0.0044 | Val mean-roc_auc_score: 0.7192
|
116 |
+
2025-09-23 08:56:41,778 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 97/100 | Train Loss: 0.0030 | Val mean-roc_auc_score: 0.7239
|
117 |
+
2025-09-23 08:56:46,663 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 98/100 | Train Loss: 0.0017 | Val mean-roc_auc_score: 0.7237
|
118 |
+
2025-09-23 08:56:51,885 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 99/100 | Train Loss: 0.0036 | Val mean-roc_auc_score: 0.7228
|
119 |
+
2025-09-23 08:56:57,157 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 100/100 | Train Loss: 0.0025 | Val mean-roc_auc_score: 0.7224
|
120 |
+
2025-09-23 08:56:58,003 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Test mean-roc_auc_score: 0.8337
|
121 |
+
2025-09-23 08:56:58,332 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Starting triplicate run 2 for dataset bace_classification at 2025-09-23_08-56-58
|
122 |
+
2025-09-23 08:57:02,723 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 1/100 | Train Loss: 0.5493 | Val mean-roc_auc_score: 0.6854
|
123 |
+
2025-09-23 08:57:02,723 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Global step of best model: 38
|
124 |
+
2025-09-23 08:57:03,256 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Best model saved at epoch 1 with val mean-roc_auc_score: 0.6854
|
125 |
+
2025-09-23 08:57:08,532 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 2/100 | Train Loss: 0.4030 | Val mean-roc_auc_score: 0.6708
|
126 |
+
2025-09-23 08:57:14,188 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 3/100 | Train Loss: 0.3125 | Val mean-roc_auc_score: 0.6761
|
127 |
+
2025-09-23 08:57:19,401 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 4/100 | Train Loss: 0.2730 | Val mean-roc_auc_score: 0.6947
|
128 |
+
2025-09-23 08:57:19,534 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Global step of best model: 152
|
129 |
+
2025-09-23 08:57:20,054 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Best model saved at epoch 4 with val mean-roc_auc_score: 0.6947
|
130 |
+
2025-09-23 08:57:25,214 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 5/100 | Train Loss: 0.2188 | Val mean-roc_auc_score: 0.6914
|
131 |
+
2025-09-23 08:57:30,328 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 6/100 | Train Loss: 0.2210 | Val mean-roc_auc_score: 0.6858
|
132 |
+
2025-09-23 08:57:35,586 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 7/100 | Train Loss: 0.1949 | Val mean-roc_auc_score: 0.6911
|
133 |
+
2025-09-23 08:57:40,764 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 8/100 | Train Loss: 0.1543 | Val mean-roc_auc_score: 0.7249
|
134 |
+
2025-09-23 08:57:40,940 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Global step of best model: 304
|
135 |
+
2025-09-23 08:57:41,498 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Best model saved at epoch 8 with val mean-roc_auc_score: 0.7249
|
136 |
+
2025-09-23 08:57:46,647 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 9/100 | Train Loss: 0.1538 | Val mean-roc_auc_score: 0.7236
|
137 |
+
2025-09-23 08:57:51,874 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 10/100 | Train Loss: 0.1299 | Val mean-roc_auc_score: 0.6952
|
138 |
+
2025-09-23 08:57:57,065 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 11/100 | Train Loss: 0.1823 | Val mean-roc_auc_score: 0.6939
|
139 |
+
2025-09-23 08:58:02,901 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 12/100 | Train Loss: 0.1340 | Val mean-roc_auc_score: 0.7218
|
140 |
+
2025-09-23 08:58:08,141 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 13/100 | Train Loss: 0.0900 | Val mean-roc_auc_score: 0.7090
|
141 |
+
2025-09-23 08:58:13,255 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 14/100 | Train Loss: 0.0952 | Val mean-roc_auc_score: 0.6962
|
142 |
+
2025-09-23 08:58:18,386 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 15/100 | Train Loss: 0.0942 | Val mean-roc_auc_score: 0.7173
|
143 |
+
2025-09-23 08:58:23,454 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 16/100 | Train Loss: 0.0889 | Val mean-roc_auc_score: 0.7132
|
144 |
+
2025-09-23 08:58:28,843 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 17/100 | Train Loss: 0.0678 | Val mean-roc_auc_score: 0.7266
|
145 |
+
2025-09-23 08:58:28,993 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Global step of best model: 646
|
146 |
+
2025-09-23 08:58:29,526 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Best model saved at epoch 17 with val mean-roc_auc_score: 0.7266
|
147 |
+
2025-09-23 08:58:34,672 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 18/100 | Train Loss: 0.0773 | Val mean-roc_auc_score: 0.6853
|
148 |
+
2025-09-23 08:58:39,885 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 19/100 | Train Loss: 0.1023 | Val mean-roc_auc_score: 0.7079
|
149 |
+
2025-09-23 08:58:45,054 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 20/100 | Train Loss: 0.0421 | Val mean-roc_auc_score: 0.7158
|
150 |
+
2025-09-23 08:58:50,299 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 21/100 | Train Loss: 0.0254 | Val mean-roc_auc_score: 0.7060
|
151 |
+
2025-09-23 08:58:55,694 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 22/100 | Train Loss: 0.0295 | Val mean-roc_auc_score: 0.7118
|
152 |
+
2025-09-23 08:59:00,875 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 23/100 | Train Loss: 0.0219 | Val mean-roc_auc_score: 0.7141
|
153 |
+
2025-09-23 08:59:06,148 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 24/100 | Train Loss: 0.0553 | Val mean-roc_auc_score: 0.6900
|
154 |
+
2025-09-23 08:59:11,357 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 25/100 | Train Loss: 0.0500 | Val mean-roc_auc_score: 0.7044
|
155 |
+
2025-09-23 08:59:16,602 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 26/100 | Train Loss: 0.0368 | Val mean-roc_auc_score: 0.6921
|
156 |
+
2025-09-23 08:59:23,326 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 27/100 | Train Loss: 0.1064 | Val mean-roc_auc_score: 0.6855
|
157 |
+
2025-09-23 08:59:28,531 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 28/100 | Train Loss: 0.0707 | Val mean-roc_auc_score: 0.6976
|
158 |
+
2025-09-23 08:59:33,708 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 29/100 | Train Loss: 0.0308 | Val mean-roc_auc_score: 0.7139
|
159 |
+
2025-09-23 08:59:38,904 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 30/100 | Train Loss: 0.0227 | Val mean-roc_auc_score: 0.7088
|
160 |
+
2025-09-23 08:59:44,068 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 31/100 | Train Loss: 0.0159 | Val mean-roc_auc_score: 0.7082
|
161 |
+
2025-09-23 08:59:49,535 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 32/100 | Train Loss: 0.0112 | Val mean-roc_auc_score: 0.7082
|
162 |
+
2025-09-23 08:59:54,479 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 33/100 | Train Loss: 0.0121 | Val mean-roc_auc_score: 0.7109
|
163 |
+
2025-09-23 08:59:59,691 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 34/100 | Train Loss: 0.0166 | Val mean-roc_auc_score: 0.7106
|
164 |
+
2025-09-23 09:00:04,870 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 35/100 | Train Loss: 0.0163 | Val mean-roc_auc_score: 0.6939
|
165 |
+
2025-09-23 09:00:10,045 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 36/100 | Train Loss: 0.0117 | Val mean-roc_auc_score: 0.7037
|
166 |
+
2025-09-23 09:00:15,596 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 37/100 | Train Loss: 0.0197 | Val mean-roc_auc_score: 0.7077
|
167 |
+
2025-09-23 09:00:20,770 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 38/100 | Train Loss: 0.0350 | Val mean-roc_auc_score: 0.6969
|
168 |
+
2025-09-23 09:00:25,991 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 39/100 | Train Loss: 0.0157 | Val mean-roc_auc_score: 0.7159
|
169 |
+
2025-09-23 09:00:31,122 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 40/100 | Train Loss: 0.0146 | Val mean-roc_auc_score: 0.7120
|
170 |
+
2025-09-23 09:00:36,340 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 41/100 | Train Loss: 0.0075 | Val mean-roc_auc_score: 0.7083
|
171 |
+
2025-09-23 09:00:41,792 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 42/100 | Train Loss: 0.0086 | Val mean-roc_auc_score: 0.7053
|
172 |
+
2025-09-23 09:00:47,008 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 43/100 | Train Loss: 0.0082 | Val mean-roc_auc_score: 0.7019
|
173 |
+
2025-09-23 09:00:52,275 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 44/100 | Train Loss: 0.0096 | Val mean-roc_auc_score: 0.7138
|
174 |
+
2025-09-23 09:00:57,385 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 45/100 | Train Loss: 0.0051 | Val mean-roc_auc_score: 0.7134
|
175 |
+
2025-09-23 09:01:02,480 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 46/100 | Train Loss: 0.0061 | Val mean-roc_auc_score: 0.7134
|
176 |
+
2025-09-23 09:01:07,868 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 47/100 | Train Loss: 0.0078 | Val mean-roc_auc_score: 0.7116
|
177 |
+
2025-09-23 09:01:12,970 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 48/100 | Train Loss: 0.0065 | Val mean-roc_auc_score: 0.7069
|
178 |
+
2025-09-23 09:01:18,151 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 49/100 | Train Loss: 0.0038 | Val mean-roc_auc_score: 0.7101
|
179 |
+
2025-09-23 09:01:23,212 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 50/100 | Train Loss: 0.0123 | Val mean-roc_auc_score: 0.7132
|
180 |
+
2025-09-23 09:01:28,373 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 51/100 | Train Loss: 0.0130 | Val mean-roc_auc_score: 0.6994
|
181 |
+
2025-09-23 09:01:33,900 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 52/100 | Train Loss: 0.0059 | Val mean-roc_auc_score: 0.7054
|
182 |
+
2025-09-23 09:01:40,307 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 53/100 | Train Loss: 0.0052 | Val mean-roc_auc_score: 0.7084
|
183 |
+
2025-09-23 09:01:45,480 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 54/100 | Train Loss: 0.0038 | Val mean-roc_auc_score: 0.7075
|
184 |
+
2025-09-23 09:01:50,679 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 55/100 | Train Loss: 0.0044 | Val mean-roc_auc_score: 0.7098
|
185 |
+
2025-09-23 09:01:55,753 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 56/100 | Train Loss: 0.0037 | Val mean-roc_auc_score: 0.7118
|
186 |
+
2025-09-23 09:02:01,240 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 57/100 | Train Loss: 0.0037 | Val mean-roc_auc_score: 0.7163
|
187 |
+
2025-09-23 09:02:06,418 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 58/100 | Train Loss: 0.0134 | Val mean-roc_auc_score: 0.7160
|
188 |
+
2025-09-23 09:02:11,657 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 59/100 | Train Loss: 0.0044 | Val mean-roc_auc_score: 0.7162
|
189 |
+
2025-09-23 09:02:16,944 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 60/100 | Train Loss: 0.0047 | Val mean-roc_auc_score: 0.7127
|
190 |
+
2025-09-23 09:02:22,186 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 61/100 | Train Loss: 0.0023 | Val mean-roc_auc_score: 0.7178
|
191 |
+
2025-09-23 09:02:27,669 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 62/100 | Train Loss: 0.0029 | Val mean-roc_auc_score: 0.7174
|
192 |
+
2025-09-23 09:02:32,875 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 63/100 | Train Loss: 0.0051 | Val mean-roc_auc_score: 0.7153
|
193 |
+
2025-09-23 09:02:38,083 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 64/100 | Train Loss: 0.0067 | Val mean-roc_auc_score: 0.7121
|
194 |
+
2025-09-23 09:02:43,291 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 65/100 | Train Loss: 0.0085 | Val mean-roc_auc_score: 0.7151
|
195 |
+
2025-09-23 09:02:48,475 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 66/100 | Train Loss: 0.0043 | Val mean-roc_auc_score: 0.7192
|
196 |
+
2025-09-23 09:02:53,977 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 67/100 | Train Loss: 0.0358 | Val mean-roc_auc_score: 0.7240
|
197 |
+
2025-09-23 09:02:59,211 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 68/100 | Train Loss: 0.0374 | Val mean-roc_auc_score: 0.6917
|
198 |
+
2025-09-23 09:03:04,450 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 69/100 | Train Loss: 0.0193 | Val mean-roc_auc_score: 0.7005
|
199 |
+
2025-09-23 09:03:09,662 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 70/100 | Train Loss: 0.0093 | Val mean-roc_auc_score: 0.7051
|
200 |
+
2025-09-23 09:03:14,802 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 71/100 | Train Loss: 0.0069 | Val mean-roc_auc_score: 0.7027
|
201 |
+
2025-09-23 09:03:20,260 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 72/100 | Train Loss: 0.0054 | Val mean-roc_auc_score: 0.7036
|
202 |
+
2025-09-23 09:03:25,371 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 73/100 | Train Loss: 0.0063 | Val mean-roc_auc_score: 0.7031
|
203 |
+
2025-09-23 09:03:30,535 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 74/100 | Train Loss: 0.0073 | Val mean-roc_auc_score: 0.7004
|
204 |
+
2025-09-23 09:03:35,716 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 75/100 | Train Loss: 0.0059 | Val mean-roc_auc_score: 0.6959
|
205 |
+
2025-09-23 09:03:40,941 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 76/100 | Train Loss: 0.0053 | Val mean-roc_auc_score: 0.6955
|
206 |
+
2025-09-23 09:03:46,331 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 77/100 | Train Loss: 0.0195 | Val mean-roc_auc_score: 0.7026
|
207 |
+
2025-09-23 09:03:51,423 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 78/100 | Train Loss: 0.0110 | Val mean-roc_auc_score: 0.7003
|
208 |
+
2025-09-23 09:03:57,838 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 79/100 | Train Loss: 0.0028 | Val mean-roc_auc_score: 0.6956
|
209 |
+
2025-09-23 09:04:03,039 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 80/100 | Train Loss: 0.0471 | Val mean-roc_auc_score: 0.7381
|
210 |
+
2025-09-23 09:04:03,184 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Global step of best model: 3040
|
211 |
+
2025-09-23 09:04:03,715 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Best model saved at epoch 80 with val mean-roc_auc_score: 0.7381
|
212 |
+
2025-09-23 09:04:08,893 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 81/100 | Train Loss: 0.0399 | Val mean-roc_auc_score: 0.7353
|
213 |
+
2025-09-23 09:04:14,783 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 82/100 | Train Loss: 0.0110 | Val mean-roc_auc_score: 0.7278
|
214 |
+
2025-09-23 09:04:20,307 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 83/100 | Train Loss: 0.0070 | Val mean-roc_auc_score: 0.7249
|
215 |
+
2025-09-23 09:04:25,535 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 84/100 | Train Loss: 0.0062 | Val mean-roc_auc_score: 0.7225
|
216 |
+
2025-09-23 09:04:30,730 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 85/100 | Train Loss: 0.0076 | Val mean-roc_auc_score: 0.7114
|
217 |
+
2025-09-23 09:04:35,960 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 86/100 | Train Loss: 0.0040 | Val mean-roc_auc_score: 0.7242
|
218 |
+
2025-09-23 09:04:41,448 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 87/100 | Train Loss: 0.0023 | Val mean-roc_auc_score: 0.7217
|
219 |
+
2025-09-23 09:04:46,686 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 88/100 | Train Loss: 0.0040 | Val mean-roc_auc_score: 0.7204
|
220 |
+
2025-09-23 09:04:51,827 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 89/100 | Train Loss: 0.0034 | Val mean-roc_auc_score: 0.7208
|
221 |
+
2025-09-23 09:04:56,890 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 90/100 | Train Loss: 0.0040 | Val mean-roc_auc_score: 0.7207
|
222 |
+
2025-09-23 09:05:02,187 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 91/100 | Train Loss: 0.0031 | Val mean-roc_auc_score: 0.7201
|
223 |
+
2025-09-23 09:05:07,783 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 92/100 | Train Loss: 0.0034 | Val mean-roc_auc_score: 0.7201
|
224 |
+
2025-09-23 09:05:12,896 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 93/100 | Train Loss: 0.0030 | Val mean-roc_auc_score: 0.7198
|
225 |
+
2025-09-23 09:05:18,069 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 94/100 | Train Loss: 0.0058 | Val mean-roc_auc_score: 0.7210
|
226 |
+
2025-09-23 09:05:23,188 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 95/100 | Train Loss: 0.0034 | Val mean-roc_auc_score: 0.7195
|
227 |
+
2025-09-23 09:05:28,399 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 96/100 | Train Loss: 0.0033 | Val mean-roc_auc_score: 0.7192
|
228 |
+
2025-09-23 09:05:33,865 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 97/100 | Train Loss: 0.0022 | Val mean-roc_auc_score: 0.7193
|
229 |
+
2025-09-23 09:05:39,054 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 98/100 | Train Loss: 0.0038 | Val mean-roc_auc_score: 0.7177
|
230 |
+
2025-09-23 09:05:44,323 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 99/100 | Train Loss: 0.0028 | Val mean-roc_auc_score: 0.7174
|
231 |
+
2025-09-23 09:05:49,538 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 100/100 | Train Loss: 0.0049 | Val mean-roc_auc_score: 0.7150
|
232 |
+
2025-09-23 09:05:50,327 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Test mean-roc_auc_score: 0.8296
|
233 |
+
2025-09-23 09:05:50,661 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Starting triplicate run 3 for dataset bace_classification at 2025-09-23_09-05-50
|
234 |
+
2025-09-23 09:05:55,119 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 1/100 | Train Loss: 0.5296 | Val mean-roc_auc_score: 0.6726
|
235 |
+
2025-09-23 09:05:55,119 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Global step of best model: 38
|
236 |
+
2025-09-23 09:05:55,664 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Best model saved at epoch 1 with val mean-roc_auc_score: 0.6726
|
237 |
+
2025-09-23 09:06:00,934 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 2/100 | Train Loss: 0.3734 | Val mean-roc_auc_score: 0.6776
|
238 |
+
2025-09-23 09:06:01,116 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Global step of best model: 76
|
239 |
+
2025-09-23 09:06:01,671 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Best model saved at epoch 2 with val mean-roc_auc_score: 0.6776
|
240 |
+
2025-09-23 09:06:06,950 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 3/100 | Train Loss: 0.3549 | Val mean-roc_auc_score: 0.6899
|
241 |
+
2025-09-23 09:06:07,141 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Global step of best model: 114
|
242 |
+
2025-09-23 09:06:07,677 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Best model saved at epoch 3 with val mean-roc_auc_score: 0.6899
|
243 |
+
2025-09-23 09:06:13,078 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 4/100 | Train Loss: 0.2599 | Val mean-roc_auc_score: 0.7008
|
244 |
+
2025-09-23 09:06:13,271 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Global step of best model: 152
|
245 |
+
2025-09-23 09:06:13,818 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Best model saved at epoch 4 with val mean-roc_auc_score: 0.7008
|
246 |
+
2025-09-23 09:06:19,038 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 5/100 | Train Loss: 0.2484 | Val mean-roc_auc_score: 0.7333
|
247 |
+
2025-09-23 09:06:19,225 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Global step of best model: 190
|
248 |
+
2025-09-23 09:06:19,755 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Best model saved at epoch 5 with val mean-roc_auc_score: 0.7333
|
249 |
+
2025-09-23 09:06:25,112 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 6/100 | Train Loss: 0.1964 | Val mean-roc_auc_score: 0.6939
|
250 |
+
2025-09-23 09:06:30,789 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 7/100 | Train Loss: 0.1793 | Val mean-roc_auc_score: 0.6967
|
251 |
+
2025-09-23 09:06:36,132 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 8/100 | Train Loss: 0.2598 | Val mean-roc_auc_score: 0.6956
|
252 |
+
2025-09-23 09:06:41,378 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 9/100 | Train Loss: 0.1505 | Val mean-roc_auc_score: 0.7200
|
253 |
+
2025-09-23 09:06:46,589 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 10/100 | Train Loss: 0.1447 | Val mean-roc_auc_score: 0.6631
|
254 |
+
2025-09-23 09:06:51,787 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 11/100 | Train Loss: 0.1337 | Val mean-roc_auc_score: 0.7072
|
255 |
+
2025-09-23 09:06:57,318 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 12/100 | Train Loss: 0.0999 | Val mean-roc_auc_score: 0.6934
|
256 |
+
2025-09-23 09:07:02,499 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 13/100 | Train Loss: 0.1414 | Val mean-roc_auc_score: 0.7247
|
257 |
+
2025-09-23 09:07:07,616 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 14/100 | Train Loss: 0.1250 | Val mean-roc_auc_score: 0.7249
|
258 |
+
2025-09-23 09:07:12,806 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 15/100 | Train Loss: 0.0736 | Val mean-roc_auc_score: 0.7428
|
259 |
+
2025-09-23 09:07:12,952 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Global step of best model: 570
|
260 |
+
2025-09-23 09:07:13,485 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Best model saved at epoch 15 with val mean-roc_auc_score: 0.7428
|
261 |
+
2025-09-23 09:07:18,580 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 16/100 | Train Loss: 0.0854 | Val mean-roc_auc_score: 0.7422
|
262 |
+
2025-09-23 09:07:23,932 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 17/100 | Train Loss: 0.0625 | Val mean-roc_auc_score: 0.7240
|
263 |
+
2025-09-23 09:07:28,918 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 18/100 | Train Loss: 0.0389 | Val mean-roc_auc_score: 0.7340
|
264 |
+
2025-09-23 09:07:34,193 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 19/100 | Train Loss: 0.0682 | Val mean-roc_auc_score: 0.7501
|
265 |
+
2025-09-23 09:07:34,357 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Global step of best model: 722
|
266 |
+
2025-09-23 09:07:34,893 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Best model saved at epoch 19 with val mean-roc_auc_score: 0.7501
|
267 |
+
2025-09-23 09:07:40,162 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 20/100 | Train Loss: 0.0925 | Val mean-roc_auc_score: 0.7494
|
268 |
+
2025-09-23 09:07:45,412 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 21/100 | Train Loss: 0.0446 | Val mean-roc_auc_score: 0.7382
|
269 |
+
2025-09-23 09:07:50,920 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 22/100 | Train Loss: 0.0286 | Val mean-roc_auc_score: 0.7371
|
270 |
+
2025-09-23 09:07:56,157 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 23/100 | Train Loss: 0.0252 | Val mean-roc_auc_score: 0.7389
|
271 |
+
2025-09-23 09:08:01,318 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 24/100 | Train Loss: 0.1758 | Val mean-roc_auc_score: 0.7325
|
272 |
+
2025-09-23 09:08:06,516 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 25/100 | Train Loss: 0.0868 | Val mean-roc_auc_score: 0.7293
|
273 |
+
2025-09-23 09:08:11,716 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 26/100 | Train Loss: 0.0465 | Val mean-roc_auc_score: 0.7228
|
274 |
+
2025-09-23 09:08:18,467 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 27/100 | Train Loss: 0.0335 | Val mean-roc_auc_score: 0.7115
|
275 |
+
2025-09-23 09:08:23,664 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 28/100 | Train Loss: 0.0267 | Val mean-roc_auc_score: 0.7098
|
276 |
+
2025-09-23 09:08:28,949 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 29/100 | Train Loss: 0.0090 | Val mean-roc_auc_score: 0.7095
|
277 |
+
2025-09-23 09:08:34,027 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 30/100 | Train Loss: 0.0304 | Val mean-roc_auc_score: 0.7050
|
278 |
+
2025-09-23 09:08:39,052 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 31/100 | Train Loss: 0.0452 | Val mean-roc_auc_score: 0.7224
|
279 |
+
2025-09-23 09:08:44,566 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 32/100 | Train Loss: 0.0513 | Val mean-roc_auc_score: 0.7185
|
280 |
+
2025-09-23 09:08:49,789 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 33/100 | Train Loss: 0.0241 | Val mean-roc_auc_score: 0.7234
|
281 |
+
2025-09-23 09:08:54,953 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 34/100 | Train Loss: 0.0325 | Val mean-roc_auc_score: 0.7189
|
282 |
+
2025-09-23 09:09:00,169 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 35/100 | Train Loss: 0.0214 | Val mean-roc_auc_score: 0.7293
|
283 |
+
2025-09-23 09:09:05,419 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 36/100 | Train Loss: 0.0084 | Val mean-roc_auc_score: 0.7280
|
284 |
+
2025-09-23 09:09:10,938 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 37/100 | Train Loss: 0.0231 | Val mean-roc_auc_score: 0.7269
|
285 |
+
2025-09-23 09:09:16,060 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 38/100 | Train Loss: 0.0144 | Val mean-roc_auc_score: 0.7265
|
286 |
+
2025-09-23 09:09:21,195 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 39/100 | Train Loss: 0.0185 | Val mean-roc_auc_score: 0.7295
|
287 |
+
2025-09-23 09:09:26,329 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 40/100 | Train Loss: 0.0254 | Val mean-roc_auc_score: 0.7296
|
288 |
+
2025-09-23 09:09:31,452 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 41/100 | Train Loss: 0.0364 | Val mean-roc_auc_score: 0.7104
|
289 |
+
2025-09-23 09:09:36,622 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 42/100 | Train Loss: 0.0278 | Val mean-roc_auc_score: 0.7221
|
290 |
+
2025-09-23 09:09:41,775 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 43/100 | Train Loss: 0.0722 | Val mean-roc_auc_score: 0.7299
|
291 |
+
2025-09-23 09:09:46,995 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 44/100 | Train Loss: 0.0210 | Val mean-roc_auc_score: 0.7388
|
292 |
+
2025-09-23 09:09:52,173 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 45/100 | Train Loss: 0.0137 | Val mean-roc_auc_score: 0.7357
|
293 |
+
2025-09-23 09:09:57,418 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 46/100 | Train Loss: 0.0137 | Val mean-roc_auc_score: 0.7375
|
294 |
+
2025-09-23 09:10:02,953 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 47/100 | Train Loss: 0.0093 | Val mean-roc_auc_score: 0.7439
|
295 |
+
2025-09-23 09:10:08,230 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 48/100 | Train Loss: 0.0118 | Val mean-roc_auc_score: 0.7415
|
296 |
+
2025-09-23 09:10:13,451 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 49/100 | Train Loss: 0.0089 | Val mean-roc_auc_score: 0.7438
|
297 |
+
2025-09-23 09:10:18,704 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 50/100 | Train Loss: 0.0074 | Val mean-roc_auc_score: 0.7472
|
298 |
+
2025-09-23 09:10:23,914 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 51/100 | Train Loss: 0.0076 | Val mean-roc_auc_score: 0.7400
|
299 |
+
2025-09-23 09:10:29,415 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 52/100 | Train Loss: 0.0060 | Val mean-roc_auc_score: 0.7426
|
300 |
+
2025-09-23 09:10:35,778 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 53/100 | Train Loss: 0.0083 | Val mean-roc_auc_score: 0.7377
|
301 |
+
2025-09-23 09:10:40,974 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 54/100 | Train Loss: 0.0059 | Val mean-roc_auc_score: 0.7403
|
302 |
+
2025-09-23 09:10:46,054 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 55/100 | Train Loss: 0.0055 | Val mean-roc_auc_score: 0.7410
|
303 |
+
2025-09-23 09:10:50,937 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 56/100 | Train Loss: 0.0058 | Val mean-roc_auc_score: 0.7381
|
304 |
+
2025-09-23 09:10:56,300 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 57/100 | Train Loss: 0.0061 | Val mean-roc_auc_score: 0.7405
|
305 |
+
2025-09-23 09:11:01,439 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 58/100 | Train Loss: 0.0011 | Val mean-roc_auc_score: 0.7403
|
306 |
+
2025-09-23 09:11:06,546 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 59/100 | Train Loss: 0.0041 | Val mean-roc_auc_score: 0.7439
|
307 |
+
2025-09-23 09:11:11,673 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 60/100 | Train Loss: 0.0691 | Val mean-roc_auc_score: 0.7421
|
308 |
+
2025-09-23 09:11:16,794 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 61/100 | Train Loss: 0.0490 | Val mean-roc_auc_score: 0.7427
|
309 |
+
2025-09-23 09:11:22,322 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 62/100 | Train Loss: 0.0197 | Val mean-roc_auc_score: 0.7442
|
310 |
+
2025-09-23 09:11:27,582 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 63/100 | Train Loss: 0.0352 | Val mean-roc_auc_score: 0.7243
|
311 |
+
2025-09-23 09:11:32,872 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 64/100 | Train Loss: 0.0238 | Val mean-roc_auc_score: 0.7516
|
312 |
+
2025-09-23 09:11:33,027 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Global step of best model: 2432
|
313 |
+
2025-09-23 09:11:33,563 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Best model saved at epoch 64 with val mean-roc_auc_score: 0.7516
|
314 |
+
2025-09-23 09:11:38,765 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 65/100 | Train Loss: 0.0103 | Val mean-roc_auc_score: 0.7485
|
315 |
+
2025-09-23 09:11:43,746 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 66/100 | Train Loss: 0.0069 | Val mean-roc_auc_score: 0.7469
|
316 |
+
2025-09-23 09:11:49,269 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 67/100 | Train Loss: 0.0089 | Val mean-roc_auc_score: 0.7425
|
317 |
+
2025-09-23 09:11:54,468 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 68/100 | Train Loss: 0.0081 | Val mean-roc_auc_score: 0.7407
|
318 |
+
2025-09-23 09:11:59,744 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 69/100 | Train Loss: 0.0067 | Val mean-roc_auc_score: 0.7432
|
319 |
+
2025-09-23 09:12:04,939 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 70/100 | Train Loss: 0.0075 | Val mean-roc_auc_score: 0.7437
|
320 |
+
2025-09-23 09:12:10,129 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 71/100 | Train Loss: 0.0063 | Val mean-roc_auc_score: 0.7444
|
321 |
+
2025-09-23 09:12:15,667 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 72/100 | Train Loss: 0.0063 | Val mean-roc_auc_score: 0.7423
|
322 |
+
2025-09-23 09:12:20,900 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 73/100 | Train Loss: 0.0044 | Val mean-roc_auc_score: 0.7422
|
323 |
+
2025-09-23 09:12:26,168 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 74/100 | Train Loss: 0.0021 | Val mean-roc_auc_score: 0.7427
|
324 |
+
2025-09-23 09:12:31,461 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 75/100 | Train Loss: 0.0069 | Val mean-roc_auc_score: 0.7435
|
325 |
+
2025-09-23 09:12:36,712 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 76/100 | Train Loss: 0.0066 | Val mean-roc_auc_score: 0.7432
|
326 |
+
2025-09-23 09:12:42,230 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 77/100 | Train Loss: 0.0041 | Val mean-roc_auc_score: 0.7426
|
327 |
+
2025-09-23 09:12:47,466 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 78/100 | Train Loss: 0.0052 | Val mean-roc_auc_score: 0.7399
|
328 |
+
2025-09-23 09:12:53,994 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 79/100 | Train Loss: 0.0078 | Val mean-roc_auc_score: 0.7431
|
329 |
+
2025-09-23 09:12:59,394 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 80/100 | Train Loss: 0.0048 | Val mean-roc_auc_score: 0.7413
|
330 |
+
2025-09-23 09:13:04,807 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 81/100 | Train Loss: 0.0032 | Val mean-roc_auc_score: 0.7422
|
331 |
+
2025-09-23 09:13:10,190 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 82/100 | Train Loss: 0.0072 | Val mean-roc_auc_score: 0.7430
|
332 |
+
2025-09-23 09:13:15,221 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 83/100 | Train Loss: 0.0042 | Val mean-roc_auc_score: 0.7431
|
333 |
+
2025-09-23 09:13:20,321 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 84/100 | Train Loss: 0.0036 | Val mean-roc_auc_score: 0.7429
|
334 |
+
2025-09-23 09:13:25,422 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 85/100 | Train Loss: 0.0039 | Val mean-roc_auc_score: 0.7422
|
335 |
+
2025-09-23 09:13:30,614 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 86/100 | Train Loss: 0.0042 | Val mean-roc_auc_score: 0.7432
|
336 |
+
2025-09-23 09:13:36,082 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 87/100 | Train Loss: 0.0120 | Val mean-roc_auc_score: 0.7421
|
337 |
+
2025-09-23 09:13:41,311 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 88/100 | Train Loss: 0.0041 | Val mean-roc_auc_score: 0.7421
|
338 |
+
2025-09-23 09:13:46,578 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 89/100 | Train Loss: 0.0030 | Val mean-roc_auc_score: 0.7414
|
339 |
+
2025-09-23 09:13:51,584 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 90/100 | Train Loss: 0.0068 | Val mean-roc_auc_score: 0.7394
|
340 |
+
2025-09-23 09:13:56,817 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 91/100 | Train Loss: 0.0051 | Val mean-roc_auc_score: 0.7381
|
341 |
+
2025-09-23 09:14:02,317 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 92/100 | Train Loss: 0.0029 | Val mean-roc_auc_score: 0.7381
|
342 |
+
2025-09-23 09:14:07,564 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 93/100 | Train Loss: 0.0031 | Val mean-roc_auc_score: 0.7378
|
343 |
+
2025-09-23 09:14:12,760 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 94/100 | Train Loss: 0.0023 | Val mean-roc_auc_score: 0.7372
|
344 |
+
2025-09-23 09:14:18,006 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 95/100 | Train Loss: 0.0024 | Val mean-roc_auc_score: 0.7364
|
345 |
+
2025-09-23 09:14:23,123 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 96/100 | Train Loss: 0.0024 | Val mean-roc_auc_score: 0.7352
|
346 |
+
2025-09-23 09:14:28,969 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 97/100 | Train Loss: 0.0032 | Val mean-roc_auc_score: 0.7365
|
347 |
+
2025-09-23 09:14:34,255 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 98/100 | Train Loss: 0.0018 | Val mean-roc_auc_score: 0.7359
|
348 |
+
2025-09-23 09:14:39,580 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 99/100 | Train Loss: 0.0037 | Val mean-roc_auc_score: 0.7380
|
349 |
+
2025-09-23 09:14:44,811 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 100/100 | Train Loss: 0.0026 | Val mean-roc_auc_score: 0.7389
|
350 |
+
2025-09-23 09:14:45,598 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Test mean-roc_auc_score: 0.8404
|
351 |
+
2025-09-23 09:14:45,932 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Final Triplicate Test Results — Avg mean-roc_auc_score: 0.8346, Std Dev: 0.0045
|
logs_modchembert_classification_ModChemBERT-MLM-DAPT-TAFT-OPT/modchembert_deepchem_splits_run_bbbp_epochs100_batch_size64_20250923_021951.log
ADDED
@@ -0,0 +1,355 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
2025-09-23 02:19:51,927 - logs_modchembert_bbbp_epochs100_batch_size64 - INFO - Running benchmark for dataset: bbbp
|
2 |
+
2025-09-23 02:19:51,927 - logs_modchembert_bbbp_epochs100_batch_size64 - INFO - dataset: bbbp, tasks: ['p_np'], epochs: 100, learning rate: 3e-05
|
3 |
+
2025-09-23 02:19:51,931 - logs_modchembert_bbbp_epochs100_batch_size64 - INFO - Starting triplicate run 1 for dataset bbbp at 2025-09-23_02-19-51
|
4 |
+
2025-09-23 02:19:55,043 - logs_modchembert_bbbp_epochs100_batch_size64 - INFO - Epoch 1/100 | Train Loss: 0.2764 | Val mean-roc_auc_score: 0.9905
|
5 |
+
2025-09-23 02:19:55,044 - logs_modchembert_bbbp_epochs100_batch_size64 - INFO - Global step of best model: 26
|
6 |
+
2025-09-23 02:19:55,547 - logs_modchembert_bbbp_epochs100_batch_size64 - INFO - Best model saved at epoch 1 with val mean-roc_auc_score: 0.9905
|
7 |
+
2025-09-23 02:19:59,232 - logs_modchembert_bbbp_epochs100_batch_size64 - INFO - Epoch 2/100 | Train Loss: 0.1575 | Val mean-roc_auc_score: 0.9937
|
8 |
+
2025-09-23 02:19:59,404 - logs_modchembert_bbbp_epochs100_batch_size64 - INFO - Global step of best model: 52
|
9 |
+
2025-09-23 02:19:59,920 - logs_modchembert_bbbp_epochs100_batch_size64 - INFO - Best model saved at epoch 2 with val mean-roc_auc_score: 0.9937
|
10 |
+
2025-09-23 02:20:03,634 - logs_modchembert_bbbp_epochs100_batch_size64 - INFO - Epoch 3/100 | Train Loss: 0.1244 | Val mean-roc_auc_score: 0.9940
|
11 |
+
2025-09-23 02:20:03,799 - logs_modchembert_bbbp_epochs100_batch_size64 - INFO - Global step of best model: 78
|
12 |
+
2025-09-23 02:20:04,307 - logs_modchembert_bbbp_epochs100_batch_size64 - INFO - Best model saved at epoch 3 with val mean-roc_auc_score: 0.9940
|
13 |
+
2025-09-23 02:20:07,975 - logs_modchembert_bbbp_epochs100_batch_size64 - INFO - Epoch 4/100 | Train Loss: 0.1201 | Val mean-roc_auc_score: 0.9941
|
14 |
+
2025-09-23 02:20:08,158 - logs_modchembert_bbbp_epochs100_batch_size64 - INFO - Global step of best model: 104
|
15 |
+
2025-09-23 02:20:08,676 - logs_modchembert_bbbp_epochs100_batch_size64 - INFO - Best model saved at epoch 4 with val mean-roc_auc_score: 0.9941
|
16 |
+
2025-09-23 02:20:12,379 - logs_modchembert_bbbp_epochs100_batch_size64 - INFO - Epoch 5/100 | Train Loss: 0.0733 | Val mean-roc_auc_score: 0.9942
|
17 |
+
2025-09-23 02:20:12,553 - logs_modchembert_bbbp_epochs100_batch_size64 - INFO - Global step of best model: 130
|
18 |
+
2025-09-23 02:20:13,060 - logs_modchembert_bbbp_epochs100_batch_size64 - INFO - Best model saved at epoch 5 with val mean-roc_auc_score: 0.9942
|
19 |
+
2025-09-23 02:20:16,788 - logs_modchembert_bbbp_epochs100_batch_size64 - INFO - Epoch 6/100 | Train Loss: 0.0619 | Val mean-roc_auc_score: 0.9941
|
20 |
+
2025-09-23 02:20:20,905 - logs_modchembert_bbbp_epochs100_batch_size64 - INFO - Epoch 7/100 | Train Loss: 0.0481 | Val mean-roc_auc_score: 0.9940
|
21 |
+
2025-09-23 02:20:24,546 - logs_modchembert_bbbp_epochs100_batch_size64 - INFO - Epoch 8/100 | Train Loss: 0.0449 | Val mean-roc_auc_score: 0.9947
|
22 |
+
2025-09-23 02:20:24,688 - logs_modchembert_bbbp_epochs100_batch_size64 - INFO - Global step of best model: 208
|
23 |
+
2025-09-23 02:20:25,193 - logs_modchembert_bbbp_epochs100_batch_size64 - INFO - Best model saved at epoch 8 with val mean-roc_auc_score: 0.9947
|
24 |
+
2025-09-23 02:20:28,929 - logs_modchembert_bbbp_epochs100_batch_size64 - INFO - Epoch 9/100 | Train Loss: 0.0281 | Val mean-roc_auc_score: 0.9951
|
25 |
+
2025-09-23 02:20:29,101 - logs_modchembert_bbbp_epochs100_batch_size64 - INFO - Global step of best model: 234
|
26 |
+
2025-09-23 02:20:29,616 - logs_modchembert_bbbp_epochs100_batch_size64 - INFO - Best model saved at epoch 9 with val mean-roc_auc_score: 0.9951
|
27 |
+
2025-09-23 02:20:33,332 - logs_modchembert_bbbp_epochs100_batch_size64 - INFO - Epoch 10/100 | Train Loss: 0.0192 | Val mean-roc_auc_score: 0.9946
|
28 |
+
2025-09-23 02:20:37,136 - logs_modchembert_bbbp_epochs100_batch_size64 - INFO - Epoch 11/100 | Train Loss: 0.0191 | Val mean-roc_auc_score: 0.9945
|
29 |
+
2025-09-23 02:20:41,185 - logs_modchembert_bbbp_epochs100_batch_size64 - INFO - Epoch 12/100 | Train Loss: 0.0172 | Val mean-roc_auc_score: 0.9937
|
30 |
+
2025-09-23 02:20:44,841 - logs_modchembert_bbbp_epochs100_batch_size64 - INFO - Epoch 13/100 | Train Loss: 0.0165 | Val mean-roc_auc_score: 0.9945
|
31 |
+
2025-09-23 02:20:48,549 - logs_modchembert_bbbp_epochs100_batch_size64 - INFO - Epoch 14/100 | Train Loss: 0.0136 | Val mean-roc_auc_score: 0.9938
|
32 |
+
2025-09-23 02:20:52,243 - logs_modchembert_bbbp_epochs100_batch_size64 - INFO - Epoch 15/100 | Train Loss: 0.0107 | Val mean-roc_auc_score: 0.9943
|
33 |
+
2025-09-23 02:20:55,889 - logs_modchembert_bbbp_epochs100_batch_size64 - INFO - Epoch 16/100 | Train Loss: 0.0078 | Val mean-roc_auc_score: 0.9945
|
34 |
+
2025-09-23 02:20:59,945 - logs_modchembert_bbbp_epochs100_batch_size64 - INFO - Epoch 17/100 | Train Loss: 0.0082 | Val mean-roc_auc_score: 0.9943
|
35 |
+
2025-09-23 02:21:03,620 - logs_modchembert_bbbp_epochs100_batch_size64 - INFO - Epoch 18/100 | Train Loss: 0.0067 | Val mean-roc_auc_score: 0.9943
|
36 |
+
2025-09-23 02:21:07,401 - logs_modchembert_bbbp_epochs100_batch_size64 - INFO - Epoch 19/100 | Train Loss: 0.0068 | Val mean-roc_auc_score: 0.9943
|
37 |
+
2025-09-23 02:21:11,065 - logs_modchembert_bbbp_epochs100_batch_size64 - INFO - Epoch 20/100 | Train Loss: 0.0095 | Val mean-roc_auc_score: 0.9942
|
38 |
+
2025-09-23 02:21:14,736 - logs_modchembert_bbbp_epochs100_batch_size64 - INFO - Epoch 21/100 | Train Loss: 0.0096 | Val mean-roc_auc_score: 0.9943
|
39 |
+
2025-09-23 02:21:18,751 - logs_modchembert_bbbp_epochs100_batch_size64 - INFO - Epoch 22/100 | Train Loss: 0.0087 | Val mean-roc_auc_score: 0.9946
|
40 |
+
2025-09-23 02:21:22,437 - logs_modchembert_bbbp_epochs100_batch_size64 - INFO - Epoch 23/100 | Train Loss: 0.0059 | Val mean-roc_auc_score: 0.9943
|
41 |
+
2025-09-23 02:21:26,073 - logs_modchembert_bbbp_epochs100_batch_size64 - INFO - Epoch 24/100 | Train Loss: 0.0059 | Val mean-roc_auc_score: 0.9945
|
42 |
+
2025-09-23 02:21:29,694 - logs_modchembert_bbbp_epochs100_batch_size64 - INFO - Epoch 25/100 | Train Loss: 0.0053 | Val mean-roc_auc_score: 0.9948
|
43 |
+
2025-09-23 02:21:33,402 - logs_modchembert_bbbp_epochs100_batch_size64 - INFO - Epoch 26/100 | Train Loss: 0.0045 | Val mean-roc_auc_score: 0.9948
|
44 |
+
2025-09-23 02:21:37,392 - logs_modchembert_bbbp_epochs100_batch_size64 - INFO - Epoch 27/100 | Train Loss: 0.0011 | Val mean-roc_auc_score: 0.9948
|
45 |
+
2025-09-23 02:21:41,073 - logs_modchembert_bbbp_epochs100_batch_size64 - INFO - Epoch 28/100 | Train Loss: 0.0038 | Val mean-roc_auc_score: 0.9949
|
46 |
+
2025-09-23 02:21:44,741 - logs_modchembert_bbbp_epochs100_batch_size64 - INFO - Epoch 29/100 | Train Loss: 0.0036 | Val mean-roc_auc_score: 0.9949
|
47 |
+
2025-09-23 02:21:48,448 - logs_modchembert_bbbp_epochs100_batch_size64 - INFO - Epoch 30/100 | Train Loss: 0.0033 | Val mean-roc_auc_score: 0.9950
|
48 |
+
2025-09-23 02:21:52,140 - logs_modchembert_bbbp_epochs100_batch_size64 - INFO - Epoch 31/100 | Train Loss: 0.0021 | Val mean-roc_auc_score: 0.9951
|
49 |
+
2025-09-23 02:21:56,178 - logs_modchembert_bbbp_epochs100_batch_size64 - INFO - Epoch 32/100 | Train Loss: 0.0057 | Val mean-roc_auc_score: 0.9947
|
50 |
+
2025-09-23 02:21:59,830 - logs_modchembert_bbbp_epochs100_batch_size64 - INFO - Epoch 33/100 | Train Loss: 0.0050 | Val mean-roc_auc_score: 0.9951
|
51 |
+
2025-09-23 02:22:03,548 - logs_modchembert_bbbp_epochs100_batch_size64 - INFO - Epoch 34/100 | Train Loss: 0.0055 | Val mean-roc_auc_score: 0.9952
|
52 |
+
2025-09-23 02:22:03,693 - logs_modchembert_bbbp_epochs100_batch_size64 - INFO - Global step of best model: 884
|
53 |
+
2025-09-23 02:22:04,207 - logs_modchembert_bbbp_epochs100_batch_size64 - INFO - Best model saved at epoch 34 with val mean-roc_auc_score: 0.9952
|
54 |
+
2025-09-23 02:22:07,874 - logs_modchembert_bbbp_epochs100_batch_size64 - INFO - Epoch 35/100 | Train Loss: 0.0012 | Val mean-roc_auc_score: 0.9955
|
55 |
+
2025-09-23 02:22:08,046 - logs_modchembert_bbbp_epochs100_batch_size64 - INFO - Global step of best model: 910
|
56 |
+
2025-09-23 02:22:08,559 - logs_modchembert_bbbp_epochs100_batch_size64 - INFO - Best model saved at epoch 35 with val mean-roc_auc_score: 0.9955
|
57 |
+
2025-09-23 02:22:12,240 - logs_modchembert_bbbp_epochs100_batch_size64 - INFO - Epoch 36/100 | Train Loss: 0.0039 | Val mean-roc_auc_score: 0.9954
|
58 |
+
2025-09-23 02:22:16,287 - logs_modchembert_bbbp_epochs100_batch_size64 - INFO - Epoch 37/100 | Train Loss: 0.0021 | Val mean-roc_auc_score: 0.9954
|
59 |
+
2025-09-23 02:22:19,943 - logs_modchembert_bbbp_epochs100_batch_size64 - INFO - Epoch 38/100 | Train Loss: 0.0027 | Val mean-roc_auc_score: 0.9954
|
60 |
+
2025-09-23 02:22:24,799 - logs_modchembert_bbbp_epochs100_batch_size64 - INFO - Epoch 39/100 | Train Loss: 0.0016 | Val mean-roc_auc_score: 0.9954
|
61 |
+
2025-09-23 02:22:28,540 - logs_modchembert_bbbp_epochs100_batch_size64 - INFO - Epoch 40/100 | Train Loss: 0.0023 | Val mean-roc_auc_score: 0.9952
|
62 |
+
2025-09-23 02:22:32,235 - logs_modchembert_bbbp_epochs100_batch_size64 - INFO - Epoch 41/100 | Train Loss: 0.0022 | Val mean-roc_auc_score: 0.9953
|
63 |
+
2025-09-23 02:22:36,296 - logs_modchembert_bbbp_epochs100_batch_size64 - INFO - Epoch 42/100 | Train Loss: 0.0029 | Val mean-roc_auc_score: 0.9953
|
64 |
+
2025-09-23 02:22:40,000 - logs_modchembert_bbbp_epochs100_batch_size64 - INFO - Epoch 43/100 | Train Loss: 0.0026 | Val mean-roc_auc_score: 0.9952
|
65 |
+
2025-09-23 02:22:43,738 - logs_modchembert_bbbp_epochs100_batch_size64 - INFO - Epoch 44/100 | Train Loss: 0.0026 | Val mean-roc_auc_score: 0.9954
|
66 |
+
2025-09-23 02:22:47,471 - logs_modchembert_bbbp_epochs100_batch_size64 - INFO - Epoch 45/100 | Train Loss: 0.0025 | Val mean-roc_auc_score: 0.9953
|
67 |
+
2025-09-23 02:22:51,161 - logs_modchembert_bbbp_epochs100_batch_size64 - INFO - Epoch 46/100 | Train Loss: 0.0059 | Val mean-roc_auc_score: 0.9952
|
68 |
+
2025-09-23 02:22:55,234 - logs_modchembert_bbbp_epochs100_batch_size64 - INFO - Epoch 47/100 | Train Loss: 0.0088 | Val mean-roc_auc_score: 0.9948
|
69 |
+
2025-09-23 02:22:58,921 - logs_modchembert_bbbp_epochs100_batch_size64 - INFO - Epoch 48/100 | Train Loss: 0.0041 | Val mean-roc_auc_score: 0.9939
|
70 |
+
2025-09-23 02:23:02,599 - logs_modchembert_bbbp_epochs100_batch_size64 - INFO - Epoch 49/100 | Train Loss: 0.0041 | Val mean-roc_auc_score: 0.9941
|
71 |
+
2025-09-23 02:23:06,260 - logs_modchembert_bbbp_epochs100_batch_size64 - INFO - Epoch 50/100 | Train Loss: 0.0024 | Val mean-roc_auc_score: 0.9942
|
72 |
+
2025-09-23 02:23:09,955 - logs_modchembert_bbbp_epochs100_batch_size64 - INFO - Epoch 51/100 | Train Loss: 0.0020 | Val mean-roc_auc_score: 0.9942
|
73 |
+
2025-09-23 02:23:14,048 - logs_modchembert_bbbp_epochs100_batch_size64 - INFO - Epoch 52/100 | Train Loss: 0.0026 | Val mean-roc_auc_score: 0.9943
|
74 |
+
2025-09-23 02:23:17,731 - logs_modchembert_bbbp_epochs100_batch_size64 - INFO - Epoch 53/100 | Train Loss: 0.0025 | Val mean-roc_auc_score: 0.9943
|
75 |
+
2025-09-23 02:23:21,454 - logs_modchembert_bbbp_epochs100_batch_size64 - INFO - Epoch 54/100 | Train Loss: 0.0026 | Val mean-roc_auc_score: 0.9943
|
76 |
+
2025-09-23 02:23:25,132 - logs_modchembert_bbbp_epochs100_batch_size64 - INFO - Epoch 55/100 | Train Loss: 0.0020 | Val mean-roc_auc_score: 0.9942
|
77 |
+
2025-09-23 02:23:28,802 - logs_modchembert_bbbp_epochs100_batch_size64 - INFO - Epoch 56/100 | Train Loss: 0.0023 | Val mean-roc_auc_score: 0.9944
|
78 |
+
2025-09-23 02:23:32,874 - logs_modchembert_bbbp_epochs100_batch_size64 - INFO - Epoch 57/100 | Train Loss: 0.0020 | Val mean-roc_auc_score: 0.9944
|
79 |
+
2025-09-23 02:23:36,614 - logs_modchembert_bbbp_epochs100_batch_size64 - INFO - Epoch 58/100 | Train Loss: 0.0018 | Val mean-roc_auc_score: 0.9942
|
80 |
+
2025-09-23 02:23:40,309 - logs_modchembert_bbbp_epochs100_batch_size64 - INFO - Epoch 59/100 | Train Loss: 0.0022 | Val mean-roc_auc_score: 0.9942
|
81 |
+
2025-09-23 02:23:43,993 - logs_modchembert_bbbp_epochs100_batch_size64 - INFO - Epoch 60/100 | Train Loss: 0.0020 | Val mean-roc_auc_score: 0.9942
|
82 |
+
2025-09-23 02:23:47,691 - logs_modchembert_bbbp_epochs100_batch_size64 - INFO - Epoch 61/100 | Train Loss: 0.0018 | Val mean-roc_auc_score: 0.9942
|
83 |
+
2025-09-23 02:23:51,717 - logs_modchembert_bbbp_epochs100_batch_size64 - INFO - Epoch 62/100 | Train Loss: 0.0031 | Val mean-roc_auc_score: 0.9945
|
84 |
+
2025-09-23 02:23:55,434 - logs_modchembert_bbbp_epochs100_batch_size64 - INFO - Epoch 63/100 | Train Loss: 0.0014 | Val mean-roc_auc_score: 0.9943
|
85 |
+
2025-09-23 02:23:59,118 - logs_modchembert_bbbp_epochs100_batch_size64 - INFO - Epoch 64/100 | Train Loss: 0.0028 | Val mean-roc_auc_score: 0.9943
|
86 |
+
2025-09-23 02:24:02,815 - logs_modchembert_bbbp_epochs100_batch_size64 - INFO - Epoch 65/100 | Train Loss: 0.0014 | Val mean-roc_auc_score: 0.9945
|
87 |
+
2025-09-23 02:24:06,534 - logs_modchembert_bbbp_epochs100_batch_size64 - INFO - Epoch 66/100 | Train Loss: 0.0059 | Val mean-roc_auc_score: 0.9934
|
88 |
+
2025-09-23 02:24:10,574 - logs_modchembert_bbbp_epochs100_batch_size64 - INFO - Epoch 67/100 | Train Loss: 0.0343 | Val mean-roc_auc_score: 0.9956
|
89 |
+
2025-09-23 02:24:10,712 - logs_modchembert_bbbp_epochs100_batch_size64 - INFO - Global step of best model: 1742
|
90 |
+
2025-09-23 02:24:11,235 - logs_modchembert_bbbp_epochs100_batch_size64 - INFO - Best model saved at epoch 67 with val mean-roc_auc_score: 0.9956
|
91 |
+
2025-09-23 02:24:14,948 - logs_modchembert_bbbp_epochs100_batch_size64 - INFO - Epoch 68/100 | Train Loss: 0.0151 | Val mean-roc_auc_score: 0.9949
|
92 |
+
2025-09-23 02:24:18,646 - logs_modchembert_bbbp_epochs100_batch_size64 - INFO - Epoch 69/100 | Train Loss: 0.0099 | Val mean-roc_auc_score: 0.9949
|
93 |
+
2025-09-23 02:24:22,378 - logs_modchembert_bbbp_epochs100_batch_size64 - INFO - Epoch 70/100 | Train Loss: 0.0033 | Val mean-roc_auc_score: 0.9951
|
94 |
+
2025-09-23 02:24:26,058 - logs_modchembert_bbbp_epochs100_batch_size64 - INFO - Epoch 71/100 | Train Loss: 0.0022 | Val mean-roc_auc_score: 0.9952
|
95 |
+
2025-09-23 02:24:30,143 - logs_modchembert_bbbp_epochs100_batch_size64 - INFO - Epoch 72/100 | Train Loss: 0.0023 | Val mean-roc_auc_score: 0.9952
|
96 |
+
2025-09-23 02:24:33,930 - logs_modchembert_bbbp_epochs100_batch_size64 - INFO - Epoch 73/100 | Train Loss: 0.0024 | Val mean-roc_auc_score: 0.9951
|
97 |
+
2025-09-23 02:24:37,678 - logs_modchembert_bbbp_epochs100_batch_size64 - INFO - Epoch 74/100 | Train Loss: 0.0020 | Val mean-roc_auc_score: 0.9952
|
98 |
+
2025-09-23 02:24:41,351 - logs_modchembert_bbbp_epochs100_batch_size64 - INFO - Epoch 75/100 | Train Loss: 0.0028 | Val mean-roc_auc_score: 0.9953
|
99 |
+
2025-09-23 02:24:45,141 - logs_modchembert_bbbp_epochs100_batch_size64 - INFO - Epoch 76/100 | Train Loss: 0.0018 | Val mean-roc_auc_score: 0.9952
|
100 |
+
2025-09-23 02:24:50,458 - logs_modchembert_bbbp_epochs100_batch_size64 - INFO - Epoch 77/100 | Train Loss: 0.0015 | Val mean-roc_auc_score: 0.9953
|
101 |
+
2025-09-23 02:24:54,140 - logs_modchembert_bbbp_epochs100_batch_size64 - INFO - Epoch 78/100 | Train Loss: 0.0023 | Val mean-roc_auc_score: 0.9955
|
102 |
+
2025-09-23 02:24:57,817 - logs_modchembert_bbbp_epochs100_batch_size64 - INFO - Epoch 79/100 | Train Loss: 0.0018 | Val mean-roc_auc_score: 0.9951
|
103 |
+
2025-09-23 02:25:01,530 - logs_modchembert_bbbp_epochs100_batch_size64 - INFO - Epoch 80/100 | Train Loss: 0.0014 | Val mean-roc_auc_score: 0.9951
|
104 |
+
2025-09-23 02:25:05,224 - logs_modchembert_bbbp_epochs100_batch_size64 - INFO - Epoch 81/100 | Train Loss: 0.0015 | Val mean-roc_auc_score: 0.9951
|
105 |
+
2025-09-23 02:25:09,174 - logs_modchembert_bbbp_epochs100_batch_size64 - INFO - Epoch 82/100 | Train Loss: 0.0017 | Val mean-roc_auc_score: 0.9951
|
106 |
+
2025-09-23 02:25:12,907 - logs_modchembert_bbbp_epochs100_batch_size64 - INFO - Epoch 83/100 | Train Loss: 0.0015 | Val mean-roc_auc_score: 0.9950
|
107 |
+
2025-09-23 02:25:16,662 - logs_modchembert_bbbp_epochs100_batch_size64 - INFO - Epoch 84/100 | Train Loss: 0.0025 | Val mean-roc_auc_score: 0.9951
|
108 |
+
2025-09-23 02:25:20,386 - logs_modchembert_bbbp_epochs100_batch_size64 - INFO - Epoch 85/100 | Train Loss: 0.0013 | Val mean-roc_auc_score: 0.9951
|
109 |
+
2025-09-23 02:25:24,066 - logs_modchembert_bbbp_epochs100_batch_size64 - INFO - Epoch 86/100 | Train Loss: 0.0012 | Val mean-roc_auc_score: 0.9951
|
110 |
+
2025-09-23 02:25:28,042 - logs_modchembert_bbbp_epochs100_batch_size64 - INFO - Epoch 87/100 | Train Loss: 0.0011 | Val mean-roc_auc_score: 0.9951
|
111 |
+
2025-09-23 02:25:31,732 - logs_modchembert_bbbp_epochs100_batch_size64 - INFO - Epoch 88/100 | Train Loss: 0.0013 | Val mean-roc_auc_score: 0.9952
|
112 |
+
2025-09-23 02:25:35,433 - logs_modchembert_bbbp_epochs100_batch_size64 - INFO - Epoch 89/100 | Train Loss: 0.0011 | Val mean-roc_auc_score: 0.9952
|
113 |
+
2025-09-23 02:25:39,171 - logs_modchembert_bbbp_epochs100_batch_size64 - INFO - Epoch 90/100 | Train Loss: 0.0011 | Val mean-roc_auc_score: 0.9951
|
114 |
+
2025-09-23 02:25:42,880 - logs_modchembert_bbbp_epochs100_batch_size64 - INFO - Epoch 91/100 | Train Loss: 0.0013 | Val mean-roc_auc_score: 0.9952
|
115 |
+
2025-09-23 02:25:46,938 - logs_modchembert_bbbp_epochs100_batch_size64 - INFO - Epoch 92/100 | Train Loss: 0.0010 | Val mean-roc_auc_score: 0.9952
|
116 |
+
2025-09-23 02:25:50,702 - logs_modchembert_bbbp_epochs100_batch_size64 - INFO - Epoch 93/100 | Train Loss: 0.0015 | Val mean-roc_auc_score: 0.9950
|
117 |
+
2025-09-23 02:25:54,463 - logs_modchembert_bbbp_epochs100_batch_size64 - INFO - Epoch 94/100 | Train Loss: 0.0011 | Val mean-roc_auc_score: 0.9951
|
118 |
+
2025-09-23 02:25:58,174 - logs_modchembert_bbbp_epochs100_batch_size64 - INFO - Epoch 95/100 | Train Loss: 0.0015 | Val mean-roc_auc_score: 0.9950
|
119 |
+
2025-09-23 02:26:01,933 - logs_modchembert_bbbp_epochs100_batch_size64 - INFO - Epoch 96/100 | Train Loss: 0.0010 | Val mean-roc_auc_score: 0.9950
|
120 |
+
2025-09-23 02:26:05,954 - logs_modchembert_bbbp_epochs100_batch_size64 - INFO - Epoch 97/100 | Train Loss: 0.0008 | Val mean-roc_auc_score: 0.9950
|
121 |
+
2025-09-23 02:26:09,690 - logs_modchembert_bbbp_epochs100_batch_size64 - INFO - Epoch 98/100 | Train Loss: 0.0013 | Val mean-roc_auc_score: 0.9950
|
122 |
+
2025-09-23 02:26:13,353 - logs_modchembert_bbbp_epochs100_batch_size64 - INFO - Epoch 99/100 | Train Loss: 0.0011 | Val mean-roc_auc_score: 0.9950
|
123 |
+
2025-09-23 02:26:17,050 - logs_modchembert_bbbp_epochs100_batch_size64 - INFO - Epoch 100/100 | Train Loss: 0.0012 | Val mean-roc_auc_score: 0.9951
|
124 |
+
2025-09-23 02:26:17,646 - logs_modchembert_bbbp_epochs100_batch_size64 - INFO - Test mean-roc_auc_score: 0.7558
|
125 |
+
2025-09-23 02:26:18,019 - logs_modchembert_bbbp_epochs100_batch_size64 - INFO - Starting triplicate run 2 for dataset bbbp at 2025-09-23_02-26-18
|
126 |
+
2025-09-23 02:26:21,184 - logs_modchembert_bbbp_epochs100_batch_size64 - INFO - Epoch 1/100 | Train Loss: 0.2488 | Val mean-roc_auc_score: 0.9881
|
127 |
+
2025-09-23 02:26:21,184 - logs_modchembert_bbbp_epochs100_batch_size64 - INFO - Global step of best model: 26
|
128 |
+
2025-09-23 02:26:21,701 - logs_modchembert_bbbp_epochs100_batch_size64 - INFO - Best model saved at epoch 1 with val mean-roc_auc_score: 0.9881
|
129 |
+
2025-09-23 02:26:25,401 - logs_modchembert_bbbp_epochs100_batch_size64 - INFO - Epoch 2/100 | Train Loss: 0.1442 | Val mean-roc_auc_score: 0.9923
|
130 |
+
2025-09-23 02:26:25,572 - logs_modchembert_bbbp_epochs100_batch_size64 - INFO - Global step of best model: 52
|
131 |
+
2025-09-23 02:26:26,090 - logs_modchembert_bbbp_epochs100_batch_size64 - INFO - Best model saved at epoch 2 with val mean-roc_auc_score: 0.9923
|
132 |
+
2025-09-23 02:26:29,800 - logs_modchembert_bbbp_epochs100_batch_size64 - INFO - Epoch 3/100 | Train Loss: 0.1118 | Val mean-roc_auc_score: 0.9927
|
133 |
+
2025-09-23 02:26:29,977 - logs_modchembert_bbbp_epochs100_batch_size64 - INFO - Global step of best model: 78
|
134 |
+
2025-09-23 02:26:30,480 - logs_modchembert_bbbp_epochs100_batch_size64 - INFO - Best model saved at epoch 3 with val mean-roc_auc_score: 0.9927
|
135 |
+
2025-09-23 02:26:34,159 - logs_modchembert_bbbp_epochs100_batch_size64 - INFO - Epoch 4/100 | Train Loss: 0.1074 | Val mean-roc_auc_score: 0.9856
|
136 |
+
2025-09-23 02:26:37,867 - logs_modchembert_bbbp_epochs100_batch_size64 - INFO - Epoch 5/100 | Train Loss: 0.0694 | Val mean-roc_auc_score: 0.9874
|
137 |
+
2025-09-23 02:26:41,558 - logs_modchembert_bbbp_epochs100_batch_size64 - INFO - Epoch 6/100 | Train Loss: 0.0523 | Val mean-roc_auc_score: 0.9914
|
138 |
+
2025-09-23 02:26:45,604 - logs_modchembert_bbbp_epochs100_batch_size64 - INFO - Epoch 7/100 | Train Loss: 0.0362 | Val mean-roc_auc_score: 0.9886
|
139 |
+
2025-09-23 02:26:49,337 - logs_modchembert_bbbp_epochs100_batch_size64 - INFO - Epoch 8/100 | Train Loss: 0.0227 | Val mean-roc_auc_score: 0.9907
|
140 |
+
2025-09-23 02:26:53,184 - logs_modchembert_bbbp_epochs100_batch_size64 - INFO - Epoch 9/100 | Train Loss: 0.0227 | Val mean-roc_auc_score: 0.9899
|
141 |
+
2025-09-23 02:26:56,895 - logs_modchembert_bbbp_epochs100_batch_size64 - INFO - Epoch 10/100 | Train Loss: 0.0237 | Val mean-roc_auc_score: 0.9905
|
142 |
+
2025-09-23 02:27:00,574 - logs_modchembert_bbbp_epochs100_batch_size64 - INFO - Epoch 11/100 | Train Loss: 0.0212 | Val mean-roc_auc_score: 0.9923
|
143 |
+
2025-09-23 02:27:04,648 - logs_modchembert_bbbp_epochs100_batch_size64 - INFO - Epoch 12/100 | Train Loss: 0.0194 | Val mean-roc_auc_score: 0.9911
|
144 |
+
2025-09-23 02:27:08,370 - logs_modchembert_bbbp_epochs100_batch_size64 - INFO - Epoch 13/100 | Train Loss: 0.0128 | Val mean-roc_auc_score: 0.9917
|
145 |
+
2025-09-23 02:27:12,042 - logs_modchembert_bbbp_epochs100_batch_size64 - INFO - Epoch 14/100 | Train Loss: 0.0120 | Val mean-roc_auc_score: 0.9923
|
146 |
+
2025-09-23 02:27:15,766 - logs_modchembert_bbbp_epochs100_batch_size64 - INFO - Epoch 15/100 | Train Loss: 0.0083 | Val mean-roc_auc_score: 0.9919
|
147 |
+
2025-09-23 02:27:19,508 - logs_modchembert_bbbp_epochs100_batch_size64 - INFO - Epoch 16/100 | Train Loss: 0.0093 | Val mean-roc_auc_score: 0.9927
|
148 |
+
2025-09-23 02:27:19,988 - logs_modchembert_bbbp_epochs100_batch_size64 - INFO - Global step of best model: 416
|
149 |
+
2025-09-23 02:27:20,499 - logs_modchembert_bbbp_epochs100_batch_size64 - INFO - Best model saved at epoch 16 with val mean-roc_auc_score: 0.9927
|
150 |
+
2025-09-23 02:27:24,237 - logs_modchembert_bbbp_epochs100_batch_size64 - INFO - Epoch 17/100 | Train Loss: 0.0084 | Val mean-roc_auc_score: 0.9924
|
151 |
+
2025-09-23 02:27:27,927 - logs_modchembert_bbbp_epochs100_batch_size64 - INFO - Epoch 18/100 | Train Loss: 0.0089 | Val mean-roc_auc_score: 0.9937
|
152 |
+
2025-09-23 02:27:28,103 - logs_modchembert_bbbp_epochs100_batch_size64 - INFO - Global step of best model: 468
|
153 |
+
2025-09-23 02:27:28,636 - logs_modchembert_bbbp_epochs100_batch_size64 - INFO - Best model saved at epoch 18 with val mean-roc_auc_score: 0.9937
|
154 |
+
2025-09-23 02:27:32,331 - logs_modchembert_bbbp_epochs100_batch_size64 - INFO - Epoch 19/100 | Train Loss: 0.0056 | Val mean-roc_auc_score: 0.9932
|
155 |
+
2025-09-23 02:27:36,055 - logs_modchembert_bbbp_epochs100_batch_size64 - INFO - Epoch 20/100 | Train Loss: 0.0051 | Val mean-roc_auc_score: 0.9934
|
156 |
+
2025-09-23 02:27:39,737 - logs_modchembert_bbbp_epochs100_batch_size64 - INFO - Epoch 21/100 | Train Loss: 0.0044 | Val mean-roc_auc_score: 0.9936
|
157 |
+
2025-09-23 02:27:43,792 - logs_modchembert_bbbp_epochs100_batch_size64 - INFO - Epoch 22/100 | Train Loss: 0.0042 | Val mean-roc_auc_score: 0.9936
|
158 |
+
2025-09-23 02:27:47,524 - logs_modchembert_bbbp_epochs100_batch_size64 - INFO - Epoch 23/100 | Train Loss: 0.0051 | Val mean-roc_auc_score: 0.9935
|
159 |
+
2025-09-23 02:27:51,237 - logs_modchembert_bbbp_epochs100_batch_size64 - INFO - Epoch 24/100 | Train Loss: 0.0065 | Val mean-roc_auc_score: 0.9942
|
160 |
+
2025-09-23 02:27:51,415 - logs_modchembert_bbbp_epochs100_batch_size64 - INFO - Global step of best model: 624
|
161 |
+
2025-09-23 02:27:51,929 - logs_modchembert_bbbp_epochs100_batch_size64 - INFO - Best model saved at epoch 24 with val mean-roc_auc_score: 0.9942
|
162 |
+
2025-09-23 02:27:55,594 - logs_modchembert_bbbp_epochs100_batch_size64 - INFO - Epoch 25/100 | Train Loss: 0.0089 | Val mean-roc_auc_score: 0.9936
|
163 |
+
2025-09-23 02:27:59,354 - logs_modchembert_bbbp_epochs100_batch_size64 - INFO - Epoch 26/100 | Train Loss: 0.0047 | Val mean-roc_auc_score: 0.9932
|
164 |
+
2025-09-23 02:28:03,456 - logs_modchembert_bbbp_epochs100_batch_size64 - INFO - Epoch 27/100 | Train Loss: 0.0013 | Val mean-roc_auc_score: 0.9936
|
165 |
+
2025-09-23 02:28:07,133 - logs_modchembert_bbbp_epochs100_batch_size64 - INFO - Epoch 28/100 | Train Loss: 0.0048 | Val mean-roc_auc_score: 0.9939
|
166 |
+
2025-09-23 02:28:10,799 - logs_modchembert_bbbp_epochs100_batch_size64 - INFO - Epoch 29/100 | Train Loss: 0.0060 | Val mean-roc_auc_score: 0.9939
|
167 |
+
2025-09-23 02:28:14,485 - logs_modchembert_bbbp_epochs100_batch_size64 - INFO - Epoch 30/100 | Train Loss: 0.0059 | Val mean-roc_auc_score: 0.9934
|
168 |
+
2025-09-23 02:28:18,175 - logs_modchembert_bbbp_epochs100_batch_size64 - INFO - Epoch 31/100 | Train Loss: 0.0040 | Val mean-roc_auc_score: 0.9935
|
169 |
+
2025-09-23 02:28:22,207 - logs_modchembert_bbbp_epochs100_batch_size64 - INFO - Epoch 32/100 | Train Loss: 0.0046 | Val mean-roc_auc_score: 0.9939
|
170 |
+
2025-09-23 02:28:25,996 - logs_modchembert_bbbp_epochs100_batch_size64 - INFO - Epoch 33/100 | Train Loss: 0.0033 | Val mean-roc_auc_score: 0.9938
|
171 |
+
2025-09-23 02:28:29,686 - logs_modchembert_bbbp_epochs100_batch_size64 - INFO - Epoch 34/100 | Train Loss: 0.0032 | Val mean-roc_auc_score: 0.9938
|
172 |
+
2025-09-23 02:28:33,551 - logs_modchembert_bbbp_epochs100_batch_size64 - INFO - Epoch 35/100 | Train Loss: 0.0046 | Val mean-roc_auc_score: 0.9937
|
173 |
+
2025-09-23 02:28:37,228 - logs_modchembert_bbbp_epochs100_batch_size64 - INFO - Epoch 36/100 | Train Loss: 0.0025 | Val mean-roc_auc_score: 0.9938
|
174 |
+
2025-09-23 02:28:41,257 - logs_modchembert_bbbp_epochs100_batch_size64 - INFO - Epoch 37/100 | Train Loss: 0.0023 | Val mean-roc_auc_score: 0.9939
|
175 |
+
2025-09-23 02:28:44,956 - logs_modchembert_bbbp_epochs100_batch_size64 - INFO - Epoch 38/100 | Train Loss: 0.0025 | Val mean-roc_auc_score: 0.9939
|
176 |
+
2025-09-23 02:28:49,890 - logs_modchembert_bbbp_epochs100_batch_size64 - INFO - Epoch 39/100 | Train Loss: 0.0035 | Val mean-roc_auc_score: 0.9940
|
177 |
+
2025-09-23 02:28:53,572 - logs_modchembert_bbbp_epochs100_batch_size64 - INFO - Epoch 40/100 | Train Loss: 0.0031 | Val mean-roc_auc_score: 0.9945
|
178 |
+
2025-09-23 02:28:53,709 - logs_modchembert_bbbp_epochs100_batch_size64 - INFO - Global step of best model: 1040
|
179 |
+
2025-09-23 02:28:54,225 - logs_modchembert_bbbp_epochs100_batch_size64 - INFO - Best model saved at epoch 40 with val mean-roc_auc_score: 0.9945
|
180 |
+
2025-09-23 02:28:57,921 - logs_modchembert_bbbp_epochs100_batch_size64 - INFO - Epoch 41/100 | Train Loss: 0.0136 | Val mean-roc_auc_score: 0.9896
|
181 |
+
2025-09-23 02:29:01,992 - logs_modchembert_bbbp_epochs100_batch_size64 - INFO - Epoch 42/100 | Train Loss: 0.0113 | Val mean-roc_auc_score: 0.9892
|
182 |
+
2025-09-23 02:29:05,674 - logs_modchembert_bbbp_epochs100_batch_size64 - INFO - Epoch 43/100 | Train Loss: 0.0045 | Val mean-roc_auc_score: 0.9904
|
183 |
+
2025-09-23 02:29:09,386 - logs_modchembert_bbbp_epochs100_batch_size64 - INFO - Epoch 44/100 | Train Loss: 0.0052 | Val mean-roc_auc_score: 0.9919
|
184 |
+
2025-09-23 02:29:13,056 - logs_modchembert_bbbp_epochs100_batch_size64 - INFO - Epoch 45/100 | Train Loss: 0.0105 | Val mean-roc_auc_score: 0.9901
|
185 |
+
2025-09-23 02:29:16,740 - logs_modchembert_bbbp_epochs100_batch_size64 - INFO - Epoch 46/100 | Train Loss: 0.0050 | Val mean-roc_auc_score: 0.9913
|
186 |
+
2025-09-23 02:29:20,867 - logs_modchembert_bbbp_epochs100_batch_size64 - INFO - Epoch 47/100 | Train Loss: 0.0034 | Val mean-roc_auc_score: 0.9913
|
187 |
+
2025-09-23 02:29:24,549 - logs_modchembert_bbbp_epochs100_batch_size64 - INFO - Epoch 48/100 | Train Loss: 0.0032 | Val mean-roc_auc_score: 0.9923
|
188 |
+
2025-09-23 02:29:28,262 - logs_modchembert_bbbp_epochs100_batch_size64 - INFO - Epoch 49/100 | Train Loss: 0.0024 | Val mean-roc_auc_score: 0.9924
|
189 |
+
2025-09-23 02:29:31,967 - logs_modchembert_bbbp_epochs100_batch_size64 - INFO - Epoch 50/100 | Train Loss: 0.0022 | Val mean-roc_auc_score: 0.9923
|
190 |
+
2025-09-23 02:29:35,651 - logs_modchembert_bbbp_epochs100_batch_size64 - INFO - Epoch 51/100 | Train Loss: 0.0028 | Val mean-roc_auc_score: 0.9924
|
191 |
+
2025-09-23 02:29:39,702 - logs_modchembert_bbbp_epochs100_batch_size64 - INFO - Epoch 52/100 | Train Loss: 0.0018 | Val mean-roc_auc_score: 0.9923
|
192 |
+
2025-09-23 02:29:43,421 - logs_modchembert_bbbp_epochs100_batch_size64 - INFO - Epoch 53/100 | Train Loss: 0.0018 | Val mean-roc_auc_score: 0.9923
|
193 |
+
2025-09-23 02:29:47,118 - logs_modchembert_bbbp_epochs100_batch_size64 - INFO - Epoch 54/100 | Train Loss: 0.0010 | Val mean-roc_auc_score: 0.9923
|
194 |
+
2025-09-23 02:29:50,864 - logs_modchembert_bbbp_epochs100_batch_size64 - INFO - Epoch 55/100 | Train Loss: 0.0020 | Val mean-roc_auc_score: 0.9925
|
195 |
+
2025-09-23 02:29:54,562 - logs_modchembert_bbbp_epochs100_batch_size64 - INFO - Epoch 56/100 | Train Loss: 0.0018 | Val mean-roc_auc_score: 0.9923
|
196 |
+
2025-09-23 02:29:58,635 - logs_modchembert_bbbp_epochs100_batch_size64 - INFO - Epoch 57/100 | Train Loss: 0.0019 | Val mean-roc_auc_score: 0.9922
|
197 |
+
2025-09-23 02:30:02,332 - logs_modchembert_bbbp_epochs100_batch_size64 - INFO - Epoch 58/100 | Train Loss: 0.0013 | Val mean-roc_auc_score: 0.9926
|
198 |
+
2025-09-23 02:30:06,022 - logs_modchembert_bbbp_epochs100_batch_size64 - INFO - Epoch 59/100 | Train Loss: 0.0015 | Val mean-roc_auc_score: 0.9925
|
199 |
+
2025-09-23 02:30:09,712 - logs_modchembert_bbbp_epochs100_batch_size64 - INFO - Epoch 60/100 | Train Loss: 0.0015 | Val mean-roc_auc_score: 0.9923
|
200 |
+
2025-09-23 02:30:13,367 - logs_modchembert_bbbp_epochs100_batch_size64 - INFO - Epoch 61/100 | Train Loss: 0.0017 | Val mean-roc_auc_score: 0.9920
|
201 |
+
2025-09-23 02:30:17,476 - logs_modchembert_bbbp_epochs100_batch_size64 - INFO - Epoch 62/100 | Train Loss: 0.0108 | Val mean-roc_auc_score: 0.9955
|
202 |
+
2025-09-23 02:30:17,626 - logs_modchembert_bbbp_epochs100_batch_size64 - INFO - Global step of best model: 1612
|
203 |
+
2025-09-23 02:30:18,147 - logs_modchembert_bbbp_epochs100_batch_size64 - INFO - Best model saved at epoch 62 with val mean-roc_auc_score: 0.9955
|
204 |
+
2025-09-23 02:30:21,816 - logs_modchembert_bbbp_epochs100_batch_size64 - INFO - Epoch 63/100 | Train Loss: 0.0523 | Val mean-roc_auc_score: 0.9921
|
205 |
+
2025-09-23 02:30:25,566 - logs_modchembert_bbbp_epochs100_batch_size64 - INFO - Epoch 64/100 | Train Loss: 0.0493 | Val mean-roc_auc_score: 0.9939
|
206 |
+
2025-09-23 02:30:29,220 - logs_modchembert_bbbp_epochs100_batch_size64 - INFO - Epoch 65/100 | Train Loss: 0.0149 | Val mean-roc_auc_score: 0.9909
|
207 |
+
2025-09-23 02:30:32,910 - logs_modchembert_bbbp_epochs100_batch_size64 - INFO - Epoch 66/100 | Train Loss: 0.0080 | Val mean-roc_auc_score: 0.9914
|
208 |
+
2025-09-23 02:30:37,039 - logs_modchembert_bbbp_epochs100_batch_size64 - INFO - Epoch 67/100 | Train Loss: 0.0069 | Val mean-roc_auc_score: 0.9915
|
209 |
+
2025-09-23 02:30:40,767 - logs_modchembert_bbbp_epochs100_batch_size64 - INFO - Epoch 68/100 | Train Loss: 0.0056 | Val mean-roc_auc_score: 0.9912
|
210 |
+
2025-09-23 02:30:44,465 - logs_modchembert_bbbp_epochs100_batch_size64 - INFO - Epoch 69/100 | Train Loss: 0.0049 | Val mean-roc_auc_score: 0.9915
|
211 |
+
2025-09-23 02:30:48,153 - logs_modchembert_bbbp_epochs100_batch_size64 - INFO - Epoch 70/100 | Train Loss: 0.0043 | Val mean-roc_auc_score: 0.9917
|
212 |
+
2025-09-23 02:30:51,783 - logs_modchembert_bbbp_epochs100_batch_size64 - INFO - Epoch 71/100 | Train Loss: 0.0042 | Val mean-roc_auc_score: 0.9914
|
213 |
+
2025-09-23 02:30:55,899 - logs_modchembert_bbbp_epochs100_batch_size64 - INFO - Epoch 72/100 | Train Loss: 0.0041 | Val mean-roc_auc_score: 0.9916
|
214 |
+
2025-09-23 02:30:59,701 - logs_modchembert_bbbp_epochs100_batch_size64 - INFO - Epoch 73/100 | Train Loss: 0.0034 | Val mean-roc_auc_score: 0.9916
|
215 |
+
2025-09-23 02:31:03,436 - logs_modchembert_bbbp_epochs100_batch_size64 - INFO - Epoch 74/100 | Train Loss: 0.0035 | Val mean-roc_auc_score: 0.9917
|
216 |
+
2025-09-23 02:31:07,098 - logs_modchembert_bbbp_epochs100_batch_size64 - INFO - Epoch 75/100 | Train Loss: 0.0032 | Val mean-roc_auc_score: 0.9920
|
217 |
+
2025-09-23 02:31:10,758 - logs_modchembert_bbbp_epochs100_batch_size64 - INFO - Epoch 76/100 | Train Loss: 0.0027 | Val mean-roc_auc_score: 0.9919
|
218 |
+
2025-09-23 02:31:15,984 - logs_modchembert_bbbp_epochs100_batch_size64 - INFO - Epoch 77/100 | Train Loss: 0.0015 | Val mean-roc_auc_score: 0.9920
|
219 |
+
2025-09-23 02:31:19,768 - logs_modchembert_bbbp_epochs100_batch_size64 - INFO - Epoch 78/100 | Train Loss: 0.0025 | Val mean-roc_auc_score: 0.9920
|
220 |
+
2025-09-23 02:31:23,523 - logs_modchembert_bbbp_epochs100_batch_size64 - INFO - Epoch 79/100 | Train Loss: 0.0027 | Val mean-roc_auc_score: 0.9918
|
221 |
+
2025-09-23 02:31:27,239 - logs_modchembert_bbbp_epochs100_batch_size64 - INFO - Epoch 80/100 | Train Loss: 0.0023 | Val mean-roc_auc_score: 0.9920
|
222 |
+
2025-09-23 02:31:30,951 - logs_modchembert_bbbp_epochs100_batch_size64 - INFO - Epoch 81/100 | Train Loss: 0.0041 | Val mean-roc_auc_score: 0.9920
|
223 |
+
2025-09-23 02:31:34,980 - logs_modchembert_bbbp_epochs100_batch_size64 - INFO - Epoch 82/100 | Train Loss: 0.0023 | Val mean-roc_auc_score: 0.9923
|
224 |
+
2025-09-23 02:31:38,620 - logs_modchembert_bbbp_epochs100_batch_size64 - INFO - Epoch 83/100 | Train Loss: 0.0020 | Val mean-roc_auc_score: 0.9922
|
225 |
+
2025-09-23 02:31:42,444 - logs_modchembert_bbbp_epochs100_batch_size64 - INFO - Epoch 84/100 | Train Loss: 0.0025 | Val mean-roc_auc_score: 0.9925
|
226 |
+
2025-09-23 02:31:46,138 - logs_modchembert_bbbp_epochs100_batch_size64 - INFO - Epoch 85/100 | Train Loss: 0.0026 | Val mean-roc_auc_score: 0.9925
|
227 |
+
2025-09-23 02:31:49,789 - logs_modchembert_bbbp_epochs100_batch_size64 - INFO - Epoch 86/100 | Train Loss: 0.0022 | Val mean-roc_auc_score: 0.9925
|
228 |
+
2025-09-23 02:31:53,873 - logs_modchembert_bbbp_epochs100_batch_size64 - INFO - Epoch 87/100 | Train Loss: 0.0021 | Val mean-roc_auc_score: 0.9925
|
229 |
+
2025-09-23 02:31:57,625 - logs_modchembert_bbbp_epochs100_batch_size64 - INFO - Epoch 88/100 | Train Loss: 0.0025 | Val mean-roc_auc_score: 0.9926
|
230 |
+
2025-09-23 02:32:01,308 - logs_modchembert_bbbp_epochs100_batch_size64 - INFO - Epoch 89/100 | Train Loss: 0.0028 | Val mean-roc_auc_score: 0.9932
|
231 |
+
2025-09-23 02:32:04,983 - logs_modchembert_bbbp_epochs100_batch_size64 - INFO - Epoch 90/100 | Train Loss: 0.0034 | Val mean-roc_auc_score: 0.9921
|
232 |
+
2025-09-23 02:32:08,702 - logs_modchembert_bbbp_epochs100_batch_size64 - INFO - Epoch 91/100 | Train Loss: 0.0111 | Val mean-roc_auc_score: 0.9928
|
233 |
+
2025-09-23 02:32:12,783 - logs_modchembert_bbbp_epochs100_batch_size64 - INFO - Epoch 92/100 | Train Loss: 0.0093 | Val mean-roc_auc_score: 0.9912
|
234 |
+
2025-09-23 02:32:16,479 - logs_modchembert_bbbp_epochs100_batch_size64 - INFO - Epoch 93/100 | Train Loss: 0.0066 | Val mean-roc_auc_score: 0.9916
|
235 |
+
2025-09-23 02:32:20,211 - logs_modchembert_bbbp_epochs100_batch_size64 - INFO - Epoch 94/100 | Train Loss: 0.0053 | Val mean-roc_auc_score: 0.9913
|
236 |
+
2025-09-23 02:32:23,928 - logs_modchembert_bbbp_epochs100_batch_size64 - INFO - Epoch 95/100 | Train Loss: 0.0042 | Val mean-roc_auc_score: 0.9917
|
237 |
+
2025-09-23 02:32:27,586 - logs_modchembert_bbbp_epochs100_batch_size64 - INFO - Epoch 96/100 | Train Loss: 0.0035 | Val mean-roc_auc_score: 0.9915
|
238 |
+
2025-09-23 02:32:31,663 - logs_modchembert_bbbp_epochs100_batch_size64 - INFO - Epoch 97/100 | Train Loss: 0.0039 | Val mean-roc_auc_score: 0.9917
|
239 |
+
2025-09-23 02:32:35,395 - logs_modchembert_bbbp_epochs100_batch_size64 - INFO - Epoch 98/100 | Train Loss: 0.0030 | Val mean-roc_auc_score: 0.9919
|
240 |
+
2025-09-23 02:32:39,133 - logs_modchembert_bbbp_epochs100_batch_size64 - INFO - Epoch 99/100 | Train Loss: 0.0027 | Val mean-roc_auc_score: 0.9920
|
241 |
+
2025-09-23 02:32:42,892 - logs_modchembert_bbbp_epochs100_batch_size64 - INFO - Epoch 100/100 | Train Loss: 0.0030 | Val mean-roc_auc_score: 0.9919
|
242 |
+
2025-09-23 02:32:43,493 - logs_modchembert_bbbp_epochs100_batch_size64 - INFO - Test mean-roc_auc_score: 0.7727
|
243 |
+
2025-09-23 02:32:43,893 - logs_modchembert_bbbp_epochs100_batch_size64 - INFO - Starting triplicate run 3 for dataset bbbp at 2025-09-23_02-32-43
|
244 |
+
2025-09-23 02:32:47,002 - logs_modchembert_bbbp_epochs100_batch_size64 - INFO - Epoch 1/100 | Train Loss: 0.3149 | Val mean-roc_auc_score: 0.9929
|
245 |
+
2025-09-23 02:32:47,002 - logs_modchembert_bbbp_epochs100_batch_size64 - INFO - Global step of best model: 26
|
246 |
+
2025-09-23 02:32:47,504 - logs_modchembert_bbbp_epochs100_batch_size64 - INFO - Best model saved at epoch 1 with val mean-roc_auc_score: 0.9929
|
247 |
+
2025-09-23 02:32:51,187 - logs_modchembert_bbbp_epochs100_batch_size64 - INFO - Epoch 2/100 | Train Loss: 0.1755 | Val mean-roc_auc_score: 0.9955
|
248 |
+
2025-09-23 02:32:51,354 - logs_modchembert_bbbp_epochs100_batch_size64 - INFO - Global step of best model: 52
|
249 |
+
2025-09-23 02:32:51,867 - logs_modchembert_bbbp_epochs100_batch_size64 - INFO - Best model saved at epoch 2 with val mean-roc_auc_score: 0.9955
|
250 |
+
2025-09-23 02:32:55,532 - logs_modchembert_bbbp_epochs100_batch_size64 - INFO - Epoch 3/100 | Train Loss: 0.1352 | Val mean-roc_auc_score: 0.9959
|
251 |
+
2025-09-23 02:32:55,706 - logs_modchembert_bbbp_epochs100_batch_size64 - INFO - Global step of best model: 78
|
252 |
+
2025-09-23 02:32:56,221 - logs_modchembert_bbbp_epochs100_batch_size64 - INFO - Best model saved at epoch 3 with val mean-roc_auc_score: 0.9959
|
253 |
+
2025-09-23 02:32:59,960 - logs_modchembert_bbbp_epochs100_batch_size64 - INFO - Epoch 4/100 | Train Loss: 0.1094 | Val mean-roc_auc_score: 0.9959
|
254 |
+
2025-09-23 02:33:00,136 - logs_modchembert_bbbp_epochs100_batch_size64 - INFO - Global step of best model: 104
|
255 |
+
2025-09-23 02:33:00,639 - logs_modchembert_bbbp_epochs100_batch_size64 - INFO - Best model saved at epoch 4 with val mean-roc_auc_score: 0.9959
|
256 |
+
2025-09-23 02:33:04,279 - logs_modchembert_bbbp_epochs100_batch_size64 - INFO - Epoch 5/100 | Train Loss: 0.0883 | Val mean-roc_auc_score: 0.9955
|
257 |
+
2025-09-23 02:33:07,999 - logs_modchembert_bbbp_epochs100_batch_size64 - INFO - Epoch 6/100 | Train Loss: 0.0730 | Val mean-roc_auc_score: 0.9966
|
258 |
+
2025-09-23 02:33:08,492 - logs_modchembert_bbbp_epochs100_batch_size64 - INFO - Global step of best model: 156
|
259 |
+
2025-09-23 02:33:08,999 - logs_modchembert_bbbp_epochs100_batch_size64 - INFO - Best model saved at epoch 6 with val mean-roc_auc_score: 0.9966
|
260 |
+
2025-09-23 02:33:12,827 - logs_modchembert_bbbp_epochs100_batch_size64 - INFO - Epoch 7/100 | Train Loss: 0.0496 | Val mean-roc_auc_score: 0.9953
|
261 |
+
2025-09-23 02:33:16,495 - logs_modchembert_bbbp_epochs100_batch_size64 - INFO - Epoch 8/100 | Train Loss: 0.0376 | Val mean-roc_auc_score: 0.9954
|
262 |
+
2025-09-23 02:33:20,292 - logs_modchembert_bbbp_epochs100_batch_size64 - INFO - Epoch 9/100 | Train Loss: 0.0297 | Val mean-roc_auc_score: 0.9958
|
263 |
+
2025-09-23 02:33:24,012 - logs_modchembert_bbbp_epochs100_batch_size64 - INFO - Epoch 10/100 | Train Loss: 0.0269 | Val mean-roc_auc_score: 0.9942
|
264 |
+
2025-09-23 02:33:27,686 - logs_modchembert_bbbp_epochs100_batch_size64 - INFO - Epoch 11/100 | Train Loss: 0.0201 | Val mean-roc_auc_score: 0.9949
|
265 |
+
2025-09-23 02:33:31,739 - logs_modchembert_bbbp_epochs100_batch_size64 - INFO - Epoch 12/100 | Train Loss: 0.0167 | Val mean-roc_auc_score: 0.9956
|
266 |
+
2025-09-23 02:33:35,481 - logs_modchembert_bbbp_epochs100_batch_size64 - INFO - Epoch 13/100 | Train Loss: 0.0136 | Val mean-roc_auc_score: 0.9956
|
267 |
+
2025-09-23 02:33:39,150 - logs_modchembert_bbbp_epochs100_batch_size64 - INFO - Epoch 14/100 | Train Loss: 0.0112 | Val mean-roc_auc_score: 0.9948
|
268 |
+
2025-09-23 02:33:42,876 - logs_modchembert_bbbp_epochs100_batch_size64 - INFO - Epoch 15/100 | Train Loss: 0.0108 | Val mean-roc_auc_score: 0.9950
|
269 |
+
2025-09-23 02:33:46,579 - logs_modchembert_bbbp_epochs100_batch_size64 - INFO - Epoch 16/100 | Train Loss: 0.0077 | Val mean-roc_auc_score: 0.9950
|
270 |
+
2025-09-23 02:33:50,628 - logs_modchembert_bbbp_epochs100_batch_size64 - INFO - Epoch 17/100 | Train Loss: 0.0079 | Val mean-roc_auc_score: 0.9949
|
271 |
+
2025-09-23 02:33:54,373 - logs_modchembert_bbbp_epochs100_batch_size64 - INFO - Epoch 18/100 | Train Loss: 0.0069 | Val mean-roc_auc_score: 0.9948
|
272 |
+
2025-09-23 02:33:58,112 - logs_modchembert_bbbp_epochs100_batch_size64 - INFO - Epoch 19/100 | Train Loss: 0.0081 | Val mean-roc_auc_score: 0.9945
|
273 |
+
2025-09-23 02:34:01,798 - logs_modchembert_bbbp_epochs100_batch_size64 - INFO - Epoch 20/100 | Train Loss: 0.0079 | Val mean-roc_auc_score: 0.9950
|
274 |
+
2025-09-23 02:34:05,501 - logs_modchembert_bbbp_epochs100_batch_size64 - INFO - Epoch 21/100 | Train Loss: 0.0074 | Val mean-roc_auc_score: 0.9946
|
275 |
+
2025-09-23 02:34:09,600 - logs_modchembert_bbbp_epochs100_batch_size64 - INFO - Epoch 22/100 | Train Loss: 0.0082 | Val mean-roc_auc_score: 0.9950
|
276 |
+
2025-09-23 02:34:13,306 - logs_modchembert_bbbp_epochs100_batch_size64 - INFO - Epoch 23/100 | Train Loss: 0.0128 | Val mean-roc_auc_score: 0.9941
|
277 |
+
2025-09-23 02:34:17,074 - logs_modchembert_bbbp_epochs100_batch_size64 - INFO - Epoch 24/100 | Train Loss: 0.0142 | Val mean-roc_auc_score: 0.9957
|
278 |
+
2025-09-23 02:34:20,811 - logs_modchembert_bbbp_epochs100_batch_size64 - INFO - Epoch 25/100 | Train Loss: 0.0222 | Val mean-roc_auc_score: 0.9886
|
279 |
+
2025-09-23 02:34:24,515 - logs_modchembert_bbbp_epochs100_batch_size64 - INFO - Epoch 26/100 | Train Loss: 0.0130 | Val mean-roc_auc_score: 0.9899
|
280 |
+
2025-09-23 02:34:28,579 - logs_modchembert_bbbp_epochs100_batch_size64 - INFO - Epoch 27/100 | Train Loss: 0.0083 | Val mean-roc_auc_score: 0.9917
|
281 |
+
2025-09-23 02:34:32,226 - logs_modchembert_bbbp_epochs100_batch_size64 - INFO - Epoch 28/100 | Train Loss: 0.0057 | Val mean-roc_auc_score: 0.9927
|
282 |
+
2025-09-23 02:34:35,942 - logs_modchembert_bbbp_epochs100_batch_size64 - INFO - Epoch 29/100 | Train Loss: 0.0048 | Val mean-roc_auc_score: 0.9926
|
283 |
+
2025-09-23 02:34:39,587 - logs_modchembert_bbbp_epochs100_batch_size64 - INFO - Epoch 30/100 | Train Loss: 0.0043 | Val mean-roc_auc_score: 0.9924
|
284 |
+
2025-09-23 02:34:43,278 - logs_modchembert_bbbp_epochs100_batch_size64 - INFO - Epoch 31/100 | Train Loss: 0.0031 | Val mean-roc_auc_score: 0.9922
|
285 |
+
2025-09-23 02:34:47,355 - logs_modchembert_bbbp_epochs100_batch_size64 - INFO - Epoch 32/100 | Train Loss: 0.0040 | Val mean-roc_auc_score: 0.9922
|
286 |
+
2025-09-23 02:34:51,108 - logs_modchembert_bbbp_epochs100_batch_size64 - INFO - Epoch 33/100 | Train Loss: 0.0040 | Val mean-roc_auc_score: 0.9922
|
287 |
+
2025-09-23 02:34:54,799 - logs_modchembert_bbbp_epochs100_batch_size64 - INFO - Epoch 34/100 | Train Loss: 0.0038 | Val mean-roc_auc_score: 0.9923
|
288 |
+
2025-09-23 02:34:58,464 - logs_modchembert_bbbp_epochs100_batch_size64 - INFO - Epoch 35/100 | Train Loss: 0.0028 | Val mean-roc_auc_score: 0.9922
|
289 |
+
2025-09-23 02:35:02,066 - logs_modchembert_bbbp_epochs100_batch_size64 - INFO - Epoch 36/100 | Train Loss: 0.0049 | Val mean-roc_auc_score: 0.9928
|
290 |
+
2025-09-23 02:35:06,074 - logs_modchembert_bbbp_epochs100_batch_size64 - INFO - Epoch 37/100 | Train Loss: 0.0034 | Val mean-roc_auc_score: 0.9922
|
291 |
+
2025-09-23 02:35:09,789 - logs_modchembert_bbbp_epochs100_batch_size64 - INFO - Epoch 38/100 | Train Loss: 0.0033 | Val mean-roc_auc_score: 0.9917
|
292 |
+
2025-09-23 02:35:14,702 - logs_modchembert_bbbp_epochs100_batch_size64 - INFO - Epoch 39/100 | Train Loss: 0.0034 | Val mean-roc_auc_score: 0.9921
|
293 |
+
2025-09-23 02:35:18,416 - logs_modchembert_bbbp_epochs100_batch_size64 - INFO - Epoch 40/100 | Train Loss: 0.0033 | Val mean-roc_auc_score: 0.9916
|
294 |
+
2025-09-23 02:35:22,148 - logs_modchembert_bbbp_epochs100_batch_size64 - INFO - Epoch 41/100 | Train Loss: 0.0033 | Val mean-roc_auc_score: 0.9914
|
295 |
+
2025-09-23 02:35:26,191 - logs_modchembert_bbbp_epochs100_batch_size64 - INFO - Epoch 42/100 | Train Loss: 0.0041 | Val mean-roc_auc_score: 0.9910
|
296 |
+
2025-09-23 02:35:29,858 - logs_modchembert_bbbp_epochs100_batch_size64 - INFO - Epoch 43/100 | Train Loss: 0.0056 | Val mean-roc_auc_score: 0.9909
|
297 |
+
2025-09-23 02:35:33,554 - logs_modchembert_bbbp_epochs100_batch_size64 - INFO - Epoch 44/100 | Train Loss: 0.0028 | Val mean-roc_auc_score: 0.9915
|
298 |
+
2025-09-23 02:35:37,252 - logs_modchembert_bbbp_epochs100_batch_size64 - INFO - Epoch 45/100 | Train Loss: 0.0026 | Val mean-roc_auc_score: 0.9917
|
299 |
+
2025-09-23 02:35:40,917 - logs_modchembert_bbbp_epochs100_batch_size64 - INFO - Epoch 46/100 | Train Loss: 0.0019 | Val mean-roc_auc_score: 0.9911
|
300 |
+
2025-09-23 02:35:44,934 - logs_modchembert_bbbp_epochs100_batch_size64 - INFO - Epoch 47/100 | Train Loss: 0.0024 | Val mean-roc_auc_score: 0.9907
|
301 |
+
2025-09-23 02:35:48,596 - logs_modchembert_bbbp_epochs100_batch_size64 - INFO - Epoch 48/100 | Train Loss: 0.0030 | Val mean-roc_auc_score: 0.9909
|
302 |
+
2025-09-23 02:35:52,285 - logs_modchembert_bbbp_epochs100_batch_size64 - INFO - Epoch 49/100 | Train Loss: 0.0026 | Val mean-roc_auc_score: 0.9909
|
303 |
+
2025-09-23 02:35:55,971 - logs_modchembert_bbbp_epochs100_batch_size64 - INFO - Epoch 50/100 | Train Loss: 0.0021 | Val mean-roc_auc_score: 0.9909
|
304 |
+
2025-09-23 02:35:59,749 - logs_modchembert_bbbp_epochs100_batch_size64 - INFO - Epoch 51/100 | Train Loss: 0.0025 | Val mean-roc_auc_score: 0.9908
|
305 |
+
2025-09-23 02:36:03,805 - logs_modchembert_bbbp_epochs100_batch_size64 - INFO - Epoch 52/100 | Train Loss: 0.0029 | Val mean-roc_auc_score: 0.9924
|
306 |
+
2025-09-23 02:36:07,522 - logs_modchembert_bbbp_epochs100_batch_size64 - INFO - Epoch 53/100 | Train Loss: 0.0128 | Val mean-roc_auc_score: 0.9941
|
307 |
+
2025-09-23 02:36:11,289 - logs_modchembert_bbbp_epochs100_batch_size64 - INFO - Epoch 54/100 | Train Loss: 0.0287 | Val mean-roc_auc_score: 0.9911
|
308 |
+
2025-09-23 02:36:15,025 - logs_modchembert_bbbp_epochs100_batch_size64 - INFO - Epoch 55/100 | Train Loss: 0.0116 | Val mean-roc_auc_score: 0.9908
|
309 |
+
2025-09-23 02:36:18,722 - logs_modchembert_bbbp_epochs100_batch_size64 - INFO - Epoch 56/100 | Train Loss: 0.0065 | Val mean-roc_auc_score: 0.9909
|
310 |
+
2025-09-23 02:36:22,757 - logs_modchembert_bbbp_epochs100_batch_size64 - INFO - Epoch 57/100 | Train Loss: 0.0050 | Val mean-roc_auc_score: 0.9908
|
311 |
+
2025-09-23 02:36:26,443 - logs_modchembert_bbbp_epochs100_batch_size64 - INFO - Epoch 58/100 | Train Loss: 0.0075 | Val mean-roc_auc_score: 0.9908
|
312 |
+
2025-09-23 02:36:30,140 - logs_modchembert_bbbp_epochs100_batch_size64 - INFO - Epoch 59/100 | Train Loss: 0.0062 | Val mean-roc_auc_score: 0.9906
|
313 |
+
2025-09-23 02:36:33,802 - logs_modchembert_bbbp_epochs100_batch_size64 - INFO - Epoch 60/100 | Train Loss: 0.0035 | Val mean-roc_auc_score: 0.9910
|
314 |
+
2025-09-23 02:36:37,523 - logs_modchembert_bbbp_epochs100_batch_size64 - INFO - Epoch 61/100 | Train Loss: 0.0027 | Val mean-roc_auc_score: 0.9910
|
315 |
+
2025-09-23 02:36:41,560 - logs_modchembert_bbbp_epochs100_batch_size64 - INFO - Epoch 62/100 | Train Loss: 0.0025 | Val mean-roc_auc_score: 0.9909
|
316 |
+
2025-09-23 02:36:45,195 - logs_modchembert_bbbp_epochs100_batch_size64 - INFO - Epoch 63/100 | Train Loss: 0.0031 | Val mean-roc_auc_score: 0.9911
|
317 |
+
2025-09-23 02:36:48,891 - logs_modchembert_bbbp_epochs100_batch_size64 - INFO - Epoch 64/100 | Train Loss: 0.0029 | Val mean-roc_auc_score: 0.9912
|
318 |
+
2025-09-23 02:36:52,620 - logs_modchembert_bbbp_epochs100_batch_size64 - INFO - Epoch 65/100 | Train Loss: 0.0024 | Val mean-roc_auc_score: 0.9906
|
319 |
+
2025-09-23 02:36:56,351 - logs_modchembert_bbbp_epochs100_batch_size64 - INFO - Epoch 66/100 | Train Loss: 0.0020 | Val mean-roc_auc_score: 0.9904
|
320 |
+
2025-09-23 02:37:00,438 - logs_modchembert_bbbp_epochs100_batch_size64 - INFO - Epoch 67/100 | Train Loss: 0.0023 | Val mean-roc_auc_score: 0.9905
|
321 |
+
2025-09-23 02:37:04,145 - logs_modchembert_bbbp_epochs100_batch_size64 - INFO - Epoch 68/100 | Train Loss: 0.0021 | Val mean-roc_auc_score: 0.9902
|
322 |
+
2025-09-23 02:37:07,780 - logs_modchembert_bbbp_epochs100_batch_size64 - INFO - Epoch 69/100 | Train Loss: 0.0017 | Val mean-roc_auc_score: 0.9903
|
323 |
+
2025-09-23 02:37:11,460 - logs_modchembert_bbbp_epochs100_batch_size64 - INFO - Epoch 70/100 | Train Loss: 0.0011 | Val mean-roc_auc_score: 0.9904
|
324 |
+
2025-09-23 02:37:15,161 - logs_modchembert_bbbp_epochs100_batch_size64 - INFO - Epoch 71/100 | Train Loss: 0.0015 | Val mean-roc_auc_score: 0.9903
|
325 |
+
2025-09-23 02:37:19,171 - logs_modchembert_bbbp_epochs100_batch_size64 - INFO - Epoch 72/100 | Train Loss: 0.0015 | Val mean-roc_auc_score: 0.9904
|
326 |
+
2025-09-23 02:37:22,841 - logs_modchembert_bbbp_epochs100_batch_size64 - INFO - Epoch 73/100 | Train Loss: 0.0015 | Val mean-roc_auc_score: 0.9905
|
327 |
+
2025-09-23 02:37:26,516 - logs_modchembert_bbbp_epochs100_batch_size64 - INFO - Epoch 74/100 | Train Loss: 0.0015 | Val mean-roc_auc_score: 0.9904
|
328 |
+
2025-09-23 02:37:30,250 - logs_modchembert_bbbp_epochs100_batch_size64 - INFO - Epoch 75/100 | Train Loss: 0.0014 | Val mean-roc_auc_score: 0.9905
|
329 |
+
2025-09-23 02:37:33,970 - logs_modchembert_bbbp_epochs100_batch_size64 - INFO - Epoch 76/100 | Train Loss: 0.0017 | Val mean-roc_auc_score: 0.9903
|
330 |
+
2025-09-23 02:37:39,187 - logs_modchembert_bbbp_epochs100_batch_size64 - INFO - Epoch 77/100 | Train Loss: 0.0008 | Val mean-roc_auc_score: 0.9903
|
331 |
+
2025-09-23 02:37:42,883 - logs_modchembert_bbbp_epochs100_batch_size64 - INFO - Epoch 78/100 | Train Loss: 0.0012 | Val mean-roc_auc_score: 0.9905
|
332 |
+
2025-09-23 02:37:46,597 - logs_modchembert_bbbp_epochs100_batch_size64 - INFO - Epoch 79/100 | Train Loss: 0.0014 | Val mean-roc_auc_score: 0.9905
|
333 |
+
2025-09-23 02:37:50,283 - logs_modchembert_bbbp_epochs100_batch_size64 - INFO - Epoch 80/100 | Train Loss: 0.0012 | Val mean-roc_auc_score: 0.9905
|
334 |
+
2025-09-23 02:37:54,021 - logs_modchembert_bbbp_epochs100_batch_size64 - INFO - Epoch 81/100 | Train Loss: 0.0030 | Val mean-roc_auc_score: 0.9901
|
335 |
+
2025-09-23 02:37:58,096 - logs_modchembert_bbbp_epochs100_batch_size64 - INFO - Epoch 82/100 | Train Loss: 0.0018 | Val mean-roc_auc_score: 0.9905
|
336 |
+
2025-09-23 02:38:01,797 - logs_modchembert_bbbp_epochs100_batch_size64 - INFO - Epoch 83/100 | Train Loss: 0.0013 | Val mean-roc_auc_score: 0.9903
|
337 |
+
2025-09-23 02:38:05,485 - logs_modchembert_bbbp_epochs100_batch_size64 - INFO - Epoch 84/100 | Train Loss: 0.0015 | Val mean-roc_auc_score: 0.9905
|
338 |
+
2025-09-23 02:38:09,163 - logs_modchembert_bbbp_epochs100_batch_size64 - INFO - Epoch 85/100 | Train Loss: 0.0010 | Val mean-roc_auc_score: 0.9905
|
339 |
+
2025-09-23 02:38:12,832 - logs_modchembert_bbbp_epochs100_batch_size64 - INFO - Epoch 86/100 | Train Loss: 0.0017 | Val mean-roc_auc_score: 0.9902
|
340 |
+
2025-09-23 02:38:16,928 - logs_modchembert_bbbp_epochs100_batch_size64 - INFO - Epoch 87/100 | Train Loss: 0.0010 | Val mean-roc_auc_score: 0.9902
|
341 |
+
2025-09-23 02:38:20,595 - logs_modchembert_bbbp_epochs100_batch_size64 - INFO - Epoch 88/100 | Train Loss: 0.0010 | Val mean-roc_auc_score: 0.9902
|
342 |
+
2025-09-23 02:38:24,267 - logs_modchembert_bbbp_epochs100_batch_size64 - INFO - Epoch 89/100 | Train Loss: 0.0008 | Val mean-roc_auc_score: 0.9899
|
343 |
+
2025-09-23 02:38:27,963 - logs_modchembert_bbbp_epochs100_batch_size64 - INFO - Epoch 90/100 | Train Loss: 0.0012 | Val mean-roc_auc_score: 0.9905
|
344 |
+
2025-09-23 02:38:31,640 - logs_modchembert_bbbp_epochs100_batch_size64 - INFO - Epoch 91/100 | Train Loss: 0.0008 | Val mean-roc_auc_score: 0.9904
|
345 |
+
2025-09-23 02:38:35,695 - logs_modchembert_bbbp_epochs100_batch_size64 - INFO - Epoch 92/100 | Train Loss: 0.0016 | Val mean-roc_auc_score: 0.9905
|
346 |
+
2025-09-23 02:38:39,392 - logs_modchembert_bbbp_epochs100_batch_size64 - INFO - Epoch 93/100 | Train Loss: 0.0014 | Val mean-roc_auc_score: 0.9903
|
347 |
+
2025-09-23 02:38:43,109 - logs_modchembert_bbbp_epochs100_batch_size64 - INFO - Epoch 94/100 | Train Loss: 0.0012 | Val mean-roc_auc_score: 0.9903
|
348 |
+
2025-09-23 02:38:46,783 - logs_modchembert_bbbp_epochs100_batch_size64 - INFO - Epoch 95/100 | Train Loss: 0.0010 | Val mean-roc_auc_score: 0.9903
|
349 |
+
2025-09-23 02:38:50,474 - logs_modchembert_bbbp_epochs100_batch_size64 - INFO - Epoch 96/100 | Train Loss: 0.0008 | Val mean-roc_auc_score: 0.9902
|
350 |
+
2025-09-23 02:38:54,543 - logs_modchembert_bbbp_epochs100_batch_size64 - INFO - Epoch 97/100 | Train Loss: 0.0007 | Val mean-roc_auc_score: 0.9903
|
351 |
+
2025-09-23 02:38:58,246 - logs_modchembert_bbbp_epochs100_batch_size64 - INFO - Epoch 98/100 | Train Loss: 0.0007 | Val mean-roc_auc_score: 0.9902
|
352 |
+
2025-09-23 02:39:01,936 - logs_modchembert_bbbp_epochs100_batch_size64 - INFO - Epoch 99/100 | Train Loss: 0.0008 | Val mean-roc_auc_score: 0.9899
|
353 |
+
2025-09-23 02:39:05,642 - logs_modchembert_bbbp_epochs100_batch_size64 - INFO - Epoch 100/100 | Train Loss: 0.0008 | Val mean-roc_auc_score: 0.9901
|
354 |
+
2025-09-23 02:39:06,217 - logs_modchembert_bbbp_epochs100_batch_size64 - INFO - Test mean-roc_auc_score: 0.7433
|
355 |
+
2025-09-23 02:39:06,616 - logs_modchembert_bbbp_epochs100_batch_size64 - INFO - Final Triplicate Test Results — Avg mean-roc_auc_score: 0.7573, Std Dev: 0.0120
|
logs_modchembert_classification_ModChemBERT-MLM-DAPT-TAFT-OPT/modchembert_deepchem_splits_run_clintox_epochs100_batch_size32_20250923_040853.log
ADDED
@@ -0,0 +1,359 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
2025-09-23 04:08:53,606 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Running benchmark for dataset: clintox
|
2 |
+
2025-09-23 04:08:53,606 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - dataset: clintox, tasks: ['FDA_APPROVED', 'CT_TOX'], epochs: 100, learning rate: 3e-05
|
3 |
+
2025-09-23 04:08:53,610 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Starting triplicate run 1 for dataset clintox at 2025-09-23_04-08-53
|
4 |
+
2025-09-23 04:08:57,113 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 1/100 | Train Loss: 0.1208 | Val mean-roc_auc_score: 0.9272
|
5 |
+
2025-09-23 04:08:57,113 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Global step of best model: 37
|
6 |
+
2025-09-23 04:08:57,637 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Best model saved at epoch 1 with val mean-roc_auc_score: 0.9272
|
7 |
+
2025-09-23 04:09:01,716 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 2/100 | Train Loss: 0.0355 | Val mean-roc_auc_score: 0.9818
|
8 |
+
2025-09-23 04:09:01,884 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Global step of best model: 74
|
9 |
+
2025-09-23 04:09:02,396 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Best model saved at epoch 2 with val mean-roc_auc_score: 0.9818
|
10 |
+
2025-09-23 04:09:06,562 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 3/100 | Train Loss: 0.0162 | Val mean-roc_auc_score: 0.9434
|
11 |
+
2025-09-23 04:09:10,619 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 4/100 | Train Loss: 0.0329 | Val mean-roc_auc_score: 0.9794
|
12 |
+
2025-09-23 04:09:14,647 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 5/100 | Train Loss: 0.0209 | Val mean-roc_auc_score: 0.9696
|
13 |
+
2025-09-23 04:09:18,760 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 6/100 | Train Loss: 0.0170 | Val mean-roc_auc_score: 0.9868
|
14 |
+
2025-09-23 04:09:19,284 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Global step of best model: 222
|
15 |
+
2025-09-23 04:09:19,801 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Best model saved at epoch 6 with val mean-roc_auc_score: 0.9868
|
16 |
+
2025-09-23 04:09:23,908 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 7/100 | Train Loss: 0.0119 | Val mean-roc_auc_score: 0.9725
|
17 |
+
2025-09-23 04:09:28,051 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 8/100 | Train Loss: 0.0146 | Val mean-roc_auc_score: 0.9866
|
18 |
+
2025-09-23 04:09:32,139 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 9/100 | Train Loss: 0.0136 | Val mean-roc_auc_score: 0.9838
|
19 |
+
2025-09-23 04:09:36,237 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 10/100 | Train Loss: 0.0106 | Val mean-roc_auc_score: 0.9828
|
20 |
+
2025-09-23 04:09:40,367 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 11/100 | Train Loss: 0.0059 | Val mean-roc_auc_score: 0.9840
|
21 |
+
2025-09-23 04:09:44,862 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 12/100 | Train Loss: 0.0102 | Val mean-roc_auc_score: 0.9799
|
22 |
+
2025-09-23 04:09:49,045 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 13/100 | Train Loss: 0.0134 | Val mean-roc_auc_score: 0.9860
|
23 |
+
2025-09-23 04:09:53,123 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 14/100 | Train Loss: 0.0110 | Val mean-roc_auc_score: 0.9852
|
24 |
+
2025-09-23 04:09:57,246 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 15/100 | Train Loss: 0.0067 | Val mean-roc_auc_score: 0.9870
|
25 |
+
2025-09-23 04:09:57,382 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Global step of best model: 555
|
26 |
+
2025-09-23 04:09:57,897 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Best model saved at epoch 15 with val mean-roc_auc_score: 0.9870
|
27 |
+
2025-09-23 04:10:01,993 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 16/100 | Train Loss: 0.0058 | Val mean-roc_auc_score: 0.9882
|
28 |
+
2025-09-23 04:10:02,559 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Global step of best model: 592
|
29 |
+
2025-09-23 04:10:03,089 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Best model saved at epoch 16 with val mean-roc_auc_score: 0.9882
|
30 |
+
2025-09-23 04:10:07,281 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 17/100 | Train Loss: 0.0051 | Val mean-roc_auc_score: 0.9881
|
31 |
+
2025-09-23 04:10:11,376 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 18/100 | Train Loss: 0.0056 | Val mean-roc_auc_score: 0.9876
|
32 |
+
2025-09-23 04:10:15,463 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 19/100 | Train Loss: 0.0019 | Val mean-roc_auc_score: 0.9851
|
33 |
+
2025-09-23 04:10:19,519 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 20/100 | Train Loss: 0.0048 | Val mean-roc_auc_score: 0.9874
|
34 |
+
2025-09-23 04:10:23,624 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 21/100 | Train Loss: 0.0037 | Val mean-roc_auc_score: 0.9881
|
35 |
+
2025-09-23 04:10:28,080 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 22/100 | Train Loss: 0.0028 | Val mean-roc_auc_score: 0.9893
|
36 |
+
2025-09-23 04:10:28,253 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Global step of best model: 814
|
37 |
+
2025-09-23 04:10:28,765 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Best model saved at epoch 22 with val mean-roc_auc_score: 0.9893
|
38 |
+
2025-09-23 04:10:32,829 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 23/100 | Train Loss: 0.0073 | Val mean-roc_auc_score: 0.9882
|
39 |
+
2025-09-23 04:10:36,911 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 24/100 | Train Loss: 0.0128 | Val mean-roc_auc_score: 0.9870
|
40 |
+
2025-09-23 04:10:41,029 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 25/100 | Train Loss: 0.0052 | Val mean-roc_auc_score: 0.9894
|
41 |
+
2025-09-23 04:10:41,203 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Global step of best model: 925
|
42 |
+
2025-09-23 04:10:41,726 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Best model saved at epoch 25 with val mean-roc_auc_score: 0.9894
|
43 |
+
2025-09-23 04:10:45,755 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 26/100 | Train Loss: 0.0058 | Val mean-roc_auc_score: 0.9894
|
44 |
+
2025-09-23 04:10:51,460 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 27/100 | Train Loss: 0.0051 | Val mean-roc_auc_score: 0.9894
|
45 |
+
2025-09-23 04:10:55,561 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 28/100 | Train Loss: 0.0037 | Val mean-roc_auc_score: 0.9899
|
46 |
+
2025-09-23 04:10:55,739 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Global step of best model: 1036
|
47 |
+
2025-09-23 04:10:56,260 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Best model saved at epoch 28 with val mean-roc_auc_score: 0.9899
|
48 |
+
2025-09-23 04:11:00,346 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 29/100 | Train Loss: 0.0036 | Val mean-roc_auc_score: 0.9894
|
49 |
+
2025-09-23 04:11:04,472 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 30/100 | Train Loss: 0.0064 | Val mean-roc_auc_score: 0.9894
|
50 |
+
2025-09-23 04:11:08,625 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 31/100 | Train Loss: 0.0033 | Val mean-roc_auc_score: 0.9894
|
51 |
+
2025-09-23 04:11:13,108 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 32/100 | Train Loss: 0.0030 | Val mean-roc_auc_score: 0.9894
|
52 |
+
2025-09-23 04:11:17,208 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 33/100 | Train Loss: 0.0047 | Val mean-roc_auc_score: 0.9894
|
53 |
+
2025-09-23 04:11:21,292 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 34/100 | Train Loss: 0.0030 | Val mean-roc_auc_score: 0.9876
|
54 |
+
2025-09-23 04:11:25,387 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 35/100 | Train Loss: 0.0028 | Val mean-roc_auc_score: 0.9876
|
55 |
+
2025-09-23 04:11:29,512 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 36/100 | Train Loss: 0.0032 | Val mean-roc_auc_score: 0.9876
|
56 |
+
2025-09-23 04:11:34,054 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 37/100 | Train Loss: 0.0026 | Val mean-roc_auc_score: 0.9876
|
57 |
+
2025-09-23 04:11:38,150 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 38/100 | Train Loss: 0.0028 | Val mean-roc_auc_score: 0.9876
|
58 |
+
2025-09-23 04:11:42,216 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 39/100 | Train Loss: 0.0027 | Val mean-roc_auc_score: 0.9876
|
59 |
+
2025-09-23 04:11:46,325 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 40/100 | Train Loss: 0.0029 | Val mean-roc_auc_score: 0.9876
|
60 |
+
2025-09-23 04:11:50,410 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 41/100 | Train Loss: 0.0015 | Val mean-roc_auc_score: 0.9876
|
61 |
+
2025-09-23 04:11:54,915 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 42/100 | Train Loss: 0.0024 | Val mean-roc_auc_score: 0.9865
|
62 |
+
2025-09-23 04:11:59,006 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 43/100 | Train Loss: 0.0028 | Val mean-roc_auc_score: 0.9873
|
63 |
+
2025-09-23 04:12:03,100 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 44/100 | Train Loss: 0.0033 | Val mean-roc_auc_score: 0.9859
|
64 |
+
2025-09-23 04:12:07,181 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 45/100 | Train Loss: 0.0030 | Val mean-roc_auc_score: 0.9881
|
65 |
+
2025-09-23 04:12:11,330 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 46/100 | Train Loss: 0.0003 | Val mean-roc_auc_score: 0.9881
|
66 |
+
2025-09-23 04:12:15,851 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 47/100 | Train Loss: 0.0024 | Val mean-roc_auc_score: 0.9876
|
67 |
+
2025-09-23 04:12:19,996 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 48/100 | Train Loss: 0.0026 | Val mean-roc_auc_score: 0.9876
|
68 |
+
2025-09-23 04:12:24,040 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 49/100 | Train Loss: 0.0005 | Val mean-roc_auc_score: 0.9873
|
69 |
+
2025-09-23 04:12:28,198 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 50/100 | Train Loss: 0.0025 | Val mean-roc_auc_score: 0.9871
|
70 |
+
2025-09-23 04:12:32,274 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 51/100 | Train Loss: 0.0025 | Val mean-roc_auc_score: 0.9865
|
71 |
+
2025-09-23 04:12:36,767 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 52/100 | Train Loss: 0.0013 | Val mean-roc_auc_score: 0.9859
|
72 |
+
2025-09-23 04:12:40,866 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 53/100 | Train Loss: 0.0025 | Val mean-roc_auc_score: 0.9853
|
73 |
+
2025-09-23 04:12:44,973 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 54/100 | Train Loss: 0.0025 | Val mean-roc_auc_score: 0.9850
|
74 |
+
2025-09-23 04:12:50,262 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 55/100 | Train Loss: 0.0026 | Val mean-roc_auc_score: 0.9859
|
75 |
+
2025-09-23 04:12:54,340 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 56/100 | Train Loss: 0.0021 | Val mean-roc_auc_score: 0.9821
|
76 |
+
2025-09-23 04:12:58,800 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 57/100 | Train Loss: 0.0003 | Val mean-roc_auc_score: 0.9798
|
77 |
+
2025-09-23 04:13:02,879 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 58/100 | Train Loss: 0.0026 | Val mean-roc_auc_score: 0.9813
|
78 |
+
2025-09-23 04:13:06,952 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 59/100 | Train Loss: 0.0025 | Val mean-roc_auc_score: 0.9848
|
79 |
+
2025-09-23 04:13:11,079 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 60/100 | Train Loss: 0.0045 | Val mean-roc_auc_score: 0.9838
|
80 |
+
2025-09-23 04:13:15,156 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 61/100 | Train Loss: 0.0029 | Val mean-roc_auc_score: 0.9794
|
81 |
+
2025-09-23 04:13:19,717 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 62/100 | Train Loss: 0.0022 | Val mean-roc_auc_score: 0.9746
|
82 |
+
2025-09-23 04:13:23,787 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 63/100 | Train Loss: 0.0020 | Val mean-roc_auc_score: 0.9769
|
83 |
+
2025-09-23 04:13:27,938 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 64/100 | Train Loss: 0.0025 | Val mean-roc_auc_score: 0.9773
|
84 |
+
2025-09-23 04:13:31,992 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 65/100 | Train Loss: 0.0013 | Val mean-roc_auc_score: 0.9680
|
85 |
+
2025-09-23 04:13:36,066 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 66/100 | Train Loss: 0.0025 | Val mean-roc_auc_score: 0.9689
|
86 |
+
2025-09-23 04:13:40,616 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 67/100 | Train Loss: 0.0022 | Val mean-roc_auc_score: 0.9675
|
87 |
+
2025-09-23 04:13:44,744 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 68/100 | Train Loss: 0.0007 | Val mean-roc_auc_score: 0.9712
|
88 |
+
2025-09-23 04:13:48,814 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 69/100 | Train Loss: 0.0023 | Val mean-roc_auc_score: 0.9645
|
89 |
+
2025-09-23 04:13:52,880 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 70/100 | Train Loss: 0.0021 | Val mean-roc_auc_score: 0.9597
|
90 |
+
2025-09-23 04:13:56,939 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 71/100 | Train Loss: 0.0030 | Val mean-roc_auc_score: 0.9698
|
91 |
+
2025-09-23 04:14:01,436 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 72/100 | Train Loss: 0.0032 | Val mean-roc_auc_score: 0.9874
|
92 |
+
2025-09-23 04:14:05,566 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 73/100 | Train Loss: 0.0001 | Val mean-roc_auc_score: 0.9871
|
93 |
+
2025-09-23 04:14:09,701 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 74/100 | Train Loss: 0.0023 | Val mean-roc_auc_score: 0.9862
|
94 |
+
2025-09-23 04:14:13,771 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 75/100 | Train Loss: 0.0020 | Val mean-roc_auc_score: 0.9839
|
95 |
+
2025-09-23 04:14:17,875 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 76/100 | Train Loss: 0.0013 | Val mean-roc_auc_score: 0.9780
|
96 |
+
2025-09-23 04:14:22,304 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 77/100 | Train Loss: 0.0020 | Val mean-roc_auc_score: 0.9718
|
97 |
+
2025-09-23 04:14:26,389 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 78/100 | Train Loss: 0.0021 | Val mean-roc_auc_score: 0.9677
|
98 |
+
2025-09-23 04:14:30,723 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 79/100 | Train Loss: 0.0030 | Val mean-roc_auc_score: 0.9645
|
99 |
+
2025-09-23 04:14:34,777 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 80/100 | Train Loss: 0.0024 | Val mean-roc_auc_score: 0.9659
|
100 |
+
2025-09-23 04:14:38,858 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 81/100 | Train Loss: 0.0022 | Val mean-roc_auc_score: 0.9642
|
101 |
+
2025-09-23 04:14:44,504 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 82/100 | Train Loss: 0.0021 | Val mean-roc_auc_score: 0.9639
|
102 |
+
2025-09-23 04:14:48,534 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 83/100 | Train Loss: 0.0020 | Val mean-roc_auc_score: 0.9505
|
103 |
+
2025-09-23 04:14:52,641 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 84/100 | Train Loss: 0.0009 | Val mean-roc_auc_score: 0.9481
|
104 |
+
2025-09-23 04:14:56,763 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 85/100 | Train Loss: 0.0019 | Val mean-roc_auc_score: 0.9454
|
105 |
+
2025-09-23 04:15:00,957 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 86/100 | Train Loss: 0.0020 | Val mean-roc_auc_score: 0.9375
|
106 |
+
2025-09-23 04:15:05,473 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 87/100 | Train Loss: 0.0016 | Val mean-roc_auc_score: 0.9101
|
107 |
+
2025-09-23 04:15:09,578 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 88/100 | Train Loss: 0.0018 | Val mean-roc_auc_score: 0.9072
|
108 |
+
2025-09-23 04:15:13,669 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 89/100 | Train Loss: 0.0018 | Val mean-roc_auc_score: 0.9236
|
109 |
+
2025-09-23 04:15:17,744 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 90/100 | Train Loss: 0.0024 | Val mean-roc_auc_score: 0.8938
|
110 |
+
2025-09-23 04:15:21,821 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 91/100 | Train Loss: 0.0022 | Val mean-roc_auc_score: 0.9575
|
111 |
+
2025-09-23 04:15:26,364 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 92/100 | Train Loss: 0.0002 | Val mean-roc_auc_score: 0.9656
|
112 |
+
2025-09-23 04:15:30,481 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 93/100 | Train Loss: 0.0018 | Val mean-roc_auc_score: 0.9653
|
113 |
+
2025-09-23 04:15:34,573 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 94/100 | Train Loss: 0.0015 | Val mean-roc_auc_score: 0.9609
|
114 |
+
2025-09-23 04:15:38,674 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 95/100 | Train Loss: 0.0020 | Val mean-roc_auc_score: 0.9559
|
115 |
+
2025-09-23 04:15:42,735 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 96/100 | Train Loss: 0.0016 | Val mean-roc_auc_score: 0.9544
|
116 |
+
2025-09-23 04:15:47,221 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 97/100 | Train Loss: 0.0017 | Val mean-roc_auc_score: 0.9511
|
117 |
+
2025-09-23 04:15:51,293 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 98/100 | Train Loss: 0.0017 | Val mean-roc_auc_score: 0.9515
|
118 |
+
2025-09-23 04:15:55,358 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 99/100 | Train Loss: 0.0016 | Val mean-roc_auc_score: 0.9487
|
119 |
+
2025-09-23 04:15:59,491 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 100/100 | Train Loss: 0.0016 | Val mean-roc_auc_score: 0.9503
|
120 |
+
2025-09-23 04:16:00,142 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Test mean-roc_auc_score: 0.9955
|
121 |
+
2025-09-23 04:16:00,603 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Starting triplicate run 2 for dataset clintox at 2025-09-23_04-16-00
|
122 |
+
2025-09-23 04:16:04,085 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 1/100 | Train Loss: 0.1858 | Val mean-roc_auc_score: 0.8855
|
123 |
+
2025-09-23 04:16:04,085 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Global step of best model: 37
|
124 |
+
2025-09-23 04:16:04,600 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Best model saved at epoch 1 with val mean-roc_auc_score: 0.8855
|
125 |
+
2025-09-23 04:16:08,697 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 2/100 | Train Loss: 0.0458 | Val mean-roc_auc_score: 0.9797
|
126 |
+
2025-09-23 04:16:08,867 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Global step of best model: 74
|
127 |
+
2025-09-23 04:16:09,392 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Best model saved at epoch 2 with val mean-roc_auc_score: 0.9797
|
128 |
+
2025-09-23 04:16:13,500 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 3/100 | Train Loss: 0.0180 | Val mean-roc_auc_score: 0.9862
|
129 |
+
2025-09-23 04:16:13,671 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Global step of best model: 111
|
130 |
+
2025-09-23 04:16:14,183 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Best model saved at epoch 3 with val mean-roc_auc_score: 0.9862
|
131 |
+
2025-09-23 04:16:18,271 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 4/100 | Train Loss: 0.0220 | Val mean-roc_auc_score: 0.9874
|
132 |
+
2025-09-23 04:16:18,448 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Global step of best model: 148
|
133 |
+
2025-09-23 04:16:18,960 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Best model saved at epoch 4 with val mean-roc_auc_score: 0.9874
|
134 |
+
2025-09-23 04:16:23,013 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 5/100 | Train Loss: 0.0225 | Val mean-roc_auc_score: 0.9834
|
135 |
+
2025-09-23 04:16:27,063 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 6/100 | Train Loss: 0.0165 | Val mean-roc_auc_score: 0.9873
|
136 |
+
2025-09-23 04:16:31,521 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 7/100 | Train Loss: 0.0171 | Val mean-roc_auc_score: 0.9850
|
137 |
+
2025-09-23 04:16:35,599 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 8/100 | Train Loss: 0.0120 | Val mean-roc_auc_score: 0.9839
|
138 |
+
2025-09-23 04:16:39,684 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 9/100 | Train Loss: 0.0095 | Val mean-roc_auc_score: 0.9803
|
139 |
+
2025-09-23 04:16:43,805 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 10/100 | Train Loss: 0.0092 | Val mean-roc_auc_score: 0.9854
|
140 |
+
2025-09-23 04:16:47,857 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 11/100 | Train Loss: 0.0088 | Val mean-roc_auc_score: 0.9857
|
141 |
+
2025-09-23 04:16:52,313 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 12/100 | Train Loss: 0.0087 | Val mean-roc_auc_score: 0.9897
|
142 |
+
2025-09-23 04:16:52,491 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Global step of best model: 444
|
143 |
+
2025-09-23 04:16:53,025 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Best model saved at epoch 12 with val mean-roc_auc_score: 0.9897
|
144 |
+
2025-09-23 04:16:57,247 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 13/100 | Train Loss: 0.0091 | Val mean-roc_auc_score: 0.9874
|
145 |
+
2025-09-23 04:17:01,308 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 14/100 | Train Loss: 0.0089 | Val mean-roc_auc_score: 0.9869
|
146 |
+
2025-09-23 04:17:05,402 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 15/100 | Train Loss: 0.0064 | Val mean-roc_auc_score: 0.9886
|
147 |
+
2025-09-23 04:17:09,485 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 16/100 | Train Loss: 0.0064 | Val mean-roc_auc_score: 0.9859
|
148 |
+
2025-09-23 04:17:13,945 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 17/100 | Train Loss: 0.0090 | Val mean-roc_auc_score: 0.9884
|
149 |
+
2025-09-23 04:17:18,005 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 18/100 | Train Loss: 0.0064 | Val mean-roc_auc_score: 0.9881
|
150 |
+
2025-09-23 04:17:22,074 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 19/100 | Train Loss: 0.0103 | Val mean-roc_auc_score: 0.9876
|
151 |
+
2025-09-23 04:17:26,159 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 20/100 | Train Loss: 0.0041 | Val mean-roc_auc_score: 0.9876
|
152 |
+
2025-09-23 04:17:30,244 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 21/100 | Train Loss: 0.0035 | Val mean-roc_auc_score: 0.9877
|
153 |
+
2025-09-23 04:17:34,744 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 22/100 | Train Loss: 0.0177 | Val mean-roc_auc_score: 0.9872
|
154 |
+
2025-09-23 04:17:38,802 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 23/100 | Train Loss: 0.0055 | Val mean-roc_auc_score: 0.9853
|
155 |
+
2025-09-23 04:17:42,845 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 24/100 | Train Loss: 0.0046 | Val mean-roc_auc_score: 0.9862
|
156 |
+
2025-09-23 04:17:46,911 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 25/100 | Train Loss: 0.0050 | Val mean-roc_auc_score: 0.9869
|
157 |
+
2025-09-23 04:17:51,084 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 26/100 | Train Loss: 0.0035 | Val mean-roc_auc_score: 0.9859
|
158 |
+
2025-09-23 04:17:56,765 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 27/100 | Train Loss: 0.0033 | Val mean-roc_auc_score: 0.9865
|
159 |
+
2025-09-23 04:18:00,796 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 28/100 | Train Loss: 0.0027 | Val mean-roc_auc_score: 0.9872
|
160 |
+
2025-09-23 04:18:04,878 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 29/100 | Train Loss: 0.0030 | Val mean-roc_auc_score: 0.9872
|
161 |
+
2025-09-23 04:18:08,952 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 30/100 | Train Loss: 0.0019 | Val mean-roc_auc_score: 0.9856
|
162 |
+
2025-09-23 04:18:13,051 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 31/100 | Train Loss: 0.0028 | Val mean-roc_auc_score: 0.9868
|
163 |
+
2025-09-23 04:18:17,530 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 32/100 | Train Loss: 0.0028 | Val mean-roc_auc_score: 0.9871
|
164 |
+
2025-09-23 04:18:21,613 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 33/100 | Train Loss: 0.0030 | Val mean-roc_auc_score: 0.9871
|
165 |
+
2025-09-23 04:18:25,681 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 34/100 | Train Loss: 0.0026 | Val mean-roc_auc_score: 0.9872
|
166 |
+
2025-09-23 04:18:29,745 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 35/100 | Train Loss: 0.0022 | Val mean-roc_auc_score: 0.9872
|
167 |
+
2025-09-23 04:18:33,797 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 36/100 | Train Loss: 0.0026 | Val mean-roc_auc_score: 0.9872
|
168 |
+
2025-09-23 04:18:38,256 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 37/100 | Train Loss: 0.0030 | Val mean-roc_auc_score: 0.9877
|
169 |
+
2025-09-23 04:18:42,381 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 38/100 | Train Loss: 0.0005 | Val mean-roc_auc_score: 0.9877
|
170 |
+
2025-09-23 04:18:46,529 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 39/100 | Train Loss: 0.0021 | Val mean-roc_auc_score: 0.9872
|
171 |
+
2025-09-23 04:18:50,589 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 40/100 | Train Loss: 0.0025 | Val mean-roc_auc_score: 0.9872
|
172 |
+
2025-09-23 04:18:54,702 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 41/100 | Train Loss: 0.0046 | Val mean-roc_auc_score: 0.9875
|
173 |
+
2025-09-23 04:18:59,190 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 42/100 | Train Loss: 0.0026 | Val mean-roc_auc_score: 0.9872
|
174 |
+
2025-09-23 04:19:03,376 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 43/100 | Train Loss: 0.0039 | Val mean-roc_auc_score: 0.9878
|
175 |
+
2025-09-23 04:19:07,433 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 44/100 | Train Loss: 0.0024 | Val mean-roc_auc_score: 0.9882
|
176 |
+
2025-09-23 04:19:11,489 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 45/100 | Train Loss: 0.0022 | Val mean-roc_auc_score: 0.9888
|
177 |
+
2025-09-23 04:19:15,593 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 46/100 | Train Loss: 0.0001 | Val mean-roc_auc_score: 0.9888
|
178 |
+
2025-09-23 04:19:20,086 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 47/100 | Train Loss: 0.0023 | Val mean-roc_auc_score: 0.9883
|
179 |
+
2025-09-23 04:19:24,134 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 48/100 | Train Loss: 0.0022 | Val mean-roc_auc_score: 0.9888
|
180 |
+
2025-09-23 04:19:28,261 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 49/100 | Train Loss: 0.0019 | Val mean-roc_auc_score: 0.9894
|
181 |
+
2025-09-23 04:19:32,315 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 50/100 | Train Loss: 0.0023 | Val mean-roc_auc_score: 0.9889
|
182 |
+
2025-09-23 04:19:36,402 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 51/100 | Train Loss: 0.0020 | Val mean-roc_auc_score: 0.9883
|
183 |
+
2025-09-23 04:19:40,883 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 52/100 | Train Loss: 0.0021 | Val mean-roc_auc_score: 0.9883
|
184 |
+
2025-09-23 04:19:45,058 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 53/100 | Train Loss: 0.0021 | Val mean-roc_auc_score: 0.9856
|
185 |
+
2025-09-23 04:19:49,103 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 54/100 | Train Loss: 0.0023 | Val mean-roc_auc_score: 0.9877
|
186 |
+
2025-09-23 04:19:54,367 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 55/100 | Train Loss: 0.0027 | Val mean-roc_auc_score: 0.9865
|
187 |
+
2025-09-23 04:19:58,477 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 56/100 | Train Loss: 0.0025 | Val mean-roc_auc_score: 0.9883
|
188 |
+
2025-09-23 04:20:03,007 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 57/100 | Train Loss: 0.0021 | Val mean-roc_auc_score: 0.9883
|
189 |
+
2025-09-23 04:20:07,123 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 58/100 | Train Loss: 0.0019 | Val mean-roc_auc_score: 0.9877
|
190 |
+
2025-09-23 04:20:11,183 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 59/100 | Train Loss: 0.0023 | Val mean-roc_auc_score: 0.9856
|
191 |
+
2025-09-23 04:20:15,264 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 60/100 | Train Loss: 0.0032 | Val mean-roc_auc_score: 0.9853
|
192 |
+
2025-09-23 04:20:19,359 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 61/100 | Train Loss: 0.0020 | Val mean-roc_auc_score: 0.9856
|
193 |
+
2025-09-23 04:20:23,895 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 62/100 | Train Loss: 0.0023 | Val mean-roc_auc_score: 0.9856
|
194 |
+
2025-09-23 04:20:27,948 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 63/100 | Train Loss: 0.0020 | Val mean-roc_auc_score: 0.9829
|
195 |
+
2025-09-23 04:20:32,080 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 64/100 | Train Loss: 0.0025 | Val mean-roc_auc_score: 0.9857
|
196 |
+
2025-09-23 04:20:36,217 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 65/100 | Train Loss: 0.0038 | Val mean-roc_auc_score: 0.9868
|
197 |
+
2025-09-23 04:20:40,278 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 66/100 | Train Loss: 0.0023 | Val mean-roc_auc_score: 0.9877
|
198 |
+
2025-09-23 04:20:44,793 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 67/100 | Train Loss: 0.0018 | Val mean-roc_auc_score: 0.9859
|
199 |
+
2025-09-23 04:20:48,892 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 68/100 | Train Loss: 0.0027 | Val mean-roc_auc_score: 0.9848
|
200 |
+
2025-09-23 04:20:52,978 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 69/100 | Train Loss: 0.0019 | Val mean-roc_auc_score: 0.9834
|
201 |
+
2025-09-23 04:20:57,101 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 70/100 | Train Loss: 0.0022 | Val mean-roc_auc_score: 0.9839
|
202 |
+
2025-09-23 04:21:01,157 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 71/100 | Train Loss: 0.0019 | Val mean-roc_auc_score: 0.9834
|
203 |
+
2025-09-23 04:21:05,655 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 72/100 | Train Loss: 0.0020 | Val mean-roc_auc_score: 0.9831
|
204 |
+
2025-09-23 04:21:09,740 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 73/100 | Train Loss: 0.0002 | Val mean-roc_auc_score: 0.9831
|
205 |
+
2025-09-23 04:21:13,768 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 74/100 | Train Loss: 0.0021 | Val mean-roc_auc_score: 0.9839
|
206 |
+
2025-09-23 04:21:17,817 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 75/100 | Train Loss: 0.0019 | Val mean-roc_auc_score: 0.9819
|
207 |
+
2025-09-23 04:21:21,904 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 76/100 | Train Loss: 0.0021 | Val mean-roc_auc_score: 0.9772
|
208 |
+
2025-09-23 04:21:26,398 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 77/100 | Train Loss: 0.0030 | Val mean-roc_auc_score: 0.9837
|
209 |
+
2025-09-23 04:21:30,448 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 78/100 | Train Loss: 0.0052 | Val mean-roc_auc_score: 0.9899
|
210 |
+
2025-09-23 04:21:30,590 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Global step of best model: 2886
|
211 |
+
2025-09-23 04:21:31,106 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Best model saved at epoch 78 with val mean-roc_auc_score: 0.9899
|
212 |
+
2025-09-23 04:21:35,185 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 79/100 | Train Loss: 0.0043 | Val mean-roc_auc_score: 0.9875
|
213 |
+
2025-09-23 04:21:39,257 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 80/100 | Train Loss: 0.0024 | Val mean-roc_auc_score: 0.9872
|
214 |
+
2025-09-23 04:21:43,291 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 81/100 | Train Loss: 0.0020 | Val mean-roc_auc_score: 0.9854
|
215 |
+
2025-09-23 04:21:48,929 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 82/100 | Train Loss: 0.0018 | Val mean-roc_auc_score: 0.9806
|
216 |
+
2025-09-23 04:21:52,966 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 83/100 | Train Loss: 0.0023 | Val mean-roc_auc_score: 0.9814
|
217 |
+
2025-09-23 04:21:57,060 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 84/100 | Train Loss: 0.0009 | Val mean-roc_auc_score: 0.9778
|
218 |
+
2025-09-23 04:22:01,146 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 85/100 | Train Loss: 0.0017 | Val mean-roc_auc_score: 0.9736
|
219 |
+
2025-09-23 04:22:05,241 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 86/100 | Train Loss: 0.0016 | Val mean-roc_auc_score: 0.9618
|
220 |
+
2025-09-23 04:22:09,780 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 87/100 | Train Loss: 0.0017 | Val mean-roc_auc_score: 0.9602
|
221 |
+
2025-09-23 04:22:13,907 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 88/100 | Train Loss: 0.0018 | Val mean-roc_auc_score: 0.9506
|
222 |
+
2025-09-23 04:22:17,949 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 89/100 | Train Loss: 0.0017 | Val mean-roc_auc_score: 0.9616
|
223 |
+
2025-09-23 04:22:22,046 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 90/100 | Train Loss: 0.0021 | Val mean-roc_auc_score: 0.9390
|
224 |
+
2025-09-23 04:22:26,115 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 91/100 | Train Loss: 0.0015 | Val mean-roc_auc_score: 0.9325
|
225 |
+
2025-09-23 04:22:30,562 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 92/100 | Train Loss: 0.0001 | Val mean-roc_auc_score: 0.9308
|
226 |
+
2025-09-23 04:22:34,631 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 93/100 | Train Loss: 0.0029 | Val mean-roc_auc_score: 0.9322
|
227 |
+
2025-09-23 04:22:38,701 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 94/100 | Train Loss: 0.0047 | Val mean-roc_auc_score: 0.9893
|
228 |
+
2025-09-23 04:22:42,799 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 95/100 | Train Loss: 0.0128 | Val mean-roc_auc_score: 0.9917
|
229 |
+
2025-09-23 04:22:42,942 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Global step of best model: 3515
|
230 |
+
2025-09-23 04:22:43,463 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Best model saved at epoch 95 with val mean-roc_auc_score: 0.9917
|
231 |
+
2025-09-23 04:22:47,592 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 96/100 | Train Loss: 0.0103 | Val mean-roc_auc_score: 0.9853
|
232 |
+
2025-09-23 04:22:52,067 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 97/100 | Train Loss: 0.0141 | Val mean-roc_auc_score: 0.9858
|
233 |
+
2025-09-23 04:22:56,204 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 98/100 | Train Loss: 0.0056 | Val mean-roc_auc_score: 0.9856
|
234 |
+
2025-09-23 04:23:00,279 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 99/100 | Train Loss: 0.0043 | Val mean-roc_auc_score: 0.9848
|
235 |
+
2025-09-23 04:23:04,413 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 100/100 | Train Loss: 0.0031 | Val mean-roc_auc_score: 0.9821
|
236 |
+
2025-09-23 04:23:05,038 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Test mean-roc_auc_score: 0.9944
|
237 |
+
2025-09-23 04:23:05,488 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Starting triplicate run 3 for dataset clintox at 2025-09-23_04-23-05
|
238 |
+
2025-09-23 04:23:08,971 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 1/100 | Train Loss: 0.1410 | Val mean-roc_auc_score: 0.9526
|
239 |
+
2025-09-23 04:23:08,971 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Global step of best model: 37
|
240 |
+
2025-09-23 04:23:09,489 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Best model saved at epoch 1 with val mean-roc_auc_score: 0.9526
|
241 |
+
2025-09-23 04:23:13,532 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 2/100 | Train Loss: 0.0437 | Val mean-roc_auc_score: 0.9610
|
242 |
+
2025-09-23 04:23:13,699 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Global step of best model: 74
|
243 |
+
2025-09-23 04:23:14,213 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Best model saved at epoch 2 with val mean-roc_auc_score: 0.9610
|
244 |
+
2025-09-23 04:23:18,294 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 3/100 | Train Loss: 0.0196 | Val mean-roc_auc_score: 0.9811
|
245 |
+
2025-09-23 04:23:18,470 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Global step of best model: 111
|
246 |
+
2025-09-23 04:23:18,978 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Best model saved at epoch 3 with val mean-roc_auc_score: 0.9811
|
247 |
+
2025-09-23 04:23:23,032 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 4/100 | Train Loss: 0.0287 | Val mean-roc_auc_score: 0.9835
|
248 |
+
2025-09-23 04:23:23,211 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Global step of best model: 148
|
249 |
+
2025-09-23 04:23:23,734 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Best model saved at epoch 4 with val mean-roc_auc_score: 0.9835
|
250 |
+
2025-09-23 04:23:27,846 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 5/100 | Train Loss: 0.0248 | Val mean-roc_auc_score: 0.9835
|
251 |
+
2025-09-23 04:23:31,959 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 6/100 | Train Loss: 0.0189 | Val mean-roc_auc_score: 0.9841
|
252 |
+
2025-09-23 04:23:32,537 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Global step of best model: 222
|
253 |
+
2025-09-23 04:23:33,046 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Best model saved at epoch 6 with val mean-roc_auc_score: 0.9841
|
254 |
+
2025-09-23 04:23:37,152 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 7/100 | Train Loss: 0.0170 | Val mean-roc_auc_score: 0.9864
|
255 |
+
2025-09-23 04:23:37,327 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Global step of best model: 259
|
256 |
+
2025-09-23 04:23:37,832 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Best model saved at epoch 7 with val mean-roc_auc_score: 0.9864
|
257 |
+
2025-09-23 04:23:42,168 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 8/100 | Train Loss: 0.0135 | Val mean-roc_auc_score: 0.9847
|
258 |
+
2025-09-23 04:23:46,206 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 9/100 | Train Loss: 0.0114 | Val mean-roc_auc_score: 0.9864
|
259 |
+
2025-09-23 04:23:50,286 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 10/100 | Train Loss: 0.0097 | Val mean-roc_auc_score: 0.9869
|
260 |
+
2025-09-23 04:23:50,468 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Global step of best model: 370
|
261 |
+
2025-09-23 04:23:50,988 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Best model saved at epoch 10 with val mean-roc_auc_score: 0.9869
|
262 |
+
2025-09-23 04:23:55,092 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 11/100 | Train Loss: 0.0007 | Val mean-roc_auc_score: 0.9857
|
263 |
+
2025-09-23 04:23:59,564 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 12/100 | Train Loss: 0.0078 | Val mean-roc_auc_score: 0.9881
|
264 |
+
2025-09-23 04:23:59,738 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Global step of best model: 444
|
265 |
+
2025-09-23 04:24:00,249 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Best model saved at epoch 12 with val mean-roc_auc_score: 0.9881
|
266 |
+
2025-09-23 04:24:04,291 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 13/100 | Train Loss: 0.0064 | Val mean-roc_auc_score: 0.9881
|
267 |
+
2025-09-23 04:24:08,369 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 14/100 | Train Loss: 0.0146 | Val mean-roc_auc_score: 0.9919
|
268 |
+
2025-09-23 04:24:08,540 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Global step of best model: 518
|
269 |
+
2025-09-23 04:24:09,050 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Best model saved at epoch 14 with val mean-roc_auc_score: 0.9919
|
270 |
+
2025-09-23 04:24:13,158 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 15/100 | Train Loss: 0.0118 | Val mean-roc_auc_score: 0.9881
|
271 |
+
2025-09-23 04:24:17,222 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 16/100 | Train Loss: 0.0070 | Val mean-roc_auc_score: 0.9893
|
272 |
+
2025-09-23 04:24:21,704 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 17/100 | Train Loss: 0.0055 | Val mean-roc_auc_score: 0.9894
|
273 |
+
2025-09-23 04:24:25,796 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 18/100 | Train Loss: 0.0086 | Val mean-roc_auc_score: 0.9855
|
274 |
+
2025-09-23 04:24:29,888 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 19/100 | Train Loss: 0.0194 | Val mean-roc_auc_score: 0.9870
|
275 |
+
2025-09-23 04:24:33,951 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 20/100 | Train Loss: 0.0051 | Val mean-roc_auc_score: 0.9887
|
276 |
+
2025-09-23 04:24:37,998 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 21/100 | Train Loss: 0.0047 | Val mean-roc_auc_score: 0.9876
|
277 |
+
2025-09-23 04:24:42,466 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 22/100 | Train Loss: 0.0031 | Val mean-roc_auc_score: 0.9881
|
278 |
+
2025-09-23 04:24:46,566 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 23/100 | Train Loss: 0.0033 | Val mean-roc_auc_score: 0.9876
|
279 |
+
2025-09-23 04:24:50,651 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 24/100 | Train Loss: 0.0035 | Val mean-roc_auc_score: 0.9882
|
280 |
+
2025-09-23 04:24:54,736 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 25/100 | Train Loss: 0.0043 | Val mean-roc_auc_score: 0.9877
|
281 |
+
2025-09-23 04:24:58,797 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 26/100 | Train Loss: 0.0038 | Val mean-roc_auc_score: 0.9877
|
282 |
+
2025-09-23 04:25:04,496 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 27/100 | Train Loss: 0.0035 | Val mean-roc_auc_score: 0.9882
|
283 |
+
2025-09-23 04:25:08,606 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 28/100 | Train Loss: 0.0038 | Val mean-roc_auc_score: 0.9882
|
284 |
+
2025-09-23 04:25:12,738 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 29/100 | Train Loss: 0.0031 | Val mean-roc_auc_score: 0.9882
|
285 |
+
2025-09-23 04:25:16,802 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 30/100 | Train Loss: 0.0006 | Val mean-roc_auc_score: 0.9882
|
286 |
+
2025-09-23 04:25:20,886 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 31/100 | Train Loss: 0.0029 | Val mean-roc_auc_score: 0.9882
|
287 |
+
2025-09-23 04:25:25,424 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 32/100 | Train Loss: 0.0034 | Val mean-roc_auc_score: 0.9882
|
288 |
+
2025-09-23 04:25:29,513 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 33/100 | Train Loss: 0.0040 | Val mean-roc_auc_score: 0.9882
|
289 |
+
2025-09-23 04:25:33,580 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 34/100 | Train Loss: 0.0027 | Val mean-roc_auc_score: 0.9885
|
290 |
+
2025-09-23 04:25:37,675 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 35/100 | Train Loss: 0.0026 | Val mean-roc_auc_score: 0.9877
|
291 |
+
2025-09-23 04:25:41,768 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 36/100 | Train Loss: 0.0030 | Val mean-roc_auc_score: 0.9877
|
292 |
+
2025-09-23 04:25:46,233 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 37/100 | Train Loss: 0.0027 | Val mean-roc_auc_score: 0.9879
|
293 |
+
2025-09-23 04:25:50,298 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 38/100 | Train Loss: 0.0002 | Val mean-roc_auc_score: 0.9877
|
294 |
+
2025-09-23 04:25:54,363 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 39/100 | Train Loss: 0.0030 | Val mean-roc_auc_score: 0.9882
|
295 |
+
2025-09-23 04:25:58,420 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 40/100 | Train Loss: 0.0024 | Val mean-roc_auc_score: 0.9877
|
296 |
+
2025-09-23 04:26:02,510 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 41/100 | Train Loss: 0.0038 | Val mean-roc_auc_score: 0.9882
|
297 |
+
2025-09-23 04:26:07,004 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 42/100 | Train Loss: 0.0027 | Val mean-roc_auc_score: 0.9877
|
298 |
+
2025-09-23 04:26:11,025 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 43/100 | Train Loss: 0.0029 | Val mean-roc_auc_score: 0.9877
|
299 |
+
2025-09-23 04:26:15,160 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 44/100 | Train Loss: 0.0032 | Val mean-roc_auc_score: 0.9877
|
300 |
+
2025-09-23 04:26:19,246 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 45/100 | Train Loss: 0.0027 | Val mean-roc_auc_score: 0.9865
|
301 |
+
2025-09-23 04:26:23,343 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 46/100 | Train Loss: 0.0033 | Val mean-roc_auc_score: 0.9877
|
302 |
+
2025-09-23 04:26:27,807 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 47/100 | Train Loss: 0.0027 | Val mean-roc_auc_score: 0.9887
|
303 |
+
2025-09-23 04:26:31,917 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 48/100 | Train Loss: 0.0026 | Val mean-roc_auc_score: 0.9887
|
304 |
+
2025-09-23 04:26:35,978 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 49/100 | Train Loss: 0.0008 | Val mean-roc_auc_score: 0.9877
|
305 |
+
2025-09-23 04:26:40,029 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 50/100 | Train Loss: 0.0027 | Val mean-roc_auc_score: 0.9887
|
306 |
+
2025-09-23 04:26:44,111 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 51/100 | Train Loss: 0.0025 | Val mean-roc_auc_score: 0.9877
|
307 |
+
2025-09-23 04:26:48,600 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 52/100 | Train Loss: 0.0037 | Val mean-roc_auc_score: 0.9877
|
308 |
+
2025-09-23 04:26:52,684 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 53/100 | Train Loss: 0.0024 | Val mean-roc_auc_score: 0.9877
|
309 |
+
2025-09-23 04:26:56,771 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 54/100 | Train Loss: 0.0021 | Val mean-roc_auc_score: 0.9877
|
310 |
+
2025-09-23 04:27:02,103 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 55/100 | Train Loss: 0.0023 | Val mean-roc_auc_score: 0.9877
|
311 |
+
2025-09-23 04:27:06,242 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 56/100 | Train Loss: 0.0026 | Val mean-roc_auc_score: 0.9874
|
312 |
+
2025-09-23 04:27:10,761 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 57/100 | Train Loss: 0.0036 | Val mean-roc_auc_score: 0.9871
|
313 |
+
2025-09-23 04:27:14,849 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 58/100 | Train Loss: 0.0018 | Val mean-roc_auc_score: 0.9877
|
314 |
+
2025-09-23 04:27:18,936 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 59/100 | Train Loss: 0.0024 | Val mean-roc_auc_score: 0.9877
|
315 |
+
2025-09-23 04:27:23,015 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 60/100 | Train Loss: 0.0018 | Val mean-roc_auc_score: 0.9865
|
316 |
+
2025-09-23 04:27:27,065 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 61/100 | Train Loss: 0.0032 | Val mean-roc_auc_score: 0.9877
|
317 |
+
2025-09-23 04:27:31,546 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 62/100 | Train Loss: 0.0033 | Val mean-roc_auc_score: 0.9882
|
318 |
+
2025-09-23 04:27:35,627 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 63/100 | Train Loss: 0.0025 | Val mean-roc_auc_score: 0.9877
|
319 |
+
2025-09-23 04:27:39,721 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 64/100 | Train Loss: 0.0023 | Val mean-roc_auc_score: 0.9871
|
320 |
+
2025-09-23 04:27:43,790 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 65/100 | Train Loss: 0.0002 | Val mean-roc_auc_score: 0.9871
|
321 |
+
2025-09-23 04:27:47,888 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 66/100 | Train Loss: 0.0023 | Val mean-roc_auc_score: 0.9871
|
322 |
+
2025-09-23 04:27:52,359 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 67/100 | Train Loss: 0.0021 | Val mean-roc_auc_score: 0.9865
|
323 |
+
2025-09-23 04:27:56,444 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 68/100 | Train Loss: 0.0054 | Val mean-roc_auc_score: 0.9865
|
324 |
+
2025-09-23 04:28:00,538 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 69/100 | Train Loss: 0.0063 | Val mean-roc_auc_score: 0.9882
|
325 |
+
2025-09-23 04:28:04,614 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 70/100 | Train Loss: 0.0120 | Val mean-roc_auc_score: 0.9812
|
326 |
+
2025-09-23 04:28:08,708 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 71/100 | Train Loss: 0.0041 | Val mean-roc_auc_score: 0.9818
|
327 |
+
2025-09-23 04:28:13,436 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 72/100 | Train Loss: 0.0030 | Val mean-roc_auc_score: 0.9816
|
328 |
+
2025-09-23 04:28:17,519 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 73/100 | Train Loss: 0.0001 | Val mean-roc_auc_score: 0.9783
|
329 |
+
2025-09-23 04:28:21,566 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 74/100 | Train Loss: 0.0020 | Val mean-roc_auc_score: 0.9778
|
330 |
+
2025-09-23 04:28:25,645 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 75/100 | Train Loss: 0.0021 | Val mean-roc_auc_score: 0.9760
|
331 |
+
2025-09-23 04:28:29,723 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 76/100 | Train Loss: 0.0024 | Val mean-roc_auc_score: 0.9716
|
332 |
+
2025-09-23 04:28:34,189 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 77/100 | Train Loss: 0.0027 | Val mean-roc_auc_score: 0.9676
|
333 |
+
2025-09-23 04:28:38,226 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 78/100 | Train Loss: 0.0050 | Val mean-roc_auc_score: 0.9740
|
334 |
+
2025-09-23 04:28:42,326 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 79/100 | Train Loss: 0.0147 | Val mean-roc_auc_score: 0.9860
|
335 |
+
2025-09-23 04:28:46,456 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 80/100 | Train Loss: 0.0178 | Val mean-roc_auc_score: 0.9938
|
336 |
+
2025-09-23 04:28:46,604 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Global step of best model: 2960
|
337 |
+
2025-09-23 04:28:47,119 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Best model saved at epoch 80 with val mean-roc_auc_score: 0.9938
|
338 |
+
2025-09-23 04:28:51,211 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 81/100 | Train Loss: 0.0055 | Val mean-roc_auc_score: 0.9926
|
339 |
+
2025-09-23 04:28:56,902 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 82/100 | Train Loss: 0.0026 | Val mean-roc_auc_score: 0.9920
|
340 |
+
2025-09-23 04:29:00,950 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 83/100 | Train Loss: 0.0034 | Val mean-roc_auc_score: 0.9925
|
341 |
+
2025-09-23 04:29:05,055 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 84/100 | Train Loss: 0.0016 | Val mean-roc_auc_score: 0.9925
|
342 |
+
2025-09-23 04:29:09,126 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 85/100 | Train Loss: 0.0028 | Val mean-roc_auc_score: 0.9925
|
343 |
+
2025-09-23 04:29:13,199 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 86/100 | Train Loss: 0.0026 | Val mean-roc_auc_score: 0.9925
|
344 |
+
2025-09-23 04:29:17,780 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 87/100 | Train Loss: 0.0035 | Val mean-roc_auc_score: 0.9925
|
345 |
+
2025-09-23 04:29:21,843 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 88/100 | Train Loss: 0.0023 | Val mean-roc_auc_score: 0.9925
|
346 |
+
2025-09-23 04:29:25,935 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 89/100 | Train Loss: 0.0023 | Val mean-roc_auc_score: 0.9923
|
347 |
+
2025-09-23 04:29:30,025 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 90/100 | Train Loss: 0.0022 | Val mean-roc_auc_score: 0.9925
|
348 |
+
2025-09-23 04:29:34,057 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 91/100 | Train Loss: 0.0022 | Val mean-roc_auc_score: 0.9923
|
349 |
+
2025-09-23 04:29:38,572 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 92/100 | Train Loss: 0.0002 | Val mean-roc_auc_score: 0.9925
|
350 |
+
2025-09-23 04:29:42,721 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 93/100 | Train Loss: 0.0022 | Val mean-roc_auc_score: 0.9915
|
351 |
+
2025-09-23 04:29:46,769 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 94/100 | Train Loss: 0.0023 | Val mean-roc_auc_score: 0.9923
|
352 |
+
2025-09-23 04:29:50,864 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 95/100 | Train Loss: 0.0027 | Val mean-roc_auc_score: 0.9915
|
353 |
+
2025-09-23 04:29:54,990 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 96/100 | Train Loss: 0.0022 | Val mean-roc_auc_score: 0.9915
|
354 |
+
2025-09-23 04:29:59,519 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 97/100 | Train Loss: 0.0024 | Val mean-roc_auc_score: 0.9915
|
355 |
+
2025-09-23 04:30:03,589 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 98/100 | Train Loss: 0.0025 | Val mean-roc_auc_score: 0.9909
|
356 |
+
2025-09-23 04:30:07,630 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 99/100 | Train Loss: 0.0017 | Val mean-roc_auc_score: 0.9909
|
357 |
+
2025-09-23 04:30:11,696 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 100/100 | Train Loss: 0.0024 | Val mean-roc_auc_score: 0.9915
|
358 |
+
2025-09-23 04:30:12,322 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Test mean-roc_auc_score: 0.9915
|
359 |
+
2025-09-23 04:30:12,782 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Final Triplicate Test Results — Avg mean-roc_auc_score: 0.9938, Std Dev: 0.0017
|
logs_modchembert_classification_ModChemBERT-MLM-DAPT-TAFT-OPT/modchembert_deepchem_splits_run_hiv_epochs100_batch_size32_20250923_080632.log
ADDED
@@ -0,0 +1,329 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
2025-09-23 08:06:32,216 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Running benchmark for dataset: hiv
|
2 |
+
2025-09-23 08:06:32,216 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - dataset: hiv, tasks: ['HIV_active'], epochs: 100, learning rate: 3e-05
|
3 |
+
2025-09-23 08:06:32,222 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Starting triplicate run 1 for dataset hiv at 2025-09-23_08-06-32
|
4 |
+
2025-09-23 08:07:43,657 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 1/100 | Train Loss: 0.1267 | Val mean-roc_auc_score: 0.8142
|
5 |
+
2025-09-23 08:07:43,657 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Global step of best model: 1027
|
6 |
+
2025-09-23 08:07:44,188 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Best model saved at epoch 1 with val mean-roc_auc_score: 0.8142
|
7 |
+
2025-09-23 08:08:57,955 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 2/100 | Train Loss: 0.1071 | Val mean-roc_auc_score: 0.8143
|
8 |
+
2025-09-23 08:08:58,093 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Global step of best model: 2054
|
9 |
+
2025-09-23 08:08:58,626 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Best model saved at epoch 2 with val mean-roc_auc_score: 0.8143
|
10 |
+
2025-09-23 08:10:12,595 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 3/100 | Train Loss: 0.1211 | Val mean-roc_auc_score: 0.8301
|
11 |
+
2025-09-23 08:10:12,741 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Global step of best model: 3081
|
12 |
+
2025-09-23 08:10:13,287 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Best model saved at epoch 3 with val mean-roc_auc_score: 0.8301
|
13 |
+
2025-09-23 08:11:26,560 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 4/100 | Train Loss: 0.0830 | Val mean-roc_auc_score: 0.8170
|
14 |
+
2025-09-23 08:12:40,706 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 5/100 | Train Loss: 0.0634 | Val mean-roc_auc_score: 0.8190
|
15 |
+
2025-09-23 08:13:54,675 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 6/100 | Train Loss: 0.0519 | Val mean-roc_auc_score: 0.8123
|
16 |
+
2025-09-23 08:15:08,319 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 7/100 | Train Loss: 0.0502 | Val mean-roc_auc_score: 0.7999
|
17 |
+
2025-09-23 08:16:22,667 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 8/100 | Train Loss: 0.0309 | Val mean-roc_auc_score: 0.8221
|
18 |
+
2025-09-23 08:17:36,094 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 9/100 | Train Loss: 0.0371 | Val mean-roc_auc_score: 0.7995
|
19 |
+
2025-09-23 08:18:49,713 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 10/100 | Train Loss: 0.0312 | Val mean-roc_auc_score: 0.7784
|
20 |
+
2025-09-23 08:20:03,582 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 11/100 | Train Loss: 0.0121 | Val mean-roc_auc_score: 0.7890
|
21 |
+
2025-09-23 08:21:17,424 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 12/100 | Train Loss: 0.0162 | Val mean-roc_auc_score: 0.8042
|
22 |
+
2025-09-23 08:22:31,117 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 13/100 | Train Loss: 0.0251 | Val mean-roc_auc_score: 0.7946
|
23 |
+
2025-09-23 08:23:45,019 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 14/100 | Train Loss: 0.0190 | Val mean-roc_auc_score: 0.7948
|
24 |
+
2025-09-23 08:24:58,791 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 15/100 | Train Loss: 0.0022 | Val mean-roc_auc_score: 0.7945
|
25 |
+
2025-09-23 08:26:12,469 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 16/100 | Train Loss: 0.0171 | Val mean-roc_auc_score: 0.7931
|
26 |
+
2025-09-23 08:27:25,746 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 17/100 | Train Loss: 0.0110 | Val mean-roc_auc_score: 0.7893
|
27 |
+
2025-09-23 08:28:38,584 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 18/100 | Train Loss: 0.0161 | Val mean-roc_auc_score: 0.7844
|
28 |
+
2025-09-23 08:29:52,657 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 19/100 | Train Loss: 0.0059 | Val mean-roc_auc_score: 0.7902
|
29 |
+
2025-09-23 08:31:05,782 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 20/100 | Train Loss: 0.0148 | Val mean-roc_auc_score: 0.7958
|
30 |
+
2025-09-23 08:32:19,477 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 21/100 | Train Loss: 0.0059 | Val mean-roc_auc_score: 0.7979
|
31 |
+
2025-09-23 08:33:32,135 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 22/100 | Train Loss: 0.0099 | Val mean-roc_auc_score: 0.7920
|
32 |
+
2025-09-23 08:34:44,973 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 23/100 | Train Loss: 0.0107 | Val mean-roc_auc_score: 0.7915
|
33 |
+
2025-09-23 08:35:57,554 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 24/100 | Train Loss: 0.0077 | Val mean-roc_auc_score: 0.7862
|
34 |
+
2025-09-23 08:37:10,333 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 25/100 | Train Loss: 0.0083 | Val mean-roc_auc_score: 0.7886
|
35 |
+
2025-09-23 08:38:22,885 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 26/100 | Train Loss: 0.0134 | Val mean-roc_auc_score: 0.7836
|
36 |
+
2025-09-23 08:39:36,349 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 27/100 | Train Loss: 0.0122 | Val mean-roc_auc_score: 0.7885
|
37 |
+
2025-09-23 08:40:48,966 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 28/100 | Train Loss: 0.0078 | Val mean-roc_auc_score: 0.7798
|
38 |
+
2025-09-23 08:42:01,856 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 29/100 | Train Loss: 0.0046 | Val mean-roc_auc_score: 0.7756
|
39 |
+
2025-09-23 08:43:14,899 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 30/100 | Train Loss: 0.0050 | Val mean-roc_auc_score: 0.7765
|
40 |
+
2025-09-23 08:44:27,453 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 31/100 | Train Loss: 0.0066 | Val mean-roc_auc_score: 0.7860
|
41 |
+
2025-09-23 08:45:40,564 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 32/100 | Train Loss: 0.0075 | Val mean-roc_auc_score: 0.7841
|
42 |
+
2025-09-23 08:46:53,049 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 33/100 | Train Loss: 0.0050 | Val mean-roc_auc_score: 0.7857
|
43 |
+
2025-09-23 08:48:06,952 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 34/100 | Train Loss: 0.0039 | Val mean-roc_auc_score: 0.7752
|
44 |
+
2025-09-23 08:49:20,876 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 35/100 | Train Loss: 0.0042 | Val mean-roc_auc_score: 0.7814
|
45 |
+
2025-09-23 08:50:35,802 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 36/100 | Train Loss: 0.0045 | Val mean-roc_auc_score: 0.7820
|
46 |
+
2025-09-23 08:51:51,459 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 37/100 | Train Loss: 0.0067 | Val mean-roc_auc_score: 0.7778
|
47 |
+
2025-09-23 08:53:06,124 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 38/100 | Train Loss: 0.0021 | Val mean-roc_auc_score: 0.7826
|
48 |
+
2025-09-23 08:54:21,146 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 39/100 | Train Loss: 0.0080 | Val mean-roc_auc_score: 0.7857
|
49 |
+
2025-09-23 08:55:36,035 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 40/100 | Train Loss: 0.0045 | Val mean-roc_auc_score: 0.7896
|
50 |
+
2025-09-23 08:56:50,865 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 41/100 | Train Loss: 0.0055 | Val mean-roc_auc_score: 0.7875
|
51 |
+
2025-09-23 08:58:05,708 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 42/100 | Train Loss: 0.0067 | Val mean-roc_auc_score: 0.7880
|
52 |
+
2025-09-23 08:59:20,587 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 43/100 | Train Loss: 0.0078 | Val mean-roc_auc_score: 0.7920
|
53 |
+
2025-09-23 09:00:34,604 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 44/100 | Train Loss: 0.0045 | Val mean-roc_auc_score: 0.7912
|
54 |
+
2025-09-23 09:01:48,596 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 45/100 | Train Loss: 0.0072 | Val mean-roc_auc_score: 0.7890
|
55 |
+
2025-09-23 09:03:02,500 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 46/100 | Train Loss: 0.0028 | Val mean-roc_auc_score: 0.7819
|
56 |
+
2025-09-23 09:04:15,681 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 47/100 | Train Loss: 0.0046 | Val mean-roc_auc_score: 0.7835
|
57 |
+
2025-09-23 09:05:28,883 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 48/100 | Train Loss: 0.0027 | Val mean-roc_auc_score: 0.7833
|
58 |
+
2025-09-23 09:06:43,304 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 49/100 | Train Loss: 0.0038 | Val mean-roc_auc_score: 0.7863
|
59 |
+
2025-09-23 09:07:56,868 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 50/100 | Train Loss: 0.0029 | Val mean-roc_auc_score: 0.7858
|
60 |
+
2025-09-23 09:09:11,289 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 51/100 | Train Loss: 0.0051 | Val mean-roc_auc_score: 0.7912
|
61 |
+
2025-09-23 09:10:26,154 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 52/100 | Train Loss: 0.0111 | Val mean-roc_auc_score: 0.7910
|
62 |
+
2025-09-23 09:11:39,820 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 53/100 | Train Loss: 0.0025 | Val mean-roc_auc_score: 0.7897
|
63 |
+
2025-09-23 09:12:54,479 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 54/100 | Train Loss: 0.0052 | Val mean-roc_auc_score: 0.7885
|
64 |
+
2025-09-23 09:14:08,919 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 55/100 | Train Loss: 0.0044 | Val mean-roc_auc_score: 0.7882
|
65 |
+
2025-09-23 09:15:22,546 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 56/100 | Train Loss: 0.0068 | Val mean-roc_auc_score: 0.7907
|
66 |
+
2025-09-23 09:16:35,074 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 57/100 | Train Loss: 0.0038 | Val mean-roc_auc_score: 0.7868
|
67 |
+
2025-09-23 09:17:46,438 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 58/100 | Train Loss: 0.0038 | Val mean-roc_auc_score: 0.7907
|
68 |
+
2025-09-23 09:18:59,493 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 59/100 | Train Loss: 0.0050 | Val mean-roc_auc_score: 0.7885
|
69 |
+
2025-09-23 09:20:12,125 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 60/100 | Train Loss: 0.0024 | Val mean-roc_auc_score: 0.7910
|
70 |
+
2025-09-23 09:21:25,330 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 61/100 | Train Loss: 0.0039 | Val mean-roc_auc_score: 0.7904
|
71 |
+
2025-09-23 09:22:37,958 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 62/100 | Train Loss: 0.0036 | Val mean-roc_auc_score: 0.7827
|
72 |
+
2025-09-23 09:23:50,665 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 63/100 | Train Loss: 0.0000 | Val mean-roc_auc_score: 0.7817
|
73 |
+
2025-09-23 09:25:03,375 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 64/100 | Train Loss: 0.0014 | Val mean-roc_auc_score: 0.7827
|
74 |
+
2025-09-23 09:26:15,937 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 65/100 | Train Loss: 0.0039 | Val mean-roc_auc_score: 0.7849
|
75 |
+
2025-09-23 09:27:28,768 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 66/100 | Train Loss: 0.0029 | Val mean-roc_auc_score: 0.7810
|
76 |
+
2025-09-23 09:28:41,938 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 67/100 | Train Loss: 0.0037 | Val mean-roc_auc_score: 0.7822
|
77 |
+
2025-09-23 09:29:54,125 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 68/100 | Train Loss: 0.0017 | Val mean-roc_auc_score: 0.7821
|
78 |
+
2025-09-23 09:31:07,101 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 69/100 | Train Loss: 0.0032 | Val mean-roc_auc_score: 0.7836
|
79 |
+
2025-09-23 09:32:20,179 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 70/100 | Train Loss: 0.0031 | Val mean-roc_auc_score: 0.7827
|
80 |
+
2025-09-23 09:33:32,657 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 71/100 | Train Loss: 0.0076 | Val mean-roc_auc_score: 0.7815
|
81 |
+
2025-09-23 09:34:45,805 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 72/100 | Train Loss: 0.0042 | Val mean-roc_auc_score: 0.7819
|
82 |
+
2025-09-23 09:35:58,275 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 73/100 | Train Loss: 0.0033 | Val mean-roc_auc_score: 0.7774
|
83 |
+
2025-09-23 09:37:10,851 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 74/100 | Train Loss: 0.0044 | Val mean-roc_auc_score: 0.7828
|
84 |
+
2025-09-23 09:38:24,260 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 75/100 | Train Loss: 0.0024 | Val mean-roc_auc_score: 0.7860
|
85 |
+
2025-09-23 09:39:37,551 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 76/100 | Train Loss: 0.0058 | Val mean-roc_auc_score: 0.7839
|
86 |
+
2025-09-23 09:40:50,045 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 77/100 | Train Loss: 0.0043 | Val mean-roc_auc_score: 0.7831
|
87 |
+
2025-09-23 09:42:03,003 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 78/100 | Train Loss: 0.0004 | Val mean-roc_auc_score: 0.7831
|
88 |
+
2025-09-23 09:43:16,919 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 79/100 | Train Loss: 0.0036 | Val mean-roc_auc_score: 0.7830
|
89 |
+
2025-09-23 09:44:30,425 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 80/100 | Train Loss: 0.0041 | Val mean-roc_auc_score: 0.7814
|
90 |
+
2025-09-23 09:45:44,991 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 81/100 | Train Loss: 0.0022 | Val mean-roc_auc_score: 0.7850
|
91 |
+
2025-09-23 09:46:59,353 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 82/100 | Train Loss: 0.0010 | Val mean-roc_auc_score: 0.7814
|
92 |
+
2025-09-23 09:48:13,654 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 83/100 | Train Loss: 0.0028 | Val mean-roc_auc_score: 0.7818
|
93 |
+
2025-09-23 09:49:27,841 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 84/100 | Train Loss: 0.0028 | Val mean-roc_auc_score: 0.7829
|
94 |
+
2025-09-23 09:50:43,570 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 85/100 | Train Loss: 0.0044 | Val mean-roc_auc_score: 0.7823
|
95 |
+
2025-09-23 09:51:59,836 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 86/100 | Train Loss: 0.0028 | Val mean-roc_auc_score: 0.7817
|
96 |
+
2025-09-23 09:53:15,753 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 87/100 | Train Loss: 0.0025 | Val mean-roc_auc_score: 0.7848
|
97 |
+
2025-09-23 09:54:31,231 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 88/100 | Train Loss: 0.0042 | Val mean-roc_auc_score: 0.7845
|
98 |
+
2025-09-23 09:55:47,083 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 89/100 | Train Loss: 0.0006 | Val mean-roc_auc_score: 0.7839
|
99 |
+
2025-09-23 09:57:02,251 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 90/100 | Train Loss: 0.0059 | Val mean-roc_auc_score: 0.7838
|
100 |
+
2025-09-23 09:58:18,052 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 91/100 | Train Loss: 0.0044 | Val mean-roc_auc_score: 0.7847
|
101 |
+
2025-09-23 09:59:33,637 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 92/100 | Train Loss: 0.0025 | Val mean-roc_auc_score: 0.7830
|
102 |
+
2025-09-23 10:00:49,442 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 93/100 | Train Loss: 0.0030 | Val mean-roc_auc_score: 0.7865
|
103 |
+
2025-09-23 10:02:04,608 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 94/100 | Train Loss: 0.0043 | Val mean-roc_auc_score: 0.7840
|
104 |
+
2025-09-23 10:03:20,067 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 95/100 | Train Loss: 0.0047 | Val mean-roc_auc_score: 0.7818
|
105 |
+
2025-09-23 10:04:36,110 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 96/100 | Train Loss: 0.0051 | Val mean-roc_auc_score: 0.7841
|
106 |
+
2025-09-23 10:05:52,428 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 97/100 | Train Loss: 0.0078 | Val mean-roc_auc_score: 0.7815
|
107 |
+
2025-09-23 10:07:08,016 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 98/100 | Train Loss: 0.0038 | Val mean-roc_auc_score: 0.7849
|
108 |
+
2025-09-23 10:08:23,494 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 99/100 | Train Loss: 0.0035 | Val mean-roc_auc_score: 0.7848
|
109 |
+
2025-09-23 10:09:38,876 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 100/100 | Train Loss: 0.0033 | Val mean-roc_auc_score: 0.7838
|
110 |
+
2025-09-23 10:09:43,098 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Test mean-roc_auc_score: 0.7774
|
111 |
+
2025-09-23 10:09:43,569 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Starting triplicate run 2 for dataset hiv at 2025-09-23_10-09-43
|
112 |
+
2025-09-23 10:10:54,225 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 1/100 | Train Loss: 0.1140 | Val mean-roc_auc_score: 0.8210
|
113 |
+
2025-09-23 10:10:54,225 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Global step of best model: 1027
|
114 |
+
2025-09-23 10:10:54,762 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Best model saved at epoch 1 with val mean-roc_auc_score: 0.8210
|
115 |
+
2025-09-23 10:12:10,053 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 2/100 | Train Loss: 0.1082 | Val mean-roc_auc_score: 0.8269
|
116 |
+
2025-09-23 10:12:10,205 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Global step of best model: 2054
|
117 |
+
2025-09-23 10:12:10,752 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Best model saved at epoch 2 with val mean-roc_auc_score: 0.8269
|
118 |
+
2025-09-23 10:13:25,445 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 3/100 | Train Loss: 0.0718 | Val mean-roc_auc_score: 0.8212
|
119 |
+
2025-09-23 10:14:40,850 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 4/100 | Train Loss: 0.1260 | Val mean-roc_auc_score: 0.8173
|
120 |
+
2025-09-23 10:15:56,990 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 5/100 | Train Loss: 0.0737 | Val mean-roc_auc_score: 0.7903
|
121 |
+
2025-09-23 10:17:12,673 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 6/100 | Train Loss: 0.0514 | Val mean-roc_auc_score: 0.8364
|
122 |
+
2025-09-23 10:17:13,292 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Global step of best model: 6162
|
123 |
+
2025-09-23 10:17:13,824 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Best model saved at epoch 6 with val mean-roc_auc_score: 0.8364
|
124 |
+
2025-09-23 10:18:29,400 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 7/100 | Train Loss: 0.0393 | Val mean-roc_auc_score: 0.8227
|
125 |
+
2025-09-23 10:19:44,404 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 8/100 | Train Loss: 0.0312 | Val mean-roc_auc_score: 0.8341
|
126 |
+
2025-09-23 10:20:59,917 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 9/100 | Train Loss: 0.0236 | Val mean-roc_auc_score: 0.8164
|
127 |
+
2025-09-23 10:22:15,432 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 10/100 | Train Loss: 0.0218 | Val mean-roc_auc_score: 0.7967
|
128 |
+
2025-09-23 10:23:31,001 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 11/100 | Train Loss: 0.0125 | Val mean-roc_auc_score: 0.8168
|
129 |
+
2025-09-23 10:24:32,588 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 12/100 | Train Loss: 0.0131 | Val mean-roc_auc_score: 0.8179
|
130 |
+
2025-09-23 10:25:33,509 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 13/100 | Train Loss: 0.0124 | Val mean-roc_auc_score: 0.7880
|
131 |
+
2025-09-23 10:26:34,334 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 14/100 | Train Loss: 0.0141 | Val mean-roc_auc_score: 0.8039
|
132 |
+
2025-09-23 10:27:35,037 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 15/100 | Train Loss: 0.0077 | Val mean-roc_auc_score: 0.8033
|
133 |
+
2025-09-23 10:28:35,959 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 16/100 | Train Loss: 0.0067 | Val mean-roc_auc_score: 0.8028
|
134 |
+
2025-09-23 10:29:37,136 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 17/100 | Train Loss: 0.0130 | Val mean-roc_auc_score: 0.8130
|
135 |
+
2025-09-23 10:30:37,733 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 18/100 | Train Loss: 0.0111 | Val mean-roc_auc_score: 0.7994
|
136 |
+
2025-09-23 10:31:38,651 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 19/100 | Train Loss: 0.0049 | Val mean-roc_auc_score: 0.7974
|
137 |
+
2025-09-23 10:32:39,272 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 20/100 | Train Loss: 0.0049 | Val mean-roc_auc_score: 0.7952
|
138 |
+
2025-09-23 10:33:40,323 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 21/100 | Train Loss: 0.0063 | Val mean-roc_auc_score: 0.7883
|
139 |
+
2025-09-23 10:34:41,157 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 22/100 | Train Loss: 0.0036 | Val mean-roc_auc_score: 0.7840
|
140 |
+
2025-09-23 10:35:41,967 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 23/100 | Train Loss: 0.0042 | Val mean-roc_auc_score: 0.7906
|
141 |
+
2025-09-23 10:36:42,841 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 24/100 | Train Loss: 0.0065 | Val mean-roc_auc_score: 0.7924
|
142 |
+
2025-09-23 10:37:43,445 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 25/100 | Train Loss: 0.0050 | Val mean-roc_auc_score: 0.7887
|
143 |
+
2025-09-23 10:38:44,502 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 26/100 | Train Loss: 0.0009 | Val mean-roc_auc_score: 0.7853
|
144 |
+
2025-09-23 10:39:45,808 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 27/100 | Train Loss: 0.0086 | Val mean-roc_auc_score: 0.7843
|
145 |
+
2025-09-23 10:40:46,231 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 28/100 | Train Loss: 0.0070 | Val mean-roc_auc_score: 0.7879
|
146 |
+
2025-09-23 10:41:47,290 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 29/100 | Train Loss: 0.0028 | Val mean-roc_auc_score: 0.7830
|
147 |
+
2025-09-23 10:42:46,679 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 30/100 | Train Loss: 0.0043 | Val mean-roc_auc_score: 0.7762
|
148 |
+
2025-09-23 10:43:46,163 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 31/100 | Train Loss: 0.0098 | Val mean-roc_auc_score: 0.8043
|
149 |
+
2025-09-23 10:44:45,523 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 32/100 | Train Loss: 0.0037 | Val mean-roc_auc_score: 0.7976
|
150 |
+
2025-09-23 10:45:44,315 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 33/100 | Train Loss: 0.0050 | Val mean-roc_auc_score: 0.7976
|
151 |
+
2025-09-23 10:46:43,673 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 34/100 | Train Loss: 0.0043 | Val mean-roc_auc_score: 0.7936
|
152 |
+
2025-09-23 10:47:42,962 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 35/100 | Train Loss: 0.0046 | Val mean-roc_auc_score: 0.8007
|
153 |
+
2025-09-23 10:48:42,302 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 36/100 | Train Loss: 0.0059 | Val mean-roc_auc_score: 0.7968
|
154 |
+
2025-09-23 10:49:42,835 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 37/100 | Train Loss: 0.0036 | Val mean-roc_auc_score: 0.7930
|
155 |
+
2025-09-23 10:50:41,958 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 38/100 | Train Loss: 0.0063 | Val mean-roc_auc_score: 0.7859
|
156 |
+
2025-09-23 10:51:41,563 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 39/100 | Train Loss: 0.0019 | Val mean-roc_auc_score: 0.7934
|
157 |
+
2025-09-23 10:52:40,402 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 40/100 | Train Loss: 0.0038 | Val mean-roc_auc_score: 0.7868
|
158 |
+
2025-09-23 10:53:39,641 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 41/100 | Train Loss: 0.0001 | Val mean-roc_auc_score: 0.7888
|
159 |
+
2025-09-23 10:54:39,022 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 42/100 | Train Loss: 0.0020 | Val mean-roc_auc_score: 0.7878
|
160 |
+
2025-09-23 10:55:37,904 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 43/100 | Train Loss: 0.0043 | Val mean-roc_auc_score: 0.7847
|
161 |
+
2025-09-23 10:56:37,440 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 44/100 | Train Loss: 0.0053 | Val mean-roc_auc_score: 0.7874
|
162 |
+
2025-09-23 10:57:36,290 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 45/100 | Train Loss: 0.0016 | Val mean-roc_auc_score: 0.7911
|
163 |
+
2025-09-23 10:58:35,876 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 46/100 | Train Loss: 0.0051 | Val mean-roc_auc_score: 0.7825
|
164 |
+
2025-09-23 10:59:35,255 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 47/100 | Train Loss: 0.0039 | Val mean-roc_auc_score: 0.7795
|
165 |
+
2025-09-23 11:00:34,245 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 48/100 | Train Loss: 0.0050 | Val mean-roc_auc_score: 0.7878
|
166 |
+
2025-09-23 11:01:33,449 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 49/100 | Train Loss: 0.0057 | Val mean-roc_auc_score: 0.7816
|
167 |
+
2025-09-23 11:02:32,336 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 50/100 | Train Loss: 0.0046 | Val mean-roc_auc_score: 0.7785
|
168 |
+
2025-09-23 11:03:31,588 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 51/100 | Train Loss: 0.0020 | Val mean-roc_auc_score: 0.7855
|
169 |
+
2025-09-23 11:04:30,863 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 52/100 | Train Loss: 0.0038 | Val mean-roc_auc_score: 0.7794
|
170 |
+
2025-09-23 11:05:29,973 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 53/100 | Train Loss: 0.0027 | Val mean-roc_auc_score: 0.7831
|
171 |
+
2025-09-23 11:06:29,443 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 54/100 | Train Loss: 0.0043 | Val mean-roc_auc_score: 0.7874
|
172 |
+
2025-09-23 11:07:28,244 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 55/100 | Train Loss: 0.0032 | Val mean-roc_auc_score: 0.7875
|
173 |
+
2025-09-23 11:08:27,988 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 56/100 | Train Loss: 0.0009 | Val mean-roc_auc_score: 0.7865
|
174 |
+
2025-09-23 11:09:27,254 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 57/100 | Train Loss: 0.0020 | Val mean-roc_auc_score: 0.7826
|
175 |
+
2025-09-23 11:10:25,970 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 58/100 | Train Loss: 0.0030 | Val mean-roc_auc_score: 0.7846
|
176 |
+
2025-09-23 11:11:25,234 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 59/100 | Train Loss: 0.0046 | Val mean-roc_auc_score: 0.7885
|
177 |
+
2025-09-23 11:12:24,501 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 60/100 | Train Loss: 0.0027 | Val mean-roc_auc_score: 0.7828
|
178 |
+
2025-09-23 11:13:23,738 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 61/100 | Train Loss: 0.0014 | Val mean-roc_auc_score: 0.7838
|
179 |
+
2025-09-23 11:14:23,261 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 62/100 | Train Loss: 0.0018 | Val mean-roc_auc_score: 0.7834
|
180 |
+
2025-09-23 11:15:22,184 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 63/100 | Train Loss: 0.0000 | Val mean-roc_auc_score: 0.7830
|
181 |
+
2025-09-23 11:16:21,599 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 64/100 | Train Loss: 0.0011 | Val mean-roc_auc_score: 0.7886
|
182 |
+
2025-09-23 11:17:20,345 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 65/100 | Train Loss: 0.0020 | Val mean-roc_auc_score: 0.7852
|
183 |
+
2025-09-23 11:18:19,611 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 66/100 | Train Loss: 0.0022 | Val mean-roc_auc_score: 0.7810
|
184 |
+
2025-09-23 11:19:19,249 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 67/100 | Train Loss: 0.0054 | Val mean-roc_auc_score: 0.8145
|
185 |
+
2025-09-23 11:20:18,170 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 68/100 | Train Loss: 0.0057 | Val mean-roc_auc_score: 0.8106
|
186 |
+
2025-09-23 11:21:17,433 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 69/100 | Train Loss: 0.0026 | Val mean-roc_auc_score: 0.8082
|
187 |
+
2025-09-23 11:22:16,198 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 70/100 | Train Loss: 0.0056 | Val mean-roc_auc_score: 0.8069
|
188 |
+
2025-09-23 11:23:15,610 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 71/100 | Train Loss: 0.0038 | Val mean-roc_auc_score: 0.8093
|
189 |
+
2025-09-23 11:24:14,925 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 72/100 | Train Loss: 0.0029 | Val mean-roc_auc_score: 0.8080
|
190 |
+
2025-09-23 11:25:13,851 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 73/100 | Train Loss: 0.0029 | Val mean-roc_auc_score: 0.8048
|
191 |
+
2025-09-23 11:26:13,110 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 74/100 | Train Loss: 0.0027 | Val mean-roc_auc_score: 0.8063
|
192 |
+
2025-09-23 11:27:12,493 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 75/100 | Train Loss: 0.0033 | Val mean-roc_auc_score: 0.8050
|
193 |
+
2025-09-23 11:28:11,807 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 76/100 | Train Loss: 0.0025 | Val mean-roc_auc_score: 0.8070
|
194 |
+
2025-09-23 11:29:11,042 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 77/100 | Train Loss: 0.0029 | Val mean-roc_auc_score: 0.8051
|
195 |
+
2025-09-23 11:30:10,240 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 78/100 | Train Loss: 0.0005 | Val mean-roc_auc_score: 0.8044
|
196 |
+
2025-09-23 11:31:09,583 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 79/100 | Train Loss: 0.0044 | Val mean-roc_auc_score: 0.8053
|
197 |
+
2025-09-23 11:32:08,600 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 80/100 | Train Loss: 0.0049 | Val mean-roc_auc_score: 0.8049
|
198 |
+
2025-09-23 11:33:08,105 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 81/100 | Train Loss: 0.0027 | Val mean-roc_auc_score: 0.7982
|
199 |
+
2025-09-23 11:34:07,751 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 82/100 | Train Loss: 0.0045 | Val mean-roc_auc_score: 0.7995
|
200 |
+
2025-09-23 11:35:07,016 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 83/100 | Train Loss: 0.0012 | Val mean-roc_auc_score: 0.8003
|
201 |
+
2025-09-23 11:36:06,561 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 84/100 | Train Loss: 0.0024 | Val mean-roc_auc_score: 0.7989
|
202 |
+
2025-09-23 11:37:05,480 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 85/100 | Train Loss: 0.0043 | Val mean-roc_auc_score: 0.7988
|
203 |
+
2025-09-23 11:38:04,969 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 86/100 | Train Loss: 0.0050 | Val mean-roc_auc_score: 0.8001
|
204 |
+
2025-09-23 11:39:04,522 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 87/100 | Train Loss: 0.0036 | Val mean-roc_auc_score: 0.7997
|
205 |
+
2025-09-23 11:40:03,200 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 88/100 | Train Loss: 0.0024 | Val mean-roc_auc_score: 0.8005
|
206 |
+
2025-09-23 11:41:02,642 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 89/100 | Train Loss: 0.0001 | Val mean-roc_auc_score: 0.8011
|
207 |
+
2025-09-23 11:42:01,665 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 90/100 | Train Loss: 0.0060 | Val mean-roc_auc_score: 0.8012
|
208 |
+
2025-09-23 11:43:01,092 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 91/100 | Train Loss: 0.0022 | Val mean-roc_auc_score: 0.8026
|
209 |
+
2025-09-23 11:44:00,256 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 92/100 | Train Loss: 0.0036 | Val mean-roc_auc_score: 0.8019
|
210 |
+
2025-09-23 11:44:59,226 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 93/100 | Train Loss: 0.0028 | Val mean-roc_auc_score: 0.8047
|
211 |
+
2025-09-23 11:45:58,925 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 94/100 | Train Loss: 0.0045 | Val mean-roc_auc_score: 0.8042
|
212 |
+
2025-09-23 11:46:57,778 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 95/100 | Train Loss: 0.0047 | Val mean-roc_auc_score: 0.8022
|
213 |
+
2025-09-23 11:47:57,286 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 96/100 | Train Loss: 0.0032 | Val mean-roc_auc_score: 0.8020
|
214 |
+
2025-09-23 11:48:56,642 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 97/100 | Train Loss: 0.0023 | Val mean-roc_auc_score: 0.8020
|
215 |
+
2025-09-23 11:49:55,606 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 98/100 | Train Loss: 0.0024 | Val mean-roc_auc_score: 0.8030
|
216 |
+
2025-09-23 11:50:54,975 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 99/100 | Train Loss: 0.0034 | Val mean-roc_auc_score: 0.7992
|
217 |
+
2025-09-23 11:51:53,647 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 100/100 | Train Loss: 0.0038 | Val mean-roc_auc_score: 0.7973
|
218 |
+
2025-09-23 11:51:56,852 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Test mean-roc_auc_score: 0.7743
|
219 |
+
2025-09-23 11:51:57,489 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Starting triplicate run 3 for dataset hiv at 2025-09-23_11-51-57
|
220 |
+
2025-09-23 11:52:53,214 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 1/100 | Train Loss: 0.1059 | Val mean-roc_auc_score: 0.7784
|
221 |
+
2025-09-23 11:52:53,214 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Global step of best model: 1027
|
222 |
+
2025-09-23 11:52:53,726 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Best model saved at epoch 1 with val mean-roc_auc_score: 0.7784
|
223 |
+
2025-09-23 11:53:53,334 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 2/100 | Train Loss: 0.1019 | Val mean-roc_auc_score: 0.8107
|
224 |
+
2025-09-23 11:53:53,465 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Global step of best model: 2054
|
225 |
+
2025-09-23 11:53:53,970 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Best model saved at epoch 2 with val mean-roc_auc_score: 0.8107
|
226 |
+
2025-09-23 11:54:52,522 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 3/100 | Train Loss: 0.1204 | Val mean-roc_auc_score: 0.8196
|
227 |
+
2025-09-23 11:54:52,655 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Global step of best model: 3081
|
228 |
+
2025-09-23 11:54:53,162 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Best model saved at epoch 3 with val mean-roc_auc_score: 0.8196
|
229 |
+
2025-09-23 11:55:51,995 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 4/100 | Train Loss: 0.0349 | Val mean-roc_auc_score: 0.8314
|
230 |
+
2025-09-23 11:55:52,127 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Global step of best model: 4108
|
231 |
+
2025-09-23 11:55:52,633 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Best model saved at epoch 4 with val mean-roc_auc_score: 0.8314
|
232 |
+
2025-09-23 11:56:52,122 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 5/100 | Train Loss: 0.0621 | Val mean-roc_auc_score: 0.8265
|
233 |
+
2025-09-23 11:57:51,064 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 6/100 | Train Loss: 0.0418 | Val mean-roc_auc_score: 0.8149
|
234 |
+
2025-09-23 11:58:50,718 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 7/100 | Train Loss: 0.0471 | Val mean-roc_auc_score: 0.8136
|
235 |
+
2025-09-23 11:59:49,465 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 8/100 | Train Loss: 0.0435 | Val mean-roc_auc_score: 0.7832
|
236 |
+
2025-09-23 12:00:48,992 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 9/100 | Train Loss: 0.0278 | Val mean-roc_auc_score: 0.7980
|
237 |
+
2025-09-23 12:01:48,036 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 10/100 | Train Loss: 0.0250 | Val mean-roc_auc_score: 0.7917
|
238 |
+
2025-09-23 12:02:47,523 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 11/100 | Train Loss: 0.0206 | Val mean-roc_auc_score: 0.7479
|
239 |
+
2025-09-23 12:03:47,066 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 12/100 | Train Loss: 0.0122 | Val mean-roc_auc_score: 0.7810
|
240 |
+
2025-09-23 12:04:45,812 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 13/100 | Train Loss: 0.0162 | Val mean-roc_auc_score: 0.7801
|
241 |
+
2025-09-23 12:05:45,442 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 14/100 | Train Loss: 0.0095 | Val mean-roc_auc_score: 0.7718
|
242 |
+
2025-09-23 12:06:44,440 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 15/100 | Train Loss: 0.0037 | Val mean-roc_auc_score: 0.7668
|
243 |
+
2025-09-23 12:07:43,696 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 16/100 | Train Loss: 0.0105 | Val mean-roc_auc_score: 0.7686
|
244 |
+
2025-09-23 12:08:43,368 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 17/100 | Train Loss: 0.0107 | Val mean-roc_auc_score: 0.7622
|
245 |
+
2025-09-23 12:09:42,477 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 18/100 | Train Loss: 0.0047 | Val mean-roc_auc_score: 0.7601
|
246 |
+
2025-09-23 12:10:41,936 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 19/100 | Train Loss: 0.0079 | Val mean-roc_auc_score: 0.7549
|
247 |
+
2025-09-23 12:11:41,124 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 20/100 | Train Loss: 0.0051 | Val mean-roc_auc_score: 0.7605
|
248 |
+
2025-09-23 12:12:40,546 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 21/100 | Train Loss: 0.0050 | Val mean-roc_auc_score: 0.7542
|
249 |
+
2025-09-23 12:13:40,135 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 22/100 | Train Loss: 0.0077 | Val mean-roc_auc_score: 0.7655
|
250 |
+
2025-09-23 12:14:39,566 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 23/100 | Train Loss: 0.0086 | Val mean-roc_auc_score: 0.7665
|
251 |
+
2025-09-23 12:15:38,822 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 24/100 | Train Loss: 0.0118 | Val mean-roc_auc_score: 0.7630
|
252 |
+
2025-09-23 12:16:37,945 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 25/100 | Train Loss: 0.0052 | Val mean-roc_auc_score: 0.7621
|
253 |
+
2025-09-23 12:17:37,439 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 26/100 | Train Loss: 0.0049 | Val mean-roc_auc_score: 0.7474
|
254 |
+
2025-09-23 12:18:37,168 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 27/100 | Train Loss: 0.0028 | Val mean-roc_auc_score: 0.7689
|
255 |
+
2025-09-23 12:19:36,313 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 28/100 | Train Loss: 0.0050 | Val mean-roc_auc_score: 0.7589
|
256 |
+
2025-09-23 12:20:35,958 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 29/100 | Train Loss: 0.0064 | Val mean-roc_auc_score: 0.7614
|
257 |
+
2025-09-23 12:21:35,209 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 30/100 | Train Loss: 0.0037 | Val mean-roc_auc_score: 0.7563
|
258 |
+
2025-09-23 12:22:35,376 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 31/100 | Train Loss: 0.0034 | Val mean-roc_auc_score: 0.7601
|
259 |
+
2025-09-23 12:23:35,339 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 32/100 | Train Loss: 0.0036 | Val mean-roc_auc_score: 0.7590
|
260 |
+
2025-09-23 12:24:34,834 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 33/100 | Train Loss: 0.0056 | Val mean-roc_auc_score: 0.7535
|
261 |
+
2025-09-23 12:25:34,225 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 34/100 | Train Loss: 0.0055 | Val mean-roc_auc_score: 0.7549
|
262 |
+
2025-09-23 12:26:33,546 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 35/100 | Train Loss: 0.0057 | Val mean-roc_auc_score: 0.7521
|
263 |
+
2025-09-23 12:27:33,503 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 36/100 | Train Loss: 0.0046 | Val mean-roc_auc_score: 0.7523
|
264 |
+
2025-09-23 12:28:33,941 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 37/100 | Train Loss: 0.0038 | Val mean-roc_auc_score: 0.7514
|
265 |
+
2025-09-23 12:29:33,050 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 38/100 | Train Loss: 0.0069 | Val mean-roc_auc_score: 0.7542
|
266 |
+
2025-09-23 12:30:32,795 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 39/100 | Train Loss: 0.0059 | Val mean-roc_auc_score: 0.7546
|
267 |
+
2025-09-23 12:31:31,561 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 40/100 | Train Loss: 0.0042 | Val mean-roc_auc_score: 0.7406
|
268 |
+
2025-09-23 12:32:31,063 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 41/100 | Train Loss: 0.0015 | Val mean-roc_auc_score: 0.7531
|
269 |
+
2025-09-23 12:33:30,731 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 42/100 | Train Loss: 0.0043 | Val mean-roc_auc_score: 0.7578
|
270 |
+
2025-09-23 12:34:29,636 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 43/100 | Train Loss: 0.0057 | Val mean-roc_auc_score: 0.7580
|
271 |
+
2025-09-23 12:35:29,538 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 44/100 | Train Loss: 0.0065 | Val mean-roc_auc_score: 0.7634
|
272 |
+
2025-09-23 12:36:28,367 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 45/100 | Train Loss: 0.0042 | Val mean-roc_auc_score: 0.7660
|
273 |
+
2025-09-23 12:37:28,218 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 46/100 | Train Loss: 0.0142 | Val mean-roc_auc_score: 0.7700
|
274 |
+
2025-09-23 12:38:27,989 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 47/100 | Train Loss: 0.0049 | Val mean-roc_auc_score: 0.7620
|
275 |
+
2025-09-23 12:39:26,950 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 48/100 | Train Loss: 0.0107 | Val mean-roc_auc_score: 0.7656
|
276 |
+
2025-09-23 12:40:26,487 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 49/100 | Train Loss: 0.0067 | Val mean-roc_auc_score: 0.7551
|
277 |
+
2025-09-23 12:41:25,615 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 50/100 | Train Loss: 0.0040 | Val mean-roc_auc_score: 0.7526
|
278 |
+
2025-09-23 12:42:25,483 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 51/100 | Train Loss: 0.0025 | Val mean-roc_auc_score: 0.7565
|
279 |
+
2025-09-23 12:43:25,192 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 52/100 | Train Loss: 0.0021 | Val mean-roc_auc_score: 0.7620
|
280 |
+
2025-09-23 12:44:24,037 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 53/100 | Train Loss: 0.0040 | Val mean-roc_auc_score: 0.7581
|
281 |
+
2025-09-23 12:45:23,983 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 54/100 | Train Loss: 0.0060 | Val mean-roc_auc_score: 0.7568
|
282 |
+
2025-09-23 12:46:22,860 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 55/100 | Train Loss: 0.0038 | Val mean-roc_auc_score: 0.7583
|
283 |
+
2025-09-23 12:47:22,385 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 56/100 | Train Loss: 0.0063 | Val mean-roc_auc_score: 0.7539
|
284 |
+
2025-09-23 12:48:21,708 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 57/100 | Train Loss: 0.0016 | Val mean-roc_auc_score: 0.7510
|
285 |
+
2025-09-23 12:49:20,530 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 58/100 | Train Loss: 0.0028 | Val mean-roc_auc_score: 0.7554
|
286 |
+
2025-09-23 12:50:20,580 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 59/100 | Train Loss: 0.0050 | Val mean-roc_auc_score: 0.7590
|
287 |
+
2025-09-23 12:51:19,705 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 60/100 | Train Loss: 0.0024 | Val mean-roc_auc_score: 0.7593
|
288 |
+
2025-09-23 12:52:19,252 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 61/100 | Train Loss: 0.0051 | Val mean-roc_auc_score: 0.7545
|
289 |
+
2025-09-23 12:53:19,066 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 62/100 | Train Loss: 0.0027 | Val mean-roc_auc_score: 0.7590
|
290 |
+
2025-09-23 12:54:18,038 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 63/100 | Train Loss: 0.0006 | Val mean-roc_auc_score: 0.7642
|
291 |
+
2025-09-23 12:55:17,535 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 64/100 | Train Loss: 0.0060 | Val mean-roc_auc_score: 0.7616
|
292 |
+
2025-09-23 12:56:16,852 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 65/100 | Train Loss: 0.0054 | Val mean-roc_auc_score: 0.7591
|
293 |
+
2025-09-23 12:57:16,560 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 66/100 | Train Loss: 0.0037 | Val mean-roc_auc_score: 0.7625
|
294 |
+
2025-09-23 12:58:16,234 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 67/100 | Train Loss: 0.0008 | Val mean-roc_auc_score: 0.7691
|
295 |
+
2025-09-23 12:59:14,993 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 68/100 | Train Loss: 0.0046 | Val mean-roc_auc_score: 0.7666
|
296 |
+
2025-09-23 13:00:14,220 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 69/100 | Train Loss: 0.0029 | Val mean-roc_auc_score: 0.7650
|
297 |
+
2025-09-23 13:01:13,010 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 70/100 | Train Loss: 0.0030 | Val mean-roc_auc_score: 0.7663
|
298 |
+
2025-09-23 13:02:12,751 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 71/100 | Train Loss: 0.0036 | Val mean-roc_auc_score: 0.7621
|
299 |
+
2025-09-23 13:03:12,517 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 72/100 | Train Loss: 0.0019 | Val mean-roc_auc_score: 0.7642
|
300 |
+
2025-09-23 13:04:11,551 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 73/100 | Train Loss: 0.0027 | Val mean-roc_auc_score: 0.7615
|
301 |
+
2025-09-23 13:05:11,398 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 74/100 | Train Loss: 0.0027 | Val mean-roc_auc_score: 0.7610
|
302 |
+
2025-09-23 13:06:11,380 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 75/100 | Train Loss: 0.0034 | Val mean-roc_auc_score: 0.7623
|
303 |
+
2025-09-23 13:07:11,174 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 76/100 | Train Loss: 0.0019 | Val mean-roc_auc_score: 0.7597
|
304 |
+
2025-09-23 13:08:10,784 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 77/100 | Train Loss: 0.0028 | Val mean-roc_auc_score: 0.7557
|
305 |
+
2025-09-23 13:09:09,835 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 78/100 | Train Loss: 0.0160 | Val mean-roc_auc_score: 0.7554
|
306 |
+
2025-09-23 13:10:09,527 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 79/100 | Train Loss: 0.0013 | Val mean-roc_auc_score: 0.7582
|
307 |
+
2025-09-23 13:11:08,796 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 80/100 | Train Loss: 0.0040 | Val mean-roc_auc_score: 0.7604
|
308 |
+
2025-09-23 13:12:08,298 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 81/100 | Train Loss: 0.0029 | Val mean-roc_auc_score: 0.7620
|
309 |
+
2025-09-23 13:13:08,041 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 82/100 | Train Loss: 0.0111 | Val mean-roc_auc_score: 0.7642
|
310 |
+
2025-09-23 13:14:07,076 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 83/100 | Train Loss: 0.0018 | Val mean-roc_auc_score: 0.7701
|
311 |
+
2025-09-23 13:15:06,865 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 84/100 | Train Loss: 0.0032 | Val mean-roc_auc_score: 0.7683
|
312 |
+
2025-09-23 13:16:05,769 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 85/100 | Train Loss: 0.0028 | Val mean-roc_auc_score: 0.7697
|
313 |
+
2025-09-23 13:17:05,685 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 86/100 | Train Loss: 0.0062 | Val mean-roc_auc_score: 0.7669
|
314 |
+
2025-09-23 13:18:05,487 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 87/100 | Train Loss: 0.0064 | Val mean-roc_auc_score: 0.7647
|
315 |
+
2025-09-23 13:19:04,461 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 88/100 | Train Loss: 0.0040 | Val mean-roc_auc_score: 0.7671
|
316 |
+
2025-09-23 13:20:04,403 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 89/100 | Train Loss: 0.0040 | Val mean-roc_auc_score: 0.7681
|
317 |
+
2025-09-23 13:21:03,098 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 90/100 | Train Loss: 0.0065 | Val mean-roc_auc_score: 0.7656
|
318 |
+
2025-09-23 13:22:02,851 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 91/100 | Train Loss: 0.0036 | Val mean-roc_auc_score: 0.7672
|
319 |
+
2025-09-23 13:23:02,671 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 92/100 | Train Loss: 0.0033 | Val mean-roc_auc_score: 0.7655
|
320 |
+
2025-09-23 13:24:01,989 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 93/100 | Train Loss: 0.0013 | Val mean-roc_auc_score: 0.7626
|
321 |
+
2025-09-23 13:25:01,663 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 94/100 | Train Loss: 0.0047 | Val mean-roc_auc_score: 0.7697
|
322 |
+
2025-09-23 13:26:00,752 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 95/100 | Train Loss: 0.0044 | Val mean-roc_auc_score: 0.7660
|
323 |
+
2025-09-23 13:27:00,481 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 96/100 | Train Loss: 0.0043 | Val mean-roc_auc_score: 0.7674
|
324 |
+
2025-09-23 13:28:00,107 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 97/100 | Train Loss: 0.0019 | Val mean-roc_auc_score: 0.7639
|
325 |
+
2025-09-23 13:28:59,292 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 98/100 | Train Loss: 0.0042 | Val mean-roc_auc_score: 0.7675
|
326 |
+
2025-09-23 13:29:59,005 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 99/100 | Train Loss: 0.0027 | Val mean-roc_auc_score: 0.7643
|
327 |
+
2025-09-23 13:30:58,161 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 100/100 | Train Loss: 0.0025 | Val mean-roc_auc_score: 0.7621
|
328 |
+
2025-09-23 13:31:01,432 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Test mean-roc_auc_score: 0.7692
|
329 |
+
2025-09-23 13:31:02,251 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Final Triplicate Test Results — Avg mean-roc_auc_score: 0.7737, Std Dev: 0.0034
|
logs_modchembert_classification_ModChemBERT-MLM-DAPT-TAFT-OPT/modchembert_deepchem_splits_run_sider_epochs100_batch_size32_20250923_034834.log
ADDED
@@ -0,0 +1,363 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
2025-09-23 03:48:34,409 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Running benchmark for dataset: sider
|
2 |
+
2025-09-23 03:48:34,409 - logs_modchembert_sider_epochs100_batch_size32 - INFO - dataset: sider, tasks: ['Hepatobiliary disorders', 'Metabolism and nutrition disorders', 'Product issues', 'Eye disorders', 'Investigations', 'Musculoskeletal and connective tissue disorders', 'Gastrointestinal disorders', 'Social circumstances', 'Immune system disorders', 'Reproductive system and breast disorders', 'Neoplasms benign, malignant and unspecified (incl cysts and polyps)', 'General disorders and administration site conditions', 'Endocrine disorders', 'Surgical and medical procedures', 'Vascular disorders', 'Blood and lymphatic system disorders', 'Skin and subcutaneous tissue disorders', 'Congenital, familial and genetic disorders', 'Infections and infestations', 'Respiratory, thoracic and mediastinal disorders', 'Psychiatric disorders', 'Renal and urinary disorders', 'Pregnancy, puerperium and perinatal conditions', 'Ear and labyrinth disorders', 'Cardiac disorders', 'Nervous system disorders', 'Injury, poisoning and procedural complications'], epochs: 100, learning rate: 3e-05
|
3 |
+
2025-09-23 03:48:34,422 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Starting triplicate run 1 for dataset sider at 2025-09-23_03-48-34
|
4 |
+
2025-09-23 03:48:38,036 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 1/100 | Train Loss: 0.5786 | Val mean-roc_auc_score: 0.5419
|
5 |
+
2025-09-23 03:48:38,036 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Global step of best model: 35
|
6 |
+
2025-09-23 03:48:38,577 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Best model saved at epoch 1 with val mean-roc_auc_score: 0.5419
|
7 |
+
2025-09-23 03:48:42,478 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 2/100 | Train Loss: 0.5143 | Val mean-roc_auc_score: 0.5483
|
8 |
+
2025-09-23 03:48:42,650 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Global step of best model: 70
|
9 |
+
2025-09-23 03:48:43,160 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Best model saved at epoch 2 with val mean-roc_auc_score: 0.5483
|
10 |
+
2025-09-23 03:48:47,145 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 3/100 | Train Loss: 0.5094 | Val mean-roc_auc_score: 0.5629
|
11 |
+
2025-09-23 03:48:47,318 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Global step of best model: 105
|
12 |
+
2025-09-23 03:48:47,824 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Best model saved at epoch 3 with val mean-roc_auc_score: 0.5629
|
13 |
+
2025-09-23 03:48:51,729 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 4/100 | Train Loss: 0.4964 | Val mean-roc_auc_score: 0.5782
|
14 |
+
2025-09-23 03:48:51,900 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Global step of best model: 140
|
15 |
+
2025-09-23 03:48:52,404 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Best model saved at epoch 4 with val mean-roc_auc_score: 0.5782
|
16 |
+
2025-09-23 03:48:56,297 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 5/100 | Train Loss: 0.4821 | Val mean-roc_auc_score: 0.5772
|
17 |
+
2025-09-23 03:49:00,279 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 6/100 | Train Loss: 0.4656 | Val mean-roc_auc_score: 0.6011
|
18 |
+
2025-09-23 03:49:00,856 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Global step of best model: 210
|
19 |
+
2025-09-23 03:49:01,365 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Best model saved at epoch 6 with val mean-roc_auc_score: 0.6011
|
20 |
+
2025-09-23 03:49:05,261 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 7/100 | Train Loss: 0.4446 | Val mean-roc_auc_score: 0.6058
|
21 |
+
2025-09-23 03:49:05,435 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Global step of best model: 245
|
22 |
+
2025-09-23 03:49:05,945 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Best model saved at epoch 7 with val mean-roc_auc_score: 0.6058
|
23 |
+
2025-09-23 03:49:09,831 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 8/100 | Train Loss: 0.4161 | Val mean-roc_auc_score: 0.6168
|
24 |
+
2025-09-23 03:49:10,009 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Global step of best model: 280
|
25 |
+
2025-09-23 03:49:10,520 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Best model saved at epoch 8 with val mean-roc_auc_score: 0.6168
|
26 |
+
2025-09-23 03:49:14,414 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 9/100 | Train Loss: 0.3896 | Val mean-roc_auc_score: 0.6305
|
27 |
+
2025-09-23 03:49:14,586 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Global step of best model: 315
|
28 |
+
2025-09-23 03:49:15,096 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Best model saved at epoch 9 with val mean-roc_auc_score: 0.6305
|
29 |
+
2025-09-23 03:49:19,092 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 10/100 | Train Loss: 0.3571 | Val mean-roc_auc_score: 0.6201
|
30 |
+
2025-09-23 03:49:22,998 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 11/100 | Train Loss: 0.3411 | Val mean-roc_auc_score: 0.6170
|
31 |
+
2025-09-23 03:49:27,315 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 12/100 | Train Loss: 0.3219 | Val mean-roc_auc_score: 0.6029
|
32 |
+
2025-09-23 03:49:31,163 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 13/100 | Train Loss: 0.3071 | Val mean-roc_auc_score: 0.6090
|
33 |
+
2025-09-23 03:49:35,082 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 14/100 | Train Loss: 0.2929 | Val mean-roc_auc_score: 0.6175
|
34 |
+
2025-09-23 03:49:38,996 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 15/100 | Train Loss: 0.2812 | Val mean-roc_auc_score: 0.6017
|
35 |
+
2025-09-23 03:49:42,975 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 16/100 | Train Loss: 0.2661 | Val mean-roc_auc_score: 0.6020
|
36 |
+
2025-09-23 03:49:47,278 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 17/100 | Train Loss: 0.2571 | Val mean-roc_auc_score: 0.6034
|
37 |
+
2025-09-23 03:49:51,387 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 18/100 | Train Loss: 0.2510 | Val mean-roc_auc_score: 0.6059
|
38 |
+
2025-09-23 03:49:55,283 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 19/100 | Train Loss: 0.2429 | Val mean-roc_auc_score: 0.5933
|
39 |
+
2025-09-23 03:49:59,227 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 20/100 | Train Loss: 0.2375 | Val mean-roc_auc_score: 0.5986
|
40 |
+
2025-09-23 03:50:03,162 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 21/100 | Train Loss: 0.2304 | Val mean-roc_auc_score: 0.5953
|
41 |
+
2025-09-23 03:50:07,549 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 22/100 | Train Loss: 0.2188 | Val mean-roc_auc_score: 0.5975
|
42 |
+
2025-09-23 03:50:11,404 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 23/100 | Train Loss: 0.2281 | Val mean-roc_auc_score: 0.6031
|
43 |
+
2025-09-23 03:50:15,364 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 24/100 | Train Loss: 0.2116 | Val mean-roc_auc_score: 0.5968
|
44 |
+
2025-09-23 03:50:19,218 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 25/100 | Train Loss: 0.2036 | Val mean-roc_auc_score: 0.6064
|
45 |
+
2025-09-23 03:50:23,141 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 26/100 | Train Loss: 0.2031 | Val mean-roc_auc_score: 0.5909
|
46 |
+
2025-09-23 03:50:27,432 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 27/100 | Train Loss: 0.1982 | Val mean-roc_auc_score: 0.6008
|
47 |
+
2025-09-23 03:50:31,375 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 28/100 | Train Loss: 0.1902 | Val mean-roc_auc_score: 0.5953
|
48 |
+
2025-09-23 03:50:36,485 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 29/100 | Train Loss: 0.1948 | Val mean-roc_auc_score: 0.5905
|
49 |
+
2025-09-23 03:50:40,447 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 30/100 | Train Loss: 0.1848 | Val mean-roc_auc_score: 0.5983
|
50 |
+
2025-09-23 03:50:44,358 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 31/100 | Train Loss: 0.1768 | Val mean-roc_auc_score: 0.6065
|
51 |
+
2025-09-23 03:50:48,695 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 32/100 | Train Loss: 0.1727 | Val mean-roc_auc_score: 0.6014
|
52 |
+
2025-09-23 03:50:52,617 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 33/100 | Train Loss: 0.1705 | Val mean-roc_auc_score: 0.5979
|
53 |
+
2025-09-23 03:50:56,677 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 34/100 | Train Loss: 0.1652 | Val mean-roc_auc_score: 0.5962
|
54 |
+
2025-09-23 03:51:00,571 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 35/100 | Train Loss: 0.1638 | Val mean-roc_auc_score: 0.5995
|
55 |
+
2025-09-23 03:51:04,516 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 36/100 | Train Loss: 0.1625 | Val mean-roc_auc_score: 0.5949
|
56 |
+
2025-09-23 03:51:08,840 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 37/100 | Train Loss: 0.1571 | Val mean-roc_auc_score: 0.5926
|
57 |
+
2025-09-23 03:51:12,785 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 38/100 | Train Loss: 0.1542 | Val mean-roc_auc_score: 0.6030
|
58 |
+
2025-09-23 03:51:16,688 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 39/100 | Train Loss: 0.1500 | Val mean-roc_auc_score: 0.5880
|
59 |
+
2025-09-23 03:51:20,633 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 40/100 | Train Loss: 0.1491 | Val mean-roc_auc_score: 0.5828
|
60 |
+
2025-09-23 03:51:24,526 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 41/100 | Train Loss: 0.1455 | Val mean-roc_auc_score: 0.5868
|
61 |
+
2025-09-23 03:51:28,844 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 42/100 | Train Loss: 0.1446 | Val mean-roc_auc_score: 0.5878
|
62 |
+
2025-09-23 03:51:32,811 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 43/100 | Train Loss: 0.1531 | Val mean-roc_auc_score: 0.5823
|
63 |
+
2025-09-23 03:51:36,740 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 44/100 | Train Loss: 0.1402 | Val mean-roc_auc_score: 0.5851
|
64 |
+
2025-09-23 03:51:40,747 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 45/100 | Train Loss: 0.1357 | Val mean-roc_auc_score: 0.5854
|
65 |
+
2025-09-23 03:51:44,606 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 46/100 | Train Loss: 0.1375 | Val mean-roc_auc_score: 0.5771
|
66 |
+
2025-09-23 03:51:48,947 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 47/100 | Train Loss: 0.1330 | Val mean-roc_auc_score: 0.5839
|
67 |
+
2025-09-23 03:51:52,844 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 48/100 | Train Loss: 0.1286 | Val mean-roc_auc_score: 0.5824
|
68 |
+
2025-09-23 03:51:56,756 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 49/100 | Train Loss: 0.1307 | Val mean-roc_auc_score: 0.5816
|
69 |
+
2025-09-23 03:52:00,704 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 50/100 | Train Loss: 0.1277 | Val mean-roc_auc_score: 0.5816
|
70 |
+
2025-09-23 03:52:04,688 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 51/100 | Train Loss: 0.1268 | Val mean-roc_auc_score: 0.5863
|
71 |
+
2025-09-23 03:52:08,969 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 52/100 | Train Loss: 0.1289 | Val mean-roc_auc_score: 0.5849
|
72 |
+
2025-09-23 03:52:12,899 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 53/100 | Train Loss: 0.1223 | Val mean-roc_auc_score: 0.5893
|
73 |
+
2025-09-23 03:52:16,816 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 54/100 | Train Loss: 0.1205 | Val mean-roc_auc_score: 0.5829
|
74 |
+
2025-09-23 03:52:20,741 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 55/100 | Train Loss: 0.1206 | Val mean-roc_auc_score: 0.5794
|
75 |
+
2025-09-23 03:52:24,690 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 56/100 | Train Loss: 0.1196 | Val mean-roc_auc_score: 0.5779
|
76 |
+
2025-09-23 03:52:29,050 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 57/100 | Train Loss: 0.1161 | Val mean-roc_auc_score: 0.5843
|
77 |
+
2025-09-23 03:52:34,253 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 58/100 | Train Loss: 0.1135 | Val mean-roc_auc_score: 0.5796
|
78 |
+
2025-09-23 03:52:38,146 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 59/100 | Train Loss: 0.1121 | Val mean-roc_auc_score: 0.5771
|
79 |
+
2025-09-23 03:52:41,990 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 60/100 | Train Loss: 0.1112 | Val mean-roc_auc_score: 0.5800
|
80 |
+
2025-09-23 03:52:45,917 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 61/100 | Train Loss: 0.1094 | Val mean-roc_auc_score: 0.5773
|
81 |
+
2025-09-23 03:52:50,235 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 62/100 | Train Loss: 0.1112 | Val mean-roc_auc_score: 0.5849
|
82 |
+
2025-09-23 03:52:54,341 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 63/100 | Train Loss: 0.1039 | Val mean-roc_auc_score: 0.5815
|
83 |
+
2025-09-23 03:52:58,217 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 64/100 | Train Loss: 0.1085 | Val mean-roc_auc_score: 0.5801
|
84 |
+
2025-09-23 03:53:02,138 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 65/100 | Train Loss: 0.1058 | Val mean-roc_auc_score: 0.5841
|
85 |
+
2025-09-23 03:53:05,981 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 66/100 | Train Loss: 0.1102 | Val mean-roc_auc_score: 0.5752
|
86 |
+
2025-09-23 03:53:10,258 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 67/100 | Train Loss: 0.1040 | Val mean-roc_auc_score: 0.5823
|
87 |
+
2025-09-23 03:53:14,176 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 68/100 | Train Loss: 0.1036 | Val mean-roc_auc_score: 0.5733
|
88 |
+
2025-09-23 03:53:18,109 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 69/100 | Train Loss: 0.1073 | Val mean-roc_auc_score: 0.5807
|
89 |
+
2025-09-23 03:53:22,004 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 70/100 | Train Loss: 0.1018 | Val mean-roc_auc_score: 0.5805
|
90 |
+
2025-09-23 03:53:25,897 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 71/100 | Train Loss: 0.1000 | Val mean-roc_auc_score: 0.5862
|
91 |
+
2025-09-23 03:53:30,198 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 72/100 | Train Loss: 0.0996 | Val mean-roc_auc_score: 0.5826
|
92 |
+
2025-09-23 03:53:34,163 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 73/100 | Train Loss: 0.0978 | Val mean-roc_auc_score: 0.5776
|
93 |
+
2025-09-23 03:53:38,109 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 74/100 | Train Loss: 0.0964 | Val mean-roc_auc_score: 0.5811
|
94 |
+
2025-09-23 03:53:42,002 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 75/100 | Train Loss: 0.0994 | Val mean-roc_auc_score: 0.5798
|
95 |
+
2025-09-23 03:53:45,938 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 76/100 | Train Loss: 0.0955 | Val mean-roc_auc_score: 0.5823
|
96 |
+
2025-09-23 03:53:50,230 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 77/100 | Train Loss: 0.0946 | Val mean-roc_auc_score: 0.5805
|
97 |
+
2025-09-23 03:53:54,170 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 78/100 | Train Loss: 0.0948 | Val mean-roc_auc_score: 0.5790
|
98 |
+
2025-09-23 03:53:58,040 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 79/100 | Train Loss: 0.0938 | Val mean-roc_auc_score: 0.5839
|
99 |
+
2025-09-23 03:54:01,957 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 80/100 | Train Loss: 0.0938 | Val mean-roc_auc_score: 0.5808
|
100 |
+
2025-09-23 03:54:05,864 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 81/100 | Train Loss: 0.0920 | Val mean-roc_auc_score: 0.5782
|
101 |
+
2025-09-23 03:54:10,183 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 82/100 | Train Loss: 0.0924 | Val mean-roc_auc_score: 0.5891
|
102 |
+
2025-09-23 03:54:14,076 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 83/100 | Train Loss: 0.0914 | Val mean-roc_auc_score: 0.5839
|
103 |
+
2025-09-23 03:54:18,044 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 84/100 | Train Loss: 0.0924 | Val mean-roc_auc_score: 0.5781
|
104 |
+
2025-09-23 03:54:22,005 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 85/100 | Train Loss: 0.0911 | Val mean-roc_auc_score: 0.5772
|
105 |
+
2025-09-23 03:54:27,086 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 86/100 | Train Loss: 0.0941 | Val mean-roc_auc_score: 0.5761
|
106 |
+
2025-09-23 03:54:31,450 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 87/100 | Train Loss: 0.0893 | Val mean-roc_auc_score: 0.5718
|
107 |
+
2025-09-23 03:54:35,359 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 88/100 | Train Loss: 0.0879 | Val mean-roc_auc_score: 0.5789
|
108 |
+
2025-09-23 03:54:39,281 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 89/100 | Train Loss: 0.0885 | Val mean-roc_auc_score: 0.5731
|
109 |
+
2025-09-23 03:54:43,198 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 90/100 | Train Loss: 0.0884 | Val mean-roc_auc_score: 0.5815
|
110 |
+
2025-09-23 03:54:47,204 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 91/100 | Train Loss: 0.0871 | Val mean-roc_auc_score: 0.5780
|
111 |
+
2025-09-23 03:54:51,557 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 92/100 | Train Loss: 0.0867 | Val mean-roc_auc_score: 0.5818
|
112 |
+
2025-09-23 03:54:55,480 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 93/100 | Train Loss: 0.0866 | Val mean-roc_auc_score: 0.5826
|
113 |
+
2025-09-23 03:54:59,338 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 94/100 | Train Loss: 0.0839 | Val mean-roc_auc_score: 0.5765
|
114 |
+
2025-09-23 03:55:03,199 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 95/100 | Train Loss: 0.0844 | Val mean-roc_auc_score: 0.5728
|
115 |
+
2025-09-23 03:55:07,086 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 96/100 | Train Loss: 0.0839 | Val mean-roc_auc_score: 0.5820
|
116 |
+
2025-09-23 03:55:11,393 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 97/100 | Train Loss: 0.0835 | Val mean-roc_auc_score: 0.5805
|
117 |
+
2025-09-23 03:55:15,303 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 98/100 | Train Loss: 0.0839 | Val mean-roc_auc_score: 0.5755
|
118 |
+
2025-09-23 03:55:19,161 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 99/100 | Train Loss: 0.0830 | Val mean-roc_auc_score: 0.5828
|
119 |
+
2025-09-23 03:55:23,083 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 100/100 | Train Loss: 0.0830 | Val mean-roc_auc_score: 0.5822
|
120 |
+
2025-09-23 03:55:23,730 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Test mean-roc_auc_score: 0.6515
|
121 |
+
2025-09-23 03:55:24,156 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Starting triplicate run 2 for dataset sider at 2025-09-23_03-55-24
|
122 |
+
2025-09-23 03:55:27,518 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 1/100 | Train Loss: 0.5679 | Val mean-roc_auc_score: 0.5446
|
123 |
+
2025-09-23 03:55:27,519 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Global step of best model: 35
|
124 |
+
2025-09-23 03:55:28,047 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Best model saved at epoch 1 with val mean-roc_auc_score: 0.5446
|
125 |
+
2025-09-23 03:55:32,300 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 2/100 | Train Loss: 0.5143 | Val mean-roc_auc_score: 0.5570
|
126 |
+
2025-09-23 03:55:32,464 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Global step of best model: 70
|
127 |
+
2025-09-23 03:55:33,004 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Best model saved at epoch 2 with val mean-roc_auc_score: 0.5570
|
128 |
+
2025-09-23 03:55:36,925 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 3/100 | Train Loss: 0.5000 | Val mean-roc_auc_score: 0.5727
|
129 |
+
2025-09-23 03:55:37,094 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Global step of best model: 105
|
130 |
+
2025-09-23 03:55:37,600 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Best model saved at epoch 3 with val mean-roc_auc_score: 0.5727
|
131 |
+
2025-09-23 03:55:41,460 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 4/100 | Train Loss: 0.4893 | Val mean-roc_auc_score: 0.5914
|
132 |
+
2025-09-23 03:55:41,637 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Global step of best model: 140
|
133 |
+
2025-09-23 03:55:42,150 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Best model saved at epoch 4 with val mean-roc_auc_score: 0.5914
|
134 |
+
2025-09-23 03:55:46,070 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 5/100 | Train Loss: 0.4714 | Val mean-roc_auc_score: 0.5947
|
135 |
+
2025-09-23 03:55:46,252 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Global step of best model: 175
|
136 |
+
2025-09-23 03:55:46,758 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Best model saved at epoch 5 with val mean-roc_auc_score: 0.5947
|
137 |
+
2025-09-23 03:55:50,697 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 6/100 | Train Loss: 0.4437 | Val mean-roc_auc_score: 0.5924
|
138 |
+
2025-09-23 03:55:55,082 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 7/100 | Train Loss: 0.4214 | Val mean-roc_auc_score: 0.6068
|
139 |
+
2025-09-23 03:55:55,257 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Global step of best model: 245
|
140 |
+
2025-09-23 03:55:55,770 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Best model saved at epoch 7 with val mean-roc_auc_score: 0.6068
|
141 |
+
2025-09-23 03:55:59,704 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 8/100 | Train Loss: 0.4071 | Val mean-roc_auc_score: 0.5916
|
142 |
+
2025-09-23 03:56:03,610 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 9/100 | Train Loss: 0.3792 | Val mean-roc_auc_score: 0.5895
|
143 |
+
2025-09-23 03:56:07,539 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 10/100 | Train Loss: 0.3589 | Val mean-roc_auc_score: 0.6174
|
144 |
+
2025-09-23 03:56:07,714 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Global step of best model: 350
|
145 |
+
2025-09-23 03:56:08,232 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Best model saved at epoch 10 with val mean-roc_auc_score: 0.6174
|
146 |
+
2025-09-23 03:56:12,143 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 11/100 | Train Loss: 0.3446 | Val mean-roc_auc_score: 0.6003
|
147 |
+
2025-09-23 03:56:16,443 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 12/100 | Train Loss: 0.3250 | Val mean-roc_auc_score: 0.5995
|
148 |
+
2025-09-23 03:56:20,387 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 13/100 | Train Loss: 0.3179 | Val mean-roc_auc_score: 0.6160
|
149 |
+
2025-09-23 03:56:24,287 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 14/100 | Train Loss: 0.3161 | Val mean-roc_auc_score: 0.5995
|
150 |
+
2025-09-23 03:56:28,228 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 15/100 | Train Loss: 0.3013 | Val mean-roc_auc_score: 0.5948
|
151 |
+
2025-09-23 03:56:32,087 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 16/100 | Train Loss: 0.2857 | Val mean-roc_auc_score: 0.6080
|
152 |
+
2025-09-23 03:56:36,459 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 17/100 | Train Loss: 0.2804 | Val mean-roc_auc_score: 0.6098
|
153 |
+
2025-09-23 03:56:40,373 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 18/100 | Train Loss: 0.2750 | Val mean-roc_auc_score: 0.6072
|
154 |
+
2025-09-23 03:56:44,256 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 19/100 | Train Loss: 0.2661 | Val mean-roc_auc_score: 0.6144
|
155 |
+
2025-09-23 03:56:48,190 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 20/100 | Train Loss: 0.2607 | Val mean-roc_auc_score: 0.6120
|
156 |
+
2025-09-23 03:56:52,108 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 21/100 | Train Loss: 0.2536 | Val mean-roc_auc_score: 0.6095
|
157 |
+
2025-09-23 03:56:56,389 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 22/100 | Train Loss: 0.2500 | Val mean-roc_auc_score: 0.6040
|
158 |
+
2025-09-23 03:57:00,316 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 23/100 | Train Loss: 0.2531 | Val mean-roc_auc_score: 0.6119
|
159 |
+
2025-09-23 03:57:04,253 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 24/100 | Train Loss: 0.2357 | Val mean-roc_auc_score: 0.6125
|
160 |
+
2025-09-23 03:57:08,140 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 25/100 | Train Loss: 0.2321 | Val mean-roc_auc_score: 0.6104
|
161 |
+
2025-09-23 03:57:12,056 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 26/100 | Train Loss: 0.2328 | Val mean-roc_auc_score: 0.6165
|
162 |
+
2025-09-23 03:57:16,417 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 27/100 | Train Loss: 0.2268 | Val mean-roc_auc_score: 0.6130
|
163 |
+
2025-09-23 03:57:20,342 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 28/100 | Train Loss: 0.2196 | Val mean-roc_auc_score: 0.6183
|
164 |
+
2025-09-23 03:57:20,496 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Global step of best model: 980
|
165 |
+
2025-09-23 03:57:21,004 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Best model saved at epoch 28 with val mean-roc_auc_score: 0.6183
|
166 |
+
2025-09-23 03:57:26,053 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 29/100 | Train Loss: 0.2156 | Val mean-roc_auc_score: 0.6152
|
167 |
+
2025-09-23 03:57:30,035 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 30/100 | Train Loss: 0.2080 | Val mean-roc_auc_score: 0.6161
|
168 |
+
2025-09-23 03:57:33,979 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 31/100 | Train Loss: 0.2027 | Val mean-roc_auc_score: 0.6178
|
169 |
+
2025-09-23 03:57:38,336 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 32/100 | Train Loss: 0.1992 | Val mean-roc_auc_score: 0.6219
|
170 |
+
2025-09-23 03:57:38,512 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Global step of best model: 1120
|
171 |
+
2025-09-23 03:57:39,040 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Best model saved at epoch 32 with val mean-roc_auc_score: 0.6219
|
172 |
+
2025-09-23 03:57:43,066 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 33/100 | Train Loss: 0.1946 | Val mean-roc_auc_score: 0.6164
|
173 |
+
2025-09-23 03:57:46,998 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 34/100 | Train Loss: 0.1946 | Val mean-roc_auc_score: 0.6152
|
174 |
+
2025-09-23 03:57:50,937 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 35/100 | Train Loss: 0.1900 | Val mean-roc_auc_score: 0.6162
|
175 |
+
2025-09-23 03:57:54,873 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 36/100 | Train Loss: 0.1884 | Val mean-roc_auc_score: 0.6201
|
176 |
+
2025-09-23 03:57:59,163 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 37/100 | Train Loss: 0.1848 | Val mean-roc_auc_score: 0.6222
|
177 |
+
2025-09-23 03:57:59,337 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Global step of best model: 1295
|
178 |
+
2025-09-23 03:57:59,844 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Best model saved at epoch 37 with val mean-roc_auc_score: 0.6222
|
179 |
+
2025-09-23 03:58:03,834 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 38/100 | Train Loss: 0.1823 | Val mean-roc_auc_score: 0.6176
|
180 |
+
2025-09-23 03:58:07,792 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 39/100 | Train Loss: 0.1741 | Val mean-roc_auc_score: 0.6151
|
181 |
+
2025-09-23 03:58:11,749 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 40/100 | Train Loss: 0.1741 | Val mean-roc_auc_score: 0.6187
|
182 |
+
2025-09-23 03:58:15,702 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 41/100 | Train Loss: 0.1705 | Val mean-roc_auc_score: 0.6139
|
183 |
+
2025-09-23 03:58:19,969 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 42/100 | Train Loss: 0.1661 | Val mean-roc_auc_score: 0.6172
|
184 |
+
2025-09-23 03:58:23,897 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 43/100 | Train Loss: 0.1555 | Val mean-roc_auc_score: 0.6198
|
185 |
+
2025-09-23 03:58:27,869 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 44/100 | Train Loss: 0.1616 | Val mean-roc_auc_score: 0.6168
|
186 |
+
2025-09-23 03:58:31,742 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 45/100 | Train Loss: 0.1616 | Val mean-roc_auc_score: 0.6161
|
187 |
+
2025-09-23 03:58:35,647 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 46/100 | Train Loss: 0.1547 | Val mean-roc_auc_score: 0.6182
|
188 |
+
2025-09-23 03:58:39,939 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 47/100 | Train Loss: 0.1562 | Val mean-roc_auc_score: 0.6102
|
189 |
+
2025-09-23 03:58:43,916 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 48/100 | Train Loss: 0.1518 | Val mean-roc_auc_score: 0.6094
|
190 |
+
2025-09-23 03:58:47,836 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 49/100 | Train Loss: 0.1510 | Val mean-roc_auc_score: 0.6124
|
191 |
+
2025-09-23 03:58:51,793 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 50/100 | Train Loss: 0.1482 | Val mean-roc_auc_score: 0.6137
|
192 |
+
2025-09-23 03:58:55,702 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 51/100 | Train Loss: 0.1464 | Val mean-roc_auc_score: 0.6224
|
193 |
+
2025-09-23 03:58:56,226 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Global step of best model: 1785
|
194 |
+
2025-09-23 03:58:56,730 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Best model saved at epoch 51 with val mean-roc_auc_score: 0.6224
|
195 |
+
2025-09-23 03:59:00,640 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 52/100 | Train Loss: 0.1445 | Val mean-roc_auc_score: 0.6137
|
196 |
+
2025-09-23 03:59:04,568 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 53/100 | Train Loss: 0.1429 | Val mean-roc_auc_score: 0.6109
|
197 |
+
2025-09-23 03:59:08,467 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 54/100 | Train Loss: 0.1420 | Val mean-roc_auc_score: 0.6061
|
198 |
+
2025-09-23 03:59:12,409 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 55/100 | Train Loss: 0.1394 | Val mean-roc_auc_score: 0.6132
|
199 |
+
2025-09-23 03:59:16,334 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 56/100 | Train Loss: 0.1375 | Val mean-roc_auc_score: 0.6101
|
200 |
+
2025-09-23 03:59:20,655 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 57/100 | Train Loss: 0.1339 | Val mean-roc_auc_score: 0.6094
|
201 |
+
2025-09-23 03:59:25,729 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 58/100 | Train Loss: 0.1344 | Val mean-roc_auc_score: 0.6094
|
202 |
+
2025-09-23 03:59:29,618 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 59/100 | Train Loss: 0.1304 | Val mean-roc_auc_score: 0.6064
|
203 |
+
2025-09-23 03:59:33,611 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 60/100 | Train Loss: 0.1295 | Val mean-roc_auc_score: 0.6088
|
204 |
+
2025-09-23 03:59:37,489 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 61/100 | Train Loss: 0.1321 | Val mean-roc_auc_score: 0.6140
|
205 |
+
2025-09-23 03:59:41,775 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 62/100 | Train Loss: 0.1286 | Val mean-roc_auc_score: 0.6062
|
206 |
+
2025-09-23 03:59:45,756 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 63/100 | Train Loss: 0.1297 | Val mean-roc_auc_score: 0.6024
|
207 |
+
2025-09-23 03:59:49,675 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 64/100 | Train Loss: 0.1259 | Val mean-roc_auc_score: 0.6121
|
208 |
+
2025-09-23 03:59:53,576 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 65/100 | Train Loss: 0.1250 | Val mean-roc_auc_score: 0.6061
|
209 |
+
2025-09-23 03:59:57,508 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 66/100 | Train Loss: 0.1242 | Val mean-roc_auc_score: 0.6082
|
210 |
+
2025-09-23 04:00:01,915 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 67/100 | Train Loss: 0.1241 | Val mean-roc_auc_score: 0.6042
|
211 |
+
2025-09-23 04:00:05,790 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 68/100 | Train Loss: 0.1232 | Val mean-roc_auc_score: 0.6074
|
212 |
+
2025-09-23 04:00:09,766 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 69/100 | Train Loss: 0.1172 | Val mean-roc_auc_score: 0.6061
|
213 |
+
2025-09-23 04:00:13,704 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 70/100 | Train Loss: 0.1179 | Val mean-roc_auc_score: 0.6041
|
214 |
+
2025-09-23 04:00:17,585 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 71/100 | Train Loss: 0.1161 | Val mean-roc_auc_score: 0.6062
|
215 |
+
2025-09-23 04:00:21,845 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 72/100 | Train Loss: 0.1156 | Val mean-roc_auc_score: 0.6067
|
216 |
+
2025-09-23 04:00:25,769 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 73/100 | Train Loss: 0.1152 | Val mean-roc_auc_score: 0.6097
|
217 |
+
2025-09-23 04:00:29,677 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 74/100 | Train Loss: 0.1152 | Val mean-roc_auc_score: 0.6100
|
218 |
+
2025-09-23 04:00:33,617 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 75/100 | Train Loss: 0.1150 | Val mean-roc_auc_score: 0.6081
|
219 |
+
2025-09-23 04:00:37,522 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 76/100 | Train Loss: 0.1112 | Val mean-roc_auc_score: 0.6045
|
220 |
+
2025-09-23 04:00:41,713 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 77/100 | Train Loss: 0.1121 | Val mean-roc_auc_score: 0.6032
|
221 |
+
2025-09-23 04:00:45,521 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 78/100 | Train Loss: 0.1099 | Val mean-roc_auc_score: 0.6081
|
222 |
+
2025-09-23 04:00:49,360 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 79/100 | Train Loss: 0.1085 | Val mean-roc_auc_score: 0.6062
|
223 |
+
2025-09-23 04:00:53,168 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 80/100 | Train Loss: 0.1089 | Val mean-roc_auc_score: 0.6060
|
224 |
+
2025-09-23 04:00:57,005 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 81/100 | Train Loss: 0.1071 | Val mean-roc_auc_score: 0.6066
|
225 |
+
2025-09-23 04:01:01,223 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 82/100 | Train Loss: 0.1067 | Val mean-roc_auc_score: 0.6027
|
226 |
+
2025-09-23 04:01:05,120 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 83/100 | Train Loss: 0.1039 | Val mean-roc_auc_score: 0.5987
|
227 |
+
2025-09-23 04:01:08,939 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 84/100 | Train Loss: 0.1045 | Val mean-roc_auc_score: 0.6016
|
228 |
+
2025-09-23 04:01:12,740 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 85/100 | Train Loss: 0.1036 | Val mean-roc_auc_score: 0.6060
|
229 |
+
2025-09-23 04:01:17,763 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 86/100 | Train Loss: 0.1047 | Val mean-roc_auc_score: 0.6017
|
230 |
+
2025-09-23 04:01:21,965 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 87/100 | Train Loss: 0.1027 | Val mean-roc_auc_score: 0.6003
|
231 |
+
2025-09-23 04:01:25,837 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 88/100 | Train Loss: 0.1036 | Val mean-roc_auc_score: 0.6041
|
232 |
+
2025-09-23 04:01:29,671 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 89/100 | Train Loss: 0.0990 | Val mean-roc_auc_score: 0.5949
|
233 |
+
2025-09-23 04:01:33,557 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 90/100 | Train Loss: 0.1018 | Val mean-roc_auc_score: 0.6021
|
234 |
+
2025-09-23 04:01:37,389 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 91/100 | Train Loss: 0.1018 | Val mean-roc_auc_score: 0.6019
|
235 |
+
2025-09-23 04:01:41,618 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 92/100 | Train Loss: 0.1000 | Val mean-roc_auc_score: 0.6020
|
236 |
+
2025-09-23 04:01:45,480 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 93/100 | Train Loss: 0.1000 | Val mean-roc_auc_score: 0.6085
|
237 |
+
2025-09-23 04:01:49,328 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 94/100 | Train Loss: 0.0987 | Val mean-roc_auc_score: 0.6063
|
238 |
+
2025-09-23 04:01:53,144 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 95/100 | Train Loss: 0.0994 | Val mean-roc_auc_score: 0.6049
|
239 |
+
2025-09-23 04:01:56,984 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 96/100 | Train Loss: 0.0978 | Val mean-roc_auc_score: 0.6037
|
240 |
+
2025-09-23 04:02:01,208 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 97/100 | Train Loss: 0.0969 | Val mean-roc_auc_score: 0.5985
|
241 |
+
2025-09-23 04:02:05,039 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 98/100 | Train Loss: 0.0948 | Val mean-roc_auc_score: 0.6014
|
242 |
+
2025-09-23 04:02:08,833 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 99/100 | Train Loss: 0.0969 | Val mean-roc_auc_score: 0.5984
|
243 |
+
2025-09-23 04:02:12,624 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 100/100 | Train Loss: 0.0942 | Val mean-roc_auc_score: 0.6013
|
244 |
+
2025-09-23 04:02:13,267 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Test mean-roc_auc_score: 0.6638
|
245 |
+
2025-09-23 04:02:13,687 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Starting triplicate run 3 for dataset sider at 2025-09-23_04-02-13
|
246 |
+
2025-09-23 04:02:16,993 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 1/100 | Train Loss: 0.5786 | Val mean-roc_auc_score: 0.5551
|
247 |
+
2025-09-23 04:02:16,993 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Global step of best model: 35
|
248 |
+
2025-09-23 04:02:17,506 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Best model saved at epoch 1 with val mean-roc_auc_score: 0.5551
|
249 |
+
2025-09-23 04:02:21,352 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 2/100 | Train Loss: 0.5107 | Val mean-roc_auc_score: 0.5629
|
250 |
+
2025-09-23 04:02:21,516 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Global step of best model: 70
|
251 |
+
2025-09-23 04:02:22,018 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Best model saved at epoch 2 with val mean-roc_auc_score: 0.5629
|
252 |
+
2025-09-23 04:02:25,881 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 3/100 | Train Loss: 0.5000 | Val mean-roc_auc_score: 0.5788
|
253 |
+
2025-09-23 04:02:26,054 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Global step of best model: 105
|
254 |
+
2025-09-23 04:02:26,579 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Best model saved at epoch 3 with val mean-roc_auc_score: 0.5788
|
255 |
+
2025-09-23 04:02:30,357 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 4/100 | Train Loss: 0.4857 | Val mean-roc_auc_score: 0.5970
|
256 |
+
2025-09-23 04:02:30,532 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Global step of best model: 140
|
257 |
+
2025-09-23 04:02:31,042 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Best model saved at epoch 4 with val mean-roc_auc_score: 0.5970
|
258 |
+
2025-09-23 04:02:34,878 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 5/100 | Train Loss: 0.4679 | Val mean-roc_auc_score: 0.6126
|
259 |
+
2025-09-23 04:02:35,050 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Global step of best model: 175
|
260 |
+
2025-09-23 04:02:35,555 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Best model saved at epoch 5 with val mean-roc_auc_score: 0.6126
|
261 |
+
2025-09-23 04:02:39,328 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 6/100 | Train Loss: 0.4281 | Val mean-roc_auc_score: 0.6203
|
262 |
+
2025-09-23 04:02:39,874 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Global step of best model: 210
|
263 |
+
2025-09-23 04:02:40,387 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Best model saved at epoch 6 with val mean-roc_auc_score: 0.6203
|
264 |
+
2025-09-23 04:02:44,213 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 7/100 | Train Loss: 0.4107 | Val mean-roc_auc_score: 0.6324
|
265 |
+
2025-09-23 04:02:44,385 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Global step of best model: 245
|
266 |
+
2025-09-23 04:02:44,906 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Best model saved at epoch 7 with val mean-roc_auc_score: 0.6324
|
267 |
+
2025-09-23 04:02:48,725 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 8/100 | Train Loss: 0.3982 | Val mean-roc_auc_score: 0.6390
|
268 |
+
2025-09-23 04:02:48,899 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Global step of best model: 280
|
269 |
+
2025-09-23 04:02:49,405 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Best model saved at epoch 8 with val mean-roc_auc_score: 0.6390
|
270 |
+
2025-09-23 04:02:53,237 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 9/100 | Train Loss: 0.3729 | Val mean-roc_auc_score: 0.6214
|
271 |
+
2025-09-23 04:02:57,041 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 10/100 | Train Loss: 0.3518 | Val mean-roc_auc_score: 0.6193
|
272 |
+
2025-09-23 04:03:00,874 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 11/100 | Train Loss: 0.3375 | Val mean-roc_auc_score: 0.6272
|
273 |
+
2025-09-23 04:03:05,050 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 12/100 | Train Loss: 0.3187 | Val mean-roc_auc_score: 0.6344
|
274 |
+
2025-09-23 04:03:08,886 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 13/100 | Train Loss: 0.3125 | Val mean-roc_auc_score: 0.6260
|
275 |
+
2025-09-23 04:03:12,898 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 14/100 | Train Loss: 0.3036 | Val mean-roc_auc_score: 0.6130
|
276 |
+
2025-09-23 04:03:16,706 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 15/100 | Train Loss: 0.2988 | Val mean-roc_auc_score: 0.6239
|
277 |
+
2025-09-23 04:03:20,536 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 16/100 | Train Loss: 0.2911 | Val mean-roc_auc_score: 0.6103
|
278 |
+
2025-09-23 04:03:24,744 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 17/100 | Train Loss: 0.2839 | Val mean-roc_auc_score: 0.6200
|
279 |
+
2025-09-23 04:03:28,571 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 18/100 | Train Loss: 0.2750 | Val mean-roc_auc_score: 0.6204
|
280 |
+
2025-09-23 04:03:32,377 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 19/100 | Train Loss: 0.2661 | Val mean-roc_auc_score: 0.6141
|
281 |
+
2025-09-23 04:03:36,194 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 20/100 | Train Loss: 0.2589 | Val mean-roc_auc_score: 0.6206
|
282 |
+
2025-09-23 04:03:39,996 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 21/100 | Train Loss: 0.2554 | Val mean-roc_auc_score: 0.6164
|
283 |
+
2025-09-23 04:03:44,243 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 22/100 | Train Loss: 0.2482 | Val mean-roc_auc_score: 0.6116
|
284 |
+
2025-09-23 04:03:48,041 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 23/100 | Train Loss: 0.2453 | Val mean-roc_auc_score: 0.6157
|
285 |
+
2025-09-23 04:03:51,863 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 24/100 | Train Loss: 0.2393 | Val mean-roc_auc_score: 0.6156
|
286 |
+
2025-09-23 04:03:55,703 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 25/100 | Train Loss: 0.2321 | Val mean-roc_auc_score: 0.6084
|
287 |
+
2025-09-23 04:03:59,601 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 26/100 | Train Loss: 0.2313 | Val mean-roc_auc_score: 0.6131
|
288 |
+
2025-09-23 04:04:03,867 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 27/100 | Train Loss: 0.2205 | Val mean-roc_auc_score: 0.6242
|
289 |
+
2025-09-23 04:04:07,772 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 28/100 | Train Loss: 0.2223 | Val mean-roc_auc_score: 0.6272
|
290 |
+
2025-09-23 04:04:12,751 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 29/100 | Train Loss: 0.2115 | Val mean-roc_auc_score: 0.6196
|
291 |
+
2025-09-23 04:04:16,532 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 30/100 | Train Loss: 0.2054 | Val mean-roc_auc_score: 0.6208
|
292 |
+
2025-09-23 04:04:20,316 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 31/100 | Train Loss: 0.2045 | Val mean-roc_auc_score: 0.6065
|
293 |
+
2025-09-23 04:04:24,581 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 32/100 | Train Loss: 0.2031 | Val mean-roc_auc_score: 0.6245
|
294 |
+
2025-09-23 04:04:28,450 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 33/100 | Train Loss: 0.1938 | Val mean-roc_auc_score: 0.6198
|
295 |
+
2025-09-23 04:04:32,269 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 34/100 | Train Loss: 0.1938 | Val mean-roc_auc_score: 0.6098
|
296 |
+
2025-09-23 04:04:36,057 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 35/100 | Train Loss: 0.1900 | Val mean-roc_auc_score: 0.6150
|
297 |
+
2025-09-23 04:04:39,825 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 36/100 | Train Loss: 0.1848 | Val mean-roc_auc_score: 0.6132
|
298 |
+
2025-09-23 04:04:44,028 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 37/100 | Train Loss: 0.1839 | Val mean-roc_auc_score: 0.6111
|
299 |
+
2025-09-23 04:04:48,002 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 38/100 | Train Loss: 0.1771 | Val mean-roc_auc_score: 0.6165
|
300 |
+
2025-09-23 04:04:51,813 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 39/100 | Train Loss: 0.1768 | Val mean-roc_auc_score: 0.6121
|
301 |
+
2025-09-23 04:04:55,672 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 40/100 | Train Loss: 0.1759 | Val mean-roc_auc_score: 0.6208
|
302 |
+
2025-09-23 04:04:59,507 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 41/100 | Train Loss: 0.1723 | Val mean-roc_auc_score: 0.6231
|
303 |
+
2025-09-23 04:05:03,796 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 42/100 | Train Loss: 0.1696 | Val mean-roc_auc_score: 0.6105
|
304 |
+
2025-09-23 04:05:07,600 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 43/100 | Train Loss: 0.1594 | Val mean-roc_auc_score: 0.6129
|
305 |
+
2025-09-23 04:05:11,391 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 44/100 | Train Loss: 0.1634 | Val mean-roc_auc_score: 0.6166
|
306 |
+
2025-09-23 04:05:15,205 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 45/100 | Train Loss: 0.1616 | Val mean-roc_auc_score: 0.6206
|
307 |
+
2025-09-23 04:05:19,045 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 46/100 | Train Loss: 0.1562 | Val mean-roc_auc_score: 0.6143
|
308 |
+
2025-09-23 04:05:23,222 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 47/100 | Train Loss: 0.1562 | Val mean-roc_auc_score: 0.6201
|
309 |
+
2025-09-23 04:05:27,103 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 48/100 | Train Loss: 0.1536 | Val mean-roc_auc_score: 0.6146
|
310 |
+
2025-09-23 04:05:30,943 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 49/100 | Train Loss: 0.1594 | Val mean-roc_auc_score: 0.6171
|
311 |
+
2025-09-23 04:05:34,760 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 50/100 | Train Loss: 0.1500 | Val mean-roc_auc_score: 0.6219
|
312 |
+
2025-09-23 04:05:38,591 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 51/100 | Train Loss: 0.1446 | Val mean-roc_auc_score: 0.6153
|
313 |
+
2025-09-23 04:05:42,838 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 52/100 | Train Loss: 0.1516 | Val mean-roc_auc_score: 0.6114
|
314 |
+
2025-09-23 04:05:46,665 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 53/100 | Train Loss: 0.1437 | Val mean-roc_auc_score: 0.6116
|
315 |
+
2025-09-23 04:05:50,464 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 54/100 | Train Loss: 0.1455 | Val mean-roc_auc_score: 0.6135
|
316 |
+
2025-09-23 04:05:54,277 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 55/100 | Train Loss: 0.1419 | Val mean-roc_auc_score: 0.6122
|
317 |
+
2025-09-23 04:05:58,109 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 56/100 | Train Loss: 0.1402 | Val mean-roc_auc_score: 0.6206
|
318 |
+
2025-09-23 04:06:02,373 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 57/100 | Train Loss: 0.1384 | Val mean-roc_auc_score: 0.6100
|
319 |
+
2025-09-23 04:06:07,421 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 58/100 | Train Loss: 0.1375 | Val mean-roc_auc_score: 0.6114
|
320 |
+
2025-09-23 04:06:11,210 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 59/100 | Train Loss: 0.1375 | Val mean-roc_auc_score: 0.6063
|
321 |
+
2025-09-23 04:06:15,056 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 60/100 | Train Loss: 0.1339 | Val mean-roc_auc_score: 0.6169
|
322 |
+
2025-09-23 04:06:18,949 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 61/100 | Train Loss: 0.1313 | Val mean-roc_auc_score: 0.6169
|
323 |
+
2025-09-23 04:06:23,183 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 62/100 | Train Loss: 0.1295 | Val mean-roc_auc_score: 0.6187
|
324 |
+
2025-09-23 04:06:26,989 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 63/100 | Train Loss: 0.1250 | Val mean-roc_auc_score: 0.6270
|
325 |
+
2025-09-23 04:06:30,796 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 64/100 | Train Loss: 0.1277 | Val mean-roc_auc_score: 0.6201
|
326 |
+
2025-09-23 04:06:34,550 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 65/100 | Train Loss: 0.1277 | Val mean-roc_auc_score: 0.6197
|
327 |
+
2025-09-23 04:06:38,392 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 66/100 | Train Loss: 0.1266 | Val mean-roc_auc_score: 0.6207
|
328 |
+
2025-09-23 04:06:42,607 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 67/100 | Train Loss: 0.1232 | Val mean-roc_auc_score: 0.6200
|
329 |
+
2025-09-23 04:06:46,399 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 68/100 | Train Loss: 0.1232 | Val mean-roc_auc_score: 0.6151
|
330 |
+
2025-09-23 04:06:50,210 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 69/100 | Train Loss: 0.1245 | Val mean-roc_auc_score: 0.6236
|
331 |
+
2025-09-23 04:06:54,062 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 70/100 | Train Loss: 0.1205 | Val mean-roc_auc_score: 0.6156
|
332 |
+
2025-09-23 04:06:57,910 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 71/100 | Train Loss: 0.1196 | Val mean-roc_auc_score: 0.6177
|
333 |
+
2025-09-23 04:07:02,136 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 72/100 | Train Loss: 0.1219 | Val mean-roc_auc_score: 0.6157
|
334 |
+
2025-09-23 04:07:05,991 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 73/100 | Train Loss: 0.1187 | Val mean-roc_auc_score: 0.6175
|
335 |
+
2025-09-23 04:07:09,792 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 74/100 | Train Loss: 0.1170 | Val mean-roc_auc_score: 0.6173
|
336 |
+
2025-09-23 04:07:13,637 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 75/100 | Train Loss: 0.1169 | Val mean-roc_auc_score: 0.6193
|
337 |
+
2025-09-23 04:07:17,468 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 76/100 | Train Loss: 0.1129 | Val mean-roc_auc_score: 0.6228
|
338 |
+
2025-09-23 04:07:21,697 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 77/100 | Train Loss: 0.1161 | Val mean-roc_auc_score: 0.6142
|
339 |
+
2025-09-23 04:07:25,535 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 78/100 | Train Loss: 0.1135 | Val mean-roc_auc_score: 0.6220
|
340 |
+
2025-09-23 04:07:29,338 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 79/100 | Train Loss: 0.1116 | Val mean-roc_auc_score: 0.6162
|
341 |
+
2025-09-23 04:07:33,163 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 80/100 | Train Loss: 0.1121 | Val mean-roc_auc_score: 0.6210
|
342 |
+
2025-09-23 04:07:36,994 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 81/100 | Train Loss: 0.1116 | Val mean-roc_auc_score: 0.6227
|
343 |
+
2025-09-23 04:07:41,232 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 82/100 | Train Loss: 0.1094 | Val mean-roc_auc_score: 0.6199
|
344 |
+
2025-09-23 04:07:45,057 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 83/100 | Train Loss: 0.1133 | Val mean-roc_auc_score: 0.6199
|
345 |
+
2025-09-23 04:07:48,869 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 84/100 | Train Loss: 0.1071 | Val mean-roc_auc_score: 0.6226
|
346 |
+
2025-09-23 04:07:52,719 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 85/100 | Train Loss: 0.1080 | Val mean-roc_auc_score: 0.6169
|
347 |
+
2025-09-23 04:07:57,708 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 86/100 | Train Loss: 0.1062 | Val mean-roc_auc_score: 0.6245
|
348 |
+
2025-09-23 04:08:01,926 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 87/100 | Train Loss: 0.1054 | Val mean-roc_auc_score: 0.6218
|
349 |
+
2025-09-23 04:08:05,760 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 88/100 | Train Loss: 0.1054 | Val mean-roc_auc_score: 0.6195
|
350 |
+
2025-09-23 04:08:09,580 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 89/100 | Train Loss: 0.1062 | Val mean-roc_auc_score: 0.6178
|
351 |
+
2025-09-23 04:08:13,413 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 90/100 | Train Loss: 0.1031 | Val mean-roc_auc_score: 0.6239
|
352 |
+
2025-09-23 04:08:17,225 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 91/100 | Train Loss: 0.1027 | Val mean-roc_auc_score: 0.6210
|
353 |
+
2025-09-23 04:08:21,424 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 92/100 | Train Loss: 0.1031 | Val mean-roc_auc_score: 0.6184
|
354 |
+
2025-09-23 04:08:25,263 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 93/100 | Train Loss: 0.1027 | Val mean-roc_auc_score: 0.6227
|
355 |
+
2025-09-23 04:08:29,113 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 94/100 | Train Loss: 0.0991 | Val mean-roc_auc_score: 0.6221
|
356 |
+
2025-09-23 04:08:33,031 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 95/100 | Train Loss: 0.1000 | Val mean-roc_auc_score: 0.6208
|
357 |
+
2025-09-23 04:08:36,848 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 96/100 | Train Loss: 0.0996 | Val mean-roc_auc_score: 0.6187
|
358 |
+
2025-09-23 04:08:41,070 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 97/100 | Train Loss: 0.0996 | Val mean-roc_auc_score: 0.6267
|
359 |
+
2025-09-23 04:08:44,845 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 98/100 | Train Loss: 0.1000 | Val mean-roc_auc_score: 0.6225
|
360 |
+
2025-09-23 04:08:48,728 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 99/100 | Train Loss: 0.0982 | Val mean-roc_auc_score: 0.6217
|
361 |
+
2025-09-23 04:08:52,518 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 100/100 | Train Loss: 0.1027 | Val mean-roc_auc_score: 0.6215
|
362 |
+
2025-09-23 04:08:53,151 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Test mean-roc_auc_score: 0.6649
|
363 |
+
2025-09-23 04:08:53,604 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Final Triplicate Test Results — Avg mean-roc_auc_score: 0.6600, Std Dev: 0.0061
|
logs_modchembert_classification_ModChemBERT-MLM-DAPT-TAFT-OPT/modchembert_deepchem_splits_run_tox21_epochs100_batch_size32_20250923_023906.log
ADDED
@@ -0,0 +1,329 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
2025-09-23 02:39:06,618 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Running benchmark for dataset: tox21
|
2 |
+
2025-09-23 02:39:06,618 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - dataset: tox21, tasks: ['NR-AR', 'NR-AR-LBD', 'NR-AhR', 'NR-Aromatase', 'NR-ER', 'NR-ER-LBD', 'NR-PPAR-gamma', 'SR-ARE', 'SR-ATAD5', 'SR-HSE', 'SR-MMP', 'SR-p53'], epochs: 100, learning rate: 3e-05
|
3 |
+
2025-09-23 02:39:06,631 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Starting triplicate run 1 for dataset tox21 at 2025-09-23_02-39-06
|
4 |
+
2025-09-23 02:39:18,713 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 1/100 | Train Loss: 0.1745 | Val mean-roc_auc_score: 0.7384
|
5 |
+
2025-09-23 02:39:18,713 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Global step of best model: 196
|
6 |
+
2025-09-23 02:39:19,229 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Best model saved at epoch 1 with val mean-roc_auc_score: 0.7384
|
7 |
+
2025-09-23 02:39:32,788 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 2/100 | Train Loss: 0.1590 | Val mean-roc_auc_score: 0.7653
|
8 |
+
2025-09-23 02:39:32,962 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Global step of best model: 392
|
9 |
+
2025-09-23 02:39:33,493 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Best model saved at epoch 2 with val mean-roc_auc_score: 0.7653
|
10 |
+
2025-09-23 02:39:47,128 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 3/100 | Train Loss: 0.1435 | Val mean-roc_auc_score: 0.7750
|
11 |
+
2025-09-23 02:39:47,304 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Global step of best model: 588
|
12 |
+
2025-09-23 02:39:47,818 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Best model saved at epoch 3 with val mean-roc_auc_score: 0.7750
|
13 |
+
2025-09-23 02:40:01,552 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 4/100 | Train Loss: 0.1347 | Val mean-roc_auc_score: 0.7780
|
14 |
+
2025-09-23 02:40:01,723 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Global step of best model: 784
|
15 |
+
2025-09-23 02:40:02,234 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Best model saved at epoch 4 with val mean-roc_auc_score: 0.7780
|
16 |
+
2025-09-23 02:40:15,791 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 5/100 | Train Loss: 0.1266 | Val mean-roc_auc_score: 0.7718
|
17 |
+
2025-09-23 02:40:30,524 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 6/100 | Train Loss: 0.1110 | Val mean-roc_auc_score: 0.7722
|
18 |
+
2025-09-23 02:40:44,527 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 7/100 | Train Loss: 0.1024 | Val mean-roc_auc_score: 0.7642
|
19 |
+
2025-09-23 02:40:58,074 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 8/100 | Train Loss: 0.0873 | Val mean-roc_auc_score: 0.7558
|
20 |
+
2025-09-23 02:41:11,562 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 9/100 | Train Loss: 0.0786 | Val mean-roc_auc_score: 0.7576
|
21 |
+
2025-09-23 02:41:25,156 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 10/100 | Train Loss: 0.0677 | Val mean-roc_auc_score: 0.7443
|
22 |
+
2025-09-23 02:41:39,882 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 11/100 | Train Loss: 0.0592 | Val mean-roc_auc_score: 0.7454
|
23 |
+
2025-09-23 02:41:53,906 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 12/100 | Train Loss: 0.0484 | Val mean-roc_auc_score: 0.7423
|
24 |
+
2025-09-23 02:42:07,366 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 13/100 | Train Loss: 0.0426 | Val mean-roc_auc_score: 0.7453
|
25 |
+
2025-09-23 02:42:21,012 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 14/100 | Train Loss: 0.0421 | Val mean-roc_auc_score: 0.7412
|
26 |
+
2025-09-23 02:42:34,708 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 15/100 | Train Loss: 0.0359 | Val mean-roc_auc_score: 0.7397
|
27 |
+
2025-09-23 02:42:49,463 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 16/100 | Train Loss: 0.0302 | Val mean-roc_auc_score: 0.7390
|
28 |
+
2025-09-23 02:43:03,487 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 17/100 | Train Loss: 0.0325 | Val mean-roc_auc_score: 0.7414
|
29 |
+
2025-09-23 02:43:17,095 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 18/100 | Train Loss: 0.0273 | Val mean-roc_auc_score: 0.7354
|
30 |
+
2025-09-23 02:43:30,623 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 19/100 | Train Loss: 0.0269 | Val mean-roc_auc_score: 0.7311
|
31 |
+
2025-09-23 02:43:44,349 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 20/100 | Train Loss: 0.0233 | Val mean-roc_auc_score: 0.7333
|
32 |
+
2025-09-23 02:43:59,080 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 21/100 | Train Loss: 0.0234 | Val mean-roc_auc_score: 0.7368
|
33 |
+
2025-09-23 02:44:12,921 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 22/100 | Train Loss: 0.0208 | Val mean-roc_auc_score: 0.7339
|
34 |
+
2025-09-23 02:44:26,518 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 23/100 | Train Loss: 0.0167 | Val mean-roc_auc_score: 0.7351
|
35 |
+
2025-09-23 02:44:40,164 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 24/100 | Train Loss: 0.0184 | Val mean-roc_auc_score: 0.7364
|
36 |
+
2025-09-23 02:44:53,832 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 25/100 | Train Loss: 0.0171 | Val mean-roc_auc_score: 0.7357
|
37 |
+
2025-09-23 02:45:08,612 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 26/100 | Train Loss: 0.0160 | Val mean-roc_auc_score: 0.7344
|
38 |
+
2025-09-23 02:45:22,480 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 27/100 | Train Loss: 0.0160 | Val mean-roc_auc_score: 0.7330
|
39 |
+
2025-09-23 02:45:36,262 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 28/100 | Train Loss: 0.0154 | Val mean-roc_auc_score: 0.7344
|
40 |
+
2025-09-23 02:45:49,757 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 29/100 | Train Loss: 0.0156 | Val mean-roc_auc_score: 0.7342
|
41 |
+
2025-09-23 02:46:03,333 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 30/100 | Train Loss: 0.0134 | Val mean-roc_auc_score: 0.7331
|
42 |
+
2025-09-23 02:46:18,277 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 31/100 | Train Loss: 0.0139 | Val mean-roc_auc_score: 0.7349
|
43 |
+
2025-09-23 02:46:32,257 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 32/100 | Train Loss: 0.0133 | Val mean-roc_auc_score: 0.7324
|
44 |
+
2025-09-23 02:46:45,788 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 33/100 | Train Loss: 0.0133 | Val mean-roc_auc_score: 0.7308
|
45 |
+
2025-09-23 02:46:59,273 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 34/100 | Train Loss: 0.0129 | Val mean-roc_auc_score: 0.7337
|
46 |
+
2025-09-23 02:47:12,861 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 35/100 | Train Loss: 0.0111 | Val mean-roc_auc_score: 0.7317
|
47 |
+
2025-09-23 02:47:27,597 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 36/100 | Train Loss: 0.0116 | Val mean-roc_auc_score: 0.7310
|
48 |
+
2025-09-23 02:47:41,558 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 37/100 | Train Loss: 0.0117 | Val mean-roc_auc_score: 0.7319
|
49 |
+
2025-09-23 02:47:55,081 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 38/100 | Train Loss: 0.0118 | Val mean-roc_auc_score: 0.7320
|
50 |
+
2025-09-23 02:48:08,873 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 39/100 | Train Loss: 0.0107 | Val mean-roc_auc_score: 0.7309
|
51 |
+
2025-09-23 02:48:22,591 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 40/100 | Train Loss: 0.0104 | Val mean-roc_auc_score: 0.7313
|
52 |
+
2025-09-23 02:48:37,369 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 41/100 | Train Loss: 0.0107 | Val mean-roc_auc_score: 0.7293
|
53 |
+
2025-09-23 02:48:51,115 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 42/100 | Train Loss: 0.0102 | Val mean-roc_auc_score: 0.7307
|
54 |
+
2025-09-23 02:49:04,721 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 43/100 | Train Loss: 0.0105 | Val mean-roc_auc_score: 0.7299
|
55 |
+
2025-09-23 02:49:18,340 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 44/100 | Train Loss: 0.0107 | Val mean-roc_auc_score: 0.7303
|
56 |
+
2025-09-23 02:49:31,719 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 45/100 | Train Loss: 0.0093 | Val mean-roc_auc_score: 0.7312
|
57 |
+
2025-09-23 02:49:46,507 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 46/100 | Train Loss: 0.0106 | Val mean-roc_auc_score: 0.7286
|
58 |
+
2025-09-23 02:50:00,184 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 47/100 | Train Loss: 0.0087 | Val mean-roc_auc_score: 0.7290
|
59 |
+
2025-09-23 02:50:13,802 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 48/100 | Train Loss: 0.0089 | Val mean-roc_auc_score: 0.7272
|
60 |
+
2025-09-23 02:50:27,426 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 49/100 | Train Loss: 0.0117 | Val mean-roc_auc_score: 0.7285
|
61 |
+
2025-09-23 02:50:40,839 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 50/100 | Train Loss: 0.0090 | Val mean-roc_auc_score: 0.7296
|
62 |
+
2025-09-23 02:50:54,269 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 51/100 | Train Loss: 0.0093 | Val mean-roc_auc_score: 0.7303
|
63 |
+
2025-09-23 02:51:09,206 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 52/100 | Train Loss: 0.0084 | Val mean-roc_auc_score: 0.7281
|
64 |
+
2025-09-23 02:51:22,839 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 53/100 | Train Loss: 0.0080 | Val mean-roc_auc_score: 0.7287
|
65 |
+
2025-09-23 02:51:36,629 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 54/100 | Train Loss: 0.0086 | Val mean-roc_auc_score: 0.7284
|
66 |
+
2025-09-23 02:51:50,192 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 55/100 | Train Loss: 0.0086 | Val mean-roc_auc_score: 0.7293
|
67 |
+
2025-09-23 02:52:03,908 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 56/100 | Train Loss: 0.0079 | Val mean-roc_auc_score: 0.7282
|
68 |
+
2025-09-23 02:52:19,138 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 57/100 | Train Loss: 0.0085 | Val mean-roc_auc_score: 0.7297
|
69 |
+
2025-09-23 02:52:32,733 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 58/100 | Train Loss: 0.0082 | Val mean-roc_auc_score: 0.7314
|
70 |
+
2025-09-23 02:52:46,469 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 59/100 | Train Loss: 0.0085 | Val mean-roc_auc_score: 0.7295
|
71 |
+
2025-09-23 02:52:59,964 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 60/100 | Train Loss: 0.0082 | Val mean-roc_auc_score: 0.7289
|
72 |
+
2025-09-23 02:53:13,449 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 61/100 | Train Loss: 0.0081 | Val mean-roc_auc_score: 0.7298
|
73 |
+
2025-09-23 02:53:28,363 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 62/100 | Train Loss: 0.0073 | Val mean-roc_auc_score: 0.7303
|
74 |
+
2025-09-23 02:53:41,855 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 63/100 | Train Loss: 0.0080 | Val mean-roc_auc_score: 0.7285
|
75 |
+
2025-09-23 02:53:55,589 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 64/100 | Train Loss: 0.0080 | Val mean-roc_auc_score: 0.7292
|
76 |
+
2025-09-23 02:54:09,105 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 65/100 | Train Loss: 0.0083 | Val mean-roc_auc_score: 0.7289
|
77 |
+
2025-09-23 02:54:22,751 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 66/100 | Train Loss: 0.0063 | Val mean-roc_auc_score: 0.7287
|
78 |
+
2025-09-23 02:54:37,893 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 67/100 | Train Loss: 0.0067 | Val mean-roc_auc_score: 0.7286
|
79 |
+
2025-09-23 02:54:51,525 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 68/100 | Train Loss: 0.0078 | Val mean-roc_auc_score: 0.7286
|
80 |
+
2025-09-23 02:55:05,079 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 69/100 | Train Loss: 0.0073 | Val mean-roc_auc_score: 0.7282
|
81 |
+
2025-09-23 02:55:18,604 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 70/100 | Train Loss: 0.0067 | Val mean-roc_auc_score: 0.7302
|
82 |
+
2025-09-23 02:55:32,445 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 71/100 | Train Loss: 0.0078 | Val mean-roc_auc_score: 0.7288
|
83 |
+
2025-09-23 02:55:47,523 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 72/100 | Train Loss: 0.0059 | Val mean-roc_auc_score: 0.7282
|
84 |
+
2025-09-23 02:56:01,438 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 73/100 | Train Loss: 0.0059 | Val mean-roc_auc_score: 0.7279
|
85 |
+
2025-09-23 02:56:15,080 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 74/100 | Train Loss: 0.0072 | Val mean-roc_auc_score: 0.7292
|
86 |
+
2025-09-23 02:56:28,772 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 75/100 | Train Loss: 0.0068 | Val mean-roc_auc_score: 0.7281
|
87 |
+
2025-09-23 02:56:42,466 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 76/100 | Train Loss: 0.0071 | Val mean-roc_auc_score: 0.7265
|
88 |
+
2025-09-23 02:56:57,702 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 77/100 | Train Loss: 0.0066 | Val mean-roc_auc_score: 0.7280
|
89 |
+
2025-09-23 02:57:11,165 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 78/100 | Train Loss: 0.0070 | Val mean-roc_auc_score: 0.7262
|
90 |
+
2025-09-23 02:57:24,805 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 79/100 | Train Loss: 0.0068 | Val mean-roc_auc_score: 0.7276
|
91 |
+
2025-09-23 02:57:38,147 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 80/100 | Train Loss: 0.0063 | Val mean-roc_auc_score: 0.7268
|
92 |
+
2025-09-23 02:57:51,842 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 81/100 | Train Loss: 0.0070 | Val mean-roc_auc_score: 0.7273
|
93 |
+
2025-09-23 02:58:07,023 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 82/100 | Train Loss: 0.0064 | Val mean-roc_auc_score: 0.7275
|
94 |
+
2025-09-23 02:58:20,881 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 83/100 | Train Loss: 0.0064 | Val mean-roc_auc_score: 0.7276
|
95 |
+
2025-09-23 02:58:34,688 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 84/100 | Train Loss: 0.0063 | Val mean-roc_auc_score: 0.7264
|
96 |
+
2025-09-23 02:58:48,313 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 85/100 | Train Loss: 0.0062 | Val mean-roc_auc_score: 0.7273
|
97 |
+
2025-09-23 02:59:02,005 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 86/100 | Train Loss: 0.0060 | Val mean-roc_auc_score: 0.7278
|
98 |
+
2025-09-23 02:59:17,113 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 87/100 | Train Loss: 0.0065 | Val mean-roc_auc_score: 0.7275
|
99 |
+
2025-09-23 02:59:30,599 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 88/100 | Train Loss: 0.0070 | Val mean-roc_auc_score: 0.7287
|
100 |
+
2025-09-23 02:59:44,443 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 89/100 | Train Loss: 0.0067 | Val mean-roc_auc_score: 0.7279
|
101 |
+
2025-09-23 02:59:58,021 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 90/100 | Train Loss: 0.0074 | Val mean-roc_auc_score: 0.7275
|
102 |
+
2025-09-23 03:00:11,767 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 91/100 | Train Loss: 0.0050 | Val mean-roc_auc_score: 0.7271
|
103 |
+
2025-09-23 03:00:26,980 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 92/100 | Train Loss: 0.0055 | Val mean-roc_auc_score: 0.7276
|
104 |
+
2025-09-23 03:00:40,607 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 93/100 | Train Loss: 0.0058 | Val mean-roc_auc_score: 0.7277
|
105 |
+
2025-09-23 03:00:54,101 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 94/100 | Train Loss: 0.0067 | Val mean-roc_auc_score: 0.7278
|
106 |
+
2025-09-23 03:01:07,696 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 95/100 | Train Loss: 0.0056 | Val mean-roc_auc_score: 0.7267
|
107 |
+
2025-09-23 03:01:21,445 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 96/100 | Train Loss: 0.0052 | Val mean-roc_auc_score: 0.7264
|
108 |
+
2025-09-23 03:01:36,979 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 97/100 | Train Loss: 0.0055 | Val mean-roc_auc_score: 0.7271
|
109 |
+
2025-09-23 03:01:50,529 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 98/100 | Train Loss: 0.0060 | Val mean-roc_auc_score: 0.7265
|
110 |
+
2025-09-23 03:02:04,055 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 99/100 | Train Loss: 0.0066 | Val mean-roc_auc_score: 0.7276
|
111 |
+
2025-09-23 03:02:17,729 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 100/100 | Train Loss: 0.0063 | Val mean-roc_auc_score: 0.7282
|
112 |
+
2025-09-23 03:02:18,853 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Test mean-roc_auc_score: 0.7585
|
113 |
+
2025-09-23 03:02:19,241 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Starting triplicate run 2 for dataset tox21 at 2025-09-23_03-02-19
|
114 |
+
2025-09-23 03:02:31,280 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 1/100 | Train Loss: 0.1784 | Val mean-roc_auc_score: 0.7501
|
115 |
+
2025-09-23 03:02:31,281 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Global step of best model: 196
|
116 |
+
2025-09-23 03:02:31,803 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Best model saved at epoch 1 with val mean-roc_auc_score: 0.7501
|
117 |
+
2025-09-23 03:02:45,478 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 2/100 | Train Loss: 0.1603 | Val mean-roc_auc_score: 0.7635
|
118 |
+
2025-09-23 03:02:45,648 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Global step of best model: 392
|
119 |
+
2025-09-23 03:02:46,173 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Best model saved at epoch 2 with val mean-roc_auc_score: 0.7635
|
120 |
+
2025-09-23 03:02:59,716 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 3/100 | Train Loss: 0.1456 | Val mean-roc_auc_score: 0.7762
|
121 |
+
2025-09-23 03:02:59,885 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Global step of best model: 588
|
122 |
+
2025-09-23 03:03:00,400 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Best model saved at epoch 3 with val mean-roc_auc_score: 0.7762
|
123 |
+
2025-09-23 03:03:14,066 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 4/100 | Train Loss: 0.1317 | Val mean-roc_auc_score: 0.7665
|
124 |
+
2025-09-23 03:03:27,765 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 5/100 | Train Loss: 0.1273 | Val mean-roc_auc_score: 0.7694
|
125 |
+
2025-09-23 03:03:42,621 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 6/100 | Train Loss: 0.1184 | Val mean-roc_auc_score: 0.7707
|
126 |
+
2025-09-23 03:03:56,636 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 7/100 | Train Loss: 0.1003 | Val mean-roc_auc_score: 0.7478
|
127 |
+
2025-09-23 03:04:10,200 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 8/100 | Train Loss: 0.0869 | Val mean-roc_auc_score: 0.7590
|
128 |
+
2025-09-23 03:04:23,838 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 9/100 | Train Loss: 0.0757 | Val mean-roc_auc_score: 0.7506
|
129 |
+
2025-09-23 03:04:37,555 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 10/100 | Train Loss: 0.0635 | Val mean-roc_auc_score: 0.7497
|
130 |
+
2025-09-23 03:04:52,586 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 11/100 | Train Loss: 0.0605 | Val mean-roc_auc_score: 0.7484
|
131 |
+
2025-09-23 03:05:06,603 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 12/100 | Train Loss: 0.0484 | Val mean-roc_auc_score: 0.7406
|
132 |
+
2025-09-23 03:05:20,316 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 13/100 | Train Loss: 0.0433 | Val mean-roc_auc_score: 0.7402
|
133 |
+
2025-09-23 03:05:34,294 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 14/100 | Train Loss: 0.0421 | Val mean-roc_auc_score: 0.7429
|
134 |
+
2025-09-23 03:05:47,976 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 15/100 | Train Loss: 0.0381 | Val mean-roc_auc_score: 0.7405
|
135 |
+
2025-09-23 03:06:02,742 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 16/100 | Train Loss: 0.0319 | Val mean-roc_auc_score: 0.7366
|
136 |
+
2025-09-23 03:06:16,734 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 17/100 | Train Loss: 0.0298 | Val mean-roc_auc_score: 0.7378
|
137 |
+
2025-09-23 03:06:30,545 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 18/100 | Train Loss: 0.0322 | Val mean-roc_auc_score: 0.7390
|
138 |
+
2025-09-23 03:06:44,162 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 19/100 | Train Loss: 0.0260 | Val mean-roc_auc_score: 0.7365
|
139 |
+
2025-09-23 03:06:57,806 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 20/100 | Train Loss: 0.0239 | Val mean-roc_auc_score: 0.7356
|
140 |
+
2025-09-23 03:07:12,760 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 21/100 | Train Loss: 0.0217 | Val mean-roc_auc_score: 0.7370
|
141 |
+
2025-09-23 03:07:26,634 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 22/100 | Train Loss: 0.0226 | Val mean-roc_auc_score: 0.7340
|
142 |
+
2025-09-23 03:07:40,184 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 23/100 | Train Loss: 0.0197 | Val mean-roc_auc_score: 0.7331
|
143 |
+
2025-09-23 03:07:53,797 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 24/100 | Train Loss: 0.0125 | Val mean-roc_auc_score: 0.7360
|
144 |
+
2025-09-23 03:08:07,302 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 25/100 | Train Loss: 0.0182 | Val mean-roc_auc_score: 0.7342
|
145 |
+
2025-09-23 03:08:21,923 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 26/100 | Train Loss: 0.0177 | Val mean-roc_auc_score: 0.7317
|
146 |
+
2025-09-23 03:08:35,997 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 27/100 | Train Loss: 0.0161 | Val mean-roc_auc_score: 0.7342
|
147 |
+
2025-09-23 03:08:49,696 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 28/100 | Train Loss: 0.0158 | Val mean-roc_auc_score: 0.7352
|
148 |
+
2025-09-23 03:09:03,279 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 29/100 | Train Loss: 0.0150 | Val mean-roc_auc_score: 0.7319
|
149 |
+
2025-09-23 03:09:16,985 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 30/100 | Train Loss: 0.0152 | Val mean-roc_auc_score: 0.7342
|
150 |
+
2025-09-23 03:09:31,793 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 31/100 | Train Loss: 0.0147 | Val mean-roc_auc_score: 0.7356
|
151 |
+
2025-09-23 03:09:45,752 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 32/100 | Train Loss: 0.0125 | Val mean-roc_auc_score: 0.7374
|
152 |
+
2025-09-23 03:09:59,432 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 33/100 | Train Loss: 0.0133 | Val mean-roc_auc_score: 0.7336
|
153 |
+
2025-09-23 03:10:12,911 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 34/100 | Train Loss: 0.0132 | Val mean-roc_auc_score: 0.7329
|
154 |
+
2025-09-23 03:10:26,352 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 35/100 | Train Loss: 0.0128 | Val mean-roc_auc_score: 0.7330
|
155 |
+
2025-09-23 03:10:41,237 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 36/100 | Train Loss: 0.0111 | Val mean-roc_auc_score: 0.7337
|
156 |
+
2025-09-23 03:10:55,066 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 37/100 | Train Loss: 0.0123 | Val mean-roc_auc_score: 0.7330
|
157 |
+
2025-09-23 03:11:08,490 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 38/100 | Train Loss: 0.0120 | Val mean-roc_auc_score: 0.7334
|
158 |
+
2025-09-23 03:11:21,992 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 39/100 | Train Loss: 0.0126 | Val mean-roc_auc_score: 0.7307
|
159 |
+
2025-09-23 03:11:35,486 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 40/100 | Train Loss: 0.0109 | Val mean-roc_auc_score: 0.7289
|
160 |
+
2025-09-23 03:11:50,107 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 41/100 | Train Loss: 0.0125 | Val mean-roc_auc_score: 0.7330
|
161 |
+
2025-09-23 03:12:04,083 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 42/100 | Train Loss: 0.0118 | Val mean-roc_auc_score: 0.7312
|
162 |
+
2025-09-23 03:12:17,663 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 43/100 | Train Loss: 0.0103 | Val mean-roc_auc_score: 0.7317
|
163 |
+
2025-09-23 03:12:31,222 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 44/100 | Train Loss: 0.0112 | Val mean-roc_auc_score: 0.7313
|
164 |
+
2025-09-23 03:12:44,827 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 45/100 | Train Loss: 0.0136 | Val mean-roc_auc_score: 0.7318
|
165 |
+
2025-09-23 03:12:59,519 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 46/100 | Train Loss: 0.0120 | Val mean-roc_auc_score: 0.7301
|
166 |
+
2025-09-23 03:13:13,427 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 47/100 | Train Loss: 0.0087 | Val mean-roc_auc_score: 0.7306
|
167 |
+
2025-09-23 03:13:26,860 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 48/100 | Train Loss: 0.0100 | Val mean-roc_auc_score: 0.7291
|
168 |
+
2025-09-23 03:13:40,545 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 49/100 | Train Loss: 0.0096 | Val mean-roc_auc_score: 0.7295
|
169 |
+
2025-09-23 03:13:53,993 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 50/100 | Train Loss: 0.0090 | Val mean-roc_auc_score: 0.7303
|
170 |
+
2025-09-23 03:14:07,616 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 51/100 | Train Loss: 0.0094 | Val mean-roc_auc_score: 0.7296
|
171 |
+
2025-09-23 03:14:22,613 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 52/100 | Train Loss: 0.0094 | Val mean-roc_auc_score: 0.7289
|
172 |
+
2025-09-23 03:14:36,259 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 53/100 | Train Loss: 0.0091 | Val mean-roc_auc_score: 0.7286
|
173 |
+
2025-09-23 03:14:49,948 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 54/100 | Train Loss: 0.0109 | Val mean-roc_auc_score: 0.7287
|
174 |
+
2025-09-23 03:15:03,652 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 55/100 | Train Loss: 0.0082 | Val mean-roc_auc_score: 0.7290
|
175 |
+
2025-09-23 03:15:16,991 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 56/100 | Train Loss: 0.0088 | Val mean-roc_auc_score: 0.7299
|
176 |
+
2025-09-23 03:15:31,969 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 57/100 | Train Loss: 0.0088 | Val mean-roc_auc_score: 0.7303
|
177 |
+
2025-09-23 03:15:45,537 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 58/100 | Train Loss: 0.0084 | Val mean-roc_auc_score: 0.7296
|
178 |
+
2025-09-23 03:15:59,026 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 59/100 | Train Loss: 0.0088 | Val mean-roc_auc_score: 0.7286
|
179 |
+
2025-09-23 03:16:12,639 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 60/100 | Train Loss: 0.0074 | Val mean-roc_auc_score: 0.7302
|
180 |
+
2025-09-23 03:16:26,176 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 61/100 | Train Loss: 0.0078 | Val mean-roc_auc_score: 0.7291
|
181 |
+
2025-09-23 03:16:41,181 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 62/100 | Train Loss: 0.0083 | Val mean-roc_auc_score: 0.7289
|
182 |
+
2025-09-23 03:16:54,717 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 63/100 | Train Loss: 0.0081 | Val mean-roc_auc_score: 0.7298
|
183 |
+
2025-09-23 03:17:08,272 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 64/100 | Train Loss: 0.0072 | Val mean-roc_auc_score: 0.7305
|
184 |
+
2025-09-23 03:17:21,833 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 65/100 | Train Loss: 0.0077 | Val mean-roc_auc_score: 0.7289
|
185 |
+
2025-09-23 03:17:35,388 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 66/100 | Train Loss: 0.0091 | Val mean-roc_auc_score: 0.7301
|
186 |
+
2025-09-23 03:17:50,489 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 67/100 | Train Loss: 0.0076 | Val mean-roc_auc_score: 0.7278
|
187 |
+
2025-09-23 03:18:04,023 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 68/100 | Train Loss: 0.0079 | Val mean-roc_auc_score: 0.7300
|
188 |
+
2025-09-23 03:18:17,411 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 69/100 | Train Loss: 0.0098 | Val mean-roc_auc_score: 0.7288
|
189 |
+
2025-09-23 03:18:31,007 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 70/100 | Train Loss: 0.0074 | Val mean-roc_auc_score: 0.7288
|
190 |
+
2025-09-23 03:18:44,497 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 71/100 | Train Loss: 0.0093 | Val mean-roc_auc_score: 0.7280
|
191 |
+
2025-09-23 03:18:59,513 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 72/100 | Train Loss: 0.0077 | Val mean-roc_auc_score: 0.7294
|
192 |
+
2025-09-23 03:19:12,940 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 73/100 | Train Loss: 0.0084 | Val mean-roc_auc_score: 0.7278
|
193 |
+
2025-09-23 03:19:26,419 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 74/100 | Train Loss: 0.0075 | Val mean-roc_auc_score: 0.7291
|
194 |
+
2025-09-23 03:19:39,971 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 75/100 | Train Loss: 0.0075 | Val mean-roc_auc_score: 0.7281
|
195 |
+
2025-09-23 03:19:53,461 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 76/100 | Train Loss: 0.0070 | Val mean-roc_auc_score: 0.7277
|
196 |
+
2025-09-23 03:20:08,570 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 77/100 | Train Loss: 0.0074 | Val mean-roc_auc_score: 0.7288
|
197 |
+
2025-09-23 03:20:22,190 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 78/100 | Train Loss: 0.0071 | Val mean-roc_auc_score: 0.7283
|
198 |
+
2025-09-23 03:20:35,720 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 79/100 | Train Loss: 0.0073 | Val mean-roc_auc_score: 0.7293
|
199 |
+
2025-09-23 03:20:49,164 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 80/100 | Train Loss: 0.0069 | Val mean-roc_auc_score: 0.7297
|
200 |
+
2025-09-23 03:21:02,589 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 81/100 | Train Loss: 0.0074 | Val mean-roc_auc_score: 0.7286
|
201 |
+
2025-09-23 03:21:17,700 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 82/100 | Train Loss: 0.0071 | Val mean-roc_auc_score: 0.7290
|
202 |
+
2025-09-23 03:21:31,263 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 83/100 | Train Loss: 0.0064 | Val mean-roc_auc_score: 0.7292
|
203 |
+
2025-09-23 03:21:44,755 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 84/100 | Train Loss: 0.0071 | Val mean-roc_auc_score: 0.7293
|
204 |
+
2025-09-23 03:21:58,461 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 85/100 | Train Loss: 0.0074 | Val mean-roc_auc_score: 0.7291
|
205 |
+
2025-09-23 03:22:12,012 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 86/100 | Train Loss: 0.0070 | Val mean-roc_auc_score: 0.7289
|
206 |
+
2025-09-23 03:22:27,198 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 87/100 | Train Loss: 0.0060 | Val mean-roc_auc_score: 0.7283
|
207 |
+
2025-09-23 03:22:40,745 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 88/100 | Train Loss: 0.0068 | Val mean-roc_auc_score: 0.7282
|
208 |
+
2025-09-23 03:22:54,182 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 89/100 | Train Loss: 0.0059 | Val mean-roc_auc_score: 0.7269
|
209 |
+
2025-09-23 03:23:07,695 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 90/100 | Train Loss: 0.0065 | Val mean-roc_auc_score: 0.7292
|
210 |
+
2025-09-23 03:23:21,076 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 91/100 | Train Loss: 0.0064 | Val mean-roc_auc_score: 0.7281
|
211 |
+
2025-09-23 03:23:36,063 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 92/100 | Train Loss: 0.0066 | Val mean-roc_auc_score: 0.7296
|
212 |
+
2025-09-23 03:23:49,685 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 93/100 | Train Loss: 0.0057 | Val mean-roc_auc_score: 0.7284
|
213 |
+
2025-09-23 03:24:03,317 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 94/100 | Train Loss: 0.0062 | Val mean-roc_auc_score: 0.7293
|
214 |
+
2025-09-23 03:24:16,841 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 95/100 | Train Loss: 0.0065 | Val mean-roc_auc_score: 0.7292
|
215 |
+
2025-09-23 03:24:30,143 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 96/100 | Train Loss: 0.0061 | Val mean-roc_auc_score: 0.7293
|
216 |
+
2025-09-23 03:24:45,179 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 97/100 | Train Loss: 0.0067 | Val mean-roc_auc_score: 0.7296
|
217 |
+
2025-09-23 03:24:58,804 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 98/100 | Train Loss: 0.0061 | Val mean-roc_auc_score: 0.7297
|
218 |
+
2025-09-23 03:25:12,248 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 99/100 | Train Loss: 0.0062 | Val mean-roc_auc_score: 0.7291
|
219 |
+
2025-09-23 03:25:25,732 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 100/100 | Train Loss: 0.0066 | Val mean-roc_auc_score: 0.7294
|
220 |
+
2025-09-23 03:25:26,904 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Test mean-roc_auc_score: 0.7480
|
221 |
+
2025-09-23 03:25:27,310 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Starting triplicate run 3 for dataset tox21 at 2025-09-23_03-25-27
|
222 |
+
2025-09-23 03:25:39,213 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 1/100 | Train Loss: 0.1771 | Val mean-roc_auc_score: 0.7470
|
223 |
+
2025-09-23 03:25:39,213 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Global step of best model: 196
|
224 |
+
2025-09-23 03:25:39,722 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Best model saved at epoch 1 with val mean-roc_auc_score: 0.7470
|
225 |
+
2025-09-23 03:25:53,180 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 2/100 | Train Loss: 0.1549 | Val mean-roc_auc_score: 0.7565
|
226 |
+
2025-09-23 03:25:53,347 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Global step of best model: 392
|
227 |
+
2025-09-23 03:25:53,866 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Best model saved at epoch 2 with val mean-roc_auc_score: 0.7565
|
228 |
+
2025-09-23 03:26:07,361 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 3/100 | Train Loss: 0.1513 | Val mean-roc_auc_score: 0.7688
|
229 |
+
2025-09-23 03:26:07,531 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Global step of best model: 588
|
230 |
+
2025-09-23 03:26:08,041 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Best model saved at epoch 3 with val mean-roc_auc_score: 0.7688
|
231 |
+
2025-09-23 03:26:21,488 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 4/100 | Train Loss: 0.1287 | Val mean-roc_auc_score: 0.7682
|
232 |
+
2025-09-23 03:26:34,868 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 5/100 | Train Loss: 0.1234 | Val mean-roc_auc_score: 0.7686
|
233 |
+
2025-09-23 03:26:49,835 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 6/100 | Train Loss: 0.1053 | Val mean-roc_auc_score: 0.7548
|
234 |
+
2025-09-23 03:27:03,747 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 7/100 | Train Loss: 0.1003 | Val mean-roc_auc_score: 0.7614
|
235 |
+
2025-09-23 03:27:17,319 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 8/100 | Train Loss: 0.0878 | Val mean-roc_auc_score: 0.7511
|
236 |
+
2025-09-23 03:27:30,999 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 9/100 | Train Loss: 0.0781 | Val mean-roc_auc_score: 0.7427
|
237 |
+
2025-09-23 03:27:44,609 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 10/100 | Train Loss: 0.0656 | Val mean-roc_auc_score: 0.7477
|
238 |
+
2025-09-23 03:27:59,308 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 11/100 | Train Loss: 0.0550 | Val mean-roc_auc_score: 0.7457
|
239 |
+
2025-09-23 03:28:13,321 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 12/100 | Train Loss: 0.0505 | Val mean-roc_auc_score: 0.7423
|
240 |
+
2025-09-23 03:28:26,813 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 13/100 | Train Loss: 0.0462 | Val mean-roc_auc_score: 0.7376
|
241 |
+
2025-09-23 03:28:40,348 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 14/100 | Train Loss: 0.0401 | Val mean-roc_auc_score: 0.7347
|
242 |
+
2025-09-23 03:28:53,761 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 15/100 | Train Loss: 0.0363 | Val mean-roc_auc_score: 0.7391
|
243 |
+
2025-09-23 03:29:08,350 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 16/100 | Train Loss: 0.0304 | Val mean-roc_auc_score: 0.7416
|
244 |
+
2025-09-23 03:29:22,304 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 17/100 | Train Loss: 0.0317 | Val mean-roc_auc_score: 0.7344
|
245 |
+
2025-09-23 03:29:35,873 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 18/100 | Train Loss: 0.0264 | Val mean-roc_auc_score: 0.7332
|
246 |
+
2025-09-23 03:29:49,342 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 19/100 | Train Loss: 0.0295 | Val mean-roc_auc_score: 0.7376
|
247 |
+
2025-09-23 03:30:02,992 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 20/100 | Train Loss: 0.0268 | Val mean-roc_auc_score: 0.7342
|
248 |
+
2025-09-23 03:30:17,770 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 21/100 | Train Loss: 0.0260 | Val mean-roc_auc_score: 0.7347
|
249 |
+
2025-09-23 03:30:31,679 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 22/100 | Train Loss: 0.0259 | Val mean-roc_auc_score: 0.7330
|
250 |
+
2025-09-23 03:30:45,217 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 23/100 | Train Loss: 0.0177 | Val mean-roc_auc_score: 0.7348
|
251 |
+
2025-09-23 03:30:58,682 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 24/100 | Train Loss: 0.0245 | Val mean-roc_auc_score: 0.7343
|
252 |
+
2025-09-23 03:31:12,341 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 25/100 | Train Loss: 0.0177 | Val mean-roc_auc_score: 0.7346
|
253 |
+
2025-09-23 03:31:27,180 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 26/100 | Train Loss: 0.0163 | Val mean-roc_auc_score: 0.7345
|
254 |
+
2025-09-23 03:31:41,166 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 27/100 | Train Loss: 0.0159 | Val mean-roc_auc_score: 0.7322
|
255 |
+
2025-09-23 03:31:54,621 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 28/100 | Train Loss: 0.0153 | Val mean-roc_auc_score: 0.7309
|
256 |
+
2025-09-23 03:32:07,963 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 29/100 | Train Loss: 0.0157 | Val mean-roc_auc_score: 0.7332
|
257 |
+
2025-09-23 03:32:21,472 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 30/100 | Train Loss: 0.0156 | Val mean-roc_auc_score: 0.7340
|
258 |
+
2025-09-23 03:32:36,097 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 31/100 | Train Loss: 0.0137 | Val mean-roc_auc_score: 0.7319
|
259 |
+
2025-09-23 03:32:50,158 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 32/100 | Train Loss: 0.0145 | Val mean-roc_auc_score: 0.7351
|
260 |
+
2025-09-23 03:33:03,448 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 33/100 | Train Loss: 0.0129 | Val mean-roc_auc_score: 0.7338
|
261 |
+
2025-09-23 03:33:16,945 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 34/100 | Train Loss: 0.0134 | Val mean-roc_auc_score: 0.7338
|
262 |
+
2025-09-23 03:33:30,374 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 35/100 | Train Loss: 0.0121 | Val mean-roc_auc_score: 0.7326
|
263 |
+
2025-09-23 03:33:45,158 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 36/100 | Train Loss: 0.0113 | Val mean-roc_auc_score: 0.7312
|
264 |
+
2025-09-23 03:33:59,271 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 37/100 | Train Loss: 0.0125 | Val mean-roc_auc_score: 0.7331
|
265 |
+
2025-09-23 03:34:12,884 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 38/100 | Train Loss: 0.0131 | Val mean-roc_auc_score: 0.7323
|
266 |
+
2025-09-23 03:34:26,480 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 39/100 | Train Loss: 0.0111 | Val mean-roc_auc_score: 0.7311
|
267 |
+
2025-09-23 03:34:39,943 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 40/100 | Train Loss: 0.0101 | Val mean-roc_auc_score: 0.7347
|
268 |
+
2025-09-23 03:34:54,625 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 41/100 | Train Loss: 0.0122 | Val mean-roc_auc_score: 0.7326
|
269 |
+
2025-09-23 03:35:08,495 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 42/100 | Train Loss: 0.0125 | Val mean-roc_auc_score: 0.7302
|
270 |
+
2025-09-23 03:35:22,003 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 43/100 | Train Loss: 0.0128 | Val mean-roc_auc_score: 0.7296
|
271 |
+
2025-09-23 03:35:35,587 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 44/100 | Train Loss: 0.0110 | Val mean-roc_auc_score: 0.7311
|
272 |
+
2025-09-23 03:35:48,892 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 45/100 | Train Loss: 0.0107 | Val mean-roc_auc_score: 0.7316
|
273 |
+
2025-09-23 03:36:03,381 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 46/100 | Train Loss: 0.0093 | Val mean-roc_auc_score: 0.7333
|
274 |
+
2025-09-23 03:36:17,205 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 47/100 | Train Loss: 0.0112 | Val mean-roc_auc_score: 0.7318
|
275 |
+
2025-09-23 03:36:30,726 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 48/100 | Train Loss: 0.0131 | Val mean-roc_auc_score: 0.7331
|
276 |
+
2025-09-23 03:36:44,289 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 49/100 | Train Loss: 0.0074 | Val mean-roc_auc_score: 0.7325
|
277 |
+
2025-09-23 03:36:57,883 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 50/100 | Train Loss: 0.0095 | Val mean-roc_auc_score: 0.7310
|
278 |
+
2025-09-23 03:37:11,327 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 51/100 | Train Loss: 0.0091 | Val mean-roc_auc_score: 0.7328
|
279 |
+
2025-09-23 03:37:26,366 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 52/100 | Train Loss: 0.0096 | Val mean-roc_auc_score: 0.7335
|
280 |
+
2025-09-23 03:37:39,917 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 53/100 | Train Loss: 0.0093 | Val mean-roc_auc_score: 0.7326
|
281 |
+
2025-09-23 03:37:53,596 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 54/100 | Train Loss: 0.0082 | Val mean-roc_auc_score: 0.7304
|
282 |
+
2025-09-23 03:38:07,145 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 55/100 | Train Loss: 0.0086 | Val mean-roc_auc_score: 0.7307
|
283 |
+
2025-09-23 03:38:20,769 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 56/100 | Train Loss: 0.0079 | Val mean-roc_auc_score: 0.7290
|
284 |
+
2025-09-23 03:38:36,089 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 57/100 | Train Loss: 0.0084 | Val mean-roc_auc_score: 0.7305
|
285 |
+
2025-09-23 03:38:49,685 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 58/100 | Train Loss: 0.0084 | Val mean-roc_auc_score: 0.7313
|
286 |
+
2025-09-23 03:39:03,305 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 59/100 | Train Loss: 0.0082 | Val mean-roc_auc_score: 0.7295
|
287 |
+
2025-09-23 03:39:16,849 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 60/100 | Train Loss: 0.0090 | Val mean-roc_auc_score: 0.7304
|
288 |
+
2025-09-23 03:39:30,426 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 61/100 | Train Loss: 0.0088 | Val mean-roc_auc_score: 0.7307
|
289 |
+
2025-09-23 03:39:45,530 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 62/100 | Train Loss: 0.0082 | Val mean-roc_auc_score: 0.7292
|
290 |
+
2025-09-23 03:39:59,133 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 63/100 | Train Loss: 0.0095 | Val mean-roc_auc_score: 0.7318
|
291 |
+
2025-09-23 03:40:12,770 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 64/100 | Train Loss: 0.0084 | Val mean-roc_auc_score: 0.7289
|
292 |
+
2025-09-23 03:40:26,219 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 65/100 | Train Loss: 0.0079 | Val mean-roc_auc_score: 0.7312
|
293 |
+
2025-09-23 03:40:39,879 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 66/100 | Train Loss: 0.0071 | Val mean-roc_auc_score: 0.7306
|
294 |
+
2025-09-23 03:40:55,022 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 67/100 | Train Loss: 0.0084 | Val mean-roc_auc_score: 0.7313
|
295 |
+
2025-09-23 03:41:08,585 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 68/100 | Train Loss: 0.0077 | Val mean-roc_auc_score: 0.7294
|
296 |
+
2025-09-23 03:41:22,070 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 69/100 | Train Loss: 0.0073 | Val mean-roc_auc_score: 0.7294
|
297 |
+
2025-09-23 03:41:35,623 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 70/100 | Train Loss: 0.0081 | Val mean-roc_auc_score: 0.7308
|
298 |
+
2025-09-23 03:41:49,220 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 71/100 | Train Loss: 0.0077 | Val mean-roc_auc_score: 0.7283
|
299 |
+
2025-09-23 03:42:04,503 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 72/100 | Train Loss: 0.0076 | Val mean-roc_auc_score: 0.7282
|
300 |
+
2025-09-23 03:42:18,238 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 73/100 | Train Loss: 0.0083 | Val mean-roc_auc_score: 0.7284
|
301 |
+
2025-09-23 03:42:32,008 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 74/100 | Train Loss: 0.0050 | Val mean-roc_auc_score: 0.7292
|
302 |
+
2025-09-23 03:42:45,710 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 75/100 | Train Loss: 0.0076 | Val mean-roc_auc_score: 0.7293
|
303 |
+
2025-09-23 03:42:59,234 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 76/100 | Train Loss: 0.0072 | Val mean-roc_auc_score: 0.7287
|
304 |
+
2025-09-23 03:43:14,290 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 77/100 | Train Loss: 0.0072 | Val mean-roc_auc_score: 0.7291
|
305 |
+
2025-09-23 03:43:27,762 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 78/100 | Train Loss: 0.0068 | Val mean-roc_auc_score: 0.7282
|
306 |
+
2025-09-23 03:43:41,341 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 79/100 | Train Loss: 0.0076 | Val mean-roc_auc_score: 0.7293
|
307 |
+
2025-09-23 03:43:54,883 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 80/100 | Train Loss: 0.0074 | Val mean-roc_auc_score: 0.7289
|
308 |
+
2025-09-23 03:44:08,564 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 81/100 | Train Loss: 0.0069 | Val mean-roc_auc_score: 0.7289
|
309 |
+
2025-09-23 03:44:23,814 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 82/100 | Train Loss: 0.0065 | Val mean-roc_auc_score: 0.7274
|
310 |
+
2025-09-23 03:44:37,110 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 83/100 | Train Loss: 0.0068 | Val mean-roc_auc_score: 0.7285
|
311 |
+
2025-09-23 03:44:50,764 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 84/100 | Train Loss: 0.0066 | Val mean-roc_auc_score: 0.7287
|
312 |
+
2025-09-23 03:45:04,192 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 85/100 | Train Loss: 0.0068 | Val mean-roc_auc_score: 0.7267
|
313 |
+
2025-09-23 03:45:17,897 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 86/100 | Train Loss: 0.0070 | Val mean-roc_auc_score: 0.7278
|
314 |
+
2025-09-23 03:45:33,140 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 87/100 | Train Loss: 0.0063 | Val mean-roc_auc_score: 0.7281
|
315 |
+
2025-09-23 03:45:46,597 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 88/100 | Train Loss: 0.0070 | Val mean-roc_auc_score: 0.7270
|
316 |
+
2025-09-23 03:46:00,146 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 89/100 | Train Loss: 0.0069 | Val mean-roc_auc_score: 0.7280
|
317 |
+
2025-09-23 03:46:13,839 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 90/100 | Train Loss: 0.0063 | Val mean-roc_auc_score: 0.7279
|
318 |
+
2025-09-23 03:46:27,372 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 91/100 | Train Loss: 0.0058 | Val mean-roc_auc_score: 0.7267
|
319 |
+
2025-09-23 03:46:42,624 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 92/100 | Train Loss: 0.0056 | Val mean-roc_auc_score: 0.7275
|
320 |
+
2025-09-23 03:46:56,250 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 93/100 | Train Loss: 0.0061 | Val mean-roc_auc_score: 0.7270
|
321 |
+
2025-09-23 03:47:09,906 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 94/100 | Train Loss: 0.0062 | Val mean-roc_auc_score: 0.7270
|
322 |
+
2025-09-23 03:47:23,490 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 95/100 | Train Loss: 0.0067 | Val mean-roc_auc_score: 0.7287
|
323 |
+
2025-09-23 03:47:37,044 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 96/100 | Train Loss: 0.0070 | Val mean-roc_auc_score: 0.7300
|
324 |
+
2025-09-23 03:47:52,446 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 97/100 | Train Loss: 0.0078 | Val mean-roc_auc_score: 0.7281
|
325 |
+
2025-09-23 03:48:05,975 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 98/100 | Train Loss: 0.0054 | Val mean-roc_auc_score: 0.7287
|
326 |
+
2025-09-23 03:48:19,268 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 99/100 | Train Loss: 0.0073 | Val mean-roc_auc_score: 0.7275
|
327 |
+
2025-09-23 03:48:32,818 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 100/100 | Train Loss: 0.0063 | Val mean-roc_auc_score: 0.7290
|
328 |
+
2025-09-23 03:48:33,967 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Test mean-roc_auc_score: 0.7489
|
329 |
+
2025-09-23 03:48:34,407 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Final Triplicate Test Results — Avg mean-roc_auc_score: 0.7518, Std Dev: 0.0047
|
logs_modchembert_regression_ModChemBERT-MLM-DAPT-TAFT-OPT/modchembert_deepchem_splits_run_bace_regression_epochs100_batch_size32_20250923_015823.log
ADDED
@@ -0,0 +1,325 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
2025-09-23 01:58:23,234 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Running benchmark for dataset: bace_regression
|
2 |
+
2025-09-23 01:58:23,234 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - dataset: bace_regression, tasks: ['pIC50'], epochs: 100, learning rate: 3e-05, transform: True
|
3 |
+
2025-09-23 01:58:23,248 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Starting triplicate run 1 for dataset bace_regression at 2025-09-23_01-58-23
|
4 |
+
2025-09-23 01:58:29,897 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 1/100 | Train Loss: 0.6316 | Val rms_score: 0.6965
|
5 |
+
2025-09-23 01:58:29,898 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Global step of best model: 38
|
6 |
+
2025-09-23 01:58:30,436 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Best model saved at epoch 1 with val rms_score: 0.6965
|
7 |
+
2025-09-23 01:58:35,002 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 2/100 | Train Loss: 0.3158 | Val rms_score: 0.7280
|
8 |
+
2025-09-23 01:58:39,639 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 3/100 | Train Loss: 0.2600 | Val rms_score: 0.7366
|
9 |
+
2025-09-23 01:58:44,421 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 4/100 | Train Loss: 0.2253 | Val rms_score: 0.7495
|
10 |
+
2025-09-23 01:58:49,430 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 5/100 | Train Loss: 0.2015 | Val rms_score: 0.6073
|
11 |
+
2025-09-23 01:58:49,635 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Global step of best model: 190
|
12 |
+
2025-09-23 01:58:50,214 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Best model saved at epoch 5 with val rms_score: 0.6073
|
13 |
+
2025-09-23 01:58:55,025 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 6/100 | Train Loss: 0.1752 | Val rms_score: 0.7702
|
14 |
+
2025-09-23 01:59:00,062 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 7/100 | Train Loss: 0.1620 | Val rms_score: 0.7180
|
15 |
+
2025-09-23 01:59:04,886 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 8/100 | Train Loss: 0.1406 | Val rms_score: 0.6693
|
16 |
+
2025-09-23 01:59:09,780 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 9/100 | Train Loss: 0.1291 | Val rms_score: 0.7082
|
17 |
+
2025-09-23 01:59:14,550 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 10/100 | Train Loss: 0.1242 | Val rms_score: 0.7244
|
18 |
+
2025-09-23 01:59:18,866 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 11/100 | Train Loss: 0.1241 | Val rms_score: 0.6246
|
19 |
+
2025-09-23 01:59:23,452 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 12/100 | Train Loss: 0.1127 | Val rms_score: 0.7382
|
20 |
+
2025-09-23 01:59:28,185 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 13/100 | Train Loss: 0.1044 | Val rms_score: 0.7621
|
21 |
+
2025-09-23 01:59:32,940 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 14/100 | Train Loss: 0.1104 | Val rms_score: 0.6958
|
22 |
+
2025-09-23 01:59:37,752 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 15/100 | Train Loss: 0.0979 | Val rms_score: 0.7077
|
23 |
+
2025-09-23 01:59:42,521 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 16/100 | Train Loss: 0.0801 | Val rms_score: 0.7214
|
24 |
+
2025-09-23 01:59:47,523 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 17/100 | Train Loss: 0.0851 | Val rms_score: 0.6677
|
25 |
+
2025-09-23 01:59:52,238 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 18/100 | Train Loss: 0.0925 | Val rms_score: 0.7257
|
26 |
+
2025-09-23 01:59:57,037 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 19/100 | Train Loss: 0.0852 | Val rms_score: 0.6979
|
27 |
+
2025-09-23 02:00:01,762 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 20/100 | Train Loss: 0.0839 | Val rms_score: 0.7614
|
28 |
+
2025-09-23 02:00:06,603 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 21/100 | Train Loss: 0.0835 | Val rms_score: 0.6844
|
29 |
+
2025-09-23 02:00:11,587 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 22/100 | Train Loss: 0.0768 | Val rms_score: 0.7044
|
30 |
+
2025-09-23 02:00:16,348 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 23/100 | Train Loss: 0.0678 | Val rms_score: 0.8322
|
31 |
+
2025-09-23 02:00:21,147 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 24/100 | Train Loss: 0.0768 | Val rms_score: 0.7278
|
32 |
+
2025-09-23 02:00:25,743 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 25/100 | Train Loss: 0.0678 | Val rms_score: 0.7591
|
33 |
+
2025-09-23 02:00:30,324 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 26/100 | Train Loss: 0.0621 | Val rms_score: 0.7180
|
34 |
+
2025-09-23 02:00:36,151 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 27/100 | Train Loss: 0.0667 | Val rms_score: 0.8134
|
35 |
+
2025-09-23 02:00:40,687 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 28/100 | Train Loss: 0.0703 | Val rms_score: 0.7505
|
36 |
+
2025-09-23 02:00:44,874 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 29/100 | Train Loss: 0.0791 | Val rms_score: 0.7547
|
37 |
+
2025-09-23 02:00:49,456 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 30/100 | Train Loss: 0.0662 | Val rms_score: 0.7829
|
38 |
+
2025-09-23 02:00:54,226 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 31/100 | Train Loss: 0.0604 | Val rms_score: 0.7187
|
39 |
+
2025-09-23 02:00:59,326 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 32/100 | Train Loss: 0.0571 | Val rms_score: 0.7406
|
40 |
+
2025-09-23 02:01:04,207 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 33/100 | Train Loss: 0.0530 | Val rms_score: 0.7120
|
41 |
+
2025-09-23 02:01:09,038 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 34/100 | Train Loss: 0.0584 | Val rms_score: 0.7346
|
42 |
+
2025-09-23 02:01:13,803 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 35/100 | Train Loss: 0.0526 | Val rms_score: 0.7254
|
43 |
+
2025-09-23 02:01:18,717 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 36/100 | Train Loss: 0.0510 | Val rms_score: 0.7710
|
44 |
+
2025-09-23 02:01:23,802 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 37/100 | Train Loss: 0.0465 | Val rms_score: 0.7615
|
45 |
+
2025-09-23 02:01:28,587 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 38/100 | Train Loss: 0.0500 | Val rms_score: 0.7421
|
46 |
+
2025-09-23 02:01:33,448 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 39/100 | Train Loss: 0.0493 | Val rms_score: 0.7433
|
47 |
+
2025-09-23 02:01:38,159 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 40/100 | Train Loss: 0.0484 | Val rms_score: 0.7817
|
48 |
+
2025-09-23 02:01:42,968 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 41/100 | Train Loss: 0.0463 | Val rms_score: 0.7630
|
49 |
+
2025-09-23 02:01:48,196 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 42/100 | Train Loss: 0.0440 | Val rms_score: 0.7882
|
50 |
+
2025-09-23 02:01:53,237 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 43/100 | Train Loss: 0.0437 | Val rms_score: 0.7296
|
51 |
+
2025-09-23 02:01:57,909 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 44/100 | Train Loss: 0.0438 | Val rms_score: 0.7546
|
52 |
+
2025-09-23 02:02:02,998 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 45/100 | Train Loss: 0.0463 | Val rms_score: 0.7492
|
53 |
+
2025-09-23 02:02:07,229 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 46/100 | Train Loss: 0.0399 | Val rms_score: 0.7589
|
54 |
+
2025-09-23 02:02:11,785 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 47/100 | Train Loss: 0.0401 | Val rms_score: 0.7474
|
55 |
+
2025-09-23 02:02:16,773 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 48/100 | Train Loss: 0.0368 | Val rms_score: 0.7733
|
56 |
+
2025-09-23 02:02:21,751 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 49/100 | Train Loss: 0.0391 | Val rms_score: 0.7681
|
57 |
+
2025-09-23 02:02:26,731 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 50/100 | Train Loss: 0.0403 | Val rms_score: 0.7934
|
58 |
+
2025-09-23 02:02:31,741 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 51/100 | Train Loss: 0.0387 | Val rms_score: 0.7378
|
59 |
+
2025-09-23 02:02:37,031 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 52/100 | Train Loss: 0.0387 | Val rms_score: 0.7833
|
60 |
+
2025-09-23 02:02:43,112 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 53/100 | Train Loss: 0.0416 | Val rms_score: 0.8024
|
61 |
+
2025-09-23 02:02:48,195 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 54/100 | Train Loss: 0.0419 | Val rms_score: 0.7901
|
62 |
+
2025-09-23 02:02:53,264 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 55/100 | Train Loss: 0.0341 | Val rms_score: 0.7888
|
63 |
+
2025-09-23 02:02:58,414 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 56/100 | Train Loss: 0.0325 | Val rms_score: 0.7630
|
64 |
+
2025-09-23 02:03:03,726 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 57/100 | Train Loss: 0.0331 | Val rms_score: 0.7636
|
65 |
+
2025-09-23 02:03:08,802 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 58/100 | Train Loss: 0.0255 | Val rms_score: 0.7460
|
66 |
+
2025-09-23 02:03:13,748 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 59/100 | Train Loss: 0.0341 | Val rms_score: 0.7599
|
67 |
+
2025-09-23 02:03:18,694 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 60/100 | Train Loss: 0.0323 | Val rms_score: 0.7549
|
68 |
+
2025-09-23 02:03:23,130 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 61/100 | Train Loss: 0.0328 | Val rms_score: 0.7809
|
69 |
+
2025-09-23 02:03:28,349 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 62/100 | Train Loss: 0.0327 | Val rms_score: 0.8207
|
70 |
+
2025-09-23 02:03:32,785 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 63/100 | Train Loss: 0.0382 | Val rms_score: 0.7759
|
71 |
+
2025-09-23 02:03:37,783 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 64/100 | Train Loss: 0.0310 | Val rms_score: 0.8058
|
72 |
+
2025-09-23 02:03:42,873 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 65/100 | Train Loss: 0.0308 | Val rms_score: 0.7643
|
73 |
+
2025-09-23 02:03:48,034 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 66/100 | Train Loss: 0.0366 | Val rms_score: 0.7679
|
74 |
+
2025-09-23 02:03:53,416 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 67/100 | Train Loss: 0.0282 | Val rms_score: 0.7528
|
75 |
+
2025-09-23 02:03:58,302 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 68/100 | Train Loss: 0.0292 | Val rms_score: 0.7563
|
76 |
+
2025-09-23 02:04:03,249 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 69/100 | Train Loss: 0.0277 | Val rms_score: 0.7650
|
77 |
+
2025-09-23 02:04:08,168 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 70/100 | Train Loss: 0.0273 | Val rms_score: 0.7592
|
78 |
+
2025-09-23 02:04:13,088 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 71/100 | Train Loss: 0.0296 | Val rms_score: 0.7461
|
79 |
+
2025-09-23 02:04:18,478 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 72/100 | Train Loss: 0.0295 | Val rms_score: 0.7328
|
80 |
+
2025-09-23 02:04:23,597 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 73/100 | Train Loss: 0.0273 | Val rms_score: 0.7634
|
81 |
+
2025-09-23 02:04:28,795 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 74/100 | Train Loss: 0.0257 | Val rms_score: 0.7728
|
82 |
+
2025-09-23 02:04:33,903 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 75/100 | Train Loss: 0.0286 | Val rms_score: 0.7630
|
83 |
+
2025-09-23 02:04:38,869 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 76/100 | Train Loss: 0.0265 | Val rms_score: 0.7818
|
84 |
+
2025-09-23 02:04:43,774 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 77/100 | Train Loss: 0.0254 | Val rms_score: 0.7792
|
85 |
+
2025-09-23 02:04:48,888 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 78/100 | Train Loss: 0.0273 | Val rms_score: 0.7553
|
86 |
+
2025-09-23 02:04:54,823 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 79/100 | Train Loss: 0.0193 | Val rms_score: 0.7592
|
87 |
+
2025-09-23 02:04:59,414 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 80/100 | Train Loss: 0.0250 | Val rms_score: 0.7675
|
88 |
+
2025-09-23 02:05:04,609 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 81/100 | Train Loss: 0.0260 | Val rms_score: 0.7668
|
89 |
+
2025-09-23 02:05:10,051 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 82/100 | Train Loss: 0.0265 | Val rms_score: 0.7541
|
90 |
+
2025-09-23 02:05:15,009 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 83/100 | Train Loss: 0.0247 | Val rms_score: 0.7652
|
91 |
+
2025-09-23 02:05:20,177 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 84/100 | Train Loss: 0.0259 | Val rms_score: 0.7421
|
92 |
+
2025-09-23 02:05:25,414 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 85/100 | Train Loss: 0.0270 | Val rms_score: 0.7691
|
93 |
+
2025-09-23 02:05:30,531 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 86/100 | Train Loss: 0.0246 | Val rms_score: 0.7926
|
94 |
+
2025-09-23 02:05:36,027 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 87/100 | Train Loss: 0.0293 | Val rms_score: 0.7929
|
95 |
+
2025-09-23 02:05:41,227 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 88/100 | Train Loss: 0.0252 | Val rms_score: 0.7510
|
96 |
+
2025-09-23 02:05:46,404 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 89/100 | Train Loss: 0.0251 | Val rms_score: 0.7937
|
97 |
+
2025-09-23 02:05:51,451 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 90/100 | Train Loss: 0.0262 | Val rms_score: 0.7946
|
98 |
+
2025-09-23 02:05:56,652 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 91/100 | Train Loss: 0.0243 | Val rms_score: 0.7739
|
99 |
+
2025-09-23 02:06:01,883 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 92/100 | Train Loss: 0.0246 | Val rms_score: 0.7752
|
100 |
+
2025-09-23 02:06:06,560 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 93/100 | Train Loss: 0.0209 | Val rms_score: 0.7796
|
101 |
+
2025-09-23 02:06:11,588 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 94/100 | Train Loss: 0.0218 | Val rms_score: 0.7562
|
102 |
+
2025-09-23 02:06:16,642 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 95/100 | Train Loss: 0.0229 | Val rms_score: 0.7802
|
103 |
+
2025-09-23 02:06:21,027 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 96/100 | Train Loss: 0.0208 | Val rms_score: 0.7737
|
104 |
+
2025-09-23 02:06:25,891 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 97/100 | Train Loss: 0.0222 | Val rms_score: 0.7891
|
105 |
+
2025-09-23 02:06:31,077 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 98/100 | Train Loss: 0.0223 | Val rms_score: 0.7544
|
106 |
+
2025-09-23 02:06:36,218 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 99/100 | Train Loss: 0.0213 | Val rms_score: 0.7928
|
107 |
+
2025-09-23 02:06:41,376 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 100/100 | Train Loss: 0.0212 | Val rms_score: 0.7744
|
108 |
+
2025-09-23 02:06:41,900 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Test rms_score: 0.9526
|
109 |
+
2025-09-23 02:06:42,218 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Starting triplicate run 2 for dataset bace_regression at 2025-09-23_02-06-42
|
110 |
+
2025-09-23 02:06:47,193 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 1/100 | Train Loss: 0.6151 | Val rms_score: 0.7748
|
111 |
+
2025-09-23 02:06:47,193 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Global step of best model: 38
|
112 |
+
2025-09-23 02:06:47,728 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Best model saved at epoch 1 with val rms_score: 0.7748
|
113 |
+
2025-09-23 02:06:52,911 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 2/100 | Train Loss: 0.3322 | Val rms_score: 0.7296
|
114 |
+
2025-09-23 02:06:53,095 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Global step of best model: 76
|
115 |
+
2025-09-23 02:06:53,621 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Best model saved at epoch 2 with val rms_score: 0.7296
|
116 |
+
2025-09-23 02:06:58,929 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 3/100 | Train Loss: 0.2388 | Val rms_score: 0.6580
|
117 |
+
2025-09-23 02:06:59,108 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Global step of best model: 114
|
118 |
+
2025-09-23 02:06:59,656 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Best model saved at epoch 3 with val rms_score: 0.6580
|
119 |
+
2025-09-23 02:07:04,775 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 4/100 | Train Loss: 0.2188 | Val rms_score: 0.8624
|
120 |
+
2025-09-23 02:07:10,067 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 5/100 | Train Loss: 0.1933 | Val rms_score: 0.7167
|
121 |
+
2025-09-23 02:07:15,210 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 6/100 | Train Loss: 0.1708 | Val rms_score: 0.7656
|
122 |
+
2025-09-23 02:07:20,711 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 7/100 | Train Loss: 0.1669 | Val rms_score: 0.7156
|
123 |
+
2025-09-23 02:07:25,822 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 8/100 | Train Loss: 0.1543 | Val rms_score: 0.7894
|
124 |
+
2025-09-23 02:07:30,362 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 9/100 | Train Loss: 0.1324 | Val rms_score: 0.8148
|
125 |
+
2025-09-23 02:07:35,358 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 10/100 | Train Loss: 0.1242 | Val rms_score: 0.7301
|
126 |
+
2025-09-23 02:07:40,297 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 11/100 | Train Loss: 0.1215 | Val rms_score: 0.7412
|
127 |
+
2025-09-23 02:07:44,916 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 12/100 | Train Loss: 0.1184 | Val rms_score: 0.6765
|
128 |
+
2025-09-23 02:07:49,700 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 13/100 | Train Loss: 0.1135 | Val rms_score: 0.8050
|
129 |
+
2025-09-23 02:07:54,895 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 14/100 | Train Loss: 0.1270 | Val rms_score: 0.6949
|
130 |
+
2025-09-23 02:08:00,045 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 15/100 | Train Loss: 0.0991 | Val rms_score: 0.7169
|
131 |
+
2025-09-23 02:08:05,244 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 16/100 | Train Loss: 0.1226 | Val rms_score: 0.7729
|
132 |
+
2025-09-23 02:08:10,515 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 17/100 | Train Loss: 0.0954 | Val rms_score: 0.7536
|
133 |
+
2025-09-23 02:08:15,577 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 18/100 | Train Loss: 0.0942 | Val rms_score: 0.8126
|
134 |
+
2025-09-23 02:08:20,654 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 19/100 | Train Loss: 0.0959 | Val rms_score: 0.7223
|
135 |
+
2025-09-23 02:08:25,747 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 20/100 | Train Loss: 0.0835 | Val rms_score: 0.7430
|
136 |
+
2025-09-23 02:08:31,170 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 21/100 | Train Loss: 0.0818 | Val rms_score: 0.6814
|
137 |
+
2025-09-23 02:08:36,685 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 22/100 | Train Loss: 0.0755 | Val rms_score: 0.6853
|
138 |
+
2025-09-23 02:08:41,848 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 23/100 | Train Loss: 0.0715 | Val rms_score: 0.7655
|
139 |
+
2025-09-23 02:08:47,133 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 24/100 | Train Loss: 0.0716 | Val rms_score: 0.7362
|
140 |
+
2025-09-23 02:08:51,749 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 25/100 | Train Loss: 0.0691 | Val rms_score: 0.7300
|
141 |
+
2025-09-23 02:08:56,888 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 26/100 | Train Loss: 0.0637 | Val rms_score: 0.7648
|
142 |
+
2025-09-23 02:09:03,277 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 27/100 | Train Loss: 0.0622 | Val rms_score: 0.7118
|
143 |
+
2025-09-23 02:09:08,072 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 28/100 | Train Loss: 0.0633 | Val rms_score: 0.7343
|
144 |
+
2025-09-23 02:09:12,730 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 29/100 | Train Loss: 0.0605 | Val rms_score: 0.7650
|
145 |
+
2025-09-23 02:09:17,691 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 30/100 | Train Loss: 0.0588 | Val rms_score: 0.7864
|
146 |
+
2025-09-23 02:09:22,832 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 31/100 | Train Loss: 0.0621 | Val rms_score: 0.7872
|
147 |
+
2025-09-23 02:09:28,321 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 32/100 | Train Loss: 0.0605 | Val rms_score: 0.7598
|
148 |
+
2025-09-23 02:09:33,334 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 33/100 | Train Loss: 0.0543 | Val rms_score: 0.7577
|
149 |
+
2025-09-23 02:09:38,642 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 34/100 | Train Loss: 0.0563 | Val rms_score: 0.7271
|
150 |
+
2025-09-23 02:09:43,959 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 35/100 | Train Loss: 0.0549 | Val rms_score: 0.7575
|
151 |
+
2025-09-23 02:09:49,203 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 36/100 | Train Loss: 0.0526 | Val rms_score: 0.7809
|
152 |
+
2025-09-23 02:09:54,683 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 37/100 | Train Loss: 0.0544 | Val rms_score: 0.7727
|
153 |
+
2025-09-23 02:09:59,875 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 38/100 | Train Loss: 0.0506 | Val rms_score: 0.7984
|
154 |
+
2025-09-23 02:10:05,207 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 39/100 | Train Loss: 0.0512 | Val rms_score: 0.8001
|
155 |
+
2025-09-23 02:10:10,405 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 40/100 | Train Loss: 0.0523 | Val rms_score: 0.7650
|
156 |
+
2025-09-23 02:10:15,221 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 41/100 | Train Loss: 0.0547 | Val rms_score: 0.7800
|
157 |
+
2025-09-23 02:10:20,567 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 42/100 | Train Loss: 0.0485 | Val rms_score: 0.7750
|
158 |
+
2025-09-23 02:10:25,633 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 43/100 | Train Loss: 0.0531 | Val rms_score: 0.7636
|
159 |
+
2025-09-23 02:10:30,694 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 44/100 | Train Loss: 0.0467 | Val rms_score: 0.7748
|
160 |
+
2025-09-23 02:10:35,102 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 45/100 | Train Loss: 0.0443 | Val rms_score: 0.7737
|
161 |
+
2025-09-23 02:10:39,662 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 46/100 | Train Loss: 0.0391 | Val rms_score: 0.7596
|
162 |
+
2025-09-23 02:10:45,066 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 47/100 | Train Loss: 0.0391 | Val rms_score: 0.7675
|
163 |
+
2025-09-23 02:10:50,218 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 48/100 | Train Loss: 0.0396 | Val rms_score: 0.8023
|
164 |
+
2025-09-23 02:10:55,393 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 49/100 | Train Loss: 0.0411 | Val rms_score: 0.7922
|
165 |
+
2025-09-23 02:11:00,510 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 50/100 | Train Loss: 0.0430 | Val rms_score: 0.7660
|
166 |
+
2025-09-23 02:11:05,846 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 51/100 | Train Loss: 0.0352 | Val rms_score: 0.7833
|
167 |
+
2025-09-23 02:11:11,362 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 52/100 | Train Loss: 0.0374 | Val rms_score: 0.7772
|
168 |
+
2025-09-23 02:11:17,572 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 53/100 | Train Loss: 0.0399 | Val rms_score: 0.7933
|
169 |
+
2025-09-23 02:11:22,909 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 54/100 | Train Loss: 0.0366 | Val rms_score: 0.7722
|
170 |
+
2025-09-23 02:11:28,238 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 55/100 | Train Loss: 0.0387 | Val rms_score: 0.7883
|
171 |
+
2025-09-23 02:11:33,465 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 56/100 | Train Loss: 0.0371 | Val rms_score: 0.7978
|
172 |
+
2025-09-23 02:11:38,344 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 57/100 | Train Loss: 0.0308 | Val rms_score: 0.7602
|
173 |
+
2025-09-23 02:11:43,335 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 58/100 | Train Loss: 0.0437 | Val rms_score: 0.7594
|
174 |
+
2025-09-23 02:11:48,444 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 59/100 | Train Loss: 0.0350 | Val rms_score: 0.7635
|
175 |
+
2025-09-23 02:11:53,397 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 60/100 | Train Loss: 0.0319 | Val rms_score: 0.7999
|
176 |
+
2025-09-23 02:11:58,364 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 61/100 | Train Loss: 0.0297 | Val rms_score: 0.7606
|
177 |
+
2025-09-23 02:12:03,274 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 62/100 | Train Loss: 0.0310 | Val rms_score: 0.7928
|
178 |
+
2025-09-23 02:12:08,414 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 63/100 | Train Loss: 0.0310 | Val rms_score: 0.7850
|
179 |
+
2025-09-23 02:12:13,363 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 64/100 | Train Loss: 0.0317 | Val rms_score: 0.7844
|
180 |
+
2025-09-23 02:12:18,301 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 65/100 | Train Loss: 0.0294 | Val rms_score: 0.8112
|
181 |
+
2025-09-23 02:12:23,312 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 66/100 | Train Loss: 0.0322 | Val rms_score: 0.7525
|
182 |
+
2025-09-23 02:12:28,862 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 67/100 | Train Loss: 0.0292 | Val rms_score: 0.7851
|
183 |
+
2025-09-23 02:12:34,055 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 68/100 | Train Loss: 0.0292 | Val rms_score: 0.7913
|
184 |
+
2025-09-23 02:12:39,237 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 69/100 | Train Loss: 0.0323 | Val rms_score: 0.7490
|
185 |
+
2025-09-23 02:12:44,424 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 70/100 | Train Loss: 0.0304 | Val rms_score: 0.7838
|
186 |
+
2025-09-23 02:12:49,634 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 71/100 | Train Loss: 0.0292 | Val rms_score: 0.7656
|
187 |
+
2025-09-23 02:12:54,906 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 72/100 | Train Loss: 0.0312 | Val rms_score: 0.7731
|
188 |
+
2025-09-23 02:12:59,973 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 73/100 | Train Loss: 0.0306 | Val rms_score: 0.7709
|
189 |
+
2025-09-23 02:13:05,092 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 74/100 | Train Loss: 0.0306 | Val rms_score: 0.7739
|
190 |
+
2025-09-23 02:13:10,294 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 75/100 | Train Loss: 0.0292 | Val rms_score: 0.7899
|
191 |
+
2025-09-23 02:13:15,605 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 76/100 | Train Loss: 0.0275 | Val rms_score: 0.8170
|
192 |
+
2025-09-23 02:13:21,247 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 77/100 | Train Loss: 0.0269 | Val rms_score: 0.7889
|
193 |
+
2025-09-23 02:13:25,807 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 78/100 | Train Loss: 0.0262 | Val rms_score: 0.7830
|
194 |
+
2025-09-23 02:13:31,284 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 79/100 | Train Loss: 0.0240 | Val rms_score: 0.7989
|
195 |
+
2025-09-23 02:13:36,406 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 80/100 | Train Loss: 0.0265 | Val rms_score: 0.7489
|
196 |
+
2025-09-23 02:13:41,348 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 81/100 | Train Loss: 0.0260 | Val rms_score: 0.7671
|
197 |
+
2025-09-23 02:13:46,788 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 82/100 | Train Loss: 0.0265 | Val rms_score: 0.7811
|
198 |
+
2025-09-23 02:13:52,041 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 83/100 | Train Loss: 0.0251 | Val rms_score: 0.7606
|
199 |
+
2025-09-23 02:13:57,325 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 84/100 | Train Loss: 0.0265 | Val rms_score: 0.7887
|
200 |
+
2025-09-23 02:14:02,744 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 85/100 | Train Loss: 0.0250 | Val rms_score: 0.7867
|
201 |
+
2025-09-23 02:14:08,013 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 86/100 | Train Loss: 0.0262 | Val rms_score: 0.7870
|
202 |
+
2025-09-23 02:14:13,568 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 87/100 | Train Loss: 0.0195 | Val rms_score: 0.7880
|
203 |
+
2025-09-23 02:14:18,833 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 88/100 | Train Loss: 0.0250 | Val rms_score: 0.7727
|
204 |
+
2025-09-23 02:14:24,070 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 89/100 | Train Loss: 0.0235 | Val rms_score: 0.7836
|
205 |
+
2025-09-23 02:14:29,333 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 90/100 | Train Loss: 0.0249 | Val rms_score: 0.7855
|
206 |
+
2025-09-23 02:14:34,319 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 91/100 | Train Loss: 0.0255 | Val rms_score: 0.7934
|
207 |
+
2025-09-23 02:14:39,751 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 92/100 | Train Loss: 0.0263 | Val rms_score: 0.7471
|
208 |
+
2025-09-23 02:14:45,080 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 93/100 | Train Loss: 0.0222 | Val rms_score: 0.7665
|
209 |
+
2025-09-23 02:14:50,314 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 94/100 | Train Loss: 0.0231 | Val rms_score: 0.7818
|
210 |
+
2025-09-23 02:14:55,032 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 95/100 | Train Loss: 0.0217 | Val rms_score: 0.7846
|
211 |
+
2025-09-23 02:15:00,312 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 96/100 | Train Loss: 0.0224 | Val rms_score: 0.8143
|
212 |
+
2025-09-23 02:15:05,897 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 97/100 | Train Loss: 0.0232 | Val rms_score: 0.8023
|
213 |
+
2025-09-23 02:15:11,053 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 98/100 | Train Loss: 0.0220 | Val rms_score: 0.7827
|
214 |
+
2025-09-23 02:15:16,299 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 99/100 | Train Loss: 0.0220 | Val rms_score: 0.7737
|
215 |
+
2025-09-23 02:15:21,603 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 100/100 | Train Loss: 0.0221 | Val rms_score: 0.7661
|
216 |
+
2025-09-23 02:15:22,177 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Test rms_score: 1.0015
|
217 |
+
2025-09-23 02:15:22,497 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Starting triplicate run 3 for dataset bace_regression at 2025-09-23_02-15-22
|
218 |
+
2025-09-23 02:15:27,491 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 1/100 | Train Loss: 0.5954 | Val rms_score: 0.7332
|
219 |
+
2025-09-23 02:15:27,491 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Global step of best model: 38
|
220 |
+
2025-09-23 02:15:28,012 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Best model saved at epoch 1 with val rms_score: 0.7332
|
221 |
+
2025-09-23 02:15:33,185 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 2/100 | Train Loss: 0.3125 | Val rms_score: 0.6718
|
222 |
+
2025-09-23 02:15:33,364 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Global step of best model: 76
|
223 |
+
2025-09-23 02:15:33,885 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Best model saved at epoch 2 with val rms_score: 0.6718
|
224 |
+
2025-09-23 02:15:39,127 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 3/100 | Train Loss: 0.2690 | Val rms_score: 0.7359
|
225 |
+
2025-09-23 02:15:44,343 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 4/100 | Train Loss: 0.2237 | Val rms_score: 0.8022
|
226 |
+
2025-09-23 02:15:49,060 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 5/100 | Train Loss: 0.1990 | Val rms_score: 0.7298
|
227 |
+
2025-09-23 02:15:54,230 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 6/100 | Train Loss: 0.1897 | Val rms_score: 0.6662
|
228 |
+
2025-09-23 02:15:54,616 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Global step of best model: 228
|
229 |
+
2025-09-23 02:15:55,142 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Best model saved at epoch 6 with val rms_score: 0.6662
|
230 |
+
2025-09-23 02:16:00,247 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 7/100 | Train Loss: 0.1637 | Val rms_score: 0.7217
|
231 |
+
2025-09-23 02:16:05,242 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 8/100 | Train Loss: 0.1719 | Val rms_score: 0.8103
|
232 |
+
2025-09-23 02:16:10,369 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 9/100 | Train Loss: 0.1390 | Val rms_score: 0.6988
|
233 |
+
2025-09-23 02:16:15,267 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 10/100 | Train Loss: 0.1242 | Val rms_score: 0.7426
|
234 |
+
2025-09-23 02:16:19,990 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 11/100 | Train Loss: 0.1207 | Val rms_score: 0.7129
|
235 |
+
2025-09-23 02:16:25,468 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 12/100 | Train Loss: 0.1135 | Val rms_score: 0.7405
|
236 |
+
2025-09-23 02:16:30,715 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 13/100 | Train Loss: 0.1143 | Val rms_score: 0.7612
|
237 |
+
2025-09-23 02:16:36,119 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 14/100 | Train Loss: 0.1006 | Val rms_score: 0.8830
|
238 |
+
2025-09-23 02:16:41,459 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 15/100 | Train Loss: 0.0979 | Val rms_score: 0.8090
|
239 |
+
2025-09-23 02:16:46,505 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 16/100 | Train Loss: 0.1177 | Val rms_score: 0.7594
|
240 |
+
2025-09-23 02:16:51,817 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 17/100 | Train Loss: 0.0872 | Val rms_score: 0.7871
|
241 |
+
2025-09-23 02:16:56,813 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 18/100 | Train Loss: 0.0958 | Val rms_score: 0.7683
|
242 |
+
2025-09-23 02:17:01,854 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 19/100 | Train Loss: 0.0856 | Val rms_score: 0.6903
|
243 |
+
2025-09-23 02:17:07,080 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 20/100 | Train Loss: 0.0806 | Val rms_score: 0.7164
|
244 |
+
2025-09-23 02:17:11,834 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 21/100 | Train Loss: 0.0880 | Val rms_score: 0.7256
|
245 |
+
2025-09-23 02:17:17,266 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 22/100 | Train Loss: 0.0777 | Val rms_score: 0.7641
|
246 |
+
2025-09-23 02:17:22,562 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 23/100 | Train Loss: 0.0748 | Val rms_score: 0.7134
|
247 |
+
2025-09-23 02:17:27,821 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 24/100 | Train Loss: 0.0814 | Val rms_score: 0.7168
|
248 |
+
2025-09-23 02:17:32,978 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 25/100 | Train Loss: 0.0744 | Val rms_score: 0.7543
|
249 |
+
2025-09-23 02:17:38,286 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 26/100 | Train Loss: 0.0699 | Val rms_score: 0.7601
|
250 |
+
2025-09-23 02:17:44,407 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 27/100 | Train Loss: 0.0655 | Val rms_score: 0.7358
|
251 |
+
2025-09-23 02:17:49,184 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 28/100 | Train Loss: 0.0592 | Val rms_score: 0.7545
|
252 |
+
2025-09-23 02:17:54,407 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 29/100 | Train Loss: 0.0938 | Val rms_score: 0.7738
|
253 |
+
2025-09-23 02:17:59,756 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 30/100 | Train Loss: 0.0588 | Val rms_score: 0.7460
|
254 |
+
2025-09-23 02:18:04,940 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 31/100 | Train Loss: 0.0588 | Val rms_score: 0.7506
|
255 |
+
2025-09-23 02:18:10,190 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 32/100 | Train Loss: 0.0554 | Val rms_score: 0.7685
|
256 |
+
2025-09-23 02:18:15,227 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 33/100 | Train Loss: 0.0539 | Val rms_score: 0.7387
|
257 |
+
2025-09-23 02:18:20,348 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 34/100 | Train Loss: 0.0502 | Val rms_score: 0.7397
|
258 |
+
2025-09-23 02:18:25,471 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 35/100 | Train Loss: 0.0518 | Val rms_score: 0.7469
|
259 |
+
2025-09-23 02:18:30,692 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 36/100 | Train Loss: 0.0504 | Val rms_score: 0.8029
|
260 |
+
2025-09-23 02:18:35,618 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 37/100 | Train Loss: 0.0498 | Val rms_score: 0.7560
|
261 |
+
2025-09-23 02:18:40,931 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 38/100 | Train Loss: 0.0477 | Val rms_score: 0.7898
|
262 |
+
2025-09-23 02:18:46,124 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 39/100 | Train Loss: 0.0456 | Val rms_score: 0.7535
|
263 |
+
2025-09-23 02:18:51,432 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 40/100 | Train Loss: 0.0508 | Val rms_score: 0.7581
|
264 |
+
2025-09-23 02:18:56,694 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 41/100 | Train Loss: 0.0526 | Val rms_score: 0.7642
|
265 |
+
2025-09-23 02:19:02,289 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 42/100 | Train Loss: 0.0444 | Val rms_score: 0.7455
|
266 |
+
2025-09-23 02:19:07,220 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 43/100 | Train Loss: 0.0460 | Val rms_score: 0.7674
|
267 |
+
2025-09-23 02:19:11,835 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 44/100 | Train Loss: 0.0444 | Val rms_score: 0.7597
|
268 |
+
2025-09-23 02:19:16,957 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 45/100 | Train Loss: 0.0410 | Val rms_score: 0.7973
|
269 |
+
2025-09-23 02:19:21,762 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 46/100 | Train Loss: 0.0421 | Val rms_score: 0.7532
|
270 |
+
2025-09-23 02:19:27,056 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 47/100 | Train Loss: 0.0409 | Val rms_score: 0.7578
|
271 |
+
2025-09-23 02:19:32,182 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 48/100 | Train Loss: 0.0439 | Val rms_score: 0.7522
|
272 |
+
2025-09-23 02:19:37,491 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 49/100 | Train Loss: 0.0393 | Val rms_score: 0.7685
|
273 |
+
2025-09-23 02:19:42,767 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 50/100 | Train Loss: 0.0417 | Val rms_score: 0.7476
|
274 |
+
2025-09-23 02:19:47,994 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 51/100 | Train Loss: 0.0387 | Val rms_score: 0.7838
|
275 |
+
2025-09-23 02:19:53,667 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 52/100 | Train Loss: 0.0387 | Val rms_score: 0.7634
|
276 |
+
2025-09-23 02:19:59,817 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 53/100 | Train Loss: 0.0356 | Val rms_score: 0.7674
|
277 |
+
2025-09-23 02:20:05,154 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 54/100 | Train Loss: 0.0370 | Val rms_score: 0.7890
|
278 |
+
2025-09-23 02:20:10,499 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 55/100 | Train Loss: 0.0360 | Val rms_score: 0.7635
|
279 |
+
2025-09-23 02:20:15,661 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 56/100 | Train Loss: 0.0354 | Val rms_score: 0.7899
|
280 |
+
2025-09-23 02:20:21,166 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 57/100 | Train Loss: 0.0347 | Val rms_score: 0.7709
|
281 |
+
2025-09-23 02:20:26,324 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 58/100 | Train Loss: 0.0304 | Val rms_score: 0.7466
|
282 |
+
2025-09-23 02:20:31,426 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 59/100 | Train Loss: 0.0358 | Val rms_score: 0.7750
|
283 |
+
2025-09-23 02:20:35,968 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 60/100 | Train Loss: 0.0333 | Val rms_score: 0.7611
|
284 |
+
2025-09-23 02:20:40,595 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 61/100 | Train Loss: 0.0308 | Val rms_score: 0.7920
|
285 |
+
2025-09-23 02:20:45,871 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 62/100 | Train Loss: 0.0312 | Val rms_score: 0.7661
|
286 |
+
2025-09-23 02:20:51,009 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 63/100 | Train Loss: 0.0312 | Val rms_score: 0.7705
|
287 |
+
2025-09-23 02:20:56,110 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 64/100 | Train Loss: 0.0297 | Val rms_score: 0.7901
|
288 |
+
2025-09-23 02:21:01,237 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 65/100 | Train Loss: 0.0310 | Val rms_score: 0.7865
|
289 |
+
2025-09-23 02:21:06,624 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 66/100 | Train Loss: 0.0344 | Val rms_score: 0.8015
|
290 |
+
2025-09-23 02:21:12,012 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 67/100 | Train Loss: 0.0288 | Val rms_score: 0.7960
|
291 |
+
2025-09-23 02:21:17,134 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 68/100 | Train Loss: 0.0286 | Val rms_score: 0.7993
|
292 |
+
2025-09-23 02:21:22,091 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 69/100 | Train Loss: 0.0275 | Val rms_score: 0.7919
|
293 |
+
2025-09-23 02:21:27,368 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 70/100 | Train Loss: 0.0273 | Val rms_score: 0.7919
|
294 |
+
2025-09-23 02:21:32,742 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 71/100 | Train Loss: 0.0284 | Val rms_score: 0.8096
|
295 |
+
2025-09-23 02:21:38,368 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 72/100 | Train Loss: 0.0271 | Val rms_score: 0.7791
|
296 |
+
2025-09-23 02:21:43,663 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 73/100 | Train Loss: 0.0273 | Val rms_score: 0.7753
|
297 |
+
2025-09-23 02:21:48,895 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 74/100 | Train Loss: 0.0273 | Val rms_score: 0.7764
|
298 |
+
2025-09-23 02:21:54,051 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 75/100 | Train Loss: 0.0282 | Val rms_score: 0.7852
|
299 |
+
2025-09-23 02:21:59,172 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 76/100 | Train Loss: 0.0269 | Val rms_score: 0.7452
|
300 |
+
2025-09-23 02:22:04,118 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 77/100 | Train Loss: 0.0264 | Val rms_score: 0.7684
|
301 |
+
2025-09-23 02:22:09,467 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 78/100 | Train Loss: 0.0275 | Val rms_score: 0.7957
|
302 |
+
2025-09-23 02:22:15,519 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 79/100 | Train Loss: 0.0474 | Val rms_score: 0.7602
|
303 |
+
2025-09-23 02:22:20,818 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 80/100 | Train Loss: 0.0271 | Val rms_score: 0.7938
|
304 |
+
2025-09-23 02:22:26,070 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 81/100 | Train Loss: 0.0248 | Val rms_score: 0.7667
|
305 |
+
2025-09-23 02:22:31,444 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 82/100 | Train Loss: 0.0242 | Val rms_score: 0.8077
|
306 |
+
2025-09-23 02:22:36,630 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 83/100 | Train Loss: 0.0242 | Val rms_score: 0.7982
|
307 |
+
2025-09-23 02:22:41,942 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 84/100 | Train Loss: 0.0244 | Val rms_score: 0.7737
|
308 |
+
2025-09-23 02:22:47,099 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 85/100 | Train Loss: 0.0262 | Val rms_score: 0.7796
|
309 |
+
2025-09-23 02:22:52,286 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 86/100 | Train Loss: 0.0244 | Val rms_score: 0.7624
|
310 |
+
2025-09-23 02:22:57,806 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 87/100 | Train Loss: 0.0301 | Val rms_score: 0.7912
|
311 |
+
2025-09-23 02:23:03,092 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 88/100 | Train Loss: 0.0236 | Val rms_score: 0.7841
|
312 |
+
2025-09-23 02:23:08,130 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 89/100 | Train Loss: 0.0265 | Val rms_score: 0.7807
|
313 |
+
2025-09-23 02:23:13,269 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 90/100 | Train Loss: 0.0240 | Val rms_score: 0.7792
|
314 |
+
2025-09-23 02:23:18,350 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 91/100 | Train Loss: 0.0228 | Val rms_score: 0.7918
|
315 |
+
2025-09-23 02:23:23,574 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 92/100 | Train Loss: 0.0217 | Val rms_score: 0.7888
|
316 |
+
2025-09-23 02:23:27,922 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 93/100 | Train Loss: 0.0217 | Val rms_score: 0.7734
|
317 |
+
2025-09-23 02:23:32,707 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 94/100 | Train Loss: 0.0220 | Val rms_score: 0.7749
|
318 |
+
2025-09-23 02:23:37,774 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 95/100 | Train Loss: 0.0175 | Val rms_score: 0.7766
|
319 |
+
2025-09-23 02:23:42,861 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 96/100 | Train Loss: 0.0228 | Val rms_score: 0.7671
|
320 |
+
2025-09-23 02:23:48,214 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 97/100 | Train Loss: 0.0223 | Val rms_score: 0.7731
|
321 |
+
2025-09-23 02:23:53,436 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 98/100 | Train Loss: 0.0210 | Val rms_score: 0.7806
|
322 |
+
2025-09-23 02:23:58,794 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 99/100 | Train Loss: 0.0231 | Val rms_score: 0.7873
|
323 |
+
2025-09-23 02:24:04,182 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 100/100 | Train Loss: 0.0226 | Val rms_score: 0.7664
|
324 |
+
2025-09-23 02:24:04,735 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Test rms_score: 0.9453
|
325 |
+
2025-09-23 02:24:05,059 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Final Triplicate Test Results — Avg rms_score: 0.9665, Std Dev: 0.0250
|
logs_modchembert_regression_ModChemBERT-MLM-DAPT-TAFT-OPT/modchembert_deepchem_splits_run_clearance_epochs100_batch_size32_20250923_022405.log
ADDED
@@ -0,0 +1,331 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
2025-09-23 02:24:05,060 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Running benchmark for dataset: clearance
|
2 |
+
2025-09-23 02:24:05,061 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - dataset: clearance, tasks: ['target'], epochs: 100, learning rate: 3e-05, transform: True
|
3 |
+
2025-09-23 02:24:05,065 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Starting triplicate run 1 for dataset clearance at 2025-09-23_02-24-05
|
4 |
+
2025-09-23 02:24:08,155 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 1/100 | Train Loss: 2.8095 | Val rms_score: 63.9123
|
5 |
+
2025-09-23 02:24:08,155 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Global step of best model: 21
|
6 |
+
2025-09-23 02:24:08,703 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Best model saved at epoch 1 with val rms_score: 63.9123
|
7 |
+
2025-09-23 02:24:11,624 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 2/100 | Train Loss: 1.2560 | Val rms_score: 53.7175
|
8 |
+
2025-09-23 02:24:11,802 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Global step of best model: 42
|
9 |
+
2025-09-23 02:24:12,323 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Best model saved at epoch 2 with val rms_score: 53.7175
|
10 |
+
2025-09-23 02:24:15,557 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 3/100 | Train Loss: 1.0238 | Val rms_score: 52.6228
|
11 |
+
2025-09-23 02:24:15,735 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Global step of best model: 63
|
12 |
+
2025-09-23 02:24:16,254 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Best model saved at epoch 3 with val rms_score: 52.6228
|
13 |
+
2025-09-23 02:24:19,570 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 4/100 | Train Loss: 0.8810 | Val rms_score: 54.6112
|
14 |
+
2025-09-23 02:24:22,833 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 5/100 | Train Loss: 0.7125 | Val rms_score: 52.2338
|
15 |
+
2025-09-23 02:24:23,013 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Global step of best model: 105
|
16 |
+
2025-09-23 02:24:23,542 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Best model saved at epoch 5 with val rms_score: 52.2338
|
17 |
+
2025-09-23 02:24:26,873 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 6/100 | Train Loss: 0.6518 | Val rms_score: 53.9528
|
18 |
+
2025-09-23 02:24:30,486 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 7/100 | Train Loss: 0.5774 | Val rms_score: 55.2152
|
19 |
+
2025-09-23 02:24:33,826 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 8/100 | Train Loss: 0.4940 | Val rms_score: 55.3008
|
20 |
+
2025-09-23 02:24:37,156 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 9/100 | Train Loss: 0.3810 | Val rms_score: 56.1576
|
21 |
+
2025-09-23 02:24:40,427 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 10/100 | Train Loss: 0.3219 | Val rms_score: 56.1982
|
22 |
+
2025-09-23 02:24:43,742 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 11/100 | Train Loss: 0.2307 | Val rms_score: 57.9538
|
23 |
+
2025-09-23 02:24:46,849 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 12/100 | Train Loss: 0.2054 | Val rms_score: 56.1983
|
24 |
+
2025-09-23 02:24:49,825 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 13/100 | Train Loss: 0.1801 | Val rms_score: 57.0751
|
25 |
+
2025-09-23 02:24:53,089 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 14/100 | Train Loss: 0.1607 | Val rms_score: 55.8194
|
26 |
+
2025-09-23 02:24:56,444 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 15/100 | Train Loss: 0.1448 | Val rms_score: 55.6718
|
27 |
+
2025-09-23 02:24:59,702 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 16/100 | Train Loss: 0.1220 | Val rms_score: 55.4480
|
28 |
+
2025-09-23 02:25:03,292 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 17/100 | Train Loss: 0.1161 | Val rms_score: 55.3476
|
29 |
+
2025-09-23 02:25:06,700 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 18/100 | Train Loss: 0.0997 | Val rms_score: 54.8027
|
30 |
+
2025-09-23 02:25:10,026 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 19/100 | Train Loss: 0.0975 | Val rms_score: 54.4771
|
31 |
+
2025-09-23 02:25:13,330 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 20/100 | Train Loss: 0.0801 | Val rms_score: 54.0486
|
32 |
+
2025-09-23 02:25:16,749 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 21/100 | Train Loss: 0.0885 | Val rms_score: 54.8937
|
33 |
+
2025-09-23 02:25:20,388 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 22/100 | Train Loss: 0.0800 | Val rms_score: 54.4905
|
34 |
+
2025-09-23 02:25:23,757 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 23/100 | Train Loss: 0.0796 | Val rms_score: 54.4460
|
35 |
+
2025-09-23 02:25:26,806 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 24/100 | Train Loss: 0.0762 | Val rms_score: 54.1802
|
36 |
+
2025-09-23 02:25:30,149 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 25/100 | Train Loss: 0.0662 | Val rms_score: 55.4057
|
37 |
+
2025-09-23 02:25:33,472 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 26/100 | Train Loss: 0.0688 | Val rms_score: 54.5638
|
38 |
+
2025-09-23 02:25:37,045 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 27/100 | Train Loss: 0.0692 | Val rms_score: 54.4406
|
39 |
+
2025-09-23 02:25:40,279 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 28/100 | Train Loss: 0.0636 | Val rms_score: 54.3138
|
40 |
+
2025-09-23 02:25:43,598 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 29/100 | Train Loss: 0.0499 | Val rms_score: 54.9272
|
41 |
+
2025-09-23 02:25:46,906 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 30/100 | Train Loss: 0.0573 | Val rms_score: 53.7407
|
42 |
+
2025-09-23 02:25:50,093 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 31/100 | Train Loss: 0.0554 | Val rms_score: 53.1949
|
43 |
+
2025-09-23 02:25:53,638 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 32/100 | Train Loss: 0.0558 | Val rms_score: 54.7758
|
44 |
+
2025-09-23 02:25:56,864 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 33/100 | Train Loss: 0.0565 | Val rms_score: 54.1386
|
45 |
+
2025-09-23 02:26:00,075 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 34/100 | Train Loss: 0.0541 | Val rms_score: 54.9208
|
46 |
+
2025-09-23 02:26:03,320 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 35/100 | Train Loss: 0.0469 | Val rms_score: 54.6620
|
47 |
+
2025-09-23 02:26:06,038 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 36/100 | Train Loss: 0.0484 | Val rms_score: 54.3099
|
48 |
+
2025-09-23 02:26:09,286 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 37/100 | Train Loss: 0.0476 | Val rms_score: 55.1880
|
49 |
+
2025-09-23 02:26:12,589 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 38/100 | Train Loss: 0.0456 | Val rms_score: 54.9342
|
50 |
+
2025-09-23 02:26:15,903 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 39/100 | Train Loss: 0.0446 | Val rms_score: 54.3387
|
51 |
+
2025-09-23 02:26:19,098 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 40/100 | Train Loss: 0.0456 | Val rms_score: 54.3482
|
52 |
+
2025-09-23 02:26:22,369 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 41/100 | Train Loss: 0.0480 | Val rms_score: 54.5293
|
53 |
+
2025-09-23 02:26:26,003 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 42/100 | Train Loss: 0.0510 | Val rms_score: 54.5853
|
54 |
+
2025-09-23 02:26:29,307 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 43/100 | Train Loss: 0.0355 | Val rms_score: 55.4028
|
55 |
+
2025-09-23 02:26:32,650 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 44/100 | Train Loss: 0.0413 | Val rms_score: 55.0123
|
56 |
+
2025-09-23 02:26:36,072 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 45/100 | Train Loss: 0.0424 | Val rms_score: 54.7065
|
57 |
+
2025-09-23 02:26:39,437 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 46/100 | Train Loss: 0.0379 | Val rms_score: 54.7598
|
58 |
+
2025-09-23 02:26:43,023 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 47/100 | Train Loss: 0.0415 | Val rms_score: 54.7665
|
59 |
+
2025-09-23 02:26:47,311 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 48/100 | Train Loss: 0.0435 | Val rms_score: 54.4067
|
60 |
+
2025-09-23 02:26:50,535 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 49/100 | Train Loss: 0.0402 | Val rms_score: 54.3983
|
61 |
+
2025-09-23 02:26:53,661 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 50/100 | Train Loss: 0.0428 | Val rms_score: 54.7264
|
62 |
+
2025-09-23 02:26:56,845 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 51/100 | Train Loss: 0.0428 | Val rms_score: 54.2095
|
63 |
+
2025-09-23 02:27:00,262 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 52/100 | Train Loss: 0.0379 | Val rms_score: 54.7599
|
64 |
+
2025-09-23 02:27:03,486 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 53/100 | Train Loss: 0.0343 | Val rms_score: 54.6994
|
65 |
+
2025-09-23 02:27:06,728 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 54/100 | Train Loss: 0.0411 | Val rms_score: 54.5277
|
66 |
+
2025-09-23 02:27:10,065 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 55/100 | Train Loss: 0.0396 | Val rms_score: 54.7223
|
67 |
+
2025-09-23 02:27:13,354 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 56/100 | Train Loss: 0.0370 | Val rms_score: 55.5460
|
68 |
+
2025-09-23 02:27:16,908 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 57/100 | Train Loss: 0.0398 | Val rms_score: 53.9347
|
69 |
+
2025-09-23 02:27:20,150 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 58/100 | Train Loss: 0.0449 | Val rms_score: 53.8567
|
70 |
+
2025-09-23 02:27:23,384 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 59/100 | Train Loss: 0.0415 | Val rms_score: 54.3969
|
71 |
+
2025-09-23 02:27:26,320 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 60/100 | Train Loss: 0.0378 | Val rms_score: 54.5199
|
72 |
+
2025-09-23 02:27:29,110 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 61/100 | Train Loss: 0.0363 | Val rms_score: 54.5271
|
73 |
+
2025-09-23 02:27:32,680 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 62/100 | Train Loss: 0.0317 | Val rms_score: 54.7049
|
74 |
+
2025-09-23 02:27:35,951 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 63/100 | Train Loss: 0.0352 | Val rms_score: 54.4889
|
75 |
+
2025-09-23 02:27:39,278 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 64/100 | Train Loss: 0.0415 | Val rms_score: 54.1347
|
76 |
+
2025-09-23 02:27:42,585 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 65/100 | Train Loss: 0.0342 | Val rms_score: 54.2777
|
77 |
+
2025-09-23 02:27:45,937 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 66/100 | Train Loss: 0.0318 | Val rms_score: 54.0480
|
78 |
+
2025-09-23 02:27:49,546 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 67/100 | Train Loss: 0.0268 | Val rms_score: 53.9690
|
79 |
+
2025-09-23 02:27:52,807 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 68/100 | Train Loss: 0.0333 | Val rms_score: 54.1931
|
80 |
+
2025-09-23 02:27:56,102 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 69/100 | Train Loss: 0.0333 | Val rms_score: 53.9289
|
81 |
+
2025-09-23 02:27:59,367 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 70/100 | Train Loss: 0.0288 | Val rms_score: 53.9330
|
82 |
+
2025-09-23 02:28:02,391 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 71/100 | Train Loss: 0.0312 | Val rms_score: 54.4347
|
83 |
+
2025-09-23 02:28:06,062 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 72/100 | Train Loss: 0.0329 | Val rms_score: 54.1690
|
84 |
+
2025-09-23 02:28:09,426 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 73/100 | Train Loss: 0.0326 | Val rms_score: 53.9657
|
85 |
+
2025-09-23 02:28:12,781 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 74/100 | Train Loss: 0.0298 | Val rms_score: 54.2592
|
86 |
+
2025-09-23 02:28:16,074 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 75/100 | Train Loss: 0.0309 | Val rms_score: 54.3653
|
87 |
+
2025-09-23 02:28:19,442 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 76/100 | Train Loss: 0.0303 | Val rms_score: 53.6768
|
88 |
+
2025-09-23 02:28:23,103 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 77/100 | Train Loss: 0.0276 | Val rms_score: 54.1879
|
89 |
+
2025-09-23 02:28:26,232 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 78/100 | Train Loss: 0.0311 | Val rms_score: 54.3574
|
90 |
+
2025-09-23 02:28:29,390 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 79/100 | Train Loss: 0.0259 | Val rms_score: 54.1942
|
91 |
+
2025-09-23 02:28:32,580 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 80/100 | Train Loss: 0.0305 | Val rms_score: 53.7892
|
92 |
+
2025-09-23 02:28:35,797 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 81/100 | Train Loss: 0.0309 | Val rms_score: 54.3539
|
93 |
+
2025-09-23 02:28:39,317 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 82/100 | Train Loss: 0.0286 | Val rms_score: 53.5298
|
94 |
+
2025-09-23 02:28:42,474 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 83/100 | Train Loss: 0.0268 | Val rms_score: 53.8574
|
95 |
+
2025-09-23 02:28:45,263 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 84/100 | Train Loss: 0.0286 | Val rms_score: 54.2696
|
96 |
+
2025-09-23 02:28:48,175 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 85/100 | Train Loss: 0.0255 | Val rms_score: 54.1452
|
97 |
+
2025-09-23 02:28:51,570 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 86/100 | Train Loss: 0.0239 | Val rms_score: 53.6785
|
98 |
+
2025-09-23 02:28:55,153 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 87/100 | Train Loss: 0.0229 | Val rms_score: 53.9346
|
99 |
+
2025-09-23 02:28:58,466 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 88/100 | Train Loss: 0.0266 | Val rms_score: 53.6902
|
100 |
+
2025-09-23 02:29:01,804 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 89/100 | Train Loss: 0.0238 | Val rms_score: 53.2765
|
101 |
+
2025-09-23 02:29:05,131 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 90/100 | Train Loss: 0.0233 | Val rms_score: 53.7092
|
102 |
+
2025-09-23 02:29:08,426 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 91/100 | Train Loss: 0.0193 | Val rms_score: 54.2826
|
103 |
+
2025-09-23 02:29:12,021 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 92/100 | Train Loss: 0.0238 | Val rms_score: 53.4759
|
104 |
+
2025-09-23 02:29:15,286 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 93/100 | Train Loss: 0.0221 | Val rms_score: 53.8316
|
105 |
+
2025-09-23 02:29:17,971 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 94/100 | Train Loss: 0.0230 | Val rms_score: 54.1561
|
106 |
+
2025-09-23 02:29:21,197 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 95/100 | Train Loss: 0.0275 | Val rms_score: 53.5898
|
107 |
+
2025-09-23 02:29:25,391 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 96/100 | Train Loss: 0.0197 | Val rms_score: 53.6764
|
108 |
+
2025-09-23 02:29:29,042 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 97/100 | Train Loss: 0.0249 | Val rms_score: 53.4852
|
109 |
+
2025-09-23 02:29:32,411 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 98/100 | Train Loss: 0.0235 | Val rms_score: 54.0914
|
110 |
+
2025-09-23 02:29:35,672 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 99/100 | Train Loss: 0.0216 | Val rms_score: 53.8689
|
111 |
+
2025-09-23 02:29:38,979 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 100/100 | Train Loss: 0.0224 | Val rms_score: 54.0338
|
112 |
+
2025-09-23 02:29:39,427 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Test rms_score: 44.8877
|
113 |
+
2025-09-23 02:29:39,732 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Starting triplicate run 2 for dataset clearance at 2025-09-23_02-29-39
|
114 |
+
2025-09-23 02:29:42,808 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 1/100 | Train Loss: 2.6667 | Val rms_score: 62.1389
|
115 |
+
2025-09-23 02:29:42,808 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Global step of best model: 21
|
116 |
+
2025-09-23 02:29:43,323 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Best model saved at epoch 1 with val rms_score: 62.1389
|
117 |
+
2025-09-23 02:29:46,535 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 2/100 | Train Loss: 1.2083 | Val rms_score: 54.2129
|
118 |
+
2025-09-23 02:29:46,712 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Global step of best model: 42
|
119 |
+
2025-09-23 02:29:47,227 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Best model saved at epoch 2 with val rms_score: 54.2129
|
120 |
+
2025-09-23 02:29:50,388 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 3/100 | Train Loss: 0.9702 | Val rms_score: 53.4431
|
121 |
+
2025-09-23 02:29:50,567 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Global step of best model: 63
|
122 |
+
2025-09-23 02:29:51,092 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Best model saved at epoch 3 with val rms_score: 53.4431
|
123 |
+
2025-09-23 02:29:54,288 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 4/100 | Train Loss: 0.8512 | Val rms_score: 53.6316
|
124 |
+
2025-09-23 02:29:57,390 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 5/100 | Train Loss: 0.8063 | Val rms_score: 52.4938
|
125 |
+
2025-09-23 02:29:57,568 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Global step of best model: 105
|
126 |
+
2025-09-23 02:29:58,095 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Best model saved at epoch 5 with val rms_score: 52.4938
|
127 |
+
2025-09-23 02:30:01,012 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 6/100 | Train Loss: 0.6399 | Val rms_score: 53.9315
|
128 |
+
2025-09-23 02:30:04,341 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 7/100 | Train Loss: 0.5417 | Val rms_score: 54.7955
|
129 |
+
2025-09-23 02:30:07,637 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 8/100 | Train Loss: 0.4464 | Val rms_score: 54.3445
|
130 |
+
2025-09-23 02:30:10,811 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 9/100 | Train Loss: 0.3646 | Val rms_score: 55.8584
|
131 |
+
2025-09-23 02:30:14,176 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 10/100 | Train Loss: 0.3187 | Val rms_score: 57.0586
|
132 |
+
2025-09-23 02:30:17,553 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 11/100 | Train Loss: 0.2455 | Val rms_score: 56.0612
|
133 |
+
2025-09-23 02:30:21,219 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 12/100 | Train Loss: 0.1994 | Val rms_score: 55.6972
|
134 |
+
2025-09-23 02:30:24,557 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 13/100 | Train Loss: 0.1749 | Val rms_score: 55.5981
|
135 |
+
2025-09-23 02:30:27,655 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 14/100 | Train Loss: 0.1525 | Val rms_score: 55.6469
|
136 |
+
2025-09-23 02:30:30,901 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 15/100 | Train Loss: 0.1297 | Val rms_score: 54.8865
|
137 |
+
2025-09-23 02:30:33,661 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 16/100 | Train Loss: 0.1116 | Val rms_score: 54.3072
|
138 |
+
2025-09-23 02:30:37,139 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 17/100 | Train Loss: 0.1086 | Val rms_score: 53.7475
|
139 |
+
2025-09-23 02:30:40,386 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 18/100 | Train Loss: 0.0960 | Val rms_score: 54.2329
|
140 |
+
2025-09-23 02:30:43,642 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 19/100 | Train Loss: 0.0960 | Val rms_score: 54.3448
|
141 |
+
2025-09-23 02:30:47,020 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 20/100 | Train Loss: 0.0781 | Val rms_score: 54.4796
|
142 |
+
2025-09-23 02:30:50,352 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 21/100 | Train Loss: 0.0807 | Val rms_score: 53.6359
|
143 |
+
2025-09-23 02:30:53,901 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 22/100 | Train Loss: 0.0770 | Val rms_score: 54.1505
|
144 |
+
2025-09-23 02:30:57,282 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 23/100 | Train Loss: 0.0733 | Val rms_score: 53.4667
|
145 |
+
2025-09-23 02:31:00,544 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 24/100 | Train Loss: 0.0918 | Val rms_score: 53.4772
|
146 |
+
2025-09-23 02:31:03,915 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 25/100 | Train Loss: 0.0618 | Val rms_score: 54.9615
|
147 |
+
2025-09-23 02:31:07,182 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 26/100 | Train Loss: 0.0681 | Val rms_score: 54.2795
|
148 |
+
2025-09-23 02:31:10,785 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 27/100 | Train Loss: 0.0562 | Val rms_score: 53.6852
|
149 |
+
2025-09-23 02:31:14,097 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 28/100 | Train Loss: 0.0606 | Val rms_score: 54.8208
|
150 |
+
2025-09-23 02:31:17,305 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 29/100 | Train Loss: 0.0582 | Val rms_score: 54.0562
|
151 |
+
2025-09-23 02:31:20,040 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 30/100 | Train Loss: 0.0640 | Val rms_score: 53.4400
|
152 |
+
2025-09-23 02:31:23,127 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 31/100 | Train Loss: 0.0562 | Val rms_score: 54.8254
|
153 |
+
2025-09-23 02:31:26,737 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 32/100 | Train Loss: 0.0580 | Val rms_score: 53.5470
|
154 |
+
2025-09-23 02:31:30,095 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 33/100 | Train Loss: 0.0558 | Val rms_score: 54.0753
|
155 |
+
2025-09-23 02:31:33,451 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 34/100 | Train Loss: 0.0474 | Val rms_score: 53.3721
|
156 |
+
2025-09-23 02:31:36,845 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 35/100 | Train Loss: 0.0603 | Val rms_score: 56.0334
|
157 |
+
2025-09-23 02:31:40,215 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 36/100 | Train Loss: 0.0551 | Val rms_score: 54.0927
|
158 |
+
2025-09-23 02:31:43,810 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 37/100 | Train Loss: 0.0476 | Val rms_score: 53.8720
|
159 |
+
2025-09-23 02:31:47,036 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 38/100 | Train Loss: 0.0476 | Val rms_score: 54.5329
|
160 |
+
2025-09-23 02:31:49,876 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 39/100 | Train Loss: 0.0450 | Val rms_score: 54.2387
|
161 |
+
2025-09-23 02:31:53,272 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 40/100 | Train Loss: 0.0422 | Val rms_score: 54.1994
|
162 |
+
2025-09-23 02:31:56,598 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 41/100 | Train Loss: 0.0439 | Val rms_score: 53.8875
|
163 |
+
2025-09-23 02:32:00,233 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 42/100 | Train Loss: 0.0476 | Val rms_score: 54.6129
|
164 |
+
2025-09-23 02:32:03,561 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 43/100 | Train Loss: 0.0521 | Val rms_score: 53.3118
|
165 |
+
2025-09-23 02:32:06,885 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 44/100 | Train Loss: 0.0480 | Val rms_score: 53.9840
|
166 |
+
2025-09-23 02:32:10,236 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 45/100 | Train Loss: 0.0452 | Val rms_score: 54.9699
|
167 |
+
2025-09-23 02:32:13,543 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 46/100 | Train Loss: 0.0454 | Val rms_score: 54.0350
|
168 |
+
2025-09-23 02:32:17,106 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 47/100 | Train Loss: 0.0443 | Val rms_score: 54.0397
|
169 |
+
2025-09-23 02:32:21,355 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 48/100 | Train Loss: 0.0535 | Val rms_score: 53.9308
|
170 |
+
2025-09-23 02:32:24,486 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 49/100 | Train Loss: 0.0402 | Val rms_score: 54.2807
|
171 |
+
2025-09-23 02:32:27,597 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 50/100 | Train Loss: 0.0350 | Val rms_score: 53.7566
|
172 |
+
2025-09-23 02:32:30,711 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 51/100 | Train Loss: 0.0383 | Val rms_score: 54.3024
|
173 |
+
2025-09-23 02:32:34,096 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 52/100 | Train Loss: 0.0383 | Val rms_score: 53.5839
|
174 |
+
2025-09-23 02:32:37,154 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 53/100 | Train Loss: 0.0376 | Val rms_score: 54.2078
|
175 |
+
2025-09-23 02:32:40,181 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 54/100 | Train Loss: 0.0415 | Val rms_score: 53.2826
|
176 |
+
2025-09-23 02:32:43,232 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 55/100 | Train Loss: 0.0510 | Val rms_score: 55.6985
|
177 |
+
2025-09-23 02:32:46,506 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 56/100 | Train Loss: 0.0426 | Val rms_score: 53.0345
|
178 |
+
2025-09-23 02:32:50,062 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 57/100 | Train Loss: 0.0368 | Val rms_score: 54.2210
|
179 |
+
2025-09-23 02:32:53,402 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 58/100 | Train Loss: 0.0388 | Val rms_score: 53.3964
|
180 |
+
2025-09-23 02:32:56,600 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 59/100 | Train Loss: 0.0376 | Val rms_score: 53.6557
|
181 |
+
2025-09-23 02:32:59,877 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 60/100 | Train Loss: 0.0326 | Val rms_score: 53.4909
|
182 |
+
2025-09-23 02:33:03,266 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 61/100 | Train Loss: 0.0342 | Val rms_score: 53.4979
|
183 |
+
2025-09-23 02:33:06,285 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 62/100 | Train Loss: 0.0245 | Val rms_score: 54.0685
|
184 |
+
2025-09-23 02:33:09,690 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 63/100 | Train Loss: 0.0363 | Val rms_score: 53.3529
|
185 |
+
2025-09-23 02:33:13,014 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 64/100 | Train Loss: 0.0324 | Val rms_score: 54.0627
|
186 |
+
2025-09-23 02:33:16,433 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 65/100 | Train Loss: 0.0326 | Val rms_score: 53.5633
|
187 |
+
2025-09-23 02:33:19,806 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 66/100 | Train Loss: 0.0346 | Val rms_score: 53.7578
|
188 |
+
2025-09-23 02:33:23,254 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 67/100 | Train Loss: 0.0304 | Val rms_score: 54.1811
|
189 |
+
2025-09-23 02:33:26,448 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 68/100 | Train Loss: 0.0296 | Val rms_score: 53.3319
|
190 |
+
2025-09-23 02:33:29,627 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 69/100 | Train Loss: 0.0283 | Val rms_score: 54.1282
|
191 |
+
2025-09-23 02:33:32,785 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 70/100 | Train Loss: 0.0279 | Val rms_score: 53.1424
|
192 |
+
2025-09-23 02:33:35,913 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 71/100 | Train Loss: 0.0270 | Val rms_score: 53.7668
|
193 |
+
2025-09-23 02:33:39,410 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 72/100 | Train Loss: 0.0272 | Val rms_score: 54.0468
|
194 |
+
2025-09-23 02:33:42,733 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 73/100 | Train Loss: 0.0264 | Val rms_score: 53.7667
|
195 |
+
2025-09-23 02:33:46,109 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 74/100 | Train Loss: 0.0292 | Val rms_score: 54.0684
|
196 |
+
2025-09-23 02:33:49,476 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 75/100 | Train Loss: 0.0272 | Val rms_score: 53.5802
|
197 |
+
2025-09-23 02:33:52,799 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 76/100 | Train Loss: 0.0283 | Val rms_score: 54.7996
|
198 |
+
2025-09-23 02:33:56,155 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 77/100 | Train Loss: 0.0227 | Val rms_score: 53.9152
|
199 |
+
2025-09-23 02:33:59,173 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 78/100 | Train Loss: 0.0227 | Val rms_score: 53.3719
|
200 |
+
2025-09-23 02:34:01,996 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 79/100 | Train Loss: 0.0225 | Val rms_score: 53.7240
|
201 |
+
2025-09-23 02:34:05,408 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 80/100 | Train Loss: 0.0227 | Val rms_score: 52.8891
|
202 |
+
2025-09-23 02:34:08,768 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 81/100 | Train Loss: 0.0302 | Val rms_score: 53.3242
|
203 |
+
2025-09-23 02:34:12,454 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 82/100 | Train Loss: 0.0224 | Val rms_score: 53.9964
|
204 |
+
2025-09-23 02:34:15,807 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 83/100 | Train Loss: 0.0246 | Val rms_score: 53.4009
|
205 |
+
2025-09-23 02:34:19,212 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 84/100 | Train Loss: 0.0210 | Val rms_score: 53.7258
|
206 |
+
2025-09-23 02:34:22,535 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 85/100 | Train Loss: 0.0229 | Val rms_score: 53.5940
|
207 |
+
2025-09-23 02:34:25,731 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 86/100 | Train Loss: 0.0249 | Val rms_score: 53.5943
|
208 |
+
2025-09-23 02:34:29,296 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 87/100 | Train Loss: 0.0228 | Val rms_score: 53.6351
|
209 |
+
2025-09-23 02:34:32,556 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 88/100 | Train Loss: 0.0232 | Val rms_score: 52.7507
|
210 |
+
2025-09-23 02:34:35,872 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 89/100 | Train Loss: 0.0264 | Val rms_score: 53.9168
|
211 |
+
2025-09-23 02:34:39,168 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 90/100 | Train Loss: 0.0233 | Val rms_score: 53.8379
|
212 |
+
2025-09-23 02:34:42,462 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 91/100 | Train Loss: 0.0201 | Val rms_score: 53.5661
|
213 |
+
2025-09-23 02:34:46,143 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 92/100 | Train Loss: 0.0208 | Val rms_score: 53.5656
|
214 |
+
2025-09-23 02:34:49,510 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 93/100 | Train Loss: 0.0234 | Val rms_score: 53.5429
|
215 |
+
2025-09-23 02:34:52,853 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 94/100 | Train Loss: 0.0236 | Val rms_score: 52.8555
|
216 |
+
2025-09-23 02:34:56,201 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 95/100 | Train Loss: 0.0224 | Val rms_score: 53.2964
|
217 |
+
2025-09-23 02:35:00,320 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 96/100 | Train Loss: 0.0203 | Val rms_score: 53.0408
|
218 |
+
2025-09-23 02:35:03,816 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 97/100 | Train Loss: 0.0207 | Val rms_score: 53.4718
|
219 |
+
2025-09-23 02:35:07,066 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 98/100 | Train Loss: 0.0222 | Val rms_score: 52.8475
|
220 |
+
2025-09-23 02:35:10,289 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 99/100 | Train Loss: 0.0206 | Val rms_score: 53.1519
|
221 |
+
2025-09-23 02:35:13,501 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 100/100 | Train Loss: 0.0193 | Val rms_score: 52.7876
|
222 |
+
2025-09-23 02:35:13,911 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Test rms_score: 44.7074
|
223 |
+
2025-09-23 02:35:14,244 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Starting triplicate run 3 for dataset clearance at 2025-09-23_02-35-14
|
224 |
+
2025-09-23 02:35:16,940 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 1/100 | Train Loss: 2.7024 | Val rms_score: 62.8137
|
225 |
+
2025-09-23 02:35:16,940 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Global step of best model: 21
|
226 |
+
2025-09-23 02:35:17,480 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Best model saved at epoch 1 with val rms_score: 62.8137
|
227 |
+
2025-09-23 02:35:20,439 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 2/100 | Train Loss: 1.1726 | Val rms_score: 53.0834
|
228 |
+
2025-09-23 02:35:20,611 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Global step of best model: 42
|
229 |
+
2025-09-23 02:35:21,145 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Best model saved at epoch 2 with val rms_score: 53.0834
|
230 |
+
2025-09-23 02:35:24,536 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 3/100 | Train Loss: 0.9643 | Val rms_score: 53.4656
|
231 |
+
2025-09-23 02:35:27,852 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 4/100 | Train Loss: 0.8631 | Val rms_score: 53.6624
|
232 |
+
2025-09-23 02:35:31,289 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 5/100 | Train Loss: 0.8938 | Val rms_score: 53.5688
|
233 |
+
2025-09-23 02:35:34,592 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 6/100 | Train Loss: 0.6429 | Val rms_score: 52.4163
|
234 |
+
2025-09-23 02:35:35,054 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Global step of best model: 126
|
235 |
+
2025-09-23 02:35:35,587 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Best model saved at epoch 6 with val rms_score: 52.4163
|
236 |
+
2025-09-23 02:35:38,869 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 7/100 | Train Loss: 0.5417 | Val rms_score: 53.7688
|
237 |
+
2025-09-23 02:35:41,809 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 8/100 | Train Loss: 0.4315 | Val rms_score: 54.1721
|
238 |
+
2025-09-23 02:35:45,011 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 9/100 | Train Loss: 0.3705 | Val rms_score: 55.1075
|
239 |
+
2025-09-23 02:35:48,166 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 10/100 | Train Loss: 0.3266 | Val rms_score: 57.0364
|
240 |
+
2025-09-23 02:35:51,324 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 11/100 | Train Loss: 0.2292 | Val rms_score: 56.6471
|
241 |
+
2025-09-23 02:35:54,803 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 12/100 | Train Loss: 0.1979 | Val rms_score: 56.3778
|
242 |
+
2025-09-23 02:35:58,036 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 13/100 | Train Loss: 0.1682 | Val rms_score: 56.0551
|
243 |
+
2025-09-23 02:36:01,298 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 14/100 | Train Loss: 0.1391 | Val rms_score: 55.6539
|
244 |
+
2025-09-23 02:36:04,529 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 15/100 | Train Loss: 0.1266 | Val rms_score: 55.8152
|
245 |
+
2025-09-23 02:36:07,769 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 16/100 | Train Loss: 0.1205 | Val rms_score: 55.4859
|
246 |
+
2025-09-23 02:36:11,263 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 17/100 | Train Loss: 0.1101 | Val rms_score: 55.4728
|
247 |
+
2025-09-23 02:36:14,614 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 18/100 | Train Loss: 0.0945 | Val rms_score: 54.9457
|
248 |
+
2025-09-23 02:36:17,967 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 19/100 | Train Loss: 0.0844 | Val rms_score: 55.2624
|
249 |
+
2025-09-23 02:36:21,344 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 20/100 | Train Loss: 0.0965 | Val rms_score: 55.3222
|
250 |
+
2025-09-23 02:36:24,701 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 21/100 | Train Loss: 0.0882 | Val rms_score: 54.9538
|
251 |
+
2025-09-23 02:36:28,312 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 22/100 | Train Loss: 0.0774 | Val rms_score: 54.4550
|
252 |
+
2025-09-23 02:36:31,581 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 23/100 | Train Loss: 0.0714 | Val rms_score: 55.2149
|
253 |
+
2025-09-23 02:36:34,320 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 24/100 | Train Loss: 0.0957 | Val rms_score: 54.4989
|
254 |
+
2025-09-23 02:36:37,328 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 25/100 | Train Loss: 0.0677 | Val rms_score: 55.0280
|
255 |
+
2025-09-23 02:36:40,686 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 26/100 | Train Loss: 0.0632 | Val rms_score: 55.0573
|
256 |
+
2025-09-23 02:36:44,212 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 27/100 | Train Loss: 0.0610 | Val rms_score: 54.8122
|
257 |
+
2025-09-23 02:36:47,602 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 28/100 | Train Loss: 0.0636 | Val rms_score: 55.2845
|
258 |
+
2025-09-23 02:36:50,957 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 29/100 | Train Loss: 0.0590 | Val rms_score: 54.9997
|
259 |
+
2025-09-23 02:36:54,313 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 30/100 | Train Loss: 0.0629 | Val rms_score: 55.1799
|
260 |
+
2025-09-23 02:36:57,295 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 31/100 | Train Loss: 0.0536 | Val rms_score: 53.8059
|
261 |
+
2025-09-23 02:37:00,667 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 32/100 | Train Loss: 0.0510 | Val rms_score: 55.8723
|
262 |
+
2025-09-23 02:37:03,789 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 33/100 | Train Loss: 0.0532 | Val rms_score: 54.2596
|
263 |
+
2025-09-23 02:37:07,085 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 34/100 | Train Loss: 0.0578 | Val rms_score: 55.1476
|
264 |
+
2025-09-23 02:37:10,308 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 35/100 | Train Loss: 0.0580 | Val rms_score: 54.3191
|
265 |
+
2025-09-23 02:37:13,581 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 36/100 | Train Loss: 0.0562 | Val rms_score: 55.3143
|
266 |
+
2025-09-23 02:37:17,122 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 37/100 | Train Loss: 0.0588 | Val rms_score: 55.3999
|
267 |
+
2025-09-23 02:37:20,410 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 38/100 | Train Loss: 0.0502 | Val rms_score: 54.8114
|
268 |
+
2025-09-23 02:37:23,814 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 39/100 | Train Loss: 0.0461 | Val rms_score: 54.2705
|
269 |
+
2025-09-23 02:37:27,129 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 40/100 | Train Loss: 0.0454 | Val rms_score: 54.6981
|
270 |
+
2025-09-23 02:37:30,471 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 41/100 | Train Loss: 0.0433 | Val rms_score: 55.1799
|
271 |
+
2025-09-23 02:37:34,026 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 42/100 | Train Loss: 0.0450 | Val rms_score: 54.5653
|
272 |
+
2025-09-23 02:37:37,307 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 43/100 | Train Loss: 0.0316 | Val rms_score: 55.3091
|
273 |
+
2025-09-23 02:37:40,549 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 44/100 | Train Loss: 0.0452 | Val rms_score: 54.5225
|
274 |
+
2025-09-23 02:37:43,900 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 45/100 | Train Loss: 0.0445 | Val rms_score: 54.8949
|
275 |
+
2025-09-23 02:37:47,255 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 46/100 | Train Loss: 0.0415 | Val rms_score: 54.8670
|
276 |
+
2025-09-23 02:37:50,773 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 47/100 | Train Loss: 0.0357 | Val rms_score: 54.6485
|
277 |
+
2025-09-23 02:37:54,530 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 48/100 | Train Loss: 0.0420 | Val rms_score: 54.7409
|
278 |
+
2025-09-23 02:37:57,407 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 49/100 | Train Loss: 0.0424 | Val rms_score: 54.7172
|
279 |
+
2025-09-23 02:38:00,764 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 50/100 | Train Loss: 0.0365 | Val rms_score: 54.8498
|
280 |
+
2025-09-23 02:38:04,116 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 51/100 | Train Loss: 0.0404 | Val rms_score: 55.2955
|
281 |
+
2025-09-23 02:38:07,666 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 52/100 | Train Loss: 0.0353 | Val rms_score: 54.7863
|
282 |
+
2025-09-23 02:38:11,045 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 53/100 | Train Loss: 0.0409 | Val rms_score: 54.2605
|
283 |
+
2025-09-23 02:38:13,784 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 54/100 | Train Loss: 0.0379 | Val rms_score: 53.9504
|
284 |
+
2025-09-23 02:38:17,050 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 55/100 | Train Loss: 0.0365 | Val rms_score: 54.4062
|
285 |
+
2025-09-23 02:38:20,367 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 56/100 | Train Loss: 0.0331 | Val rms_score: 54.7869
|
286 |
+
2025-09-23 02:38:24,049 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 57/100 | Train Loss: 0.0339 | Val rms_score: 54.1264
|
287 |
+
2025-09-23 02:38:27,413 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 58/100 | Train Loss: 0.0306 | Val rms_score: 54.7420
|
288 |
+
2025-09-23 02:38:30,785 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 59/100 | Train Loss: 0.0333 | Val rms_score: 54.3387
|
289 |
+
2025-09-23 02:38:34,137 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 60/100 | Train Loss: 0.0301 | Val rms_score: 54.5247
|
290 |
+
2025-09-23 02:38:37,420 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 61/100 | Train Loss: 0.0322 | Val rms_score: 54.4563
|
291 |
+
2025-09-23 02:38:41,017 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 62/100 | Train Loss: 0.0283 | Val rms_score: 55.2089
|
292 |
+
2025-09-23 02:38:44,268 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 63/100 | Train Loss: 0.0316 | Val rms_score: 54.2290
|
293 |
+
2025-09-23 02:38:47,457 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 64/100 | Train Loss: 0.0303 | Val rms_score: 55.7078
|
294 |
+
2025-09-23 02:38:50,813 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 65/100 | Train Loss: 0.0301 | Val rms_score: 53.8519
|
295 |
+
2025-09-23 02:38:54,105 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 66/100 | Train Loss: 0.0320 | Val rms_score: 54.9603
|
296 |
+
2025-09-23 02:38:57,633 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 67/100 | Train Loss: 0.0352 | Val rms_score: 53.9485
|
297 |
+
2025-09-23 02:39:00,829 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 68/100 | Train Loss: 0.0296 | Val rms_score: 53.8367
|
298 |
+
2025-09-23 02:39:04,008 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 69/100 | Train Loss: 0.0294 | Val rms_score: 54.1720
|
299 |
+
2025-09-23 02:39:07,206 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 70/100 | Train Loss: 0.0322 | Val rms_score: 54.6150
|
300 |
+
2025-09-23 02:39:10,371 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 71/100 | Train Loss: 0.0281 | Val rms_score: 53.6285
|
301 |
+
2025-09-23 02:39:13,588 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 72/100 | Train Loss: 0.0342 | Val rms_score: 54.2210
|
302 |
+
2025-09-23 02:39:16,579 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 73/100 | Train Loss: 0.0294 | Val rms_score: 54.3746
|
303 |
+
2025-09-23 02:39:19,929 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 74/100 | Train Loss: 0.0259 | Val rms_score: 54.5778
|
304 |
+
2025-09-23 02:39:23,266 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 75/100 | Train Loss: 0.0286 | Val rms_score: 54.4729
|
305 |
+
2025-09-23 02:39:26,454 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 76/100 | Train Loss: 0.0260 | Val rms_score: 54.5327
|
306 |
+
2025-09-23 02:39:30,049 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 77/100 | Train Loss: 0.0276 | Val rms_score: 53.8169
|
307 |
+
2025-09-23 02:39:33,425 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 78/100 | Train Loss: 0.0266 | Val rms_score: 54.2326
|
308 |
+
2025-09-23 02:39:36,658 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 79/100 | Train Loss: 0.0259 | Val rms_score: 53.7880
|
309 |
+
2025-09-23 02:39:39,981 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 80/100 | Train Loss: 0.0288 | Val rms_score: 54.4541
|
310 |
+
2025-09-23 02:39:43,302 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 81/100 | Train Loss: 0.0198 | Val rms_score: 54.4994
|
311 |
+
2025-09-23 02:39:46,851 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 82/100 | Train Loss: 0.0247 | Val rms_score: 53.8658
|
312 |
+
2025-09-23 02:39:50,077 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 83/100 | Train Loss: 0.0247 | Val rms_score: 54.5324
|
313 |
+
2025-09-23 02:39:53,402 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 84/100 | Train Loss: 0.0262 | Val rms_score: 53.9655
|
314 |
+
2025-09-23 02:39:56,707 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 85/100 | Train Loss: 0.0240 | Val rms_score: 54.1139
|
315 |
+
2025-09-23 02:39:59,897 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 86/100 | Train Loss: 0.0247 | Val rms_score: 53.7986
|
316 |
+
2025-09-23 02:40:03,404 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 87/100 | Train Loss: 0.0226 | Val rms_score: 54.6571
|
317 |
+
2025-09-23 02:40:06,631 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 88/100 | Train Loss: 0.0251 | Val rms_score: 54.2616
|
318 |
+
2025-09-23 02:40:09,840 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 89/100 | Train Loss: 0.0238 | Val rms_score: 54.6032
|
319 |
+
2025-09-23 02:40:13,128 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 90/100 | Train Loss: 0.0275 | Val rms_score: 54.1367
|
320 |
+
2025-09-23 02:40:16,413 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 91/100 | Train Loss: 0.0265 | Val rms_score: 54.1650
|
321 |
+
2025-09-23 02:40:19,921 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 92/100 | Train Loss: 0.0206 | Val rms_score: 54.0102
|
322 |
+
2025-09-23 02:40:23,262 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 93/100 | Train Loss: 0.0230 | Val rms_score: 54.0670
|
323 |
+
2025-09-23 02:40:26,585 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 94/100 | Train Loss: 0.0275 | Val rms_score: 54.2562
|
324 |
+
2025-09-23 02:40:29,869 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 95/100 | Train Loss: 0.0210 | Val rms_score: 54.0515
|
325 |
+
2025-09-23 02:40:33,679 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 96/100 | Train Loss: 0.0201 | Val rms_score: 54.4246
|
326 |
+
2025-09-23 02:40:37,094 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 97/100 | Train Loss: 0.0209 | Val rms_score: 53.9977
|
327 |
+
2025-09-23 02:40:40,403 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 98/100 | Train Loss: 0.0226 | Val rms_score: 54.3206
|
328 |
+
2025-09-23 02:40:43,697 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 99/100 | Train Loss: 0.0198 | Val rms_score: 53.7101
|
329 |
+
2025-09-23 02:40:46,952 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 100/100 | Train Loss: 0.0209 | Val rms_score: 54.1627
|
330 |
+
2025-09-23 02:40:47,242 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Test rms_score: 42.4459
|
331 |
+
2025-09-23 02:40:47,563 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Final Triplicate Test Results — Avg rms_score: 44.0137, Std Dev: 1.1110
|
logs_modchembert_regression_ModChemBERT-MLM-DAPT-TAFT-OPT/modchembert_deepchem_splits_run_delaney_epochs100_batch_size64_20250923_024047.log
ADDED
@@ -0,0 +1,413 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
2025-09-23 02:40:47,565 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Running benchmark for dataset: delaney
|
2 |
+
2025-09-23 02:40:47,565 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - dataset: delaney, tasks: ['measured_log_solubility_in_mols_per_litre'], epochs: 100, learning rate: 3e-05, transform: True
|
3 |
+
2025-09-23 02:40:47,572 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Starting triplicate run 1 for dataset delaney at 2025-09-23_02-40-47
|
4 |
+
2025-09-23 02:40:50,167 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Epoch 1/100 | Train Loss: 0.4500 | Val rms_score: 1.1857
|
5 |
+
2025-09-23 02:40:50,167 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Global step of best model: 15
|
6 |
+
2025-09-23 02:40:50,669 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Best model saved at epoch 1 with val rms_score: 1.1857
|
7 |
+
2025-09-23 02:40:53,328 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Epoch 2/100 | Train Loss: 0.1427 | Val rms_score: 1.0089
|
8 |
+
2025-09-23 02:40:53,495 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Global step of best model: 30
|
9 |
+
2025-09-23 02:40:54,011 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Best model saved at epoch 2 with val rms_score: 1.0089
|
10 |
+
2025-09-23 02:40:56,560 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Epoch 3/100 | Train Loss: 0.0927 | Val rms_score: 0.9846
|
11 |
+
2025-09-23 02:40:56,728 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Global step of best model: 45
|
12 |
+
2025-09-23 02:40:57,241 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Best model saved at epoch 3 with val rms_score: 0.9846
|
13 |
+
2025-09-23 02:40:59,796 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Epoch 4/100 | Train Loss: 0.0760 | Val rms_score: 0.9739
|
14 |
+
2025-09-23 02:40:59,970 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Global step of best model: 60
|
15 |
+
2025-09-23 02:41:00,498 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Best model saved at epoch 4 with val rms_score: 0.9739
|
16 |
+
2025-09-23 02:41:03,077 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Epoch 5/100 | Train Loss: 0.0714 | Val rms_score: 0.9536
|
17 |
+
2025-09-23 02:41:03,254 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Global step of best model: 75
|
18 |
+
2025-09-23 02:41:03,756 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Best model saved at epoch 5 with val rms_score: 0.9536
|
19 |
+
2025-09-23 02:41:06,307 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Epoch 6/100 | Train Loss: 0.0760 | Val rms_score: 0.9469
|
20 |
+
2025-09-23 02:41:06,734 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Global step of best model: 90
|
21 |
+
2025-09-23 02:41:07,238 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Best model saved at epoch 6 with val rms_score: 0.9469
|
22 |
+
2025-09-23 02:41:09,853 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Epoch 7/100 | Train Loss: 0.0563 | Val rms_score: 0.9132
|
23 |
+
2025-09-23 02:41:10,022 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Global step of best model: 105
|
24 |
+
2025-09-23 02:41:10,536 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Best model saved at epoch 7 with val rms_score: 0.9132
|
25 |
+
2025-09-23 02:41:13,173 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Epoch 8/100 | Train Loss: 0.0544 | Val rms_score: 0.8970
|
26 |
+
2025-09-23 02:41:13,340 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Global step of best model: 120
|
27 |
+
2025-09-23 02:41:13,842 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Best model saved at epoch 8 with val rms_score: 0.8970
|
28 |
+
2025-09-23 02:41:16,443 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Epoch 9/100 | Train Loss: 0.0500 | Val rms_score: 0.8960
|
29 |
+
2025-09-23 02:41:16,638 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Global step of best model: 135
|
30 |
+
2025-09-23 02:41:17,167 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Best model saved at epoch 9 with val rms_score: 0.8960
|
31 |
+
2025-09-23 02:41:19,833 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Epoch 10/100 | Train Loss: 0.0484 | Val rms_score: 0.8885
|
32 |
+
2025-09-23 02:41:20,001 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Global step of best model: 150
|
33 |
+
2025-09-23 02:41:20,506 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Best model saved at epoch 10 with val rms_score: 0.8885
|
34 |
+
2025-09-23 02:41:23,127 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Epoch 11/100 | Train Loss: 0.0443 | Val rms_score: 0.8704
|
35 |
+
2025-09-23 02:41:23,557 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Global step of best model: 165
|
36 |
+
2025-09-23 02:41:24,063 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Best model saved at epoch 11 with val rms_score: 0.8704
|
37 |
+
2025-09-23 02:41:26,660 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Epoch 12/100 | Train Loss: 0.0380 | Val rms_score: 0.8676
|
38 |
+
2025-09-23 02:41:26,845 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Global step of best model: 180
|
39 |
+
2025-09-23 02:41:27,369 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Best model saved at epoch 12 with val rms_score: 0.8676
|
40 |
+
2025-09-23 02:41:29,877 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Epoch 13/100 | Train Loss: 0.0375 | Val rms_score: 0.8638
|
41 |
+
2025-09-23 02:41:30,044 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Global step of best model: 195
|
42 |
+
2025-09-23 02:41:30,552 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Best model saved at epoch 13 with val rms_score: 0.8638
|
43 |
+
2025-09-23 02:41:33,091 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Epoch 14/100 | Train Loss: 0.0359 | Val rms_score: 0.8794
|
44 |
+
2025-09-23 02:41:35,550 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Epoch 15/100 | Train Loss: 0.0318 | Val rms_score: 0.8566
|
45 |
+
2025-09-23 02:41:35,731 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Global step of best model: 225
|
46 |
+
2025-09-23 02:41:36,272 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Best model saved at epoch 15 with val rms_score: 0.8566
|
47 |
+
2025-09-23 02:41:38,764 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Epoch 16/100 | Train Loss: 0.0323 | Val rms_score: 0.8679
|
48 |
+
2025-09-23 02:41:41,546 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Epoch 17/100 | Train Loss: 0.0288 | Val rms_score: 0.8602
|
49 |
+
2025-09-23 02:41:43,848 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Epoch 18/100 | Train Loss: 0.0312 | Val rms_score: 0.8730
|
50 |
+
2025-09-23 02:41:46,127 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Epoch 19/100 | Train Loss: 0.0275 | Val rms_score: 0.8833
|
51 |
+
2025-09-23 02:41:48,361 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Epoch 20/100 | Train Loss: 0.0271 | Val rms_score: 0.8639
|
52 |
+
2025-09-23 02:41:50,802 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Epoch 21/100 | Train Loss: 0.0247 | Val rms_score: 0.8621
|
53 |
+
2025-09-23 02:41:53,546 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Epoch 22/100 | Train Loss: 0.0249 | Val rms_score: 0.8603
|
54 |
+
2025-09-23 02:41:55,973 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Epoch 23/100 | Train Loss: 0.0238 | Val rms_score: 0.8625
|
55 |
+
2025-09-23 02:41:58,080 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Epoch 24/100 | Train Loss: 0.0234 | Val rms_score: 0.8566
|
56 |
+
2025-09-23 02:41:58,238 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Global step of best model: 360
|
57 |
+
2025-09-23 02:41:58,741 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Best model saved at epoch 24 with val rms_score: 0.8566
|
58 |
+
2025-09-23 02:42:01,170 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Epoch 25/100 | Train Loss: 0.0229 | Val rms_score: 0.8675
|
59 |
+
2025-09-23 02:42:03,668 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Epoch 26/100 | Train Loss: 0.0216 | Val rms_score: 0.8678
|
60 |
+
2025-09-23 02:42:06,459 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Epoch 27/100 | Train Loss: 0.0206 | Val rms_score: 0.8608
|
61 |
+
2025-09-23 02:42:09,074 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Epoch 28/100 | Train Loss: 0.0216 | Val rms_score: 0.8539
|
62 |
+
2025-09-23 02:42:09,244 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Global step of best model: 420
|
63 |
+
2025-09-23 02:42:09,757 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Best model saved at epoch 28 with val rms_score: 0.8539
|
64 |
+
2025-09-23 02:42:12,315 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Epoch 29/100 | Train Loss: 0.0214 | Val rms_score: 0.8667
|
65 |
+
2025-09-23 02:42:14,890 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Epoch 30/100 | Train Loss: 0.0204 | Val rms_score: 0.8692
|
66 |
+
2025-09-23 02:42:17,496 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Epoch 31/100 | Train Loss: 0.0215 | Val rms_score: 0.8654
|
67 |
+
2025-09-23 02:42:20,360 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Epoch 32/100 | Train Loss: 0.0216 | Val rms_score: 0.8950
|
68 |
+
2025-09-23 02:42:22,982 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Epoch 33/100 | Train Loss: 0.0215 | Val rms_score: 0.8677
|
69 |
+
2025-09-23 02:42:25,500 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Epoch 34/100 | Train Loss: 0.0163 | Val rms_score: 0.8771
|
70 |
+
2025-09-23 02:42:28,080 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Epoch 35/100 | Train Loss: 0.0163 | Val rms_score: 0.8691
|
71 |
+
2025-09-23 02:42:30,649 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Epoch 36/100 | Train Loss: 0.0165 | Val rms_score: 0.8680
|
72 |
+
2025-09-23 02:42:33,504 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Epoch 37/100 | Train Loss: 0.0177 | Val rms_score: 0.8464
|
73 |
+
2025-09-23 02:42:33,675 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Global step of best model: 555
|
74 |
+
2025-09-23 02:42:34,178 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Best model saved at epoch 37 with val rms_score: 0.8464
|
75 |
+
2025-09-23 02:42:36,737 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Epoch 38/100 | Train Loss: 0.0190 | Val rms_score: 0.8701
|
76 |
+
2025-09-23 02:42:39,357 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Epoch 39/100 | Train Loss: 0.0160 | Val rms_score: 0.8599
|
77 |
+
2025-09-23 02:42:41,847 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Epoch 40/100 | Train Loss: 0.0152 | Val rms_score: 0.8645
|
78 |
+
2025-09-23 02:42:44,288 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Epoch 41/100 | Train Loss: 0.0145 | Val rms_score: 0.8609
|
79 |
+
2025-09-23 02:42:47,061 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Epoch 42/100 | Train Loss: 0.0140 | Val rms_score: 0.8764
|
80 |
+
2025-09-23 02:42:49,640 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Epoch 43/100 | Train Loss: 0.0145 | Val rms_score: 0.8608
|
81 |
+
2025-09-23 02:42:52,180 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Epoch 44/100 | Train Loss: 0.0142 | Val rms_score: 0.8709
|
82 |
+
2025-09-23 02:42:54,639 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Epoch 45/100 | Train Loss: 0.0143 | Val rms_score: 0.8550
|
83 |
+
2025-09-23 02:42:56,977 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Epoch 46/100 | Train Loss: 0.0131 | Val rms_score: 0.8627
|
84 |
+
2025-09-23 02:42:59,626 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Epoch 47/100 | Train Loss: 0.0134 | Val rms_score: 0.8655
|
85 |
+
2025-09-23 02:43:01,783 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Epoch 48/100 | Train Loss: 0.0132 | Val rms_score: 0.8674
|
86 |
+
2025-09-23 02:43:04,209 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Epoch 49/100 | Train Loss: 0.0136 | Val rms_score: 0.8595
|
87 |
+
2025-09-23 02:43:06,727 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Epoch 50/100 | Train Loss: 0.0124 | Val rms_score: 0.8654
|
88 |
+
2025-09-23 02:43:08,944 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Epoch 51/100 | Train Loss: 0.0124 | Val rms_score: 0.8549
|
89 |
+
2025-09-23 02:43:11,667 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Epoch 52/100 | Train Loss: 0.0127 | Val rms_score: 0.8602
|
90 |
+
2025-09-23 02:43:14,167 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Epoch 53/100 | Train Loss: 0.0120 | Val rms_score: 0.8662
|
91 |
+
2025-09-23 02:43:16,642 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Epoch 54/100 | Train Loss: 0.0124 | Val rms_score: 0.8644
|
92 |
+
2025-09-23 02:43:19,098 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Epoch 55/100 | Train Loss: 0.0122 | Val rms_score: 0.8525
|
93 |
+
2025-09-23 02:43:21,742 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Epoch 56/100 | Train Loss: 0.0133 | Val rms_score: 0.8786
|
94 |
+
2025-09-23 02:43:24,494 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Epoch 57/100 | Train Loss: 0.0130 | Val rms_score: 0.8562
|
95 |
+
2025-09-23 02:43:27,145 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Epoch 58/100 | Train Loss: 0.0121 | Val rms_score: 0.8701
|
96 |
+
2025-09-23 02:43:29,752 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Epoch 59/100 | Train Loss: 0.0117 | Val rms_score: 0.8625
|
97 |
+
2025-09-23 02:43:32,303 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Epoch 60/100 | Train Loss: 0.0121 | Val rms_score: 0.8638
|
98 |
+
2025-09-23 02:43:34,910 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Epoch 61/100 | Train Loss: 0.0109 | Val rms_score: 0.8556
|
99 |
+
2025-09-23 02:43:37,712 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Epoch 62/100 | Train Loss: 0.0115 | Val rms_score: 0.8630
|
100 |
+
2025-09-23 02:43:40,256 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Epoch 63/100 | Train Loss: 0.0109 | Val rms_score: 0.8572
|
101 |
+
2025-09-23 02:43:42,786 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Epoch 64/100 | Train Loss: 0.0102 | Val rms_score: 0.8631
|
102 |
+
2025-09-23 02:43:45,327 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Epoch 65/100 | Train Loss: 0.0131 | Val rms_score: 0.8740
|
103 |
+
2025-09-23 02:43:47,908 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Epoch 66/100 | Train Loss: 0.0283 | Val rms_score: 0.8802
|
104 |
+
2025-09-23 02:43:51,640 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Epoch 67/100 | Train Loss: 0.0146 | Val rms_score: 0.8759
|
105 |
+
2025-09-23 02:43:54,177 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Epoch 68/100 | Train Loss: 0.0134 | Val rms_score: 0.8607
|
106 |
+
2025-09-23 02:43:56,757 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Epoch 69/100 | Train Loss: 0.0118 | Val rms_score: 0.8376
|
107 |
+
2025-09-23 02:43:56,890 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Global step of best model: 1035
|
108 |
+
2025-09-23 02:43:57,419 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Best model saved at epoch 69 with val rms_score: 0.8376
|
109 |
+
2025-09-23 02:44:00,004 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Epoch 70/100 | Train Loss: 0.0102 | Val rms_score: 0.8538
|
110 |
+
2025-09-23 02:44:02,670 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Epoch 71/100 | Train Loss: 0.0098 | Val rms_score: 0.8538
|
111 |
+
2025-09-23 02:44:05,492 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Epoch 72/100 | Train Loss: 0.0097 | Val rms_score: 0.8680
|
112 |
+
2025-09-23 02:44:08,096 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Epoch 73/100 | Train Loss: 0.0096 | Val rms_score: 0.8598
|
113 |
+
2025-09-23 02:44:10,503 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Epoch 74/100 | Train Loss: 0.0098 | Val rms_score: 0.8698
|
114 |
+
2025-09-23 02:44:12,718 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Epoch 75/100 | Train Loss: 0.0109 | Val rms_score: 0.8882
|
115 |
+
2025-09-23 02:44:14,974 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Epoch 76/100 | Train Loss: 0.0104 | Val rms_score: 0.8611
|
116 |
+
2025-09-23 02:44:17,762 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Epoch 77/100 | Train Loss: 0.0100 | Val rms_score: 0.8770
|
117 |
+
2025-09-23 02:44:20,308 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Epoch 78/100 | Train Loss: 0.0107 | Val rms_score: 0.8710
|
118 |
+
2025-09-23 02:44:22,756 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Epoch 79/100 | Train Loss: 0.0115 | Val rms_score: 0.9117
|
119 |
+
2025-09-23 02:44:25,326 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Epoch 80/100 | Train Loss: 0.0110 | Val rms_score: 0.8715
|
120 |
+
2025-09-23 02:44:27,986 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Epoch 81/100 | Train Loss: 0.0100 | Val rms_score: 0.8619
|
121 |
+
2025-09-23 02:44:30,817 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Epoch 82/100 | Train Loss: 0.0094 | Val rms_score: 0.8700
|
122 |
+
2025-09-23 02:44:33,385 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Epoch 83/100 | Train Loss: 0.0097 | Val rms_score: 0.8573
|
123 |
+
2025-09-23 02:44:35,853 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Epoch 84/100 | Train Loss: 0.0087 | Val rms_score: 0.8785
|
124 |
+
2025-09-23 02:44:38,447 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Epoch 85/100 | Train Loss: 0.0090 | Val rms_score: 0.8672
|
125 |
+
2025-09-23 02:44:40,933 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Epoch 86/100 | Train Loss: 0.0078 | Val rms_score: 0.8800
|
126 |
+
2025-09-23 02:44:43,670 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Epoch 87/100 | Train Loss: 0.0073 | Val rms_score: 0.8631
|
127 |
+
2025-09-23 02:44:46,195 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Epoch 88/100 | Train Loss: 0.0083 | Val rms_score: 0.8601
|
128 |
+
2025-09-23 02:44:48,729 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Epoch 89/100 | Train Loss: 0.0076 | Val rms_score: 0.8657
|
129 |
+
2025-09-23 02:44:51,316 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Epoch 90/100 | Train Loss: 0.0085 | Val rms_score: 0.8888
|
130 |
+
2025-09-23 02:44:53,893 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Epoch 91/100 | Train Loss: 0.0086 | Val rms_score: 0.8486
|
131 |
+
2025-09-23 02:44:56,818 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Epoch 92/100 | Train Loss: 0.0087 | Val rms_score: 0.8533
|
132 |
+
2025-09-23 02:44:59,367 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Epoch 93/100 | Train Loss: 0.0079 | Val rms_score: 0.8582
|
133 |
+
2025-09-23 02:45:01,891 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Epoch 94/100 | Train Loss: 0.0087 | Val rms_score: 0.8679
|
134 |
+
2025-09-23 02:45:04,428 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Epoch 95/100 | Train Loss: 0.0085 | Val rms_score: 0.8630
|
135 |
+
2025-09-23 02:45:07,026 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Epoch 96/100 | Train Loss: 0.0087 | Val rms_score: 0.8877
|
136 |
+
2025-09-23 02:45:09,869 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Epoch 97/100 | Train Loss: 0.0093 | Val rms_score: 0.8410
|
137 |
+
2025-09-23 02:45:12,502 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Epoch 98/100 | Train Loss: 0.0125 | Val rms_score: 0.9683
|
138 |
+
2025-09-23 02:45:15,056 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Epoch 99/100 | Train Loss: 0.0105 | Val rms_score: 0.8497
|
139 |
+
2025-09-23 02:45:17,689 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Epoch 100/100 | Train Loss: 0.0101 | Val rms_score: 0.8951
|
140 |
+
2025-09-23 02:45:18,100 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Test rms_score: 0.8280
|
141 |
+
2025-09-23 02:45:18,397 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Starting triplicate run 2 for dataset delaney at 2025-09-23_02-45-18
|
142 |
+
2025-09-23 02:45:20,833 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Epoch 1/100 | Train Loss: 0.4625 | Val rms_score: 1.1368
|
143 |
+
2025-09-23 02:45:20,833 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Global step of best model: 15
|
144 |
+
2025-09-23 02:45:21,339 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Best model saved at epoch 1 with val rms_score: 1.1368
|
145 |
+
2025-09-23 02:45:23,897 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Epoch 2/100 | Train Loss: 0.1365 | Val rms_score: 1.0073
|
146 |
+
2025-09-23 02:45:24,066 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Global step of best model: 30
|
147 |
+
2025-09-23 02:45:24,567 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Best model saved at epoch 2 with val rms_score: 1.0073
|
148 |
+
2025-09-23 02:45:26,939 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Epoch 3/100 | Train Loss: 0.1052 | Val rms_score: 1.0098
|
149 |
+
2025-09-23 02:45:29,153 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Epoch 4/100 | Train Loss: 0.0833 | Val rms_score: 0.9649
|
150 |
+
2025-09-23 02:45:29,351 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Global step of best model: 60
|
151 |
+
2025-09-23 02:45:29,877 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Best model saved at epoch 4 with val rms_score: 0.9649
|
152 |
+
2025-09-23 02:45:32,472 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Epoch 5/100 | Train Loss: 0.0865 | Val rms_score: 0.9609
|
153 |
+
2025-09-23 02:45:32,643 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Global step of best model: 75
|
154 |
+
2025-09-23 02:45:33,150 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Best model saved at epoch 5 with val rms_score: 0.9609
|
155 |
+
2025-09-23 02:45:35,709 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Epoch 6/100 | Train Loss: 0.0667 | Val rms_score: 0.9568
|
156 |
+
2025-09-23 02:45:36,149 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Global step of best model: 90
|
157 |
+
2025-09-23 02:45:36,656 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Best model saved at epoch 6 with val rms_score: 0.9568
|
158 |
+
2025-09-23 02:45:39,114 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Epoch 7/100 | Train Loss: 0.0465 | Val rms_score: 0.9310
|
159 |
+
2025-09-23 02:45:39,282 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Global step of best model: 105
|
160 |
+
2025-09-23 02:45:39,784 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Best model saved at epoch 7 with val rms_score: 0.9310
|
161 |
+
2025-09-23 02:45:42,337 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Epoch 8/100 | Train Loss: 0.0529 | Val rms_score: 0.9064
|
162 |
+
2025-09-23 02:45:42,523 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Global step of best model: 120
|
163 |
+
2025-09-23 02:45:43,031 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Best model saved at epoch 8 with val rms_score: 0.9064
|
164 |
+
2025-09-23 02:45:45,646 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Epoch 9/100 | Train Loss: 0.0503 | Val rms_score: 0.8984
|
165 |
+
2025-09-23 02:45:45,830 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Global step of best model: 135
|
166 |
+
2025-09-23 02:45:46,346 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Best model saved at epoch 9 with val rms_score: 0.8984
|
167 |
+
2025-09-23 02:45:48,983 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Epoch 10/100 | Train Loss: 0.0495 | Val rms_score: 0.9087
|
168 |
+
2025-09-23 02:45:51,566 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Epoch 11/100 | Train Loss: 0.0406 | Val rms_score: 0.8934
|
169 |
+
2025-09-23 02:45:52,003 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Global step of best model: 165
|
170 |
+
2025-09-23 02:45:52,517 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Best model saved at epoch 11 with val rms_score: 0.8934
|
171 |
+
2025-09-23 02:45:55,140 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Epoch 12/100 | Train Loss: 0.0437 | Val rms_score: 0.9022
|
172 |
+
2025-09-23 02:45:57,711 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Epoch 13/100 | Train Loss: 0.0401 | Val rms_score: 0.9097
|
173 |
+
2025-09-23 02:46:00,276 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Epoch 14/100 | Train Loss: 0.0375 | Val rms_score: 0.8827
|
174 |
+
2025-09-23 02:46:00,445 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Global step of best model: 210
|
175 |
+
2025-09-23 02:46:00,952 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Best model saved at epoch 14 with val rms_score: 0.8827
|
176 |
+
2025-09-23 02:46:03,557 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Epoch 15/100 | Train Loss: 0.0359 | Val rms_score: 0.8941
|
177 |
+
2025-09-23 02:46:05,995 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Epoch 16/100 | Train Loss: 0.0344 | Val rms_score: 0.8850
|
178 |
+
2025-09-23 02:46:08,734 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Epoch 17/100 | Train Loss: 0.0346 | Val rms_score: 0.8865
|
179 |
+
2025-09-23 02:46:11,141 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Epoch 18/100 | Train Loss: 0.0320 | Val rms_score: 0.8697
|
180 |
+
2025-09-23 02:46:11,310 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Global step of best model: 270
|
181 |
+
2025-09-23 02:46:11,825 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Best model saved at epoch 18 with val rms_score: 0.8697
|
182 |
+
2025-09-23 02:46:14,258 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Epoch 19/100 | Train Loss: 0.0309 | Val rms_score: 0.8683
|
183 |
+
2025-09-23 02:46:14,426 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Global step of best model: 285
|
184 |
+
2025-09-23 02:46:14,925 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Best model saved at epoch 19 with val rms_score: 0.8683
|
185 |
+
2025-09-23 02:46:17,397 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Epoch 20/100 | Train Loss: 0.0275 | Val rms_score: 0.8636
|
186 |
+
2025-09-23 02:46:17,566 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Global step of best model: 300
|
187 |
+
2025-09-23 02:46:18,094 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Best model saved at epoch 20 with val rms_score: 0.8636
|
188 |
+
2025-09-23 02:46:20,556 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Epoch 21/100 | Train Loss: 0.0270 | Val rms_score: 0.8652
|
189 |
+
2025-09-23 02:46:23,375 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Epoch 22/100 | Train Loss: 0.0263 | Val rms_score: 0.8603
|
190 |
+
2025-09-23 02:46:23,546 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Global step of best model: 330
|
191 |
+
2025-09-23 02:46:24,048 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Best model saved at epoch 22 with val rms_score: 0.8603
|
192 |
+
2025-09-23 02:46:26,563 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Epoch 23/100 | Train Loss: 0.0238 | Val rms_score: 0.8636
|
193 |
+
2025-09-23 02:46:29,105 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Epoch 24/100 | Train Loss: 0.0243 | Val rms_score: 0.8659
|
194 |
+
2025-09-23 02:46:31,652 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Epoch 25/100 | Train Loss: 0.0233 | Val rms_score: 0.8667
|
195 |
+
2025-09-23 02:46:34,207 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Epoch 26/100 | Train Loss: 0.0228 | Val rms_score: 0.8787
|
196 |
+
2025-09-23 02:46:36,831 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Epoch 27/100 | Train Loss: 0.0223 | Val rms_score: 0.8610
|
197 |
+
2025-09-23 02:46:39,120 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Epoch 28/100 | Train Loss: 0.0241 | Val rms_score: 0.8660
|
198 |
+
2025-09-23 02:46:41,344 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Epoch 29/100 | Train Loss: 0.0207 | Val rms_score: 0.8463
|
199 |
+
2025-09-23 02:46:41,489 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Global step of best model: 435
|
200 |
+
2025-09-23 02:46:41,991 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Best model saved at epoch 29 with val rms_score: 0.8463
|
201 |
+
2025-09-23 02:46:44,054 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Epoch 30/100 | Train Loss: 0.0204 | Val rms_score: 0.8538
|
202 |
+
2025-09-23 02:46:46,599 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Epoch 31/100 | Train Loss: 0.0195 | Val rms_score: 0.8551
|
203 |
+
2025-09-23 02:46:49,497 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Epoch 32/100 | Train Loss: 0.0204 | Val rms_score: 0.8374
|
204 |
+
2025-09-23 02:46:49,664 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Global step of best model: 480
|
205 |
+
2025-09-23 02:46:50,190 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Best model saved at epoch 32 with val rms_score: 0.8374
|
206 |
+
2025-09-23 02:46:52,742 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Epoch 33/100 | Train Loss: 0.0215 | Val rms_score: 0.8372
|
207 |
+
2025-09-23 02:46:52,911 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Global step of best model: 495
|
208 |
+
2025-09-23 02:46:53,421 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Best model saved at epoch 33 with val rms_score: 0.8372
|
209 |
+
2025-09-23 02:46:55,968 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Epoch 34/100 | Train Loss: 0.0213 | Val rms_score: 0.8423
|
210 |
+
2025-09-23 02:46:58,409 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Epoch 35/100 | Train Loss: 0.0198 | Val rms_score: 0.8551
|
211 |
+
2025-09-23 02:47:00,934 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Epoch 36/100 | Train Loss: 0.0180 | Val rms_score: 0.8298
|
212 |
+
2025-09-23 02:47:01,360 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Global step of best model: 540
|
213 |
+
2025-09-23 02:47:01,862 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Best model saved at epoch 36 with val rms_score: 0.8298
|
214 |
+
2025-09-23 02:47:04,457 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Epoch 37/100 | Train Loss: 0.0182 | Val rms_score: 0.8447
|
215 |
+
2025-09-23 02:47:06,997 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Epoch 38/100 | Train Loss: 0.0166 | Val rms_score: 0.8590
|
216 |
+
2025-09-23 02:47:09,587 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Epoch 39/100 | Train Loss: 0.0173 | Val rms_score: 0.8569
|
217 |
+
2025-09-23 02:47:12,162 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Epoch 40/100 | Train Loss: 0.0163 | Val rms_score: 0.8502
|
218 |
+
2025-09-23 02:47:14,715 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Epoch 41/100 | Train Loss: 0.0180 | Val rms_score: 0.8716
|
219 |
+
2025-09-23 02:47:17,476 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Epoch 42/100 | Train Loss: 0.0185 | Val rms_score: 0.8544
|
220 |
+
2025-09-23 02:47:19,956 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Epoch 43/100 | Train Loss: 0.0152 | Val rms_score: 0.8598
|
221 |
+
2025-09-23 02:47:22,376 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Epoch 44/100 | Train Loss: 0.0137 | Val rms_score: 0.8522
|
222 |
+
2025-09-23 02:47:24,873 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Epoch 45/100 | Train Loss: 0.0143 | Val rms_score: 0.8706
|
223 |
+
2025-09-23 02:47:27,348 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Epoch 46/100 | Train Loss: 0.0141 | Val rms_score: 0.8580
|
224 |
+
2025-09-23 02:47:30,104 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Epoch 47/100 | Train Loss: 0.0165 | Val rms_score: 0.8708
|
225 |
+
2025-09-23 02:47:32,605 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Epoch 48/100 | Train Loss: 0.0156 | Val rms_score: 0.8600
|
226 |
+
2025-09-23 02:47:35,215 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Epoch 49/100 | Train Loss: 0.0156 | Val rms_score: 0.8656
|
227 |
+
2025-09-23 02:47:37,784 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Epoch 50/100 | Train Loss: 0.0143 | Val rms_score: 0.8475
|
228 |
+
2025-09-23 02:47:40,349 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Epoch 51/100 | Train Loss: 0.0152 | Val rms_score: 0.8559
|
229 |
+
2025-09-23 02:47:43,234 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Epoch 52/100 | Train Loss: 0.0133 | Val rms_score: 0.8478
|
230 |
+
2025-09-23 02:47:45,831 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Epoch 53/100 | Train Loss: 0.0133 | Val rms_score: 0.8563
|
231 |
+
2025-09-23 02:47:48,411 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Epoch 54/100 | Train Loss: 0.0131 | Val rms_score: 0.8634
|
232 |
+
2025-09-23 02:47:50,439 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Epoch 55/100 | Train Loss: 0.0134 | Val rms_score: 0.8612
|
233 |
+
2025-09-23 02:47:52,673 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Epoch 56/100 | Train Loss: 0.0138 | Val rms_score: 0.8532
|
234 |
+
2025-09-23 02:47:55,193 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Epoch 57/100 | Train Loss: 0.0155 | Val rms_score: 0.8927
|
235 |
+
2025-09-23 02:47:57,669 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Epoch 58/100 | Train Loss: 0.0147 | Val rms_score: 0.8848
|
236 |
+
2025-09-23 02:48:00,145 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Epoch 59/100 | Train Loss: 0.0130 | Val rms_score: 0.8678
|
237 |
+
2025-09-23 02:48:02,615 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Epoch 60/100 | Train Loss: 0.0126 | Val rms_score: 0.8685
|
238 |
+
2025-09-23 02:48:05,140 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Epoch 61/100 | Train Loss: 0.0107 | Val rms_score: 0.8662
|
239 |
+
2025-09-23 02:48:07,845 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Epoch 62/100 | Train Loss: 0.0122 | Val rms_score: 0.8660
|
240 |
+
2025-09-23 02:48:10,375 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Epoch 63/100 | Train Loss: 0.0120 | Val rms_score: 0.8583
|
241 |
+
2025-09-23 02:48:12,914 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Epoch 64/100 | Train Loss: 0.0107 | Val rms_score: 0.8727
|
242 |
+
2025-09-23 02:48:15,484 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Epoch 65/100 | Train Loss: 0.0103 | Val rms_score: 0.8627
|
243 |
+
2025-09-23 02:48:18,042 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Epoch 66/100 | Train Loss: 0.0107 | Val rms_score: 0.8652
|
244 |
+
2025-09-23 02:48:21,778 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Epoch 67/100 | Train Loss: 0.0123 | Val rms_score: 0.8634
|
245 |
+
2025-09-23 02:48:24,354 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Epoch 68/100 | Train Loss: 0.0119 | Val rms_score: 0.8732
|
246 |
+
2025-09-23 02:48:26,893 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Epoch 69/100 | Train Loss: 0.0111 | Val rms_score: 0.8614
|
247 |
+
2025-09-23 02:48:29,434 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Epoch 70/100 | Train Loss: 0.0115 | Val rms_score: 0.8567
|
248 |
+
2025-09-23 02:48:32,014 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Epoch 71/100 | Train Loss: 0.0111 | Val rms_score: 0.8735
|
249 |
+
2025-09-23 02:48:34,807 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Epoch 72/100 | Train Loss: 0.0102 | Val rms_score: 0.8556
|
250 |
+
2025-09-23 02:48:37,411 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Epoch 73/100 | Train Loss: 0.0100 | Val rms_score: 0.8746
|
251 |
+
2025-09-23 02:48:40,037 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Epoch 74/100 | Train Loss: 0.0094 | Val rms_score: 0.8709
|
252 |
+
2025-09-23 02:48:42,663 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Epoch 75/100 | Train Loss: 0.0093 | Val rms_score: 0.8544
|
253 |
+
2025-09-23 02:48:45,218 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Epoch 76/100 | Train Loss: 0.0095 | Val rms_score: 0.8768
|
254 |
+
2025-09-23 02:48:48,105 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Epoch 77/100 | Train Loss: 0.0094 | Val rms_score: 0.8634
|
255 |
+
2025-09-23 02:48:50,619 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Epoch 78/100 | Train Loss: 0.0097 | Val rms_score: 0.8609
|
256 |
+
2025-09-23 02:48:53,212 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Epoch 79/100 | Train Loss: 0.0113 | Val rms_score: 0.8625
|
257 |
+
2025-09-23 02:48:55,806 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Epoch 80/100 | Train Loss: 0.0172 | Val rms_score: 0.8668
|
258 |
+
2025-09-23 02:48:58,335 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Epoch 81/100 | Train Loss: 0.0126 | Val rms_score: 0.8667
|
259 |
+
2025-09-23 02:49:01,220 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Epoch 82/100 | Train Loss: 0.0111 | Val rms_score: 0.8392
|
260 |
+
2025-09-23 02:49:03,606 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Epoch 83/100 | Train Loss: 0.0090 | Val rms_score: 0.8606
|
261 |
+
2025-09-23 02:49:05,718 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Epoch 84/100 | Train Loss: 0.0090 | Val rms_score: 0.8561
|
262 |
+
2025-09-23 02:49:07,997 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Epoch 85/100 | Train Loss: 0.0089 | Val rms_score: 0.8584
|
263 |
+
2025-09-23 02:49:10,502 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Epoch 86/100 | Train Loss: 0.0100 | Val rms_score: 0.8652
|
264 |
+
2025-09-23 02:49:13,236 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Epoch 87/100 | Train Loss: 0.0135 | Val rms_score: 0.8760
|
265 |
+
2025-09-23 02:49:15,744 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Epoch 88/100 | Train Loss: 0.0098 | Val rms_score: 0.8770
|
266 |
+
2025-09-23 02:49:18,325 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Epoch 89/100 | Train Loss: 0.0090 | Val rms_score: 0.8720
|
267 |
+
2025-09-23 02:49:20,922 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Epoch 90/100 | Train Loss: 0.0087 | Val rms_score: 0.8687
|
268 |
+
2025-09-23 02:49:23,454 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Epoch 91/100 | Train Loss: 0.0089 | Val rms_score: 0.8764
|
269 |
+
2025-09-23 02:49:26,278 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Epoch 92/100 | Train Loss: 0.0087 | Val rms_score: 0.8747
|
270 |
+
2025-09-23 02:49:28,847 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Epoch 93/100 | Train Loss: 0.0085 | Val rms_score: 0.8824
|
271 |
+
2025-09-23 02:49:31,453 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Epoch 94/100 | Train Loss: 0.0092 | Val rms_score: 0.8616
|
272 |
+
2025-09-23 02:49:34,057 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Epoch 95/100 | Train Loss: 0.0090 | Val rms_score: 0.8689
|
273 |
+
2025-09-23 02:49:36,689 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Epoch 96/100 | Train Loss: 0.0089 | Val rms_score: 0.8820
|
274 |
+
2025-09-23 02:49:39,545 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Epoch 97/100 | Train Loss: 0.0080 | Val rms_score: 0.8636
|
275 |
+
2025-09-23 02:49:42,051 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Epoch 98/100 | Train Loss: 0.0080 | Val rms_score: 0.8688
|
276 |
+
2025-09-23 02:49:44,606 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Epoch 99/100 | Train Loss: 0.0084 | Val rms_score: 0.8678
|
277 |
+
2025-09-23 02:49:47,114 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Epoch 100/100 | Train Loss: 0.0085 | Val rms_score: 0.8680
|
278 |
+
2025-09-23 02:49:47,518 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Test rms_score: 0.8190
|
279 |
+
2025-09-23 02:49:47,823 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Starting triplicate run 3 for dataset delaney at 2025-09-23_02-49-47
|
280 |
+
2025-09-23 02:49:50,191 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Epoch 1/100 | Train Loss: 0.4375 | Val rms_score: 1.1555
|
281 |
+
2025-09-23 02:49:50,191 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Global step of best model: 15
|
282 |
+
2025-09-23 02:49:50,693 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Best model saved at epoch 1 with val rms_score: 1.1555
|
283 |
+
2025-09-23 02:49:53,212 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Epoch 2/100 | Train Loss: 0.1396 | Val rms_score: 1.0063
|
284 |
+
2025-09-23 02:49:53,377 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Global step of best model: 30
|
285 |
+
2025-09-23 02:49:53,879 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Best model saved at epoch 2 with val rms_score: 1.0063
|
286 |
+
2025-09-23 02:49:56,471 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Epoch 3/100 | Train Loss: 0.1005 | Val rms_score: 0.9937
|
287 |
+
2025-09-23 02:49:56,643 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Global step of best model: 45
|
288 |
+
2025-09-23 02:49:57,142 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Best model saved at epoch 3 with val rms_score: 0.9937
|
289 |
+
2025-09-23 02:49:59,659 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Epoch 4/100 | Train Loss: 0.0792 | Val rms_score: 0.9603
|
290 |
+
2025-09-23 02:49:59,828 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Global step of best model: 60
|
291 |
+
2025-09-23 02:50:00,331 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Best model saved at epoch 4 with val rms_score: 0.9603
|
292 |
+
2025-09-23 02:50:02,850 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Epoch 5/100 | Train Loss: 0.0740 | Val rms_score: 0.9567
|
293 |
+
2025-09-23 02:50:03,019 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Global step of best model: 75
|
294 |
+
2025-09-23 02:50:03,522 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Best model saved at epoch 5 with val rms_score: 0.9567
|
295 |
+
2025-09-23 02:50:05,952 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Epoch 6/100 | Train Loss: 0.0620 | Val rms_score: 0.9367
|
296 |
+
2025-09-23 02:50:06,393 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Global step of best model: 90
|
297 |
+
2025-09-23 02:50:06,902 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Best model saved at epoch 6 with val rms_score: 0.9367
|
298 |
+
2025-09-23 02:50:09,368 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Epoch 7/100 | Train Loss: 0.0490 | Val rms_score: 0.9225
|
299 |
+
2025-09-23 02:50:09,535 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Global step of best model: 105
|
300 |
+
2025-09-23 02:50:10,044 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Best model saved at epoch 7 with val rms_score: 0.9225
|
301 |
+
2025-09-23 02:50:12,519 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Epoch 8/100 | Train Loss: 0.0547 | Val rms_score: 0.9042
|
302 |
+
2025-09-23 02:50:12,689 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Global step of best model: 120
|
303 |
+
2025-09-23 02:50:13,190 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Best model saved at epoch 8 with val rms_score: 0.9042
|
304 |
+
2025-09-23 02:50:15,700 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Epoch 9/100 | Train Loss: 0.0505 | Val rms_score: 0.8972
|
305 |
+
2025-09-23 02:50:15,887 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Global step of best model: 135
|
306 |
+
2025-09-23 02:50:16,397 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Best model saved at epoch 9 with val rms_score: 0.8972
|
307 |
+
2025-09-23 02:50:18,523 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Epoch 10/100 | Train Loss: 0.0469 | Val rms_score: 0.8759
|
308 |
+
2025-09-23 02:50:18,690 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Global step of best model: 150
|
309 |
+
2025-09-23 02:50:19,195 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Best model saved at epoch 10 with val rms_score: 0.8759
|
310 |
+
2025-09-23 02:50:21,465 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Epoch 11/100 | Train Loss: 0.0440 | Val rms_score: 0.8866
|
311 |
+
2025-09-23 02:50:24,267 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Epoch 12/100 | Train Loss: 0.0396 | Val rms_score: 0.8654
|
312 |
+
2025-09-23 02:50:24,433 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Global step of best model: 180
|
313 |
+
2025-09-23 02:50:24,939 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Best model saved at epoch 12 with val rms_score: 0.8654
|
314 |
+
2025-09-23 02:50:27,464 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Epoch 13/100 | Train Loss: 0.0375 | Val rms_score: 0.8610
|
315 |
+
2025-09-23 02:50:27,642 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Global step of best model: 195
|
316 |
+
2025-09-23 02:50:28,145 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Best model saved at epoch 13 with val rms_score: 0.8610
|
317 |
+
2025-09-23 02:50:30,735 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Epoch 14/100 | Train Loss: 0.0322 | Val rms_score: 0.8591
|
318 |
+
2025-09-23 02:50:30,902 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Global step of best model: 210
|
319 |
+
2025-09-23 02:50:31,403 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Best model saved at epoch 14 with val rms_score: 0.8591
|
320 |
+
2025-09-23 02:50:33,947 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Epoch 15/100 | Train Loss: 0.0339 | Val rms_score: 0.8729
|
321 |
+
2025-09-23 02:50:36,546 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Epoch 16/100 | Train Loss: 0.0336 | Val rms_score: 0.8723
|
322 |
+
2025-09-23 02:50:39,362 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Epoch 17/100 | Train Loss: 0.0318 | Val rms_score: 0.8572
|
323 |
+
2025-09-23 02:50:39,532 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Global step of best model: 255
|
324 |
+
2025-09-23 02:50:40,046 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Best model saved at epoch 17 with val rms_score: 0.8572
|
325 |
+
2025-09-23 02:50:42,637 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Epoch 18/100 | Train Loss: 0.0316 | Val rms_score: 0.8488
|
326 |
+
2025-09-23 02:50:42,816 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Global step of best model: 270
|
327 |
+
2025-09-23 02:50:43,315 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Best model saved at epoch 18 with val rms_score: 0.8488
|
328 |
+
2025-09-23 02:50:45,886 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Epoch 19/100 | Train Loss: 0.0276 | Val rms_score: 0.8610
|
329 |
+
2025-09-23 02:50:48,491 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Epoch 20/100 | Train Loss: 0.0268 | Val rms_score: 0.8490
|
330 |
+
2025-09-23 02:50:51,038 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Epoch 21/100 | Train Loss: 0.0270 | Val rms_score: 0.8514
|
331 |
+
2025-09-23 02:50:53,877 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Epoch 22/100 | Train Loss: 0.0255 | Val rms_score: 0.8350
|
332 |
+
2025-09-23 02:50:54,054 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Global step of best model: 330
|
333 |
+
2025-09-23 02:50:54,563 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Best model saved at epoch 22 with val rms_score: 0.8350
|
334 |
+
2025-09-23 02:50:57,125 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Epoch 23/100 | Train Loss: 0.0306 | Val rms_score: 0.8625
|
335 |
+
2025-09-23 02:50:59,702 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Epoch 24/100 | Train Loss: 0.0260 | Val rms_score: 0.8454
|
336 |
+
2025-09-23 02:51:02,223 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Epoch 25/100 | Train Loss: 0.0243 | Val rms_score: 0.8508
|
337 |
+
2025-09-23 02:51:04,717 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Epoch 26/100 | Train Loss: 0.0237 | Val rms_score: 0.8420
|
338 |
+
2025-09-23 02:51:07,485 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Epoch 27/100 | Train Loss: 0.0230 | Val rms_score: 0.8435
|
339 |
+
2025-09-23 02:51:09,946 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Epoch 28/100 | Train Loss: 0.0214 | Val rms_score: 0.8487
|
340 |
+
2025-09-23 02:51:12,530 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Epoch 29/100 | Train Loss: 0.0198 | Val rms_score: 0.8376
|
341 |
+
2025-09-23 02:51:15,093 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Epoch 30/100 | Train Loss: 0.0186 | Val rms_score: 0.8475
|
342 |
+
2025-09-23 02:51:17,720 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Epoch 31/100 | Train Loss: 0.0191 | Val rms_score: 0.8616
|
343 |
+
2025-09-23 02:51:20,522 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Epoch 32/100 | Train Loss: 0.0216 | Val rms_score: 0.8479
|
344 |
+
2025-09-23 02:51:23,104 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Epoch 33/100 | Train Loss: 0.0193 | Val rms_score: 0.8464
|
345 |
+
2025-09-23 02:51:25,557 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Epoch 34/100 | Train Loss: 0.0193 | Val rms_score: 0.8494
|
346 |
+
2025-09-23 02:51:28,195 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Epoch 35/100 | Train Loss: 0.0197 | Val rms_score: 0.8504
|
347 |
+
2025-09-23 02:51:30,427 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Epoch 36/100 | Train Loss: 0.0181 | Val rms_score: 0.8410
|
348 |
+
2025-09-23 02:51:32,923 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Epoch 37/100 | Train Loss: 0.0176 | Val rms_score: 0.8758
|
349 |
+
2025-09-23 02:51:35,166 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Epoch 38/100 | Train Loss: 0.0154 | Val rms_score: 0.8369
|
350 |
+
2025-09-23 02:51:37,794 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Epoch 39/100 | Train Loss: 0.0152 | Val rms_score: 0.8589
|
351 |
+
2025-09-23 02:51:40,446 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Epoch 40/100 | Train Loss: 0.0150 | Val rms_score: 0.8564
|
352 |
+
2025-09-23 02:51:42,813 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Epoch 41/100 | Train Loss: 0.0155 | Val rms_score: 0.8442
|
353 |
+
2025-09-23 02:51:45,690 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Epoch 42/100 | Train Loss: 0.0169 | Val rms_score: 0.8731
|
354 |
+
2025-09-23 02:51:48,315 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Epoch 43/100 | Train Loss: 0.0137 | Val rms_score: 0.8502
|
355 |
+
2025-09-23 02:51:50,911 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Epoch 44/100 | Train Loss: 0.0135 | Val rms_score: 0.8559
|
356 |
+
2025-09-23 02:51:53,495 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Epoch 45/100 | Train Loss: 0.0130 | Val rms_score: 0.8512
|
357 |
+
2025-09-23 02:51:56,029 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Epoch 46/100 | Train Loss: 0.0134 | Val rms_score: 0.8518
|
358 |
+
2025-09-23 02:51:58,802 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Epoch 47/100 | Train Loss: 0.0156 | Val rms_score: 0.8550
|
359 |
+
2025-09-23 02:52:01,356 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Epoch 48/100 | Train Loss: 0.0126 | Val rms_score: 0.8520
|
360 |
+
2025-09-23 02:52:03,925 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Epoch 49/100 | Train Loss: 0.0144 | Val rms_score: 0.8492
|
361 |
+
2025-09-23 02:52:06,557 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Epoch 50/100 | Train Loss: 0.0146 | Val rms_score: 0.8740
|
362 |
+
2025-09-23 02:52:09,144 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Epoch 51/100 | Train Loss: 0.0147 | Val rms_score: 0.8724
|
363 |
+
2025-09-23 02:52:12,016 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Epoch 52/100 | Train Loss: 0.0128 | Val rms_score: 0.8593
|
364 |
+
2025-09-23 02:52:14,605 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Epoch 53/100 | Train Loss: 0.0136 | Val rms_score: 0.8739
|
365 |
+
2025-09-23 02:52:17,192 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Epoch 54/100 | Train Loss: 0.0123 | Val rms_score: 0.8538
|
366 |
+
2025-09-23 02:52:19,776 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Epoch 55/100 | Train Loss: 0.0123 | Val rms_score: 0.8728
|
367 |
+
2025-09-23 02:52:22,397 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Epoch 56/100 | Train Loss: 0.0124 | Val rms_score: 0.8705
|
368 |
+
2025-09-23 02:52:25,257 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Epoch 57/100 | Train Loss: 0.0138 | Val rms_score: 0.8456
|
369 |
+
2025-09-23 02:52:27,771 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Epoch 58/100 | Train Loss: 0.0156 | Val rms_score: 0.8611
|
370 |
+
2025-09-23 02:52:30,225 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Epoch 59/100 | Train Loss: 0.0132 | Val rms_score: 0.8639
|
371 |
+
2025-09-23 02:52:32,702 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Epoch 60/100 | Train Loss: 0.0111 | Val rms_score: 0.8536
|
372 |
+
2025-09-23 02:52:35,175 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Epoch 61/100 | Train Loss: 0.0103 | Val rms_score: 0.8722
|
373 |
+
2025-09-23 02:52:37,545 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Epoch 62/100 | Train Loss: 0.0109 | Val rms_score: 0.8696
|
374 |
+
2025-09-23 02:52:40,047 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Epoch 63/100 | Train Loss: 0.0109 | Val rms_score: 0.8621
|
375 |
+
2025-09-23 02:52:42,454 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Epoch 64/100 | Train Loss: 0.0107 | Val rms_score: 0.8643
|
376 |
+
2025-09-23 02:52:44,627 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Epoch 65/100 | Train Loss: 0.0100 | Val rms_score: 0.8569
|
377 |
+
2025-09-23 02:52:46,914 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Epoch 66/100 | Train Loss: 0.0103 | Val rms_score: 0.8686
|
378 |
+
2025-09-23 02:52:50,254 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Epoch 67/100 | Train Loss: 0.0102 | Val rms_score: 0.8612
|
379 |
+
2025-09-23 02:52:52,901 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Epoch 68/100 | Train Loss: 0.0094 | Val rms_score: 0.8589
|
380 |
+
2025-09-23 02:52:55,520 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Epoch 69/100 | Train Loss: 0.0087 | Val rms_score: 0.8585
|
381 |
+
2025-09-23 02:52:58,147 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Epoch 70/100 | Train Loss: 0.0091 | Val rms_score: 0.8606
|
382 |
+
2025-09-23 02:53:00,813 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Epoch 71/100 | Train Loss: 0.0092 | Val rms_score: 0.8657
|
383 |
+
2025-09-23 02:53:03,603 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Epoch 72/100 | Train Loss: 0.0104 | Val rms_score: 0.8727
|
384 |
+
2025-09-23 02:53:06,158 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Epoch 73/100 | Train Loss: 0.0107 | Val rms_score: 0.8476
|
385 |
+
2025-09-23 02:53:08,722 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Epoch 74/100 | Train Loss: 0.0098 | Val rms_score: 0.8778
|
386 |
+
2025-09-23 02:53:11,261 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Epoch 75/100 | Train Loss: 0.0092 | Val rms_score: 0.8550
|
387 |
+
2025-09-23 02:53:13,795 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Epoch 76/100 | Train Loss: 0.0091 | Val rms_score: 0.8711
|
388 |
+
2025-09-23 02:53:16,610 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Epoch 77/100 | Train Loss: 0.0091 | Val rms_score: 0.8592
|
389 |
+
2025-09-23 02:53:19,167 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Epoch 78/100 | Train Loss: 0.0085 | Val rms_score: 0.8624
|
390 |
+
2025-09-23 02:53:21,691 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Epoch 79/100 | Train Loss: 0.0085 | Val rms_score: 0.8599
|
391 |
+
2025-09-23 02:53:24,211 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Epoch 80/100 | Train Loss: 0.0084 | Val rms_score: 0.8651
|
392 |
+
2025-09-23 02:53:26,806 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Epoch 81/100 | Train Loss: 0.0088 | Val rms_score: 0.8681
|
393 |
+
2025-09-23 02:53:29,740 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Epoch 82/100 | Train Loss: 0.0090 | Val rms_score: 0.8668
|
394 |
+
2025-09-23 02:53:32,337 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Epoch 83/100 | Train Loss: 0.0081 | Val rms_score: 0.8662
|
395 |
+
2025-09-23 02:53:34,902 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Epoch 84/100 | Train Loss: 0.0082 | Val rms_score: 0.8645
|
396 |
+
2025-09-23 02:53:37,468 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Epoch 85/100 | Train Loss: 0.0085 | Val rms_score: 0.8599
|
397 |
+
2025-09-23 02:53:40,077 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Epoch 86/100 | Train Loss: 0.0085 | Val rms_score: 0.8646
|
398 |
+
2025-09-23 02:53:42,825 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Epoch 87/100 | Train Loss: 0.0086 | Val rms_score: 0.8668
|
399 |
+
2025-09-23 02:53:45,393 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Epoch 88/100 | Train Loss: 0.0090 | Val rms_score: 0.8645
|
400 |
+
2025-09-23 02:53:47,842 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Epoch 89/100 | Train Loss: 0.0087 | Val rms_score: 0.8715
|
401 |
+
2025-09-23 02:53:50,206 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Epoch 90/100 | Train Loss: 0.0087 | Val rms_score: 0.8732
|
402 |
+
2025-09-23 02:53:52,734 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Epoch 91/100 | Train Loss: 0.0084 | Val rms_score: 0.8720
|
403 |
+
2025-09-23 02:53:55,388 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Epoch 92/100 | Train Loss: 0.0079 | Val rms_score: 0.8629
|
404 |
+
2025-09-23 02:53:57,847 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Epoch 93/100 | Train Loss: 0.0079 | Val rms_score: 0.8666
|
405 |
+
2025-09-23 02:54:00,127 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Epoch 94/100 | Train Loss: 0.0081 | Val rms_score: 0.8832
|
406 |
+
2025-09-23 02:54:02,384 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Epoch 95/100 | Train Loss: 0.0074 | Val rms_score: 0.8638
|
407 |
+
2025-09-23 02:54:04,971 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Epoch 96/100 | Train Loss: 0.0078 | Val rms_score: 0.8654
|
408 |
+
2025-09-23 02:54:07,746 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Epoch 97/100 | Train Loss: 0.0070 | Val rms_score: 0.8736
|
409 |
+
2025-09-23 02:54:10,325 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Epoch 98/100 | Train Loss: 0.0072 | Val rms_score: 0.8704
|
410 |
+
2025-09-23 02:54:12,767 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Epoch 99/100 | Train Loss: 0.0077 | Val rms_score: 0.8697
|
411 |
+
2025-09-23 02:54:15,292 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Epoch 100/100 | Train Loss: 0.0075 | Val rms_score: 0.8601
|
412 |
+
2025-09-23 02:54:15,698 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Test rms_score: 0.8005
|
413 |
+
2025-09-23 02:54:15,987 - logs_modchembert_delaney_epochs100_batch_size64 - INFO - Final Triplicate Test Results — Avg rms_score: 0.8158, Std Dev: 0.0115
|
logs_modchembert_regression_ModChemBERT-MLM-DAPT-TAFT-OPT/modchembert_deepchem_splits_run_freesolv_epochs100_batch_size32_20250923_025415.log
ADDED
@@ -0,0 +1,365 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
2025-09-23 02:54:15,989 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Running benchmark for dataset: freesolv
|
2 |
+
2025-09-23 02:54:15,989 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - dataset: freesolv, tasks: ['y'], epochs: 100, learning rate: 3e-05, transform: True
|
3 |
+
2025-09-23 02:54:16,009 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Starting triplicate run 1 for dataset freesolv at 2025-09-23_02-54-16
|
4 |
+
2025-09-23 02:54:18,284 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 1/100 | Train Loss: 0.4485 | Val rms_score: 1.0926
|
5 |
+
2025-09-23 02:54:18,284 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Global step of best model: 17
|
6 |
+
2025-09-23 02:54:18,804 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Best model saved at epoch 1 with val rms_score: 1.0926
|
7 |
+
2025-09-23 02:54:21,328 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 2/100 | Train Loss: 0.1811 | Val rms_score: 1.0750
|
8 |
+
2025-09-23 02:54:21,497 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Global step of best model: 34
|
9 |
+
2025-09-23 02:54:22,006 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Best model saved at epoch 2 with val rms_score: 1.0750
|
10 |
+
2025-09-23 02:54:24,465 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 3/100 | Train Loss: 0.1232 | Val rms_score: 0.9377
|
11 |
+
2025-09-23 02:54:24,634 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Global step of best model: 51
|
12 |
+
2025-09-23 02:54:25,173 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Best model saved at epoch 3 with val rms_score: 0.9377
|
13 |
+
2025-09-23 02:54:27,680 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 4/100 | Train Loss: 0.0974 | Val rms_score: 0.8618
|
14 |
+
2025-09-23 02:54:27,861 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Global step of best model: 68
|
15 |
+
2025-09-23 02:54:28,388 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Best model saved at epoch 4 with val rms_score: 0.8618
|
16 |
+
2025-09-23 02:54:30,899 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 5/100 | Train Loss: 0.0795 | Val rms_score: 0.8270
|
17 |
+
2025-09-23 02:54:31,067 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Global step of best model: 85
|
18 |
+
2025-09-23 02:54:31,583 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Best model saved at epoch 5 with val rms_score: 0.8270
|
19 |
+
2025-09-23 02:54:34,092 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 6/100 | Train Loss: 0.0287 | Val rms_score: 0.8375
|
20 |
+
2025-09-23 02:54:36,904 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 7/100 | Train Loss: 0.0407 | Val rms_score: 0.8378
|
21 |
+
2025-09-23 02:54:39,410 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 8/100 | Train Loss: 0.0370 | Val rms_score: 0.8247
|
22 |
+
2025-09-23 02:54:39,591 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Global step of best model: 136
|
23 |
+
2025-09-23 02:54:40,106 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Best model saved at epoch 8 with val rms_score: 0.8247
|
24 |
+
2025-09-23 02:54:42,560 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 9/100 | Train Loss: 0.0331 | Val rms_score: 0.8397
|
25 |
+
2025-09-23 02:54:45,108 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 10/100 | Train Loss: 0.0303 | Val rms_score: 0.8260
|
26 |
+
2025-09-23 02:54:47,657 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 11/100 | Train Loss: 0.0574 | Val rms_score: 0.7297
|
27 |
+
2025-09-23 02:54:48,095 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Global step of best model: 187
|
28 |
+
2025-09-23 02:54:48,602 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Best model saved at epoch 11 with val rms_score: 0.7297
|
29 |
+
2025-09-23 02:54:51,082 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 12/100 | Train Loss: 0.4707 | Val rms_score: 1.0318
|
30 |
+
2025-09-23 02:54:53,462 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 13/100 | Train Loss: 0.1498 | Val rms_score: 0.9555
|
31 |
+
2025-09-23 02:54:55,938 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 14/100 | Train Loss: 0.0928 | Val rms_score: 0.9122
|
32 |
+
2025-09-23 02:54:58,351 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 15/100 | Train Loss: 0.0570 | Val rms_score: 0.8612
|
33 |
+
2025-09-23 02:55:00,766 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 16/100 | Train Loss: 0.0457 | Val rms_score: 0.8743
|
34 |
+
2025-09-23 02:55:03,461 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 17/100 | Train Loss: 0.0368 | Val rms_score: 0.8688
|
35 |
+
2025-09-23 02:55:05,861 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 18/100 | Train Loss: 0.0301 | Val rms_score: 0.8608
|
36 |
+
2025-09-23 02:55:07,964 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 19/100 | Train Loss: 0.0324 | Val rms_score: 0.8651
|
37 |
+
2025-09-23 02:55:10,222 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 20/100 | Train Loss: 0.0301 | Val rms_score: 0.8622
|
38 |
+
2025-09-23 02:55:12,723 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 21/100 | Train Loss: 0.0310 | Val rms_score: 0.8614
|
39 |
+
2025-09-23 02:55:15,366 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 22/100 | Train Loss: 0.0285 | Val rms_score: 0.8673
|
40 |
+
2025-09-23 02:55:17,832 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 23/100 | Train Loss: 0.0273 | Val rms_score: 0.8601
|
41 |
+
2025-09-23 02:55:20,293 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 24/100 | Train Loss: 0.0356 | Val rms_score: 0.8589
|
42 |
+
2025-09-23 02:55:22,748 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 25/100 | Train Loss: 0.0237 | Val rms_score: 0.8684
|
43 |
+
2025-09-23 02:55:25,272 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 26/100 | Train Loss: 0.0222 | Val rms_score: 0.8581
|
44 |
+
2025-09-23 02:55:28,199 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 27/100 | Train Loss: 0.0226 | Val rms_score: 0.8625
|
45 |
+
2025-09-23 02:55:30,682 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 28/100 | Train Loss: 0.0215 | Val rms_score: 0.8589
|
46 |
+
2025-09-23 02:55:33,204 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 29/100 | Train Loss: 0.0194 | Val rms_score: 0.8571
|
47 |
+
2025-09-23 02:55:35,674 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 30/100 | Train Loss: 0.0187 | Val rms_score: 0.8642
|
48 |
+
2025-09-23 02:55:38,188 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 31/100 | Train Loss: 0.0257 | Val rms_score: 0.8851
|
49 |
+
2025-09-23 02:55:40,979 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 32/100 | Train Loss: 0.0317 | Val rms_score: 0.8889
|
50 |
+
2025-09-23 02:55:43,390 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 33/100 | Train Loss: 0.0263 | Val rms_score: 0.8751
|
51 |
+
2025-09-23 02:55:45,852 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 34/100 | Train Loss: 0.0232 | Val rms_score: 0.8491
|
52 |
+
2025-09-23 02:55:48,281 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 35/100 | Train Loss: 0.0173 | Val rms_score: 0.8536
|
53 |
+
2025-09-23 02:55:50,727 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 36/100 | Train Loss: 0.0200 | Val rms_score: 0.8592
|
54 |
+
2025-09-23 02:55:53,472 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 37/100 | Train Loss: 0.0216 | Val rms_score: 0.8714
|
55 |
+
2025-09-23 02:55:55,933 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 38/100 | Train Loss: 0.0172 | Val rms_score: 0.8501
|
56 |
+
2025-09-23 02:55:58,349 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 39/100 | Train Loss: 0.0172 | Val rms_score: 0.8457
|
57 |
+
2025-09-23 02:56:00,779 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 40/100 | Train Loss: 0.0160 | Val rms_score: 0.8430
|
58 |
+
2025-09-23 02:56:03,059 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 41/100 | Train Loss: 0.0146 | Val rms_score: 0.8384
|
59 |
+
2025-09-23 02:56:05,851 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 42/100 | Train Loss: 0.0144 | Val rms_score: 0.8373
|
60 |
+
2025-09-23 02:56:08,316 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 43/100 | Train Loss: 0.0248 | Val rms_score: 0.8799
|
61 |
+
2025-09-23 02:56:10,743 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 44/100 | Train Loss: 0.0499 | Val rms_score: 0.8355
|
62 |
+
2025-09-23 02:56:13,234 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 45/100 | Train Loss: 0.0287 | Val rms_score: 0.8558
|
63 |
+
2025-09-23 02:56:15,380 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 46/100 | Train Loss: 0.0215 | Val rms_score: 0.8324
|
64 |
+
2025-09-23 02:56:18,061 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 47/100 | Train Loss: 0.0163 | Val rms_score: 0.8215
|
65 |
+
2025-09-23 02:56:20,582 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 48/100 | Train Loss: 0.0164 | Val rms_score: 0.8250
|
66 |
+
2025-09-23 02:56:23,078 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 49/100 | Train Loss: 0.0164 | Val rms_score: 0.8277
|
67 |
+
2025-09-23 02:56:25,583 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 50/100 | Train Loss: 0.0125 | Val rms_score: 0.8242
|
68 |
+
2025-09-23 02:56:28,179 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 51/100 | Train Loss: 0.0131 | Val rms_score: 0.8174
|
69 |
+
2025-09-23 02:56:30,923 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 52/100 | Train Loss: 0.0223 | Val rms_score: 0.8367
|
70 |
+
2025-09-23 02:56:33,441 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 53/100 | Train Loss: 0.0070 | Val rms_score: 0.8342
|
71 |
+
2025-09-23 02:56:35,838 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 54/100 | Train Loss: 0.0124 | Val rms_score: 0.8261
|
72 |
+
2025-09-23 02:56:38,286 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 55/100 | Train Loss: 0.0133 | Val rms_score: 0.8349
|
73 |
+
2025-09-23 02:56:40,721 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 56/100 | Train Loss: 0.0130 | Val rms_score: 0.8346
|
74 |
+
2025-09-23 02:56:43,375 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 57/100 | Train Loss: 0.0115 | Val rms_score: 0.8326
|
75 |
+
2025-09-23 02:56:45,804 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 58/100 | Train Loss: 0.0112 | Val rms_score: 0.8220
|
76 |
+
2025-09-23 02:56:49,190 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 59/100 | Train Loss: 0.0112 | Val rms_score: 0.8197
|
77 |
+
2025-09-23 02:56:51,674 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 60/100 | Train Loss: 0.0125 | Val rms_score: 0.8299
|
78 |
+
2025-09-23 02:56:54,217 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 61/100 | Train Loss: 0.0107 | Val rms_score: 0.8354
|
79 |
+
2025-09-23 02:56:56,970 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 62/100 | Train Loss: 0.0103 | Val rms_score: 0.8241
|
80 |
+
2025-09-23 02:56:59,523 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 63/100 | Train Loss: 0.0097 | Val rms_score: 0.8222
|
81 |
+
2025-09-23 02:57:02,033 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 64/100 | Train Loss: 0.0093 | Val rms_score: 0.8295
|
82 |
+
2025-09-23 02:57:04,580 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 65/100 | Train Loss: 0.0070 | Val rms_score: 0.8306
|
83 |
+
2025-09-23 02:57:07,077 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 66/100 | Train Loss: 0.0098 | Val rms_score: 0.8264
|
84 |
+
2025-09-23 02:57:09,675 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 67/100 | Train Loss: 0.0099 | Val rms_score: 0.8285
|
85 |
+
2025-09-23 02:57:12,130 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 68/100 | Train Loss: 0.0083 | Val rms_score: 0.8253
|
86 |
+
2025-09-23 02:57:14,631 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 69/100 | Train Loss: 0.0083 | Val rms_score: 0.8326
|
87 |
+
2025-09-23 02:57:17,138 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 70/100 | Train Loss: 0.0077 | Val rms_score: 0.8273
|
88 |
+
2025-09-23 02:57:19,586 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 71/100 | Train Loss: 0.0127 | Val rms_score: 0.8287
|
89 |
+
2025-09-23 02:57:22,184 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 72/100 | Train Loss: 0.0080 | Val rms_score: 0.8301
|
90 |
+
2025-09-23 02:57:24,390 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 73/100 | Train Loss: 0.0075 | Val rms_score: 0.8294
|
91 |
+
2025-09-23 02:57:26,600 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 74/100 | Train Loss: 0.0083 | Val rms_score: 0.8297
|
92 |
+
2025-09-23 02:57:29,121 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 75/100 | Train Loss: 0.0156 | Val rms_score: 0.8841
|
93 |
+
2025-09-23 02:57:31,625 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 76/100 | Train Loss: 0.0425 | Val rms_score: 0.9610
|
94 |
+
2025-09-23 02:57:34,360 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 77/100 | Train Loss: 0.0190 | Val rms_score: 0.9405
|
95 |
+
2025-09-23 02:57:36,743 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 78/100 | Train Loss: 0.0149 | Val rms_score: 0.9254
|
96 |
+
2025-09-23 02:57:39,224 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 79/100 | Train Loss: 0.0134 | Val rms_score: 0.9125
|
97 |
+
2025-09-23 02:57:41,699 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 80/100 | Train Loss: 0.0131 | Val rms_score: 0.9131
|
98 |
+
2025-09-23 02:57:44,202 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 81/100 | Train Loss: 0.0102 | Val rms_score: 0.9082
|
99 |
+
2025-09-23 02:57:46,925 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 82/100 | Train Loss: 0.0097 | Val rms_score: 0.8985
|
100 |
+
2025-09-23 02:57:49,397 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 83/100 | Train Loss: 0.0074 | Val rms_score: 0.8979
|
101 |
+
2025-09-23 02:57:51,855 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 84/100 | Train Loss: 0.0097 | Val rms_score: 0.9002
|
102 |
+
2025-09-23 02:57:54,337 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 85/100 | Train Loss: 0.0096 | Val rms_score: 0.8960
|
103 |
+
2025-09-23 02:57:56,787 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 86/100 | Train Loss: 0.0090 | Val rms_score: 0.8925
|
104 |
+
2025-09-23 02:57:59,532 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 87/100 | Train Loss: 0.0081 | Val rms_score: 0.8938
|
105 |
+
2025-09-23 02:58:01,927 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 88/100 | Train Loss: 0.0091 | Val rms_score: 0.8964
|
106 |
+
2025-09-23 02:58:04,341 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 89/100 | Train Loss: 0.0088 | Val rms_score: 0.8911
|
107 |
+
2025-09-23 02:58:06,819 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 90/100 | Train Loss: 0.0090 | Val rms_score: 0.8812
|
108 |
+
2025-09-23 02:58:09,263 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 91/100 | Train Loss: 0.0088 | Val rms_score: 0.8851
|
109 |
+
2025-09-23 02:58:12,006 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 92/100 | Train Loss: 0.0080 | Val rms_score: 0.8755
|
110 |
+
2025-09-23 02:58:14,394 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 93/100 | Train Loss: 0.0079 | Val rms_score: 0.8861
|
111 |
+
2025-09-23 02:58:16,869 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 94/100 | Train Loss: 0.0073 | Val rms_score: 0.8851
|
112 |
+
2025-09-23 02:58:19,349 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 95/100 | Train Loss: 0.0076 | Val rms_score: 0.8853
|
113 |
+
2025-09-23 02:58:21,905 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 96/100 | Train Loss: 0.0094 | Val rms_score: 0.8783
|
114 |
+
2025-09-23 02:58:24,690 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 97/100 | Train Loss: 0.0141 | Val rms_score: 0.8819
|
115 |
+
2025-09-23 02:58:27,203 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 98/100 | Train Loss: 0.0107 | Val rms_score: 0.8767
|
116 |
+
2025-09-23 02:58:29,717 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 99/100 | Train Loss: 0.0081 | Val rms_score: 0.8752
|
117 |
+
2025-09-23 02:58:31,852 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 100/100 | Train Loss: 0.0072 | Val rms_score: 0.8767
|
118 |
+
2025-09-23 02:58:32,172 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Test rms_score: 0.4878
|
119 |
+
2025-09-23 02:58:32,473 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Starting triplicate run 2 for dataset freesolv at 2025-09-23_02-58-32
|
120 |
+
2025-09-23 02:58:34,476 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 1/100 | Train Loss: 0.4118 | Val rms_score: 1.0664
|
121 |
+
2025-09-23 02:58:34,476 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Global step of best model: 17
|
122 |
+
2025-09-23 02:58:35,022 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Best model saved at epoch 1 with val rms_score: 1.0664
|
123 |
+
2025-09-23 02:58:37,440 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 2/100 | Train Loss: 0.1517 | Val rms_score: 1.0064
|
124 |
+
2025-09-23 02:58:37,607 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Global step of best model: 34
|
125 |
+
2025-09-23 02:58:38,112 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Best model saved at epoch 2 with val rms_score: 1.0064
|
126 |
+
2025-09-23 02:58:40,473 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 3/100 | Train Loss: 0.0979 | Val rms_score: 0.8827
|
127 |
+
2025-09-23 02:58:40,640 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Global step of best model: 51
|
128 |
+
2025-09-23 02:58:41,145 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Best model saved at epoch 3 with val rms_score: 0.8827
|
129 |
+
2025-09-23 02:58:43,558 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 4/100 | Train Loss: 0.0634 | Val rms_score: 0.8355
|
130 |
+
2025-09-23 02:58:43,726 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Global step of best model: 68
|
131 |
+
2025-09-23 02:58:44,245 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Best model saved at epoch 4 with val rms_score: 0.8355
|
132 |
+
2025-09-23 02:58:46,643 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 5/100 | Train Loss: 0.0542 | Val rms_score: 0.7822
|
133 |
+
2025-09-23 02:58:46,815 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Global step of best model: 85
|
134 |
+
2025-09-23 02:58:47,325 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Best model saved at epoch 5 with val rms_score: 0.7822
|
135 |
+
2025-09-23 02:58:49,793 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 6/100 | Train Loss: 0.0388 | Val rms_score: 0.8209
|
136 |
+
2025-09-23 02:58:52,627 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 7/100 | Train Loss: 0.0361 | Val rms_score: 0.8061
|
137 |
+
2025-09-23 02:58:55,051 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 8/100 | Train Loss: 0.0326 | Val rms_score: 0.8124
|
138 |
+
2025-09-23 02:58:57,520 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 9/100 | Train Loss: 0.0268 | Val rms_score: 0.8126
|
139 |
+
2025-09-23 02:59:00,017 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 10/100 | Train Loss: 0.0272 | Val rms_score: 0.8039
|
140 |
+
2025-09-23 02:59:02,572 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 11/100 | Train Loss: 0.0245 | Val rms_score: 0.8150
|
141 |
+
2025-09-23 02:59:05,337 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 12/100 | Train Loss: 0.0166 | Val rms_score: 0.8233
|
142 |
+
2025-09-23 02:59:07,807 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 13/100 | Train Loss: 0.0262 | Val rms_score: 0.8139
|
143 |
+
2025-09-23 02:59:10,356 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 14/100 | Train Loss: 0.0236 | Val rms_score: 0.8102
|
144 |
+
2025-09-23 02:59:12,831 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 15/100 | Train Loss: 0.0272 | Val rms_score: 0.7787
|
145 |
+
2025-09-23 02:59:13,000 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Global step of best model: 255
|
146 |
+
2025-09-23 02:59:13,506 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Best model saved at epoch 15 with val rms_score: 0.7787
|
147 |
+
2025-09-23 02:59:16,059 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 16/100 | Train Loss: 0.0338 | Val rms_score: 0.7995
|
148 |
+
2025-09-23 02:59:18,861 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 17/100 | Train Loss: 0.0232 | Val rms_score: 0.8254
|
149 |
+
2025-09-23 02:59:21,007 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 18/100 | Train Loss: 0.0215 | Val rms_score: 0.8129
|
150 |
+
2025-09-23 02:59:23,456 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 19/100 | Train Loss: 0.0187 | Val rms_score: 0.8223
|
151 |
+
2025-09-23 02:59:25,981 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 20/100 | Train Loss: 0.0192 | Val rms_score: 0.8018
|
152 |
+
2025-09-23 02:59:28,408 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 21/100 | Train Loss: 0.0172 | Val rms_score: 0.8022
|
153 |
+
2025-09-23 02:59:31,230 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 22/100 | Train Loss: 0.0182 | Val rms_score: 0.8396
|
154 |
+
2025-09-23 02:59:33,738 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 23/100 | Train Loss: 0.0356 | Val rms_score: 0.7806
|
155 |
+
2025-09-23 02:59:36,212 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 24/100 | Train Loss: 0.0165 | Val rms_score: 0.8113
|
156 |
+
2025-09-23 02:59:38,352 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 25/100 | Train Loss: 0.0169 | Val rms_score: 0.8455
|
157 |
+
2025-09-23 02:59:40,720 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 26/100 | Train Loss: 0.0249 | Val rms_score: 0.7796
|
158 |
+
2025-09-23 02:59:43,477 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 27/100 | Train Loss: 0.0395 | Val rms_score: 0.7981
|
159 |
+
2025-09-23 02:59:45,965 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 28/100 | Train Loss: 0.0237 | Val rms_score: 0.8393
|
160 |
+
2025-09-23 02:59:48,479 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 29/100 | Train Loss: 0.0163 | Val rms_score: 0.8337
|
161 |
+
2025-09-23 02:59:50,952 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 30/100 | Train Loss: 0.0131 | Val rms_score: 0.8170
|
162 |
+
2025-09-23 02:59:53,416 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 31/100 | Train Loss: 0.0125 | Val rms_score: 0.8327
|
163 |
+
2025-09-23 02:59:56,171 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 32/100 | Train Loss: 0.0124 | Val rms_score: 0.8136
|
164 |
+
2025-09-23 02:59:58,551 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 33/100 | Train Loss: 0.0113 | Val rms_score: 0.8220
|
165 |
+
2025-09-23 03:00:00,929 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 34/100 | Train Loss: 0.0110 | Val rms_score: 0.8017
|
166 |
+
2025-09-23 03:00:03,372 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 35/100 | Train Loss: 0.0182 | Val rms_score: 0.8599
|
167 |
+
2025-09-23 03:00:05,783 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 36/100 | Train Loss: 0.0218 | Val rms_score: 0.7599
|
168 |
+
2025-09-23 03:00:06,190 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Global step of best model: 612
|
169 |
+
2025-09-23 03:00:06,715 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Best model saved at epoch 36 with val rms_score: 0.7599
|
170 |
+
2025-09-23 03:00:09,227 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 37/100 | Train Loss: 0.1691 | Val rms_score: 0.9449
|
171 |
+
2025-09-23 03:00:11,689 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 38/100 | Train Loss: 0.2629 | Val rms_score: 0.9760
|
172 |
+
2025-09-23 03:00:14,170 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 39/100 | Train Loss: 0.1498 | Val rms_score: 1.0019
|
173 |
+
2025-09-23 03:00:16,658 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 40/100 | Train Loss: 0.0494 | Val rms_score: 0.8967
|
174 |
+
2025-09-23 03:00:19,063 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 41/100 | Train Loss: 0.0280 | Val rms_score: 0.8952
|
175 |
+
2025-09-23 03:00:21,925 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 42/100 | Train Loss: 0.0206 | Val rms_score: 0.8913
|
176 |
+
2025-09-23 03:00:24,447 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 43/100 | Train Loss: 0.0238 | Val rms_score: 0.8839
|
177 |
+
2025-09-23 03:00:26,589 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 44/100 | Train Loss: 0.0279 | Val rms_score: 0.9059
|
178 |
+
2025-09-23 03:00:29,022 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 45/100 | Train Loss: 0.0205 | Val rms_score: 0.8975
|
179 |
+
2025-09-23 03:00:31,456 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 46/100 | Train Loss: 0.0163 | Val rms_score: 0.8848
|
180 |
+
2025-09-23 03:00:34,260 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 47/100 | Train Loss: 0.0149 | Val rms_score: 0.8819
|
181 |
+
2025-09-23 03:00:36,705 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 48/100 | Train Loss: 0.0115 | Val rms_score: 0.8799
|
182 |
+
2025-09-23 03:00:39,181 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 49/100 | Train Loss: 0.0116 | Val rms_score: 0.8833
|
183 |
+
2025-09-23 03:00:41,680 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 50/100 | Train Loss: 0.0111 | Val rms_score: 0.8832
|
184 |
+
2025-09-23 03:00:44,152 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 51/100 | Train Loss: 0.0105 | Val rms_score: 0.8861
|
185 |
+
2025-09-23 03:00:46,612 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 52/100 | Train Loss: 0.0126 | Val rms_score: 0.8867
|
186 |
+
2025-09-23 03:00:48,809 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 53/100 | Train Loss: 0.0076 | Val rms_score: 0.8833
|
187 |
+
2025-09-23 03:00:51,431 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 54/100 | Train Loss: 0.0094 | Val rms_score: 0.8860
|
188 |
+
2025-09-23 03:00:53,762 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 55/100 | Train Loss: 0.0089 | Val rms_score: 0.8852
|
189 |
+
2025-09-23 03:00:56,310 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 56/100 | Train Loss: 0.0098 | Val rms_score: 0.8818
|
190 |
+
2025-09-23 03:00:59,084 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 57/100 | Train Loss: 0.0083 | Val rms_score: 0.8775
|
191 |
+
2025-09-23 03:01:01,600 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 58/100 | Train Loss: 0.0116 | Val rms_score: 0.8850
|
192 |
+
2025-09-23 03:01:05,089 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 59/100 | Train Loss: 0.0089 | Val rms_score: 0.8844
|
193 |
+
2025-09-23 03:01:07,438 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 60/100 | Train Loss: 0.0106 | Val rms_score: 0.8794
|
194 |
+
2025-09-23 03:01:09,825 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 61/100 | Train Loss: 0.0082 | Val rms_score: 0.8796
|
195 |
+
2025-09-23 03:01:12,520 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 62/100 | Train Loss: 0.0087 | Val rms_score: 0.8757
|
196 |
+
2025-09-23 03:01:14,969 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 63/100 | Train Loss: 0.0080 | Val rms_score: 0.8765
|
197 |
+
2025-09-23 03:01:17,298 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 64/100 | Train Loss: 0.0076 | Val rms_score: 0.8764
|
198 |
+
2025-09-23 03:01:19,793 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 65/100 | Train Loss: 0.0078 | Val rms_score: 0.8744
|
199 |
+
2025-09-23 03:01:22,320 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 66/100 | Train Loss: 0.0172 | Val rms_score: 0.8604
|
200 |
+
2025-09-23 03:01:25,042 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 67/100 | Train Loss: 0.0448 | Val rms_score: 0.8714
|
201 |
+
2025-09-23 03:01:27,504 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 68/100 | Train Loss: 0.0161 | Val rms_score: 0.8655
|
202 |
+
2025-09-23 03:01:29,967 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 69/100 | Train Loss: 0.0108 | Val rms_score: 0.8739
|
203 |
+
2025-09-23 03:01:32,370 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 70/100 | Train Loss: 0.0081 | Val rms_score: 0.8784
|
204 |
+
2025-09-23 03:01:34,909 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 71/100 | Train Loss: 0.0135 | Val rms_score: 0.8923
|
205 |
+
2025-09-23 03:01:37,654 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 72/100 | Train Loss: 0.0167 | Val rms_score: 0.9067
|
206 |
+
2025-09-23 03:01:40,203 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 73/100 | Train Loss: 0.0128 | Val rms_score: 0.8928
|
207 |
+
2025-09-23 03:01:42,727 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 74/100 | Train Loss: 0.0096 | Val rms_score: 0.9002
|
208 |
+
2025-09-23 03:01:45,255 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 75/100 | Train Loss: 0.0087 | Val rms_score: 0.8860
|
209 |
+
2025-09-23 03:01:47,777 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 76/100 | Train Loss: 0.0090 | Val rms_score: 0.8907
|
210 |
+
2025-09-23 03:01:50,587 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 77/100 | Train Loss: 0.0094 | Val rms_score: 0.8854
|
211 |
+
2025-09-23 03:01:53,024 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 78/100 | Train Loss: 0.0101 | Val rms_score: 0.8766
|
212 |
+
2025-09-23 03:01:55,212 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 79/100 | Train Loss: 0.0077 | Val rms_score: 0.8786
|
213 |
+
2025-09-23 03:01:57,423 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 80/100 | Train Loss: 0.0070 | Val rms_score: 0.8786
|
214 |
+
2025-09-23 03:01:59,866 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 81/100 | Train Loss: 0.0076 | Val rms_score: 0.8705
|
215 |
+
2025-09-23 03:02:02,561 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 82/100 | Train Loss: 0.0063 | Val rms_score: 0.8665
|
216 |
+
2025-09-23 03:02:05,003 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 83/100 | Train Loss: 0.0062 | Val rms_score: 0.8676
|
217 |
+
2025-09-23 03:02:07,412 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 84/100 | Train Loss: 0.0064 | Val rms_score: 0.8686
|
218 |
+
2025-09-23 03:02:09,823 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 85/100 | Train Loss: 0.0067 | Val rms_score: 0.8692
|
219 |
+
2025-09-23 03:02:12,287 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 86/100 | Train Loss: 0.0060 | Val rms_score: 0.8742
|
220 |
+
2025-09-23 03:02:15,088 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 87/100 | Train Loss: 0.0066 | Val rms_score: 0.8707
|
221 |
+
2025-09-23 03:02:17,556 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 88/100 | Train Loss: 0.0060 | Val rms_score: 0.8673
|
222 |
+
2025-09-23 03:02:20,016 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 89/100 | Train Loss: 0.0064 | Val rms_score: 0.8757
|
223 |
+
2025-09-23 03:02:22,489 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 90/100 | Train Loss: 0.0079 | Val rms_score: 0.8727
|
224 |
+
2025-09-23 03:02:24,974 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 91/100 | Train Loss: 0.0064 | Val rms_score: 0.8781
|
225 |
+
2025-09-23 03:02:27,746 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 92/100 | Train Loss: 0.0070 | Val rms_score: 0.8738
|
226 |
+
2025-09-23 03:02:30,328 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 93/100 | Train Loss: 0.0053 | Val rms_score: 0.8726
|
227 |
+
2025-09-23 03:02:32,774 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 94/100 | Train Loss: 0.0054 | Val rms_score: 0.8718
|
228 |
+
2025-09-23 03:02:35,245 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 95/100 | Train Loss: 0.0061 | Val rms_score: 0.8697
|
229 |
+
2025-09-23 03:02:37,786 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 96/100 | Train Loss: 0.0054 | Val rms_score: 0.8729
|
230 |
+
2025-09-23 03:02:40,565 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 97/100 | Train Loss: 0.0049 | Val rms_score: 0.8731
|
231 |
+
2025-09-23 03:02:43,108 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 98/100 | Train Loss: 0.0054 | Val rms_score: 0.8709
|
232 |
+
2025-09-23 03:02:45,551 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 99/100 | Train Loss: 0.0059 | Val rms_score: 0.8692
|
233 |
+
2025-09-23 03:02:48,034 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 100/100 | Train Loss: 0.0055 | Val rms_score: 0.8628
|
234 |
+
2025-09-23 03:02:48,450 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Test rms_score: 0.4856
|
235 |
+
2025-09-23 03:02:48,753 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Starting triplicate run 3 for dataset freesolv at 2025-09-23_03-02-48
|
236 |
+
2025-09-23 03:02:50,972 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 1/100 | Train Loss: 0.5919 | Val rms_score: 1.0315
|
237 |
+
2025-09-23 03:02:50,972 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Global step of best model: 17
|
238 |
+
2025-09-23 03:02:51,481 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Best model saved at epoch 1 with val rms_score: 1.0315
|
239 |
+
2025-09-23 03:02:54,020 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 2/100 | Train Loss: 0.3107 | Val rms_score: 1.0931
|
240 |
+
2025-09-23 03:02:56,454 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 3/100 | Train Loss: 0.1406 | Val rms_score: 0.9547
|
241 |
+
2025-09-23 03:02:56,620 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Global step of best model: 51
|
242 |
+
2025-09-23 03:02:57,131 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Best model saved at epoch 3 with val rms_score: 0.9547
|
243 |
+
2025-09-23 03:02:59,616 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 4/100 | Train Loss: 0.0942 | Val rms_score: 0.9126
|
244 |
+
2025-09-23 03:02:59,784 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Global step of best model: 68
|
245 |
+
2025-09-23 03:03:00,303 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Best model saved at epoch 4 with val rms_score: 0.9126
|
246 |
+
2025-09-23 03:03:02,550 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 5/100 | Train Loss: 0.0740 | Val rms_score: 0.8551
|
247 |
+
2025-09-23 03:03:02,727 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Global step of best model: 85
|
248 |
+
2025-09-23 03:03:03,242 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Best model saved at epoch 5 with val rms_score: 0.8551
|
249 |
+
2025-09-23 03:03:05,546 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 6/100 | Train Loss: 0.0859 | Val rms_score: 0.8304
|
250 |
+
2025-09-23 03:03:05,990 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Global step of best model: 102
|
251 |
+
2025-09-23 03:03:06,505 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Best model saved at epoch 6 with val rms_score: 0.8304
|
252 |
+
2025-09-23 03:03:09,087 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 7/100 | Train Loss: 0.0722 | Val rms_score: 0.8610
|
253 |
+
2025-09-23 03:03:11,554 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 8/100 | Train Loss: 0.0896 | Val rms_score: 0.8493
|
254 |
+
2025-09-23 03:03:13,990 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 9/100 | Train Loss: 0.0657 | Val rms_score: 0.8362
|
255 |
+
2025-09-23 03:03:16,470 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 10/100 | Train Loss: 0.0616 | Val rms_score: 0.8274
|
256 |
+
2025-09-23 03:03:16,642 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Global step of best model: 170
|
257 |
+
2025-09-23 03:03:17,156 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Best model saved at epoch 10 with val rms_score: 0.8274
|
258 |
+
2025-09-23 03:03:19,609 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 11/100 | Train Loss: 0.0607 | Val rms_score: 0.8157
|
259 |
+
2025-09-23 03:03:20,054 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Global step of best model: 187
|
260 |
+
2025-09-23 03:03:20,560 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Best model saved at epoch 11 with val rms_score: 0.8157
|
261 |
+
2025-09-23 03:03:23,087 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 12/100 | Train Loss: 0.0337 | Val rms_score: 0.8341
|
262 |
+
2025-09-23 03:03:25,588 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 13/100 | Train Loss: 0.0503 | Val rms_score: 0.8411
|
263 |
+
2025-09-23 03:03:28,074 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 14/100 | Train Loss: 0.0749 | Val rms_score: 0.8248
|
264 |
+
2025-09-23 03:03:30,613 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 15/100 | Train Loss: 0.0462 | Val rms_score: 0.8379
|
265 |
+
2025-09-23 03:03:33,186 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 16/100 | Train Loss: 0.0368 | Val rms_score: 0.8237
|
266 |
+
2025-09-23 03:03:35,910 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 17/100 | Train Loss: 0.0319 | Val rms_score: 0.8329
|
267 |
+
2025-09-23 03:03:38,310 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 18/100 | Train Loss: 0.0413 | Val rms_score: 0.8377
|
268 |
+
2025-09-23 03:03:40,668 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 19/100 | Train Loss: 0.0358 | Val rms_score: 0.8136
|
269 |
+
2025-09-23 03:03:40,838 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Global step of best model: 323
|
270 |
+
2025-09-23 03:03:41,355 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Best model saved at epoch 19 with val rms_score: 0.8136
|
271 |
+
2025-09-23 03:03:43,741 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 20/100 | Train Loss: 0.0347 | Val rms_score: 0.8189
|
272 |
+
2025-09-23 03:03:45,953 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 21/100 | Train Loss: 0.0281 | Val rms_score: 0.8275
|
273 |
+
2025-09-23 03:03:48,668 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 22/100 | Train Loss: 0.0246 | Val rms_score: 0.8281
|
274 |
+
2025-09-23 03:03:51,078 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 23/100 | Train Loss: 0.0285 | Val rms_score: 0.8247
|
275 |
+
2025-09-23 03:03:53,585 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 24/100 | Train Loss: 0.0354 | Val rms_score: 0.8424
|
276 |
+
2025-09-23 03:03:56,085 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 25/100 | Train Loss: 0.0234 | Val rms_score: 0.8255
|
277 |
+
2025-09-23 03:03:58,529 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 26/100 | Train Loss: 0.0192 | Val rms_score: 0.8235
|
278 |
+
2025-09-23 03:04:01,389 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 27/100 | Train Loss: 0.0185 | Val rms_score: 0.8187
|
279 |
+
2025-09-23 03:04:03,955 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 28/100 | Train Loss: 0.0191 | Val rms_score: 0.8271
|
280 |
+
2025-09-23 03:04:06,461 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 29/100 | Train Loss: 0.0163 | Val rms_score: 0.8191
|
281 |
+
2025-09-23 03:04:08,790 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 30/100 | Train Loss: 0.0168 | Val rms_score: 0.8087
|
282 |
+
2025-09-23 03:04:08,938 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Global step of best model: 510
|
283 |
+
2025-09-23 03:04:09,449 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Best model saved at epoch 30 with val rms_score: 0.8087
|
284 |
+
2025-09-23 03:04:11,598 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 31/100 | Train Loss: 0.0169 | Val rms_score: 0.8156
|
285 |
+
2025-09-23 03:04:14,378 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 32/100 | Train Loss: 0.0160 | Val rms_score: 0.8180
|
286 |
+
2025-09-23 03:04:16,861 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 33/100 | Train Loss: 0.0152 | Val rms_score: 0.8130
|
287 |
+
2025-09-23 03:04:19,402 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 34/100 | Train Loss: 0.0145 | Val rms_score: 0.8157
|
288 |
+
2025-09-23 03:04:21,797 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 35/100 | Train Loss: 0.0127 | Val rms_score: 0.8191
|
289 |
+
2025-09-23 03:04:24,323 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 36/100 | Train Loss: 0.0145 | Val rms_score: 0.8225
|
290 |
+
2025-09-23 03:04:27,148 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 37/100 | Train Loss: 0.0140 | Val rms_score: 0.8103
|
291 |
+
2025-09-23 03:04:29,673 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 38/100 | Train Loss: 0.0192 | Val rms_score: 0.8036
|
292 |
+
2025-09-23 03:04:29,846 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Global step of best model: 646
|
293 |
+
2025-09-23 03:04:30,354 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Best model saved at epoch 38 with val rms_score: 0.8036
|
294 |
+
2025-09-23 03:04:32,850 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 39/100 | Train Loss: 0.0146 | Val rms_score: 0.8045
|
295 |
+
2025-09-23 03:04:35,324 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 40/100 | Train Loss: 0.0124 | Val rms_score: 0.8042
|
296 |
+
2025-09-23 03:04:37,849 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 41/100 | Train Loss: 0.0137 | Val rms_score: 0.8115
|
297 |
+
2025-09-23 03:04:40,632 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 42/100 | Train Loss: 0.0126 | Val rms_score: 0.8099
|
298 |
+
2025-09-23 03:04:43,084 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 43/100 | Train Loss: 0.0114 | Val rms_score: 0.8084
|
299 |
+
2025-09-23 03:04:45,535 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 44/100 | Train Loss: 0.0108 | Val rms_score: 0.8148
|
300 |
+
2025-09-23 03:04:48,043 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 45/100 | Train Loss: 0.0104 | Val rms_score: 0.8185
|
301 |
+
2025-09-23 03:04:50,522 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 46/100 | Train Loss: 0.0099 | Val rms_score: 0.8159
|
302 |
+
2025-09-23 03:04:53,164 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 47/100 | Train Loss: 0.0105 | Val rms_score: 0.8159
|
303 |
+
2025-09-23 03:04:55,647 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 48/100 | Train Loss: 0.0101 | Val rms_score: 0.8129
|
304 |
+
2025-09-23 03:04:58,173 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 49/100 | Train Loss: 0.0113 | Val rms_score: 0.8035
|
305 |
+
2025-09-23 03:04:58,344 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Global step of best model: 833
|
306 |
+
2025-09-23 03:04:58,863 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Best model saved at epoch 49 with val rms_score: 0.8035
|
307 |
+
2025-09-23 03:05:01,318 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 50/100 | Train Loss: 0.0114 | Val rms_score: 0.8144
|
308 |
+
2025-09-23 03:05:03,720 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 51/100 | Train Loss: 0.0136 | Val rms_score: 0.8079
|
309 |
+
2025-09-23 03:05:06,470 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 52/100 | Train Loss: 0.0106 | Val rms_score: 0.8259
|
310 |
+
2025-09-23 03:05:08,895 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 53/100 | Train Loss: 0.0378 | Val rms_score: 0.8124
|
311 |
+
2025-09-23 03:05:11,308 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 54/100 | Train Loss: 0.0146 | Val rms_score: 0.8036
|
312 |
+
2025-09-23 03:05:13,852 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 55/100 | Train Loss: 0.0099 | Val rms_score: 0.8172
|
313 |
+
2025-09-23 03:05:16,319 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 56/100 | Train Loss: 0.0101 | Val rms_score: 0.8118
|
314 |
+
2025-09-23 03:05:18,861 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 57/100 | Train Loss: 0.0100 | Val rms_score: 0.8081
|
315 |
+
2025-09-23 03:05:21,040 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 58/100 | Train Loss: 0.0098 | Val rms_score: 0.8030
|
316 |
+
2025-09-23 03:05:21,178 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Global step of best model: 986
|
317 |
+
2025-09-23 03:05:21,688 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Best model saved at epoch 58 with val rms_score: 0.8030
|
318 |
+
2025-09-23 03:05:25,106 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 59/100 | Train Loss: 0.0100 | Val rms_score: 0.8112
|
319 |
+
2025-09-23 03:05:27,544 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 60/100 | Train Loss: 0.0084 | Val rms_score: 0.8111
|
320 |
+
2025-09-23 03:05:30,080 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 61/100 | Train Loss: 0.0088 | Val rms_score: 0.8062
|
321 |
+
2025-09-23 03:05:32,892 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 62/100 | Train Loss: 0.0080 | Val rms_score: 0.8085
|
322 |
+
2025-09-23 03:05:35,368 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 63/100 | Train Loss: 0.0084 | Val rms_score: 0.8136
|
323 |
+
2025-09-23 03:05:37,784 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 64/100 | Train Loss: 0.0073 | Val rms_score: 0.8100
|
324 |
+
2025-09-23 03:05:40,166 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 65/100 | Train Loss: 0.0102 | Val rms_score: 0.8120
|
325 |
+
2025-09-23 03:05:42,585 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 66/100 | Train Loss: 0.0071 | Val rms_score: 0.8177
|
326 |
+
2025-09-23 03:05:45,340 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 67/100 | Train Loss: 0.0082 | Val rms_score: 0.8078
|
327 |
+
2025-09-23 03:05:47,701 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 68/100 | Train Loss: 0.0100 | Val rms_score: 0.8381
|
328 |
+
2025-09-23 03:05:50,166 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 69/100 | Train Loss: 0.0176 | Val rms_score: 0.7996
|
329 |
+
2025-09-23 03:05:50,343 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Global step of best model: 1173
|
330 |
+
2025-09-23 03:05:50,857 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Best model saved at epoch 69 with val rms_score: 0.7996
|
331 |
+
2025-09-23 03:05:53,351 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 70/100 | Train Loss: 0.0099 | Val rms_score: 0.8101
|
332 |
+
2025-09-23 03:05:55,882 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 71/100 | Train Loss: 0.0080 | Val rms_score: 0.8094
|
333 |
+
2025-09-23 03:05:58,575 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 72/100 | Train Loss: 0.0081 | Val rms_score: 0.8139
|
334 |
+
2025-09-23 03:06:01,126 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 73/100 | Train Loss: 0.0080 | Val rms_score: 0.8054
|
335 |
+
2025-09-23 03:06:03,642 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 74/100 | Train Loss: 0.0096 | Val rms_score: 0.8092
|
336 |
+
2025-09-23 03:06:06,161 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 75/100 | Train Loss: 0.0081 | Val rms_score: 0.8108
|
337 |
+
2025-09-23 03:06:08,687 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 76/100 | Train Loss: 0.0073 | Val rms_score: 0.8114
|
338 |
+
2025-09-23 03:06:11,412 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 77/100 | Train Loss: 0.0072 | Val rms_score: 0.8104
|
339 |
+
2025-09-23 03:06:13,818 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 78/100 | Train Loss: 0.0078 | Val rms_score: 0.8123
|
340 |
+
2025-09-23 03:06:16,282 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 79/100 | Train Loss: 0.0067 | Val rms_score: 0.8080
|
341 |
+
2025-09-23 03:06:18,817 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 80/100 | Train Loss: 0.0064 | Val rms_score: 0.8089
|
342 |
+
2025-09-23 03:06:21,398 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 81/100 | Train Loss: 0.0072 | Val rms_score: 0.8114
|
343 |
+
2025-09-23 03:06:24,076 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 82/100 | Train Loss: 0.0078 | Val rms_score: 0.8099
|
344 |
+
2025-09-23 03:06:26,346 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 83/100 | Train Loss: 0.0079 | Val rms_score: 0.8088
|
345 |
+
2025-09-23 03:06:28,594 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 84/100 | Train Loss: 0.0076 | Val rms_score: 0.8025
|
346 |
+
2025-09-23 03:06:31,126 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 85/100 | Train Loss: 0.0104 | Val rms_score: 0.7916
|
347 |
+
2025-09-23 03:06:31,262 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Global step of best model: 1445
|
348 |
+
2025-09-23 03:06:31,773 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Best model saved at epoch 85 with val rms_score: 0.7916
|
349 |
+
2025-09-23 03:06:34,299 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 86/100 | Train Loss: 0.0171 | Val rms_score: 0.8204
|
350 |
+
2025-09-23 03:06:37,021 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 87/100 | Train Loss: 0.0116 | Val rms_score: 0.8202
|
351 |
+
2025-09-23 03:06:39,528 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 88/100 | Train Loss: 0.0098 | Val rms_score: 0.8015
|
352 |
+
2025-09-23 03:06:42,023 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 89/100 | Train Loss: 0.0083 | Val rms_score: 0.8203
|
353 |
+
2025-09-23 03:06:44,525 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 90/100 | Train Loss: 0.0077 | Val rms_score: 0.8008
|
354 |
+
2025-09-23 03:06:46,991 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 91/100 | Train Loss: 0.0070 | Val rms_score: 0.8072
|
355 |
+
2025-09-23 03:06:49,834 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 92/100 | Train Loss: 0.0063 | Val rms_score: 0.8045
|
356 |
+
2025-09-23 03:06:52,300 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 93/100 | Train Loss: 0.0069 | Val rms_score: 0.7982
|
357 |
+
2025-09-23 03:06:54,858 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 94/100 | Train Loss: 0.0063 | Val rms_score: 0.8050
|
358 |
+
2025-09-23 03:06:57,333 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 95/100 | Train Loss: 0.0071 | Val rms_score: 0.8033
|
359 |
+
2025-09-23 03:06:59,831 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 96/100 | Train Loss: 0.0071 | Val rms_score: 0.8060
|
360 |
+
2025-09-23 03:07:02,611 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 97/100 | Train Loss: 0.0068 | Val rms_score: 0.8062
|
361 |
+
2025-09-23 03:07:04,834 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 98/100 | Train Loss: 0.0061 | Val rms_score: 0.8048
|
362 |
+
2025-09-23 03:07:07,370 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 99/100 | Train Loss: 0.0059 | Val rms_score: 0.8076
|
363 |
+
2025-09-23 03:07:09,820 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 100/100 | Train Loss: 0.0066 | Val rms_score: 0.8088
|
364 |
+
2025-09-23 03:07:10,280 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Test rms_score: 0.5202
|
365 |
+
2025-09-23 03:07:10,592 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Final Triplicate Test Results — Avg rms_score: 0.4979, Std Dev: 0.0158
|
logs_modchembert_regression_ModChemBERT-MLM-DAPT-TAFT-OPT/modchembert_deepchem_splits_run_lipo_epochs100_batch_size32_20250923_094951.log
ADDED
@@ -0,0 +1,365 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
2025-09-23 09:49:51,070 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Running benchmark for dataset: lipo
|
2 |
+
2025-09-23 09:49:51,070 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - dataset: lipo, tasks: ['exp'], epochs: 100, learning rate: 3e-05, transform: True
|
3 |
+
2025-09-23 09:49:51,076 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Starting triplicate run 1 for dataset lipo at 2025-09-23_09-49-51
|
4 |
+
2025-09-23 09:50:02,247 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 1/100 | Train Loss: 0.4125 | Val rms_score: 0.8456
|
5 |
+
2025-09-23 09:50:02,247 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Global step of best model: 105
|
6 |
+
2025-09-23 09:50:02,783 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Best model saved at epoch 1 with val rms_score: 0.8456
|
7 |
+
2025-09-23 09:50:13,112 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 2/100 | Train Loss: 0.3187 | Val rms_score: 0.6877
|
8 |
+
2025-09-23 09:50:13,290 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Global step of best model: 210
|
9 |
+
2025-09-23 09:50:13,824 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Best model saved at epoch 2 with val rms_score: 0.6877
|
10 |
+
2025-09-23 09:50:24,072 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 3/100 | Train Loss: 0.3042 | Val rms_score: 0.6760
|
11 |
+
2025-09-23 09:50:24,251 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Global step of best model: 315
|
12 |
+
2025-09-23 09:50:24,778 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Best model saved at epoch 3 with val rms_score: 0.6760
|
13 |
+
2025-09-23 09:50:35,106 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 4/100 | Train Loss: 0.2359 | Val rms_score: 0.6661
|
14 |
+
2025-09-23 09:50:35,293 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Global step of best model: 420
|
15 |
+
2025-09-23 09:50:35,846 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Best model saved at epoch 4 with val rms_score: 0.6661
|
16 |
+
2025-09-23 09:50:46,221 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 5/100 | Train Loss: 0.2150 | Val rms_score: 0.7116
|
17 |
+
2025-09-23 09:50:56,535 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 6/100 | Train Loss: 0.1802 | Val rms_score: 0.6560
|
18 |
+
2025-09-23 09:50:56,919 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Global step of best model: 630
|
19 |
+
2025-09-23 09:50:57,466 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Best model saved at epoch 6 with val rms_score: 0.6560
|
20 |
+
2025-09-23 09:51:07,817 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 7/100 | Train Loss: 0.1411 | Val rms_score: 0.6406
|
21 |
+
2025-09-23 09:51:08,017 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Global step of best model: 735
|
22 |
+
2025-09-23 09:51:08,554 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Best model saved at epoch 7 with val rms_score: 0.6406
|
23 |
+
2025-09-23 09:51:18,834 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 8/100 | Train Loss: 0.1250 | Val rms_score: 0.6428
|
24 |
+
2025-09-23 09:51:29,293 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 9/100 | Train Loss: 0.1250 | Val rms_score: 0.6591
|
25 |
+
2025-09-23 09:51:40,793 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 10/100 | Train Loss: 0.1006 | Val rms_score: 0.6415
|
26 |
+
2025-09-23 09:51:50,830 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 11/100 | Train Loss: 0.0960 | Val rms_score: 0.6415
|
27 |
+
2025-09-23 09:52:01,354 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 12/100 | Train Loss: 0.0865 | Val rms_score: 0.6432
|
28 |
+
2025-09-23 09:52:11,418 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 13/100 | Train Loss: 0.0798 | Val rms_score: 0.6408
|
29 |
+
2025-09-23 09:52:21,434 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 14/100 | Train Loss: 0.0737 | Val rms_score: 0.6372
|
30 |
+
2025-09-23 09:52:21,579 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Global step of best model: 1470
|
31 |
+
2025-09-23 09:52:22,135 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Best model saved at epoch 14 with val rms_score: 0.6372
|
32 |
+
2025-09-23 09:52:32,249 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 15/100 | Train Loss: 0.0733 | Val rms_score: 0.6400
|
33 |
+
2025-09-23 09:52:42,299 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 16/100 | Train Loss: 0.0656 | Val rms_score: 0.6479
|
34 |
+
2025-09-23 09:52:52,507 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 17/100 | Train Loss: 0.0647 | Val rms_score: 0.6394
|
35 |
+
2025-09-23 09:53:02,588 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 18/100 | Train Loss: 0.0587 | Val rms_score: 0.6362
|
36 |
+
2025-09-23 09:53:02,740 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Global step of best model: 1890
|
37 |
+
2025-09-23 09:53:03,287 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Best model saved at epoch 18 with val rms_score: 0.6362
|
38 |
+
2025-09-23 09:53:13,369 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 19/100 | Train Loss: 0.0582 | Val rms_score: 0.6424
|
39 |
+
2025-09-23 09:53:24,936 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 20/100 | Train Loss: 0.0566 | Val rms_score: 0.6589
|
40 |
+
2025-09-23 09:53:35,259 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 21/100 | Train Loss: 0.0570 | Val rms_score: 0.6293
|
41 |
+
2025-09-23 09:53:35,637 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Global step of best model: 2205
|
42 |
+
2025-09-23 09:53:36,185 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Best model saved at epoch 21 with val rms_score: 0.6293
|
43 |
+
2025-09-23 09:53:46,499 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 22/100 | Train Loss: 0.0496 | Val rms_score: 0.6395
|
44 |
+
2025-09-23 09:53:56,783 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 23/100 | Train Loss: 0.0471 | Val rms_score: 0.6456
|
45 |
+
2025-09-23 09:54:06,854 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 24/100 | Train Loss: 0.0475 | Val rms_score: 0.6384
|
46 |
+
2025-09-23 09:54:17,109 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 25/100 | Train Loss: 0.0484 | Val rms_score: 0.6400
|
47 |
+
2025-09-23 09:54:27,257 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 26/100 | Train Loss: 0.0453 | Val rms_score: 0.6342
|
48 |
+
2025-09-23 09:54:37,815 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 27/100 | Train Loss: 0.0400 | Val rms_score: 0.6490
|
49 |
+
2025-09-23 09:54:47,927 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 28/100 | Train Loss: 0.0412 | Val rms_score: 0.6425
|
50 |
+
2025-09-23 09:54:58,868 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 29/100 | Train Loss: 0.0431 | Val rms_score: 0.6412
|
51 |
+
2025-09-23 09:55:09,095 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 30/100 | Train Loss: 0.0403 | Val rms_score: 0.6343
|
52 |
+
2025-09-23 09:55:19,374 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 31/100 | Train Loss: 0.0403 | Val rms_score: 0.6418
|
53 |
+
2025-09-23 09:55:29,905 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 32/100 | Train Loss: 0.0362 | Val rms_score: 0.6451
|
54 |
+
2025-09-23 09:55:40,204 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 33/100 | Train Loss: 0.0382 | Val rms_score: 0.6452
|
55 |
+
2025-09-23 09:55:50,620 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 34/100 | Train Loss: 0.0350 | Val rms_score: 0.6402
|
56 |
+
2025-09-23 09:56:00,913 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 35/100 | Train Loss: 0.0352 | Val rms_score: 0.6475
|
57 |
+
2025-09-23 09:56:11,188 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 36/100 | Train Loss: 0.0338 | Val rms_score: 0.6436
|
58 |
+
2025-09-23 09:56:21,633 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 37/100 | Train Loss: 0.0320 | Val rms_score: 0.6399
|
59 |
+
2025-09-23 09:56:31,796 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 38/100 | Train Loss: 0.0337 | Val rms_score: 0.6415
|
60 |
+
2025-09-23 09:56:43,304 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 39/100 | Train Loss: 0.0334 | Val rms_score: 0.6509
|
61 |
+
2025-09-23 09:56:53,394 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 40/100 | Train Loss: 0.0312 | Val rms_score: 0.6344
|
62 |
+
2025-09-23 09:57:03,591 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 41/100 | Train Loss: 0.0229 | Val rms_score: 0.6362
|
63 |
+
2025-09-23 09:57:13,921 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 42/100 | Train Loss: 0.0324 | Val rms_score: 0.6357
|
64 |
+
2025-09-23 09:57:24,056 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 43/100 | Train Loss: 0.0286 | Val rms_score: 0.6417
|
65 |
+
2025-09-23 09:57:34,169 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 44/100 | Train Loss: 0.0326 | Val rms_score: 0.6348
|
66 |
+
2025-09-23 09:57:44,259 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 45/100 | Train Loss: 0.0308 | Val rms_score: 0.6329
|
67 |
+
2025-09-23 09:57:54,306 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 46/100 | Train Loss: 0.0312 | Val rms_score: 0.6391
|
68 |
+
2025-09-23 09:58:04,889 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 47/100 | Train Loss: 0.0290 | Val rms_score: 0.6342
|
69 |
+
2025-09-23 09:58:16,389 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 48/100 | Train Loss: 0.0318 | Val rms_score: 0.6459
|
70 |
+
2025-09-23 09:58:26,725 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 49/100 | Train Loss: 0.0285 | Val rms_score: 0.6382
|
71 |
+
2025-09-23 09:58:37,117 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 50/100 | Train Loss: 0.0305 | Val rms_score: 0.6396
|
72 |
+
2025-09-23 09:58:47,425 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 51/100 | Train Loss: 0.0281 | Val rms_score: 0.6380
|
73 |
+
2025-09-23 09:58:57,859 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 52/100 | Train Loss: 0.0280 | Val rms_score: 0.6357
|
74 |
+
2025-09-23 09:59:08,236 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 53/100 | Train Loss: 0.0276 | Val rms_score: 0.6358
|
75 |
+
2025-09-23 09:59:18,544 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 54/100 | Train Loss: 0.0259 | Val rms_score: 0.6448
|
76 |
+
2025-09-23 09:59:28,717 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 55/100 | Train Loss: 0.0269 | Val rms_score: 0.6388
|
77 |
+
2025-09-23 09:59:38,758 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 56/100 | Train Loss: 0.0256 | Val rms_score: 0.6353
|
78 |
+
2025-09-23 09:59:49,138 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 57/100 | Train Loss: 0.0254 | Val rms_score: 0.6405
|
79 |
+
2025-09-23 10:00:00,667 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 58/100 | Train Loss: 0.0250 | Val rms_score: 0.6400
|
80 |
+
2025-09-23 10:00:10,988 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 59/100 | Train Loss: 0.0243 | Val rms_score: 0.6407
|
81 |
+
2025-09-23 10:00:21,188 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 60/100 | Train Loss: 0.0273 | Val rms_score: 0.6370
|
82 |
+
2025-09-23 10:00:31,476 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 61/100 | Train Loss: 0.0270 | Val rms_score: 0.6385
|
83 |
+
2025-09-23 10:00:41,810 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 62/100 | Train Loss: 0.0246 | Val rms_score: 0.6395
|
84 |
+
2025-09-23 10:00:51,911 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 63/100 | Train Loss: 0.0275 | Val rms_score: 0.6405
|
85 |
+
2025-09-23 10:01:01,888 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 64/100 | Train Loss: 0.0227 | Val rms_score: 0.6387
|
86 |
+
2025-09-23 10:01:12,161 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 65/100 | Train Loss: 0.0242 | Val rms_score: 0.6415
|
87 |
+
2025-09-23 10:01:22,437 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 66/100 | Train Loss: 0.0240 | Val rms_score: 0.6371
|
88 |
+
2025-09-23 10:01:34,300 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 67/100 | Train Loss: 0.0250 | Val rms_score: 0.6360
|
89 |
+
2025-09-23 10:01:44,528 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 68/100 | Train Loss: 0.0256 | Val rms_score: 0.6377
|
90 |
+
2025-09-23 10:01:54,946 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 69/100 | Train Loss: 0.0241 | Val rms_score: 0.6412
|
91 |
+
2025-09-23 10:02:05,005 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 70/100 | Train Loss: 0.0241 | Val rms_score: 0.6426
|
92 |
+
2025-09-23 10:02:15,336 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 71/100 | Train Loss: 0.0233 | Val rms_score: 0.6436
|
93 |
+
2025-09-23 10:02:25,883 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 72/100 | Train Loss: 0.0247 | Val rms_score: 0.6399
|
94 |
+
2025-09-23 10:02:36,433 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 73/100 | Train Loss: 0.0249 | Val rms_score: 0.6411
|
95 |
+
2025-09-23 10:02:46,549 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 74/100 | Train Loss: 0.0233 | Val rms_score: 0.6356
|
96 |
+
2025-09-23 10:02:56,610 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 75/100 | Train Loss: 0.0229 | Val rms_score: 0.6391
|
97 |
+
2025-09-23 10:03:06,659 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 76/100 | Train Loss: 0.0229 | Val rms_score: 0.6360
|
98 |
+
2025-09-23 10:03:18,197 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 77/100 | Train Loss: 0.0229 | Val rms_score: 0.6482
|
99 |
+
2025-09-23 10:03:28,461 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 78/100 | Train Loss: 0.0238 | Val rms_score: 0.6398
|
100 |
+
2025-09-23 10:03:38,739 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 79/100 | Train Loss: 0.0240 | Val rms_score: 0.6367
|
101 |
+
2025-09-23 10:03:48,976 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 80/100 | Train Loss: 0.0231 | Val rms_score: 0.6438
|
102 |
+
2025-09-23 10:03:59,294 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 81/100 | Train Loss: 0.0234 | Val rms_score: 0.6373
|
103 |
+
2025-09-23 10:04:09,854 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 82/100 | Train Loss: 0.0205 | Val rms_score: 0.6346
|
104 |
+
2025-09-23 10:04:19,833 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 83/100 | Train Loss: 0.0224 | Val rms_score: 0.6369
|
105 |
+
2025-09-23 10:04:30,002 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 84/100 | Train Loss: 0.0262 | Val rms_score: 0.6373
|
106 |
+
2025-09-23 10:04:40,281 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 85/100 | Train Loss: 0.0234 | Val rms_score: 0.6422
|
107 |
+
2025-09-23 10:04:51,919 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 86/100 | Train Loss: 0.0249 | Val rms_score: 0.6384
|
108 |
+
2025-09-23 10:05:02,592 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 87/100 | Train Loss: 0.0237 | Val rms_score: 0.6359
|
109 |
+
2025-09-23 10:05:12,913 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 88/100 | Train Loss: 0.0215 | Val rms_score: 0.6315
|
110 |
+
2025-09-23 10:05:23,292 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 89/100 | Train Loss: 0.0213 | Val rms_score: 0.6386
|
111 |
+
2025-09-23 10:05:33,514 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 90/100 | Train Loss: 0.0219 | Val rms_score: 0.6391
|
112 |
+
2025-09-23 10:05:43,583 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 91/100 | Train Loss: 0.0210 | Val rms_score: 0.6404
|
113 |
+
2025-09-23 10:05:53,893 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 92/100 | Train Loss: 0.0214 | Val rms_score: 0.6431
|
114 |
+
2025-09-23 10:06:03,985 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 93/100 | Train Loss: 0.0210 | Val rms_score: 0.6416
|
115 |
+
2025-09-23 10:06:14,330 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 94/100 | Train Loss: 0.0213 | Val rms_score: 0.6404
|
116 |
+
2025-09-23 10:06:24,598 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 95/100 | Train Loss: 0.0228 | Val rms_score: 0.6359
|
117 |
+
2025-09-23 10:06:36,157 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 96/100 | Train Loss: 0.0215 | Val rms_score: 0.6430
|
118 |
+
2025-09-23 10:06:46,463 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 97/100 | Train Loss: 0.0202 | Val rms_score: 0.6397
|
119 |
+
2025-09-23 10:06:56,527 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 98/100 | Train Loss: 0.0207 | Val rms_score: 0.6406
|
120 |
+
2025-09-23 10:07:06,537 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 99/100 | Train Loss: 0.0203 | Val rms_score: 0.6394
|
121 |
+
2025-09-23 10:07:16,611 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 100/100 | Train Loss: 0.0206 | Val rms_score: 0.6440
|
122 |
+
2025-09-23 10:07:17,665 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Test rms_score: 0.6564
|
123 |
+
2025-09-23 10:07:17,954 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Starting triplicate run 2 for dataset lipo at 2025-09-23_10-07-17
|
124 |
+
2025-09-23 10:07:26,974 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 1/100 | Train Loss: 0.4594 | Val rms_score: 0.7378
|
125 |
+
2025-09-23 10:07:26,974 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Global step of best model: 105
|
126 |
+
2025-09-23 10:07:27,511 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Best model saved at epoch 1 with val rms_score: 0.7378
|
127 |
+
2025-09-23 10:07:37,960 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 2/100 | Train Loss: 0.2875 | Val rms_score: 0.6794
|
128 |
+
2025-09-23 10:07:38,144 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Global step of best model: 210
|
129 |
+
2025-09-23 10:07:38,682 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Best model saved at epoch 2 with val rms_score: 0.6794
|
130 |
+
2025-09-23 10:07:49,036 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 3/100 | Train Loss: 0.2562 | Val rms_score: 0.6874
|
131 |
+
2025-09-23 10:07:59,325 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 4/100 | Train Loss: 0.2156 | Val rms_score: 0.6588
|
132 |
+
2025-09-23 10:07:59,496 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Global step of best model: 420
|
133 |
+
2025-09-23 10:08:00,065 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Best model saved at epoch 4 with val rms_score: 0.6588
|
134 |
+
2025-09-23 10:08:10,305 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 5/100 | Train Loss: 0.1825 | Val rms_score: 0.6580
|
135 |
+
2025-09-23 10:08:10,488 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Global step of best model: 525
|
136 |
+
2025-09-23 10:08:11,080 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Best model saved at epoch 5 with val rms_score: 0.6580
|
137 |
+
2025-09-23 10:08:21,343 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 6/100 | Train Loss: 0.1781 | Val rms_score: 0.6366
|
138 |
+
2025-09-23 10:08:21,778 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Global step of best model: 630
|
139 |
+
2025-09-23 10:08:22,331 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Best model saved at epoch 6 with val rms_score: 0.6366
|
140 |
+
2025-09-23 10:08:32,613 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 7/100 | Train Loss: 0.1366 | Val rms_score: 0.6514
|
141 |
+
2025-09-23 10:08:43,067 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 8/100 | Train Loss: 0.1227 | Val rms_score: 0.6508
|
142 |
+
2025-09-23 10:08:53,262 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 9/100 | Train Loss: 0.1028 | Val rms_score: 0.6402
|
143 |
+
2025-09-23 10:09:04,731 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 10/100 | Train Loss: 0.1037 | Val rms_score: 0.6513
|
144 |
+
2025-09-23 10:09:14,978 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 11/100 | Train Loss: 0.0875 | Val rms_score: 0.6562
|
145 |
+
2025-09-23 10:09:25,497 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 12/100 | Train Loss: 0.0854 | Val rms_score: 0.6380
|
146 |
+
2025-09-23 10:09:35,663 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 13/100 | Train Loss: 0.0784 | Val rms_score: 0.6432
|
147 |
+
2025-09-23 10:09:46,047 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 14/100 | Train Loss: 0.0857 | Val rms_score: 0.6363
|
148 |
+
2025-09-23 10:09:46,203 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Global step of best model: 1470
|
149 |
+
2025-09-23 10:09:46,774 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Best model saved at epoch 14 with val rms_score: 0.6363
|
150 |
+
2025-09-23 10:09:57,340 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 15/100 | Train Loss: 0.0692 | Val rms_score: 0.6393
|
151 |
+
2025-09-23 10:10:07,480 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 16/100 | Train Loss: 0.0609 | Val rms_score: 0.6398
|
152 |
+
2025-09-23 10:10:18,019 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 17/100 | Train Loss: 0.0599 | Val rms_score: 0.6326
|
153 |
+
2025-09-23 10:10:18,168 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Global step of best model: 1785
|
154 |
+
2025-09-23 10:10:18,706 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Best model saved at epoch 17 with val rms_score: 0.6326
|
155 |
+
2025-09-23 10:10:28,737 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 18/100 | Train Loss: 0.0552 | Val rms_score: 0.6408
|
156 |
+
2025-09-23 10:10:38,839 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 19/100 | Train Loss: 0.0566 | Val rms_score: 0.6270
|
157 |
+
2025-09-23 10:10:39,029 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Global step of best model: 1995
|
158 |
+
2025-09-23 10:10:39,587 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Best model saved at epoch 19 with val rms_score: 0.6270
|
159 |
+
2025-09-23 10:10:51,031 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 20/100 | Train Loss: 0.0541 | Val rms_score: 0.6347
|
160 |
+
2025-09-23 10:11:01,299 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 21/100 | Train Loss: 0.0426 | Val rms_score: 0.6275
|
161 |
+
2025-09-23 10:11:12,005 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 22/100 | Train Loss: 0.0566 | Val rms_score: 0.6451
|
162 |
+
2025-09-23 10:11:22,078 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 23/100 | Train Loss: 0.0484 | Val rms_score: 0.6462
|
163 |
+
2025-09-23 10:11:32,250 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 24/100 | Train Loss: 0.0475 | Val rms_score: 0.6413
|
164 |
+
2025-09-23 10:11:42,465 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 25/100 | Train Loss: 0.0428 | Val rms_score: 0.6347
|
165 |
+
2025-09-23 10:11:52,644 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 26/100 | Train Loss: 0.0503 | Val rms_score: 0.6343
|
166 |
+
2025-09-23 10:12:03,263 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 27/100 | Train Loss: 0.0413 | Val rms_score: 0.6363
|
167 |
+
2025-09-23 10:12:13,689 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 28/100 | Train Loss: 0.0447 | Val rms_score: 0.6510
|
168 |
+
2025-09-23 10:12:25,022 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 29/100 | Train Loss: 0.0399 | Val rms_score: 0.6293
|
169 |
+
2025-09-23 10:12:35,237 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 30/100 | Train Loss: 0.0400 | Val rms_score: 0.6420
|
170 |
+
2025-09-23 10:12:45,540 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 31/100 | Train Loss: 0.0389 | Val rms_score: 0.6364
|
171 |
+
2025-09-23 10:12:56,141 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 32/100 | Train Loss: 0.0375 | Val rms_score: 0.6461
|
172 |
+
2025-09-23 10:13:06,424 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 33/100 | Train Loss: 0.0365 | Val rms_score: 0.6361
|
173 |
+
2025-09-23 10:13:16,506 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 34/100 | Train Loss: 0.0355 | Val rms_score: 0.6319
|
174 |
+
2025-09-23 10:13:26,464 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 35/100 | Train Loss: 0.0360 | Val rms_score: 0.6368
|
175 |
+
2025-09-23 10:13:36,530 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 36/100 | Train Loss: 0.0391 | Val rms_score: 0.6499
|
176 |
+
2025-09-23 10:13:46,714 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 37/100 | Train Loss: 0.0369 | Val rms_score: 0.6452
|
177 |
+
2025-09-23 10:13:56,868 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 38/100 | Train Loss: 0.0351 | Val rms_score: 0.6436
|
178 |
+
2025-09-23 10:14:08,398 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 39/100 | Train Loss: 0.0339 | Val rms_score: 0.6416
|
179 |
+
2025-09-23 10:14:18,694 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 40/100 | Train Loss: 0.0328 | Val rms_score: 0.6375
|
180 |
+
2025-09-23 10:14:28,827 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 41/100 | Train Loss: 0.0285 | Val rms_score: 0.6405
|
181 |
+
2025-09-23 10:14:39,234 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 42/100 | Train Loss: 0.0354 | Val rms_score: 0.6422
|
182 |
+
2025-09-23 10:14:49,181 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 43/100 | Train Loss: 0.0294 | Val rms_score: 0.6401
|
183 |
+
2025-09-23 10:14:59,321 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 44/100 | Train Loss: 0.0346 | Val rms_score: 0.6344
|
184 |
+
2025-09-23 10:15:09,716 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 45/100 | Train Loss: 0.0347 | Val rms_score: 0.6334
|
185 |
+
2025-09-23 10:15:20,062 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 46/100 | Train Loss: 0.0301 | Val rms_score: 0.6344
|
186 |
+
2025-09-23 10:15:30,702 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 47/100 | Train Loss: 0.0286 | Val rms_score: 0.6420
|
187 |
+
2025-09-23 10:15:42,166 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 48/100 | Train Loss: 0.0305 | Val rms_score: 0.6393
|
188 |
+
2025-09-23 10:15:52,478 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 49/100 | Train Loss: 0.0300 | Val rms_score: 0.6393
|
189 |
+
2025-09-23 10:16:02,748 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 50/100 | Train Loss: 0.0278 | Val rms_score: 0.6361
|
190 |
+
2025-09-23 10:16:12,658 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 51/100 | Train Loss: 0.0290 | Val rms_score: 0.6295
|
191 |
+
2025-09-23 10:16:23,369 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 52/100 | Train Loss: 0.0284 | Val rms_score: 0.6477
|
192 |
+
2025-09-23 10:16:33,558 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 53/100 | Train Loss: 0.0286 | Val rms_score: 0.6373
|
193 |
+
2025-09-23 10:16:43,897 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 54/100 | Train Loss: 0.0269 | Val rms_score: 0.6462
|
194 |
+
2025-09-23 10:16:53,988 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 55/100 | Train Loss: 0.0279 | Val rms_score: 0.6353
|
195 |
+
2025-09-23 10:17:03,774 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 56/100 | Train Loss: 0.0283 | Val rms_score: 0.6322
|
196 |
+
2025-09-23 10:17:14,181 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 57/100 | Train Loss: 0.0265 | Val rms_score: 0.6370
|
197 |
+
2025-09-23 10:17:26,061 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 58/100 | Train Loss: 0.0266 | Val rms_score: 0.6398
|
198 |
+
2025-09-23 10:17:36,506 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 59/100 | Train Loss: 0.0258 | Val rms_score: 0.6412
|
199 |
+
2025-09-23 10:17:47,030 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 60/100 | Train Loss: 0.0259 | Val rms_score: 0.6362
|
200 |
+
2025-09-23 10:17:57,384 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 61/100 | Train Loss: 0.0287 | Val rms_score: 0.6418
|
201 |
+
2025-09-23 10:18:07,993 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 62/100 | Train Loss: 0.0268 | Val rms_score: 0.6344
|
202 |
+
2025-09-23 10:18:18,262 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 63/100 | Train Loss: 0.0293 | Val rms_score: 0.6351
|
203 |
+
2025-09-23 10:18:28,208 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 64/100 | Train Loss: 0.0289 | Val rms_score: 0.6386
|
204 |
+
2025-09-23 10:18:38,182 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 65/100 | Train Loss: 0.0270 | Val rms_score: 0.6375
|
205 |
+
2025-09-23 10:18:48,285 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 66/100 | Train Loss: 0.0262 | Val rms_score: 0.6319
|
206 |
+
2025-09-23 10:18:59,938 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 67/100 | Train Loss: 0.0250 | Val rms_score: 0.6365
|
207 |
+
2025-09-23 10:19:10,125 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 68/100 | Train Loss: 0.0242 | Val rms_score: 0.6337
|
208 |
+
2025-09-23 10:19:20,121 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 69/100 | Train Loss: 0.0255 | Val rms_score: 0.6369
|
209 |
+
2025-09-23 10:19:30,532 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 70/100 | Train Loss: 0.0244 | Val rms_score: 0.6320
|
210 |
+
2025-09-23 10:19:40,909 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 71/100 | Train Loss: 0.0260 | Val rms_score: 0.6328
|
211 |
+
2025-09-23 10:19:51,458 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 72/100 | Train Loss: 0.0251 | Val rms_score: 0.6315
|
212 |
+
2025-09-23 10:20:01,700 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 73/100 | Train Loss: 0.0250 | Val rms_score: 0.6310
|
213 |
+
2025-09-23 10:20:11,896 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 74/100 | Train Loss: 0.0253 | Val rms_score: 0.6384
|
214 |
+
2025-09-23 10:20:22,433 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 75/100 | Train Loss: 0.0219 | Val rms_score: 0.6360
|
215 |
+
2025-09-23 10:20:32,678 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 76/100 | Train Loss: 0.0241 | Val rms_score: 0.6372
|
216 |
+
2025-09-23 10:20:44,460 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 77/100 | Train Loss: 0.0228 | Val rms_score: 0.6366
|
217 |
+
2025-09-23 10:20:54,594 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 78/100 | Train Loss: 0.0222 | Val rms_score: 0.6355
|
218 |
+
2025-09-23 10:21:04,873 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 79/100 | Train Loss: 0.0230 | Val rms_score: 0.6361
|
219 |
+
2025-09-23 10:21:15,152 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 80/100 | Train Loss: 0.0233 | Val rms_score: 0.6415
|
220 |
+
2025-09-23 10:21:25,348 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 81/100 | Train Loss: 0.0241 | Val rms_score: 0.6329
|
221 |
+
2025-09-23 10:21:35,669 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 82/100 | Train Loss: 0.0209 | Val rms_score: 0.6346
|
222 |
+
2025-09-23 10:21:45,689 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 83/100 | Train Loss: 0.0258 | Val rms_score: 0.6416
|
223 |
+
2025-09-23 10:21:55,759 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 84/100 | Train Loss: 0.0226 | Val rms_score: 0.6353
|
224 |
+
2025-09-23 10:22:05,914 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 85/100 | Train Loss: 0.0222 | Val rms_score: 0.6396
|
225 |
+
2025-09-23 10:22:17,421 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 86/100 | Train Loss: 0.0215 | Val rms_score: 0.6393
|
226 |
+
2025-09-23 10:22:27,907 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 87/100 | Train Loss: 0.0235 | Val rms_score: 0.6405
|
227 |
+
2025-09-23 10:22:38,215 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 88/100 | Train Loss: 0.0234 | Val rms_score: 0.6351
|
228 |
+
2025-09-23 10:22:48,492 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 89/100 | Train Loss: 0.0224 | Val rms_score: 0.6358
|
229 |
+
2025-09-23 10:22:58,638 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 90/100 | Train Loss: 0.0217 | Val rms_score: 0.6343
|
230 |
+
2025-09-23 10:23:08,740 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 91/100 | Train Loss: 0.0220 | Val rms_score: 0.6472
|
231 |
+
2025-09-23 10:23:18,940 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 92/100 | Train Loss: 0.0206 | Val rms_score: 0.6382
|
232 |
+
2025-09-23 10:23:29,163 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 93/100 | Train Loss: 0.0225 | Val rms_score: 0.6370
|
233 |
+
2025-09-23 10:23:39,254 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 94/100 | Train Loss: 0.0222 | Val rms_score: 0.6350
|
234 |
+
2025-09-23 10:23:49,248 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 95/100 | Train Loss: 0.0203 | Val rms_score: 0.6373
|
235 |
+
2025-09-23 10:24:00,773 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 96/100 | Train Loss: 0.0210 | Val rms_score: 0.6346
|
236 |
+
2025-09-23 10:24:11,314 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 97/100 | Train Loss: 0.0213 | Val rms_score: 0.6364
|
237 |
+
2025-09-23 10:24:21,537 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 98/100 | Train Loss: 0.0214 | Val rms_score: 0.6340
|
238 |
+
2025-09-23 10:24:31,760 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 99/100 | Train Loss: 0.0208 | Val rms_score: 0.6342
|
239 |
+
2025-09-23 10:24:41,991 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 100/100 | Train Loss: 0.0219 | Val rms_score: 0.6349
|
240 |
+
2025-09-23 10:24:43,024 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Test rms_score: 0.6620
|
241 |
+
2025-09-23 10:24:43,323 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Starting triplicate run 3 for dataset lipo at 2025-09-23_10-24-43
|
242 |
+
2025-09-23 10:24:52,233 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 1/100 | Train Loss: 0.4562 | Val rms_score: 0.7717
|
243 |
+
2025-09-23 10:24:52,233 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Global step of best model: 105
|
244 |
+
2025-09-23 10:24:52,750 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Best model saved at epoch 1 with val rms_score: 0.7717
|
245 |
+
2025-09-23 10:25:02,976 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 2/100 | Train Loss: 0.3625 | Val rms_score: 0.7242
|
246 |
+
2025-09-23 10:25:03,146 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Global step of best model: 210
|
247 |
+
2025-09-23 10:25:03,675 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Best model saved at epoch 2 with val rms_score: 0.7242
|
248 |
+
2025-09-23 10:25:13,870 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 3/100 | Train Loss: 0.2938 | Val rms_score: 0.6620
|
249 |
+
2025-09-23 10:25:14,046 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Global step of best model: 315
|
250 |
+
2025-09-23 10:25:14,581 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Best model saved at epoch 3 with val rms_score: 0.6620
|
251 |
+
2025-09-23 10:25:24,777 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 4/100 | Train Loss: 0.2281 | Val rms_score: 0.6882
|
252 |
+
2025-09-23 10:25:34,563 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 5/100 | Train Loss: 0.1925 | Val rms_score: 0.6486
|
253 |
+
2025-09-23 10:25:34,751 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Global step of best model: 525
|
254 |
+
2025-09-23 10:25:35,274 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Best model saved at epoch 5 with val rms_score: 0.6486
|
255 |
+
2025-09-23 10:25:45,249 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 6/100 | Train Loss: 0.1812 | Val rms_score: 0.6793
|
256 |
+
2025-09-23 10:25:55,476 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 7/100 | Train Loss: 0.1545 | Val rms_score: 0.6711
|
257 |
+
2025-09-23 10:26:05,420 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 8/100 | Train Loss: 0.1313 | Val rms_score: 0.6632
|
258 |
+
2025-09-23 10:26:15,293 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 9/100 | Train Loss: 0.1118 | Val rms_score: 0.6382
|
259 |
+
2025-09-23 10:26:15,443 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Global step of best model: 945
|
260 |
+
2025-09-23 10:26:15,994 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Best model saved at epoch 9 with val rms_score: 0.6382
|
261 |
+
2025-09-23 10:26:27,459 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 10/100 | Train Loss: 0.1013 | Val rms_score: 0.6393
|
262 |
+
2025-09-23 10:26:37,695 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 11/100 | Train Loss: 0.0920 | Val rms_score: 0.6410
|
263 |
+
2025-09-23 10:26:48,077 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 12/100 | Train Loss: 0.0865 | Val rms_score: 0.6355
|
264 |
+
2025-09-23 10:26:48,458 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Global step of best model: 1260
|
265 |
+
2025-09-23 10:26:49,002 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Best model saved at epoch 12 with val rms_score: 0.6355
|
266 |
+
2025-09-23 10:26:59,029 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 13/100 | Train Loss: 0.0837 | Val rms_score: 0.6453
|
267 |
+
2025-09-23 10:27:09,134 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 14/100 | Train Loss: 0.0790 | Val rms_score: 0.6521
|
268 |
+
2025-09-23 10:27:19,214 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 15/100 | Train Loss: 0.0708 | Val rms_score: 0.6351
|
269 |
+
2025-09-23 10:27:19,394 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Global step of best model: 1575
|
270 |
+
2025-09-23 10:27:19,921 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Best model saved at epoch 15 with val rms_score: 0.6351
|
271 |
+
2025-09-23 10:27:30,112 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 16/100 | Train Loss: 0.0684 | Val rms_score: 0.6521
|
272 |
+
2025-09-23 10:27:40,521 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 17/100 | Train Loss: 0.0614 | Val rms_score: 0.6536
|
273 |
+
2025-09-23 10:27:50,456 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 18/100 | Train Loss: 0.0597 | Val rms_score: 0.6467
|
274 |
+
2025-09-23 10:28:00,477 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 19/100 | Train Loss: 0.0589 | Val rms_score: 0.6391
|
275 |
+
2025-09-23 10:28:11,963 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 20/100 | Train Loss: 0.0556 | Val rms_score: 0.6334
|
276 |
+
2025-09-23 10:28:12,108 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Global step of best model: 2100
|
277 |
+
2025-09-23 10:28:12,644 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Best model saved at epoch 20 with val rms_score: 0.6334
|
278 |
+
2025-09-23 10:28:22,814 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 21/100 | Train Loss: 0.0426 | Val rms_score: 0.6333
|
279 |
+
2025-09-23 10:28:23,269 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Global step of best model: 2205
|
280 |
+
2025-09-23 10:28:23,798 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Best model saved at epoch 21 with val rms_score: 0.6333
|
281 |
+
2025-09-23 10:28:33,653 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 22/100 | Train Loss: 0.0516 | Val rms_score: 0.6319
|
282 |
+
2025-09-23 10:28:33,834 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Global step of best model: 2310
|
283 |
+
2025-09-23 10:28:34,382 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Best model saved at epoch 22 with val rms_score: 0.6319
|
284 |
+
2025-09-23 10:28:44,351 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 23/100 | Train Loss: 0.0464 | Val rms_score: 0.6385
|
285 |
+
2025-09-23 10:28:54,325 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 24/100 | Train Loss: 0.0543 | Val rms_score: 0.6400
|
286 |
+
2025-09-23 10:29:04,331 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 25/100 | Train Loss: 0.0450 | Val rms_score: 0.6382
|
287 |
+
2025-09-23 10:29:14,462 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 26/100 | Train Loss: 0.0479 | Val rms_score: 0.6528
|
288 |
+
2025-09-23 10:29:24,775 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 27/100 | Train Loss: 0.0415 | Val rms_score: 0.6347
|
289 |
+
2025-09-23 10:29:34,980 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 28/100 | Train Loss: 0.0414 | Val rms_score: 0.6483
|
290 |
+
2025-09-23 10:29:46,356 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 29/100 | Train Loss: 0.0401 | Val rms_score: 0.6325
|
291 |
+
2025-09-23 10:29:56,471 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 30/100 | Train Loss: 0.0413 | Val rms_score: 0.6322
|
292 |
+
2025-09-23 10:30:06,586 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 31/100 | Train Loss: 0.0398 | Val rms_score: 0.6438
|
293 |
+
2025-09-23 10:30:16,902 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 32/100 | Train Loss: 0.0406 | Val rms_score: 0.6349
|
294 |
+
2025-09-23 10:30:26,900 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 33/100 | Train Loss: 0.0361 | Val rms_score: 0.6386
|
295 |
+
2025-09-23 10:30:36,796 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 34/100 | Train Loss: 0.0350 | Val rms_score: 0.6350
|
296 |
+
2025-09-23 10:30:46,638 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 35/100 | Train Loss: 0.0356 | Val rms_score: 0.6488
|
297 |
+
2025-09-23 10:30:56,828 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 36/100 | Train Loss: 0.0352 | Val rms_score: 0.6347
|
298 |
+
2025-09-23 10:31:07,287 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 37/100 | Train Loss: 0.0340 | Val rms_score: 0.6424
|
299 |
+
2025-09-23 10:31:17,397 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 38/100 | Train Loss: 0.0351 | Val rms_score: 0.6389
|
300 |
+
2025-09-23 10:31:28,795 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 39/100 | Train Loss: 0.0331 | Val rms_score: 0.6358
|
301 |
+
2025-09-23 10:31:38,845 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 40/100 | Train Loss: 0.0319 | Val rms_score: 0.6374
|
302 |
+
2025-09-23 10:31:48,809 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 41/100 | Train Loss: 0.0340 | Val rms_score: 0.6336
|
303 |
+
2025-09-23 10:31:59,259 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 42/100 | Train Loss: 0.0328 | Val rms_score: 0.6442
|
304 |
+
2025-09-23 10:32:09,435 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 43/100 | Train Loss: 0.0336 | Val rms_score: 0.6342
|
305 |
+
2025-09-23 10:32:19,544 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 44/100 | Train Loss: 0.0299 | Val rms_score: 0.6435
|
306 |
+
2025-09-23 10:32:29,474 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 45/100 | Train Loss: 0.0314 | Val rms_score: 0.6434
|
307 |
+
2025-09-23 10:32:39,468 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 46/100 | Train Loss: 0.0328 | Val rms_score: 0.6471
|
308 |
+
2025-09-23 10:32:49,924 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 47/100 | Train Loss: 0.0263 | Val rms_score: 0.6409
|
309 |
+
2025-09-23 10:33:00,995 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 48/100 | Train Loss: 0.0283 | Val rms_score: 0.6449
|
310 |
+
2025-09-23 10:33:11,119 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 49/100 | Train Loss: 0.0286 | Val rms_score: 0.6372
|
311 |
+
2025-09-23 10:33:21,340 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 50/100 | Train Loss: 0.0294 | Val rms_score: 0.6402
|
312 |
+
2025-09-23 10:33:31,500 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 51/100 | Train Loss: 0.0288 | Val rms_score: 0.6407
|
313 |
+
2025-09-23 10:33:41,934 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 52/100 | Train Loss: 0.0306 | Val rms_score: 0.6449
|
314 |
+
2025-09-23 10:33:51,885 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 53/100 | Train Loss: 0.0294 | Val rms_score: 0.6428
|
315 |
+
2025-09-23 10:34:01,865 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 54/100 | Train Loss: 0.0276 | Val rms_score: 0.6403
|
316 |
+
2025-09-23 10:34:11,850 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 55/100 | Train Loss: 0.0290 | Val rms_score: 0.6443
|
317 |
+
2025-09-23 10:34:21,934 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 56/100 | Train Loss: 0.0279 | Val rms_score: 0.6400
|
318 |
+
2025-09-23 10:34:32,328 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 57/100 | Train Loss: 0.0259 | Val rms_score: 0.6386
|
319 |
+
2025-09-23 10:34:43,740 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 58/100 | Train Loss: 0.0264 | Val rms_score: 0.6358
|
320 |
+
2025-09-23 10:34:53,665 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 59/100 | Train Loss: 0.0275 | Val rms_score: 0.6410
|
321 |
+
2025-09-23 10:35:03,645 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 60/100 | Train Loss: 0.0256 | Val rms_score: 0.6351
|
322 |
+
2025-09-23 10:35:13,813 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 61/100 | Train Loss: 0.0275 | Val rms_score: 0.6409
|
323 |
+
2025-09-23 10:35:24,016 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 62/100 | Train Loss: 0.0250 | Val rms_score: 0.6418
|
324 |
+
2025-09-23 10:35:34,108 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 63/100 | Train Loss: 0.0246 | Val rms_score: 0.6349
|
325 |
+
2025-09-23 10:35:44,240 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 64/100 | Train Loss: 0.0252 | Val rms_score: 0.6367
|
326 |
+
2025-09-23 10:35:54,489 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 65/100 | Train Loss: 0.0255 | Val rms_score: 0.6368
|
327 |
+
2025-09-23 10:36:04,627 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 66/100 | Train Loss: 0.0238 | Val rms_score: 0.6422
|
328 |
+
2025-09-23 10:36:16,096 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 67/100 | Train Loss: 0.0254 | Val rms_score: 0.6403
|
329 |
+
2025-09-23 10:36:26,014 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 68/100 | Train Loss: 0.0243 | Val rms_score: 0.6389
|
330 |
+
2025-09-23 10:36:36,041 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 69/100 | Train Loss: 0.0245 | Val rms_score: 0.6351
|
331 |
+
2025-09-23 10:36:46,288 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 70/100 | Train Loss: 0.0259 | Val rms_score: 0.6346
|
332 |
+
2025-09-23 10:36:56,504 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 71/100 | Train Loss: 0.0241 | Val rms_score: 0.6382
|
333 |
+
2025-09-23 10:37:06,930 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 72/100 | Train Loss: 0.0233 | Val rms_score: 0.6330
|
334 |
+
2025-09-23 10:37:16,955 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 73/100 | Train Loss: 0.0233 | Val rms_score: 0.6362
|
335 |
+
2025-09-23 10:37:27,060 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 74/100 | Train Loss: 0.0235 | Val rms_score: 0.6354
|
336 |
+
2025-09-23 10:37:36,788 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 75/100 | Train Loss: 0.0224 | Val rms_score: 0.6380
|
337 |
+
2025-09-23 10:37:47,009 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 76/100 | Train Loss: 0.0241 | Val rms_score: 0.6329
|
338 |
+
2025-09-23 10:37:58,654 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 77/100 | Train Loss: 0.0229 | Val rms_score: 0.6344
|
339 |
+
2025-09-23 10:38:08,876 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 78/100 | Train Loss: 0.0234 | Val rms_score: 0.6325
|
340 |
+
2025-09-23 10:38:19,071 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 79/100 | Train Loss: 0.0219 | Val rms_score: 0.6406
|
341 |
+
2025-09-23 10:38:29,310 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 80/100 | Train Loss: 0.0236 | Val rms_score: 0.6328
|
342 |
+
2025-09-23 10:38:39,340 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 81/100 | Train Loss: 0.0244 | Val rms_score: 0.6349
|
343 |
+
2025-09-23 10:38:49,821 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 82/100 | Train Loss: 0.0234 | Val rms_score: 0.6359
|
344 |
+
2025-09-23 10:38:59,884 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 83/100 | Train Loss: 0.0214 | Val rms_score: 0.6378
|
345 |
+
2025-09-23 10:39:09,988 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 84/100 | Train Loss: 0.0249 | Val rms_score: 0.6410
|
346 |
+
2025-09-23 10:39:20,128 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 85/100 | Train Loss: 0.0227 | Val rms_score: 0.6345
|
347 |
+
2025-09-23 10:39:31,259 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 86/100 | Train Loss: 0.0246 | Val rms_score: 0.6356
|
348 |
+
2025-09-23 10:39:41,392 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 87/100 | Train Loss: 0.0238 | Val rms_score: 0.6344
|
349 |
+
2025-09-23 10:39:50,955 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 88/100 | Train Loss: 0.0219 | Val rms_score: 0.6399
|
350 |
+
2025-09-23 10:40:00,889 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 89/100 | Train Loss: 0.0226 | Val rms_score: 0.6335
|
351 |
+
2025-09-23 10:40:10,872 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 90/100 | Train Loss: 0.0217 | Val rms_score: 0.6406
|
352 |
+
2025-09-23 10:40:21,041 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 91/100 | Train Loss: 0.0243 | Val rms_score: 0.6346
|
353 |
+
2025-09-23 10:40:31,445 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 92/100 | Train Loss: 0.0225 | Val rms_score: 0.6358
|
354 |
+
2025-09-23 10:40:41,529 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 93/100 | Train Loss: 0.0228 | Val rms_score: 0.6357
|
355 |
+
2025-09-23 10:40:51,655 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 94/100 | Train Loss: 0.0218 | Val rms_score: 0.6393
|
356 |
+
2025-09-23 10:41:01,858 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 95/100 | Train Loss: 0.0226 | Val rms_score: 0.6352
|
357 |
+
2025-09-23 10:41:13,215 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 96/100 | Train Loss: 0.0224 | Val rms_score: 0.6359
|
358 |
+
2025-09-23 10:41:23,750 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 97/100 | Train Loss: 0.0203 | Val rms_score: 0.6354
|
359 |
+
2025-09-23 10:41:34,078 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 98/100 | Train Loss: 0.0207 | Val rms_score: 0.6316
|
360 |
+
2025-09-23 10:41:34,222 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Global step of best model: 10290
|
361 |
+
2025-09-23 10:41:34,762 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Best model saved at epoch 98 with val rms_score: 0.6316
|
362 |
+
2025-09-23 10:41:44,926 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 99/100 | Train Loss: 0.0219 | Val rms_score: 0.6350
|
363 |
+
2025-09-23 10:41:55,113 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 100/100 | Train Loss: 0.0214 | Val rms_score: 0.6345
|
364 |
+
2025-09-23 10:41:56,160 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Test rms_score: 0.6330
|
365 |
+
2025-09-23 10:41:56,482 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Final Triplicate Test Results — Avg rms_score: 0.6505, Std Dev: 0.0126
|
model.safetensors
ADDED
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
1 |
+
version https://git-lfs.github.com/spec/v1
|
2 |
+
oid sha256:de6926433af59f86e8cdb62ccb573fb160752386612a3e9c36860d4a0e2f48c8
|
3 |
+
size 460409308
|
modeling_modchembert.py
ADDED
@@ -0,0 +1,554 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
# Copyright 2025 Emmanuel Cortes, All Rights Reserved.
|
2 |
+
#
|
3 |
+
# Copyright 2024 Answer.AI, LightOn, and contributors, and the HuggingFace Inc. team. All rights reserved.
|
4 |
+
#
|
5 |
+
#
|
6 |
+
# Licensed under the Apache License, Version 2.0 (the "License");
|
7 |
+
# you may not use this file except in compliance with the License.
|
8 |
+
# You may obtain a copy of the License at
|
9 |
+
#
|
10 |
+
# http://www.apache.org/licenses/LICENSE-2.0
|
11 |
+
#
|
12 |
+
# Unless required by applicable law or agreed to in writing, software
|
13 |
+
# distributed under the License is distributed on an "AS IS" BASIS,
|
14 |
+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
15 |
+
# See the License for the specific language governing permissions and
|
16 |
+
# limitations under the License.
|
17 |
+
|
18 |
+
# This file is adapted from the transformers library.
|
19 |
+
# Modifications include:
|
20 |
+
# - Additional classifier_pooling options for ModChemBertForSequenceClassification
|
21 |
+
# - sum_mean, sum_sum, mean_sum, mean_mean: from ChemLM (utilizes all hidden states)
|
22 |
+
# - max_cls, cls_mha, max_seq_mha: from MaxPoolBERT (utilizes last k hidden states)
|
23 |
+
# - max_seq_mean: a merge between sum_mean and max_cls (utilizes last k hidden states)
|
24 |
+
# - Addition of ModChemBertPoolingAttention for cls_mha and max_seq_mha pooling options
|
25 |
+
|
26 |
+
import copy
|
27 |
+
import math
|
28 |
+
import typing
|
29 |
+
from contextlib import nullcontext
|
30 |
+
|
31 |
+
import torch
|
32 |
+
import torch.nn as nn
|
33 |
+
from torch.nn import BCEWithLogitsLoss, CrossEntropyLoss, MSELoss
|
34 |
+
from transformers.modeling_attn_mask_utils import _prepare_4d_attention_mask
|
35 |
+
from transformers.modeling_outputs import MaskedLMOutput, SequenceClassifierOutput
|
36 |
+
from transformers.models.modernbert.modeling_modernbert import (
|
37 |
+
MODERNBERT_ATTENTION_FUNCTION,
|
38 |
+
ModernBertModel,
|
39 |
+
ModernBertPredictionHead,
|
40 |
+
ModernBertPreTrainedModel,
|
41 |
+
ModernBertRotaryEmbedding,
|
42 |
+
_pad_modernbert_output,
|
43 |
+
_unpad_modernbert_input,
|
44 |
+
)
|
45 |
+
from transformers.utils import logging
|
46 |
+
|
47 |
+
from .configuration_modchembert import ModChemBertConfig
|
48 |
+
|
49 |
+
logger = logging.get_logger(__name__)
|
50 |
+
|
51 |
+
|
52 |
+
class InitWeightsMixin:
    """Mixin adding ModChemBert-specific weight initialization on top of the parent's.

    Re-initializes the task-head Linear layers (MLM decoder, classifier, pooling
    attention projections) with truncated-normal weights whose std follows the
    ModernBert in/out/final_out convention, after the base class has run its own
    `_init_weights`.
    """

    def _init_weights(self, module: nn.Module):
        # Base-class initialization first; this mixin only re-touches head layers.
        super()._init_weights(module)  # type: ignore

        cutoff = self.config.initializer_cutoff_factor  # type: ignore
        if cutoff is None:
            cutoff = 3

        def _trunc_normal_linear(layer: nn.Module, std: float):
            # Truncated-normal init (clipped at +/- cutoff * std) with zeroed bias;
            # silently skipped for non-Linear modules.
            if not isinstance(layer, nn.Linear):
                return
            nn.init.trunc_normal_(
                layer.weight,
                mean=0.0,
                std=std,
                a=-cutoff * std,
                b=cutoff * std,
            )
            if layer.bias is not None:
                nn.init.zeros_(layer.bias)

        # Std-dev schedule following ModernBert: "in" for input projections,
        # "out" scaled down by depth, "final_out" for the last classifier layer.
        in_std = self.config.initializer_range  # type: ignore
        out_std = self.config.initializer_range / math.sqrt(2.0 * self.config.num_hidden_layers)  # type: ignore
        final_out_std = self.config.hidden_size**-0.5  # type: ignore

        if isinstance(module, ModChemBertForMaskedLM):
            _trunc_normal_linear(module.decoder, out_std)
        elif isinstance(module, ModChemBertForSequenceClassification):
            _trunc_normal_linear(module.classifier, final_out_std)
        elif isinstance(module, ModChemBertPoolingAttention):
            _trunc_normal_linear(module.Wq, in_std)
            _trunc_normal_linear(module.Wk, in_std)
            _trunc_normal_linear(module.Wv, in_std)
            _trunc_normal_linear(module.Wo, out_std)
88 |
+
|
89 |
+
class ModChemBertPoolingAttention(nn.Module):
    """Performs multi-headed self attention on a batch of sequences.

    Used by the ``cls_mha`` / ``max_seq_mha`` classifier-pooling strategies: the
    query comes from the (expanded) CLS representation while keys/values come
    from the full (possibly max-pooled) sequence. Always uses global attention
    via the "sdpa" ModernBert attention path.
    """

    def __init__(self, config: ModChemBertConfig):
        super().__init__()
        self.config = copy.deepcopy(config)
        # Override num_attention_heads to use classifier_pooling_num_attention_heads
        self.config.num_attention_heads = config.classifier_pooling_num_attention_heads
        # Override attention_dropout to use classifier_pooling_attention_dropout
        self.config.attention_dropout = config.classifier_pooling_attention_dropout

        if config.hidden_size % config.num_attention_heads != 0:
            raise ValueError(
                f"The hidden size ({config.hidden_size}) is not a multiple of the number of attention heads "
                f"({config.num_attention_heads})"
            )

        # NOTE(review): the attributes below are derived from the *original* `config`,
        # not the overridden `self.config`, so `classifier_pooling_num_attention_heads`
        # and `classifier_pooling_attention_dropout` do not influence the head count /
        # dropout actually used in `forward`. Confirm whether this is intentional
        # (changing it would alter behavior of already-trained checkpoints).
        self.attention_dropout = config.attention_dropout
        self.num_heads = config.num_attention_heads
        self.head_dim = config.hidden_size // config.num_attention_heads
        self.all_head_size = self.head_dim * self.num_heads
        self.Wq = nn.Linear(config.hidden_size, self.all_head_size, bias=config.attention_bias)
        self.Wk = nn.Linear(config.hidden_size, self.all_head_size, bias=config.attention_bias)
        self.Wv = nn.Linear(config.hidden_size, self.all_head_size, bias=config.attention_bias)

        # Use global attention
        self.local_attention = (-1, -1)
        rope_theta = config.global_rope_theta
        # sdpa path from original ModernBert implementation
        config_copy = copy.deepcopy(config)
        config_copy.rope_theta = rope_theta
        self.rotary_emb = ModernBertRotaryEmbedding(config=config_copy)

        self.Wo = nn.Linear(config.hidden_size, config.hidden_size, bias=config.attention_bias)
        self.out_drop = nn.Dropout(config.attention_dropout) if config.attention_dropout > 0.0 else nn.Identity()
        self.pruned_heads = set()

    def forward(
        self,
        q: torch.Tensor,
        kv: torch.Tensor,
        attention_mask: torch.Tensor | None = None,
        **kwargs,
    ) -> torch.Tensor:
        """Cross-attend `q` over `kv`.

        Args:
            q: Query tensor of shape (batch, seq_len, hidden) — callers expand the
                CLS vector to seq_len before passing it in.
            kv: Key/value source tensor of shape (batch, seq_len, hidden).
            attention_mask: Optional (batch, seq_len) mask of valid tokens;
                defaults to all-ones (no padding).

        Returns:
            Attention output of shape (batch, seq_len, hidden).
        """
        bs, seq_len = kv.shape[:2]
        q_proj: torch.Tensor = self.Wq(q)
        k_proj: torch.Tensor = self.Wk(kv)
        v_proj: torch.Tensor = self.Wv(kv)
        # Pack projections into the qkv layout expected by the ModernBert sdpa kernel.
        qkv = torch.stack(
            (
                q_proj.reshape(bs, seq_len, self.num_heads, self.head_dim),
                k_proj.reshape(bs, seq_len, self.num_heads, self.head_dim),
                v_proj.reshape(bs, seq_len, self.num_heads, self.head_dim),
            ),
            dim=2,
        )  # (bs, seq_len, 3, num_heads, head_dim)

        device = kv.device
        if attention_mask is None:
            attention_mask = torch.ones((bs, seq_len), device=device, dtype=torch.bool)
        position_ids = torch.arange(seq_len, device=device).unsqueeze(0).long()

        attn_outputs = MODERNBERT_ATTENTION_FUNCTION["sdpa"](
            self,
            qkv=qkv,
            attention_mask=_prepare_4d_attention_mask(attention_mask, kv.dtype),
            sliding_window_mask=None,  # not needed when using global attention
            position_ids=position_ids,
            local_attention=self.local_attention,
            bs=bs,
            dim=self.all_head_size,
            **kwargs,
        )
        hidden_states = attn_outputs[0]
        # Output projection followed by (optional) dropout.
        hidden_states = self.out_drop(self.Wo(hidden_states))

        return hidden_states
166 |
+
|
167 |
+
|
168 |
+
class ModChemBertForMaskedLM(InitWeightsMixin, ModernBertPreTrainedModel):
    """ModChemBert encoder with a masked-language-modeling head.

    Mirrors ModernBertForMaskedLM: shared encoder, prediction head, and a
    decoder Linear tied to the input embeddings via `_tied_weights_keys`.
    """

    config_class = ModChemBertConfig
    _tied_weights_keys = ["decoder.weight"]

    def __init__(self, config: ModChemBertConfig):
        super().__init__(config)
        self.config = config
        self.model = ModernBertModel(config)
        self.head = ModernBertPredictionHead(config)
        self.decoder = nn.Linear(config.hidden_size, config.vocab_size, bias=config.decoder_bias)

        # Sparse prediction: compute logits only on masked positions (saves memory).
        self.sparse_prediction = self.config.sparse_prediction
        self.sparse_pred_ignore_index = self.config.sparse_pred_ignore_index

        # Initialize weights and apply final processing
        self.post_init()

    def get_output_embeddings(self):
        return self.decoder

    def set_output_embeddings(self, new_embeddings: nn.Linear):
        self.decoder = new_embeddings

    @torch.compile(dynamic=True)
    def compiled_head(self, output: torch.Tensor) -> torch.Tensor:
        # Compiled fast path for head + decoder, used when reference_compile is set.
        return self.decoder(self.head(output))

    def forward(
        self,
        input_ids: torch.LongTensor | None = None,
        attention_mask: torch.Tensor | None = None,
        sliding_window_mask: torch.Tensor | None = None,
        position_ids: torch.Tensor | None = None,
        inputs_embeds: torch.Tensor | None = None,
        labels: torch.Tensor | None = None,
        indices: torch.Tensor | None = None,
        cu_seqlens: torch.Tensor | None = None,
        max_seqlen: int | None = None,
        batch_size: int | None = None,
        seq_len: int | None = None,
        output_attentions: bool | None = None,
        output_hidden_states: bool | None = None,
        return_dict: bool | None = None,
        **kwargs,
    ) -> tuple[torch.Tensor] | tuple[torch.Tensor, typing.Any] | MaskedLMOutput:
        r"""
        sliding_window_mask (`torch.Tensor` of shape `(batch_size, sequence_length)`, *optional*):
            Mask to avoid performing attention on padding or far-away tokens. In ModernBert, only every few layers
            perform global attention, while the rest perform local attention. This mask is used to avoid attending to
            far-away tokens in the local attention layers when not using Flash Attention.
        indices (`torch.Tensor` of shape `(total_unpadded_tokens,)`, *optional*):
            Indices of the non-padding tokens in the input sequence. Used for unpadding the output.
        cu_seqlens (`torch.Tensor` of shape `(batch + 1,)`, *optional*):
            Cumulative sequence lengths of the input sequences. Used to index the unpadded tensors.
        max_seqlen (`int`, *optional*):
            Maximum sequence length in the batch excluding padding tokens. Used to unpad input_ids & pad output tensors.
        batch_size (`int`, *optional*):
            Batch size of the input sequences. Used to pad the output tensors.
        seq_len (`int`, *optional*):
            Sequence length of the input sequences including padding tokens. Used to pad the output tensors.
        """
        return_dict = return_dict if return_dict is not None else self.config.use_return_dict
        self._maybe_set_compile()

        # Flash-attention path operates on unpadded (flattened) tokens; build the
        # unpadding metadata here if the caller did not supply it.
        if self.config._attn_implementation == "flash_attention_2":  # noqa: SIM102
            if indices is None and cu_seqlens is None and max_seqlen is None:
                if batch_size is None and seq_len is None:
                    if inputs_embeds is not None:
                        batch_size, seq_len = inputs_embeds.shape[:2]
                    else:
                        batch_size, seq_len = input_ids.shape[:2]  # type: ignore
                device = input_ids.device if input_ids is not None else inputs_embeds.device  # type: ignore

                if attention_mask is None:
                    attention_mask = torch.ones((batch_size, seq_len), device=device, dtype=torch.bool)  # type: ignore

                if inputs_embeds is None:
                    # no_grad: unpadding is pure index bookkeeping, no gradients needed.
                    with torch.no_grad():
                        input_ids, indices, cu_seqlens, max_seqlen, position_ids, labels = _unpad_modernbert_input(
                            inputs=input_ids,  # type: ignore
                            attention_mask=attention_mask,  # type: ignore
                            position_ids=position_ids,
                            labels=labels,
                        )
                else:
                    inputs_embeds, indices, cu_seqlens, max_seqlen, position_ids, labels = _unpad_modernbert_input(
                        inputs=inputs_embeds,
                        attention_mask=attention_mask,  # type: ignore
                        position_ids=position_ids,
                        labels=labels,
                    )

        outputs = self.model(
            input_ids=input_ids,
            attention_mask=attention_mask,
            sliding_window_mask=sliding_window_mask,
            position_ids=position_ids,
            inputs_embeds=inputs_embeds,
            indices=indices,
            cu_seqlens=cu_seqlens,
            max_seqlen=max_seqlen,
            batch_size=batch_size,
            seq_len=seq_len,
            output_attentions=output_attentions,
            output_hidden_states=output_hidden_states,
            return_dict=return_dict,
        )
        last_hidden_state = outputs[0]

        if self.sparse_prediction and labels is not None:
            # flatten labels and output first
            labels = labels.view(-1)
            last_hidden_state = last_hidden_state.view(labels.shape[0], -1)

            # then filter out the non-masked tokens
            mask_tokens = labels != self.sparse_pred_ignore_index
            last_hidden_state = last_hidden_state[mask_tokens]
            labels = labels[mask_tokens]

        logits = (
            self.compiled_head(last_hidden_state)
            if self.config.reference_compile
            else self.decoder(self.head(last_hidden_state))
        )

        loss = None
        if labels is not None:
            loss = self.loss_function(logits, labels, vocab_size=self.config.vocab_size, **kwargs)

        if self.config._attn_implementation == "flash_attention_2":
            # Re-pad logits to (batch, seq_len, vocab); skip grad tracking unless the
            # config explicitly requests gradients through the repad (and labels exist).
            with nullcontext() if self.config.repad_logits_with_grad or labels is None else torch.no_grad():
                logits = _pad_modernbert_output(inputs=logits, indices=indices, batch=batch_size, seqlen=seq_len)  # type: ignore

        if not return_dict:
            output = (logits,)
            return ((loss,) + output) if loss is not None else output

        return MaskedLMOutput(
            loss=loss,
            logits=typing.cast(torch.FloatTensor, logits),
            hidden_states=outputs.hidden_states,
            attentions=outputs.attentions,
        )
311 |
+
|
312 |
+
|
313 |
+
class ModChemBertForSequenceClassification(InitWeightsMixin, ModernBertPreTrainedModel):
    """ModChemBert encoder with a sequence classification/regression head.

    Supports the extended `classifier_pooling` strategies documented on
    `_pool_modchembert_output`; strategies `cls_mha` / `max_seq_mha` additionally
    instantiate a `ModChemBertPoolingAttention` module.
    """

    config_class = ModChemBertConfig

    def __init__(self, config: ModChemBertConfig):
        super().__init__(config)
        self.num_labels = config.num_labels
        self.config = config

        self.model = ModernBertModel(config)
        # Only the MHA-based pooling strategies need an attention module.
        if self.config.classifier_pooling in {"cls_mha", "max_seq_mha"}:
            self.pooling_attn = ModChemBertPoolingAttention(config=self.config)
        else:
            self.pooling_attn = None
        self.head = ModernBertPredictionHead(config)
        self.drop = torch.nn.Dropout(config.classifier_dropout)
        self.classifier = nn.Linear(config.hidden_size, config.num_labels)

        # Initialize weights and apply final processing
        self.post_init()

    def forward(
        self,
        input_ids: torch.LongTensor | None = None,
        attention_mask: torch.Tensor | None = None,
        sliding_window_mask: torch.Tensor | None = None,
        position_ids: torch.Tensor | None = None,
        inputs_embeds: torch.Tensor | None = None,
        labels: torch.Tensor | None = None,
        indices: torch.Tensor | None = None,
        cu_seqlens: torch.Tensor | None = None,
        max_seqlen: int | None = None,
        batch_size: int | None = None,
        seq_len: int | None = None,
        output_attentions: bool | None = None,
        output_hidden_states: bool | None = None,
        return_dict: bool | None = None,
        **kwargs,
    ) -> tuple[torch.Tensor] | tuple[torch.Tensor, typing.Any] | SequenceClassifierOutput:
        r"""
        sliding_window_mask (`torch.Tensor` of shape `(batch_size, sequence_length)`, *optional*):
            Mask to avoid performing attention on padding or far-away tokens. In ModernBert, only every few layers
            perform global attention, while the rest perform local attention. This mask is used to avoid attending to
            far-away tokens in the local attention layers when not using Flash Attention.
        labels (`torch.LongTensor` of shape `(batch_size,)`, *optional*):
            Labels for computing the sequence classification/regression loss. Indices should be in `[0, ...,
            config.num_labels - 1]`. If `config.num_labels == 1` a regression loss is computed (Mean-Square loss), If
            `config.num_labels > 1` a classification loss is computed (Cross-Entropy).
        indices (`torch.Tensor` of shape `(total_unpadded_tokens,)`, *optional*):
            Indices of the non-padding tokens in the input sequence. Used for unpadding the output.
        cu_seqlens (`torch.Tensor` of shape `(batch + 1,)`, *optional*):
            Cumulative sequence lengths of the input sequences. Used to index the unpadded tensors.
        max_seqlen (`int`, *optional*):
            Maximum sequence length in the batch excluding padding tokens. Used to unpad input_ids & pad output tensors.
        batch_size (`int`, *optional*):
            Batch size of the input sequences. Used to pad the output tensors.
        seq_len (`int`, *optional*):
            Sequence length of the input sequences including padding tokens. Used to pad the output tensors.
        """
        return_dict = return_dict if return_dict is not None else self.config.use_return_dict
        self._maybe_set_compile()

        if input_ids is not None:
            self.warn_if_padding_and_no_attention_mask(input_ids, attention_mask)

        if batch_size is None and seq_len is None:
            if inputs_embeds is not None:
                batch_size, seq_len = inputs_embeds.shape[:2]
            else:
                batch_size, seq_len = input_ids.shape[:2]  # type: ignore
        device = input_ids.device if input_ids is not None else inputs_embeds.device  # type: ignore

        if attention_mask is None:
            attention_mask = torch.ones((batch_size, seq_len), device=device, dtype=torch.bool)  # type: ignore

        # Ensure output_hidden_states is True in case pooling mode requires all hidden states
        output_hidden_states = True

        outputs = self.model(
            input_ids=input_ids,
            attention_mask=attention_mask,
            sliding_window_mask=sliding_window_mask,
            position_ids=position_ids,
            inputs_embeds=inputs_embeds,
            indices=indices,
            cu_seqlens=cu_seqlens,
            max_seqlen=max_seqlen,
            batch_size=batch_size,
            seq_len=seq_len,
            output_attentions=output_attentions,
            output_hidden_states=output_hidden_states,
            return_dict=return_dict,
        )
        # outputs[0] = last hidden state, outputs[1] = per-layer hidden states
        # (always populated because output_hidden_states was forced above).
        last_hidden_state = outputs[0]
        hidden_states = outputs[1]

        # Collapse the sequence dimension according to config.classifier_pooling.
        last_hidden_state = _pool_modchembert_output(
            self,
            last_hidden_state,
            hidden_states,
            typing.cast(torch.Tensor, attention_mask),
        )
        pooled_output = self.head(last_hidden_state)
        pooled_output = self.drop(pooled_output)
        logits = self.classifier(pooled_output)

        loss = None
        if labels is not None:
            # Infer problem_type once (HF convention) from num_labels / label dtype.
            if self.config.problem_type is None:
                if self.num_labels == 1:
                    self.config.problem_type = "regression"
                elif self.num_labels > 1 and (labels.dtype == torch.long or labels.dtype == torch.int):
                    self.config.problem_type = "single_label_classification"
                else:
                    self.config.problem_type = "multi_label_classification"

            if self.config.problem_type == "regression":
                loss_fct = MSELoss()
                if self.num_labels == 1:
                    loss = loss_fct(logits.squeeze(), labels.squeeze())
                else:
                    loss = loss_fct(logits, labels)
            elif self.config.problem_type == "single_label_classification":
                loss_fct = CrossEntropyLoss()
                loss = loss_fct(logits.view(-1, self.num_labels), labels.view(-1))
            elif self.config.problem_type == "multi_label_classification":
                loss_fct = BCEWithLogitsLoss()
                loss = loss_fct(logits, labels)

        if not return_dict:
            output = (logits,)
            return ((loss,) + output) if loss is not None else output

        return SequenceClassifierOutput(
            loss=loss,
            logits=logits,
            hidden_states=outputs.hidden_states,
            attentions=outputs.attentions,
        )
451 |
+
|
452 |
+
|
453 |
+
def _pool_modchembert_output(
    module: ModChemBertForSequenceClassification,
    last_hidden_state: torch.Tensor,
    hidden_states: list[torch.Tensor],
    attention_mask: torch.Tensor,
):
    """
    Apply pooling strategy to hidden states for sequence-level classification/regression tasks.

    This function implements various pooling strategies to aggregate sequence representations
    into a single vector for downstream classification or regression tasks. The pooling method
    is determined by the `classifier_pooling` configuration parameter.

    Available pooling strategies:
    - cls: Use the CLS token ([CLS]) representation from the last hidden state
    - mean: Average pooling over all tokens in the sequence (attention-weighted)
    - max_cls: Element-wise max pooling over the last k hidden states, then take CLS token
    - cls_mha: Multi-head attention with CLS token as query and full sequence as keys/values
    - max_seq_mha: Max pooling over last k states + multi-head attention with CLS as query
    - max_seq_mean: Max pooling over last k hidden states, then mean pooling over sequence
    - sum_mean: Sum all hidden states across layers, then mean pool over sequence
    - sum_sum: Sum all hidden states across layers, then sum pool over sequence
    - mean_sum: Mean all hidden states across layers, then sum pool over sequence
    - mean_mean: Mean all hidden states across layers, then mean pool over sequence

    Args:
        module: The model instance containing configuration and pooling attention if needed
        last_hidden_state: Final layer hidden states of shape (batch_size, seq_len, hidden_size)
        hidden_states: List of hidden states from all layers, each of shape (batch_size, seq_len, hidden_size)
        attention_mask: Attention mask of shape (batch_size, seq_len) indicating valid tokens

    Returns:
        torch.Tensor: Pooled representation of shape (batch_size, hidden_size)

    Note:
        Some pooling strategies (cls_mha, max_seq_mha) require the module to have a pooling_attn
        attribute containing a ModChemBertPoolingAttention instance.

        NOTE(review): only `mean`, `cls_mha`, and `max_seq_mha` consult `attention_mask`;
        the sum_*/mean_*/max_seq_mean variants pool over ALL positions, padding included —
        presumably matching the ChemLM reference, but verify for variable-length batches.
    """
    config = typing.cast(ModChemBertConfig, module.config)
    if config.classifier_pooling == "cls":
        last_hidden_state = last_hidden_state[:, 0]
    elif config.classifier_pooling == "mean":
        # Mask-weighted mean: padding positions contribute zero to the sum.
        last_hidden_state = (last_hidden_state * attention_mask.unsqueeze(-1)).sum(dim=1) / attention_mask.sum(
            dim=1, keepdim=True
        )
    elif config.classifier_pooling == "max_cls":
        k_hidden_states = hidden_states[-config.classifier_pooling_last_k :]
        theta = torch.stack(k_hidden_states, dim=1)  # (batch, k, seq_len, hidden)
        pooled_seq = torch.max(theta, dim=1).values  # Element-wise max over k -> (batch, seq_len, hidden)
        last_hidden_state = pooled_seq[:, 0, :]  # (batch, hidden)
    elif config.classifier_pooling == "cls_mha":
        # Similar to max_seq_mha but without the max pooling step
        # Query is CLS token (position 0); Keys/Values are full sequence
        q = last_hidden_state[:, 0, :].unsqueeze(1)  # (batch, 1, hidden)
        q = q.expand(-1, last_hidden_state.shape[1], -1)  # (batch, seq_len, hidden)
        attn_out: torch.Tensor = module.pooling_attn(  # type: ignore
            q=q, kv=last_hidden_state, attention_mask=attention_mask
        )  # (batch, seq_len, hidden)
        last_hidden_state = torch.mean(attn_out, dim=1)
    elif config.classifier_pooling == "max_seq_mha":
        k_hidden_states = hidden_states[-config.classifier_pooling_last_k :]
        theta = torch.stack(k_hidden_states, dim=1)  # (batch, k, seq_len, hidden)
        pooled_seq = torch.max(theta, dim=1).values  # Element-wise max over k -> (batch, seq_len, hidden)
        # Query is pooled CLS token (position 0); Keys/Values are pooled sequence
        q = pooled_seq[:, 0, :].unsqueeze(1)  # (batch, 1, hidden)
        q = q.expand(-1, pooled_seq.shape[1], -1)  # (batch, seq_len, hidden)
        attn_out: torch.Tensor = module.pooling_attn(  # type: ignore
            q=q, kv=pooled_seq, attention_mask=attention_mask
        )  # (batch, seq_len, hidden)
        last_hidden_state = torch.mean(attn_out, dim=1)
    elif config.classifier_pooling == "max_seq_mean":
        k_hidden_states = hidden_states[-config.classifier_pooling_last_k :]
        theta = torch.stack(k_hidden_states, dim=1)  # (batch, k, seq_len, hidden)
        pooled_seq = torch.max(theta, dim=1).values  # Element-wise max over k -> (batch, seq_len, hidden)
        last_hidden_state = torch.mean(pooled_seq, dim=1)  # Mean over sequence length
    elif config.classifier_pooling == "sum_mean":
        # ChemLM uses the mean of all hidden states
        # which outperforms using just the last layer mean or the cls embedding
        # https://doi.org/10.1038/s42004-025-01484-4
        # https://static-content.springer.com/esm/art%3A10.1038%2Fs42004-025-01484-4/MediaObjects/42004_2025_1484_MOESM2_ESM.pdf
        all_hidden_states = torch.stack(hidden_states)  # (layers, batch, seq_len, hidden)
        w = torch.sum(all_hidden_states, dim=0)  # sum over layers
        last_hidden_state = torch.mean(w, dim=1)  # mean over sequence
    elif config.classifier_pooling == "sum_sum":
        all_hidden_states = torch.stack(hidden_states)
        w = torch.sum(all_hidden_states, dim=0)  # sum over layers
        last_hidden_state = torch.sum(w, dim=1)  # sum over sequence
    elif config.classifier_pooling == "mean_sum":
        all_hidden_states = torch.stack(hidden_states)
        w = torch.mean(all_hidden_states, dim=0)  # mean over layers
        last_hidden_state = torch.sum(w, dim=1)  # sum over sequence
    elif config.classifier_pooling == "mean_mean":
        all_hidden_states = torch.stack(hidden_states)
        w = torch.mean(all_hidden_states, dim=0)  # mean over layers
        last_hidden_state = torch.mean(w, dim=1)  # mean over sequence
    return last_hidden_state
549 |
+
|
550 |
+
|
551 |
+
# Public API of this module; everything else is an implementation detail.
__all__ = [
    "ModChemBertForMaskedLM",
    "ModChemBertForSequenceClassification",
]
|
special_tokens_map.json
ADDED
@@ -0,0 +1,37 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
{
|
2 |
+
"cls_token": {
|
3 |
+
"content": "[CLS]",
|
4 |
+
"lstrip": false,
|
5 |
+
"normalized": false,
|
6 |
+
"rstrip": false,
|
7 |
+
"single_word": false
|
8 |
+
},
|
9 |
+
"mask_token": {
|
10 |
+
"content": "[MASK]",
|
11 |
+
"lstrip": false,
|
12 |
+
"normalized": false,
|
13 |
+
"rstrip": false,
|
14 |
+
"single_word": false
|
15 |
+
},
|
16 |
+
"pad_token": {
|
17 |
+
"content": "[PAD]",
|
18 |
+
"lstrip": false,
|
19 |
+
"normalized": false,
|
20 |
+
"rstrip": false,
|
21 |
+
"single_word": false
|
22 |
+
},
|
23 |
+
"sep_token": {
|
24 |
+
"content": "[SEP]",
|
25 |
+
"lstrip": false,
|
26 |
+
"normalized": false,
|
27 |
+
"rstrip": false,
|
28 |
+
"single_word": false
|
29 |
+
},
|
30 |
+
"unk_token": {
|
31 |
+
"content": "[UNK]",
|
32 |
+
"lstrip": false,
|
33 |
+
"normalized": false,
|
34 |
+
"rstrip": false,
|
35 |
+
"single_word": false
|
36 |
+
}
|
37 |
+
}
|
tokenizer.json
ADDED
@@ -0,0 +1,2554 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
{
|
2 |
+
"version": "1.0",
|
3 |
+
"truncation": {
|
4 |
+
"direction": "Right",
|
5 |
+
"max_length": 256,
|
6 |
+
"strategy": "LongestFirst",
|
7 |
+
"stride": 0
|
8 |
+
},
|
9 |
+
"padding": {
|
10 |
+
"strategy": "BatchLongest",
|
11 |
+
"direction": "Right",
|
12 |
+
"pad_to_multiple_of": 8,
|
13 |
+
"pad_id": 2,
|
14 |
+
"pad_type_id": 0,
|
15 |
+
"pad_token": "[PAD]"
|
16 |
+
},
|
17 |
+
"added_tokens": [
|
18 |
+
{
|
19 |
+
"id": 0,
|
20 |
+
"content": "[CLS]",
|
21 |
+
"single_word": false,
|
22 |
+
"lstrip": false,
|
23 |
+
"rstrip": false,
|
24 |
+
"normalized": false,
|
25 |
+
"special": true
|
26 |
+
},
|
27 |
+
{
|
28 |
+
"id": 1,
|
29 |
+
"content": "[SEP]",
|
30 |
+
"single_word": false,
|
31 |
+
"lstrip": false,
|
32 |
+
"rstrip": false,
|
33 |
+
"normalized": false,
|
34 |
+
"special": true
|
35 |
+
},
|
36 |
+
{
|
37 |
+
"id": 2,
|
38 |
+
"content": "[PAD]",
|
39 |
+
"single_word": false,
|
40 |
+
"lstrip": false,
|
41 |
+
"rstrip": false,
|
42 |
+
"normalized": false,
|
43 |
+
"special": true
|
44 |
+
},
|
45 |
+
{
|
46 |
+
"id": 3,
|
47 |
+
"content": "[MASK]",
|
48 |
+
"single_word": false,
|
49 |
+
"lstrip": false,
|
50 |
+
"rstrip": false,
|
51 |
+
"normalized": false,
|
52 |
+
"special": true
|
53 |
+
},
|
54 |
+
{
|
55 |
+
"id": 2361,
|
56 |
+
"content": "[UNK]",
|
57 |
+
"single_word": false,
|
58 |
+
"lstrip": false,
|
59 |
+
"rstrip": false,
|
60 |
+
"normalized": false,
|
61 |
+
"special": true
|
62 |
+
}
|
63 |
+
],
|
64 |
+
"normalizer": null,
|
65 |
+
"pre_tokenizer": {
|
66 |
+
"type": "ByteLevel",
|
67 |
+
"add_prefix_space": false,
|
68 |
+
"trim_offsets": true,
|
69 |
+
"use_regex": true
|
70 |
+
},
|
71 |
+
"post_processor": {
|
72 |
+
"type": "TemplateProcessing",
|
73 |
+
"single": [
|
74 |
+
{
|
75 |
+
"SpecialToken": {
|
76 |
+
"id": "[CLS]",
|
77 |
+
"type_id": 0
|
78 |
+
}
|
79 |
+
},
|
80 |
+
{
|
81 |
+
"Sequence": {
|
82 |
+
"id": "A",
|
83 |
+
"type_id": 0
|
84 |
+
}
|
85 |
+
},
|
86 |
+
{
|
87 |
+
"SpecialToken": {
|
88 |
+
"id": "[SEP]",
|
89 |
+
"type_id": 0
|
90 |
+
}
|
91 |
+
}
|
92 |
+
],
|
93 |
+
"pair": [
|
94 |
+
{
|
95 |
+
"SpecialToken": {
|
96 |
+
"id": "[CLS]",
|
97 |
+
"type_id": 0
|
98 |
+
}
|
99 |
+
},
|
100 |
+
{
|
101 |
+
"Sequence": {
|
102 |
+
"id": "A",
|
103 |
+
"type_id": 0
|
104 |
+
}
|
105 |
+
},
|
106 |
+
{
|
107 |
+
"SpecialToken": {
|
108 |
+
"id": "[SEP]",
|
109 |
+
"type_id": 0
|
110 |
+
}
|
111 |
+
},
|
112 |
+
{
|
113 |
+
"Sequence": {
|
114 |
+
"id": "B",
|
115 |
+
"type_id": 0
|
116 |
+
}
|
117 |
+
},
|
118 |
+
{
|
119 |
+
"SpecialToken": {
|
120 |
+
"id": "[SEP]",
|
121 |
+
"type_id": 0
|
122 |
+
}
|
123 |
+
}
|
124 |
+
],
|
125 |
+
"special_tokens": {
|
126 |
+
"[CLS]": {
|
127 |
+
"id": "[CLS]",
|
128 |
+
"ids": [
|
129 |
+
0
|
130 |
+
],
|
131 |
+
"tokens": [
|
132 |
+
"[CLS]"
|
133 |
+
]
|
134 |
+
},
|
135 |
+
"[MASK]": {
|
136 |
+
"id": "[MASK]",
|
137 |
+
"ids": [
|
138 |
+
3
|
139 |
+
],
|
140 |
+
"tokens": [
|
141 |
+
"[MASK]"
|
142 |
+
]
|
143 |
+
},
|
144 |
+
"[PAD]": {
|
145 |
+
"id": "[PAD]",
|
146 |
+
"ids": [
|
147 |
+
2
|
148 |
+
],
|
149 |
+
"tokens": [
|
150 |
+
"[PAD]"
|
151 |
+
]
|
152 |
+
},
|
153 |
+
"[SEP]": {
|
154 |
+
"id": "[SEP]",
|
155 |
+
"ids": [
|
156 |
+
1
|
157 |
+
],
|
158 |
+
"tokens": [
|
159 |
+
"[SEP]"
|
160 |
+
]
|
161 |
+
},
|
162 |
+
"[UNK]": {
|
163 |
+
"id": "[UNK]",
|
164 |
+
"ids": [
|
165 |
+
2361
|
166 |
+
],
|
167 |
+
"tokens": [
|
168 |
+
"[UNK]"
|
169 |
+
]
|
170 |
+
}
|
171 |
+
}
|
172 |
+
},
|
173 |
+
"decoder": {
|
174 |
+
"type": "ByteLevel",
|
175 |
+
"add_prefix_space": false,
|
176 |
+
"trim_offsets": true,
|
177 |
+
"use_regex": true
|
178 |
+
},
|
179 |
+
"model": {
|
180 |
+
"type": "BPE",
|
181 |
+
"dropout": null,
|
182 |
+
"unk_token": "[UNK]",
|
183 |
+
"continuing_subword_prefix": null,
|
184 |
+
"end_of_word_suffix": null,
|
185 |
+
"fuse_unk": false,
|
186 |
+
"byte_fallback": false,
|
187 |
+
"ignore_merges": false,
|
188 |
+
"vocab": {
|
189 |
+
"[CLS]": 0,
|
190 |
+
"[SEP]": 1,
|
191 |
+
"[PAD]": 2,
|
192 |
+
"[MASK]": 3,
|
193 |
+
"C": 4,
|
194 |
+
"c": 5,
|
195 |
+
"(": 6,
|
196 |
+
")": 7,
|
197 |
+
"1": 8,
|
198 |
+
"O": 9,
|
199 |
+
"N": 10,
|
200 |
+
"2": 11,
|
201 |
+
"=": 12,
|
202 |
+
"n": 13,
|
203 |
+
"3": 14,
|
204 |
+
"[C@H]": 15,
|
205 |
+
"[C@@H]": 16,
|
206 |
+
"F": 17,
|
207 |
+
"S": 18,
|
208 |
+
"4": 19,
|
209 |
+
"Cl": 20,
|
210 |
+
"-": 21,
|
211 |
+
"o": 22,
|
212 |
+
"s": 23,
|
213 |
+
"[nH]": 24,
|
214 |
+
"#": 25,
|
215 |
+
"/": 26,
|
216 |
+
"Br": 27,
|
217 |
+
"[C@]": 28,
|
218 |
+
"[C@@]": 29,
|
219 |
+
"[N+]": 30,
|
220 |
+
"[O-]": 31,
|
221 |
+
"5": 32,
|
222 |
+
"\\": 33,
|
223 |
+
".": 34,
|
224 |
+
"I": 35,
|
225 |
+
"6": 36,
|
226 |
+
"[S@]": 37,
|
227 |
+
"[S@@]": 38,
|
228 |
+
"P": 39,
|
229 |
+
"[N-]": 40,
|
230 |
+
"[Si]": 41,
|
231 |
+
"7": 42,
|
232 |
+
"[n+]": 43,
|
233 |
+
"[2H]": 44,
|
234 |
+
"8": 45,
|
235 |
+
"[NH+]": 46,
|
236 |
+
"B": 47,
|
237 |
+
"9": 48,
|
238 |
+
"[C-]": 49,
|
239 |
+
"[Na+]": 50,
|
240 |
+
"[Cl-]": 51,
|
241 |
+
"[c-]": 52,
|
242 |
+
"[CH]": 53,
|
243 |
+
"%10": 54,
|
244 |
+
"[NH2+]": 55,
|
245 |
+
"[P+]": 56,
|
246 |
+
"[B]": 57,
|
247 |
+
"[I-]": 58,
|
248 |
+
"%11": 59,
|
249 |
+
"[CH2-]": 60,
|
250 |
+
"[O+]": 61,
|
251 |
+
"[NH3+]": 62,
|
252 |
+
"[C]": 63,
|
253 |
+
"[Br-]": 64,
|
254 |
+
"[IH2]": 65,
|
255 |
+
"[S-]": 66,
|
256 |
+
"[cH-]": 67,
|
257 |
+
"%12": 68,
|
258 |
+
"[nH+]": 69,
|
259 |
+
"[B-]": 70,
|
260 |
+
"[K+]": 71,
|
261 |
+
"[Sn]": 72,
|
262 |
+
"[Se]": 73,
|
263 |
+
"[CH-]": 74,
|
264 |
+
"[HH]": 75,
|
265 |
+
"[Y]": 76,
|
266 |
+
"[n-]": 77,
|
267 |
+
"[CH3-]": 78,
|
268 |
+
"[SiH]": 79,
|
269 |
+
"[S+]": 80,
|
270 |
+
"%13": 81,
|
271 |
+
"[SiH2]": 82,
|
272 |
+
"[Li+]": 83,
|
273 |
+
"[NH-]": 84,
|
274 |
+
"%14": 85,
|
275 |
+
"[Na]": 86,
|
276 |
+
"[CH2]": 87,
|
277 |
+
"[O-2]": 88,
|
278 |
+
"[U+2]": 89,
|
279 |
+
"[W]": 90,
|
280 |
+
"[Al]": 91,
|
281 |
+
"[P@]": 92,
|
282 |
+
"[Fe+2]": 93,
|
283 |
+
"[PH+]": 94,
|
284 |
+
"%15": 95,
|
285 |
+
"[Cl+3]": 96,
|
286 |
+
"[Zn+2]": 97,
|
287 |
+
"[Ir]": 98,
|
288 |
+
"[Mg+2]": 99,
|
289 |
+
"[Pt+2]": 100,
|
290 |
+
"[OH2+]": 101,
|
291 |
+
"[As]": 102,
|
292 |
+
"[Fe]": 103,
|
293 |
+
"[OH+]": 104,
|
294 |
+
"[Zr+2]": 105,
|
295 |
+
"[3H]": 106,
|
296 |
+
"[Ge]": 107,
|
297 |
+
"[SiH3]": 108,
|
298 |
+
"[OH-]": 109,
|
299 |
+
"[NH4+]": 110,
|
300 |
+
"[Cu+2]": 111,
|
301 |
+
"[P@@]": 112,
|
302 |
+
"p": 113,
|
303 |
+
"[Pt]": 114,
|
304 |
+
"%16": 115,
|
305 |
+
"[Ca+2]": 116,
|
306 |
+
"[Zr]": 117,
|
307 |
+
"[F-]": 118,
|
308 |
+
"[C+]": 119,
|
309 |
+
"[Ti]": 120,
|
310 |
+
"[P-]": 121,
|
311 |
+
"[V]": 122,
|
312 |
+
"[se]": 123,
|
313 |
+
"[U]": 124,
|
314 |
+
"[O]": 125,
|
315 |
+
"[Ni+2]": 126,
|
316 |
+
"[Zn]": 127,
|
317 |
+
"[Co]": 128,
|
318 |
+
"[Ni]": 129,
|
319 |
+
"[Pd+2]": 130,
|
320 |
+
"[Cu]": 131,
|
321 |
+
"%17": 132,
|
322 |
+
"[Cu+]": 133,
|
323 |
+
"[Te]": 134,
|
324 |
+
"[H+]": 135,
|
325 |
+
"[CH+]": 136,
|
326 |
+
"[Li]": 137,
|
327 |
+
"[Pd]": 138,
|
328 |
+
"[Mo]": 139,
|
329 |
+
"[Ru+2]": 140,
|
330 |
+
"[o+]": 141,
|
331 |
+
"[Re]": 142,
|
332 |
+
"[SH+]": 143,
|
333 |
+
"%18": 144,
|
334 |
+
"[Ac]": 145,
|
335 |
+
"[Cr]": 146,
|
336 |
+
"[NH2-]": 147,
|
337 |
+
"[K]": 148,
|
338 |
+
"[13CH2]": 149,
|
339 |
+
"[c]": 150,
|
340 |
+
"[Zr+4]": 151,
|
341 |
+
"[Tl]": 152,
|
342 |
+
"[13C]": 153,
|
343 |
+
"[Mn]": 154,
|
344 |
+
"[N@+]": 155,
|
345 |
+
"[Hg]": 156,
|
346 |
+
"[Rh]": 157,
|
347 |
+
"[Ti+4]": 158,
|
348 |
+
"[Sb]": 159,
|
349 |
+
"[Co+2]": 160,
|
350 |
+
"[Ag+]": 161,
|
351 |
+
"[Ru]": 162,
|
352 |
+
"%19": 163,
|
353 |
+
"[N@@+]": 164,
|
354 |
+
"[Ti+2]": 165,
|
355 |
+
"[Al+3]": 166,
|
356 |
+
"[Pb]": 167,
|
357 |
+
"[I+]": 168,
|
358 |
+
"[18F]": 169,
|
359 |
+
"[s+]": 170,
|
360 |
+
"[Rb+]": 171,
|
361 |
+
"[Ba+2]": 172,
|
362 |
+
"[H-]": 173,
|
363 |
+
"[Fe+3]": 174,
|
364 |
+
"[Ir+3]": 175,
|
365 |
+
"[13cH]": 176,
|
366 |
+
"%20": 177,
|
367 |
+
"[AlH2]": 178,
|
368 |
+
"[Au+]": 179,
|
369 |
+
"[13c]": 180,
|
370 |
+
"[SH2+]": 181,
|
371 |
+
"[Sn+2]": 182,
|
372 |
+
"[Mn+2]": 183,
|
373 |
+
"[Si-]": 184,
|
374 |
+
"[Ag]": 185,
|
375 |
+
"[N]": 186,
|
376 |
+
"[Bi]": 187,
|
377 |
+
"%21": 188,
|
378 |
+
"[In]": 189,
|
379 |
+
"[CH2+]": 190,
|
380 |
+
"[Y+3]": 191,
|
381 |
+
"[Ga]": 192,
|
382 |
+
"%22": 193,
|
383 |
+
"[Co+3]": 194,
|
384 |
+
"[Au]": 195,
|
385 |
+
"[13CH3]": 196,
|
386 |
+
"[Mg]": 197,
|
387 |
+
"[Cs+]": 198,
|
388 |
+
"[W+2]": 199,
|
389 |
+
"[Hf]": 200,
|
390 |
+
"[Zn+]": 201,
|
391 |
+
"[Se-]": 202,
|
392 |
+
"[S-2]": 203,
|
393 |
+
"[Ca]": 204,
|
394 |
+
"[pH]": 205,
|
395 |
+
"[ClH+]": 206,
|
396 |
+
"[Ti+3]": 207,
|
397 |
+
"%23": 208,
|
398 |
+
"[Ru+]": 209,
|
399 |
+
"[SH-]": 210,
|
400 |
+
"[13CH]": 211,
|
401 |
+
"[IH+]": 212,
|
402 |
+
"[Hf+4]": 213,
|
403 |
+
"[Rf]": 214,
|
404 |
+
"[OH3+]": 215,
|
405 |
+
"%24": 216,
|
406 |
+
"[Pt+4]": 217,
|
407 |
+
"[Zr+3]": 218,
|
408 |
+
"[PH3+]": 219,
|
409 |
+
"[Sr+2]": 220,
|
410 |
+
"[Cd+2]": 221,
|
411 |
+
"[Cd]": 222,
|
412 |
+
"%25": 223,
|
413 |
+
"[Os]": 224,
|
414 |
+
"[BH-]": 225,
|
415 |
+
"[Sn+4]": 226,
|
416 |
+
"[Cr+3]": 227,
|
417 |
+
"[Ru+3]": 228,
|
418 |
+
"[PH2+]": 229,
|
419 |
+
"[Rh+2]": 230,
|
420 |
+
"[V+2]": 231,
|
421 |
+
"%26": 232,
|
422 |
+
"[Gd+3]": 233,
|
423 |
+
"[Pb+2]": 234,
|
424 |
+
"[PH]": 235,
|
425 |
+
"[Hg+]": 236,
|
426 |
+
"[Mo+2]": 237,
|
427 |
+
"[AlH]": 238,
|
428 |
+
"[Sn+]": 239,
|
429 |
+
"%27": 240,
|
430 |
+
"[Pd+]": 241,
|
431 |
+
"b": 242,
|
432 |
+
"[Rh+3]": 243,
|
433 |
+
"[Hg+2]": 244,
|
434 |
+
"[15NH]": 245,
|
435 |
+
"[14C]": 246,
|
436 |
+
"%28": 247,
|
437 |
+
"[Mn+3]": 248,
|
438 |
+
"[Si+]": 249,
|
439 |
+
"[SeH]": 250,
|
440 |
+
"[13C@H]": 251,
|
441 |
+
"[NH]": 252,
|
442 |
+
"[Ga+3]": 253,
|
443 |
+
"[SiH-]": 254,
|
444 |
+
"[13C@@H]": 255,
|
445 |
+
"[Ce]": 256,
|
446 |
+
"[Au+3]": 257,
|
447 |
+
"[Bi+3]": 258,
|
448 |
+
"[15N]": 259,
|
449 |
+
"%29": 260,
|
450 |
+
"[BH3-]": 261,
|
451 |
+
"[14cH]": 262,
|
452 |
+
"[Ti+]": 263,
|
453 |
+
"[Gd]": 264,
|
454 |
+
"[cH+]": 265,
|
455 |
+
"[Cr+2]": 266,
|
456 |
+
"[Sb-]": 267,
|
457 |
+
"%30": 268,
|
458 |
+
"[Be+2]": 269,
|
459 |
+
"[Al+]": 270,
|
460 |
+
"[te]": 271,
|
461 |
+
"[11CH3]": 272,
|
462 |
+
"[Sm]": 273,
|
463 |
+
"[Pr]": 274,
|
464 |
+
"[La]": 275,
|
465 |
+
"%31": 276,
|
466 |
+
"[Al-]": 277,
|
467 |
+
"[Ta]": 278,
|
468 |
+
"[125I]": 279,
|
469 |
+
"[BH2-]": 280,
|
470 |
+
"[Nb]": 281,
|
471 |
+
"[Si@]": 282,
|
472 |
+
"%32": 283,
|
473 |
+
"[14c]": 284,
|
474 |
+
"[Sb+3]": 285,
|
475 |
+
"[Ba]": 286,
|
476 |
+
"%33": 287,
|
477 |
+
"[Os+2]": 288,
|
478 |
+
"[Si@@]": 289,
|
479 |
+
"[La+3]": 290,
|
480 |
+
"[15n]": 291,
|
481 |
+
"[15NH2]": 292,
|
482 |
+
"[Nd+3]": 293,
|
483 |
+
"%34": 294,
|
484 |
+
"[14CH2]": 295,
|
485 |
+
"[18O]": 296,
|
486 |
+
"[Nd]": 297,
|
487 |
+
"[GeH]": 298,
|
488 |
+
"[Ni+3]": 299,
|
489 |
+
"[Eu]": 300,
|
490 |
+
"[Dy+3]": 301,
|
491 |
+
"[Sc]": 302,
|
492 |
+
"%36": 303,
|
493 |
+
"[Se-2]": 304,
|
494 |
+
"[As+]": 305,
|
495 |
+
"%35": 306,
|
496 |
+
"[AsH]": 307,
|
497 |
+
"[Tb]": 308,
|
498 |
+
"[Sb+5]": 309,
|
499 |
+
"[Se+]": 310,
|
500 |
+
"[Ce+3]": 311,
|
501 |
+
"[c+]": 312,
|
502 |
+
"[In+3]": 313,
|
503 |
+
"[SnH]": 314,
|
504 |
+
"[Mo+4]": 315,
|
505 |
+
"%37": 316,
|
506 |
+
"[V+4]": 317,
|
507 |
+
"[Eu+3]": 318,
|
508 |
+
"[Hf+2]": 319,
|
509 |
+
"%38": 320,
|
510 |
+
"[Pt+]": 321,
|
511 |
+
"[p+]": 322,
|
512 |
+
"[123I]": 323,
|
513 |
+
"[Tl+]": 324,
|
514 |
+
"[Sm+3]": 325,
|
515 |
+
"%39": 326,
|
516 |
+
"[Yb+3]": 327,
|
517 |
+
"%40": 328,
|
518 |
+
"[Yb]": 329,
|
519 |
+
"[Os+]": 330,
|
520 |
+
"%41": 331,
|
521 |
+
"[10B]": 332,
|
522 |
+
"[Sc+3]": 333,
|
523 |
+
"[Al+2]": 334,
|
524 |
+
"%42": 335,
|
525 |
+
"[Sr]": 336,
|
526 |
+
"[Tb+3]": 337,
|
527 |
+
"[Po]": 338,
|
528 |
+
"[Tc]": 339,
|
529 |
+
"[PH-]": 340,
|
530 |
+
"[AlH3]": 341,
|
531 |
+
"[Ar]": 342,
|
532 |
+
"[U+4]": 343,
|
533 |
+
"[SnH2]": 344,
|
534 |
+
"[Cl+2]": 345,
|
535 |
+
"[si]": 346,
|
536 |
+
"[Fe+]": 347,
|
537 |
+
"[14CH3]": 348,
|
538 |
+
"[U+3]": 349,
|
539 |
+
"[Cl+]": 350,
|
540 |
+
"%43": 351,
|
541 |
+
"[GeH2]": 352,
|
542 |
+
"%44": 353,
|
543 |
+
"[Er+3]": 354,
|
544 |
+
"[Mo+3]": 355,
|
545 |
+
"[I+2]": 356,
|
546 |
+
"[Fe+4]": 357,
|
547 |
+
"[99Tc]": 358,
|
548 |
+
"%45": 359,
|
549 |
+
"[11C]": 360,
|
550 |
+
"%46": 361,
|
551 |
+
"[SnH3]": 362,
|
552 |
+
"[S]": 363,
|
553 |
+
"[Te+]": 364,
|
554 |
+
"[Er]": 365,
|
555 |
+
"[Lu+3]": 366,
|
556 |
+
"[11B]": 367,
|
557 |
+
"%47": 368,
|
558 |
+
"%48": 369,
|
559 |
+
"[P]": 370,
|
560 |
+
"[Tm]": 371,
|
561 |
+
"[Th]": 372,
|
562 |
+
"[Dy]": 373,
|
563 |
+
"[Pr+3]": 374,
|
564 |
+
"[Ta+5]": 375,
|
565 |
+
"[Nb+5]": 376,
|
566 |
+
"[Rb]": 377,
|
567 |
+
"[GeH3]": 378,
|
568 |
+
"[Br+2]": 379,
|
569 |
+
"%49": 380,
|
570 |
+
"[131I]": 381,
|
571 |
+
"[Fm]": 382,
|
572 |
+
"[Cs]": 383,
|
573 |
+
"[BH4-]": 384,
|
574 |
+
"[Lu]": 385,
|
575 |
+
"[15nH]": 386,
|
576 |
+
"%50": 387,
|
577 |
+
"[Ru+6]": 388,
|
578 |
+
"[b-]": 389,
|
579 |
+
"[Ho]": 390,
|
580 |
+
"[Th+4]": 391,
|
581 |
+
"[Ru+4]": 392,
|
582 |
+
"%52": 393,
|
583 |
+
"[14CH]": 394,
|
584 |
+
"%51": 395,
|
585 |
+
"[Cr+6]": 396,
|
586 |
+
"[18OH]": 397,
|
587 |
+
"[Ho+3]": 398,
|
588 |
+
"[Ce+4]": 399,
|
589 |
+
"[Bi+2]": 400,
|
590 |
+
"[Co+]": 401,
|
591 |
+
"%53": 402,
|
592 |
+
"[Yb+2]": 403,
|
593 |
+
"[Fe+6]": 404,
|
594 |
+
"[Be]": 405,
|
595 |
+
"%54": 406,
|
596 |
+
"[SH3+]": 407,
|
597 |
+
"[Np]": 408,
|
598 |
+
"[As-]": 409,
|
599 |
+
"%55": 410,
|
600 |
+
"[14C@@H]": 411,
|
601 |
+
"[Ir+2]": 412,
|
602 |
+
"[GaH3]": 413,
|
603 |
+
"[p-]": 414,
|
604 |
+
"[GeH4]": 415,
|
605 |
+
"[Sn+3]": 416,
|
606 |
+
"[Os+4]": 417,
|
607 |
+
"%56": 418,
|
608 |
+
"[14C@H]": 419,
|
609 |
+
"[sH+]": 420,
|
610 |
+
"[19F]": 421,
|
611 |
+
"[Eu+2]": 422,
|
612 |
+
"[TlH]": 423,
|
613 |
+
"%57": 424,
|
614 |
+
"[Cr+4]": 425,
|
615 |
+
"%58": 426,
|
616 |
+
"[B@@-]": 427,
|
617 |
+
"[SiH+]": 428,
|
618 |
+
"[At]": 429,
|
619 |
+
"[Am]": 430,
|
620 |
+
"[Fe+5]": 431,
|
621 |
+
"[AsH2]": 432,
|
622 |
+
"[Si+4]": 433,
|
623 |
+
"[B@-]": 434,
|
624 |
+
"[Pu]": 435,
|
625 |
+
"[SbH]": 436,
|
626 |
+
"[P-2]": 437,
|
627 |
+
"[Tm+3]": 438,
|
628 |
+
"*": 439,
|
629 |
+
"%59": 440,
|
630 |
+
"[se+]": 441,
|
631 |
+
"%60": 442,
|
632 |
+
"[oH+]": 443,
|
633 |
+
"[1H]": 444,
|
634 |
+
"[15N+]": 445,
|
635 |
+
"[124I]": 446,
|
636 |
+
"[S@@+]": 447,
|
637 |
+
"[P-3]": 448,
|
638 |
+
"[H]": 449,
|
639 |
+
"[IH2+]": 450,
|
640 |
+
"[TeH]": 451,
|
641 |
+
"[Xe]": 452,
|
642 |
+
"[PH4+]": 453,
|
643 |
+
"[Cr+]": 454,
|
644 |
+
"[Cm]": 455,
|
645 |
+
"[I+3]": 456,
|
646 |
+
"%61": 457,
|
647 |
+
"[Nb+2]": 458,
|
648 |
+
"[Ru+5]": 459,
|
649 |
+
"%62": 460,
|
650 |
+
"[Ta+2]": 461,
|
651 |
+
"[Tc+4]": 462,
|
652 |
+
"[CH3+]": 463,
|
653 |
+
"[Pm]": 464,
|
654 |
+
"[Si@H]": 465,
|
655 |
+
"[No]": 466,
|
656 |
+
"%63": 467,
|
657 |
+
"[Cr+5]": 468,
|
658 |
+
"[Th+2]": 469,
|
659 |
+
"[Zn-2]": 470,
|
660 |
+
"[13C@]": 471,
|
661 |
+
"[Lr]": 472,
|
662 |
+
"%64": 473,
|
663 |
+
"[99Tc+3]": 474,
|
664 |
+
"%65": 475,
|
665 |
+
"[13C@@]": 476,
|
666 |
+
"%66": 477,
|
667 |
+
"[Fe-]": 478,
|
668 |
+
"[17O]": 479,
|
669 |
+
"[siH]": 480,
|
670 |
+
"[Sb+]": 481,
|
671 |
+
"[OH]": 482,
|
672 |
+
"[IH]": 483,
|
673 |
+
"[11CH2]": 484,
|
674 |
+
"[Cf]": 485,
|
675 |
+
"[SiH2+]": 486,
|
676 |
+
"[Gd+2]": 487,
|
677 |
+
"[In+]": 488,
|
678 |
+
"[Si@@H]": 489,
|
679 |
+
"[Mn+]": 490,
|
680 |
+
"[99Tc+4]": 491,
|
681 |
+
"[Ga-]": 492,
|
682 |
+
"%67": 493,
|
683 |
+
"[S@+]": 494,
|
684 |
+
"[Ge+4]": 495,
|
685 |
+
"[Tl+3]": 496,
|
686 |
+
"[16OH]": 497,
|
687 |
+
"%68": 498,
|
688 |
+
"[2H-]": 499,
|
689 |
+
"[Ra]": 500,
|
690 |
+
"[si-]": 501,
|
691 |
+
"[NiH2]": 502,
|
692 |
+
"[P@@H]": 503,
|
693 |
+
"[Rh+]": 504,
|
694 |
+
"[12C]": 505,
|
695 |
+
"[35S]": 506,
|
696 |
+
"[32P]": 507,
|
697 |
+
"[SiH2-]": 508,
|
698 |
+
"[AlH2+]": 509,
|
699 |
+
"[16O]": 510,
|
700 |
+
"%69": 511,
|
701 |
+
"[BiH]": 512,
|
702 |
+
"[BiH2]": 513,
|
703 |
+
"[Zn-]": 514,
|
704 |
+
"[BH]": 515,
|
705 |
+
"[Tc+3]": 516,
|
706 |
+
"[Ir+]": 517,
|
707 |
+
"[Ni+]": 518,
|
708 |
+
"%70": 519,
|
709 |
+
"[InH2]": 520,
|
710 |
+
"[InH]": 521,
|
711 |
+
"[Nb+3]": 522,
|
712 |
+
"[PbH]": 523,
|
713 |
+
"[Bi+]": 524,
|
714 |
+
"%71": 525,
|
715 |
+
"[As+3]": 526,
|
716 |
+
"%72": 527,
|
717 |
+
"[18O-]": 528,
|
718 |
+
"[68Ga+3]": 529,
|
719 |
+
"%73": 530,
|
720 |
+
"[Pa]": 531,
|
721 |
+
"[76Br]": 532,
|
722 |
+
"[Tc+5]": 533,
|
723 |
+
"[pH+]": 534,
|
724 |
+
"[64Cu+2]": 535,
|
725 |
+
"[Ru+8]": 536,
|
726 |
+
"%74": 537,
|
727 |
+
"[PH2-]": 538,
|
728 |
+
"[Si+2]": 539,
|
729 |
+
"[17OH]": 540,
|
730 |
+
"[RuH]": 541,
|
731 |
+
"[111In+3]": 542,
|
732 |
+
"[AlH+]": 543,
|
733 |
+
"%75": 544,
|
734 |
+
"%76": 545,
|
735 |
+
"[W+]": 546,
|
736 |
+
"[SbH2]": 547,
|
737 |
+
"[PoH]": 548,
|
738 |
+
"[Ru-]": 549,
|
739 |
+
"[XeH]": 550,
|
740 |
+
"[Tc+2]": 551,
|
741 |
+
"[13C-]": 552,
|
742 |
+
"[Br+]": 553,
|
743 |
+
"[Pt-2]": 554,
|
744 |
+
"[Es]": 555,
|
745 |
+
"[Cu-]": 556,
|
746 |
+
"[Mg+]": 557,
|
747 |
+
"[3HH]": 558,
|
748 |
+
"[P@H]": 559,
|
749 |
+
"[ClH2+]": 560,
|
750 |
+
"%77": 561,
|
751 |
+
"[SH]": 562,
|
752 |
+
"[Au-]": 563,
|
753 |
+
"[2HH]": 564,
|
754 |
+
"%78": 565,
|
755 |
+
"[Sn-]": 566,
|
756 |
+
"[11CH]": 567,
|
757 |
+
"[PdH2]": 568,
|
758 |
+
"0": 569,
|
759 |
+
"[Os+6]": 570,
|
760 |
+
"%79": 571,
|
761 |
+
"[Mo+]": 572,
|
762 |
+
"%80": 573,
|
763 |
+
"[al]": 574,
|
764 |
+
"[PbH2]": 575,
|
765 |
+
"[64Cu]": 576,
|
766 |
+
"[Cl]": 577,
|
767 |
+
"[12CH3]": 578,
|
768 |
+
"%81": 579,
|
769 |
+
"[Tc+7]": 580,
|
770 |
+
"[11c]": 581,
|
771 |
+
"%82": 582,
|
772 |
+
"[Li-]": 583,
|
773 |
+
"[99Tc+5]": 584,
|
774 |
+
"[He]": 585,
|
775 |
+
"[12c]": 586,
|
776 |
+
"[Kr]": 587,
|
777 |
+
"[RuH+2]": 588,
|
778 |
+
"[35Cl]": 589,
|
779 |
+
"[Pd-2]": 590,
|
780 |
+
"[GaH2]": 591,
|
781 |
+
"[4H]": 592,
|
782 |
+
"[Sg]": 593,
|
783 |
+
"[Cu-2]": 594,
|
784 |
+
"[Br+3]": 595,
|
785 |
+
"%83": 596,
|
786 |
+
"[37Cl]": 597,
|
787 |
+
"[211At]": 598,
|
788 |
+
"[IrH+2]": 599,
|
789 |
+
"[Mt]": 600,
|
790 |
+
"[Ir-2]": 601,
|
791 |
+
"[In-]": 602,
|
792 |
+
"[12cH]": 603,
|
793 |
+
"[12CH2]": 604,
|
794 |
+
"[RuH2]": 605,
|
795 |
+
"[99Tc+7]": 606,
|
796 |
+
"%84": 607,
|
797 |
+
"[15n+]": 608,
|
798 |
+
"[ClH2+2]": 609,
|
799 |
+
"[16N]": 610,
|
800 |
+
"[111In]": 611,
|
801 |
+
"[Tc+]": 612,
|
802 |
+
"[Ru-2]": 613,
|
803 |
+
"[12CH]": 614,
|
804 |
+
"[si+]": 615,
|
805 |
+
"[Tc+6]": 616,
|
806 |
+
"%85": 617,
|
807 |
+
"%86": 618,
|
808 |
+
"[90Y]": 619,
|
809 |
+
"[Pd-]": 620,
|
810 |
+
"[188Re]": 621,
|
811 |
+
"[RuH+]": 622,
|
812 |
+
"[NiH]": 623,
|
813 |
+
"[SiH3-]": 624,
|
814 |
+
"[14n]": 625,
|
815 |
+
"[CH3]": 626,
|
816 |
+
"[14N]": 627,
|
817 |
+
"[10BH2]": 628,
|
818 |
+
"%88": 629,
|
819 |
+
"%89": 630,
|
820 |
+
"%90": 631,
|
821 |
+
"[34S]": 632,
|
822 |
+
"[77Br]": 633,
|
823 |
+
"[GaH]": 634,
|
824 |
+
"[Br]": 635,
|
825 |
+
"[Ge@]": 636,
|
826 |
+
"[B@@H-]": 637,
|
827 |
+
"[CuH]": 638,
|
828 |
+
"[SiH4]": 639,
|
829 |
+
"[3H-]": 640,
|
830 |
+
"%87": 641,
|
831 |
+
"%91": 642,
|
832 |
+
"%92": 643,
|
833 |
+
"[67Cu]": 644,
|
834 |
+
"[I]": 645,
|
835 |
+
"[177Lu]": 646,
|
836 |
+
"[ReH]": 647,
|
837 |
+
"[67Ga+3]": 648,
|
838 |
+
"[Db]": 649,
|
839 |
+
"[177Lu+3]": 650,
|
840 |
+
"[AlH2-]": 651,
|
841 |
+
"[Si+3]": 652,
|
842 |
+
"[Ti-2]": 653,
|
843 |
+
"[RuH+3]": 654,
|
844 |
+
"[al+]": 655,
|
845 |
+
"[68Ga]": 656,
|
846 |
+
"[2H+]": 657,
|
847 |
+
"[B@H-]": 658,
|
848 |
+
"[WH2]": 659,
|
849 |
+
"[OsH]": 660,
|
850 |
+
"[Ir-3]": 661,
|
851 |
+
"[AlH-]": 662,
|
852 |
+
"[Bk]": 663,
|
853 |
+
"[75Se]": 664,
|
854 |
+
"[14C@]": 665,
|
855 |
+
"[Pt-]": 666,
|
856 |
+
"[N@@H+]": 667,
|
857 |
+
"[Nb-]": 668,
|
858 |
+
"[13NH2]": 669,
|
859 |
+
"%93": 670,
|
860 |
+
"[186Re]": 671,
|
861 |
+
"[Tb+4]": 672,
|
862 |
+
"[PtH]": 673,
|
863 |
+
"[IrH2]": 674,
|
864 |
+
"[Hg-2]": 675,
|
865 |
+
"[AlH3-]": 676,
|
866 |
+
"[PdH+]": 677,
|
867 |
+
"[Md]": 678,
|
868 |
+
"[RhH+2]": 679,
|
869 |
+
"[11cH]": 680,
|
870 |
+
"[Co-2]": 681,
|
871 |
+
"[15N-]": 682,
|
872 |
+
"[ZrH2]": 683,
|
873 |
+
"%94": 684,
|
874 |
+
"[Hg-]": 685,
|
875 |
+
"[127I]": 686,
|
876 |
+
"[AsH2+]": 687,
|
877 |
+
"[MoH2]": 688,
|
878 |
+
"[Te+4]": 689,
|
879 |
+
"[14C@@]": 690,
|
880 |
+
"[As+5]": 691,
|
881 |
+
"[SnH+3]": 692,
|
882 |
+
"[Ge@@]": 693,
|
883 |
+
"[6Li+]": 694,
|
884 |
+
"[WH]": 695,
|
885 |
+
"[Ne]": 696,
|
886 |
+
"[14NH2]": 697,
|
887 |
+
"[14NH]": 698,
|
888 |
+
"[12C@@H]": 699,
|
889 |
+
"[Os+7]": 700,
|
890 |
+
"[RhH]": 701,
|
891 |
+
"[Al-3]": 702,
|
892 |
+
"[SnH+]": 703,
|
893 |
+
"[15NH3+]": 704,
|
894 |
+
"[Zr+]": 705,
|
895 |
+
"[197Hg+]": 706,
|
896 |
+
"%95": 707,
|
897 |
+
"%96": 708,
|
898 |
+
"[90Y+3]": 709,
|
899 |
+
"[Os-2]": 710,
|
900 |
+
"[98Tc+5]": 711,
|
901 |
+
"[15NH3]": 712,
|
902 |
+
"[bH-]": 713,
|
903 |
+
"[33P]": 714,
|
904 |
+
"[Zr-2]": 715,
|
905 |
+
"[15O]": 716,
|
906 |
+
"[Rh-]": 717,
|
907 |
+
"[PbH3]": 718,
|
908 |
+
"[PH2]": 719,
|
909 |
+
"[Ni-]": 720,
|
910 |
+
"[CuH+]": 721,
|
911 |
+
"%97": 722,
|
912 |
+
"%98": 723,
|
913 |
+
"%99": 724,
|
914 |
+
"[Os+5]": 725,
|
915 |
+
"[PtH+]": 726,
|
916 |
+
"[ReH4]": 727,
|
917 |
+
"[16NH]": 728,
|
918 |
+
"[82Br]": 729,
|
919 |
+
"[W-]": 730,
|
920 |
+
"[18F-]": 731,
|
921 |
+
"[15NH4+]": 732,
|
922 |
+
"[Se+4]": 733,
|
923 |
+
"[SeH-]": 734,
|
924 |
+
"[67Cu+2]": 735,
|
925 |
+
"[12C@H]": 736,
|
926 |
+
"[AsH3]": 737,
|
927 |
+
"[HgH]": 738,
|
928 |
+
"[10B-]": 739,
|
929 |
+
"[99Tc+6]": 740,
|
930 |
+
"[117Sn+4]": 741,
|
931 |
+
"[Te@]": 742,
|
932 |
+
"[P@+]": 743,
|
933 |
+
"[35SH]": 744,
|
934 |
+
"[SeH+]": 745,
|
935 |
+
"[Ni-2]": 746,
|
936 |
+
"[Al-2]": 747,
|
937 |
+
"[TeH2]": 748,
|
938 |
+
"[Bh]": 749,
|
939 |
+
"[99Tc+2]": 750,
|
940 |
+
"[Os+8]": 751,
|
941 |
+
"[PH-2]": 752,
|
942 |
+
"[7Li+]": 753,
|
943 |
+
"[14nH]": 754,
|
944 |
+
"[AlH+2]": 755,
|
945 |
+
"[18FH]": 756,
|
946 |
+
"[SnH4]": 757,
|
947 |
+
"[18O-2]": 758,
|
948 |
+
"[IrH]": 759,
|
949 |
+
"[13N]": 760,
|
950 |
+
"[Te@@]": 761,
|
951 |
+
"[Rh-3]": 762,
|
952 |
+
"[15NH+]": 763,
|
953 |
+
"[AsH3+]": 764,
|
954 |
+
"[SeH2]": 765,
|
955 |
+
"[AsH+]": 766,
|
956 |
+
"[CoH2]": 767,
|
957 |
+
"[16NH2]": 768,
|
958 |
+
"[AsH-]": 769,
|
959 |
+
"[203Hg+]": 770,
|
960 |
+
"[P@@+]": 771,
|
961 |
+
"[166Ho+3]": 772,
|
962 |
+
"[60Co+3]": 773,
|
963 |
+
"[13CH2-]": 774,
|
964 |
+
"[SeH2+]": 775,
|
965 |
+
"[75Br]": 776,
|
966 |
+
"[TlH2]": 777,
|
967 |
+
"[80Br]": 778,
|
968 |
+
"[siH+]": 779,
|
969 |
+
"[Ca+]": 780,
|
970 |
+
"[153Sm+3]": 781,
|
971 |
+
"[PdH]": 782,
|
972 |
+
"[225Ac]": 783,
|
973 |
+
"[13CH3-]": 784,
|
974 |
+
"[AlH4-]": 785,
|
975 |
+
"[FeH]": 786,
|
976 |
+
"[13CH-]": 787,
|
977 |
+
"[14C-]": 788,
|
978 |
+
"[11C-]": 789,
|
979 |
+
"[153Sm]": 790,
|
980 |
+
"[Re-]": 791,
|
981 |
+
"[te+]": 792,
|
982 |
+
"[13CH4]": 793,
|
983 |
+
"[ClH+2]": 794,
|
984 |
+
"[8CH2]": 795,
|
985 |
+
"[99Mo]": 796,
|
986 |
+
"[ClH3+3]": 797,
|
987 |
+
"[SbH3]": 798,
|
988 |
+
"[25Mg+2]": 799,
|
989 |
+
"[16N+]": 800,
|
990 |
+
"[SnH2+]": 801,
|
991 |
+
"[11C@H]": 802,
|
992 |
+
"[122I]": 803,
|
993 |
+
"[Re-2]": 804,
|
994 |
+
"[RuH2+2]": 805,
|
995 |
+
"[ZrH]": 806,
|
996 |
+
"[Bi-]": 807,
|
997 |
+
"[Pr+]": 808,
|
998 |
+
"[Rn]": 809,
|
999 |
+
"[Fr]": 810,
|
1000 |
+
"[36Cl]": 811,
|
1001 |
+
"[18o]": 812,
|
1002 |
+
"[YH]": 813,
|
1003 |
+
"[79Br]": 814,
|
1004 |
+
"[121I]": 815,
|
1005 |
+
"[113In+3]": 816,
|
1006 |
+
"[TaH]": 817,
|
1007 |
+
"[RhH2]": 818,
|
1008 |
+
"[Ta-]": 819,
|
1009 |
+
"[67Ga]": 820,
|
1010 |
+
"[ZnH+]": 821,
|
1011 |
+
"[SnH2-]": 822,
|
1012 |
+
"[OsH2]": 823,
|
1013 |
+
"[16F]": 824,
|
1014 |
+
"[FeH2]": 825,
|
1015 |
+
"[14O]": 826,
|
1016 |
+
"[PbH2+2]": 827,
|
1017 |
+
"[BH2]": 828,
|
1018 |
+
"[6H]": 829,
|
1019 |
+
"[125Te]": 830,
|
1020 |
+
"[197Hg]": 831,
|
1021 |
+
"[TaH2]": 832,
|
1022 |
+
"[TaH3]": 833,
|
1023 |
+
"[76As]": 834,
|
1024 |
+
"[Nb-2]": 835,
|
1025 |
+
"[14N+]": 836,
|
1026 |
+
"[125I-]": 837,
|
1027 |
+
"[33S]": 838,
|
1028 |
+
"[IH2+2]": 839,
|
1029 |
+
"[NH2]": 840,
|
1030 |
+
"[PtH2]": 841,
|
1031 |
+
"[MnH]": 842,
|
1032 |
+
"[19C]": 843,
|
1033 |
+
"[17F]": 844,
|
1034 |
+
"[1H-]": 845,
|
1035 |
+
"[SnH4+2]": 846,
|
1036 |
+
"[Mn-2]": 847,
|
1037 |
+
"[15NH2+]": 848,
|
1038 |
+
"[TiH2]": 849,
|
1039 |
+
"[ReH7]": 850,
|
1040 |
+
"[Cd-2]": 851,
|
1041 |
+
"[Fe-3]": 852,
|
1042 |
+
"[SH2]": 853,
|
1043 |
+
"[17O-]": 854,
|
1044 |
+
"[siH-]": 855,
|
1045 |
+
"[CoH+]": 856,
|
1046 |
+
"[VH]": 857,
|
1047 |
+
"[10BH]": 858,
|
1048 |
+
"[Ru-3]": 859,
|
1049 |
+
"[13O]": 860,
|
1050 |
+
"[5H]": 861,
|
1051 |
+
"[15n-]": 862,
|
1052 |
+
"[153Gd]": 863,
|
1053 |
+
"[12C@]": 864,
|
1054 |
+
"[11CH3-]": 865,
|
1055 |
+
"[IrH3]": 866,
|
1056 |
+
"[RuH3]": 867,
|
1057 |
+
"[74Se]": 868,
|
1058 |
+
"[Se@]": 869,
|
1059 |
+
"[Hf+]": 870,
|
1060 |
+
"[77Se]": 871,
|
1061 |
+
"[166Ho]": 872,
|
1062 |
+
"[59Fe+2]": 873,
|
1063 |
+
"[203Hg]": 874,
|
1064 |
+
"[18OH-]": 875,
|
1065 |
+
"[8CH]": 876,
|
1066 |
+
"[12C@@]": 877,
|
1067 |
+
"[11CH4]": 878,
|
1068 |
+
"[15C]": 879,
|
1069 |
+
"[249Cf]": 880,
|
1070 |
+
"[PbH4]": 881,
|
1071 |
+
"[64Zn]": 882,
|
1072 |
+
"[99Tc+]": 883,
|
1073 |
+
"[14c-]": 884,
|
1074 |
+
"[149Pm]": 885,
|
1075 |
+
"[IrH4]": 886,
|
1076 |
+
"[Se@@]": 887,
|
1077 |
+
"[13OH]": 888,
|
1078 |
+
"[14CH3-]": 889,
|
1079 |
+
"[28Si]": 890,
|
1080 |
+
"[Rh-2]": 891,
|
1081 |
+
"[Fe-2]": 892,
|
1082 |
+
"[131I-]": 893,
|
1083 |
+
"[51Cr]": 894,
|
1084 |
+
"[62Cu+2]": 895,
|
1085 |
+
"[81Br]": 896,
|
1086 |
+
"[121Sb]": 897,
|
1087 |
+
"[7Li]": 898,
|
1088 |
+
"[89Zr+4]": 899,
|
1089 |
+
"[SbH3+]": 900,
|
1090 |
+
"[11C@@H]": 901,
|
1091 |
+
"[98Tc]": 902,
|
1092 |
+
"[59Fe+3]": 903,
|
1093 |
+
"[BiH2+]": 904,
|
1094 |
+
"[SbH+]": 905,
|
1095 |
+
"[TiH]": 906,
|
1096 |
+
"[14NH3]": 907,
|
1097 |
+
"[15OH]": 908,
|
1098 |
+
"[119Sn]": 909,
|
1099 |
+
"[201Hg]": 910,
|
1100 |
+
"[MnH+]": 911,
|
1101 |
+
"[201Tl]": 912,
|
1102 |
+
"[51Cr+3]": 913,
|
1103 |
+
"[123I-]": 914,
|
1104 |
+
"[MoH]": 915,
|
1105 |
+
"[AlH6-3]": 916,
|
1106 |
+
"[MnH2]": 917,
|
1107 |
+
"[WH3]": 918,
|
1108 |
+
"[213Bi+3]": 919,
|
1109 |
+
"[SnH2+2]": 920,
|
1110 |
+
"[123IH]": 921,
|
1111 |
+
"[13CH+]": 922,
|
1112 |
+
"[Zr-]": 923,
|
1113 |
+
"[74As]": 924,
|
1114 |
+
"[13C+]": 925,
|
1115 |
+
"[32P+]": 926,
|
1116 |
+
"[KrH]": 927,
|
1117 |
+
"[SiH+2]": 928,
|
1118 |
+
"[ClH3+2]": 929,
|
1119 |
+
"[13NH]": 930,
|
1120 |
+
"[9CH2]": 931,
|
1121 |
+
"[ZrH2+2]": 932,
|
1122 |
+
"[87Sr+2]": 933,
|
1123 |
+
"[35s]": 934,
|
1124 |
+
"[239Pu]": 935,
|
1125 |
+
"[198Au]": 936,
|
1126 |
+
"[241Am]": 937,
|
1127 |
+
"[203Hg+2]": 938,
|
1128 |
+
"[V+]": 939,
|
1129 |
+
"[YH2]": 940,
|
1130 |
+
"[195Pt]": 941,
|
1131 |
+
"[203Pb]": 942,
|
1132 |
+
"[RuH4]": 943,
|
1133 |
+
"[ThH2]": 944,
|
1134 |
+
"[AuH]": 945,
|
1135 |
+
"[66Ga+3]": 946,
|
1136 |
+
"[11B-]": 947,
|
1137 |
+
"[F]": 948,
|
1138 |
+
"[24Na+]": 949,
|
1139 |
+
"[85Sr+2]": 950,
|
1140 |
+
"[201Tl+]": 951,
|
1141 |
+
"[14CH4]": 952,
|
1142 |
+
"[32S]": 953,
|
1143 |
+
"[TeH2+]": 954,
|
1144 |
+
"[ClH2+3]": 955,
|
1145 |
+
"[AgH]": 956,
|
1146 |
+
"[Ge@H]": 957,
|
1147 |
+
"[44Ca+2]": 958,
|
1148 |
+
"[Os-]": 959,
|
1149 |
+
"[31P]": 960,
|
1150 |
+
"[15nH+]": 961,
|
1151 |
+
"[SbH4]": 962,
|
1152 |
+
"[TiH+]": 963,
|
1153 |
+
"[Ba+]": 964,
|
1154 |
+
"[57Co+2]": 965,
|
1155 |
+
"[Ta+]": 966,
|
1156 |
+
"[125IH]": 967,
|
1157 |
+
"[77As]": 968,
|
1158 |
+
"[129I]": 969,
|
1159 |
+
"[Fe-4]": 970,
|
1160 |
+
"[Ta-2]": 971,
|
1161 |
+
"[19O]": 972,
|
1162 |
+
"[12O]": 973,
|
1163 |
+
"[BiH3]": 974,
|
1164 |
+
"[237Np]": 975,
|
1165 |
+
"[252Cf]": 976,
|
1166 |
+
"[86Y]": 977,
|
1167 |
+
"[Cr-2]": 978,
|
1168 |
+
"[89Y]": 979,
|
1169 |
+
"[195Pt+2]": 980,
|
1170 |
+
"[si+2]": 981,
|
1171 |
+
"[58Fe+2]": 982,
|
1172 |
+
"[Hs]": 983,
|
1173 |
+
"[S@@H]": 984,
|
1174 |
+
"[8CH4]": 985,
|
1175 |
+
"[164Dy+3]": 986,
|
1176 |
+
"[47Ca+2]": 987,
|
1177 |
+
"[57Co]": 988,
|
1178 |
+
"[NbH2]": 989,
|
1179 |
+
"[ReH2]": 990,
|
1180 |
+
"[ZnH2]": 991,
|
1181 |
+
"[CrH2]": 992,
|
1182 |
+
"[17NH]": 993,
|
1183 |
+
"[ZrH3]": 994,
|
1184 |
+
"[RhH3]": 995,
|
1185 |
+
"[12C-]": 996,
|
1186 |
+
"[18O+]": 997,
|
1187 |
+
"[Bi-2]": 998,
|
1188 |
+
"[ClH4+3]": 999,
|
1189 |
+
"[Ni-3]": 1000,
|
1190 |
+
"[Ag-]": 1001,
|
1191 |
+
"[111In-]": 1002,
|
1192 |
+
"[Mo-2]": 1003,
|
1193 |
+
"[55Fe+3]": 1004,
|
1194 |
+
"[204Hg+]": 1005,
|
1195 |
+
"[35Cl-]": 1006,
|
1196 |
+
"[211Pb]": 1007,
|
1197 |
+
"[75Ge]": 1008,
|
1198 |
+
"[8B]": 1009,
|
1199 |
+
"[TeH3]": 1010,
|
1200 |
+
"[SnH3+]": 1011,
|
1201 |
+
"[Zr-3]": 1012,
|
1202 |
+
"[28F]": 1013,
|
1203 |
+
"[249Bk]": 1014,
|
1204 |
+
"[169Yb]": 1015,
|
1205 |
+
"[34SH]": 1016,
|
1206 |
+
"[6Li]": 1017,
|
1207 |
+
"[94Tc]": 1018,
|
1208 |
+
"[197Au]": 1019,
|
1209 |
+
"[195Pt+4]": 1020,
|
1210 |
+
"[169Yb+3]": 1021,
|
1211 |
+
"[32Cl]": 1022,
|
1212 |
+
"[82Se]": 1023,
|
1213 |
+
"[159Gd+3]": 1024,
|
1214 |
+
"[213Bi]": 1025,
|
1215 |
+
"[CoH+2]": 1026,
|
1216 |
+
"[36S]": 1027,
|
1217 |
+
"[35P]": 1028,
|
1218 |
+
"[Ru-4]": 1029,
|
1219 |
+
"[Cr-3]": 1030,
|
1220 |
+
"[60Co]": 1031,
|
1221 |
+
"[1H+]": 1032,
|
1222 |
+
"[18CH2]": 1033,
|
1223 |
+
"[Cd-]": 1034,
|
1224 |
+
"[152Sm+3]": 1035,
|
1225 |
+
"[106Ru]": 1036,
|
1226 |
+
"[238Pu]": 1037,
|
1227 |
+
"[220Rn]": 1038,
|
1228 |
+
"[45Ca+2]": 1039,
|
1229 |
+
"[89Sr+2]": 1040,
|
1230 |
+
"[239Np]": 1041,
|
1231 |
+
"[90Sr+2]": 1042,
|
1232 |
+
"[137Cs+]": 1043,
|
1233 |
+
"[165Dy]": 1044,
|
1234 |
+
"[68GaH3]": 1045,
|
1235 |
+
"[65Zn+2]": 1046,
|
1236 |
+
"[89Zr]": 1047,
|
1237 |
+
"[BiH2+2]": 1048,
|
1238 |
+
"[62Cu]": 1049,
|
1239 |
+
"[165Dy+3]": 1050,
|
1240 |
+
"[238U]": 1051,
|
1241 |
+
"[105Rh+3]": 1052,
|
1242 |
+
"[70Zn]": 1053,
|
1243 |
+
"[12B]": 1054,
|
1244 |
+
"[12OH]": 1055,
|
1245 |
+
"[18CH]": 1056,
|
1246 |
+
"[17CH]": 1057,
|
1247 |
+
"[42K]": 1058,
|
1248 |
+
"[76Br-]": 1059,
|
1249 |
+
"[71As]": 1060,
|
1250 |
+
"[NbH3]": 1061,
|
1251 |
+
"[ReH3]": 1062,
|
1252 |
+
"[OsH-]": 1063,
|
1253 |
+
"[WH4]": 1064,
|
1254 |
+
"[MoH3]": 1065,
|
1255 |
+
"[OsH4]": 1066,
|
1256 |
+
"[RuH6]": 1067,
|
1257 |
+
"[PtH3]": 1068,
|
1258 |
+
"[CuH2]": 1069,
|
1259 |
+
"[CoH3]": 1070,
|
1260 |
+
"[TiH4]": 1071,
|
1261 |
+
"[64Zn+2]": 1072,
|
1262 |
+
"[Si-2]": 1073,
|
1263 |
+
"[79BrH]": 1074,
|
1264 |
+
"[14CH2-]": 1075,
|
1265 |
+
"[PtH2+2]": 1076,
|
1266 |
+
"[Os-3]": 1077,
|
1267 |
+
"[29Si]": 1078,
|
1268 |
+
"[Ti-]": 1079,
|
1269 |
+
"[Se+6]": 1080,
|
1270 |
+
"[22Na+]": 1081,
|
1271 |
+
"[42K+]": 1082,
|
1272 |
+
"[131Cs+]": 1083,
|
1273 |
+
"[86Rb+]": 1084,
|
1274 |
+
"[134Cs+]": 1085,
|
1275 |
+
"[209Po]": 1086,
|
1276 |
+
"[208Po]": 1087,
|
1277 |
+
"[81Rb+]": 1088,
|
1278 |
+
"[203Tl+]": 1089,
|
1279 |
+
"[Zr-4]": 1090,
|
1280 |
+
"[148Sm]": 1091,
|
1281 |
+
"[147Sm]": 1092,
|
1282 |
+
"[37Cl-]": 1093,
|
1283 |
+
"[12CH4]": 1094,
|
1284 |
+
"[Ge@@H]": 1095,
|
1285 |
+
"[63Cu]": 1096,
|
1286 |
+
"[13CH2+]": 1097,
|
1287 |
+
"[AsH2-]": 1098,
|
1288 |
+
"[CeH]": 1099,
|
1289 |
+
"[SnH-]": 1100,
|
1290 |
+
"[UH]": 1101,
|
1291 |
+
"[9c]": 1102,
|
1292 |
+
"[21CH3]": 1103,
|
1293 |
+
"[TeH+]": 1104,
|
1294 |
+
"[57Co+3]": 1105,
|
1295 |
+
"[8BH2]": 1106,
|
1296 |
+
"[12BH2]": 1107,
|
1297 |
+
"[19BH2]": 1108,
|
1298 |
+
"[9BH2]": 1109,
|
1299 |
+
"[YbH2]": 1110,
|
1300 |
+
"[CrH+2]": 1111,
|
1301 |
+
"[208Bi]": 1112,
|
1302 |
+
"[152Gd]": 1113,
|
1303 |
+
"[61Cu]": 1114,
|
1304 |
+
"[115In]": 1115,
|
1305 |
+
"[60Co+2]": 1116,
|
1306 |
+
"[13NH2-]": 1117,
|
1307 |
+
"[120I]": 1118,
|
1308 |
+
"[18OH2]": 1119,
|
1309 |
+
"[75SeH]": 1120,
|
1310 |
+
"[SbH2+]": 1121,
|
1311 |
+
"[144Ce]": 1122,
|
1312 |
+
"[16n]": 1123,
|
1313 |
+
"[113In]": 1124,
|
1314 |
+
"[22nH]": 1125,
|
1315 |
+
"[129I-]": 1126,
|
1316 |
+
"[InH3]": 1127,
|
1317 |
+
"[32PH3]": 1128,
|
1318 |
+
"[234U]": 1129,
|
1319 |
+
"[235U]": 1130,
|
1320 |
+
"[59Fe]": 1131,
|
1321 |
+
"[82Rb+]": 1132,
|
1322 |
+
"[65Zn]": 1133,
|
1323 |
+
"[244Cm]": 1134,
|
1324 |
+
"[147Pm]": 1135,
|
1325 |
+
"[91Y]": 1136,
|
1326 |
+
"[237Pu]": 1137,
|
1327 |
+
"[231Pa]": 1138,
|
1328 |
+
"[253Cf]": 1139,
|
1329 |
+
"[127Te]": 1140,
|
1330 |
+
"[187Re]": 1141,
|
1331 |
+
"[236Np]": 1142,
|
1332 |
+
"[235Np]": 1143,
|
1333 |
+
"[72Zn]": 1144,
|
1334 |
+
"[253Es]": 1145,
|
1335 |
+
"[159Dy]": 1146,
|
1336 |
+
"[62Zn]": 1147,
|
1337 |
+
"[101Tc]": 1148,
|
1338 |
+
"[149Tb]": 1149,
|
1339 |
+
"[124I-]": 1150,
|
1340 |
+
"[SeH3+]": 1151,
|
1341 |
+
"[210Pb]": 1152,
|
1342 |
+
"[40K]": 1153,
|
1343 |
+
"[210Po]": 1154,
|
1344 |
+
"[214Pb]": 1155,
|
1345 |
+
"[218Po]": 1156,
|
1346 |
+
"[214Po]": 1157,
|
1347 |
+
"[7Be]": 1158,
|
1348 |
+
"[212Pb]": 1159,
|
1349 |
+
"[205Pb]": 1160,
|
1350 |
+
"[209Pb]": 1161,
|
1351 |
+
"[123Te]": 1162,
|
1352 |
+
"[202Pb]": 1163,
|
1353 |
+
"[72As]": 1164,
|
1354 |
+
"[201Pb]": 1165,
|
1355 |
+
"[70As]": 1166,
|
1356 |
+
"[73Ge]": 1167,
|
1357 |
+
"[200Pb]": 1168,
|
1358 |
+
"[198Pb]": 1169,
|
1359 |
+
"[66Ga]": 1170,
|
1360 |
+
"[73Se]": 1171,
|
1361 |
+
"[195Pb]": 1172,
|
1362 |
+
"[199Pb]": 1173,
|
1363 |
+
"[144Ce+3]": 1174,
|
1364 |
+
"[235U+2]": 1175,
|
1365 |
+
"[90Tc]": 1176,
|
1366 |
+
"[114In+3]": 1177,
|
1367 |
+
"[128I]": 1178,
|
1368 |
+
"[100Tc+]": 1179,
|
1369 |
+
"[82Br-]": 1180,
|
1370 |
+
"[191Pt+2]": 1181,
|
1371 |
+
"[191Pt+4]": 1182,
|
1372 |
+
"[193Pt+4]": 1183,
|
1373 |
+
"[31PH3]": 1184,
|
1374 |
+
"[125I+2]": 1185,
|
1375 |
+
"[131I+2]": 1186,
|
1376 |
+
"[125Te+4]": 1187,
|
1377 |
+
"[82Sr+2]": 1188,
|
1378 |
+
"[149Sm]": 1189,
|
1379 |
+
"[81BrH]": 1190,
|
1380 |
+
"[129Xe]": 1191,
|
1381 |
+
"[193Pt+2]": 1192,
|
1382 |
+
"[123I+2]": 1193,
|
1383 |
+
"[Cr-]": 1194,
|
1384 |
+
"[Co-]": 1195,
|
1385 |
+
"[227Th+4]": 1196,
|
1386 |
+
"[249Cf+3]": 1197,
|
1387 |
+
"[252Cf+3]": 1198,
|
1388 |
+
"[187Os]": 1199,
|
1389 |
+
"[16O-]": 1200,
|
1390 |
+
"[17O+]": 1201,
|
1391 |
+
"[16OH-]": 1202,
|
1392 |
+
"[98Tc+7]": 1203,
|
1393 |
+
"[58Co+2]": 1204,
|
1394 |
+
"[69Ga+3]": 1205,
|
1395 |
+
"[57Fe+2]": 1206,
|
1396 |
+
"[43K+]": 1207,
|
1397 |
+
"[16C]": 1208,
|
1398 |
+
"[52Fe+3]": 1209,
|
1399 |
+
"[SeH5]": 1210,
|
1400 |
+
"[194Pb]": 1211,
|
1401 |
+
"[196Pb]": 1212,
|
1402 |
+
"[197Pb]": 1213,
|
1403 |
+
"[213Pb]": 1214,
|
1404 |
+
"[9B]": 1215,
|
1405 |
+
"[19B]": 1216,
|
1406 |
+
"[11CH-]": 1217,
|
1407 |
+
"[9CH]": 1218,
|
1408 |
+
"[20OH]": 1219,
|
1409 |
+
"[25OH]": 1220,
|
1410 |
+
"[8cH]": 1221,
|
1411 |
+
"[TiH+3]": 1222,
|
1412 |
+
"[SnH6+3]": 1223,
|
1413 |
+
"[N@H+]": 1224,
|
1414 |
+
"[52Mn+2]": 1225,
|
1415 |
+
"[64Ga]": 1226,
|
1416 |
+
"[13B]": 1227,
|
1417 |
+
"[216Bi]": 1228,
|
1418 |
+
"[117Sn+2]": 1229,
|
1419 |
+
"[232Th]": 1230,
|
1420 |
+
"[SnH+2]": 1231,
|
1421 |
+
"[BiH5]": 1232,
|
1422 |
+
"[77Kr]": 1233,
|
1423 |
+
"[103Cd]": 1234,
|
1424 |
+
"[62Ni]": 1235,
|
1425 |
+
"[LaH3]": 1236,
|
1426 |
+
"[SmH3]": 1237,
|
1427 |
+
"[EuH3]": 1238,
|
1428 |
+
"[MoH5]": 1239,
|
1429 |
+
"[64Ni]": 1240,
|
1430 |
+
"[66Zn]": 1241,
|
1431 |
+
"[68Zn]": 1242,
|
1432 |
+
"[186W]": 1243,
|
1433 |
+
"[FeH4]": 1244,
|
1434 |
+
"[MoH4]": 1245,
|
1435 |
+
"[HgH2]": 1246,
|
1436 |
+
"[15NH2-]": 1247,
|
1437 |
+
"[UH2]": 1248,
|
1438 |
+
"[204Hg]": 1249,
|
1439 |
+
"[GaH4-]": 1250,
|
1440 |
+
"[ThH4]": 1251,
|
1441 |
+
"[WH6]": 1252,
|
1442 |
+
"[PtH4]": 1253,
|
1443 |
+
"[VH2]": 1254,
|
1444 |
+
"[UH3]": 1255,
|
1445 |
+
"[FeH3]": 1256,
|
1446 |
+
"[RuH5]": 1257,
|
1447 |
+
"[BiH4]": 1258,
|
1448 |
+
"[80Br-]": 1259,
|
1449 |
+
"[CeH3]": 1260,
|
1450 |
+
"[37ClH]": 1261,
|
1451 |
+
"[157Gd+3]": 1262,
|
1452 |
+
"[205Tl]": 1263,
|
1453 |
+
"[203Tl]": 1264,
|
1454 |
+
"[62Cu+]": 1265,
|
1455 |
+
"[64Cu+]": 1266,
|
1456 |
+
"[61Cu+]": 1267,
|
1457 |
+
"[37SH2]": 1268,
|
1458 |
+
"[30Si]": 1269,
|
1459 |
+
"[28Al]": 1270,
|
1460 |
+
"[19OH2]": 1271,
|
1461 |
+
"[8He]": 1272,
|
1462 |
+
"[6He]": 1273,
|
1463 |
+
"[153Pm]": 1274,
|
1464 |
+
"[209Bi]": 1275,
|
1465 |
+
"[66Zn+2]": 1276,
|
1466 |
+
"[10CH4]": 1277,
|
1467 |
+
"[191Ir]": 1278,
|
1468 |
+
"[66Cu]": 1279,
|
1469 |
+
"[16O+]": 1280,
|
1470 |
+
"[25O]": 1281,
|
1471 |
+
"[10c]": 1282,
|
1472 |
+
"[Co-3]": 1283,
|
1473 |
+
"[Sn@@]": 1284,
|
1474 |
+
"[17OH-]": 1285,
|
1475 |
+
"[206Po]": 1286,
|
1476 |
+
"[204Po]": 1287,
|
1477 |
+
"[202Po]": 1288,
|
1478 |
+
"[201Po]": 1289,
|
1479 |
+
"[200Po]": 1290,
|
1480 |
+
"[199Po]": 1291,
|
1481 |
+
"[198Po]": 1292,
|
1482 |
+
"[197Po]": 1293,
|
1483 |
+
"[196Po]": 1294,
|
1484 |
+
"[195Po]": 1295,
|
1485 |
+
"[194Po]": 1296,
|
1486 |
+
"[193Po]": 1297,
|
1487 |
+
"[192Po]": 1298,
|
1488 |
+
"[191Po]": 1299,
|
1489 |
+
"[190Po]": 1300,
|
1490 |
+
"[217Po]": 1301,
|
1491 |
+
"[BiH4-]": 1302,
|
1492 |
+
"[TeH4]": 1303,
|
1493 |
+
"[222Ra]": 1304,
|
1494 |
+
"[62Ga]": 1305,
|
1495 |
+
"[39Ar]": 1306,
|
1496 |
+
"[144Sm]": 1307,
|
1497 |
+
"[58Fe]": 1308,
|
1498 |
+
"[153Eu]": 1309,
|
1499 |
+
"[85Rb]": 1310,
|
1500 |
+
"[171Yb]": 1311,
|
1501 |
+
"[172Yb]": 1312,
|
1502 |
+
"[114Cd]": 1313,
|
1503 |
+
"[51Fe]": 1314,
|
1504 |
+
"[142Ce]": 1315,
|
1505 |
+
"[207Tl]": 1316,
|
1506 |
+
"[92Mo]": 1317,
|
1507 |
+
"[115Sn]": 1318,
|
1508 |
+
"[140Ce]": 1319,
|
1509 |
+
"[202Hg]": 1320,
|
1510 |
+
"[180W]": 1321,
|
1511 |
+
"[182W]": 1322,
|
1512 |
+
"[183W]": 1323,
|
1513 |
+
"[184W]": 1324,
|
1514 |
+
"[96Mo]": 1325,
|
1515 |
+
"[47Ti]": 1326,
|
1516 |
+
"[111Cd]": 1327,
|
1517 |
+
"[143Nd]": 1328,
|
1518 |
+
"[145Nd]": 1329,
|
1519 |
+
"[126Te]": 1330,
|
1520 |
+
"[128Te]": 1331,
|
1521 |
+
"[130Te]": 1332,
|
1522 |
+
"[185Re]": 1333,
|
1523 |
+
"[97Mo]": 1334,
|
1524 |
+
"[98Mo]": 1335,
|
1525 |
+
"[183Re]": 1336,
|
1526 |
+
"[52V]": 1337,
|
1527 |
+
"[80Se]": 1338,
|
1528 |
+
"[87Kr]": 1339,
|
1529 |
+
"[137Xe]": 1340,
|
1530 |
+
"[196Au]": 1341,
|
1531 |
+
"[146Ce]": 1342,
|
1532 |
+
"[88Kr]": 1343,
|
1533 |
+
"[51Ti]": 1344,
|
1534 |
+
"[138Xe]": 1345,
|
1535 |
+
"[112Cd]": 1346,
|
1536 |
+
"[116Sn]": 1347,
|
1537 |
+
"[120Sn]": 1348,
|
1538 |
+
"[28SiH3]": 1349,
|
1539 |
+
"[35S-]": 1350,
|
1540 |
+
"[15NH-]": 1351,
|
1541 |
+
"[13CH3+]": 1352,
|
1542 |
+
"[34S+]": 1353,
|
1543 |
+
"[34s]": 1354,
|
1544 |
+
"[SiH4-]": 1355,
|
1545 |
+
"[100Tc+5]": 1356,
|
1546 |
+
"[NiH2+2]": 1357,
|
1547 |
+
"[239Th]": 1358,
|
1548 |
+
"[186Lu]": 1359,
|
1549 |
+
"[AuH3]": 1360,
|
1550 |
+
"[I@@-]": 1361,
|
1551 |
+
"[XeH2]": 1362,
|
1552 |
+
"[B+]": 1363,
|
1553 |
+
"[16CH2]": 1364,
|
1554 |
+
"[8C]": 1365,
|
1555 |
+
"[TaH5]": 1366,
|
1556 |
+
"[FeH4-]": 1367,
|
1557 |
+
"[19C@H]": 1368,
|
1558 |
+
"[10NH]": 1369,
|
1559 |
+
"[FeH6-3]": 1370,
|
1560 |
+
"[22CH]": 1371,
|
1561 |
+
"[25N]": 1372,
|
1562 |
+
"[25N+]": 1373,
|
1563 |
+
"[25N-]": 1374,
|
1564 |
+
"[21CH2]": 1375,
|
1565 |
+
"[18cH]": 1376,
|
1566 |
+
"[113I]": 1377,
|
1567 |
+
"[ScH3]": 1378,
|
1568 |
+
"[30PH3]": 1379,
|
1569 |
+
"[43Ca+2]": 1380,
|
1570 |
+
"[41Ca+2]": 1381,
|
1571 |
+
"[106Cd]": 1382,
|
1572 |
+
"[122Sn]": 1383,
|
1573 |
+
"[18CH3]": 1384,
|
1574 |
+
"[58Co+3]": 1385,
|
1575 |
+
"[98Tc+4]": 1386,
|
1576 |
+
"[70Ge]": 1387,
|
1577 |
+
"[76Ge]": 1388,
|
1578 |
+
"[108Cd]": 1389,
|
1579 |
+
"[116Cd]": 1390,
|
1580 |
+
"[130Xe]": 1391,
|
1581 |
+
"[94Mo]": 1392,
|
1582 |
+
"[124Sn]": 1393,
|
1583 |
+
"[186Os]": 1394,
|
1584 |
+
"[188Os]": 1395,
|
1585 |
+
"[190Os]": 1396,
|
1586 |
+
"[192Os]": 1397,
|
1587 |
+
"[106Pd]": 1398,
|
1588 |
+
"[110Pd]": 1399,
|
1589 |
+
"[120Te]": 1400,
|
1590 |
+
"[132Ba]": 1401,
|
1591 |
+
"[134Ba]": 1402,
|
1592 |
+
"[136Ba]": 1403,
|
1593 |
+
"[136Ce]": 1404,
|
1594 |
+
"[138Ce]": 1405,
|
1595 |
+
"[156Dy]": 1406,
|
1596 |
+
"[158Dy]": 1407,
|
1597 |
+
"[160Dy]": 1408,
|
1598 |
+
"[163Dy]": 1409,
|
1599 |
+
"[162Er]": 1410,
|
1600 |
+
"[164Er]": 1411,
|
1601 |
+
"[167Er]": 1412,
|
1602 |
+
"[176Hf]": 1413,
|
1603 |
+
"[26Mg]": 1414,
|
1604 |
+
"[144Nd]": 1415,
|
1605 |
+
"[150Nd]": 1416,
|
1606 |
+
"[41K]": 1417,
|
1607 |
+
"[46Ti]": 1418,
|
1608 |
+
"[48Ti]": 1419,
|
1609 |
+
"[49Ti]": 1420,
|
1610 |
+
"[50Ti]": 1421,
|
1611 |
+
"[170Yb]": 1422,
|
1612 |
+
"[173Yb]": 1423,
|
1613 |
+
"[91Zr]": 1424,
|
1614 |
+
"[92Zr]": 1425,
|
1615 |
+
"[96Zr]": 1426,
|
1616 |
+
"[34S-]": 1427,
|
1617 |
+
"[CuH2-]": 1428,
|
1618 |
+
"[38Cl]": 1429,
|
1619 |
+
"[25Mg]": 1430,
|
1620 |
+
"[51V]": 1431,
|
1621 |
+
"[93Nb]": 1432,
|
1622 |
+
"[95Mo]": 1433,
|
1623 |
+
"[45Sc]": 1434,
|
1624 |
+
"[123Sb]": 1435,
|
1625 |
+
"[139La]": 1436,
|
1626 |
+
"[9Be]": 1437,
|
1627 |
+
"[99Y+3]": 1438,
|
1628 |
+
"[99Y]": 1439,
|
1629 |
+
"[156Ho]": 1440,
|
1630 |
+
"[67Zn]": 1441,
|
1631 |
+
"[144Ce+4]": 1442,
|
1632 |
+
"[210Tl]": 1443,
|
1633 |
+
"[42Ca]": 1444,
|
1634 |
+
"[54Fe]": 1445,
|
1635 |
+
"[193Ir]": 1446,
|
1636 |
+
"[92Nb]": 1447,
|
1637 |
+
"[141Cs]": 1448,
|
1638 |
+
"[52Cr]": 1449,
|
1639 |
+
"[35ClH]": 1450,
|
1640 |
+
"[46Ca]": 1451,
|
1641 |
+
"[139Cs]": 1452,
|
1642 |
+
"[65Cu]": 1453,
|
1643 |
+
"[71Ga]": 1454,
|
1644 |
+
"[60Ni]": 1455,
|
1645 |
+
"[16NH3]": 1456,
|
1646 |
+
"[148Nd]": 1457,
|
1647 |
+
"[72Ge]": 1458,
|
1648 |
+
"[161Dy]": 1459,
|
1649 |
+
"[49Ca]": 1460,
|
1650 |
+
"[43Ca]": 1461,
|
1651 |
+
"[8Be]": 1462,
|
1652 |
+
"[48Ca]": 1463,
|
1653 |
+
"[44Ca]": 1464,
|
1654 |
+
"[120Xe]": 1465,
|
1655 |
+
"[80Rb]": 1466,
|
1656 |
+
"[215At]": 1467,
|
1657 |
+
"[180Re]": 1468,
|
1658 |
+
"[146Sm]": 1469,
|
1659 |
+
"[19Ne]": 1470,
|
1660 |
+
"[74Kr]": 1471,
|
1661 |
+
"[134La]": 1472,
|
1662 |
+
"[76Kr]": 1473,
|
1663 |
+
"[219Fr]": 1474,
|
1664 |
+
"[121Xe]": 1475,
|
1665 |
+
"[220Fr]": 1476,
|
1666 |
+
"[216At]": 1477,
|
1667 |
+
"[223Ac]": 1478,
|
1668 |
+
"[218At]": 1479,
|
1669 |
+
"[37Ar]": 1480,
|
1670 |
+
"[135I]": 1481,
|
1671 |
+
"[110Cd]": 1482,
|
1672 |
+
"[94Tc+7]": 1483,
|
1673 |
+
"[86Y+3]": 1484,
|
1674 |
+
"[135I-]": 1485,
|
1675 |
+
"[15O-2]": 1486,
|
1676 |
+
"[151Eu+3]": 1487,
|
1677 |
+
"[161Tb+3]": 1488,
|
1678 |
+
"[197Hg+2]": 1489,
|
1679 |
+
"[109Cd+2]": 1490,
|
1680 |
+
"[191Os+4]": 1491,
|
1681 |
+
"[170Tm+3]": 1492,
|
1682 |
+
"[205Bi+3]": 1493,
|
1683 |
+
"[233U+4]": 1494,
|
1684 |
+
"[126Sb+3]": 1495,
|
1685 |
+
"[127Sb+3]": 1496,
|
1686 |
+
"[132Cs+]": 1497,
|
1687 |
+
"[136Eu+3]": 1498,
|
1688 |
+
"[136Eu]": 1499,
|
1689 |
+
"[125Sn+4]": 1500,
|
1690 |
+
"[175Yb+3]": 1501,
|
1691 |
+
"[100Mo]": 1502,
|
1692 |
+
"[22Ne]": 1503,
|
1693 |
+
"[13c-]": 1504,
|
1694 |
+
"[13NH4+]": 1505,
|
1695 |
+
"[17C]": 1506,
|
1696 |
+
"[9C]": 1507,
|
1697 |
+
"[31S]": 1508,
|
1698 |
+
"[31SH]": 1509,
|
1699 |
+
"[133I]": 1510,
|
1700 |
+
"[126I]": 1511,
|
1701 |
+
"[36SH]": 1512,
|
1702 |
+
"[30S]": 1513,
|
1703 |
+
"[32SH]": 1514,
|
1704 |
+
"[19CH2]": 1515,
|
1705 |
+
"[19c]": 1516,
|
1706 |
+
"[18c]": 1517,
|
1707 |
+
"[15F]": 1518,
|
1708 |
+
"[10C]": 1519,
|
1709 |
+
"[RuH-]": 1520,
|
1710 |
+
"[62Zn+2]": 1521,
|
1711 |
+
"[32ClH]": 1522,
|
1712 |
+
"[33ClH]": 1523,
|
1713 |
+
"[78BrH]": 1524,
|
1714 |
+
"[12Li+]": 1525,
|
1715 |
+
"[12Li]": 1526,
|
1716 |
+
"[233Ra]": 1527,
|
1717 |
+
"[68Ge+4]": 1528,
|
1718 |
+
"[44Sc+3]": 1529,
|
1719 |
+
"[91Y+3]": 1530,
|
1720 |
+
"[106Ru+3]": 1531,
|
1721 |
+
"[PoH2]": 1532,
|
1722 |
+
"[AtH]": 1533,
|
1723 |
+
"[55Fe]": 1534,
|
1724 |
+
"[233U]": 1535,
|
1725 |
+
"[210PoH2]": 1536,
|
1726 |
+
"[230Th]": 1537,
|
1727 |
+
"[228Th]": 1538,
|
1728 |
+
"[222Rn]": 1539,
|
1729 |
+
"[35SH2]": 1540,
|
1730 |
+
"[227Th]": 1541,
|
1731 |
+
"[192Ir]": 1542,
|
1732 |
+
"[133Xe]": 1543,
|
1733 |
+
"[81Kr]": 1544,
|
1734 |
+
"[95Zr]": 1545,
|
1735 |
+
"[240Pu]": 1546,
|
1736 |
+
"[54Mn]": 1547,
|
1737 |
+
"[103Ru]": 1548,
|
1738 |
+
"[95Nb]": 1549,
|
1739 |
+
"[109Cd]": 1550,
|
1740 |
+
"[141Ce]": 1551,
|
1741 |
+
"[85Kr]": 1552,
|
1742 |
+
"[110Ag]": 1553,
|
1743 |
+
"[58Co]": 1554,
|
1744 |
+
"[241Pu]": 1555,
|
1745 |
+
"[234Th]": 1556,
|
1746 |
+
"[140La]": 1557,
|
1747 |
+
"[63Ni]": 1558,
|
1748 |
+
"[152Eu]": 1559,
|
1749 |
+
"[132IH]": 1560,
|
1750 |
+
"[226Rn]": 1561,
|
1751 |
+
"[154Eu]": 1562,
|
1752 |
+
"[36ClH]": 1563,
|
1753 |
+
"[228Ac]": 1564,
|
1754 |
+
"[155Eu]": 1565,
|
1755 |
+
"[106Rh]": 1566,
|
1756 |
+
"[243Am]": 1567,
|
1757 |
+
"[227Ac]": 1568,
|
1758 |
+
"[243Cm]": 1569,
|
1759 |
+
"[236U]": 1570,
|
1760 |
+
"[144Pr]": 1571,
|
1761 |
+
"[232U]": 1572,
|
1762 |
+
"[32SH2]": 1573,
|
1763 |
+
"[88Y]": 1574,
|
1764 |
+
"[82BrH]": 1575,
|
1765 |
+
"[135IH]": 1576,
|
1766 |
+
"[242Cm]": 1577,
|
1767 |
+
"[115Cd]": 1578,
|
1768 |
+
"[242Pu]": 1579,
|
1769 |
+
"[46Sc]": 1580,
|
1770 |
+
"[56Mn]": 1581,
|
1771 |
+
"[234Pa]": 1582,
|
1772 |
+
"[41Ar]": 1583,
|
1773 |
+
"[147Nd]": 1584,
|
1774 |
+
"[187W]": 1585,
|
1775 |
+
"[151Sm]": 1586,
|
1776 |
+
"[59Ni]": 1587,
|
1777 |
+
"[233Pa]": 1588,
|
1778 |
+
"[52Mn]": 1589,
|
1779 |
+
"[94Nb]": 1590,
|
1780 |
+
"[219Rn]": 1591,
|
1781 |
+
"[236Pu]": 1592,
|
1782 |
+
"[13NH3]": 1593,
|
1783 |
+
"[93Zr]": 1594,
|
1784 |
+
"[51Cr+6]": 1595,
|
1785 |
+
"[TlH3]": 1596,
|
1786 |
+
"[123Xe]": 1597,
|
1787 |
+
"[160Tb]": 1598,
|
1788 |
+
"[170Tm]": 1599,
|
1789 |
+
"[182Ta]": 1600,
|
1790 |
+
"[175Yb]": 1601,
|
1791 |
+
"[93Mo]": 1602,
|
1792 |
+
"[143Ce]": 1603,
|
1793 |
+
"[191Os]": 1604,
|
1794 |
+
"[126IH]": 1605,
|
1795 |
+
"[48V]": 1606,
|
1796 |
+
"[113Cd]": 1607,
|
1797 |
+
"[47Sc]": 1608,
|
1798 |
+
"[181Hf]": 1609,
|
1799 |
+
"[185W]": 1610,
|
1800 |
+
"[143Pr]": 1611,
|
1801 |
+
"[191Pt]": 1612,
|
1802 |
+
"[181W]": 1613,
|
1803 |
+
"[33PH3]": 1614,
|
1804 |
+
"[97Ru]": 1615,
|
1805 |
+
"[97Tc]": 1616,
|
1806 |
+
"[111Ag]": 1617,
|
1807 |
+
"[169Er]": 1618,
|
1808 |
+
"[107Pd]": 1619,
|
1809 |
+
"[103Ru+2]": 1620,
|
1810 |
+
"[34SH2]": 1621,
|
1811 |
+
"[137Ce]": 1622,
|
1812 |
+
"[242Am]": 1623,
|
1813 |
+
"[117SnH2]": 1624,
|
1814 |
+
"[57Ni]": 1625,
|
1815 |
+
"[239U]": 1626,
|
1816 |
+
"[60Cu]": 1627,
|
1817 |
+
"[250Cf]": 1628,
|
1818 |
+
"[193Au]": 1629,
|
1819 |
+
"[69Zn]": 1630,
|
1820 |
+
"[55Co]": 1631,
|
1821 |
+
"[139Ce]": 1632,
|
1822 |
+
"[127Xe]": 1633,
|
1823 |
+
"[159Gd]": 1634,
|
1824 |
+
"[56Co]": 1635,
|
1825 |
+
"[177Hf]": 1636,
|
1826 |
+
"[244Pu]": 1637,
|
1827 |
+
"[38ClH]": 1638,
|
1828 |
+
"[142Pr]": 1639,
|
1829 |
+
"[199Hg]": 1640,
|
1830 |
+
"[179Hf]": 1641,
|
1831 |
+
"[178Hf]": 1642,
|
1832 |
+
"[237U]": 1643,
|
1833 |
+
"[156Eu]": 1644,
|
1834 |
+
"[157Eu]": 1645,
|
1835 |
+
"[105Ru]": 1646,
|
1836 |
+
"[171Tm]": 1647,
|
1837 |
+
"[199Au]": 1648,
|
1838 |
+
"[155Sm]": 1649,
|
1839 |
+
"[80BrH]": 1650,
|
1840 |
+
"[108Ag]": 1651,
|
1841 |
+
"[128IH]": 1652,
|
1842 |
+
"[48Sc]": 1653,
|
1843 |
+
"[45Ti]": 1654,
|
1844 |
+
"[176Lu]": 1655,
|
1845 |
+
"[121SnH2]": 1656,
|
1846 |
+
"[148Pm]": 1657,
|
1847 |
+
"[57Fe]": 1658,
|
1848 |
+
"[10BH3]": 1659,
|
1849 |
+
"[96Tc]": 1660,
|
1850 |
+
"[133IH]": 1661,
|
1851 |
+
"[143Pm]": 1662,
|
1852 |
+
"[105Rh]": 1663,
|
1853 |
+
"[130IH]": 1664,
|
1854 |
+
"[134IH]": 1665,
|
1855 |
+
"[131IH]": 1666,
|
1856 |
+
"[71Zn]": 1667,
|
1857 |
+
"[105Ag]": 1668,
|
1858 |
+
"[97Zr]": 1669,
|
1859 |
+
"[235Pu]": 1670,
|
1860 |
+
"[231Th]": 1671,
|
1861 |
+
"[109Pd]": 1672,
|
1862 |
+
"[93Y]": 1673,
|
1863 |
+
"[190Ir]": 1674,
|
1864 |
+
"[135Xe]": 1675,
|
1865 |
+
"[53Mn]": 1676,
|
1866 |
+
"[134Ce]": 1677,
|
1867 |
+
"[234Np]": 1678,
|
1868 |
+
"[240Am]": 1679,
|
1869 |
+
"[246Cf]": 1680,
|
1870 |
+
"[240Cm]": 1681,
|
1871 |
+
"[241Cm]": 1682,
|
1872 |
+
"[226Th]": 1683,
|
1873 |
+
"[39ClH]": 1684,
|
1874 |
+
"[229Th]": 1685,
|
1875 |
+
"[245Cm]": 1686,
|
1876 |
+
"[240U]": 1687,
|
1877 |
+
"[240Np]": 1688,
|
1878 |
+
"[249Cm]": 1689,
|
1879 |
+
"[243Pu]": 1690,
|
1880 |
+
"[145Pm]": 1691,
|
1881 |
+
"[199Pt]": 1692,
|
1882 |
+
"[246Bk]": 1693,
|
1883 |
+
"[193Pt]": 1694,
|
1884 |
+
"[230U]": 1695,
|
1885 |
+
"[250Cm]": 1696,
|
1886 |
+
"[44Ti]": 1697,
|
1887 |
+
"[175Hf]": 1698,
|
1888 |
+
"[254Fm]": 1699,
|
1889 |
+
"[255Fm]": 1700,
|
1890 |
+
"[257Fm]": 1701,
|
1891 |
+
"[92Y]": 1702,
|
1892 |
+
"[188Ir]": 1703,
|
1893 |
+
"[171Lu]": 1704,
|
1894 |
+
"[257Md]": 1705,
|
1895 |
+
"[247Bk]": 1706,
|
1896 |
+
"[121IH]": 1707,
|
1897 |
+
"[250Bk]": 1708,
|
1898 |
+
"[179Lu]": 1709,
|
1899 |
+
"[224Ac]": 1710,
|
1900 |
+
"[195Hg]": 1711,
|
1901 |
+
"[244Am]": 1712,
|
1902 |
+
"[246Pu]": 1713,
|
1903 |
+
"[194Au]": 1714,
|
1904 |
+
"[252Fm]": 1715,
|
1905 |
+
"[173Hf]": 1716,
|
1906 |
+
"[246Cm]": 1717,
|
1907 |
+
"[135Ce]": 1718,
|
1908 |
+
"[49Cr]": 1719,
|
1909 |
+
"[248Cf]": 1720,
|
1910 |
+
"[247Cm]": 1721,
|
1911 |
+
"[248Cm]": 1722,
|
1912 |
+
"[174Ta]": 1723,
|
1913 |
+
"[176Ta]": 1724,
|
1914 |
+
"[154Tb]": 1725,
|
1915 |
+
"[172Ta]": 1726,
|
1916 |
+
"[177Ta]": 1727,
|
1917 |
+
"[175Ta]": 1728,
|
1918 |
+
"[180Ta]": 1729,
|
1919 |
+
"[158Tb]": 1730,
|
1920 |
+
"[115Ag]": 1731,
|
1921 |
+
"[189Os]": 1732,
|
1922 |
+
"[251Cf]": 1733,
|
1923 |
+
"[145Pr]": 1734,
|
1924 |
+
"[147Pr]": 1735,
|
1925 |
+
"[76BrH]": 1736,
|
1926 |
+
"[102Rh]": 1737,
|
1927 |
+
"[238Np]": 1738,
|
1928 |
+
"[185Os]": 1739,
|
1929 |
+
"[246Am]": 1740,
|
1930 |
+
"[233Np]": 1741,
|
1931 |
+
"[166Dy]": 1742,
|
1932 |
+
"[254Es]": 1743,
|
1933 |
+
"[244Cf]": 1744,
|
1934 |
+
"[193Os]": 1745,
|
1935 |
+
"[245Am]": 1746,
|
1936 |
+
"[245Bk]": 1747,
|
1937 |
+
"[239Am]": 1748,
|
1938 |
+
"[238Am]": 1749,
|
1939 |
+
"[97Nb]": 1750,
|
1940 |
+
"[245Pu]": 1751,
|
1941 |
+
"[254Cf]": 1752,
|
1942 |
+
"[188W]": 1753,
|
1943 |
+
"[250Es]": 1754,
|
1944 |
+
"[251Es]": 1755,
|
1945 |
+
"[237Am]": 1756,
|
1946 |
+
"[182Hf]": 1757,
|
1947 |
+
"[258Md]": 1758,
|
1948 |
+
"[232Np]": 1759,
|
1949 |
+
"[238Cm]": 1760,
|
1950 |
+
"[60Fe]": 1761,
|
1951 |
+
"[109Pd+2]": 1762,
|
1952 |
+
"[234Pu]": 1763,
|
1953 |
+
"[141Ce+3]": 1764,
|
1954 |
+
"[136Nd]": 1765,
|
1955 |
+
"[136Pr]": 1766,
|
1956 |
+
"[173Ta]": 1767,
|
1957 |
+
"[110Ru]": 1768,
|
1958 |
+
"[147Tb]": 1769,
|
1959 |
+
"[253Fm]": 1770,
|
1960 |
+
"[139Nd]": 1771,
|
1961 |
+
"[178Re]": 1772,
|
1962 |
+
"[177Re]": 1773,
|
1963 |
+
"[200Au]": 1774,
|
1964 |
+
"[182Re]": 1775,
|
1965 |
+
"[156Tb]": 1776,
|
1966 |
+
"[155Tb]": 1777,
|
1967 |
+
"[157Tb]": 1778,
|
1968 |
+
"[161Tb]": 1779,
|
1969 |
+
"[161Ho]": 1780,
|
1970 |
+
"[167Tm]": 1781,
|
1971 |
+
"[173Lu]": 1782,
|
1972 |
+
"[179Ta]": 1783,
|
1973 |
+
"[171Er]": 1784,
|
1974 |
+
"[44Sc]": 1785,
|
1975 |
+
"[49Sc]": 1786,
|
1976 |
+
"[49V]": 1787,
|
1977 |
+
"[51Mn]": 1788,
|
1978 |
+
"[90Nb]": 1789,
|
1979 |
+
"[88Nb]": 1790,
|
1980 |
+
"[88Zr]": 1791,
|
1981 |
+
"[36SH2]": 1792,
|
1982 |
+
"[174Yb]": 1793,
|
1983 |
+
"[178Lu]": 1794,
|
1984 |
+
"[179W]": 1795,
|
1985 |
+
"[83BrH]": 1796,
|
1986 |
+
"[107Cd]": 1797,
|
1987 |
+
"[75BrH]": 1798,
|
1988 |
+
"[62Co]": 1799,
|
1989 |
+
"[48Cr]": 1800,
|
1990 |
+
"[63Zn]": 1801,
|
1991 |
+
"[102Ag]": 1802,
|
1992 |
+
"[154Sm]": 1803,
|
1993 |
+
"[168Er]": 1804,
|
1994 |
+
"[65Ni]": 1805,
|
1995 |
+
"[137La]": 1806,
|
1996 |
+
"[187Ir]": 1807,
|
1997 |
+
"[144Pm]": 1808,
|
1998 |
+
"[146Pm]": 1809,
|
1999 |
+
"[160Gd]": 1810,
|
2000 |
+
"[166Yb]": 1811,
|
2001 |
+
"[162Dy]": 1812,
|
2002 |
+
"[47V]": 1813,
|
2003 |
+
"[141Nd]": 1814,
|
2004 |
+
"[141Sm]": 1815,
|
2005 |
+
"[166Er]": 1816,
|
2006 |
+
"[150Sm]": 1817,
|
2007 |
+
"[146Eu]": 1818,
|
2008 |
+
"[149Eu]": 1819,
|
2009 |
+
"[174Lu]": 1820,
|
2010 |
+
"[17NH3]": 1821,
|
2011 |
+
"[102Ru]": 1822,
|
2012 |
+
"[170Hf]": 1823,
|
2013 |
+
"[188Pt]": 1824,
|
2014 |
+
"[61Ni]": 1825,
|
2015 |
+
"[56Ni]": 1826,
|
2016 |
+
"[149Gd]": 1827,
|
2017 |
+
"[151Gd]": 1828,
|
2018 |
+
"[141Pm]": 1829,
|
2019 |
+
"[147Gd]": 1830,
|
2020 |
+
"[146Gd]": 1831,
|
2021 |
+
"[161Er]": 1832,
|
2022 |
+
"[103Ag]": 1833,
|
2023 |
+
"[145Eu]": 1834,
|
2024 |
+
"[153Tb]": 1835,
|
2025 |
+
"[155Dy]": 1836,
|
2026 |
+
"[184Re]": 1837,
|
2027 |
+
"[180Os]": 1838,
|
2028 |
+
"[182Os]": 1839,
|
2029 |
+
"[186Pt]": 1840,
|
2030 |
+
"[181Os]": 1841,
|
2031 |
+
"[181Re]": 1842,
|
2032 |
+
"[151Tb]": 1843,
|
2033 |
+
"[178Ta]": 1844,
|
2034 |
+
"[178W]": 1845,
|
2035 |
+
"[189Pt]": 1846,
|
2036 |
+
"[194Hg]": 1847,
|
2037 |
+
"[145Sm]": 1848,
|
2038 |
+
"[150Tb]": 1849,
|
2039 |
+
"[132La]": 1850,
|
2040 |
+
"[158Gd]": 1851,
|
2041 |
+
"[104Ag]": 1852,
|
2042 |
+
"[193Hg]": 1853,
|
2043 |
+
"[94Ru]": 1854,
|
2044 |
+
"[137Pr]": 1855,
|
2045 |
+
"[155Ho]": 1856,
|
2046 |
+
"[117Cd]": 1857,
|
2047 |
+
"[99Ru]": 1858,
|
2048 |
+
"[146Nd]": 1859,
|
2049 |
+
"[218Rn]": 1860,
|
2050 |
+
"[95Y]": 1861,
|
2051 |
+
"[79Kr]": 1862,
|
2052 |
+
"[120IH]": 1863,
|
2053 |
+
"[138Pr]": 1864,
|
2054 |
+
"[100Pd]": 1865,
|
2055 |
+
"[166Tm]": 1866,
|
2056 |
+
"[90Mo]": 1867,
|
2057 |
+
"[151Nd]": 1868,
|
2058 |
+
"[231U]": 1869,
|
2059 |
+
"[138Nd]": 1870,
|
2060 |
+
"[89Nb]": 1871,
|
2061 |
+
"[98Nb]": 1872,
|
2062 |
+
"[162Ho]": 1873,
|
2063 |
+
"[142Sm]": 1874,
|
2064 |
+
"[186Ta]": 1875,
|
2065 |
+
"[104Tc]": 1876,
|
2066 |
+
"[184Ta]": 1877,
|
2067 |
+
"[185Ta]": 1878,
|
2068 |
+
"[170Er]": 1879,
|
2069 |
+
"[107Rh]": 1880,
|
2070 |
+
"[131La]": 1881,
|
2071 |
+
"[169Lu]": 1882,
|
2072 |
+
"[74BrH]": 1883,
|
2073 |
+
"[150Pm]": 1884,
|
2074 |
+
"[172Tm]": 1885,
|
2075 |
+
"[197Pt]": 1886,
|
2076 |
+
"[230Pu]": 1887,
|
2077 |
+
"[170Lu]": 1888,
|
2078 |
+
"[86Zr]": 1889,
|
2079 |
+
"[176W]": 1890,
|
2080 |
+
"[177W]": 1891,
|
2081 |
+
"[101Pd]": 1892,
|
2082 |
+
"[105Pd]": 1893,
|
2083 |
+
"[108Pd]": 1894,
|
2084 |
+
"[149Nd]": 1895,
|
2085 |
+
"[164Ho]": 1896,
|
2086 |
+
"[159Ho]": 1897,
|
2087 |
+
"[167Ho]": 1898,
|
2088 |
+
"[176Yb]": 1899,
|
2089 |
+
"[156Sm]": 1900,
|
2090 |
+
"[77BrH]": 1901,
|
2091 |
+
"[189Re]": 1902,
|
2092 |
+
"[99Rh]": 1903,
|
2093 |
+
"[100Rh]": 1904,
|
2094 |
+
"[151Pm]": 1905,
|
2095 |
+
"[232Pa]": 1906,
|
2096 |
+
"[228Pa]": 1907,
|
2097 |
+
"[230Pa]": 1908,
|
2098 |
+
"[66Ni]": 1909,
|
2099 |
+
"[194Os]": 1910,
|
2100 |
+
"[135La]": 1911,
|
2101 |
+
"[138La]": 1912,
|
2102 |
+
"[141La]": 1913,
|
2103 |
+
"[142La]": 1914,
|
2104 |
+
"[195Ir]": 1915,
|
2105 |
+
"[96Nb]": 1916,
|
2106 |
+
"[157Ho]": 1917,
|
2107 |
+
"[183Hf]": 1918,
|
2108 |
+
"[162Tm]": 1919,
|
2109 |
+
"[172Er]": 1920,
|
2110 |
+
"[148Eu]": 1921,
|
2111 |
+
"[150Eu]": 1922,
|
2112 |
+
"[15CH4]": 1923,
|
2113 |
+
"[89Kr]": 1924,
|
2114 |
+
"[143La]": 1925,
|
2115 |
+
"[58Ni]": 1926,
|
2116 |
+
"[61Co]": 1927,
|
2117 |
+
"[158Eu]": 1928,
|
2118 |
+
"[165Er]": 1929,
|
2119 |
+
"[167Yb]": 1930,
|
2120 |
+
"[173Tm]": 1931,
|
2121 |
+
"[175Tm]": 1932,
|
2122 |
+
"[172Hf]": 1933,
|
2123 |
+
"[172Lu]": 1934,
|
2124 |
+
"[93Tc]": 1935,
|
2125 |
+
"[177Yb]": 1936,
|
2126 |
+
"[124IH]": 1937,
|
2127 |
+
"[194Ir]": 1938,
|
2128 |
+
"[147Eu]": 1939,
|
2129 |
+
"[101Mo]": 1940,
|
2130 |
+
"[180Hf]": 1941,
|
2131 |
+
"[189Ir]": 1942,
|
2132 |
+
"[87Y]": 1943,
|
2133 |
+
"[43Sc]": 1944,
|
2134 |
+
"[195Au]": 1945,
|
2135 |
+
"[112Ag]": 1946,
|
2136 |
+
"[84BrH]": 1947,
|
2137 |
+
"[106Ag]": 1948,
|
2138 |
+
"[109Ag]": 1949,
|
2139 |
+
"[101Rh]": 1950,
|
2140 |
+
"[162Yb]": 1951,
|
2141 |
+
"[228Rn]": 1952,
|
2142 |
+
"[139Pr]": 1953,
|
2143 |
+
"[94Y]": 1954,
|
2144 |
+
"[201Au]": 1955,
|
2145 |
+
"[40PH3]": 1956,
|
2146 |
+
"[110Ag+]": 1957,
|
2147 |
+
"[104Cd]": 1958,
|
2148 |
+
"[133Ba+2]": 1959,
|
2149 |
+
"[226Ac]": 1960,
|
2150 |
+
"[145Gd]": 1961,
|
2151 |
+
"[186Ir]": 1962,
|
2152 |
+
"[184Ir]": 1963,
|
2153 |
+
"[224Rn]": 1964,
|
2154 |
+
"[185Ir]": 1965,
|
2155 |
+
"[182Ir]": 1966,
|
2156 |
+
"[184Hf]": 1967,
|
2157 |
+
"[200Pt]": 1968,
|
2158 |
+
"[227Pa]": 1969,
|
2159 |
+
"[178Yb]": 1970,
|
2160 |
+
"[72Br-]": 1971,
|
2161 |
+
"[72BrH]": 1972,
|
2162 |
+
"[248Am]": 1973,
|
2163 |
+
"[238Th]": 1974,
|
2164 |
+
"[161Gd]": 1975,
|
2165 |
+
"[35S-2]": 1976,
|
2166 |
+
"[107Ag]": 1977,
|
2167 |
+
"[FeH6-4]": 1978,
|
2168 |
+
"[89Sr]": 1979,
|
2169 |
+
"[SnH3-]": 1980,
|
2170 |
+
"[SeH3]": 1981,
|
2171 |
+
"[TeH3+]": 1982,
|
2172 |
+
"[SbH4+]": 1983,
|
2173 |
+
"[AsH4+]": 1984,
|
2174 |
+
"[4He]": 1985,
|
2175 |
+
"[AsH3-]": 1986,
|
2176 |
+
"[1HH]": 1987,
|
2177 |
+
"[3H+]": 1988,
|
2178 |
+
"[82Rb]": 1989,
|
2179 |
+
"[85Sr]": 1990,
|
2180 |
+
"[90Sr]": 1991,
|
2181 |
+
"[137Cs]": 1992,
|
2182 |
+
"[133Ba]": 1993,
|
2183 |
+
"[131Cs]": 1994,
|
2184 |
+
"[SbH5]": 1995,
|
2185 |
+
"[224Ra]": 1996,
|
2186 |
+
"[22Na]": 1997,
|
2187 |
+
"[210Bi]": 1998,
|
2188 |
+
"[214Bi]": 1999,
|
2189 |
+
"[228Ra]": 2000,
|
2190 |
+
"[127Sb]": 2001,
|
2191 |
+
"[136Cs]": 2002,
|
2192 |
+
"[125Sb]": 2003,
|
2193 |
+
"[134Cs]": 2004,
|
2194 |
+
"[140Ba]": 2005,
|
2195 |
+
"[45Ca]": 2006,
|
2196 |
+
"[206Pb]": 2007,
|
2197 |
+
"[207Pb]": 2008,
|
2198 |
+
"[24Na]": 2009,
|
2199 |
+
"[86Rb]": 2010,
|
2200 |
+
"[212Bi]": 2011,
|
2201 |
+
"[208Pb]": 2012,
|
2202 |
+
"[124Sb]": 2013,
|
2203 |
+
"[204Pb]": 2014,
|
2204 |
+
"[44K]": 2015,
|
2205 |
+
"[129Te]": 2016,
|
2206 |
+
"[113Sn]": 2017,
|
2207 |
+
"[204Tl]": 2018,
|
2208 |
+
"[87Sr]": 2019,
|
2209 |
+
"[208Tl]": 2020,
|
2210 |
+
"[87Rb]": 2021,
|
2211 |
+
"[47Ca]": 2022,
|
2212 |
+
"[135Cs]": 2023,
|
2213 |
+
"[216Po]": 2024,
|
2214 |
+
"[137Ba]": 2025,
|
2215 |
+
"[207Bi]": 2026,
|
2216 |
+
"[212Po]": 2027,
|
2217 |
+
"[79Se]": 2028,
|
2218 |
+
"[223Ra]": 2029,
|
2219 |
+
"[86Sr]": 2030,
|
2220 |
+
"[122Sb]": 2031,
|
2221 |
+
"[26Al]": 2032,
|
2222 |
+
"[32Si]": 2033,
|
2223 |
+
"[126Sn]": 2034,
|
2224 |
+
"[225Ra]": 2035,
|
2225 |
+
"[114In]": 2036,
|
2226 |
+
"[72Ga]": 2037,
|
2227 |
+
"[132Te]": 2038,
|
2228 |
+
"[10Be]": 2039,
|
2229 |
+
"[125Sn]": 2040,
|
2230 |
+
"[73As]": 2041,
|
2231 |
+
"[206Bi]": 2042,
|
2232 |
+
"[117Sn]": 2043,
|
2233 |
+
"[40Ca]": 2044,
|
2234 |
+
"[41Ca]": 2045,
|
2235 |
+
"[89Rb]": 2046,
|
2236 |
+
"[116In]": 2047,
|
2237 |
+
"[129Sb]": 2048,
|
2238 |
+
"[91Sr]": 2049,
|
2239 |
+
"[71Ge]": 2050,
|
2240 |
+
"[139Ba]": 2051,
|
2241 |
+
"[69Ga]": 2052,
|
2242 |
+
"[120Sb]": 2053,
|
2243 |
+
"[121Sn]": 2054,
|
2244 |
+
"[123Sn]": 2055,
|
2245 |
+
"[131Te]": 2056,
|
2246 |
+
"[77Ge]": 2057,
|
2247 |
+
"[135Ba]": 2058,
|
2248 |
+
"[82Sr]": 2059,
|
2249 |
+
"[43K]": 2060,
|
2250 |
+
"[131Ba]": 2061,
|
2251 |
+
"[92Sr]": 2062,
|
2252 |
+
"[88Rb]": 2063,
|
2253 |
+
"[129Cs]": 2064,
|
2254 |
+
"[144Cs]": 2065,
|
2255 |
+
"[127Cs]": 2066,
|
2256 |
+
"[200Tl]": 2067,
|
2257 |
+
"[202Tl]": 2068,
|
2258 |
+
"[141Ba]": 2069,
|
2259 |
+
"[117Sb]": 2070,
|
2260 |
+
"[116Sb]": 2071,
|
2261 |
+
"[78As]": 2072,
|
2262 |
+
"[131Sb]": 2073,
|
2263 |
+
"[126Sb]": 2074,
|
2264 |
+
"[128Sb]": 2075,
|
2265 |
+
"[130Sb]": 2076,
|
2266 |
+
"[67Ge]": 2077,
|
2267 |
+
"[68Ge]": 2078,
|
2268 |
+
"[78Ge]": 2079,
|
2269 |
+
"[66Ge]": 2080,
|
2270 |
+
"[223Fr]": 2081,
|
2271 |
+
"[132Cs]": 2082,
|
2272 |
+
"[125Cs]": 2083,
|
2273 |
+
"[138Cs]": 2084,
|
2274 |
+
"[133Te]": 2085,
|
2275 |
+
"[84Rb]": 2086,
|
2276 |
+
"[83Rb]": 2087,
|
2277 |
+
"[81Rb]": 2088,
|
2278 |
+
"[142Ba]": 2089,
|
2279 |
+
"[200Bi]": 2090,
|
2280 |
+
"[115Sb]": 2091,
|
2281 |
+
"[194Tl]": 2092,
|
2282 |
+
"[70Se]": 2093,
|
2283 |
+
"[112In]": 2094,
|
2284 |
+
"[118Sb]": 2095,
|
2285 |
+
"[70Ga]": 2096,
|
2286 |
+
"[27Mg]": 2097,
|
2287 |
+
"[202Bi]": 2098,
|
2288 |
+
"[83Se]": 2099,
|
2289 |
+
"[9Li]": 2100,
|
2290 |
+
"[69As]": 2101,
|
2291 |
+
"[79Rb]": 2102,
|
2292 |
+
"[81Sr]": 2103,
|
2293 |
+
"[83Sr]": 2104,
|
2294 |
+
"[78Se]": 2105,
|
2295 |
+
"[109In]": 2106,
|
2296 |
+
"[29Al]": 2107,
|
2297 |
+
"[118Sn]": 2108,
|
2298 |
+
"[117In]": 2109,
|
2299 |
+
"[119Sb]": 2110,
|
2300 |
+
"[114Sn]": 2111,
|
2301 |
+
"[138Ba]": 2112,
|
2302 |
+
"[69Ge]": 2113,
|
2303 |
+
"[73Ga]": 2114,
|
2304 |
+
"[74Ge]": 2115,
|
2305 |
+
"[206Tl]": 2116,
|
2306 |
+
"[199Tl]": 2117,
|
2307 |
+
"[130Cs]": 2118,
|
2308 |
+
"[28Mg]": 2119,
|
2309 |
+
"[116Te]": 2120,
|
2310 |
+
"[112Sn]": 2121,
|
2311 |
+
"[126Ba]": 2122,
|
2312 |
+
"[211Bi]": 2123,
|
2313 |
+
"[81Se]": 2124,
|
2314 |
+
"[127Sn]": 2125,
|
2315 |
+
"[143Cs]": 2126,
|
2316 |
+
"[134Te]": 2127,
|
2317 |
+
"[80Sr]": 2128,
|
2318 |
+
"[45K]": 2129,
|
2319 |
+
"[215Po]": 2130,
|
2320 |
+
"[207Po]": 2131,
|
2321 |
+
"[111Sn]": 2132,
|
2322 |
+
"[211Po]": 2133,
|
2323 |
+
"[128Ba]": 2134,
|
2324 |
+
"[198Tl]": 2135,
|
2325 |
+
"[227Ra]": 2136,
|
2326 |
+
"[213Po]": 2137,
|
2327 |
+
"[220Ra]": 2138,
|
2328 |
+
"[128Sn]": 2139,
|
2329 |
+
"[203Po]": 2140,
|
2330 |
+
"[205Po]": 2141,
|
2331 |
+
"[65Ga]": 2142,
|
2332 |
+
"[197Tl]": 2143,
|
2333 |
+
"[88Sr]": 2144,
|
2334 |
+
"[110In]": 2145,
|
2335 |
+
"[31Si]": 2146,
|
2336 |
+
"[201Bi]": 2147,
|
2337 |
+
"[121Te]": 2148,
|
2338 |
+
"[205Bi]": 2149,
|
2339 |
+
"[203Bi]": 2150,
|
2340 |
+
"[195Tl]": 2151,
|
2341 |
+
"[209Tl]": 2152,
|
2342 |
+
"[110Sn]": 2153,
|
2343 |
+
"[222Fr]": 2154,
|
2344 |
+
"[207At]": 2155,
|
2345 |
+
"[119In]": 2156,
|
2346 |
+
"[As@]": 2157,
|
2347 |
+
"[129IH]": 2158,
|
2348 |
+
"[157Dy]": 2159,
|
2349 |
+
"[111IH]": 2160,
|
2350 |
+
"[230Ra]": 2161,
|
2351 |
+
"[144Pr+3]": 2162,
|
2352 |
+
"[SiH3+]": 2163,
|
2353 |
+
"[3He]": 2164,
|
2354 |
+
"[AsH5]": 2165,
|
2355 |
+
"[72Se]": 2166,
|
2356 |
+
"[95Tc]": 2167,
|
2357 |
+
"[103Pd]": 2168,
|
2358 |
+
"[121Sn+2]": 2169,
|
2359 |
+
"[211Rn]": 2170,
|
2360 |
+
"[38SH2]": 2171,
|
2361 |
+
"[127IH]": 2172,
|
2362 |
+
"[74Br-]": 2173,
|
2363 |
+
"[133I-]": 2174,
|
2364 |
+
"[100Tc+4]": 2175,
|
2365 |
+
"[100Tc]": 2176,
|
2366 |
+
"[36Cl-]": 2177,
|
2367 |
+
"[89Y+3]": 2178,
|
2368 |
+
"[104Rh]": 2179,
|
2369 |
+
"[152Sm]": 2180,
|
2370 |
+
"[226Ra]": 2181,
|
2371 |
+
"[19FH]": 2182,
|
2372 |
+
"[104Pd]": 2183,
|
2373 |
+
"[148Gd]": 2184,
|
2374 |
+
"[157Lu]": 2185,
|
2375 |
+
"[33SH2]": 2186,
|
2376 |
+
"[121I-]": 2187,
|
2377 |
+
"[17FH]": 2188,
|
2378 |
+
"[71Se]": 2189,
|
2379 |
+
"[157Sm]": 2190,
|
2380 |
+
"[148Tb]": 2191,
|
2381 |
+
"[164Dy]": 2192,
|
2382 |
+
"[15OH2]": 2193,
|
2383 |
+
"[15O+]": 2194,
|
2384 |
+
"[39K]": 2195,
|
2385 |
+
"[40Ar]": 2196,
|
2386 |
+
"[50Cr+3]": 2197,
|
2387 |
+
"[50Cr]": 2198,
|
2388 |
+
"[52Ti]": 2199,
|
2389 |
+
"[103Pd+2]": 2200,
|
2390 |
+
"[130Ba]": 2201,
|
2391 |
+
"[142Pm]": 2202,
|
2392 |
+
"[153Gd+3]": 2203,
|
2393 |
+
"[151Eu]": 2204,
|
2394 |
+
"[103Rh]": 2205,
|
2395 |
+
"[124Xe]": 2206,
|
2396 |
+
"[152Tb]": 2207,
|
2397 |
+
"[17OH2]": 2208,
|
2398 |
+
"[20Ne]": 2209,
|
2399 |
+
"[52Fe]": 2210,
|
2400 |
+
"[94Zr+4]": 2211,
|
2401 |
+
"[94Zr]": 2212,
|
2402 |
+
"[149Pr]": 2213,
|
2403 |
+
"[16OH2]": 2214,
|
2404 |
+
"[53Cr+6]": 2215,
|
2405 |
+
"[53Cr]": 2216,
|
2406 |
+
"[81Br-]": 2217,
|
2407 |
+
"[112Pd]": 2218,
|
2408 |
+
"[125Xe]": 2219,
|
2409 |
+
"[155Gd]": 2220,
|
2410 |
+
"[157Gd]": 2221,
|
2411 |
+
"[168Yb]": 2222,
|
2412 |
+
"[184Os]": 2223,
|
2413 |
+
"[166Tb]": 2224,
|
2414 |
+
"[221Fr]": 2225,
|
2415 |
+
"[212Ra]": 2226,
|
2416 |
+
"[75Br-]": 2227,
|
2417 |
+
"[79Br-]": 2228,
|
2418 |
+
"[113Ag]": 2229,
|
2419 |
+
"[23Na]": 2230,
|
2420 |
+
"[34Cl-]": 2231,
|
2421 |
+
"[34ClH]": 2232,
|
2422 |
+
"[38Cl-]": 2233,
|
2423 |
+
"[56Fe]": 2234,
|
2424 |
+
"[68Cu]": 2235,
|
2425 |
+
"[77Br-]": 2236,
|
2426 |
+
"[90Zr+4]": 2237,
|
2427 |
+
"[90Zr]": 2238,
|
2428 |
+
"[102Pd]": 2239,
|
2429 |
+
"[154Eu+3]": 2240,
|
2430 |
+
"[57Mn]": 2241,
|
2431 |
+
"[165Tm]": 2242,
|
2432 |
+
"[152Dy]": 2243,
|
2433 |
+
"[217At]": 2244,
|
2434 |
+
"[77se]": 2245,
|
2435 |
+
"[13cH-]": 2246,
|
2436 |
+
"[122Te]": 2247,
|
2437 |
+
"[156Gd]": 2248,
|
2438 |
+
"[124Te]": 2249,
|
2439 |
+
"[53Ni]": 2250,
|
2440 |
+
"[131Xe]": 2251,
|
2441 |
+
"[174Hf+4]": 2252,
|
2442 |
+
"[174Hf]": 2253,
|
2443 |
+
"[76Se]": 2254,
|
2444 |
+
"[168Tm]": 2255,
|
2445 |
+
"[167Dy]": 2256,
|
2446 |
+
"[154Gd]": 2257,
|
2447 |
+
"[95Ru]": 2258,
|
2448 |
+
"[210At]": 2259,
|
2449 |
+
"[85Br]": 2260,
|
2450 |
+
"[59Co]": 2261,
|
2451 |
+
"[122Xe]": 2262,
|
2452 |
+
"[27Al]": 2263,
|
2453 |
+
"[54Cr]": 2264,
|
2454 |
+
"[198Hg]": 2265,
|
2455 |
+
"[85Rb+]": 2266,
|
2456 |
+
"[214Tl]": 2267,
|
2457 |
+
"[229Rn]": 2268,
|
2458 |
+
"[218Pb]": 2269,
|
2459 |
+
"[218Bi]": 2270,
|
2460 |
+
"[167Tm+3]": 2271,
|
2461 |
+
"[18o+]": 2272,
|
2462 |
+
"[P@@H+]": 2273,
|
2463 |
+
"[P@H+]": 2274,
|
2464 |
+
"[13N+]": 2275,
|
2465 |
+
"[212Pb+2]": 2276,
|
2466 |
+
"[217Bi]": 2277,
|
2467 |
+
"[249Cf+2]": 2278,
|
2468 |
+
"[18OH3+]": 2279,
|
2469 |
+
"[90Sr-]": 2280,
|
2470 |
+
"[Cf+3]": 2281,
|
2471 |
+
"[200Hg]": 2282,
|
2472 |
+
"[86Tc]": 2283,
|
2473 |
+
"[141Pr+3]": 2284,
|
2474 |
+
"[141Pr]": 2285,
|
2475 |
+
"[16nH]": 2286,
|
2476 |
+
"[14NH4+]": 2287,
|
2477 |
+
"[132Xe]": 2288,
|
2478 |
+
"[83Kr]": 2289,
|
2479 |
+
"[70Zn+2]": 2290,
|
2480 |
+
"[137Ba+2]": 2291,
|
2481 |
+
"[36Ar]": 2292,
|
2482 |
+
"[38Ar]": 2293,
|
2483 |
+
"[21Ne]": 2294,
|
2484 |
+
"[126Xe]": 2295,
|
2485 |
+
"[136Xe]": 2296,
|
2486 |
+
"[128Xe]": 2297,
|
2487 |
+
"[134Xe]": 2298,
|
2488 |
+
"[84Kr]": 2299,
|
2489 |
+
"[86Kr]": 2300,
|
2490 |
+
"[78Kr]": 2301,
|
2491 |
+
"[80Kr]": 2302,
|
2492 |
+
"[82Kr]": 2303,
|
2493 |
+
"[67Zn+2]": 2304,
|
2494 |
+
"[65Cu+2]": 2305,
|
2495 |
+
"[110Te]": 2306,
|
2496 |
+
"[58Fe+3]": 2307,
|
2497 |
+
"[142Nd]": 2308,
|
2498 |
+
"[38K]": 2309,
|
2499 |
+
"[198Au+3]": 2310,
|
2500 |
+
"[122IH]": 2311,
|
2501 |
+
"[38PH3]": 2312,
|
2502 |
+
"[130I-]": 2313,
|
2503 |
+
"[40K+]": 2314,
|
2504 |
+
"[38K+]": 2315,
|
2505 |
+
"[28Mg+2]": 2316,
|
2506 |
+
"[208Tl+]": 2317,
|
2507 |
+
"[13OH2]": 2318,
|
2508 |
+
"[198Bi]": 2319,
|
2509 |
+
"[192Bi]": 2320,
|
2510 |
+
"[194Bi]": 2321,
|
2511 |
+
"[196Bi]": 2322,
|
2512 |
+
"[132I-]": 2323,
|
2513 |
+
"[83Sr+2]": 2324,
|
2514 |
+
"[169Er+3]": 2325,
|
2515 |
+
"[122I-]": 2326,
|
2516 |
+
"[120I-]": 2327,
|
2517 |
+
"[92Sr+2]": 2328,
|
2518 |
+
"[126I-]": 2329,
|
2519 |
+
"[24Mg]": 2330,
|
2520 |
+
"[84Sr]": 2331,
|
2521 |
+
"[118Pd+2]": 2332,
|
2522 |
+
"[118Pd]": 2333,
|
2523 |
+
"[AsH4]": 2334,
|
2524 |
+
"[127I-]": 2335,
|
2525 |
+
"[9C-]": 2336,
|
2526 |
+
"[11CH3+]": 2337,
|
2527 |
+
"[17B]": 2338,
|
2528 |
+
"[7B]": 2339,
|
2529 |
+
"[4HH]": 2340,
|
2530 |
+
"[18C-]": 2341,
|
2531 |
+
"[22CH3-]": 2342,
|
2532 |
+
"[22CH4]": 2343,
|
2533 |
+
"[17C-]": 2344,
|
2534 |
+
"[15CH3]": 2345,
|
2535 |
+
"[16CH3]": 2346,
|
2536 |
+
"[11NH3]": 2347,
|
2537 |
+
"[21NH3]": 2348,
|
2538 |
+
"[11N-]": 2349,
|
2539 |
+
"[11NH]": 2350,
|
2540 |
+
"[16CH]": 2351,
|
2541 |
+
"[17CH2]": 2352,
|
2542 |
+
"[99Ru+2]": 2353,
|
2543 |
+
"[181Ta+2]": 2354,
|
2544 |
+
"[181Ta]": 2355,
|
2545 |
+
"[20CH]": 2356,
|
2546 |
+
"[32PH2]": 2357,
|
2547 |
+
"[55Fe+2]": 2358,
|
2548 |
+
"[SH3]": 2359,
|
2549 |
+
"[S@H]": 2360,
|
2550 |
+
"[UNK]": 2361
|
2551 |
+
},
|
2552 |
+
"merges": []
|
2553 |
+
}
|
2554 |
+
}
|
tokenizer_config.json
ADDED
@@ -0,0 +1,53 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
{
|
2 |
+
"added_tokens_decoder": {
|
3 |
+
"0": {
|
4 |
+
"content": "[CLS]",
|
5 |
+
"lstrip": false,
|
6 |
+
"normalized": false,
|
7 |
+
"rstrip": false,
|
8 |
+
"single_word": false,
|
9 |
+
"special": true
|
10 |
+
},
|
11 |
+
"1": {
|
12 |
+
"content": "[SEP]",
|
13 |
+
"lstrip": false,
|
14 |
+
"normalized": false,
|
15 |
+
"rstrip": false,
|
16 |
+
"single_word": false,
|
17 |
+
"special": true
|
18 |
+
},
|
19 |
+
"2": {
|
20 |
+
"content": "[PAD]",
|
21 |
+
"lstrip": false,
|
22 |
+
"normalized": false,
|
23 |
+
"rstrip": false,
|
24 |
+
"single_word": false,
|
25 |
+
"special": true
|
26 |
+
},
|
27 |
+
"3": {
|
28 |
+
"content": "[MASK]",
|
29 |
+
"lstrip": false,
|
30 |
+
"normalized": false,
|
31 |
+
"rstrip": false,
|
32 |
+
"single_word": false,
|
33 |
+
"special": true
|
34 |
+
},
|
35 |
+
"2361": {
|
36 |
+
"content": "[UNK]",
|
37 |
+
"lstrip": false,
|
38 |
+
"normalized": false,
|
39 |
+
"rstrip": false,
|
40 |
+
"single_word": false,
|
41 |
+
"special": true
|
42 |
+
}
|
43 |
+
},
|
44 |
+
"clean_up_tokenization_spaces": false,
|
45 |
+
"cls_token": "[CLS]",
|
46 |
+
"extra_special_tokens": {},
|
47 |
+
"mask_token": "[MASK]",
|
48 |
+
"model_max_length": 256,
|
49 |
+
"pad_token": "[PAD]",
|
50 |
+
"sep_token": "[SEP]",
|
51 |
+
"tokenizer_class": "PreTrainedTokenizerFast",
|
52 |
+
"unk_token": "[UNK]"
|
53 |
+
}
|