File size: 325 Bytes
e2c0480
{
  "dataset_size": 60,
  "epochs": 2,
  "batch_size": 2,
  "learning_rate": 5e-05,
  "lora_rank": 8,
  "base_model": "elyza/Llama-3-ELYZA-JP-8B",
  "training_start": "2025-06-17T03:46:29.961358",
  "optimization": "conservative_training",
  "training_time": "0:01:09.425663",
  "training_end": "2025-06-17T03:47:40.274331"
}
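
Below is a minimal sketch of how this metadata could be read back and sanity-checked with the Python standard library. The filename training_metadata.json is an assumption; only the keys shown in the JSON above are used.

    import json
    from datetime import datetime

    # Load the recorded training metadata (filename is an assumption).
    with open("training_metadata.json") as f:
        meta = json.load(f)

    # Steps implied by the recorded hyperparameters:
    # 60 examples / batch size 2 = 30 steps per epoch, times 2 epochs = 60 steps.
    steps_per_epoch = meta["dataset_size"] // meta["batch_size"]
    total_steps = steps_per_epoch * meta["epochs"]

    # Recompute the wall-clock duration from the recorded timestamps.
    start = datetime.fromisoformat(meta["training_start"])
    end = datetime.fromisoformat(meta["training_end"])

    print(f"{total_steps} optimizer steps in {end - start} "
          f"(rank-{meta['lora_rank']} LoRA on {meta['base_model']})")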