{
    "adapter_path": "adapters",
    "batch_size": 1,
    "config": "/dev/fd/11",
    "data": "data/",
    "fine_tune_type": "lora",
    "grad_checkpoint": false,
    "hf_dataset": {
        "name": "isaiahbjork/chain-of-thought",
        "prompt_feature": "prompt",
        "completion_feature": "response"
    },
    "iters": 600,
    "learning_rate": 1e-05,
    "lora_parameters": {
        "rank": 8,
        "alpha": 16,
        "dropout": 0.0,
        "scale": 10.0
    },
    "lr_schedule": null,
    "max_seq_length": 2048,
    "model": "Qwen/Qwen2.5-3B-Instruct",
    "num_layers": 16,
    "resume_adapter_file": null,
    "save_every": 100,
    "seed": 0,
    "steps_per_eval": 200,
    "steps_per_report": 10,
    "test": false,
    "test_batches": 500,
    "train": true,
    "val_batches": 25
}
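
This JSON appears to be the training configuration written out by an mlx-lm LoRA fine-tuning run (Qwen/Qwen2.5-3B-Instruct, rank-8 adapters on 16 layers, trained on the isaiahbjork/chain-of-thought dataset). As a minimal sketch using only the Python standard library, the snippet below loads such a file and prints the key hyperparameters before a run; the file name and path ("adapters/adapter_config.json") are assumptions, not confirmed by this repository.

import json
from pathlib import Path

# Assumed location: mlx-lm typically saves this config alongside the
# trained adapter weights; adjust the path to wherever this file lives.
config_path = Path("adapters/adapter_config.json")
config = json.loads(config_path.read_text())

lora = config["lora_parameters"]
print(f"Base model:      {config['model']}")
print(f"Fine-tune type:  {config['fine_tune_type']} on {config['num_layers']} layers")
print(f"LoRA rank/alpha: {lora['rank']}/{lora['alpha']} (scale={lora['scale']}, dropout={lora['dropout']})")
print(f"Schedule:        {config['iters']} iters, lr={config['learning_rate']}, batch size {config['batch_size']}")
print(f"Dataset:         {config['hf_dataset']['name']} "
      f"({config['hf_dataset']['prompt_feature']} -> {config['hf_dataset']['completion_feature']})")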