piercemaloney committed
Commit a03a938 (1 parent: a0f2ee7)

Training in progress, step 100

adapter_config.json CHANGED
@@ -23,32 +23,32 @@
   "rank_pattern": {},
   "revision": null,
   "target_modules": [
-    "32.self_attn.v_proj",
-    "34.self_attn.v_proj",
     "35.self_attn.v_proj",
-    "cross_attn.q_proj",
-    "model.layers.29.self_attn.q_proj",
-    "36.self_attn.q_proj",
-    "model.layers.30.self_attn.v_proj",
-    "model.layers.28.self_attn.q_proj",
+    "39.self_attn.v_proj",
     "model.layers.31.self_attn.v_proj",
-    "37.self_attn.q_proj",
+    "model.layers.30.self_attn.v_proj",
+    "34.self_attn.q_proj",
+    "32.self_attn.v_proj",
+    "model.layers.30.self_attn.q_proj",
+    "38.self_attn.q_proj",
     "model.layers.29.self_attn.v_proj",
-    "38.self_attn.v_proj",
-    "33.self_attn.v_proj",
-    "37.self_attn.v_proj",
-    "33.self_attn.q_proj",
-    "36.self_attn.v_proj",
+    "34.self_attn.v_proj",
+    "36.self_attn.q_proj",
     "model.layers.28.self_attn.v_proj",
-    "cross_attn.v_proj",
-    "39.self_attn.v_proj",
-    "32.self_attn.q_proj",
+    "38.self_attn.v_proj",
     "model.layers.31.self_attn.q_proj",
-    "34.self_attn.q_proj",
     "35.self_attn.q_proj",
+    "37.self_attn.v_proj",
+    "cross_attn.q_proj",
+    "model.layers.29.self_attn.q_proj",
+    "33.self_attn.v_proj",
     "39.self_attn.q_proj",
-    "38.self_attn.q_proj",
-    "model.layers.30.self_attn.q_proj"
+    "model.layers.28.self_attn.q_proj",
+    "32.self_attn.q_proj",
+    "33.self_attn.q_proj",
+    "37.self_attn.q_proj",
+    "cross_attn.v_proj",
+    "36.self_attn.v_proj"
   ],
   "task_type": null,
   "use_dora": false,
adapter_model.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:95da283a183fd1687ee158c46a4e084699967dc011530fa396391d3a7e8c60f7
+oid sha256:1064cab9ce0345e4aad15a3f33e9aace42e46ef1736e62d3d69c4ae5c0eaac12
 size 57944064
runs/Oct02_20-07-03_f253517289f1/events.out.tfevents.1727899636.f253517289f1.86129.0 ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:0db3b697b65a3b0bb71329298577ed8d4bc365b15953c22d76b125b84bd4f568
+size 9878
training_args.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:aac1ba8dd98988985da8caffa36c93aad53a7c8221c6a6812aa022188b5fe746
+oid sha256:5620c36c229a411ca8e9b20f35e4c40e389e0c9629e52935ddee803f286d3f2b
 size 5240