shyam1909 committed on
Commit
39acded
·
1 Parent(s): c087bd4

Training in progress, step 10

Browse files
adapter_config.json CHANGED
@@ -1,7 +1,7 @@
1
  {
2
  "alpha_pattern": {},
3
  "auto_mapping": null,
4
- "base_model_name_or_path": null,
5
  "bias": "none",
6
  "fan_in_fan_out": false,
7
  "inference_mode": true,
@@ -19,9 +19,9 @@
19
  "rank_pattern": {},
20
  "revision": null,
21
  "target_modules": [
22
- "dense_h_to_4h",
23
  "dense",
24
  "dense_4h_to_h",
 
25
  "query_key_value"
26
  ],
27
  "task_type": "CAUSAL_LM",
 
1
  {
2
  "alpha_pattern": {},
3
  "auto_mapping": null,
4
+ "base_model_name_or_path": "vilsonrodrigues/falcon-7b-instruct-sharded",
5
  "bias": "none",
6
  "fan_in_fan_out": false,
7
  "inference_mode": true,
 
19
  "rank_pattern": {},
20
  "revision": null,
21
  "target_modules": [
 
22
  "dense",
23
  "dense_4h_to_h",
24
+ "dense_h_to_4h",
25
  "query_key_value"
26
  ],
27
  "task_type": "CAUSAL_LM",
adapter_model.safetensors CHANGED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:f59e48c18560872c2782076bde12ffc39c8844472799a26cff932c95c0b20034
3
- size 261136192
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:42d57d9a07f3ad33f7e0bd6d2fd58040b610ddf3901724d4ad3fad15e49df148
3
+ size 261131840
runs/Dec22_18-59-22_b1592d39be1f/events.out.tfevents.1703271565.b1592d39be1f.269.0 ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:5e632b899c8b11b448ca4a2cf8c71ce6d07df5cdb32ed9e8e221edcc54afd0d4
3
+ size 5720
training_args.bin CHANGED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:45d999f2a4c683ff4de92a4cb8174f9c5fc359b8610d90f4e8b492d16b104878
3
  size 4728
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:7ab501dcebbdd63c9a1672ebdbd84fd7421d94cf1d5e4ee392327b7eaa06c6a8
3
  size 4728