ghiffaryr committed on
Commit 9b50bb5 · verified · 1 Parent(s): 945fafa

Training in progress, step 500

adapter_config.json CHANGED
@@ -3,6 +3,8 @@
   "auto_mapping": null,
   "base_model_name_or_path": "google/flan-t5-base",
   "bias": "none",
+  "eva_config": null,
+  "exclude_modules": null,
   "fan_in_fan_out": false,
   "inference_mode": true,
   "init_lora_weights": true,
@@ -11,6 +13,7 @@
   "layers_to_transform": null,
   "loftq_config": {},
   "lora_alpha": 32,
+  "lora_bias": false,
   "lora_dropout": 0.05,
   "megatron_config": null,
   "megatron_core": "megatron.core",
adapter_model.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:29f10d8660758a2221fef858ea931ee6ea6df153feb35408dc471b9f2bc32d51
+oid sha256:54bd4373c8359a3688e86808df8be9b2386971cca68bb812f5080d4b1efa207f
 size 14176016
runs/Mar03_08-41-50_GhiffaryR-PC/events.out.tfevents.1740987713.GhiffaryR-PC.14859.0 ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:7144ca3c76d609eae69da09f7336daa01b2ed67ef6a6f4040930e437dc22a678
+size 111907
training_args.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:48a6ce6ffbc92566347069ba3d4969a5b74f7b849b3d97e9fe3040062bd6560a
-size 4923
+oid sha256:4902f3eacc5822e994862fa9032e8dc46945d9ccbe4dfd7456758b3e06e7ccf0
+size 5432
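adapter_model.safetensors, the events file, and training_args.bin are tracked with Git LFS, so only the pointer's oid and size change in the diff; the roughly 14 MB adapter weights themselves live in LFS. A hedged sketch of loading this checkpoint with PEFT, where "path/to/adapter" is a placeholder for the checkpoint directory and the prompt is only an example:

# Sketch only: attaching the saved LoRA adapter back onto the base model.
# "path/to/adapter" is a placeholder, not a path from this commit.
from transformers import AutoModelForSeq2SeqLM, AutoTokenizer
from peft import PeftModel

base = AutoModelForSeq2SeqLM.from_pretrained("google/flan-t5-base")
tokenizer = AutoTokenizer.from_pretrained("google/flan-t5-base")

# Reads adapter_config.json + adapter_model.safetensors from the adapter directory.
model = PeftModel.from_pretrained(base, "path/to/adapter")

inputs = tokenizer("Translate to German: hello world", return_tensors="pt")
print(tokenizer.decode(model.generate(**inputs)[0], skip_special_tokens=True))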