EdBerg committed on
Commit a8f18e4 · verified · 1 Parent(s): ec18102

Upload model trained with Unsloth


Upload model trained with Unsloth 2x faster

Files changed (3)
  1. README.md +1 -0
  2. adapter_config.json +7 -7
  3. adapter_model.safetensors +2 -2
README.md CHANGED
@@ -4,6 +4,7 @@ license: apache-2.0
 base_model: unsloth/mistral-7b-v0.3-bnb-4bit
 tags:
 - generated_from_trainer
+- unsloth
 model-index:
 - name: arabic_trained_model_mistral7b_completion
   results: []
adapter_config.json CHANGED
@@ -18,21 +18,21 @@
   "megatron_config": null,
   "megatron_core": "megatron.core",
   "modules_to_save": [
-    "lm_head",
-    "embed_tokens"
+    "embed_tokens",
+    "lm_head"
   ],
   "peft_type": "LORA",
   "r": 8,
   "rank_pattern": {},
   "revision": null,
   "target_modules": [
-    "up_proj",
-    "o_proj",
-    "k_proj",
+    "gate_proj",
     "v_proj",
     "down_proj",
-    "gate_proj",
-    "q_proj"
+    "o_proj",
+    "up_proj",
+    "q_proj",
+    "k_proj"
   ],
   "task_type": "CAUSAL_LM",
   "use_dora": false,
adapter_model.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:42a74220857e0ab3665f98ef61f5582f4744d0d37f40d056f3f5d6bcdbde317d
-size 620816432
+oid sha256:0302b1cd4045f74955dd6fa69f09928bdf00f4f80a62b3761afa08b619b8ba86
+size 1157689240