mahiatlinux committed
Commit 9017617 · verified · 1 Parent(s): 61409cb

Model save

README.md CHANGED
@@ -1,16 +1,17 @@
 ---
+base_model: google/gemma-3-270m-it
 library_name: transformers
 model_name: MyGemmaNPC
 tags:
 - generated_from_trainer
-- sft
 - trl
+- sft
 licence: license
 ---
 
 # Model Card for MyGemmaNPC
 
-This model is a fine-tuned version of [None](https://huggingface.co/None).
+This model is a fine-tuned version of [google/gemma-3-270m-it](https://huggingface.co/google/gemma-3-270m-it).
 It has been trained using [TRL](https://github.com/huggingface/trl).
 
 ## Quick start
@@ -34,8 +35,8 @@ This model was trained with SFT.
 ### Framework versions
 
 - TRL: 0.21.0
-- Transformers: 4.55.2
-- Pytorch: 2.8.0+cu126
+- Transformers: 4.55.4
+- Pytorch: 2.8.0
 - Datasets: 4.0.0
 - Tokenizers: 0.21.4
 
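The README diff fills in the previously missing base model (google/gemma-3-270m-it) and updates the framework versions. The README's own Quick start section sits outside this hunk; a minimal usage sketch for a TRL SFT-trained chat model, assuming the repo id mahiatlinux/MyGemmaNPC (inferred from the commit author and model_name, not shown in the diff), would look like:

```python
# Minimal usage sketch; the repo id "mahiatlinux/MyGemmaNPC" is an assumption
# inferred from the commit author and model_name, not confirmed by this diff.
from transformers import pipeline

generator = pipeline("text-generation", model="mahiatlinux/MyGemmaNPC")

messages = [{"role": "user", "content": "Introduce yourself as a village NPC."}]
output = generator(messages, max_new_tokens=64, return_full_text=False)[0]
print(output["generated_text"])
```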
config.json CHANGED
@@ -47,7 +47,7 @@
   "rope_theta": 1000000.0,
   "sliding_window": 512,
   "torch_dtype": "bfloat16",
-  "transformers_version": "4.55.2",
+  "transformers_version": "4.55.4",
   "use_bidirectional_attention": false,
   "use_cache": true,
   "vocab_size": 262144
generation_config.json CHANGED
@@ -7,5 +7,5 @@
   ],
   "top_k": 64,
   "top_p": 0.95,
-  "transformers_version": "4.55.2"
+  "transformers_version": "4.55.4"
 }
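generation_config.json carries the default sampling settings (top_k=64, top_p=0.95) that generate() applies automatically when sampling is enabled; only the transformers_version stamp changes here. A sketch of how those defaults surface at inference time (repo id assumed as above):

```python
# Sketch: defaults from generation_config.json ride along with the model
# and are used by generate() when do_sample=True, unless overridden.
from transformers import AutoModelForCausalLM, AutoTokenizer

model_id = "mahiatlinux/MyGemmaNPC"  # assumed repo id
tokenizer = AutoTokenizer.from_pretrained(model_id)
model = AutoModelForCausalLM.from_pretrained(model_id)

print(model.generation_config.top_k, model.generation_config.top_p)  # 64 0.95

inputs = tokenizer("Hello there, traveller.", return_tensors="pt")
outputs = model.generate(**inputs, do_sample=True, max_new_tokens=32)
print(tokenizer.decode(outputs[0], skip_special_tokens=True))
```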
model.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:46f04b589408d96c7cfd34a0294d3a29cf2f65bc89af57d8b24c754e0dd9f409
+oid sha256:72176de05f82cc6f2054d37cdd361928b83a4115f7627fc1976ae51481fa40cc
 size 536223056
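The weights are stored through Git LFS, so the diff only shows the pointer: the size is unchanged at 536,223,056 bytes (consistent with roughly 268M parameters in bfloat16) while the content hash changes. A small sketch for verifying a downloaded copy against the new pointer hash:

```python
# Sketch: verify a locally downloaded model.safetensors against the LFS pointer hash.
import hashlib

def sha256_of(path: str, chunk_size: int = 1 << 20) -> str:
    h = hashlib.sha256()
    with open(path, "rb") as f:
        for chunk in iter(lambda: f.read(chunk_size), b""):
            h.update(chunk)
    return h.hexdigest()

expected = "72176de05f82cc6f2054d37cdd361928b83a4115f7627fc1976ae51481fa40cc"
print(sha256_of("model.safetensors") == expected)
```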
runs/Aug24_11-09-12_shali02.lx.pc.landcareresearch.co.nz/events.out.tfevents.1755990559.shali02.lx.pc.landcareresearch.co.nz.8932.0 ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:86109bf3c8147f08dce95f9a73f06f3b47d8b9cbbee20211204356a0b2d8049b
+size 16574
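The new file is a TensorBoard event log from the training run. A sketch for reading its scalars locally with TensorBoard's event accumulator (the "train/loss" tag is an assumption; list the available tags first):

```python
# Sketch: read training scalars from the added TensorBoard event file.
# Point the path at the downloaded runs/ directory from this repo.
from tensorboard.backend.event_processing.event_accumulator import EventAccumulator

acc = EventAccumulator("runs/Aug24_11-09-12_shali02.lx.pc.landcareresearch.co.nz")
acc.Reload()
print(acc.Tags()["scalars"])          # available scalar tags, e.g. "train/loss"
for event in acc.Scalars("train/loss"):  # assumed tag name
    print(event.step, event.value)
```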
training_args.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:3eebc159cf2b7329c38c7c81347ae91fa443462295017e992c4bf17ad5083f7c
+oid sha256:f07dfdca445156dd3c2891d6f143cb6f3a11c5e41fe4afd459b5d6d820e7f9d9
 size 6225
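training_args.bin is the pickled TrainingArguments object the Trainer saves next to the checkpoint; only its hash changes here. A sketch for inspecting the hyperparameters it records (it is an arbitrary pickle, so weights_only=False is required and the file should only be loaded from a trusted source):

```python
# Sketch: inspect the pickled TrainingArguments saved alongside the checkpoint.
# weights_only=False is needed because this is a pickled Python object,
# so only load files you trust.
import torch

args = torch.load("training_args.bin", weights_only=False)
print(args.learning_rate, args.per_device_train_batch_size, args.num_train_epochs)
```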