AIArchitect23 committed on
Commit 7a44c32 · verified · 1 Parent(s): 4fada78

Upload GPTNeoXForCausalLM

Files changed (3)
  1. README.md +6 -6
  2. config.json +2 -2
  3. generation_config.json +1 -1
README.md CHANGED
@@ -1,15 +1,15 @@
 ---
+base_model: EleutherAI/pythia-410m
+library_name: transformers
+license: other
 tags:
 - autotrain
 - text-generation-inference
 - text-generation
-library_name: transformers
-base_model: EleutherAI/pythia-410m
 widget:
-- messages:
-  - role: user
-    content: What is your favorite condiment?
-license: other
+- messages:
+  - role: user
+    content: What is your favorite condiment?
 ---
 
 # Model Trained Using AutoTrain
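The reordered front-matter keys (base_model, library_name, license, and the widget messages) are Hub metadata only; loading and prompting the uploaded GPTNeoXForCausalLM checkpoint still goes through the standard transformers API. A minimal sketch follows, assuming a placeholder repo id, since the commit view does not show the repository name.

```python
# Minimal sketch of loading and prompting the uploaded checkpoint.
# NOTE: the repo id below is a placeholder; substitute the actual Hub repository.
import torch
from transformers import AutoModelForCausalLM, AutoTokenizer

repo_id = "AIArchitect23/your-autotrain-model"  # hypothetical repo id

tokenizer = AutoTokenizer.from_pretrained(repo_id)
model = AutoModelForCausalLM.from_pretrained(repo_id, torch_dtype=torch.bfloat16)

# The model card widget uses a chat-style "messages" example, but Pythia-410m
# is a plain causal LM, so a raw text prompt is the safe default here.
prompt = "What is your favorite condiment?"
inputs = tokenizer(prompt, return_tensors="pt")
output = model.generate(**inputs, max_new_tokens=50)
print(tokenizer.decode(output[0], skip_special_tokens=True))
```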
config.json CHANGED
@@ -1,5 +1,5 @@
 {
-  "_name_or_path": "EleutherAI/pythia-410m",
+  "_name_or_path": "/home/nicknames/SN80/ai-factory-arxiv-model-improved",
   "architectures": [
     "GPTNeoXForCausalLM"
   ],
@@ -25,7 +25,7 @@
   "rotary_pct": 0.25,
   "tie_word_embeddings": false,
   "torch_dtype": "bfloat16",
-  "transformers_version": "4.48.0",
+  "transformers_version": "4.44.1",
   "use_cache": true,
   "use_parallel_residual": true,
   "vocab_size": 50277
generation_config.json CHANGED
@@ -2,6 +2,6 @@
   "_from_model_config": true,
   "bos_token_id": 0,
   "eos_token_id": 0,
-  "transformers_version": "4.48.0",
+  "transformers_version": "4.44.1",
   "use_cache": false
 }
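generation_config.json supplies the defaults that `model.generate()` falls back on when no explicit GenerationConfig is passed; for Pythia-style checkpoints both bos_token_id and eos_token_id are 0. A sketch of reading and overriding these defaults, assuming only the fields shown above and the same placeholder repo id:

```python
# Sketch: load the stored generation defaults and override use_cache per call.
# The repo id is a placeholder for the actual repository.
from transformers import GenerationConfig

gen_config = GenerationConfig.from_pretrained("AIArchitect23/your-autotrain-model")
print(gen_config.bos_token_id, gen_config.eos_token_id)  # expected: 0 0
print(gen_config.use_cache)                              # expected: False

# use_cache=False only changes the stored default; it can be re-enabled per
# call for faster autoregressive decoding, e.g.:
# output = model.generate(**inputs, max_new_tokens=50, use_cache=True)
```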