{
  "auto_mapping": null,
  "base_model_name_or_path": "abeja/gpt-neox-japanese-2.7b",
  "inference_mode": true,
  "num_attention_heads": 32,
  "num_layers": 32,
  "num_transformer_submodules": 1,
  "num_virtual_tokens": 16,
  "peft_type": "PROMPT_TUNING",
  "prompt_tuning_init": "TEXT",
  "prompt_tuning_init_text": "\u4ee5\u4e0b\u306e\u5546\u54c1\u30ec\u30d3\u30e5\u30fc\u304c\u4fa1\u683c\u306b\u3064\u3044\u3066\u8a00\u53ca\u3057\u3066\u3044\u308b\u304b\u3069\u3046\u304b\u3092\u5206\u985e\u3057\u3066\u304f\u3060\u3055\u3044",
  "revision": null,
  "task_type": "CAUSAL_LM",
  "token_dim": 2560,
  "tokenizer_kwargs": null,
  "tokenizer_name_or_path": "abeja/gpt-neox-japanese-2.7b"
}
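This is the adapter configuration that the Hugging Face peft library saves for a prompt-tuning adapter on abeja/gpt-neox-japanese-2.7b. The escaped `prompt_tuning_init_text` decodes to the Japanese instruction 「以下の商品レビューが価格について言及しているかどうかを分類してください」, roughly "Classify whether the following product review mentions the price." Below is a minimal sketch, under the assumption that peft and transformers are installed, of recreating an equivalent PromptTuningConfig and attaching it to the base model; variable names such as `peft_config` are illustrative, not taken from the original training code.

from transformers import AutoModelForCausalLM, AutoTokenizer
from peft import PromptTuningConfig, PromptTuningInit, TaskType, get_peft_model

base_model_name = "abeja/gpt-neox-japanese-2.7b"

tokenizer = AutoTokenizer.from_pretrained(base_model_name)
model = AutoModelForCausalLM.from_pretrained(base_model_name)

# Mirrors the JSON above: 16 virtual tokens for causal LM prompt tuning,
# initialized from the Japanese classification instruction.
peft_config = PromptTuningConfig(
    task_type=TaskType.CAUSAL_LM,
    prompt_tuning_init=PromptTuningInit.TEXT,
    num_virtual_tokens=16,
    prompt_tuning_init_text="以下の商品レビューが価格について言及しているかどうかを分類してください",
    tokenizer_name_or_path=base_model_name,
)

# Wrap the base model; only the virtual prompt embeddings are trainable.
model = get_peft_model(model, peft_config)
model.print_trainable_parameters()

To use the saved adapter itself rather than retrain it, the standard route is `PeftConfig.from_pretrained(adapter_path)` followed by `PeftModel.from_pretrained(base_model, adapter_path)`, where `adapter_path` points at the directory containing this config.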