aamodthakur committed
Commit d19a8e1 · verified · 1 Parent(s): 362d076

Upload folder using huggingface_hub

config.json CHANGED
@@ -1,5 +1,5 @@
 {
-  "_name_or_path": "/raid/Hindi/Hindi_Model_5_3",
+  "_name_or_path": "Ganga-2-1B-PreAlpha_v0.1",
   "architectures": [
     "MistralForCausalLM"
   ],
@@ -22,7 +22,7 @@
   "sliding_window": 1024,
   "tie_word_embeddings": false,
   "torch_dtype": "bfloat16",
-  "transformers_version": "4.46.3",
+  "transformers_version": "4.48.2",
   "use_cache": true,
   "vocab_size": 32768
 }
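
The change swaps the local training path for the released model name and bumps the pinned transformers version. A minimal sketch of reading the updated fields back with AutoConfig; the model id below is an assumption, substitute the actual repo id or a local path:

```python
# Minimal sketch, assuming the model id "aamodthakur/Ganga-2-1B-PreAlpha_v0.1" (hypothetical);
# reads back the fields touched by this commit.
from transformers import AutoConfig

config = AutoConfig.from_pretrained("aamodthakur/Ganga-2-1B-PreAlpha_v0.1")
print(config.architectures)   # ['MistralForCausalLM']
print(config.sliding_window)  # 1024
print(config.vocab_size)      # 32768
print(config.torch_dtype)     # torch.bfloat16 (parsed from the "bfloat16" string)
```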
generation_config.json CHANGED
@@ -3,5 +3,5 @@
   "bos_token_id": 6,
   "eos_token_id": 3,
   "pad_token_id": 0,
-  "transformers_version": "4.46.3"
+  "transformers_version": "4.48.2"
 }
model.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:870d6b3ca3eaae09b87f1f14348c575eb09444c309ea14e9aa231d0fb80b176a
+oid sha256:185619e06ca95dd5fb88ecd4407490544f079c7f12ba8d5a6240aeb3b1ce703e
 size 2013417944
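
The weights file is stored via Git LFS, so only the pointer (oid and size) changes here. A minimal sketch of checking a locally downloaded model.safetensors against the new oid:

```python
# Minimal sketch: hash a downloaded model.safetensors and compare it with the
# sha256 oid recorded in the updated LFS pointer above.
import hashlib

digest = hashlib.sha256()
with open("model.safetensors", "rb") as f:
    for chunk in iter(lambda: f.read(1 << 20), b""):
        digest.update(chunk)

expected = "185619e06ca95dd5fb88ecd4407490544f079c7f12ba8d5a6240aeb3b1ce703e"
print(digest.hexdigest() == expected)
```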
special_tokens_map.json CHANGED
@@ -6,9 +6,39 @@
     "##",
     "###"
   ],
-  "bos_token": "<bos>",
-  "eos_token": "<eos>",
-  "mask_token": "<mask>",
-  "pad_token": "<pad>",
-  "unk_token": "<unk>"
+  "bos_token": {
+    "content": "<bos>",
+    "lstrip": false,
+    "normalized": false,
+    "rstrip": false,
+    "single_word": false
+  },
+  "eos_token": {
+    "content": "<eos>",
+    "lstrip": false,
+    "normalized": false,
+    "rstrip": false,
+    "single_word": false
+  },
+  "mask_token": {
+    "content": "<mask>",
+    "lstrip": false,
+    "normalized": false,
+    "rstrip": false,
+    "single_word": false
+  },
+  "pad_token": {
+    "content": "<pad>",
+    "lstrip": false,
+    "normalized": false,
+    "rstrip": false,
+    "single_word": false
+  },
+  "unk_token": {
+    "content": "<unk>",
+    "lstrip": false,
+    "normalized": false,
+    "rstrip": false,
+    "single_word": false
+  }
 }
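
The special tokens move from plain strings to the expanded form transformers uses to serialize AddedToken objects, so the lstrip/rstrip/normalized flags travel with the checkpoint. A minimal sketch of reading them back (model id assumed):

```python
# Minimal sketch, assuming the model id "aamodthakur/Ganga-2-1B-PreAlpha_v0.1" (hypothetical).
from transformers import AutoTokenizer

tok = AutoTokenizer.from_pretrained("aamodthakur/Ganga-2-1B-PreAlpha_v0.1")
print(tok.bos_token, tok.eos_token, tok.pad_token, tok.unk_token)  # <bos> <eos> <pad> <unk>
print(tok.special_tokens_map)  # same mapping, rendered back as plain strings
```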
tokenizer.json CHANGED
@@ -1,6 +1,11 @@
 {
   "version": "1.0",
-  "truncation": null,
+  "truncation": {
+    "direction": "Right",
+    "max_length": 2048,
+    "strategy": "LongestFirst",
+    "stride": 0
+  },
   "padding": null,
   "added_tokens": [
     {
@@ -162886,4 +162891,4 @@
   ]
   ]
   }
-}
+}
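
This turns on truncation in the fast-tokenizer file itself, so inputs are cut to 2048 tokens from the right. A minimal sketch, assuming a local copy of the updated tokenizer.json, of inspecting that setting with the tokenizers library:

```python
# Minimal sketch: load the updated tokenizer.json directly and inspect its truncation settings.
from tokenizers import Tokenizer

tok = Tokenizer.from_file("tokenizer.json")
print(tok.truncation)
# e.g. {'direction': 'Right', 'max_length': 2048, 'strategy': 'LongestFirst', 'stride': 0}
```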
tokenizer_config.json CHANGED
@@ -1,4 +1,6 @@
 {
+  "add_bos_token": true,
+  "add_eos_token": false,
   "added_tokens_decoder": {
     "0": {
       "content": "<pad>",
@@ -104,9 +106,11 @@
     "##",
     "###"
   ],
+  "chat_template": "{{ bos_token }}{% for message in messages %}{% if (message['role'] == 'user') != (loop.index0 % 2 == 0) %}{{ raise_exception('Conversation roles must alternate user/assistant/user/assistant/...') }}{% endif %}{% if message['role'] == 'user' %}{{ '<user>' + message['content'] + '<assistant>' }}{% elif message['role'] == 'assistant' %}{{ message['content'] + eos_token}}{% else %}{{ raise_exception('Only user and assistant roles are supported!') }}{% endif %}{% endfor %}",
   "bos_token": "<bos>",
   "clean_up_tokenization_spaces": false,
   "eos_token": "<eos>",
+  "extra_special_tokens": {},
   "mask_token": "<mask>",
   "model_max_length": 1000000000000000019884624838656,
   "pad_token": "<pad>",