{
  "architectures": [
    "SiglipForImageClassification"
  ],
  "id2label": {
    "0": "Baked Potato",
    "1": "Crispy Chicken",
    "2": "Donut",
    "3": "Fries",
    "4": "Hot Dog",
    "5": "Sandwich",
    "6": "Taco",
    "7": "Taquito",
    "8": "Apple Pie",
    "9": "Burger",
    "10": "Butter Naan",
    "11": "Chai",
    "12": "Chapati",
    "13": "Cheesecake",
    "14": "Chicken Curry",
    "15": "Chole Bhature",
    "16": "Dal Makhani",
    "17": "Dhokla",
    "18": "Fried Rice",
    "19": "Ice Cream",
    "20": "Idli",
    "21": "Jalebi",
    "22": "Kaathi Rolls",
    "23": "Kadai Paneer",
    "24": "Kulfi",
    "25": "Masala Dosa",
    "26": "Momos",
    "27": "Omelette",
    "28": "Paani Puri",
    "29": "Pakode",
    "30": "Pav Bhaji",
    "31": "Pizza",
    "32": "Samosa",
    "33": "Sushi"
  },
  "initializer_factor": 1.0,
  "label2id": {
    "Apple Pie": 8,
    "Baked Potato": 0,
    "Burger": 9,
    "Butter Naan": 10,
    "Chai": 11,
    "Chapati": 12,
    "Cheesecake": 13,
    "Chicken Curry": 14,
    "Chole Bhature": 15,
    "Crispy Chicken": 1,
    "Dal Makhani": 16,
    "Dhokla": 17,
    "Donut": 2,
    "Fried Rice": 18,
    "Fries": 3,
    "Hot Dog": 4,
    "Ice Cream": 19,
    "Idli": 20,
    "Jalebi": 21,
    "Kaathi Rolls": 22,
    "Kadai Paneer": 23,
    "Kulfi": 24,
    "Masala Dosa": 25,
    "Momos": 26,
    "Omelette": 27,
    "Paani Puri": 28,
    "Pakode": 29,
    "Pav Bhaji": 30,
    "Pizza": 31,
    "Samosa": 32,
    "Sandwich": 5,
    "Sushi": 33,
    "Taco": 6,
    "Taquito": 7
  },
  "model_type": "siglip",
  "problem_type": "single_label_classification",
  "text_config": {
    "attention_dropout": 0.0,
    "hidden_act": "gelu_pytorch_tanh",
    "hidden_size": 768,
    "intermediate_size": 3072,
    "layer_norm_eps": 1e-06,
    "max_position_embeddings": 64,
    "model_type": "siglip_text_model",
    "num_attention_heads": 12,
    "num_hidden_layers": 12,
    "projection_size": 768,
    "torch_dtype": "float32",
    "vocab_size": 256000
  },
  "torch_dtype": "float32",
  "transformers_version": "4.50.0",
  "vision_config": {
    "attention_dropout": 0.0,
    "hidden_act": "gelu_pytorch_tanh",
    "hidden_size": 768,
    "image_size": 224,
    "intermediate_size": 3072,
    "layer_norm_eps": 1e-06,
    "model_type": "siglip_vision_model",
    "num_attention_heads": 12,
    "num_channels": 3,
    "num_hidden_layers": 12,
    "patch_size": 16,
    "torch_dtype": "float32"
  }
}