{ "architectures": ["LWM-v1.1"], "hidden_size": 128, "num_attention_heads": 8, "num_hidden_layers": 12 }