{ "architectures": [ "Siglip2Model" ], "initializer_factor": 1.0, "model_type": "siglip2", "text_config": { "hidden_size": 1152, "intermediate_size": 4304, "model_type": "siglip2_text_model", "num_attention_heads": 16, "num_hidden_layers": 27, "projection_size": 1152, "vocab_size": 256000 }, "torch_dtype": "float32", "transformers_version": "4.49.0.dev0", "vision_config": { "hidden_size": 1152, "intermediate_size": 4304, "model_type": "siglip2_vision_model", "num_attention_heads": 16, "num_hidden_layers": 27 } }