{
  "architectures": [
    "LlavaOnevisionForConditionalGeneration"
  ],
| "image_grid_pinpoints": [ | |
| [ | |
| 384, | |
| 384 | |
| ], | |
| [ | |
| 384, | |
| 768 | |
| ], | |
| [ | |
| 384, | |
| 1152 | |
| ], | |
| [ | |
| 384, | |
| 1536 | |
| ], | |
| [ | |
| 384, | |
| 1920 | |
| ], | |
| [ | |
| 384, | |
| 2304 | |
| ], | |
| [ | |
| 768, | |
| 384 | |
| ], | |
| [ | |
| 768, | |
| 768 | |
| ], | |
| [ | |
| 768, | |
| 1152 | |
| ], | |
| [ | |
| 768, | |
| 1536 | |
| ], | |
| [ | |
| 768, | |
| 1920 | |
| ], | |
| [ | |
| 768, | |
| 2304 | |
| ], | |
| [ | |
| 1152, | |
| 384 | |
| ], | |
| [ | |
| 1152, | |
| 768 | |
| ], | |
| [ | |
| 1152, | |
| 1152 | |
| ], | |
| [ | |
| 1152, | |
| 1536 | |
| ], | |
| [ | |
| 1152, | |
| 1920 | |
| ], | |
| [ | |
| 1152, | |
| 2304 | |
| ], | |
| [ | |
| 1536, | |
| 384 | |
| ], | |
| [ | |
| 1536, | |
| 768 | |
| ], | |
| [ | |
| 1536, | |
| 1152 | |
| ], | |
| [ | |
| 1536, | |
| 1536 | |
| ], | |
| [ | |
| 1536, | |
| 1920 | |
| ], | |
| [ | |
| 1536, | |
| 2304 | |
| ], | |
| [ | |
| 1920, | |
| 384 | |
| ], | |
| [ | |
| 1920, | |
| 768 | |
| ], | |
| [ | |
| 1920, | |
| 1152 | |
| ], | |
| [ | |
| 1920, | |
| 1536 | |
| ], | |
| [ | |
| 1920, | |
| 1920 | |
| ], | |
| [ | |
| 1920, | |
| 2304 | |
| ], | |
| [ | |
| 2304, | |
| 384 | |
| ], | |
| [ | |
| 2304, | |
| 768 | |
| ], | |
| [ | |
| 2304, | |
| 1152 | |
| ], | |
| [ | |
| 2304, | |
| 1536 | |
| ], | |
| [ | |
| 2304, | |
| 1920 | |
| ], | |
| [ | |
| 2304, | |
| 2304 | |
| ] | |
| ], | |
| "image_token_index": 256000, | |
| "model_type": "llava_onevision", | |
| "multimodal_projector_bias": true, | |
| "projector_hidden_act": "gelu", | |
| "text_config": { | |
| "_name_or_path": "Unbabel/Tower-Plus-2B", | |
| "architectures": [ | |
| "Gemma2ForCausalLM" | |
| ], | |
| "attention_bias": false, | |
| "attention_dropout": 0.0, | |
| "attn_logit_softcapping": 50.0, | |
| "cache_implementation": "hybrid", | |
| "eos_token_id": 107, | |
| "final_logit_softcapping": 30.0, | |
| "head_dim": 256, | |
| "hidden_act": "gelu_pytorch_tanh", | |
| "hidden_activation": "gelu_pytorch_tanh", | |
| "hidden_size": 2304, | |
| "initializer_range": 0.02, | |
| "intermediate_size": 9216, | |
| "layer_types": [ | |
| "sliding_attention", | |
| "full_attention", | |
| "sliding_attention", | |
| "full_attention", | |
| "sliding_attention", | |
| "full_attention", | |
| "sliding_attention", | |
| "full_attention", | |
| "sliding_attention", | |
| "full_attention", | |
| "sliding_attention", | |
| "full_attention", | |
| "sliding_attention", | |
| "full_attention", | |
| "sliding_attention", | |
| "full_attention", | |
| "sliding_attention", | |
| "full_attention", | |
| "sliding_attention", | |
| "full_attention", | |
| "sliding_attention", | |
| "full_attention", | |
| "sliding_attention", | |
| "full_attention", | |
| "sliding_attention", | |
| "full_attention" | |
| ], | |
| "max_position_embeddings": 8192, | |
| "model_type": "gemma2", | |
| "num_attention_heads": 8, | |
| "num_hidden_layers": 26, | |
| "num_key_value_heads": 4, | |
| "query_pre_attn_scalar": 224, | |
| "rms_norm_eps": 1e-06, | |
| "rope_theta": 10000.0, | |
| "sliding_window": 4096, | |
| "torch_dtype": "bfloat16", | |
| "use_cache": false, | |
| "vocab_size": 256064 | |
| }, | |
| "tie_word_embeddings": false, | |
| "torch_dtype": "float16", | |
| "transformers_version": "4.55.4", | |
| "use_image_newline_parameter": true, | |
| "video_token_index": 256001, | |
| "vision_aspect_ratio": "anyres_max_9", | |
| "vision_config": { | |
| "attention_dropout": 0.0, | |
| "hidden_act": "gelu_pytorch_tanh", | |
| "hidden_size": 1152, | |
| "image_size": 384, | |
| "intermediate_size": 4304, | |
| "layer_norm_eps": 1e-06, | |
| "model_type": "siglip_vision_model", | |
| "num_attention_heads": 16, | |
| "num_channels": 3, | |
| "num_hidden_layers": 26, | |
| "patch_size": 14, | |
| "vision_use_head": false | |
| }, | |
| "vision_feature_layer": -1, | |
| "vision_feature_select_strategy": "full" | |
| } | |
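
For reference, a minimal loading sketch in Python, assuming this file ships as the `config.json` of a Hugging Face checkpoint; the repo id below is a placeholder, not the model's actual name.

```python
# A minimal sketch, assuming a published checkpoint that carries this config.
import torch
from transformers import AutoProcessor, LlavaOnevisionForConditionalGeneration

model_id = "org/llava-onevision-tower-2b"  # hypothetical repo id

# The processor applies the anyres tiling defined by image_grid_pinpoints.
processor = AutoProcessor.from_pretrained(model_id)

# "torch_dtype": "float16" above; load in the matching dtype.
model = LlavaOnevisionForConditionalGeneration.from_pretrained(
    model_id,
    torch_dtype=torch.float16,
    device_map="auto",
)

# Single-image inference via the chat template.
from PIL import Image

conversation = [
    {
        "role": "user",
        "content": [
            {"type": "image"},
            {"type": "text", "text": "Describe this image."},
        ],
    }
]
prompt = processor.apply_chat_template(conversation, add_generation_prompt=True)
inputs = processor(
    images=Image.open("example.jpg"), text=prompt, return_tensors="pt"
).to(model.device)
output = model.generate(**inputs, max_new_tokens=64)
print(processor.decode(output[0], skip_special_tokens=True))
```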
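The `image_grid_pinpoints` list enumerates the candidate tile grids for anyres preprocessing: every combination of the 384 px SigLIP input size up to 6x its height and width, with the tile count capped by `"vision_aspect_ratio": "anyres_max_9"`. The sketch below is an illustrative re-implementation of the kind of best-fit selection the image processor performs over these candidates, not the library's exact code.

```python
# Illustrative only: pick the pinpoint grid that preserves the most image
# detail (highest effective resolution) while wasting the least padding area.
from typing import List, Tuple


def select_best_resolution(
    original: Tuple[int, int],          # (height, width) of the input image
    pinpoints: List[Tuple[int, int]],   # candidate (height, width) grids
) -> Tuple[int, int]:
    orig_h, orig_w = original
    best, best_effective, best_waste = None, -1, float("inf")
    for h, w in pinpoints:
        # Scale the image to fit inside the candidate grid without distortion.
        scale = min(h / orig_h, w / orig_w)
        fit_h, fit_w = int(orig_h * scale), int(orig_w * scale)
        # Effective resolution: usable pixels, capped at the original area
        # so upscaling beyond the source image earns no credit.
        effective = min(fit_h * fit_w, orig_h * orig_w)
        waste = h * w - effective
        if effective > best_effective or (
            effective == best_effective and waste < best_waste
        ):
            best, best_effective, best_waste = (h, w), effective, waste
    return best


# Example: a 1000x1500 photo maps onto one of the 384-multiple grids above.
grid = select_best_resolution((1000, 1500), [(384, 384), (384, 768), (768, 1152)])
print(grid)  # (768, 1152)
```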