🌋 LLaVA is a novel end-to-end trained large multimodal model that combines a vision encoder and Vicuna for general-purpose visual and language understanding. Updated to version 1.6.

Tags: vision · 7b · 13b · 34b

2M pulls · Updated 10 months ago

projector
72d6f08a42f6 · 624MB
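This blob is only the image-encoder/projector component; the language model ships as a separate blob. To use the full model, run `ollama run llava` or call Ollama's REST API, which accepts base64-encoded images for multimodal models. A minimal sketch, assuming a local Ollama server on the default port 11434 and a hypothetical image file `photo.jpg`:

```python
import base64
import json
import urllib.request

# Read and base64-encode a local image (the path is a placeholder).
with open("photo.jpg", "rb") as f:
    image_b64 = base64.b64encode(f.read()).decode("utf-8")

# Ollama's /api/generate endpoint takes a list of base64 images
# alongside the text prompt for multimodal models like llava.
payload = {
    "model": "llava",
    "prompt": "What is in this picture?",
    "images": [image_b64],
    "stream": False,
}
req = urllib.request.Request(
    "http://localhost:11434/api/generate",
    data=json.dumps(payload).encode("utf-8"),
    headers={"Content-Type": "application/json"},
)
with urllib.request.urlopen(req) as resp:
    print(json.loads(resp.read())["response"])
```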
Metadata

| Key | Value |
| --- | --- |
| clip.has_llava_projector | true |
| clip.has_text_encoder | false |
| clip.has_vision_encoder | true |
| clip.projector_type | mlp |
| clip.use_gelu | false |
| clip.vision.attention.head_count | 16 |
| clip.vision.attention.layer_norm_epsilon | 1e-05 |
| clip.vision.block_count | 23 |
| clip.vision.embedding_length | 1024 |
| clip.vision.feed_forward_length | 4096 |
| clip.vision.image_mean | [0.48145467, 0.4578275, 0.40821072] |
| clip.vision.image_size | 336 |
| clip.vision.image_std | [0.26862955, 0.2613026, 0.2757771] |
| clip.vision.patch_size | 14 |
| clip.vision.projection_dim | 768 |
| general.architecture | clip |
| general.description | image encoder for LLaVA |
| general.file_type | 1 |
| general.name | openai/clip-vit-large-patch14-336 |
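The vision settings above determine how many tokens each image contributes to the language model. A quick check, using only the values listed above (pure arithmetic, nothing read from the file):

```python
# Values copied from the metadata table above.
image_size = 336   # clip.vision.image_size
patch_size = 14    # clip.vision.patch_size

patches_per_side = image_size // patch_size   # 24
num_patches = patches_per_side ** 2           # 576 image patches
num_positions = num_patches + 1               # +1 class token -> 577

# Matches the position embedding in the tensor table below:
# v.position_embd.weight has shape [1024, 577].
print(num_patches, num_positions)  # 576 577
```

Note also that `clip.vision.block_count` is 23 rather than the 24 transformer layers of CLIP ViT-L/14; this is consistent with LLaVA taking image features from the penultimate layer, so the final block is not needed and is not exported.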
Tensor

| Name | Type | Shape |
| --- | --- | --- |
| mm.0.bias | F32 | [4096] |
| mm.0.weight | F16 | [1024, 4096] |
| mm.2.bias | F32 | [4096] |
| mm.2.weight | F16 | [4096, 4096] |
| v.blk.0.attn_k.bias | F32 | [1024] |
| v.blk.0.attn_k.weight | F16 | [1024, 1024] |
| v.blk.0.attn_out.bias | F32 | [1024] |
| v.blk.0.attn_out.weight | F16 | [1024, 1024] |
| v.blk.0.attn_q.bias | F32 | [1024] |
| v.blk.0.attn_q.weight | F16 | [1024, 1024] |
| v.blk.0.attn_v.bias | F32 | [1024] |
| v.blk.0.attn_v.weight | F16 | [1024, 1024] |
| v.blk.0.ffn_down.bias | F32 | [4096] |
| v.blk.0.ffn_down.weight | F16 | [1024, 4096] |
| v.blk.0.ffn_up.bias | F32 | [1024] |
| v.blk.0.ffn_up.weight | F16 | [4096, 1024] |
| v.blk.0.ln1.bias | F32 | [1024] |
| v.blk.0.ln1.weight | F32 | [1024] |
| v.blk.0.ln2.bias | F32 | [1024] |
| v.blk.0.ln2.weight | F32 | [1024] |
| v.blk.1.attn_k.bias | F32 | [1024] |
| v.blk.1.attn_k.weight | F16 | [1024, 1024] |
| v.blk.1.attn_out.bias | F32 | [1024] |
| v.blk.1.attn_out.weight | F16 | [1024, 1024] |
| v.blk.1.attn_q.bias | F32 | [1024] |
| v.blk.1.attn_q.weight | F16 | [1024, 1024] |
| v.blk.1.attn_v.bias | F32 | [1024] |
| v.blk.1.attn_v.weight | F16 | [1024, 1024] |
| v.blk.1.ffn_down.bias | F32 | [4096] |
| v.blk.1.ffn_down.weight | F16 | [1024, 4096] |
| v.blk.1.ffn_up.bias | F32 | [1024] |
| v.blk.1.ffn_up.weight | F16 | [4096, 1024] |
| v.blk.1.ln1.bias | F32 | [1024] |
| v.blk.1.ln1.weight | F32 | [1024] |
| v.blk.1.ln2.bias | F32 | [1024] |
| v.blk.1.ln2.weight | F32 | [1024] |
| v.blk.2.attn_k.bias | F32 | [1024] |
| v.blk.2.attn_k.weight | F16 | [1024, 1024] |
| v.blk.2.attn_out.bias | F32 | [1024] |
| v.blk.2.attn_out.weight | F16 | [1024, 1024] |
| v.blk.2.attn_q.bias | F32 | [1024] |
| v.blk.2.attn_q.weight | F16 | [1024, 1024] |
| v.blk.2.attn_v.bias | F32 | [1024] |
| v.blk.2.attn_v.weight | F16 | [1024, 1024] |
| v.blk.2.ffn_down.bias | F32 | [4096] |
| v.blk.2.ffn_down.weight | F16 | [1024, 4096] |
| v.blk.2.ffn_up.bias | F32 | [1024] |
| v.blk.2.ffn_up.weight | F16 | [4096, 1024] |
| v.blk.2.ln1.bias | F32 | [1024] |
| v.blk.2.ln1.weight | F32 | [1024] |
| v.blk.2.ln2.bias | F32 | [1024] |
| v.blk.2.ln2.weight | F32 | [1024] |
| v.blk.3.attn_k.bias | F32 | [1024] |
| v.blk.3.attn_k.weight | F16 | [1024, 1024] |
| v.blk.3.attn_out.bias | F32 | [1024] |
| v.blk.3.attn_out.weight | F16 | [1024, 1024] |
| v.blk.3.attn_q.bias | F32 | [1024] |
| v.blk.3.attn_q.weight | F16 | [1024, 1024] |
| v.blk.3.attn_v.bias | F32 | [1024] |
| v.blk.3.attn_v.weight | F16 | [1024, 1024] |
| v.blk.3.ffn_down.bias | F32 | [4096] |
| v.blk.3.ffn_down.weight | F16 | [1024, 4096] |
| v.blk.3.ffn_up.bias | F32 | [1024] |
| v.blk.3.ffn_up.weight | F16 | [4096, 1024] |
| v.blk.3.ln1.bias | F32 | [1024] |
| v.blk.3.ln1.weight | F32 | [1024] |
| v.blk.3.ln2.bias | F32 | [1024] |
| v.blk.3.ln2.weight | F32 | [1024] |
| v.blk.4.attn_k.bias | F32 | [1024] |
| v.blk.4.attn_k.weight | F16 | [1024, 1024] |
| v.blk.4.attn_out.bias | F32 | [1024] |
| v.blk.4.attn_out.weight | F16 | [1024, 1024] |
| v.blk.4.attn_q.bias | F32 | [1024] |
| v.blk.4.attn_q.weight | F16 | [1024, 1024] |
| v.blk.4.attn_v.bias | F32 | [1024] |
| v.blk.4.attn_v.weight | F16 | [1024, 1024] |
| v.blk.4.ffn_down.bias | F32 | [4096] |
| v.blk.4.ffn_down.weight | F16 | [1024, 4096] |
| v.blk.4.ffn_up.bias | F32 | [1024] |
| v.blk.4.ffn_up.weight | F16 | [4096, 1024] |
| v.blk.4.ln1.bias | F32 | [1024] |
| v.blk.4.ln1.weight | F32 | [1024] |
| v.blk.4.ln2.bias | F32 | [1024] |
| v.blk.4.ln2.weight | F32 | [1024] |
| v.blk.5.attn_k.bias | F32 | [1024] |
| v.blk.5.attn_k.weight | F16 | [1024, 1024] |
| v.blk.5.attn_out.bias | F32 | [1024] |
| v.blk.5.attn_out.weight | F16 | [1024, 1024] |
| v.blk.5.attn_q.bias | F32 | [1024] |
| v.blk.5.attn_q.weight | F16 | [1024, 1024] |
| v.blk.5.attn_v.bias | F32 | [1024] |
| v.blk.5.attn_v.weight | F16 | [1024, 1024] |
| v.blk.5.ffn_down.bias | F32 | [4096] |
| v.blk.5.ffn_down.weight | F16 | [1024, 4096] |
| v.blk.5.ffn_up.bias | F32 | [1024] |
| v.blk.5.ffn_up.weight | F16 | [4096, 1024] |
| v.blk.5.ln1.bias | F32 | [1024] |
| v.blk.5.ln1.weight | F32 | [1024] |
| v.blk.5.ln2.bias | F32 | [1024] |
| v.blk.5.ln2.weight | F32 | [1024] |
| v.blk.6.attn_k.bias | F32 | [1024] |
| v.blk.6.attn_k.weight | F16 | [1024, 1024] |
| v.blk.6.attn_out.bias | F32 | [1024] |
| v.blk.6.attn_out.weight | F16 | [1024, 1024] |
| v.blk.6.attn_q.bias | F32 | [1024] |
| v.blk.6.attn_q.weight | F16 | [1024, 1024] |
| v.blk.6.attn_v.bias | F32 | [1024] |
| v.blk.6.attn_v.weight | F16 | [1024, 1024] |
| v.blk.6.ffn_down.bias | F32 | [4096] |
| v.blk.6.ffn_down.weight | F16 | [1024, 4096] |
| v.blk.6.ffn_up.bias | F32 | [1024] |
| v.blk.6.ffn_up.weight | F16 | [4096, 1024] |
| v.blk.6.ln1.bias | F32 | [1024] |
| v.blk.6.ln1.weight | F32 | [1024] |
| v.blk.6.ln2.bias | F32 | [1024] |
| v.blk.6.ln2.weight | F32 | [1024] |
| v.blk.7.attn_k.bias | F32 | [1024] |
| v.blk.7.attn_k.weight | F16 | [1024, 1024] |
| v.blk.7.attn_out.bias | F32 | [1024] |
| v.blk.7.attn_out.weight | F16 | [1024, 1024] |
| v.blk.7.attn_q.bias | F32 | [1024] |
| v.blk.7.attn_q.weight | F16 | [1024, 1024] |
| v.blk.7.attn_v.bias | F32 | [1024] |
| v.blk.7.attn_v.weight | F16 | [1024, 1024] |
| v.blk.7.ffn_down.bias | F32 | [4096] |
| v.blk.7.ffn_down.weight | F16 | [1024, 4096] |
| v.blk.7.ffn_up.bias | F32 | [1024] |
| v.blk.7.ffn_up.weight | F16 | [4096, 1024] |
| v.blk.7.ln1.bias | F32 | [1024] |
| v.blk.7.ln1.weight | F32 | [1024] |
| v.blk.7.ln2.bias | F32 | [1024] |
| v.blk.7.ln2.weight | F32 | [1024] |
| v.blk.8.attn_k.bias | F32 | [1024] |
| v.blk.8.attn_k.weight | F16 | [1024, 1024] |
| v.blk.8.attn_out.bias | F32 | [1024] |
| v.blk.8.attn_out.weight | F16 | [1024, 1024] |
| v.blk.8.attn_q.bias | F32 | [1024] |
| v.blk.8.attn_q.weight | F16 | [1024, 1024] |
| v.blk.8.attn_v.bias | F32 | [1024] |
| v.blk.8.attn_v.weight | F16 | [1024, 1024] |
| v.blk.8.ffn_down.bias | F32 | [4096] |
| v.blk.8.ffn_down.weight | F16 | [1024, 4096] |
| v.blk.8.ffn_up.bias | F32 | [1024] |
| v.blk.8.ffn_up.weight | F16 | [4096, 1024] |
| v.blk.8.ln1.bias | F32 | [1024] |
| v.blk.8.ln1.weight | F32 | [1024] |
| v.blk.8.ln2.bias | F32 | [1024] |
| v.blk.8.ln2.weight | F32 | [1024] |
| v.blk.9.attn_k.bias | F32 | [1024] |
| v.blk.9.attn_k.weight | F16 | [1024, 1024] |
| v.blk.9.attn_out.bias | F32 | [1024] |
| v.blk.9.attn_out.weight | F16 | [1024, 1024] |
| v.blk.9.attn_q.bias | F32 | [1024] |
| v.blk.9.attn_q.weight | F16 | [1024, 1024] |
| v.blk.9.attn_v.bias | F32 | [1024] |
| v.blk.9.attn_v.weight | F16 | [1024, 1024] |
| v.blk.9.ffn_down.bias | F32 | [4096] |
| v.blk.9.ffn_down.weight | F16 | [1024, 4096] |
| v.blk.9.ffn_up.bias | F32 | [1024] |
| v.blk.9.ffn_up.weight | F16 | [4096, 1024] |
| v.blk.9.ln1.bias | F32 | [1024] |
| v.blk.9.ln1.weight | F32 | [1024] |
| v.blk.9.ln2.bias | F32 | [1024] |
| v.blk.9.ln2.weight | F32 | [1024] |
| v.blk.10.attn_k.bias | F32 | [1024] |
| v.blk.10.attn_k.weight | F16 | [1024, 1024] |
| v.blk.10.attn_out.bias | F32 | [1024] |
| v.blk.10.attn_out.weight | F16 | [1024, 1024] |
| v.blk.10.attn_q.bias | F32 | [1024] |
| v.blk.10.attn_q.weight | F16 | [1024, 1024] |
| v.blk.10.attn_v.bias | F32 | [1024] |
| v.blk.10.attn_v.weight | F16 | [1024, 1024] |
| v.blk.10.ffn_down.bias | F32 | [4096] |
| v.blk.10.ffn_down.weight | F16 | [1024, 4096] |
| v.blk.10.ffn_up.bias | F32 | [1024] |
| v.blk.10.ffn_up.weight | F16 | [4096, 1024] |
| v.blk.10.ln1.bias | F32 | [1024] |
| v.blk.10.ln1.weight | F32 | [1024] |
| v.blk.10.ln2.bias | F32 | [1024] |
| v.blk.10.ln2.weight | F32 | [1024] |
| v.blk.11.attn_k.bias | F32 | [1024] |
| v.blk.11.attn_k.weight | F16 | [1024, 1024] |
| v.blk.11.attn_out.bias | F32 | [1024] |
| v.blk.11.attn_out.weight | F16 | [1024, 1024] |
| v.blk.11.attn_q.bias | F32 | [1024] |
| v.blk.11.attn_q.weight | F16 | [1024, 1024] |
| v.blk.11.attn_v.bias | F32 | [1024] |
| v.blk.11.attn_v.weight | F16 | [1024, 1024] |
| v.blk.11.ffn_down.bias | F32 | [4096] |
| v.blk.11.ffn_down.weight | F16 | [1024, 4096] |
| v.blk.11.ffn_up.bias | F32 | [1024] |
| v.blk.11.ffn_up.weight | F16 | [4096, 1024] |
| v.blk.11.ln1.bias | F32 | [1024] |
| v.blk.11.ln1.weight | F32 | [1024] |
| v.blk.11.ln2.bias | F32 | [1024] |
| v.blk.11.ln2.weight | F32 | [1024] |
| v.blk.12.attn_k.bias | F32 | [1024] |
| v.blk.12.attn_k.weight | F16 | [1024, 1024] |
| v.blk.12.attn_out.bias | F32 | [1024] |
| v.blk.12.attn_out.weight | F16 | [1024, 1024] |
| v.blk.12.attn_q.bias | F32 | [1024] |
| v.blk.12.attn_q.weight | F16 | [1024, 1024] |
| v.blk.12.attn_v.bias | F32 | [1024] |
| v.blk.12.attn_v.weight | F16 | [1024, 1024] |
| v.blk.12.ffn_down.bias | F32 | [4096] |
| v.blk.12.ffn_down.weight | F16 | [1024, 4096] |
| v.blk.12.ffn_up.bias | F32 | [1024] |
| v.blk.12.ffn_up.weight | F16 | [4096, 1024] |
| v.blk.12.ln1.bias | F32 | [1024] |
| v.blk.12.ln1.weight | F32 | [1024] |
| v.blk.12.ln2.bias | F32 | [1024] |
| v.blk.12.ln2.weight | F32 | [1024] |
| v.blk.13.attn_k.bias | F32 | [1024] |
| v.blk.13.attn_k.weight | F16 | [1024, 1024] |
| v.blk.13.attn_out.bias | F32 | [1024] |
| v.blk.13.attn_out.weight | F16 | [1024, 1024] |
| v.blk.13.attn_q.bias | F32 | [1024] |
| v.blk.13.attn_q.weight | F16 | [1024, 1024] |
| v.blk.13.attn_v.bias | F32 | [1024] |
| v.blk.13.attn_v.weight | F16 | [1024, 1024] |
| v.blk.13.ffn_down.bias | F32 | [4096] |
| v.blk.13.ffn_down.weight | F16 | [1024, 4096] |
| v.blk.13.ffn_up.bias | F32 | [1024] |
| v.blk.13.ffn_up.weight | F16 | [4096, 1024] |
| v.blk.13.ln1.bias | F32 | [1024] |
| v.blk.13.ln1.weight | F32 | [1024] |
| v.blk.13.ln2.bias | F32 | [1024] |
| v.blk.13.ln2.weight | F32 | [1024] |
| v.blk.14.attn_k.bias | F32 | [1024] |
| v.blk.14.attn_k.weight | F16 | [1024, 1024] |
| v.blk.14.attn_out.bias | F32 | [1024] |
| v.blk.14.attn_out.weight | F16 | [1024, 1024] |
| v.blk.14.attn_q.bias | F32 | [1024] |
| v.blk.14.attn_q.weight | F16 | [1024, 1024] |
| v.blk.14.attn_v.bias | F32 | [1024] |
| v.blk.14.attn_v.weight | F16 | [1024, 1024] |
| v.blk.14.ffn_down.bias | F32 | [4096] |
| v.blk.14.ffn_down.weight | F16 | [1024, 4096] |
| v.blk.14.ffn_up.bias | F32 | [1024] |
| v.blk.14.ffn_up.weight | F16 | [4096, 1024] |
| v.blk.14.ln1.bias | F32 | [1024] |
| v.blk.14.ln1.weight | F32 | [1024] |
| v.blk.14.ln2.bias | F32 | [1024] |
| v.blk.14.ln2.weight | F32 | [1024] |
| v.blk.15.attn_k.bias | F32 | [1024] |
| v.blk.15.attn_k.weight | F16 | [1024, 1024] |
| v.blk.15.attn_out.bias | F32 | [1024] |
| v.blk.15.attn_out.weight | F16 | [1024, 1024] |
| v.blk.15.attn_q.bias | F32 | [1024] |
| v.blk.15.attn_q.weight | F16 | [1024, 1024] |
| v.blk.15.attn_v.bias | F32 | [1024] |
| v.blk.15.attn_v.weight | F16 | [1024, 1024] |
| v.blk.15.ffn_down.bias | F32 | [4096] |
| v.blk.15.ffn_down.weight | F16 | [1024, 4096] |
| v.blk.15.ffn_up.bias | F32 | [1024] |
| v.blk.15.ffn_up.weight | F16 | [4096, 1024] |
| v.blk.15.ln1.bias | F32 | [1024] |
| v.blk.15.ln1.weight | F32 | [1024] |
| v.blk.15.ln2.bias | F32 | [1024] |
| v.blk.15.ln2.weight | F32 | [1024] |
| v.blk.16.attn_k.bias | F32 | [1024] |
| v.blk.16.attn_k.weight | F16 | [1024, 1024] |
| v.blk.16.attn_out.bias | F32 | [1024] |
| v.blk.16.attn_out.weight | F16 | [1024, 1024] |
| v.blk.16.attn_q.bias | F32 | [1024] |
| v.blk.16.attn_q.weight | F16 | [1024, 1024] |
| v.blk.16.attn_v.bias | F32 | [1024] |
| v.blk.16.attn_v.weight | F16 | [1024, 1024] |
| v.blk.16.ffn_down.bias | F32 | [4096] |
| v.blk.16.ffn_down.weight | F16 | [1024, 4096] |
| v.blk.16.ffn_up.bias | F32 | [1024] |
| v.blk.16.ffn_up.weight | F16 | [4096, 1024] |
| v.blk.16.ln1.bias | F32 | [1024] |
| v.blk.16.ln1.weight | F32 | [1024] |
| v.blk.16.ln2.bias | F32 | [1024] |
| v.blk.16.ln2.weight | F32 | [1024] |
| v.blk.17.attn_k.bias | F32 | [1024] |
| v.blk.17.attn_k.weight | F16 | [1024, 1024] |
| v.blk.17.attn_out.bias | F32 | [1024] |
| v.blk.17.attn_out.weight | F16 | [1024, 1024] |
| v.blk.17.attn_q.bias | F32 | [1024] |
| v.blk.17.attn_q.weight | F16 | [1024, 1024] |
| v.blk.17.attn_v.bias | F32 | [1024] |
| v.blk.17.attn_v.weight | F16 | [1024, 1024] |
| v.blk.17.ffn_down.bias | F32 | [4096] |
| v.blk.17.ffn_down.weight | F16 | [1024, 4096] |
| v.blk.17.ffn_up.bias | F32 | [1024] |
| v.blk.17.ffn_up.weight | F16 | [4096, 1024] |
| v.blk.17.ln1.bias | F32 | [1024] |
| v.blk.17.ln1.weight | F32 | [1024] |
| v.blk.17.ln2.bias | F32 | [1024] |
| v.blk.17.ln2.weight | F32 | [1024] |
| v.blk.18.attn_k.bias | F32 | [1024] |
| v.blk.18.attn_k.weight | F16 | [1024, 1024] |
| v.blk.18.attn_out.bias | F32 | [1024] |
| v.blk.18.attn_out.weight | F16 | [1024, 1024] |
| v.blk.18.attn_q.bias | F32 | [1024] |
| v.blk.18.attn_q.weight | F16 | [1024, 1024] |
| v.blk.18.attn_v.bias | F32 | [1024] |
| v.blk.18.attn_v.weight | F16 | [1024, 1024] |
| v.blk.18.ffn_down.bias | F32 | [4096] |
| v.blk.18.ffn_down.weight | F16 | [1024, 4096] |
| v.blk.18.ffn_up.bias | F32 | [1024] |
| v.blk.18.ffn_up.weight | F16 | [4096, 1024] |
| v.blk.18.ln1.bias | F32 | [1024] |
| v.blk.18.ln1.weight | F32 | [1024] |
| v.blk.18.ln2.bias | F32 | [1024] |
| v.blk.18.ln2.weight | F32 | [1024] |
| v.blk.19.attn_k.bias | F32 | [1024] |
| v.blk.19.attn_k.weight | F16 | [1024, 1024] |
| v.blk.19.attn_out.bias | F32 | [1024] |
| v.blk.19.attn_out.weight | F16 | [1024, 1024] |
| v.blk.19.attn_q.bias | F32 | [1024] |
| v.blk.19.attn_q.weight | F16 | [1024, 1024] |
| v.blk.19.attn_v.bias | F32 | [1024] |
| v.blk.19.attn_v.weight | F16 | [1024, 1024] |
| v.blk.19.ffn_down.bias | F32 | [4096] |
| v.blk.19.ffn_down.weight | F16 | [1024, 4096] |
| v.blk.19.ffn_up.bias | F32 | [1024] |
| v.blk.19.ffn_up.weight | F16 | [4096, 1024] |
| v.blk.19.ln1.bias | F32 | [1024] |
| v.blk.19.ln1.weight | F32 | [1024] |
| v.blk.19.ln2.bias | F32 | [1024] |
| v.blk.19.ln2.weight | F32 | [1024] |
| v.blk.20.attn_k.bias | F32 | [1024] |
| v.blk.20.attn_k.weight | F16 | [1024, 1024] |
| v.blk.20.attn_out.bias | F32 | [1024] |
| v.blk.20.attn_out.weight | F16 | [1024, 1024] |
| v.blk.20.attn_q.bias | F32 | [1024] |
| v.blk.20.attn_q.weight | F16 | [1024, 1024] |
| v.blk.20.attn_v.bias | F32 | [1024] |
| v.blk.20.attn_v.weight | F16 | [1024, 1024] |
| v.blk.20.ffn_down.bias | F32 | [4096] |
| v.blk.20.ffn_down.weight | F16 | [1024, 4096] |
| v.blk.20.ffn_up.bias | F32 | [1024] |
| v.blk.20.ffn_up.weight | F16 | [4096, 1024] |
| v.blk.20.ln1.bias | F32 | [1024] |
| v.blk.20.ln1.weight | F32 | [1024] |
| v.blk.20.ln2.bias | F32 | [1024] |
| v.blk.20.ln2.weight | F32 | [1024] |
| v.blk.21.attn_k.bias | F32 | [1024] |
| v.blk.21.attn_k.weight | F16 | [1024, 1024] |
| v.blk.21.attn_out.bias | F32 | [1024] |
| v.blk.21.attn_out.weight | F16 | [1024, 1024] |
| v.blk.21.attn_q.bias | F32 | [1024] |
| v.blk.21.attn_q.weight | F16 | [1024, 1024] |
| v.blk.21.attn_v.bias | F32 | [1024] |
| v.blk.21.attn_v.weight | F16 | [1024, 1024] |
| v.blk.21.ffn_down.bias | F32 | [4096] |
| v.blk.21.ffn_down.weight | F16 | [1024, 4096] |
| v.blk.21.ffn_up.bias | F32 | [1024] |
| v.blk.21.ffn_up.weight | F16 | [4096, 1024] |
| v.blk.21.ln1.bias | F32 | [1024] |
| v.blk.21.ln1.weight | F32 | [1024] |
| v.blk.21.ln2.bias | F32 | [1024] |
| v.blk.21.ln2.weight | F32 | [1024] |
| v.blk.22.attn_k.bias | F32 | [1024] |
| v.blk.22.attn_k.weight | F16 | [1024, 1024] |
| v.blk.22.attn_out.bias | F32 | [1024] |
| v.blk.22.attn_out.weight | F16 | [1024, 1024] |
| v.blk.22.attn_q.bias | F32 | [1024] |
| v.blk.22.attn_q.weight | F16 | [1024, 1024] |
| v.blk.22.attn_v.bias | F32 | [1024] |
| v.blk.22.attn_v.weight | F16 | [1024, 1024] |
| v.blk.22.ffn_down.bias | F32 | [4096] |
| v.blk.22.ffn_down.weight | F16 | [1024, 4096] |
| v.blk.22.ffn_up.bias | F32 | [1024] |
| v.blk.22.ffn_up.weight | F16 | [4096, 1024] |
| v.blk.22.ln1.bias | F32 | [1024] |
| v.blk.22.ln1.weight | F32 | [1024] |
| v.blk.22.ln2.bias | F32 | [1024] |
| v.blk.22.ln2.weight | F32 | [1024] |
| v.class_embd | F32 | [1024] |
| v.patch_embd.weight | F16 | [14, 14, 3, 1024] |
| v.position_embd.weight | F16 | [1024, 577] |
| v.pre_ln.bias | F32 | [1024] |
| v.pre_ln.weight | F32 | [1024] |
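The mm.* tensors at the top of the table are the LLaVA projector itself: with `clip.projector_type = mlp`, they form a two-layer MLP that maps 1024-dimensional CLIP patch features into the language model's embedding space (the 4096 output width here matches a 7B-scale Vicuna's hidden size). The mm.0/mm.2 numbering reflects indices in a sequential module whose middle element (index 1) is the activation, which in the standard LLaVA mlp projector is a GELU. A minimal PyTorch sketch of that mapping, under those assumptions (note that GGUF lists weight shapes as [in, out], the transpose of nn.Linear's [out, in] storage):

```python
import torch
import torch.nn as nn

class LlavaMlpProjector(nn.Module):
    """Sketch of the mm.* projector: Linear -> GELU -> Linear."""

    def __init__(self, clip_dim: int = 1024, llm_dim: int = 4096):
        super().__init__()
        self.proj = nn.Sequential(
            nn.Linear(clip_dim, llm_dim),  # mm.0.weight / mm.0.bias
            nn.GELU(),                     # index 1: activation, no weights
            nn.Linear(llm_dim, llm_dim),   # mm.2.weight / mm.2.bias
        )

    def forward(self, image_features: torch.Tensor) -> torch.Tensor:
        # image_features: [batch, 576, 1024] patch features from the vision tower;
        # output: [batch, 576, 4096] tokens fed to the language model.
        return self.proj(image_features)
```

The projected patch features are then spliced into the language model's input sequence in place of the image placeholder, which is why the projector's output width must equal the LLM's embedding width.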