dolphin-mistral:7b-v2.2.1-fp16
379.4K Downloads · Updated 1 year ago
The uncensored Dolphin model based on Mistral that excels at coding tasks. Updated to version 2.8.
dolphin-mistral:7b-v2.2.1-fp16/model · d212ee7efc1f · 14GB
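
For reference, this tag can be pulled and exercised from Python. The sketch below is illustrative, assuming a running local Ollama server and the official ollama client package; the prompt string is a placeholder.

    import ollama

    MODEL = "dolphin-mistral:7b-v2.2.1-fp16"

    # Download the blob if it is not already present; note the fp16
    # weights alone are ~14GB, so plan disk and memory accordingly.
    ollama.pull(MODEL)

    # One-shot coding prompt against the local server.
    response = ollama.chat(
        model=MODEL,
        messages=[{"role": "user", "content": "Write a quicksort in Python."}],
    )
    print(response["message"]["content"])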
Metadata

general.architecture                    llama
general.file_type                       F16
llama.attention.head_count              32
llama.attention.head_count_kv           8
llama.attention.layer_norm_rms_epsilon  1e-05
llama.block_count                       32
llama.context_length                    32768
llama.embedding_length                  4096
llama.feed_forward_length               14336
llama.rope.dimension_count              128
llama.rope.freq_base                    10000
tokenizer.ggml.add_bos_token            true
tokenizer.ggml.add_eos_token            false
tokenizer.ggml.bos_token_id             1
tokenizer.ggml.eos_token_id             32000
tokenizer.ggml.model                    llama
tokenizer.ggml.scores                   [0, 0, 0, 0, 0, ...]
tokenizer.ggml.token_type               [2, 3, 3, 6, 6, ...]
tokenizer.ggml.tokens                   [<unk>, <s>, </s>, <0x00>, <0x01>, ...]
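
These keys live in the GGUF header of the model blob itself. A minimal offline-inspection sketch, assuming the gguf Python package that ships with the llama.cpp project; the file path is hypothetical (Ollama stores blobs under their sha256 digest).

    from gguf import GGUFReader

    # Hypothetical local path to the downloaded blob.
    reader = GGUFReader("path/to/dolphin-mistral-7b-v2.2.1-fp16.gguf")

    # Header keys as listed above, e.g. llama.context_length.
    for key in reader.fields:
        print(key)

    # Tensor records mirror the name/type/shape table below.
    for t in reader.tensors:
        print(t.name, t.tensor_type.name, list(t.shape))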
Tensors

Name                      Type Shape
token_embd.weight         F16  [4096, 32002]

blk.0
blk.0.attn_k.weight       F16  [4096, 1024]
blk.0.attn_norm.weight    F32  [4096]
blk.0.attn_output.weight  F16  [4096, 4096]
blk.0.attn_q.weight       F16  [4096, 4096]
blk.0.attn_v.weight       F16  [4096, 1024]
blk.0.ffn_down.weight     F16  [14336, 4096]
blk.0.ffn_gate.weight     F16  [4096, 14336]
blk.0.ffn_norm.weight     F32  [4096]
blk.0.ffn_up.weight       F16  [4096, 14336]

blk.1
blk.1.attn_k.weight       F16  [4096, 1024]
blk.1.attn_norm.weight    F32  [4096]
blk.1.attn_output.weight  F16  [4096, 4096]
blk.1.attn_q.weight       F16  [4096, 4096]
blk.1.attn_v.weight       F16  [4096, 1024]
blk.1.ffn_down.weight     F16  [14336, 4096]
blk.1.ffn_gate.weight     F16  [4096, 14336]
blk.1.ffn_norm.weight     F32  [4096]
blk.1.ffn_up.weight       F16  [4096, 14336]

blk.2
blk.2.attn_k.weight       F16  [4096, 1024]
blk.2.attn_norm.weight    F32  [4096]
blk.2.attn_output.weight  F16  [4096, 4096]
blk.2.attn_q.weight       F16  [4096, 4096]
blk.2.attn_v.weight       F16  [4096, 1024]
blk.2.ffn_down.weight     F16  [14336, 4096]
blk.2.ffn_gate.weight     F16  [4096, 14336]
blk.2.ffn_norm.weight     F32  [4096]
blk.2.ffn_up.weight       F16  [4096, 14336]

blk.3
blk.3.attn_k.weight       F16  [4096, 1024]
blk.3.attn_norm.weight    F32  [4096]
blk.3.attn_output.weight  F16  [4096, 4096]
blk.3.attn_q.weight       F16  [4096, 4096]
blk.3.attn_v.weight       F16  [4096, 1024]
blk.3.ffn_down.weight     F16  [14336, 4096]
blk.3.ffn_gate.weight     F16  [4096, 14336]
blk.3.ffn_norm.weight     F32  [4096]
blk.3.ffn_up.weight       F16  [4096, 14336]

blk.4
blk.4.attn_k.weight       F16  [4096, 1024]
blk.4.attn_norm.weight    F32  [4096]
blk.4.attn_output.weight  F16  [4096, 4096]
blk.4.attn_q.weight       F16  [4096, 4096]
blk.4.attn_v.weight       F16  [4096, 1024]
blk.4.ffn_down.weight     F16  [14336, 4096]
blk.4.ffn_gate.weight     F16  [4096, 14336]
blk.4.ffn_norm.weight     F32  [4096]
blk.4.ffn_up.weight       F16  [4096, 14336]

blk.5
blk.5.attn_k.weight       F16  [4096, 1024]
blk.5.attn_norm.weight    F32  [4096]
blk.5.attn_output.weight  F16  [4096, 4096]
blk.5.attn_q.weight       F16  [4096, 4096]
blk.5.attn_v.weight       F16  [4096, 1024]
blk.5.ffn_down.weight     F16  [14336, 4096]
blk.5.ffn_gate.weight     F16  [4096, 14336]
blk.5.ffn_norm.weight     F32  [4096]
blk.5.ffn_up.weight       F16  [4096, 14336]

blk.6
blk.6.attn_k.weight       F16  [4096, 1024]
blk.6.attn_norm.weight    F32  [4096]
blk.6.attn_output.weight  F16  [4096, 4096]
blk.6.attn_q.weight       F16  [4096, 4096]
blk.6.attn_v.weight       F16  [4096, 1024]
blk.6.ffn_down.weight     F16  [14336, 4096]
blk.6.ffn_gate.weight     F16  [4096, 14336]
blk.6.ffn_norm.weight     F32  [4096]
blk.6.ffn_up.weight       F16  [4096, 14336]

blk.7
blk.7.attn_k.weight       F16  [4096, 1024]
blk.7.attn_norm.weight    F32  [4096]
blk.7.attn_output.weight  F16  [4096, 4096]
blk.7.attn_q.weight       F16  [4096, 4096]
blk.7.attn_v.weight       F16  [4096, 1024]
blk.7.ffn_down.weight     F16  [14336, 4096]
blk.7.ffn_gate.weight     F16  [4096, 14336]
blk.7.ffn_norm.weight     F32  [4096]
blk.7.ffn_up.weight       F16  [4096, 14336]

blk.8
blk.8.attn_k.weight       F16  [4096, 1024]
blk.8.attn_norm.weight    F32  [4096]
blk.8.attn_output.weight  F16  [4096, 4096]
blk.8.attn_q.weight       F16  [4096, 4096]
blk.8.attn_v.weight       F16  [4096, 1024]
blk.8.ffn_down.weight     F16  [14336, 4096]
blk.8.ffn_gate.weight     F16  [4096, 14336]
blk.8.ffn_norm.weight     F32  [4096]
blk.8.ffn_up.weight       F16  [4096, 14336]

blk.9
blk.9.attn_k.weight       F16  [4096, 1024]
blk.9.attn_norm.weight    F32  [4096]
blk.9.attn_output.weight  F16  [4096, 4096]
blk.9.attn_q.weight       F16  [4096, 4096]
blk.9.attn_v.weight       F16  [4096, 1024]
blk.9.ffn_down.weight     F16  [14336, 4096]
blk.9.ffn_gate.weight     F16  [4096, 14336]
blk.9.ffn_norm.weight     F32  [4096]
blk.9.ffn_up.weight       F16  [4096, 14336]

blk.10
blk.10.attn_k.weight      F16  [4096, 1024]
blk.10.attn_norm.weight   F32  [4096]
blk.10.attn_output.weight F16  [4096, 4096]
blk.10.attn_q.weight      F16  [4096, 4096]
blk.10.attn_v.weight      F16  [4096, 1024]
blk.10.ffn_down.weight    F16  [14336, 4096]
blk.10.ffn_gate.weight    F16  [4096, 14336]
blk.10.ffn_norm.weight    F32  [4096]
blk.10.ffn_up.weight      F16  [4096, 14336]

blk.11
blk.11.attn_k.weight      F16  [4096, 1024]
blk.11.attn_norm.weight   F32  [4096]
blk.11.attn_output.weight F16  [4096, 4096]
blk.11.attn_q.weight      F16  [4096, 4096]
blk.11.attn_v.weight      F16  [4096, 1024]
blk.11.ffn_down.weight    F16  [14336, 4096]
blk.11.ffn_gate.weight    F16  [4096, 14336]
blk.11.ffn_norm.weight    F32  [4096]
blk.11.ffn_up.weight      F16  [4096, 14336]

blk.12
blk.12.attn_k.weight      F16  [4096, 1024]
blk.12.attn_norm.weight   F32  [4096]
blk.12.attn_output.weight F16  [4096, 4096]
blk.12.attn_q.weight      F16  [4096, 4096]
blk.12.attn_v.weight      F16  [4096, 1024]
blk.12.ffn_down.weight    F16  [14336, 4096]
blk.12.ffn_gate.weight    F16  [4096, 14336]
blk.12.ffn_norm.weight    F32  [4096]
blk.12.ffn_up.weight      F16  [4096, 14336]

blk.13
blk.13.attn_k.weight      F16  [4096, 1024]
blk.13.attn_norm.weight   F32  [4096]
blk.13.attn_output.weight F16  [4096, 4096]
blk.13.attn_q.weight      F16  [4096, 4096]
blk.13.attn_v.weight      F16  [4096, 1024]
blk.13.ffn_down.weight    F16  [14336, 4096]
blk.13.ffn_gate.weight    F16  [4096, 14336]
blk.13.ffn_norm.weight    F32  [4096]
blk.13.ffn_up.weight      F16  [4096, 14336]

blk.14
blk.14.attn_k.weight      F16  [4096, 1024]
blk.14.attn_norm.weight   F32  [4096]
blk.14.attn_output.weight F16  [4096, 4096]
blk.14.attn_q.weight      F16  [4096, 4096]
blk.14.attn_v.weight      F16  [4096, 1024]
blk.14.ffn_down.weight    F16  [14336, 4096]
blk.14.ffn_gate.weight    F16  [4096, 14336]
blk.14.ffn_norm.weight    F32  [4096]
blk.14.ffn_up.weight      F16  [4096, 14336]

blk.15
blk.15.attn_k.weight      F16  [4096, 1024]
blk.15.attn_norm.weight   F32  [4096]
blk.15.attn_output.weight F16  [4096, 4096]
blk.15.attn_q.weight      F16  [4096, 4096]
blk.15.attn_v.weight      F16  [4096, 1024]
blk.15.ffn_down.weight    F16  [14336, 4096]
blk.15.ffn_gate.weight    F16  [4096, 14336]
blk.15.ffn_norm.weight    F32  [4096]
blk.15.ffn_up.weight      F16  [4096, 14336]

blk.16
blk.16.attn_k.weight      F16  [4096, 1024]
blk.16.attn_norm.weight   F32  [4096]
blk.16.attn_output.weight F16  [4096, 4096]
blk.16.attn_q.weight      F16  [4096, 4096]
blk.16.attn_v.weight      F16  [4096, 1024]
blk.16.ffn_down.weight    F16  [14336, 4096]
blk.16.ffn_gate.weight    F16  [4096, 14336]
blk.16.ffn_norm.weight    F32  [4096]
blk.16.ffn_up.weight      F16  [4096, 14336]

blk.17
blk.17.attn_k.weight      F16  [4096, 1024]
blk.17.attn_norm.weight   F32  [4096]
blk.17.attn_output.weight F16  [4096, 4096]
blk.17.attn_q.weight      F16  [4096, 4096]
blk.17.attn_v.weight      F16  [4096, 1024]
blk.17.ffn_down.weight    F16  [14336, 4096]
blk.17.ffn_gate.weight    F16  [4096, 14336]
blk.17.ffn_norm.weight    F32  [4096]
blk.17.ffn_up.weight      F16  [4096, 14336]

blk.18
blk.18.attn_k.weight      F16  [4096, 1024]
blk.18.attn_norm.weight   F32  [4096]
blk.18.attn_output.weight F16  [4096, 4096]
blk.18.attn_q.weight      F16  [4096, 4096]
blk.18.attn_v.weight      F16  [4096, 1024]
blk.18.ffn_down.weight    F16  [14336, 4096]
blk.18.ffn_gate.weight    F16  [4096, 14336]
blk.18.ffn_norm.weight    F32  [4096]
blk.18.ffn_up.weight      F16  [4096, 14336]

blk.19
blk.19.attn_k.weight      F16  [4096, 1024]
blk.19.attn_norm.weight   F32  [4096]
blk.19.attn_output.weight F16  [4096, 4096]
blk.19.attn_q.weight      F16  [4096, 4096]
blk.19.attn_v.weight      F16  [4096, 1024]
blk.19.ffn_down.weight    F16  [14336, 4096]
blk.19.ffn_gate.weight    F16  [4096, 14336]
blk.19.ffn_norm.weight    F32  [4096]
blk.19.ffn_up.weight      F16  [4096, 14336]

blk.20
blk.20.attn_k.weight      F16  [4096, 1024]
blk.20.attn_norm.weight   F32  [4096]
blk.20.attn_output.weight F16  [4096, 4096]
blk.20.attn_q.weight      F16  [4096, 4096]
blk.20.attn_v.weight      F16  [4096, 1024]
blk.20.ffn_down.weight    F16  [14336, 4096]
blk.20.ffn_gate.weight    F16  [4096, 14336]
blk.20.ffn_norm.weight    F32  [4096]
blk.20.ffn_up.weight      F16  [4096, 14336]

blk.21
blk.21.attn_k.weight      F16  [4096, 1024]
blk.21.attn_norm.weight   F32  [4096]
blk.21.attn_output.weight F16  [4096, 4096]
blk.21.attn_q.weight      F16  [4096, 4096]
blk.21.attn_v.weight      F16  [4096, 1024]
blk.21.ffn_down.weight    F16  [14336, 4096]
blk.21.ffn_gate.weight    F16  [4096, 14336]
blk.21.ffn_norm.weight    F32  [4096]
blk.21.ffn_up.weight      F16  [4096, 14336]

blk.22
blk.22.attn_k.weight      F16  [4096, 1024]
blk.22.attn_norm.weight   F32  [4096]
blk.22.attn_output.weight F16  [4096, 4096]
blk.22.attn_q.weight      F16  [4096, 4096]
blk.22.attn_v.weight      F16  [4096, 1024]
blk.22.ffn_down.weight    F16  [14336, 4096]
blk.22.ffn_gate.weight    F16  [4096, 14336]
blk.22.ffn_norm.weight    F32  [4096]
blk.22.ffn_up.weight      F16  [4096, 14336]

blk.23
blk.23.attn_k.weight      F16  [4096, 1024]
blk.23.attn_norm.weight   F32  [4096]
blk.23.attn_output.weight F16  [4096, 4096]
blk.23.attn_q.weight      F16  [4096, 4096]
blk.23.attn_v.weight      F16  [4096, 1024]
blk.23.ffn_down.weight    F16  [14336, 4096]
blk.23.ffn_gate.weight    F16  [4096, 14336]
blk.23.ffn_norm.weight    F32  [4096]
blk.23.ffn_up.weight      F16  [4096, 14336]

blk.24
blk.24.attn_k.weight      F16  [4096, 1024]
blk.24.attn_norm.weight   F32  [4096]
blk.24.attn_output.weight F16  [4096, 4096]
blk.24.attn_q.weight      F16  [4096, 4096]
blk.24.attn_v.weight      F16  [4096, 1024]
blk.24.ffn_down.weight    F16  [14336, 4096]
blk.24.ffn_gate.weight    F16  [4096, 14336]
blk.24.ffn_norm.weight    F32  [4096]
blk.24.ffn_up.weight      F16  [4096, 14336]

blk.25
blk.25.attn_k.weight      F16  [4096, 1024]
blk.25.attn_norm.weight   F32  [4096]
blk.25.attn_output.weight F16  [4096, 4096]
blk.25.attn_q.weight      F16  [4096, 4096]
blk.25.attn_v.weight      F16  [4096, 1024]
blk.25.ffn_down.weight    F16  [14336, 4096]
blk.25.ffn_gate.weight    F16  [4096, 14336]
blk.25.ffn_norm.weight    F32  [4096]
blk.25.ffn_up.weight      F16  [4096, 14336]

blk.26
blk.26.attn_k.weight      F16  [4096, 1024]
blk.26.attn_norm.weight   F32  [4096]
blk.26.attn_output.weight F16  [4096, 4096]
blk.26.attn_q.weight      F16  [4096, 4096]
blk.26.attn_v.weight      F16  [4096, 1024]
blk.26.ffn_down.weight    F16  [14336, 4096]
blk.26.ffn_gate.weight    F16  [4096, 14336]
blk.26.ffn_norm.weight    F32  [4096]
blk.26.ffn_up.weight      F16  [4096, 14336]

blk.27
blk.27.attn_k.weight      F16  [4096, 1024]
blk.27.attn_norm.weight   F32  [4096]
blk.27.attn_output.weight F16  [4096, 4096]
blk.27.attn_q.weight      F16  [4096, 4096]
blk.27.attn_v.weight      F16  [4096, 1024]
blk.27.ffn_down.weight    F16  [14336, 4096]
blk.27.ffn_gate.weight    F16  [4096, 14336]
blk.27.ffn_norm.weight    F32  [4096]
blk.27.ffn_up.weight      F16  [4096, 14336]

blk.28
blk.28.attn_k.weight      F16  [4096, 1024]
blk.28.attn_norm.weight   F32  [4096]
blk.28.attn_output.weight F16  [4096, 4096]
blk.28.attn_q.weight      F16  [4096, 4096]
blk.28.attn_v.weight      F16  [4096, 1024]
blk.28.ffn_down.weight    F16  [14336, 4096]
blk.28.ffn_gate.weight    F16  [4096, 14336]
blk.28.ffn_norm.weight    F32  [4096]
blk.28.ffn_up.weight      F16  [4096, 14336]

blk.29
blk.29.attn_k.weight      F16  [4096, 1024]
blk.29.attn_norm.weight   F32  [4096]
blk.29.attn_output.weight F16  [4096, 4096]
blk.29.attn_q.weight      F16  [4096, 4096]
blk.29.attn_v.weight      F16  [4096, 1024]
blk.29.ffn_down.weight    F16  [14336, 4096]
blk.29.ffn_gate.weight    F16  [4096, 14336]
blk.29.ffn_norm.weight    F32  [4096]
blk.29.ffn_up.weight      F16  [4096, 14336]

blk.30
blk.30.attn_k.weight      F16  [4096, 1024]
blk.30.attn_norm.weight   F32  [4096]
blk.30.attn_output.weight F16  [4096, 4096]
blk.30.attn_q.weight      F16  [4096, 4096]
blk.30.attn_v.weight      F16  [4096, 1024]
blk.30.ffn_down.weight    F16  [14336, 4096]
blk.30.ffn_gate.weight    F16  [4096, 14336]
blk.30.ffn_norm.weight    F32  [4096]
blk.30.ffn_up.weight      F16  [4096, 14336]

blk.31
blk.31.attn_k.weight      F16  [4096, 1024]
blk.31.attn_norm.weight   F32  [4096]
blk.31.attn_output.weight F16  [4096, 4096]
blk.31.attn_q.weight      F16  [4096, 4096]
blk.31.attn_v.weight      F16  [4096, 1024]
blk.31.ffn_down.weight    F16  [14336, 4096]
blk.31.ffn_gate.weight    F16  [4096, 14336]
blk.31.ffn_norm.weight    F32  [4096]
blk.31.ffn_up.weight      F16  [4096, 14336]

output.weight             F16  [4096, 32002]
output_norm.weight        F32  [4096]
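
The listed shapes line up with the metadata: 32 query heads of dimension 128 (llama.rope.dimension_count) give the 4096-wide attn_q projection, while the 8 KV heads (llama.attention.head_count_kv) give 8 × 128 = 1024, the narrow attn_k/attn_v width characteristic of grouped-query attention. Summing all shapes also recovers the blob size; a quick sketch, with every constant read straight off the tables above:

    # Parameter count from the tensor shapes listed above.
    EMBD, VOCAB, FFN, LAYERS, KV = 4096, 32002, 14336, 32, 1024

    per_block = (
        2 * EMBD * KV      # attn_k + attn_v
        + 2 * EMBD * EMBD  # attn_q + attn_output
        + 3 * EMBD * FFN   # ffn_gate + ffn_up + ffn_down
        + 2 * EMBD         # attn_norm + ffn_norm
    )

    # token_embd + output head, all blocks, final output_norm.
    total = 2 * EMBD * VOCAB + LAYERS * per_block + EMBD

    print(f"{total:,} parameters")               # 7,241,748,480
    print(f"~{2 * total / 1e9:.1f} GB in fp16")  # ~14.5 GB

At two bytes per parameter this comes to roughly 14.5 GB, consistent with the 14GB blob size shown at the top of the page.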