no rope parameters

Michael Yang
2024-04-05 17:59:58 -07:00
parent fc8e108642
commit be517e491c
4 changed files with 0 additions and 14 deletions

@@ -32,7 +32,6 @@ type Params struct {
 	AttentionHeads int     `json:"num_attention_heads"` // n_head
 	KeyValHeads    int     `json:"num_key_value_heads"`
 	NormEPS        float64 `json:"rms_norm_eps"`
-	RopeFreqBase   float64 `json:"rope_theta"`
 	BoSTokenID     int     `json:"bos_token_id"`
 	EoSTokenID     int     `json:"eos_token_id"`
 	HeadDimension  int     `json:"head_dim"`
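
For context, a minimal, illustrative Go sketch (not part of this commit) of the effect of dropping the field: with RopeFreqBase gone from Params, a rope_theta entry in a model's config.json is simply ignored by encoding/json during unmarshalling.

package main

import (
	"encoding/json"
	"fmt"
)

// Trimmed-down copy of the Params struct after this change, for illustration only.
type Params struct {
	AttentionHeads int     `json:"num_attention_heads"` // n_head
	KeyValHeads    int     `json:"num_key_value_heads"`
	NormEPS        float64 `json:"rms_norm_eps"`
	BoSTokenID     int     `json:"bos_token_id"`
	EoSTokenID     int     `json:"eos_token_id"`
	HeadDimension  int     `json:"head_dim"`
}

func main() {
	// rope_theta is present in the config but has no matching struct field,
	// so encoding/json drops it silently.
	config := []byte(`{"num_attention_heads": 32, "rms_norm_eps": 1e-05, "rope_theta": 1000000.0}`)

	var p Params
	if err := json.Unmarshal(config, &p); err != nil {
		panic(err)
	}
	fmt.Printf("%+v\n", p)
}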

@@ -144,7 +144,6 @@ func (m *MistralModel) WriteGGUF() (string, error) {
"llama.attention.head_count": uint32(m.Params.AttentionHeads),
"llama.attention.head_count_kv": uint32(m.Params.KeyValHeads),
"llama.attention.layer_norm_rms_epsilon": float32(m.Params.NormEPS),
"llama.rope.freq_base": float32(m.Params.RopeFreqBase),
"general.file_type": uint32(1),
"tokenizer.ggml.model": "llama",