remove per-model types

mostly replaced by decoding tensors, except for ggml models, which only support llama
Michael Yang
2023-12-08 16:39:26 -08:00
parent 7a1b37ac64
commit 56ffc3023a
3 changed files with 0 additions and 64 deletions
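
The surviving code path below returns format.HumanNumber(llm.parameters), so the size label now comes from a parameter count taken from the decoded model itself rather than a per-family block-count table. A minimal sketch of computing such a count from tensor shapes, assuming made-up names (tensor, parameterCount, humanNumber); this is illustrative, not ollama's implementation:

package main

import "fmt"

// tensor stands in for one entry decoded from a model file's tensor metadata.
// The type and field names are illustrative, not ollama's actual structures.
type tensor struct {
	name string
	dims []uint64
}

// parameterCount sums element counts across tensors: the size comes straight
// from the decoded file instead of a per-family block-count table.
func parameterCount(tensors []tensor) uint64 {
	var total uint64
	for _, t := range tensors {
		elems := uint64(1)
		for _, d := range t.dims {
			elems *= d
		}
		total += elems
	}
	return total
}

// humanNumber renders the count roughly the way the diff's format.HumanNumber
// call does ("148M", "7B", ...); it is a stand-in, not that function.
func humanNumber(n uint64) string {
	switch {
	case n >= 1_000_000_000:
		return fmt.Sprintf("%.0fB", float64(n)/1e9)
	case n >= 1_000_000:
		return fmt.Sprintf("%.0fM", float64(n)/1e6)
	default:
		return fmt.Sprintf("%d", n)
	}
}

func main() {
	// Two made-up tensors standing in for a decoded header.
	tensors := []tensor{
		{name: "token_embd.weight", dims: []uint64{4096, 32000}},
		{name: "blk.0.attn_q.weight", dims: []uint64{4096, 4096}},
	}
	fmt.Println(humanNumber(parameterCount(tensors))) // prints "148M" for this toy input
}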


@@ -120,27 +120,6 @@ func (llm *ggufModel) ModelType() string {
 	if llm.parameters > 0 {
 		return format.HumanNumber(llm.parameters)
 	}
 
-	switch llm.ModelFamily() {
-	case "llama":
-		if blocks, ok := llm.kv["llama.block_count"].(uint32); ok {
-			heads, headsOK := llm.kv["llama.head_count"].(uint32)
-			headKVs, headsKVsOK := llm.kv["llama.head_count_kv"].(uint32)
-			if headsOK && headsKVsOK && heads/headKVs == 8 {
-				return "70B"
-			}
-
-			return llamaModelType(blocks)
-		}
-	case "falcon":
-		if blocks, ok := llm.kv["falcon.block_count"].(uint32); ok {
-			return falconModelType(blocks)
-		}
-	case "starcoder":
-		if blocks, ok := llm.kv["starcoder.block_count"].(uint32); ok {
-			return starCoderModelType(blocks)
-		}
-	}
-
 	return "unknown"
 }
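
For context, helpers like llamaModelType referenced in the removed switch map a block count to a size label. A sketch of that table shape, with a hypothetical function name and values drawn from well-known LLaMA layer counts rather than copied from the deleted code:

package main

import "fmt"

// llamaModelTypeSketch shows the shape of the block-count tables this commit
// removes. The values are well-known LLaMA layer counts used for illustration,
// not the deleted table verbatim.
func llamaModelTypeSketch(blocks uint32) string {
	switch blocks {
	case 32:
		return "7B"
	case 40:
		return "13B"
	case 60:
		return "30B"
	case 80:
		// Llama 2 70B also has 80 blocks, which is why the removed code first
		// checked head_count/head_count_kv == 8 (grouped-query attention with
		// 8 KV heads) before falling back to a table like this.
		return "65B"
	default:
		return "unknown"
	}
}

func main() {
	fmt.Println(llamaModelTypeSketch(40)) // 13B
}

The legacy ggml path, which the commit message notes only supports llama, presumably still relies on a table of this kind, since decoding tensor metadata is what makes it unnecessary for gguf.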