simpler check for model loading compatibility errors

Jeffrey Morgan
2023-10-19 14:50:45 -04:00
parent cc1d03f4ec
commit 7ed5a39bc7
3 changed files with 11 additions and 12 deletions

@@ -97,6 +97,13 @@ func load(ctx context.Context, workDir string, model *Model, reqOpts map[string]
	llmRunner, err := llm.New(workDir, model.ModelPath, model.AdapterPaths, opts)
	if err != nil {
		// some older models are not compatible with newer versions of llama.cpp
		// show a generalized compatibility error until there is a better way to
		// check for model compatibility
		if strings.Contains(err.Error(), "failed to load model") {
			err = fmt.Errorf("%v: this model may be incompatible with your version of Ollama. If you previously pulled this model, try updating it by running `ollama pull %s`", err, model.ShortName)
		}
		return err
	}
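
For illustration, here is a minimal standalone sketch of the same error-wrapping pattern used in the diff: match on the llama.cpp error text and wrap it with a user-facing hint while keeping the original error. The loadModel helper and the model name below are hypothetical stand-ins for llm.New and model.ShortName, not part of the commit.

package main

import (
	"errors"
	"fmt"
	"strings"
)

// loadModel is a hypothetical stand-in for llm.New that fails the way an
// incompatible model would.
func loadModel(path string) error {
	return errors.New("failed to load model " + path)
}

func main() {
	err := loadModel("/models/example.bin")
	if err != nil {
		// Same check as the diff: detect the generic load failure and add a
		// compatibility hint, preserving the underlying error message.
		if strings.Contains(err.Error(), "failed to load model") {
			err = fmt.Errorf("%v: this model may be incompatible with your version of Ollama. If you previously pulled this model, try updating it by running `ollama pull %s`", err, "example")
		}
		fmt.Println(err)
	}
}

The substring check is deliberately coarse: the commit comment notes it is a stopgap until there is a better way to detect model compatibility.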