Update the /api/create endpoint to use JSON (#7935)

Changes `POST /api/create` to accept a JSON request body instead of a Modelfile.

This is a breaking change.
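
A sketch of the new request shape (illustrative only; the exact field set is defined in the updated api package, and the `from` and `system` values here are made-up examples):

    POST /api/create
    {
      "model": "mario",
      "from": "llama3.2",
      "system": "You are Mario from Super Mario Bros."
    }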
Patrick Devine authored on 2024-12-31 18:02:30 -08:00, committed by GitHub
parent 459d822b51
commit 86a622cbdc
17 changed files with 1523 additions and 1094 deletions


@@ -2,7 +2,6 @@ package server
import (
"bytes"
"cmp"
"context"
"crypto/sha256"
"encoding/hex"
@@ -24,8 +23,6 @@ import (
"github.com/ollama/ollama/api"
"github.com/ollama/ollama/envconfig"
"github.com/ollama/ollama/format"
"github.com/ollama/ollama/llama"
"github.com/ollama/ollama/llm"
"github.com/ollama/ollama/parser"
"github.com/ollama/ollama/template"
@@ -121,7 +118,7 @@ func (m *Model) CheckCapabilities(caps ...Capability) error {
}
func (m *Model) String() string {
- var modelfile parser.File
+ var modelfile parser.Modelfile
modelfile.Commands = append(modelfile.Commands, parser.Command{
Name: "model",
@@ -330,328 +327,6 @@ func GetModel(name string) (*Model, error) {
return model, nil
}
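// realpath resolves a path argument from a Modelfile command: "~" and "~/"
// prefixes expand to the user's home directory, a path that exists relative
// to the Modelfile's directory resolves against that directory, and anything
// else falls back to the absolute form of the path.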
func realpath(rel, from string) string {
abspath, err := filepath.Abs(from)
if err != nil {
return from
}
home, err := os.UserHomeDir()
if err != nil {
return abspath
}
if from == "~" {
return home
} else if strings.HasPrefix(from, "~/") {
return filepath.Join(home, from[2:])
}
if _, err := os.Stat(filepath.Join(rel, from)); err == nil {
// this is a file relative to the Modelfile
return filepath.Join(rel, from)
}
return abspath
}
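// CreateModel builds a model from parsed Modelfile commands: it collects base
// layers from a referenced model, blob digest, or local file, optionally
// quantizes the weights, attaches template/system/license/message/parameter
// layers, then writes the image config and manifest. Per the hunk header
// above, this entire Modelfile-driven path is removed by this commit in
// favor of the JSON-based create flow.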
func CreateModel(ctx context.Context, name model.Name, modelFileDir, quantization string, modelfile *parser.File, fn func(resp api.ProgressResponse)) (err error) {
config := ConfigV2{
OS: "linux",
Architecture: "amd64",
RootFS: RootFS{
Type: "layers",
},
}
var messages []*api.Message
parameters := make(map[string]any)
var layers []Layer
var baseLayers []*layerGGML
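// Each Modelfile command maps to a layer whose media type is derived from
// the command name, e.g. application/vnd.ollama.image.template for TEMPLATE.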
for _, c := range modelfile.Commands {
mediatype := fmt.Sprintf("application/vnd.ollama.image.%s", c.Name)
command := c.Name
switch command {
case "model", "adapter":
if name := model.ParseName(c.Args); name.IsValid() && command == "model" {
name, err := getExistingName(name)
if err != nil {
return err
}
baseLayers, err = parseFromModel(ctx, name, fn)
if err != nil {
return err
}
} else if strings.HasPrefix(c.Args, "@") {
digest := strings.TrimPrefix(c.Args, "@")
if ib, ok := intermediateBlobs[digest]; ok {
p, err := GetBlobsPath(ib)
if err != nil {
return err
}
if _, err := os.Stat(p); errors.Is(err, os.ErrNotExist) {
// pass
} else if err != nil {
return err
} else {
fn(api.ProgressResponse{Status: fmt.Sprintf("using cached layer %s", ib)})
digest = ib
}
}
blobpath, err := GetBlobsPath(digest)
if err != nil {
return err
}
blob, err := os.Open(blobpath)
if err != nil {
return err
}
defer blob.Close()
baseLayers, err = parseFromFile(ctx, command, baseLayers, blob, digest, fn)
if err != nil {
return err
}
} else if file, err := os.Open(realpath(modelFileDir, c.Args)); err == nil {
defer file.Close()
baseLayers, err = parseFromFile(ctx, command, baseLayers, file, "", fn)
if err != nil {
return err
}
} else {
return fmt.Errorf("invalid model reference: %s", c.Args)
}
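// Optionally quantize the base model: only F16/F32 GGUF weights can be
// quantized, and the quantized blob replaces the original model layer.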
for _, baseLayer := range baseLayers {
if quantization != "" &&
baseLayer.MediaType == "application/vnd.ollama.image.model" &&
baseLayer.GGML != nil &&
baseLayer.GGML.Name() == "gguf" {
want, err := llm.ParseFileType(quantization)
if err != nil {
return err
}
ft := baseLayer.GGML.KV().FileType()
if !slices.Contains([]string{"F16", "F32"}, ft.String()) {
return errors.New("quantization is only supported for F16 and F32 models")
} else if want != ft {
fn(api.ProgressResponse{Status: fmt.Sprintf("quantizing %s model to %s", ft, quantization)})
blob, err := GetBlobsPath(baseLayer.Digest)
if err != nil {
return err
}
temp, err := os.CreateTemp(filepath.Dir(blob), quantization)
if err != nil {
return err
}
defer temp.Close()
defer os.Remove(temp.Name())
if err := llama.Quantize(blob, temp.Name(), uint32(want)); err != nil {
return err
}
layer, err := NewLayer(temp, baseLayer.MediaType)
if err != nil {
return err
}
if _, err := temp.Seek(0, io.SeekStart); err != nil {
return err
}
ggml, _, err := llm.DecodeGGML(temp, 0)
if err != nil {
return err
}
baseLayer.Layer = layer
baseLayer.GGML = ggml
}
}
if baseLayer.GGML != nil {
config.ModelFormat = cmp.Or(config.ModelFormat, baseLayer.GGML.Name())
config.ModelFamily = cmp.Or(config.ModelFamily, baseLayer.GGML.KV().Architecture())
config.ModelType = cmp.Or(config.ModelType, format.HumanNumber(baseLayer.GGML.KV().ParameterCount()))
config.FileType = cmp.Or(config.FileType, baseLayer.GGML.KV().FileType().String())
config.ModelFamilies = append(config.ModelFamilies, baseLayer.GGML.KV().Architecture())
}
layers = append(layers, baseLayer.Layer)
}
case "license", "template", "system":
if c.Name == "template" {
if _, err := template.Parse(c.Args); err != nil {
return fmt.Errorf("%w: %s", errBadTemplate, err)
}
}
if c.Name != "license" {
// replace
layers = slices.DeleteFunc(layers, func(layer Layer) bool {
if layer.MediaType != mediatype {
return false
}
if err := layer.Remove(); err != nil {
return false
}
return true
})
}
blob := strings.NewReader(c.Args)
layer, err := NewLayer(blob, mediatype)
if err != nil {
return err
}
layers = append(layers, layer)
case "message":
role, content, ok := strings.Cut(c.Args, ": ")
if !ok {
return fmt.Errorf("invalid message: %s", c.Args)
}
messages = append(messages, &api.Message{Role: role, Content: content})
default:
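// Any other command is a runtime parameter, e.g. PARAMETER temperature 1;
// repeated values for the same key are concatenated into a list.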
ps, err := api.FormatParams(map[string][]string{c.Name: {c.Args}})
if err != nil {
return err
}
for k, v := range ps {
if ks, ok := parameters[k].([]string); ok {
parameters[k] = append(ks, v.([]string)...)
} else if vs, ok := v.([]string); ok {
parameters[k] = vs
} else {
parameters[k] = v
}
}
}
}
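// Reconcile layers inherited from the base model: drop its message layer
// when new MESSAGE commands were given, and fold its parameters in under
// the new ones (explicitly set parameters win).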
var err2 error
layers = slices.DeleteFunc(layers, func(layer Layer) bool {
switch layer.MediaType {
case "application/vnd.ollama.image.message":
// if there are new messages, remove the inherited ones
if len(messages) > 0 {
return true
}
return false
case "application/vnd.ollama.image.params":
// merge inherited parameters with new ones
r, err := layer.Open()
if err != nil {
err2 = err
return false
}
defer r.Close()
var ps map[string]any
if err := json.NewDecoder(r).Decode(&ps); err != nil {
err2 = err
return false
}
for k, v := range ps {
if _, ok := parameters[k]; !ok {
parameters[k] = v
}
}
return true
default:
return false
}
})
if err2 != nil {
return err2
}
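// Serialize the accumulated messages into a single messages layer.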
if len(messages) > 0 {
var b bytes.Buffer
if err := json.NewEncoder(&b).Encode(messages); err != nil {
return err
}
layer, err := NewLayer(&b, "application/vnd.ollama.image.messages")
if err != nil {
return err
}
layers = append(layers, layer)
}
if len(parameters) > 0 {
var b bytes.Buffer
if err := json.NewEncoder(&b).Encode(parameters); err != nil {
return err
}
layer, err := NewLayer(&b, "application/vnd.ollama.image.params")
if err != nil {
return err
}
layers = append(layers, layer)
}
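// The image config records each layer digest as a rootfs diff ID,
// mirroring the Docker image config format named below.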
digests := make([]string, len(layers))
for i, layer := range layers {
digests[i] = layer.Digest
}
config.RootFS.DiffIDs = digests
var b bytes.Buffer
if err := json.NewEncoder(&b).Encode(config); err != nil {
return err
}
configLayer, err := NewLayer(&b, "application/vnd.docker.container.image.v1+json")
if err != nil {
return err
}
for _, layer := range append(layers, configLayer) {
if layer.status != "" {
fn(api.ProgressResponse{Status: layer.status})
}
}
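// Write the manifest and, unless pruning is disabled, remove layers that
// belonged to the previous manifest of the same name.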
old, _ := ParseNamedManifest(name)
fn(api.ProgressResponse{Status: "writing manifest"})
if err := WriteManifest(name, configLayer, layers); err != nil {
return err
}
if !envconfig.NoPrune() && old != nil {
if err := old.RemoveLayers(); err != nil {
return err
}
}
fn(api.ProgressResponse{Status: "success"})
return nil
}
func CopyModel(src, dst model.Name) error {
if !dst.IsFullyQualified() {
return model.Unqualified(dst)