separate prompt into template and system

Michael Yang
2023-07-17 14:21:27 -07:00
parent 2d305fa99a
commit df146c41e2
3 changed files with 113 additions and 85 deletions


@@ -16,6 +16,7 @@ import (
 	"reflect"
 	"strconv"
 	"strings"
+	"text/template"
 
 	"github.com/jmorganca/ollama/api"
 	"github.com/jmorganca/ollama/parser"
@@ -24,10 +25,33 @@ import (
 type Model struct {
 	Name      string `json:"name"`
 	ModelPath string
-	Prompt    string
+	Template  string
+	System    string
 	Options   api.Options
 }
 
+func (m *Model) Prompt(request api.GenerateRequest) (string, error) {
+	tmpl, err := template.New("").Parse(m.Template)
+	if err != nil {
+		return "", err
+	}
+
+	var vars struct {
+		System string
+		Prompt string
+	}
+
+	vars.System = m.System
+	vars.Prompt = request.Prompt
+
+	var sb strings.Builder
+	if err := tmpl.Execute(&sb, vars); err != nil {
+		return "", err
+	}
+
+	return sb.String(), nil
+}
+
 type ManifestV2 struct {
 	SchemaVersion int    `json:"schemaVersion"`
 	MediaType     string `json:"mediaType"`
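
The new (*Model).Prompt method moves prompt rendering onto the model itself: the stored template is parsed with text/template and executed against the model's system string plus the request's prompt. A minimal, self-contained sketch of that rendering, using a hypothetical helper name and template text (neither appears in this commit):

package main

import (
	"fmt"
	"strings"
	"text/template"
)

// render mirrors the shape of (*Model).Prompt: parse the template, then
// execute it against a struct exposing System and Prompt fields.
func render(tmplText, system, prompt string) (string, error) {
	tmpl, err := template.New("").Parse(tmplText)
	if err != nil {
		return "", err
	}

	var vars struct {
		System string
		Prompt string
	}
	vars.System = system
	vars.Prompt = prompt

	var sb strings.Builder
	if err := tmpl.Execute(&sb, vars); err != nil {
		return "", err
	}
	return sb.String(), nil
}

func main() {
	// Hypothetical template text; real models ship their own.
	out, err := render("{{ .System }}\n\nUser: {{ .Prompt }}", "You are a concise assistant.", "Why is the sky blue?")
	if err != nil {
		panic(err)
	}
	fmt.Println(out)
}

With this split, a template can reference {{ .System }} and {{ .Prompt }} independently instead of relying on a single pre-baked prompt blob.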
@@ -71,20 +95,19 @@ func GetManifest(mp ModelPath) (*ManifestV2, error) {
 	if err != nil {
 		return nil, err
 	}
 
 	if _, err = os.Stat(fp); err != nil && !errors.Is(err, os.ErrNotExist) {
 		return nil, fmt.Errorf("couldn't find model '%s'", mp.GetShortTagname())
 	}
 
 	var manifest *ManifestV2
 
-	f, err := os.Open(fp)
+	bts, err := os.ReadFile(fp)
 	if err != nil {
 		return nil, fmt.Errorf("couldn't open file '%s'", fp)
 	}
 
-	decoder := json.NewDecoder(f)
-	err = decoder.Decode(&manifest)
-	if err != nil {
+	if err := json.Unmarshal(bts, &manifest); err != nil {
 		return nil, err
 	}
@@ -112,12 +135,20 @@ func GetModel(name string) (*Model, error) {
 		switch layer.MediaType {
 		case "application/vnd.ollama.image.model":
 			model.ModelPath = filename
-		case "application/vnd.ollama.image.prompt":
-			data, err := os.ReadFile(filename)
+		case "application/vnd.ollama.image.template":
+			bts, err := os.ReadFile(filename)
 			if err != nil {
 				return nil, err
 			}
-			model.Prompt = string(data)
+
+			model.Template = string(bts)
+		case "application/vnd.ollama.image.system":
+			bts, err := os.ReadFile(filename)
+			if err != nil {
+				return nil, err
+			}
+
+			model.System = string(bts)
 		case "application/vnd.ollama.image.params":
 			params, err := os.Open(filename)
 			if err != nil {
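
GetModel now reads each recognized layer into its own field, so the template and the system prompt travel as separate blobs. A rough usage sketch, written as if it lived in the same package; buildPrompt and the model name are hypothetical, while GetModel and (*Model).Prompt have the signatures shown in this diff:

// buildPrompt is a hypothetical helper showing how the pieces fit together.
func buildPrompt(name, userPrompt string) (string, error) {
	model, err := GetModel(name)
	if err != nil {
		return "", err
	}

	// model.Template and model.System were filled in from the
	// application/vnd.ollama.image.template and .system layers above;
	// either may be empty if the model does not define it.
	return model.Prompt(api.GenerateRequest{Prompt: userPrompt})
}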
@@ -156,13 +187,13 @@ func CreateModel(name string, path string, fn func(status string)) error {
 	params := make(map[string]string)
 
 	for _, c := range commands {
-		log.Printf("[%s] - %s\n", c.Name, c.Arg)
+		log.Printf("[%s] - %s\n", c.Name, c.Args)
 		switch c.Name {
 		case "model":
 			fn("looking for model")
-			mf, err := GetManifest(ParseModelPath(c.Arg))
+			mf, err := GetManifest(ParseModelPath(c.Args))
 			if err != nil {
-				fp := c.Arg
+				fp := c.Args
 
 				// If filePath starts with ~/, replace it with the user's home directory.
 				if strings.HasPrefix(fp, "~/") {
@@ -183,7 +214,7 @@ func CreateModel(name string, path string, fn func(status string)) error {
 			fn("creating model layer")
 			file, err := os.Open(fp)
 			if err != nil {
-				fn(fmt.Sprintf("couldn't find model '%s'", c.Arg))
+				fn(fmt.Sprintf("couldn't find model '%s'", c.Args))
 				return fmt.Errorf("failed to open file: %v", err)
 			}
 			defer file.Close()
@@ -206,31 +237,21 @@ func CreateModel(name string, path string, fn func(status string)) error {
 				layers = append(layers, newLayer)
 			}
 		}
-		case "prompt":
-			fn("creating prompt layer")
+		case "license", "template", "system":
+			fn(fmt.Sprintf("creating %s layer", c.Name))
 
-			// remove the prompt layer if one exists
-			layers = removeLayerFromLayers(layers, "application/vnd.ollama.image.prompt")
+			mediaType := fmt.Sprintf("application/vnd.ollama.image.%s", c.Name)
+			layers = removeLayerFromLayers(layers, mediaType)
 
-			prompt := strings.NewReader(c.Arg)
-			l, err := CreateLayer(prompt)
+			layer, err := CreateLayer(strings.NewReader(c.Args))
 			if err != nil {
-				fn(fmt.Sprintf("couldn't create prompt layer: %v", err))
-				return fmt.Errorf("failed to create layer: %v", err)
+				return err
 			}
 
-			l.MediaType = "application/vnd.ollama.image.prompt"
-			layers = append(layers, l)
-		case "license":
-			fn("creating license layer")
-			license := strings.NewReader(c.Arg)
-			l, err := CreateLayer(license)
-			if err != nil {
-				fn(fmt.Sprintf("couldn't create license layer: %v", err))
-				return fmt.Errorf("failed to create layer: %v", err)
-			}
-			l.MediaType = "application/vnd.ollama.image.license"
-			layers = append(layers, l)
+			layer.MediaType = mediaType
+			layers = append(layers, layer)
 		default:
-			params[c.Name] = c.Arg
+			params[c.Name] = c.Args
 		}
 	}
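
Collapsing the license, template and system commands into one branch means the layer media type is derived directly from the command name. A small self-contained sketch of that mapping (the loop and its output are illustrative; only the Sprintf construction comes from CreateModel above):

package main

import "fmt"

func main() {
	// Same construction CreateModel uses in the shared case.
	for _, name := range []string{"license", "template", "system"} {
		mediaType := fmt.Sprintf("application/vnd.ollama.image.%s", name)
		fmt.Println(mediaType)
	}
}

The resulting strings are exactly the media types GetModel switches on when the model is loaded back.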


@@ -9,7 +9,6 @@ import (
 	"os"
 	"path/filepath"
 	"strings"
-	"text/template"
 	"time"
 
 	"dario.cat/mergo"
@@ -54,19 +53,12 @@ func generate(c *gin.Context) {
 		return
 	}
 
-	templ, err := template.New("").Parse(model.Prompt)
+	prompt, err := model.Prompt(req)
 	if err != nil {
 		c.JSON(http.StatusInternalServerError, gin.H{"error": err.Error()})
 		return
 	}
 
-	var sb strings.Builder
-	if err = templ.Execute(&sb, req); err != nil {
-		c.JSON(http.StatusInternalServerError, gin.H{"error": err.Error()})
-		return
-	}
-
-	req.Prompt = sb.String()
-
 	llm, err := llama.New(model.ModelPath, opts)
 	if err != nil {
 		c.JSON(http.StatusInternalServerError, gin.H{"error": err.Error()})
@@ -77,7 +69,7 @@ func generate(c *gin.Context) {
 	ch := make(chan any)
 	go func() {
 		defer close(ch)
-		llm.Predict(req.Context, req.Prompt, func(r api.GenerateResponse) {
+		llm.Predict(req.Context, prompt, func(r api.GenerateResponse) {
			r.Model = req.Model
			r.CreatedAt = time.Now().UTC()
			if r.Done {
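
On the server side, req.Prompt is no longer overwritten with the rendered text; the handler asks the model to render the prompt and passes the result straight to the backend. A condensed sketch of the new flow, with hypothetical values in the comments and error handling trimmed:

	// req is the bound api.GenerateRequest; req.Prompt stays the raw user
	// input, e.g. "Why is the sky blue?".
	prompt, err := model.Prompt(req)
	if err != nil {
		return // the real handler responds with HTTP 500 here
	}

	// prompt is the rendered template, e.g.
	// "You are a concise assistant.\n\nUser: Why is the sky blue?".
	llm.Predict(req.Context, prompt, func(r api.GenerateResponse) {
		// stream each partial response back to the client, as before
	})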