separate prompt into template and system

Author: Michael Yang
Date:   2023-07-17 14:21:27 -07:00
Parent: 2d305fa99a
Commit: df146c41e2
3 changed files with 113 additions and 85 deletions

@@ -9,7 +9,6 @@ import (
     "os"
     "path/filepath"
     "strings"
-    "text/template"
     "time"
 
     "dario.cat/mergo"
@@ -54,19 +53,12 @@ func generate(c *gin.Context) {
         return
     }
 
-    templ, err := template.New("").Parse(model.Prompt)
+    prompt, err := model.Prompt(req)
     if err != nil {
         c.JSON(http.StatusInternalServerError, gin.H{"error": err.Error()})
         return
     }
 
-    var sb strings.Builder
-    if err = templ.Execute(&sb, req); err != nil {
-        c.JSON(http.StatusInternalServerError, gin.H{"error": err.Error()})
-        return
-    }
-
-    req.Prompt = sb.String()
     llm, err := llama.New(model.ModelPath, opts)
     if err != nil {
         c.JSON(http.StatusInternalServerError, gin.H{"error": err.Error()})
@@ -77,7 +69,7 @@ func generate(c *gin.Context) {
     ch := make(chan any)
     go func() {
         defer close(ch)
-        llm.Predict(req.Context, req.Prompt, func(r api.GenerateResponse) {
+        llm.Predict(req.Context, prompt, func(r api.GenerateResponse) {
             r.Model = req.Model
             r.CreatedAt = time.Now().UTC()
             if r.Done {
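
Note: the handler now delegates prompt construction to the model via model.Prompt(req), which is why the inline template.New/Execute code and the text/template import are dropped from this file. The body of that method is not part of this hunk; the following is only a minimal sketch of what such a method could look like, assuming the Model type carries Template and System fields (these names are assumptions, not taken from this commit) and that "strings" and "text/template" are imported where the method lives.

// Illustrative sketch only; the Template and System field names and the
// template variables are assumptions, not the commit's actual code.
func (m *Model) Prompt(request api.GenerateRequest) (string, error) {
    // Parse the model's prompt template, e.g. "{{ .System }}\n\n{{ .Prompt }}".
    tmpl, err := template.New("").Parse(m.Template)
    if err != nil {
        return "", err
    }

    // Expose the system message and the user's prompt as separate
    // template variables, which is the point of splitting the prompt
    // into template and system.
    vars := struct {
        System string
        Prompt string
    }{
        System: m.System,
        Prompt: request.Prompt,
    }

    var sb strings.Builder
    if err := tmpl.Execute(&sb, vars); err != nil {
        return "", err
    }
    return sb.String(), nil
}

With a method of this shape, the handler above only needs the single model.Prompt(req) call shown in the diff, matching the removal of the text/template import in the first hunk.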