Mirror of https://github.com/dogkeeper886/ollama37.git (synced 2025-12-11 00:07:07 +00:00)

Add gemma safetensors conversion (#3250)

Co-authored-by: Michael Yang <mxyng@pm.me>
@@ -321,7 +321,7 @@ func CreateModel(ctx context.Context, name, modelFileDir string, commands []pars
 
 			pathName := realpath(modelFileDir, c.Args)
 
-			ggufName, err := convertSafetensors(name, pathName)
+			ggufName, err := convertSafetensors(name, pathName, fn)
 			if err != nil {
 				var pathErr *fs.PathError
 				switch {
@@ -336,6 +336,7 @@ func CreateModel(ctx context.Context, name, modelFileDir string, commands []pars
 
 			if ggufName != "" {
 				pathName = ggufName
+				slog.Debug(fmt.Sprintf("new image layer path: %s", pathName))
 				defer os.RemoveAll(ggufName)
 			}
 
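Note: the two hunks above thread CreateModel's existing progress callback (fn) into convertSafetensors and log the temporary GGUF path before deferring its cleanup. Below is a minimal, self-contained sketch of that caller-side pattern; makeGGUF and progressResponse are illustrative stand-ins, not the repository's API.

package main

import (
	"fmt"
	"log/slog"
	"os"
)

// progressResponse stands in for api.ProgressResponse; only the Status
// field used in the diff is modeled here.
type progressResponse struct {
	Status string
}

// makeGGUF is a hypothetical stand-in for convertSafetensors: it creates a
// temporary artifact and reports progress through the caller's callback.
func makeGGUF(fn func(progressResponse)) (string, error) {
	fn(progressResponse{Status: "converting model"})
	f, err := os.CreateTemp("", "*.gguf")
	if err != nil {
		return "", err
	}
	defer f.Close()
	return f.Name(), nil
}

func main() {
	// The caller supplies the callback and owns cleanup of the temporary
	// file, mirroring the defer os.RemoveAll(ggufName) in the diff.
	fn := func(resp progressResponse) { fmt.Println(resp.Status) }

	ggufName, err := makeGGUF(fn)
	if err != nil {
		fmt.Println(err)
		return
	}
	if ggufName != "" {
		slog.Debug(fmt.Sprintf("new image layer path: %s", ggufName))
		defer os.RemoveAll(ggufName)
	}
	// ... hand ggufName to the layer-creation step ...
}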
@@ -422,10 +423,13 @@ func CreateModel(ctx context.Context, name, modelFileDir string, commands []pars
 CREATE:
 	for {
 		fn(api.ProgressResponse{Status: "creating model layer"})
+		if _, err := bin.Seek(offset, io.SeekStart); err != nil {
+			return err
+		}
 
-		bin.Seek(offset, io.SeekStart)
 		ggml, err := llm.DecodeGGML(bin)
 		if err != nil {
+			slog.Error(fmt.Sprintf("error decoding gguf file: %q", err))
 			switch {
 			case errors.Is(err, io.EOF):
 				break CREATE
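Note: the old loop called bin.Seek and discarded its error; the new code checks it and returns early, and logs decode failures before classifying them. A standalone sketch of the same seek pattern (the file name and offset are illustrative):

package main

import (
	"fmt"
	"io"
	"os"
)

func main() {
	f, err := os.Open("model.bin") // illustrative path
	if err != nil {
		fmt.Println(err)
		return
	}
	defer f.Close()

	var offset int64
	// Check Seek's error instead of ignoring it: a failed seek would leave
	// the reader at an unknown position and surface later as a confusing
	// decode error.
	if _, err := f.Seek(offset, io.SeekStart); err != nil {
		fmt.Println("seek failed:", err)
		return
	}
	// ... decode the GGUF layer starting at offset ...
}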
@@ -621,8 +625,8 @@ func CreateModel(ctx context.Context, name, modelFileDir string, commands []pars
 	return nil
 }
 
-func convertSafetensors(name, fn string) (string, error) {
-	r, err := zip.OpenReader(fn)
+func convertSafetensors(name, path string, fn func(resp api.ProgressResponse)) (string, error) {
+	r, err := zip.OpenReader(path)
 	if err != nil {
 		return "", err
 	}
@@ -634,6 +638,7 @@ func convertSafetensors(name, fn string) (string, error) {
 	}
 	defer os.RemoveAll(tempDir)
 
+	fn(api.ProgressResponse{Status: "unpacking model metadata"})
 	for _, f := range r.File {
 		fpath := filepath.Join(tempDir, f.Name)
 		outFile, err := os.OpenFile(fpath, os.O_WRONLY|os.O_CREATE|os.O_TRUNC, f.Mode())
@@ -662,6 +667,7 @@ func convertSafetensors(name, fn string) (string, error) {
 
 	SupportedArchs := []string{
 		"MistralForCausalLM",
+		"GemmaForCausalLM",
 	}
 
 	for _, arch := range params.Architectures {
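Note: the hunk above adds "GemmaForCausalLM" to the whitelist of convertible architectures read from the model's parameters. A minimal sketch of that kind of check; the loop body and error message below are assumptions, since the diff only shows the list and the loop header:

package main

import "fmt"

// modelParams stands in for the params value in the diff; Architectures is
// typically parsed from the model's config.json.
type modelParams struct {
	Architectures []string
}

// checkArchitecture reports whether any declared architecture is one the
// converter knows how to handle.
func checkArchitecture(params modelParams) error {
	supportedArchs := []string{
		"MistralForCausalLM",
		"GemmaForCausalLM",
	}

	for _, arch := range params.Architectures {
		for _, supported := range supportedArchs {
			if arch == supported {
				return nil
			}
		}
	}
	return fmt.Errorf("architecture is not currently supported")
}

func main() {
	fmt.Println(checkArchitecture(modelParams{Architectures: []string{"GemmaForCausalLM"}}))
}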
@@ -670,22 +676,24 @@ func convertSafetensors(name, fn string) (string, error) {
 		}
 	}
 
-	t, err := convert.GetSafeTensors(tempDir)
+	fn(api.ProgressResponse{Status: "processing safetensors"})
+	t, err := convert.GetSafeTensors(tempDir, params)
 	if err != nil {
 		return "", err
 	}
 
-	vocab, err := convert.LoadTokens(tempDir)
+	vocab, err := convert.LoadTokens(tempDir, params)
 	if err != nil {
 		return "", err
 	}
 
-	fn, err = convert.WriteGGUF(name, t, params, vocab)
+	fn(api.ProgressResponse{Status: "converting model"})
+	path, err = convert.WriteGGUF(name, t, params, vocab)
 	if err != nil {
 		return "", err
 	}
 
-	return fn, nil
+	return path, nil
 }
 
 func CopyModel(src, dest string) error {
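Note: taken together, the changes turn convertSafetensors into a staged pipeline that reports progress before each phase and returns the path of the written GGUF. A self-contained sketch of that flow; the helper functions below are stand-ins with made-up signatures, not the repository's convert package:

package main

import "fmt"

type progressResponse struct{ Status string }

// Stand-ins for the real unpack/convert/write steps; only the ordering and
// the progress reporting mirror the diff above.
func unpackZip(path, tempDir string) error                  { return nil }
func getSafeTensors(tempDir string) (string, error)         { return "tensors", nil }
func loadTokens(tempDir string) (string, error)             { return "vocab", nil }
func writeGGUF(name, tensors, vocab string) (string, error) { return name + ".gguf", nil }

func convertSafetensors(name, path string, fn func(progressResponse)) (string, error) {
	tempDir := "conversion-temp" // illustrative; the real code uses a temp dir it later removes

	fn(progressResponse{Status: "unpacking model metadata"})
	if err := unpackZip(path, tempDir); err != nil {
		return "", err
	}

	fn(progressResponse{Status: "processing safetensors"})
	tensors, err := getSafeTensors(tempDir)
	if err != nil {
		return "", err
	}

	vocab, err := loadTokens(tempDir)
	if err != nil {
		return "", err
	}

	fn(progressResponse{Status: "converting model"})
	return writeGGUF(name, tensors, vocab)
}

func main() {
	out, err := convertSafetensors("gemma", "model.zip", func(r progressResponse) {
		fmt.Println(r.Status)
	})
	fmt.Println(out, err)
}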