Switch windows build to fully dynamic

Refactor where we store build outputs, and support a fully dynamic loading
model on Windows so the base executable has no special dependencies and
therefore doesn't require a special PATH.
Daniel Hiltgen
2023-12-23 11:35:44 -08:00
parent 9a70aecccb
commit d966b730ac
17 changed files with 379 additions and 228 deletions

llm/ext_server_windows.go (new file, 15 lines)

@@ -0,0 +1,15 @@
package llm

import (
	"fmt"

	"github.com/jmorganca/ollama/api"
)

func newDefaultExtServer(model string, adapters, projectors []string, numLayers int64, opts api.Options) (extServer, error) {
	// On windows we always load the llama.cpp libraries dynamically to avoid startup DLL dependencies
	// This ensures we can update the PATH at runtime to get everything loaded
	// Should not happen
	return nil, fmt.Errorf("no default impl on windows - all dynamic")
}
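
For context on the "all dynamic" approach the stub above refers to, here is a minimal sketch (not part of this commit) of how a Windows-only loader might prepend a library directory to PATH at runtime and then load the llama.cpp server library dynamically. The helper name loadDynamicBackend, the libDir parameter, and the DLL name ext_server_shared.dll are illustrative assumptions, not code from this change.

//go:build windows

package llm

import (
	"fmt"
	"os"
	"path/filepath"
	"syscall"
)

// loadDynamicBackend is a hypothetical illustration: it prepends the
// directory holding the llama.cpp DLLs to PATH at runtime, then loads the
// server library dynamically so the base executable itself carries no
// startup DLL dependencies.
func loadDynamicBackend(libDir string) (*syscall.DLL, error) {
	// Make dependent DLLs discoverable without requiring a special PATH
	// to be set before the process starts.
	if err := os.Setenv("PATH", libDir+string(os.PathListSeparator)+os.Getenv("PATH")); err != nil {
		return nil, err
	}

	// Load the server library; its dependent DLLs are resolved via the
	// PATH entry added above.
	dll, err := syscall.LoadDLL(filepath.Join(libDir, "ext_server_shared.dll"))
	if err != nil {
		return nil, fmt.Errorf("unable to load dynamic server library: %w", err)
	}
	return dll, nil
}

Because nothing is linked statically against llama.cpp, the base executable starts even when the DLLs are absent, and the loader can pick a library directory (for example, a GPU- or CPU-specific variant) at runtime.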