mirror of
https://github.com/dogkeeper886/ollama37.git
synced 2025-12-12 00:37:04 +00:00
Switch windows build to fully dynamic
Refactor where build outputs are stored, and support a fully dynamic loading model on Windows so that the base executable has no special DLL dependencies and thus doesn't require a special PATH.
This commit is contained in:
15
llm/ext_server_windows.go
Normal file
15
llm/ext_server_windows.go
Normal file
@@ -0,0 +1,15 @@
|
||||
package llm
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
|
||||
"github.com/jmorganca/ollama/api"
|
||||
)
|
||||
|
||||
func newDefaultExtServer(model string, adapters, projectors []string, numLayers int64, opts api.Options) (extServer, error) {
|
||||
// On windows we always load the llama.cpp libraries dynamically to avoid startup DLL dependencies
|
||||
// This ensures we can update the PATH at runtime to get everything loaded
|
||||
|
||||
// Should not happen
|
||||
return nil, fmt.Errorf("no default impl on windows - all dynamic")
|
||||
}
|
||||
Reference in New Issue
Block a user