Mirror of https://github.com/dogkeeper886/ollama37.git, synced 2025-12-12 00:37:04 +00:00
remove prompt cache
@@ -57,11 +57,9 @@ type PredictOptions struct {
 	LogitBias     string
 	TokenCallback func(string) bool
 
-	PathPromptCache             string
-	MLock, MMap, PromptCacheAll bool
-	PromptCacheRO               bool
-	MainGPU                     string
-	TensorSplit                 string
+	MLock, MMap bool
+	MainGPU     string
+	TensorSplit string
 }
 
 type PredictOption func(p *PredictOptions)
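The PredictOption type kept by this hunk is the usual Go functional-options pattern: each option is a function that mutates a PredictOptions value. Below is a minimal, self-contained sketch of how such options are applied; the NewPredictOptions constructor and the zero-value defaults are assumptions for illustration, not the bindings' actual API.

package main

import "fmt"

// Trimmed-down copy of the struct after this commit: only fields that
// survive the prompt-cache removal are kept here.
type PredictOptions struct {
	MLock, MMap bool
	MainGPU     string
	TensorSplit string
}

// A PredictOption mutates a PredictOptions value in place.
type PredictOption func(p *PredictOptions)

// NewPredictOptions is a hypothetical constructor that applies each option
// to a zero-value struct, in order.
func NewPredictOptions(opts ...PredictOption) PredictOptions {
	p := PredictOptions{}
	for _, opt := range opts {
		opt(&p)
	}
	return p
}

func main() {
	opts := NewPredictOptions(
		func(p *PredictOptions) { p.MLock = true },
		func(p *PredictOptions) { p.MainGPU = "0" },
	)
	fmt.Printf("%+v\n", opts)
}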
@@ -182,14 +180,6 @@ var Debug PredictOption = func(p *PredictOptions) {
 	p.DebugMode = true
 }
 
-var EnablePromptCacheAll PredictOption = func(p *PredictOptions) {
-	p.PromptCacheAll = true
-}
-
-var EnablePromptCacheRO PredictOption = func(p *PredictOptions) {
-	p.PromptCacheRO = true
-}
-
 var EnableMLock ModelOption = func(p *ModelOptions) {
 	p.MLock = true
 }
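This hunk also shows that the boolean toggles are split across two option types: the removed prompt-cache toggles were PredictOptions, while the surviving EnableMLock is a ModelOption. Below is a minimal sketch of the two parallel types, assuming a ModelOptions struct with an MLock field; ModelOptions itself is not shown in this diff.

package main

import "fmt"

// Per-prediction settings, reduced to the boolean fields kept by this commit.
type PredictOptions struct {
	MLock, MMap bool
}

// Model-load settings; the single field here is an assumption, since
// ModelOptions is not part of this diff.
type ModelOptions struct {
	MLock bool
}

type PredictOption func(p *PredictOptions)
type ModelOption func(p *ModelOptions)

// EnableMLock mirrors the ModelOption toggle that the commit keeps.
var EnableMLock ModelOption = func(p *ModelOptions) {
	p.MLock = true
}

func main() {
	m := ModelOptions{}
	EnableMLock(&m)
	fmt.Printf("%+v\n", m)
}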
@@ -284,13 +274,6 @@ func SetTemperature(temp float64) PredictOption {
 	}
 }
 
-// SetPathPromptCache sets the session file to store the prompt cache.
-func SetPathPromptCache(f string) PredictOption {
-	return func(p *PredictOptions) {
-		p.PathPromptCache = f
-	}
-}
-
 // SetPenalty sets the repetition penalty for text generation.
 func SetPenalty(penalty float64) PredictOption {
 	return func(p *PredictOptions) {
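Value-carrying setters such as SetTemperature and SetPenalty in this hunk return a closure that captures their argument and assigns it when the option is applied. Below is a minimal sketch of that closure pattern; the Temperature field name is an assumption, since the hunk is truncated before the assignment inside SetPenalty.

package main

import "fmt"

// Reduced stand-in for PredictOptions with a single assumed field.
type PredictOptions struct {
	Temperature float64
}

type PredictOption func(p *PredictOptions)

// SetTemperature follows the setter style in the diff: capture the value in
// a closure, assign it when the option runs.
func SetTemperature(temp float64) PredictOption {
	return func(p *PredictOptions) {
		p.Temperature = temp
	}
}

func main() {
	p := PredictOptions{}
	SetTemperature(0.2)(&p) // apply the option directly to the struct
	fmt.Printf("%+v\n", p)
}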