enable cache_prompt by default

parent c5f21f73a4
commit d4ebdadbe7
@@ -103,7 +103,6 @@ type Options struct {
 	MirostatEta      float32  `json:"mirostat_eta,omitempty"`
 	PenalizeNewline  bool     `json:"penalize_newline,omitempty"`
 	Stop             []string `json:"stop,omitempty"`
-	Cache            bool     `json:"cache,omitempty"`
 }
 
 // Runner options which must be set when the model is loaded into memory
@@ -234,8 +234,8 @@ func predict(llm extServer, opts api.Options, ctx context.Context, predict Predi
 		"penalize_nl":  opts.PenalizeNewline,
 		"seed":         opts.Seed,
 		"stop":         opts.Stop,
 		"image_data":   imageData,
-		"cache_prompt": opts.Cache,
+		"cache_prompt": true,
 	}
 
 	if predict.Format == "json" {
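
For context, cache_prompt is the llama.cpp server completion option that lets the server reuse the KV cache for the shared prefix of consecutive prompts instead of re-evaluating it from scratch. After this commit it is sent unconditionally rather than being driven by the removed Cache field on api.Options. Below is a minimal standalone sketch of the kind of request this hunk builds; the endpoint URL, prompt, and surrounding option values are illustrative assumptions, not taken from this commit.

package main

import (
	"bytes"
	"encoding/json"
	"fmt"
	"net/http"
)

func main() {
	// Request body mirroring the map built in predict(); after this
	// commit, cache_prompt is always true instead of reflecting the
	// removed api.Options Cache field.
	body := map[string]any{
		"prompt":       "Why is the sky blue?", // illustrative
		"penalize_nl":  true,
		"seed":         -1,
		"stop":         []string{"</s>"},
		"cache_prompt": true, // enabled unconditionally by this commit
	}

	buf, err := json.Marshal(body)
	if err != nil {
		panic(err)
	}

	// Hypothetical local llama.cpp-style completion endpoint; the real
	// code posts to the embedded server, not a fixed URL.
	resp, err := http.Post("http://127.0.0.1:8080/completion",
		"application/json", bytes.NewReader(buf))
	if err != nil {
		panic(err)
	}
	defer resp.Body.Close()
	fmt.Println("status:", resp.Status)
}

With the prompt prefix cached server-side, repeated requests that share a long system prompt or chat history only pay for evaluating the new tokens, which is why enabling it by default is a reasonable trade.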