Mirror of https://github.com/likelovewant/ollama-for-amd.git — synced 2025-12-21 22:33:56 +00:00
llm: Make "POST predict" error message more informative
"POST predict" essentially means that the runner has crashed, which can happen for many reasons. However, many people assume it refers to one specific error and either report only this message or group unrelated bugs together under it. This commit replaces it with a friendlier, more helpful message.
This commit is contained in:
@@ -797,7 +797,8 @@ func (s *llmServer) Completion(ctx context.Context, req CompletionRequest, fn fu
 	res, err := http.DefaultClient.Do(serverReq)
 	if err != nil {
-		return fmt.Errorf("POST predict: %v", err)
+		slog.Error("post predict", "error", err)
+		return errors.New("model runner has unexpectedly stopped, this may be due to resource limitations or an internal error, check ollama server logs for details")
 	}
 	defer res.Body.Close()
Reference in New Issue
Block a user