diff --git a/llm/server.go b/llm/server.go
index e2402256..b23a7749 100644
--- a/llm/server.go
+++ b/llm/server.go
@@ -307,6 +307,11 @@ func NewLlamaServer(gpus gpu.GpuInfoList, model string, ggml *GGML, adapters, pr
 		slog.Debug("subprocess", "environment", s.cmd.Env)
 
 		if err = s.cmd.Start(); err != nil {
+			// Detect permission denied and augment the message about noexec
+			if errors.Is(err, os.ErrPermission) {
+				finalErr = fmt.Errorf("unable to start server %w. %s may have noexec set. Set OLLAMA_TMPDIR for server to a writable executable directory", err, dir)
+				continue
+			}
 			msg := ""
 			if s.status != nil && s.status.LastErrMsg != "" {
 				msg = s.status.LastErrMsg
@@ -382,6 +387,10 @@ func (s *llmServer) getServerStatus(ctx context.Context) (ServerStatus, error) {
 		if s.status != nil && s.status.LastErrMsg != "" {
 			msg = s.status.LastErrMsg
 		}
+		if s.cmd.ProcessState.ExitCode() == -1 {
+			// Most likely a signal killed it, log some more details to try to help troubleshoot
+			slog.Warn("llama runner process no longer running", "sys", s.cmd.ProcessState.Sys(), "string", s.cmd.ProcessState.String())
+		}
 		return ServerStatusError, fmt.Errorf("llama runner process no longer running: %d %s", s.cmd.ProcessState.ExitCode(), msg)
 	}
 
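
For readers trying the change out of context, below is a minimal standalone sketch of the two patterns the diff introduces: treating a permission error from cmd.Start() as a likely noexec mount, and treating ExitCode() == -1 as a signal-killed process worth logging in more detail. The helper names (startRunner, describeExit) and the binPath/dir parameters are illustrative assumptions only; in the real code these checks live inside NewLlamaServer's retry loop and getServerStatus.

// noexec_sketch.go: standalone illustration of the error handling added by the diff.
package main

import (
	"errors"
	"fmt"
	"os"
	"os/exec"
)

// startRunner mirrors the first hunk: if exec'ing the payload fails with a
// permission error, the directory it was extracted to may be mounted noexec,
// so the error points the user at an override directory instead of a bare EACCES.
func startRunner(binPath, dir string) (*exec.Cmd, error) {
	cmd := exec.Command(binPath)
	if err := cmd.Start(); err != nil {
		if errors.Is(err, os.ErrPermission) {
			return nil, fmt.Errorf("unable to start server %w. %s may have noexec set. Set OLLAMA_TMPDIR to a writable executable directory", err, dir)
		}
		return nil, fmt.Errorf("error starting the external server: %w", err)
	}
	return cmd, nil
}

// describeExit mirrors the second hunk: ExitCode() reports -1 when the process
// was terminated by a signal rather than exiting on its own, in which case
// ProcessState.Sys() and ProcessState.String() carry the extra detail worth logging.
func describeExit(state *os.ProcessState) string {
	if state.ExitCode() == -1 {
		return fmt.Sprintf("terminated by signal: sys=%v state=%s", state.Sys(), state.String())
	}
	return fmt.Sprintf("exited with code %d", state.ExitCode())
}

func main() {
	// Paths are placeholders; point them at any executable to exercise the flow.
	cmd, err := startRunner("/tmp/ollama/runner", "/tmp/ollama")
	if err != nil {
		fmt.Fprintln(os.Stderr, err)
		return
	}
	_ = cmd.Wait()
	fmt.Println(describeExit(cmd.ProcessState))
}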