From b599946b74f4d3226325e06f4c412274475775c6 Mon Sep 17 00:00:00 2001
From: Michael Yang
Date: Wed, 11 Oct 2023 10:55:07 -0700
Subject: [PATCH] add format bytes

---
 api/client.go   |  2 +-
 format/bytes.go | 16 ++++++++++++++++
 llm/llama.go    |  2 +-
 llm/llm.go      | 40 ++++++++++++++++++++--------------------
 4 files changed, 38 insertions(+), 22 deletions(-)
 create mode 100644 format/bytes.go

diff --git a/api/client.go b/api/client.go
index 4a5b97c9..f308b233 100644
--- a/api/client.go
+++ b/api/client.go
@@ -127,7 +127,7 @@ func (c *Client) do(ctx context.Context, method, path string, reqData, respData
 	return nil
 }
 
-const maxBufferSize = 512 * 1024 // 512KB
+const maxBufferSize = 512 * 1000 // 512KB
 
 func (c *Client) stream(ctx context.Context, method, path string, data any, fn func([]byte) error) error {
 	var buf *bytes.Buffer
diff --git a/format/bytes.go b/format/bytes.go
new file mode 100644
index 00000000..63cc7b00
--- /dev/null
+++ b/format/bytes.go
@@ -0,0 +1,16 @@
+package format
+
+import "fmt"
+
+func HumanBytes(b int64) string {
+	switch {
+	case b > 1000*1000*1000:
+		return fmt.Sprintf("%d GB", b/1000/1000/1000)
+	case b > 1000*1000:
+		return fmt.Sprintf("%d MB", b/1000/1000)
+	case b > 1000:
+		return fmt.Sprintf("%d KB", b/1000)
+	default:
+		return fmt.Sprintf("%d B", b)
+	}
+}
diff --git a/llm/llama.go b/llm/llama.go
index 8bd11f53..33468cba 100644
--- a/llm/llama.go
+++ b/llm/llama.go
@@ -454,7 +454,7 @@ type PredictRequest struct {
 	Stop []string `json:"stop,omitempty"`
 }
 
-const maxBufferSize = 512 * 1024 // 512KB
+const maxBufferSize = 512 * 1000 // 512KB
 
 func (llm *llama) Predict(ctx context.Context, prevContext []int, prompt string, fn func(api.GenerateResponse)) error {
 	prevConvo, err := llm.Decode(ctx, prevContext)
diff --git a/llm/llm.go b/llm/llm.go
index ef424b5d..6df2a47c 100644
--- a/llm/llm.go
+++ b/llm/llm.go
@@ -60,33 +60,33 @@ func New(workDir, model string, adapters []string, opts api.Options) (LLM, error
 	totalResidentMemory := memory.TotalMemory()
 	switch ggml.ModelType() {
 	case "3B", "7B":
-		if ggml.FileType() == "F16" && totalResidentMemory < 16*1024*1024 {
-			return nil, fmt.Errorf("F16 model requires at least 16GB of memory")
-		} else if totalResidentMemory < 8*1024*1024 {
-			return nil, fmt.Errorf("model requires at least 8GB of memory")
+		if ggml.FileType() == "F16" && totalResidentMemory < 16*1000*1000 {
+			return nil, fmt.Errorf("F16 model requires at least 16 GB of memory")
+		} else if totalResidentMemory < 8*1000*1000 {
+			return nil, fmt.Errorf("model requires at least 8 GB of memory")
 		}
 	case "13B":
-		if ggml.FileType() == "F16" && totalResidentMemory < 32*1024*1024 {
-			return nil, fmt.Errorf("F16 model requires at least 32GB of memory")
-		} else if totalResidentMemory < 16*1024*1024 {
-			return nil, fmt.Errorf("model requires at least 16GB of memory")
+		if ggml.FileType() == "F16" && totalResidentMemory < 32*1000*1000 {
+			return nil, fmt.Errorf("F16 model requires at least 32 GB of memory")
+		} else if totalResidentMemory < 16*1000*1000 {
+			return nil, fmt.Errorf("model requires at least 16 GB of memory")
 		}
 	case "30B", "34B", "40B":
-		if ggml.FileType() == "F16" && totalResidentMemory < 64*1024*1024 {
-			return nil, fmt.Errorf("F16 model requires at least 64GB of memory")
-		} else if totalResidentMemory < 32*1024*1024 {
-			return nil, fmt.Errorf("model requires at least 32GB of memory")
+		if ggml.FileType() == "F16" && totalResidentMemory < 64*1000*1000 {
+			return nil, fmt.Errorf("F16 model requires at least 64 GB of memory")
+		} else if totalResidentMemory < 32*1000*1000 {
+			return nil, fmt.Errorf("model requires at least 32 GB of memory")
 		}
 	case "65B", "70B":
-		if ggml.FileType() == "F16" && totalResidentMemory < 128*1024*1024 {
-			return nil, fmt.Errorf("F16 model requires at least 128GB of memory")
-		} else if totalResidentMemory < 64*1024*1024 {
-			return nil, fmt.Errorf("model requires at least 64GB of memory")
+		if ggml.FileType() == "F16" && totalResidentMemory < 128*1000*1000 {
+			return nil, fmt.Errorf("F16 model requires at least 128 GB of memory")
+		} else if totalResidentMemory < 64*1000*1000 {
+			return nil, fmt.Errorf("model requires at least 64 GB of memory")
 		}
 	case "180B":
-		if ggml.FileType() == "F16" && totalResidentMemory < 512*1024*1024 {
-			return nil, fmt.Errorf("F16 model requires at least 512GB of memory")
-		} else if totalResidentMemory < 128*1024*1024 {
-			return nil, fmt.Errorf("model requires at least 128GB of memory")
+		if ggml.FileType() == "F16" && totalResidentMemory < 512*1000*1000 {
+			return nil, fmt.Errorf("F16 model requires at least 512 GB of memory")
+		} else if totalResidentMemory < 128*1000*1000 {
+			return nil, fmt.Errorf("model requires at least 128 GB of memory")
 		}
 	}