Fix OpenAI finish_reason values when empty (#4368)
This commit is contained in:
parent 8080fbce35
commit 41ba3017fd
@@ -107,9 +107,14 @@ func toChatCompletion(id string, r api.ChatResponse) ChatCompletion {
 		Model:             r.Model,
 		SystemFingerprint: "fp_ollama",
 		Choices: []Choice{{
 			Index:   0,
 			Message: Message{Role: r.Message.Role, Content: r.Message.Content},
-			FinishReason: &r.DoneReason,
+			FinishReason: func(reason string) *string {
+				if len(reason) > 0 {
+					return &reason
+				}
+				return nil
+			}(r.DoneReason),
 		}},
 		Usage: Usage{
 			// TODO: ollama returns 0 for prompt eval if the prompt was cached, but openai returns the actual count
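The fix replaces the bare &r.DoneReason pointer with an immediately invoked function: an empty DoneReason becomes a nil *string, so the JSON field serializes as null (assuming the field carries no omitempty tag) instead of an empty string. Below is a minimal standalone sketch of the same pattern; the choice struct and finishReason helper are illustrative stand-ins, not the actual types from the compatibility layer.

package main

import (
	"encoding/json"
	"fmt"
)

// choice stands in for an OpenAI-style response choice; it is illustrative
// only, not the actual Choice/ChunkChoice type from the compatibility layer.
type choice struct {
	Index        int     `json:"index"`
	FinishReason *string `json:"finish_reason"`
}

// finishReason mirrors the inlined function in the diff: an empty done reason
// maps to a nil pointer, a non-empty one is returned by address.
func finishReason(reason string) *string {
	if len(reason) > 0 {
		return &reason
	}
	return nil
}

func main() {
	inFlight, _ := json.Marshal(choice{Index: 0, FinishReason: finishReason("")})
	done, _ := json.Marshal(choice{Index: 0, FinishReason: finishReason("stop")})
	fmt.Println(string(inFlight)) // {"index":0,"finish_reason":null}
	fmt.Println(string(done))     // {"index":0,"finish_reason":"stop"}
}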
@@ -127,13 +132,16 @@ func toChunk(id string, r api.ChatResponse) ChatCompletionChunk {
 		Created:           time.Now().Unix(),
 		Model:             r.Model,
 		SystemFingerprint: "fp_ollama",
-		Choices: []ChunkChoice{
-			{
-				Index: 0,
-				Delta: Message{Role: "assistant", Content: r.Message.Content},
-				FinishReason: &r.DoneReason,
-			},
-		},
+		Choices: []ChunkChoice{{
+			Index: 0,
+			Delta: Message{Role: "assistant", Content: r.Message.Content},
+			FinishReason: func(reason string) *string {
+				if len(reason) > 0 {
+					return &reason
+				}
+				return nil
+			}(r.DoneReason),
+		}},
 	}
 }
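In toChunk this mainly affects streaming: on intermediate chunks, where r.DoneReason is presumably still empty, finish_reason now comes back as null, and only the final chunk carries a concrete reason such as "stop", which is what OpenAI-style clients expect when they watch for a non-null finish_reason. Previously &r.DoneReason always produced a non-nil pointer, so every chunk serialized finish_reason as an empty string. The hunk also collapses the single-element ChunkChoice literal into the []ChunkChoice{{...}} form already used in toChatCompletion; that part is purely cosmetic.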