
Commit d323871

fix v1 completions streaming mode (ikawrakow#768)
1 parent c519d41 commit d323871

1 file changed: +7 -12 lines changed


examples/server/server.cpp

Lines changed: 7 additions & 12 deletions
@@ -275,34 +275,29 @@ struct server_task_result {
     json to_json_oaicompat_partial() {
         std::time_t t = std::time(0);
         json logprobs = json(nullptr); // OAI default to null
-        if (!stream && probs_output.size() > 0) {
+        if (probs_output.size() > 0) {
             logprobs = json{
                 {"content", completion_token_output::probs_vector_to_json(probs_output, post_sampling_probs)},
             };
         }
-        json finish_reason = "length";
-        if (stop) {
-            //if (stop == STOP_TYPE_WORD || stop == STOP_TYPE_EOS) {
-            finish_reason = "stop";
-        }
         json res = json{
             {"choices", json::array({
                 json{
-                    {"text", stream ? "" : content}, // in stream mode, content is already in last partial chunk
+                    {"text", content},
                     {"index", index},
                     {"logprobs", logprobs},
-                    {"finish_reason", finish_reason},
+                    {"finish_reason", nullptr},
                 }
             })},
             {"created", t},
             {"model", oaicompat_model},
             {"object", "text_completion"},
             {"usage", json {
-                {"completion_tokens", n_decoded},
-                {"prompt_tokens", n_prompt_tokens},
-                {"total_tokens", n_decoded + n_prompt_tokens}
+                {"completion_tokens", n_decoded},
+                {"prompt_tokens", n_prompt_tokens},
+                {"total_tokens", n_decoded + n_prompt_tokens}
             }},
-            {"id", oaicompat_cmpl_id}
+            {"id", oaicompat_cmpl_id}
         };

         // extra fields for debugging purposes
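For illustration only (not part of the commit): a minimal standalone sketch of the partial-chunk JSON that to_json_oaicompat_partial() now builds for a streamed /v1/completions response, using nlohmann::json with placeholder values for the text, index, token counts, model name, and completion id; the real finish_reason is presumably filled in only by the final (non-partial) response.

// sketch.cpp -- standalone, assumed values; mirrors the field layout from the diff above
#include <ctime>
#include <iostream>
#include <nlohmann/json.hpp>

using json = nlohmann::ordered_json;

int main() {
    std::time_t t = std::time(0);
    json chunk = {
        {"choices", json::array({
            json{
                {"text", " world"},          // tokens decoded in this step (placeholder)
                {"index", 0},
                {"logprobs", nullptr},       // null unless probs_output is non-empty
                {"finish_reason", nullptr},  // always null for partial chunks after this fix
            }
        })},
        {"created", t},
        {"model", "my-model"},               // placeholder for oaicompat_model
        {"object", "text_completion"},
        {"usage", json{
            {"completion_tokens", 2},        // placeholder for n_decoded
            {"prompt_tokens", 5},            // placeholder for n_prompt_tokens
            {"total_tokens", 7},
        }},
        {"id", "cmpl-123"},                  // placeholder for oaicompat_cmpl_id
    };
    std::cout << chunk.dump(2) << std::endl;
    return 0;
}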
