Skip to content

Commit ca5205e

Browse files
committed
fix: docstring fix for streaming
Signed-off-by: Mark Sturdevant <mark.sturdevant@ibm.com>
1 parent 0a2e4d9 commit ca5205e

1 file changed

Lines changed: 14 additions & 1 deletion

File tree

mellea/helpers/openai_compatible_helpers.py

Lines changed: 14 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -223,7 +223,16 @@ def messages_to_docs(msgs: list[Message]) -> list[dict[str, str]]:
 
 
 def build_completion_usage(output: Any) -> CompletionUsage | None:
-    """Build a normalized usage object from a model output, if available."""
+    """Build a normalized usage object from a model output, if available.
+
+    Args:
+        output: Model output object that may expose a ``usage`` mapping with
+            token counts.
+
+    Returns:
+        A ``CompletionUsage`` object when usage metadata is present on the
+        output, otherwise ``None``.
+    """
     if not hasattr(output, "usage") or output.usage is None:
         return None
 
@@ -254,6 +263,10 @@ async def stream_chat_completion_chunks(
         stream_options: OpenAI-compatible streaming options. Currently supports
             ``include_usage`` (bool) to control whether usage stats are included
             in the final chunk. Defaults to including usage when available.
+
+    Yields:
+        Server-sent event payload strings representing OpenAI-compatible chat
+        completion chunks, including the terminating ``[DONE]`` event.
     """
     from cli.serve.models import (
         ChatCompletionChunk,

0 commit comments

Comments
 (0)