import { ChatCompletionStream } from "https://deno.land/x/openai@v4.62.1/lib/ChatCompletionStream.ts";
Constructors

new ChatCompletionStream(params: ChatCompletionCreateParams | null)

Properties
readonly currentChatCompletionSnapshot: ChatCompletionSnapshot | undefined

Methods
protected _createChatCompletion(): Promise<ParsedChatCompletion<ParsedT>>

protected _fromReadableStream(readableStream: ReadableStream, options?: Core.RequestOptions): Promise<ChatCompletion>

[Symbol.asyncIterator](this: ChatCompletionStream<ParsedT>): AsyncIterator<ChatCompletionChunk>
Static Methods
createChatCompletion<ParsedT>(): ChatCompletionStream<ParsedT>
fromReadableStream(stream: ReadableStream): ChatCompletionStream<null>
Intended for use on the frontend, consuming a stream produced with .toReadableStream() on the backend.

Note that messages sent to the model do not appear in .on('message') in this context.