// Unit_AI/node_modules/openai/lib/ChatCompletionStream.mjs
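// Helper shims emitted by the TypeScript compiler for down-leveled private class members.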
var __classPrivateFieldGet = (this && this.__classPrivateFieldGet) || function (receiver, state, kind, f) {
    if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a getter");
    if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot read private member from an object whose class did not declare it");
    return kind === "m" ? f : kind === "a" ? f.call(receiver) : f ? f.value : state.get(receiver);
};
var __classPrivateFieldSet = (this && this.__classPrivateFieldSet) || function (receiver, state, value, kind, f) {
    if (kind === "m") throw new TypeError("Private method is not writable");
    if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a setter");
    if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot write private member to an object whose class did not declare it");
    return (kind === "a" ? f.call(receiver, value) : f ? f.value = value : state.set(receiver, value)), value;
};
var _ChatCompletionStream_instances, _ChatCompletionStream_currentChatCompletionSnapshot, _ChatCompletionStream_beginRequest, _ChatCompletionStream_addChunk, _ChatCompletionStream_endRequest, _ChatCompletionStream_accumulateChatCompletion;
import { OpenAIError, APIUserAbortError } from 'openai/error';
import { AbstractChatCompletionRunner, } from "./AbstractChatCompletionRunner.mjs";
import { Stream } from 'openai/streaming';
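// Usage sketch (an assumption for illustration, not part of this file): with the openai v4
// client, `client.beta.chat.completions.stream(...)` returns an instance of this class.
//
//   const stream = client.beta.chat.completions.stream({
//     model: 'gpt-4o', // hypothetical model name
//     messages: [{ role: 'user', content: 'Say hello' }],
//   });
//   stream.on('content', (delta, snapshot) => process.stdout.write(delta));
//   const completion = await stream.finalChatCompletion();
//
// On a server you can instead pipe `stream.toReadableStream()` to the browser and rebuild the
// event stream there with `ChatCompletionStream.fromReadableStream()`.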
export class ChatCompletionStream extends AbstractChatCompletionRunner {
    constructor() {
        super(...arguments);
        _ChatCompletionStream_instances.add(this);
        _ChatCompletionStream_currentChatCompletionSnapshot.set(this, void 0);
    }
    get currentChatCompletionSnapshot() {
        return __classPrivateFieldGet(this, _ChatCompletionStream_currentChatCompletionSnapshot, "f");
    }
    /**
     * Intended for use on the frontend, consuming a stream produced with
     * `.toReadableStream()` on the backend.
     *
     * Note that messages sent to the model do not appear in `.on('message')`
     * in this context.
     */
    static fromReadableStream(stream) {
        const runner = new ChatCompletionStream();
        runner._run(() => runner._fromReadableStream(stream));
        return runner;
    }
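    // Static factory that starts a streaming chat completion request. (Hedged: in openai-node v4
    // the `beta.chat.completions.stream()` helper appears to delegate to this method.)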
    static createChatCompletion(completions, params, options) {
        const runner = new ChatCompletionStream();
        runner._run(() => runner._runChatCompletion(completions, { ...params, stream: true }, { ...options, headers: { ...options?.headers, 'X-Stainless-Helper-Method': 'stream' } }));
        return runner;
    }
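    // Issues the streaming API request: mirrors the caller's abort signal onto this runner's
    // controller, folds every streamed chunk into the running snapshot, and resolves with the
    // finalized ChatCompletion (throwing APIUserAbortError if the request was aborted).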
    async _createChatCompletion(completions, params, options) {
        const signal = options?.signal;
        if (signal) {
            if (signal.aborted)
                this.controller.abort();
            signal.addEventListener('abort', () => this.controller.abort());
        }
        __classPrivateFieldGet(this, _ChatCompletionStream_instances, "m", _ChatCompletionStream_beginRequest).call(this);
        const stream = await completions.create({ ...params, stream: true }, { ...options, signal: this.controller.signal });
        this._connected();
        for await (const chunk of stream) {
            __classPrivateFieldGet(this, _ChatCompletionStream_instances, "m", _ChatCompletionStream_addChunk).call(this, chunk);
        }
        if (stream.controller.signal?.aborted) {
            throw new APIUserAbortError();
        }
        return this._addChatCompletion(__classPrivateFieldGet(this, _ChatCompletionStream_instances, "m", _ChatCompletionStream_endRequest).call(this));
    }
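    // Replays chunks from a ReadableStream previously produced by `.toReadableStream()`. A change
    // in chunk id finalizes the current snapshot and begins a new one, so a single stream can
    // carry several completions back to back.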
    async _fromReadableStream(readableStream, options) {
        const signal = options?.signal;
        if (signal) {
            if (signal.aborted)
                this.controller.abort();
            signal.addEventListener('abort', () => this.controller.abort());
        }
        __classPrivateFieldGet(this, _ChatCompletionStream_instances, "m", _ChatCompletionStream_beginRequest).call(this);
        this._connected();
        const stream = Stream.fromReadableStream(readableStream, this.controller);
        let chatId;
        for await (const chunk of stream) {
            if (chatId && chatId !== chunk.id) {
                // A new request has been made.
                this._addChatCompletion(__classPrivateFieldGet(this, _ChatCompletionStream_instances, "m", _ChatCompletionStream_endRequest).call(this));
            }
            __classPrivateFieldGet(this, _ChatCompletionStream_instances, "m", _ChatCompletionStream_addChunk).call(this, chunk);
            chatId = chunk.id;
        }
        if (stream.controller.signal?.aborted) {
            throw new APIUserAbortError();
        }
        return this._addChatCompletion(__classPrivateFieldGet(this, _ChatCompletionStream_instances, "m", _ChatCompletionStream_endRequest).call(this));
    }
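    // The computed key below is TypeScript's down-leveling of this class's private members: the
    // comma expression initializes the WeakMap/WeakSet backing stores and defines the private
    // helpers (#beginRequest, #addChunk, #endRequest, #accumulateChatCompletion) before
    // evaluating to Symbol.asyncIterator, whose method follows.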
    [(_ChatCompletionStream_currentChatCompletionSnapshot = new WeakMap(), _ChatCompletionStream_instances = new WeakSet(), _ChatCompletionStream_beginRequest = function _ChatCompletionStream_beginRequest() {
        if (this.ended)
            return;
        __classPrivateFieldSet(this, _ChatCompletionStream_currentChatCompletionSnapshot, undefined, "f");
    }, _ChatCompletionStream_addChunk = function _ChatCompletionStream_addChunk(chunk) {
        if (this.ended)
            return;
        const completion = __classPrivateFieldGet(this, _ChatCompletionStream_instances, "m", _ChatCompletionStream_accumulateChatCompletion).call(this, chunk);
        this._emit('chunk', chunk, completion);
        const delta = chunk.choices[0]?.delta?.content;
        const snapshot = completion.choices[0]?.message;
        if (delta != null && snapshot?.role === 'assistant' && snapshot?.content) {
            this._emit('content', delta, snapshot.content);
        }
    }, _ChatCompletionStream_endRequest = function _ChatCompletionStream_endRequest() {
        if (this.ended) {
            throw new OpenAIError(`stream has ended, this shouldn't happen`);
        }
        const snapshot = __classPrivateFieldGet(this, _ChatCompletionStream_currentChatCompletionSnapshot, "f");
        if (!snapshot) {
            throw new OpenAIError(`request ended without sending any chunks`);
        }
        __classPrivateFieldSet(this, _ChatCompletionStream_currentChatCompletionSnapshot, undefined, "f");
        return finalizeChatCompletion(snapshot);
    }, _ChatCompletionStream_accumulateChatCompletion = function _ChatCompletionStream_accumulateChatCompletion(chunk) {
        var _a, _b, _c;
        let snapshot = __classPrivateFieldGet(this, _ChatCompletionStream_currentChatCompletionSnapshot, "f");
        const { choices, ...rest } = chunk;
        if (!snapshot) {
            snapshot = __classPrivateFieldSet(this, _ChatCompletionStream_currentChatCompletionSnapshot, {
                ...rest,
                choices: [],
            }, "f");
        }
        else {
            Object.assign(snapshot, rest);
        }
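        // Fold each streamed choice delta into the snapshot, keyed by choice index: text and
        // argument fragments are concatenated, scalar fields are overwritten as they arrive.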
        for (const { delta, finish_reason, index, logprobs = null, ...other } of chunk.choices) {
            let choice = snapshot.choices[index];
            if (!choice) {
                choice = snapshot.choices[index] = { finish_reason, index, message: {}, logprobs, ...other };
            }
            if (logprobs) {
                if (!choice.logprobs) {
                    choice.logprobs = Object.assign({}, logprobs);
                }
                else {
                    const { content, ...rest } = logprobs;
                    Object.assign(choice.logprobs, rest);
                    if (content) {
                        (_a = choice.logprobs).content ?? (_a.content = []);
                        choice.logprobs.content.push(...content);
                    }
                }
            }
            if (finish_reason)
                choice.finish_reason = finish_reason;
            Object.assign(choice, other);
            if (!delta)
                continue; // Shouldn't happen; just in case.
            const { content, function_call, role, tool_calls, ...rest } = delta;
            Object.assign(choice.message, rest);
            if (content)
                choice.message.content = (choice.message.content || '') + content;
            if (role)
                choice.message.role = role;
            if (function_call) {
                if (!choice.message.function_call) {
                    choice.message.function_call = function_call;
                }
                else {
                    if (function_call.name)
                        choice.message.function_call.name = function_call.name;
                    if (function_call.arguments) {
                        (_b = choice.message.function_call).arguments ?? (_b.arguments = '');
                        choice.message.function_call.arguments += function_call.arguments;
                    }
                }
            }
            if (tool_calls) {
                if (!choice.message.tool_calls)
                    choice.message.tool_calls = [];
                for (const { index, id, type, function: fn, ...rest } of tool_calls) {
                    const tool_call = ((_c = choice.message.tool_calls)[index] ?? (_c[index] = {}));
                    Object.assign(tool_call, rest);
                    if (id)
                        tool_call.id = id;
                    if (type)
                        tool_call.type = type;
                    if (fn)
                        tool_call.function ?? (tool_call.function = { arguments: '' });
                    if (fn?.name)
                        tool_call.function.name = fn.name;
                    if (fn?.arguments)
                        tool_call.function.arguments += fn.arguments;
                }
            }
        }
        return snapshot;
    }, Symbol.asyncIterator)]() {
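        // Bridge the emitter-based 'chunk'/'end' events into an async iterator: chunks queue up
        // in pushQueue until a pending read from readQueue consumes them, and 'end' resolves any
        // outstanding reads as done.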
        const pushQueue = [];
        const readQueue = [];
        let done = false;
        this.on('chunk', (chunk) => {
            const reader = readQueue.shift();
            if (reader) {
                reader(chunk);
            }
            else {
                pushQueue.push(chunk);
            }
        });
        this.on('end', () => {
            done = true;
            for (const reader of readQueue) {
                reader(undefined);
            }
            readQueue.length = 0;
        });
        return {
            next: async () => {
                if (!pushQueue.length) {
                    if (done) {
                        return { value: undefined, done: true };
                    }
                    return new Promise((resolve) => readQueue.push(resolve)).then((chunk) => (chunk ? { value: chunk, done: false } : { value: undefined, done: true }));
                }
                const chunk = pushQueue.shift();
                return { value: chunk, done: false };
            },
        };
    }
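    // Serializes the chunk stream into a web ReadableStream, which a frontend can replay with
    // `ChatCompletionStream.fromReadableStream()` (see the doc comment above).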
    toReadableStream() {
        const stream = new Stream(this[Symbol.asyncIterator].bind(this), this.controller);
        return stream.toReadableStream();
    }
}
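// Validates the accumulated snapshot and converts it into a final ChatCompletion, throwing an
// OpenAIError when required fields (finish_reason, role, tool call id/type/function) never arrived.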
function finalizeChatCompletion(snapshot) {
    const { id, choices, created, model, system_fingerprint, ...rest } = snapshot;
    return {
        ...rest,
        id,
        choices: choices.map(({ message, finish_reason, index, logprobs, ...choiceRest }) => {
            if (!finish_reason)
                throw new OpenAIError(`missing finish_reason for choice ${index}`);
            const { content = null, function_call, tool_calls, ...messageRest } = message;
            const role = message.role; // this is what we expect; in theory it could be different which would make our types a slight lie but would be fine.
            if (!role)
                throw new OpenAIError(`missing role for choice ${index}`);
            if (function_call) {
                const { arguments: args, name } = function_call;
                if (args == null)
                    throw new OpenAIError(`missing function_call.arguments for choice ${index}`);
                if (!name)
                    throw new OpenAIError(`missing function_call.name for choice ${index}`);
                return {
                    ...choiceRest,
                    message: { content, function_call: { arguments: args, name }, role },
                    finish_reason,
                    index,
                    logprobs,
                };
            }
            if (tool_calls) {
                return {
                    ...choiceRest,
                    index,
                    finish_reason,
                    logprobs,
                    message: {
                        ...messageRest,
                        role,
                        content,
                        tool_calls: tool_calls.map((tool_call, i) => {
                            const { function: fn, type, id, ...toolRest } = tool_call;
                            const { arguments: args, name, ...fnRest } = fn || {};
                            if (id == null)
                                throw new OpenAIError(`missing choices[${index}].tool_calls[${i}].id\n${str(snapshot)}`);
                            if (type == null)
                                throw new OpenAIError(`missing choices[${index}].tool_calls[${i}].type\n${str(snapshot)}`);
                            if (name == null)
                                throw new OpenAIError(`missing choices[${index}].tool_calls[${i}].function.name\n${str(snapshot)}`);
                            if (args == null)
                                throw new OpenAIError(`missing choices[${index}].tool_calls[${i}].function.arguments\n${str(snapshot)}`);
                            return { ...toolRest, id, type, function: { ...fnRest, name, arguments: args } };
                        }),
                    },
                };
            }
            return {
                ...choiceRest,
                message: { ...messageRest, content, role },
                finish_reason,
                index,
                logprobs,
            };
        }),
        created,
        model,
        object: 'chat.completion',
        ...(system_fingerprint ? { system_fingerprint } : {}),
    };
}
function str(x) {
    return JSON.stringify(x);
}
//# sourceMappingURL=ChatCompletionStream.mjs.map