Works with: Cloudflare Workers, Node.js, Deno, Bun, Browsers
JSR Score: 82%




Downloads: 96/wk
Published: 7 months ago (v0.0.8)
A tiny (1.18kB), tree-shakeable OpenAI client. Optionally supports response streaming in all JavaScript runtimes.
123456789101112131415161718192021222324252627282930313233343536373839404142434445464748495051525354555657585960616263646566676869707172737475767778798081828384858687888990919293949596979899100101102103104105106107108109110111112113114115116117118119120121122123124125126127128129130131132133134135136137138139140141142143144145146147148149150151152153154155156157158159160161162163164165166167168169170171172173174175176177178179180181182183184185186187188189190191192193194195196197198199200201202203204205206207208209210211212213214215216217218219220221222223224225226227228229230231232233234235236237238239240241242243244245246247248249250251252253254255256257258259260261262263264265266267268269270271272273274275276277278279280281282283284285286287288289290291292293294295296297298299300301302303304305306307308309310311312313314315316317318319320321322323324325326327328329330331332333334335336337338339340341342343344345346347348349350351352353354355356357358359360361362363364365366367368369import { events } from 'jsr:@lukeed/fetch-event-stream@^0.1.5'; import type { ChatCompletion, ChatCompletionChunk, ChatCompletionCreateParamsNonStreaming, ChatCompletionCreateParamsStreaming, } from './types/chat.d.ts'; import type { Completion, CompletionCreateParamsNonStreaming, CompletionCreateParamsStreaming, } from './types/completions.d.ts'; // deno-lint-ignore no-namespace export namespace Chat { export type Request = ChatCompletionCreateParamsNonStreaming; export type Output = ChatCompletion; // deno-lint-ignore no-namespace export namespace Stream { export type Request = ChatCompletionCreateParamsStreaming; export type Output = ReadableStream<ChatCompletionChunk>; export type Chunk = ChatCompletionChunk; } } // deno-lint-ignore no-namespace export namespace Completion { export type Request = CompletionCreateParamsNonStreaming; export type Output = Completion; // deno-lint-ignore no-namespace export namespace Stream { export type Request = CompletionCreateParamsStreaming; 
export type Output = ReadableStream<Chunk>; export type Chunk = Completion; } } /** * Initialization options. * * These settings will be normalized into {@link Credentials}, which * will affect all associated HTTP requests. */ export type AuthOptions = { /** * OpenAI API Key */ apikey: string; /** * Organization ID */ organization?: string; /** * @default "https://api.openai.com/v1/" */ baseURL?: `https://${string}/`; /** * Any custom HTTP headers to include in every request. */ headers?: Record<string, string>; /** * Any custom URL search parameters (AKA, query string) to append to every request. */ params?: URLSearchParams | Record<string, string> | string; }; /** * The normalized and reusable credentials object. * * Required by {@link request} and {@link stream} functions. * * Managed by {@link Client} internally. */ export type Credentials = { base: `https://${string}`; headers: Headers; params?: URLSearchParams; }; const DEFAULT_BASE = 'https://api.openai.com/v1/'; /** * Generate normalized {@link Credentials} for reuse. */ export function credentials(options: AuthOptions): Credentials { let headers: HeadersInit = { 'Authorization': `Bearer ${options.apikey}`, ...options.headers, }; if (options.organization) { headers['OpenAI-Organization'] = options.organization; } return { base: (options.baseURL || DEFAULT_BASE).slice(0, -1) as `https://${string}`, params: options.params && new URLSearchParams(options.params) || undefined, headers: new Headers(headers), }; } /** * The request body's type definition. * * Must be an object, undefined, or null. */ export type Input = Record<string, unknown> | undefined | null; /** * The request initializtion options. * * This is the same as `RequestInit` for `globalThis.fetch`, except * that `body` is allowed to be a JSON object, a `path` is required, and * you may pass a custom `fetch` executor. * * @NOTE Most of this definition is to enforce a `body` property to be * defined when `T` is known to not be `undefined`. 
*/ export type Options<T extends Input> = & Omit<RequestInit, 'body'> & { /** * The URL path; appended to {@link AuthOptions['baseURL']}. */ path: `/${string}`; /** * A custom `fetch` implementation, if any. * @default globalThis.fetch */ fetch?: typeof globalThis.fetch; } & (T extends undefined ? { body?: T } : { body: T }); // @private function dispatch< I extends Input, >( c: Credentials, options: Options<I>, ): Promise<Response> { let input: BodyInit | undefined; let { headers, base, params } = c; let { path, body, fetch: f, ...init } = options; let k, v, hh, url = base + path; if (params) url += '?' + params; if (body != null) { input = JSON.stringify(body); headers.set('Content-Type', 'application/json'); } if (init.headers) { hh = new Headers(init.headers); for ([k, v] of hh) headers.has(k) || headers.set(k, v); } return (f || fetch)(url, { ...init, headers, body: input }); } /** * Send a HTTP request and, if 2xx, parse the JSON response as `T` object type. * * The request body (`options.body`) is required if `I` is defined, otherwise `I` is inferred. * * NOTE: The `Response` is thrown if not 2xx status! * * @example Usage * ```ts * import * as OpenAI from '@agent/openai'; * * let ctx = OpenAI.credentials({ * apikey: '...', * }); * * try { * let reply = await OpenAI.request<OpenAI.Completion.Output>(ctx, { * method: 'POST', * path: '/completions', * body: { * model: '...', * prompt: '...', * } satisfies OpenAI.Completion.Request, * }); * * console.assert(reply.object === 'text_completion'); * } catch (err) { * if (err instanceof Response) { * console.log('OpenAI Error', err.status, await err.json()); * } else { * // ... 
* } * } * ``` */ export async function request<T, I extends Input = Input>( credentials: Credentials, options: Options<I>, ): Promise<T> { let r = await dispatch(credentials, options); if (r.ok) return r.json() as Promise<T>; else throw r; } const ABORTED = new Error('Aborted'); function read<T>(res: Response, signal?: AbortSignal | null): ReadableStream<T> { return new ReadableStream<T>({ async start(ctrl) { try { for await (let x of events(res, signal)) { if (signal?.aborted) { throw ABORTED; } if (x.data === '[DONE]') break; let t = x.data && JSON.parse(x.data); if (t) ctrl.enqueue(t as T); } } catch (err) { ctrl.error(err); } finally { ctrl.close(); } }, }); } /** * Send a HTTP request and, if 2xx, begin streaming the chunks of `T` object type. * * The request body (`options.body`) is required if `I` is defined, otherwise `I` is inferred. * * NOTE: The `Response` is thrown if not 2xx status! * * @example Usage * ```ts * import * as OpenAI from '@agent/openai'; * * let ctx = OpenAI.credentials({ * apikey: '...', * }); * * // NOTE: may throw * let events = await OpenAI.stream< * OpenAI.Chat.Stream.Chunk, * OpenAI.Chat.Stream.Request, * >(ctx, { * method: 'POST', * path: '/chat/completions', * body: { * stream: true, * model: '...', * messages: [...] * }, * }); * * for await (let msg of events) { * console.log(msg.choices); * } * ``` */ export async function stream<T, I extends Input & { stream: true } = Input & { stream: true }>( credentials: Credentials, options: Options<I>, ): Promise<ReadableStream<T>> { let r = await dispatch(credentials, options); if (r.ok) return read<T>(r, options.signal); else throw r; } /** * @example * ```ts * let client = new OpenAI.Client({ * apikey: '...', * }); * * // NOTE: All methods can throw! * let reply = await client.completion({ * model: '...', * prompt: '...', * }); * * console.assert(reply.object === 'text_completion'); * * // NOTE: All methods can throw! 
* let stream = await client.completion({ * model: '...', * prompt: '...', * stream: true, // << required! * }); * * for await (let event of stream) { * console.assert(event.object === 'text_completion') * } * ``` */ export class Client { #c: Credentials; constructor(options: AuthOptions) { this.#c = credentials(options); } /** * The `fetch` implementation. * * NOTE: You may override method with a custom definition. * * @default globalThis.fetch */ fetch(input: Request | URL | string, init?: RequestInit): Promise<Response> { return fetch(input, init); } async #run<T, C, I extends Input = Input>(options: Options<I>) { options.fetch = this.fetch; let sig = options.signal; // TODO: request.signal? let stream = !!(options.body?.stream); let res = await dispatch(this.#c, options); if (!res.ok) throw res; return stream ? read<C>(res, sig) : res.json() as Promise<T>; } /** * Dispatch a [Chat Completions](https://platform.openai.com/docs/api-reference/chat) request. * * When `input` includes `stream: true`, then a `ReadableStream` is returned for message streaming. * Otherwise the full {@link Chat.Output} JSON object is returned. * * **Important:** Will throw the `Response` on 4xx or 5xx status! */ chat(input: Chat.Request, signal?: AbortSignal): Promise<Chat.Output>; chat(input: Chat.Stream.Request, signal?: AbortSignal): Promise<Chat.Stream.Output>; chat(input: Chat.Request | Chat.Stream.Request, signal?: AbortSignal) { return this.#run<Chat.Output, Chat.Stream.Chunk, typeof input>({ method: 'POST', path: '/chat/completions', signal: signal, body: input as Chat.Request, }); } /** * Dispatch a [Completions (legacy)](https://platform.openai.com/docs/api-reference/completions) request. * * When `input` includes `stream: true`, then a `ReadableStream` is returned for message streaming. * Otherwise the full {@link Completion.Output} JSON object is returned. * * **Important:** Will throw the `Response` on 4xx or 5xx status! 
*/ // deno-fmt-ignore completion(input: Completion.Stream.Request, signal?: AbortSignal): Promise<Completion.Stream.Output>; completion(input: Completion.Request, signal?: AbortSignal): Promise<Completion.Output>; completion(input: Completion.Request | Completion.Stream.Request, signal?: AbortSignal) { return this.#run<Completion.Output, Completion.Output, typeof input>({ method: 'POST', path: '/completions', signal: signal, body: input as Completion.Request, }); } }