import { __commonJS, __export, __toESM } from "./chunk-Dx39-ABv.mjs"; import crypto from "node:crypto"; //#region node_modules/.pnpm/uuid@10.0.0/node_modules/uuid/dist/esm-node/regex.js var regex_default = /^(?:[0-9a-f]{8}-[0-9a-f]{4}-[1-8][0-9a-f]{3}-[89ab][0-9a-f]{3}-[0-9a-f]{12}|00000000-0000-0000-0000-000000000000|ffffffff-ffff-ffff-ffff-ffffffffffff)$/i; //#endregion //#region node_modules/.pnpm/uuid@10.0.0/node_modules/uuid/dist/esm-node/validate.js function validate$1(uuid) { return typeof uuid === "string" && regex_default.test(uuid); } var validate_default = validate$1; //#endregion //#region node_modules/.pnpm/uuid@10.0.0/node_modules/uuid/dist/esm-node/stringify.js /** * Convert array of 16 byte values to UUID string format of the form: * XXXXXXXX-XXXX-XXXX-XXXX-XXXXXXXXXXXX */ const byteToHex = []; for (let i = 0; i < 256; ++i) byteToHex.push((i + 256).toString(16).slice(1)); function unsafeStringify(arr$1, offset = 0) { return (byteToHex[arr$1[offset + 0]] + byteToHex[arr$1[offset + 1]] + byteToHex[arr$1[offset + 2]] + byteToHex[arr$1[offset + 3]] + "-" + byteToHex[arr$1[offset + 4]] + byteToHex[arr$1[offset + 5]] + "-" + byteToHex[arr$1[offset + 6]] + byteToHex[arr$1[offset + 7]] + "-" + byteToHex[arr$1[offset + 8]] + byteToHex[arr$1[offset + 9]] + "-" + byteToHex[arr$1[offset + 10]] + byteToHex[arr$1[offset + 11]] + byteToHex[arr$1[offset + 12]] + byteToHex[arr$1[offset + 13]] + byteToHex[arr$1[offset + 14]] + byteToHex[arr$1[offset + 15]]).toLowerCase(); } //#endregion //#region node_modules/.pnpm/uuid@10.0.0/node_modules/uuid/dist/esm-node/rng.js const rnds8Pool = new Uint8Array(256); let poolPtr = rnds8Pool.length; function rng() { if (poolPtr > rnds8Pool.length - 16) { crypto.randomFillSync(rnds8Pool); poolPtr = 0; } return rnds8Pool.slice(poolPtr, poolPtr += 16); } //#endregion //#region node_modules/.pnpm/uuid@10.0.0/node_modules/uuid/dist/esm-node/native.js var native_default = { randomUUID: crypto.randomUUID }; //#endregion //#region node_modules/.pnpm/uuid@10.0.0/node_modules/uuid/dist/esm-node/v4.js function v4(options, buf, offset) { if (native_default.randomUUID && !buf && !options) return native_default.randomUUID(); options = options || {}; const rnds = options.random || (options.rng || rng)(); rnds[6] = rnds[6] & 15 | 64; rnds[8] = rnds[8] & 63 | 128; if (buf) { offset = offset || 0; for (let i = 0; i < 16; ++i) buf[offset + i] = rnds[i]; return buf; } return unsafeStringify(rnds); } var v4_default = v4; //#endregion //#region node_modules/.pnpm/retry@0.13.1/node_modules/retry/lib/retry_operation.js var require_retry_operation = __commonJS({ "node_modules/.pnpm/retry@0.13.1/node_modules/retry/lib/retry_operation.js"(exports, module) { function RetryOperation$1(timeouts, options) { if (typeof options === "boolean") options = { forever: options }; this._originalTimeouts = JSON.parse(JSON.stringify(timeouts)); this._timeouts = timeouts; this._options = options || {}; this._maxRetryTime = options && options.maxRetryTime || Infinity; this._fn = null; this._errors = []; this._attempts = 1; this._operationTimeout = null; this._operationTimeoutCb = null; this._timeout = null; this._operationStart = null; this._timer = null; if (this._options.forever) this._cachedTimeouts = this._timeouts.slice(0); } module.exports = RetryOperation$1; RetryOperation$1.prototype.reset = function() { this._attempts = 1; this._timeouts = this._originalTimeouts.slice(0); }; RetryOperation$1.prototype.stop = function() { if (this._timeout) clearTimeout(this._timeout); if (this._timer) 
clearTimeout(this._timer); this._timeouts = []; this._cachedTimeouts = null; }; RetryOperation$1.prototype.retry = function(err) { if (this._timeout) clearTimeout(this._timeout); if (!err) return false; var currentTime = (/* @__PURE__ */ new Date()).getTime(); if (err && currentTime - this._operationStart >= this._maxRetryTime) { this._errors.push(err); this._errors.unshift(new Error("RetryOperation timeout occurred")); return false; } this._errors.push(err); var timeout = this._timeouts.shift(); if (timeout === void 0) if (this._cachedTimeouts) { this._errors.splice(0, this._errors.length - 1); timeout = this._cachedTimeouts.slice(-1); } else return false; var self = this; this._timer = setTimeout(function() { self._attempts++; if (self._operationTimeoutCb) { self._timeout = setTimeout(function() { self._operationTimeoutCb(self._attempts); }, self._operationTimeout); if (self._options.unref) self._timeout.unref(); } self._fn(self._attempts); }, timeout); if (this._options.unref) this._timer.unref(); return true; }; RetryOperation$1.prototype.attempt = function(fn, timeoutOps) { this._fn = fn; if (timeoutOps) { if (timeoutOps.timeout) this._operationTimeout = timeoutOps.timeout; if (timeoutOps.cb) this._operationTimeoutCb = timeoutOps.cb; } var self = this; if (this._operationTimeoutCb) this._timeout = setTimeout(function() { self._operationTimeoutCb(); }, self._operationTimeout); this._operationStart = (/* @__PURE__ */ new Date()).getTime(); this._fn(this._attempts); }; RetryOperation$1.prototype.try = function(fn) { console.log("Using RetryOperation.try() is deprecated"); this.attempt(fn); }; RetryOperation$1.prototype.start = function(fn) { console.log("Using RetryOperation.start() is deprecated"); this.attempt(fn); }; RetryOperation$1.prototype.start = RetryOperation$1.prototype.try; RetryOperation$1.prototype.errors = function() { return this._errors; }; RetryOperation$1.prototype.attempts = function() { return this._attempts; }; RetryOperation$1.prototype.mainError = function() { if (this._errors.length === 0) return null; var counts = {}; var mainError = null; var mainErrorCount = 0; for (var i = 0; i < this._errors.length; i++) { var error = this._errors[i]; var message = error.message; var count = (counts[message] || 0) + 1; counts[message] = count; if (count >= mainErrorCount) { mainError = error; mainErrorCount = count; } } return mainError; }; } }); //#endregion //#region node_modules/.pnpm/retry@0.13.1/node_modules/retry/lib/retry.js var require_retry$1 = __commonJS({ "node_modules/.pnpm/retry@0.13.1/node_modules/retry/lib/retry.js"(exports) { var RetryOperation = require_retry_operation(); exports.operation = function(options) { var timeouts = exports.timeouts(options); return new RetryOperation(timeouts, { forever: options && (options.forever || options.retries === Infinity), unref: options && options.unref, maxRetryTime: options && options.maxRetryTime }); }; exports.timeouts = function(options) { if (options instanceof Array) return [].concat(options); var opts = { retries: 10, factor: 2, minTimeout: 1 * 1e3, maxTimeout: Infinity, randomize: false }; for (var key in options) opts[key] = options[key]; if (opts.minTimeout > opts.maxTimeout) throw new Error("minTimeout is greater than maxTimeout"); var timeouts = []; for (var i = 0; i < opts.retries; i++) timeouts.push(this.createTimeout(i, opts)); if (options && options.forever && !timeouts.length) timeouts.push(this.createTimeout(i, opts)); timeouts.sort(function(a, b) { return a - b; }); return timeouts; }; 
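/*
 * Illustrative usage sketch for this vendored `retry` module (comment only, not
 * executed as part of the bundle). `exports.operation(options)` wraps the delays
 * produced by `exports.timeouts(options)` (defaults: 10 retries, minTimeout 1000 ms,
 * factor 2) in a RetryOperation:
 *
 *   const op = retry.operation({ retries: 3, minTimeout: 100 });
 *   op.attempt(function (currentAttempt) {
 *     doRequest(function (err, result) {
 *       if (op.retry(err)) return;          // a truthy err schedules the next attempt
 *       callback(op.mainError(), result);   // most frequent error seen, or null
 *     });
 *   });
 *
 * `doRequest` and `callback` are hypothetical placeholders, not part of this bundle.
 */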
exports.createTimeout = function(attempt, opts) { var random = opts.randomize ? Math.random() + 1 : 1; var timeout = Math.round(random * Math.max(opts.minTimeout, 1) * Math.pow(opts.factor, attempt)); timeout = Math.min(timeout, opts.maxTimeout); return timeout; }; exports.wrap = function(obj, options, methods) { if (options instanceof Array) { methods = options; options = null; } if (!methods) { methods = []; for (var key in obj) if (typeof obj[key] === "function") methods.push(key); } for (var i = 0; i < methods.length; i++) { var method = methods[i]; var original = obj[method]; obj[method] = function retryWrapper(original$1) { var op = exports.operation(options); var args = Array.prototype.slice.call(arguments, 1); var callback = args.pop(); args.push(function(err) { if (op.retry(err)) return; if (err) arguments[0] = op.mainError(); callback.apply(this, arguments); }); op.attempt(function() { original$1.apply(obj, args); }); }.bind(obj, original); obj[method].options = options; } }; } }); //#endregion //#region node_modules/.pnpm/retry@0.13.1/node_modules/retry/index.js var require_retry = __commonJS({ "node_modules/.pnpm/retry@0.13.1/node_modules/retry/index.js"(exports, module) { module.exports = require_retry$1(); } }); //#endregion //#region node_modules/.pnpm/p-retry@4.6.2/node_modules/p-retry/index.js var require_p_retry = __commonJS({ "node_modules/.pnpm/p-retry@4.6.2/node_modules/p-retry/index.js"(exports, module) { const retry = require_retry(); const networkErrorMsgs = [ "Failed to fetch", "NetworkError when attempting to fetch resource.", "The Internet connection appears to be offline.", "Network request failed" ]; var AbortError = class extends Error { constructor(message) { super(); if (message instanceof Error) { this.originalError = message; ({message} = message); } else { this.originalError = new Error(message); this.originalError.stack = this.stack; } this.name = "AbortError"; this.message = message; } }; const decorateErrorWithCounts = (error, attemptNumber, options) => { const retriesLeft = options.retries - (attemptNumber - 1); error.attemptNumber = attemptNumber; error.retriesLeft = retriesLeft; return error; }; const isNetworkError = (errorMessage) => networkErrorMsgs.includes(errorMessage); const pRetry$3 = (input, options) => new Promise((resolve, reject) => { options = { onFailedAttempt: () => {}, retries: 10, ...options }; const operation = retry.operation(options); operation.attempt(async (attemptNumber) => { try { resolve(await input(attemptNumber)); } catch (error) { if (!(error instanceof Error)) { reject(new TypeError(`Non-error was thrown: "${error}". You should only throw errors.`)); return; } if (error instanceof AbortError) { operation.stop(); reject(error.originalError); } else if (error instanceof TypeError && !isNetworkError(error.message)) { operation.stop(); reject(error); } else { decorateErrorWithCounts(error, attemptNumber, options); try { await options.onFailedAttempt(error); } catch (error$1) { reject(error$1); return; } if (!operation.retry(error)) reject(operation.mainError()); } } }); }); module.exports = pRetry$3; module.exports.default = pRetry$3; module.exports.AbortError = AbortError; } }); //#endregion //#region node_modules/.pnpm/eventemitter3@4.0.7/node_modules/eventemitter3/index.js var require_eventemitter3 = __commonJS({ "node_modules/.pnpm/eventemitter3@4.0.7/node_modules/eventemitter3/index.js"(exports, module) { var has = Object.prototype.hasOwnProperty, prefix = "~"; /** * Constructor to create a storage for our `EE` objects. 
* An `Events` instance is a plain object whose properties are event names. * * @constructor * @private */ function Events() {} if (Object.create) { Events.prototype = Object.create(null); if (!new Events().__proto__) prefix = false; } /** * Representation of a single event listener. * * @param {Function} fn The listener function. * @param {*} context The context to invoke the listener with. * @param {Boolean} [once=false] Specify if the listener is a one-time listener. * @constructor * @private */ function EE(fn, context, once) { this.fn = fn; this.context = context; this.once = once || false; } /** * Add a listener for a given event. * * @param {EventEmitter} emitter Reference to the `EventEmitter` instance. * @param {(String|Symbol)} event The event name. * @param {Function} fn The listener function. * @param {*} context The context to invoke the listener with. * @param {Boolean} once Specify if the listener is a one-time listener. * @returns {EventEmitter} * @private */ function addListener(emitter, event, fn, context, once) { if (typeof fn !== "function") throw new TypeError("The listener must be a function"); var listener = new EE(fn, context || emitter, once), evt = prefix ? prefix + event : event; if (!emitter._events[evt]) emitter._events[evt] = listener, emitter._eventsCount++; else if (!emitter._events[evt].fn) emitter._events[evt].push(listener); else emitter._events[evt] = [emitter._events[evt], listener]; return emitter; } /** * Clear event by name. * * @param {EventEmitter} emitter Reference to the `EventEmitter` instance. * @param {(String|Symbol)} evt The Event name. * @private */ function clearEvent(emitter, evt) { if (--emitter._eventsCount === 0) emitter._events = new Events(); else delete emitter._events[evt]; } /** * Minimal `EventEmitter` interface that is molded against the Node.js * `EventEmitter` interface. * * @constructor * @public */ function EventEmitter$1() { this._events = new Events(); this._eventsCount = 0; } /** * Return an array listing the events for which the emitter has registered * listeners. * * @returns {Array} * @public */ EventEmitter$1.prototype.eventNames = function eventNames() { var names = [], events, name; if (this._eventsCount === 0) return names; for (name in events = this._events) if (has.call(events, name)) names.push(prefix ? name.slice(1) : name); if (Object.getOwnPropertySymbols) return names.concat(Object.getOwnPropertySymbols(events)); return names; }; /** * Return the listeners registered for a given event. * * @param {(String|Symbol)} event The event name. * @returns {Array} The registered listeners. * @public */ EventEmitter$1.prototype.listeners = function listeners(event) { var evt = prefix ? prefix + event : event, handlers = this._events[evt]; if (!handlers) return []; if (handlers.fn) return [handlers.fn]; for (var i = 0, l = handlers.length, ee = new Array(l); i < l; i++) ee[i] = handlers[i].fn; return ee; }; /** * Return the number of listeners listening to a given event. * * @param {(String|Symbol)} event The event name. * @returns {Number} The number of listeners. * @public */ EventEmitter$1.prototype.listenerCount = function listenerCount(event) { var evt = prefix ? prefix + event : event, listeners = this._events[evt]; if (!listeners) return 0; if (listeners.fn) return 1; return listeners.length; }; /** * Calls each of the listeners registered for a given event. * * @param {(String|Symbol)} event The event name. * @returns {Boolean} `true` if the event had listeners, else `false`. 
* @public */ EventEmitter$1.prototype.emit = function emit(event, a1, a2, a3, a4, a5) { var evt = prefix ? prefix + event : event; if (!this._events[evt]) return false; var listeners = this._events[evt], len = arguments.length, args, i; if (listeners.fn) { if (listeners.once) this.removeListener(event, listeners.fn, void 0, true); switch (len) { case 1: return listeners.fn.call(listeners.context), true; case 2: return listeners.fn.call(listeners.context, a1), true; case 3: return listeners.fn.call(listeners.context, a1, a2), true; case 4: return listeners.fn.call(listeners.context, a1, a2, a3), true; case 5: return listeners.fn.call(listeners.context, a1, a2, a3, a4), true; case 6: return listeners.fn.call(listeners.context, a1, a2, a3, a4, a5), true; } for (i = 1, args = new Array(len - 1); i < len; i++) args[i - 1] = arguments[i]; listeners.fn.apply(listeners.context, args); } else { var length = listeners.length, j; for (i = 0; i < length; i++) { if (listeners[i].once) this.removeListener(event, listeners[i].fn, void 0, true); switch (len) { case 1: listeners[i].fn.call(listeners[i].context); break; case 2: listeners[i].fn.call(listeners[i].context, a1); break; case 3: listeners[i].fn.call(listeners[i].context, a1, a2); break; case 4: listeners[i].fn.call(listeners[i].context, a1, a2, a3); break; default: if (!args) for (j = 1, args = new Array(len - 1); j < len; j++) args[j - 1] = arguments[j]; listeners[i].fn.apply(listeners[i].context, args); } } } return true; }; /** * Add a listener for a given event. * * @param {(String|Symbol)} event The event name. * @param {Function} fn The listener function. * @param {*} [context=this] The context to invoke the listener with. * @returns {EventEmitter} `this`. * @public */ EventEmitter$1.prototype.on = function on(event, fn, context) { return addListener(this, event, fn, context, false); }; /** * Add a one-time listener for a given event. * * @param {(String|Symbol)} event The event name. * @param {Function} fn The listener function. * @param {*} [context=this] The context to invoke the listener with. * @returns {EventEmitter} `this`. * @public */ EventEmitter$1.prototype.once = function once(event, fn, context) { return addListener(this, event, fn, context, true); }; /** * Remove the listeners of a given event. * * @param {(String|Symbol)} event The event name. * @param {Function} fn Only remove the listeners that match this function. * @param {*} context Only remove the listeners that have this context. * @param {Boolean} once Only remove one-time listeners. * @returns {EventEmitter} `this`. * @public */ EventEmitter$1.prototype.removeListener = function removeListener(event, fn, context, once) { var evt = prefix ? prefix + event : event; if (!this._events[evt]) return this; if (!fn) { clearEvent(this, evt); return this; } var listeners = this._events[evt]; if (listeners.fn) { if (listeners.fn === fn && (!once || listeners.once) && (!context || listeners.context === context)) clearEvent(this, evt); } else { for (var i = 0, events = [], length = listeners.length; i < length; i++) if (listeners[i].fn !== fn || once && !listeners[i].once || context && listeners[i].context !== context) events.push(listeners[i]); if (events.length) this._events[evt] = events.length === 1 ? events[0] : events; else clearEvent(this, evt); } return this; }; /** * Remove all listeners, or those of the specified event. * * @param {(String|Symbol)} [event] The event name. * @returns {EventEmitter} `this`. 
* @public */ EventEmitter$1.prototype.removeAllListeners = function removeAllListeners(event) { var evt; if (event) { evt = prefix ? prefix + event : event; if (this._events[evt]) clearEvent(this, evt); } else { this._events = new Events(); this._eventsCount = 0; } return this; }; EventEmitter$1.prototype.off = EventEmitter$1.prototype.removeListener; EventEmitter$1.prototype.addListener = EventEmitter$1.prototype.on; EventEmitter$1.prefixed = prefix; EventEmitter$1.EventEmitter = EventEmitter$1; if ("undefined" !== typeof module) module.exports = EventEmitter$1; } }); //#endregion //#region node_modules/.pnpm/p-finally@1.0.0/node_modules/p-finally/index.js var require_p_finally = __commonJS({ "node_modules/.pnpm/p-finally@1.0.0/node_modules/p-finally/index.js"(exports, module) { module.exports = (promise, onFinally) => { onFinally = onFinally || (() => {}); return promise.then((val) => new Promise((resolve) => { resolve(onFinally()); }).then(() => val), (err) => new Promise((resolve) => { resolve(onFinally()); }).then(() => { throw err; })); }; } }); //#endregion //#region node_modules/.pnpm/p-timeout@3.2.0/node_modules/p-timeout/index.js var require_p_timeout = __commonJS({ "node_modules/.pnpm/p-timeout@3.2.0/node_modules/p-timeout/index.js"(exports, module) { const pFinally = require_p_finally(); var TimeoutError = class extends Error { constructor(message) { super(message); this.name = "TimeoutError"; } }; const pTimeout = (promise, milliseconds, fallback) => new Promise((resolve, reject) => { if (typeof milliseconds !== "number" || milliseconds < 0) throw new TypeError("Expected `milliseconds` to be a positive number"); if (milliseconds === Infinity) { resolve(promise); return; } const timer = setTimeout(() => { if (typeof fallback === "function") { try { resolve(fallback()); } catch (error) { reject(error); } return; } const message = typeof fallback === "string" ? fallback : `Promise timed out after ${milliseconds} milliseconds`; const timeoutError$1 = fallback instanceof Error ? 
fallback : new TimeoutError(message); if (typeof promise.cancel === "function") promise.cancel(); reject(timeoutError$1); }, milliseconds); pFinally(promise.then(resolve, reject), () => { clearTimeout(timer); }); }); module.exports = pTimeout; module.exports.default = pTimeout; module.exports.TimeoutError = TimeoutError; } }); //#endregion //#region node_modules/.pnpm/p-queue@6.6.2/node_modules/p-queue/dist/lower-bound.js var require_lower_bound = __commonJS({ "node_modules/.pnpm/p-queue@6.6.2/node_modules/p-queue/dist/lower-bound.js"(exports) { Object.defineProperty(exports, "__esModule", { value: true }); function lowerBound(array, value, comparator) { let first = 0; let count = array.length; while (count > 0) { const step = count / 2 | 0; let it = first + step; if (comparator(array[it], value) <= 0) { first = ++it; count -= step + 1; } else count = step; } return first; } exports.default = lowerBound; } }); //#endregion //#region node_modules/.pnpm/p-queue@6.6.2/node_modules/p-queue/dist/priority-queue.js var require_priority_queue = __commonJS({ "node_modules/.pnpm/p-queue@6.6.2/node_modules/p-queue/dist/priority-queue.js"(exports) { Object.defineProperty(exports, "__esModule", { value: true }); const lower_bound_1 = require_lower_bound(); var PriorityQueue = class { constructor() { this._queue = []; } enqueue(run, options) { options = Object.assign({ priority: 0 }, options); const element = { priority: options.priority, run }; if (this.size && this._queue[this.size - 1].priority >= options.priority) { this._queue.push(element); return; } const index = lower_bound_1.default(this._queue, element, (a, b) => b.priority - a.priority); this._queue.splice(index, 0, element); } dequeue() { const item = this._queue.shift(); return item === null || item === void 0 ? void 0 : item.run; } filter(options) { return this._queue.filter((element) => element.priority === options.priority).map((element) => element.run); } get size() { return this._queue.length; } }; exports.default = PriorityQueue; } }); //#endregion //#region node_modules/.pnpm/p-queue@6.6.2/node_modules/p-queue/dist/index.js var require_dist = __commonJS({ "node_modules/.pnpm/p-queue@6.6.2/node_modules/p-queue/dist/index.js"(exports) { Object.defineProperty(exports, "__esModule", { value: true }); const EventEmitter = require_eventemitter3(); const p_timeout_1 = require_p_timeout(); const priority_queue_1 = require_priority_queue(); const empty = () => {}; const timeoutError = new p_timeout_1.TimeoutError(); /** Promise queue with concurrency control. */ var PQueue = class extends EventEmitter { constructor(options) { var _a, _b, _c, _d; super(); this._intervalCount = 0; this._intervalEnd = 0; this._pendingCount = 0; this._resolveEmpty = empty; this._resolveIdle = empty; options = Object.assign({ carryoverConcurrencyCount: false, intervalCap: Infinity, interval: 0, concurrency: Infinity, autoStart: true, queueClass: priority_queue_1.default }, options); if (!(typeof options.intervalCap === "number" && options.intervalCap >= 1)) throw new TypeError(`Expected \`intervalCap\` to be a number from 1 and up, got \`${(_b = (_a = options.intervalCap) === null || _a === void 0 ? void 0 : _a.toString()) !== null && _b !== void 0 ? _b : ""}\` (${typeof options.intervalCap})`); if (options.interval === void 0 || !(Number.isFinite(options.interval) && options.interval >= 0)) throw new TypeError(`Expected \`interval\` to be a finite number >= 0, got \`${(_d = (_c = options.interval) === null || _c === void 0 ? 
void 0 : _c.toString()) !== null && _d !== void 0 ? _d : ""}\` (${typeof options.interval})`); this._carryoverConcurrencyCount = options.carryoverConcurrencyCount; this._isIntervalIgnored = options.intervalCap === Infinity || options.interval === 0; this._intervalCap = options.intervalCap; this._interval = options.interval; this._queue = new options.queueClass(); this._queueClass = options.queueClass; this.concurrency = options.concurrency; this._timeout = options.timeout; this._throwOnTimeout = options.throwOnTimeout === true; this._isPaused = options.autoStart === false; } get _doesIntervalAllowAnother() { return this._isIntervalIgnored || this._intervalCount < this._intervalCap; } get _doesConcurrentAllowAnother() { return this._pendingCount < this._concurrency; } _next() { this._pendingCount--; this._tryToStartAnother(); this.emit("next"); } _resolvePromises() { this._resolveEmpty(); this._resolveEmpty = empty; if (this._pendingCount === 0) { this._resolveIdle(); this._resolveIdle = empty; this.emit("idle"); } } _onResumeInterval() { this._onInterval(); this._initializeIntervalIfNeeded(); this._timeoutId = void 0; } _isIntervalPaused() { const now = Date.now(); if (this._intervalId === void 0) { const delay = this._intervalEnd - now; if (delay < 0) this._intervalCount = this._carryoverConcurrencyCount ? this._pendingCount : 0; else { if (this._timeoutId === void 0) this._timeoutId = setTimeout(() => { this._onResumeInterval(); }, delay); return true; } } return false; } _tryToStartAnother() { if (this._queue.size === 0) { if (this._intervalId) clearInterval(this._intervalId); this._intervalId = void 0; this._resolvePromises(); return false; } if (!this._isPaused) { const canInitializeInterval = !this._isIntervalPaused(); if (this._doesIntervalAllowAnother && this._doesConcurrentAllowAnother) { const job = this._queue.dequeue(); if (!job) return false; this.emit("active"); job(); if (canInitializeInterval) this._initializeIntervalIfNeeded(); return true; } } return false; } _initializeIntervalIfNeeded() { if (this._isIntervalIgnored || this._intervalId !== void 0) return; this._intervalId = setInterval(() => { this._onInterval(); }, this._interval); this._intervalEnd = Date.now() + this._interval; } _onInterval() { if (this._intervalCount === 0 && this._pendingCount === 0 && this._intervalId) { clearInterval(this._intervalId); this._intervalId = void 0; } this._intervalCount = this._carryoverConcurrencyCount ? this._pendingCount : 0; this._processQueue(); } /** Executes all queued functions until it reaches the limit. */ _processQueue() { while (this._tryToStartAnother()); } get concurrency() { return this._concurrency; } set concurrency(newConcurrency) { if (!(typeof newConcurrency === "number" && newConcurrency >= 1)) throw new TypeError(`Expected \`concurrency\` to be a number from 1 and up, got \`${newConcurrency}\` (${typeof newConcurrency})`); this._concurrency = newConcurrency; this._processQueue(); } /** Adds a sync or async task to the queue. Always returns a promise. */ async add(fn, options = {}) { return new Promise((resolve, reject) => { const run = async () => { this._pendingCount++; this._intervalCount++; try { const operation = this._timeout === void 0 && options.timeout === void 0 ? fn() : p_timeout_1.default(Promise.resolve(fn()), options.timeout === void 0 ? this._timeout : options.timeout, () => { if (options.throwOnTimeout === void 0 ? 
this._throwOnTimeout : options.throwOnTimeout) reject(timeoutError); return void 0; }); resolve(await operation); } catch (error) { reject(error); } this._next(); }; this._queue.enqueue(run, options); this._tryToStartAnother(); this.emit("add"); }); } /** Same as `.add()`, but accepts an array of sync or async functions. @returns A promise that resolves when all functions are resolved. */ async addAll(functions, options) { return Promise.all(functions.map(async (function_) => this.add(function_, options))); } /** Start (or resume) executing enqueued tasks within concurrency limit. No need to call this if queue is not paused (via `options.autoStart = false` or by `.pause()` method.) */ start() { if (!this._isPaused) return this; this._isPaused = false; this._processQueue(); return this; } /** Put queue execution on hold. */ pause() { this._isPaused = true; } /** Clear the queue. */ clear() { this._queue = new this._queueClass(); } /** Can be called multiple times. Useful if you for example add additional items at a later time. @returns A promise that settles when the queue becomes empty. */ async onEmpty() { if (this._queue.size === 0) return; return new Promise((resolve) => { const existingResolve = this._resolveEmpty; this._resolveEmpty = () => { existingResolve(); resolve(); }; }); } /** The difference with `.onEmpty` is that `.onIdle` guarantees that all work from the queue has finished. `.onEmpty` merely signals that the queue is empty, but it could mean that some promises haven't completed yet. @returns A promise that settles when the queue becomes empty, and all promises have completed; `queue.size === 0 && queue.pending === 0`. */ async onIdle() { if (this._pendingCount === 0 && this._queue.size === 0) return; return new Promise((resolve) => { const existingResolve = this._resolveIdle; this._resolveIdle = () => { existingResolve(); resolve(); }; }); } /** Size of the queue. */ get size() { return this._queue.size; } /** Size of the queue, filtered by the given options. For example, this can be used to find the number of items remaining in the queue with a specific priority level. */ sizeBy(options) { return this._queue.filter(options).length; } /** Number of pending promises. */ get pending() { return this._pendingCount; } /** Whether the queue is currently paused. */ get isPaused() { return this._isPaused; } get timeout() { return this._timeout; } /** Set the timeout for future operations. 
*/ set timeout(milliseconds) { this._timeout = milliseconds; } }; exports.default = PQueue; } }); //#endregion //#region node_modules/.pnpm/langsmith@0.3.30_openai@5.3.0_ws@8.18.2_zod@3.25.57_/node_modules/langsmith/dist/utils/env.js let globalEnv; const isBrowser$1 = () => typeof window !== "undefined" && typeof window.document !== "undefined"; const isWebWorker$1 = () => typeof globalThis === "object" && globalThis.constructor && globalThis.constructor.name === "DedicatedWorkerGlobalScope"; const isJsDom$1 = () => typeof window !== "undefined" && window.name === "nodejs" || typeof navigator !== "undefined" && (navigator.userAgent.includes("Node.js") || navigator.userAgent.includes("jsdom")); const isDeno$1 = () => typeof Deno !== "undefined"; const isNode$1 = () => typeof process !== "undefined" && typeof process.versions !== "undefined" && typeof process.versions.node !== "undefined" && !isDeno$1(); const getEnv$1 = () => { if (globalEnv) return globalEnv; if (isBrowser$1()) globalEnv = "browser"; else if (isNode$1()) globalEnv = "node"; else if (isWebWorker$1()) globalEnv = "webworker"; else if (isJsDom$1()) globalEnv = "jsdom"; else if (isDeno$1()) globalEnv = "deno"; else globalEnv = "other"; return globalEnv; }; let runtimeEnvironment$1; function getRuntimeEnvironment$1() { if (runtimeEnvironment$1 === void 0) { const env = getEnv$1(); const releaseEnv = getShas(); runtimeEnvironment$1 = { library: "langsmith", runtime: env, sdk: "langsmith-js", sdk_version: __version__, ...releaseEnv }; } return runtimeEnvironment$1; } /** * Retrieves the LangChain-specific metadata from the current runtime environment. * * @returns {Record} * - A record of LangChain-specific metadata environment variables. */ function getLangChainEnvVarsMetadata() { const allEnvVars = getEnvironmentVariables() || {}; const envVars = {}; const excluded = [ "LANGCHAIN_API_KEY", "LANGCHAIN_ENDPOINT", "LANGCHAIN_TRACING_V2", "LANGCHAIN_PROJECT", "LANGCHAIN_SESSION", "LANGSMITH_API_KEY", "LANGSMITH_ENDPOINT", "LANGSMITH_TRACING_V2", "LANGSMITH_PROJECT", "LANGSMITH_SESSION" ]; for (const [key, value] of Object.entries(allEnvVars)) if ((key.startsWith("LANGCHAIN_") || key.startsWith("LANGSMITH_")) && typeof value === "string" && !excluded.includes(key) && !key.toLowerCase().includes("key") && !key.toLowerCase().includes("secret") && !key.toLowerCase().includes("token")) if (key === "LANGCHAIN_REVISION_ID") envVars["revision_id"] = value; else envVars[key] = value; return envVars; } /** * Retrieves the environment variables from the current runtime environment. * * This function is designed to operate in a variety of JS environments, * including Node.js, Deno, browsers, etc. * * @returns {Record | undefined} * - A record of environment variables if available. * - `undefined` if the environment does not support or allows access to environment variables. */ function getEnvironmentVariables() { try { if (typeof process !== "undefined" && process.env) return Object.entries(process.env).reduce((acc, [key, value]) => { acc[key] = String(value); return acc; }, {}); return void 0; } catch (e) { return void 0; } } function getEnvironmentVariable$1(name) { try { return typeof process !== "undefined" ? 
process.env?.[name] : void 0; } catch (e) { return void 0; } } function getLangSmithEnvironmentVariable(name) { return getEnvironmentVariable$1(`LANGSMITH_${name}`) || getEnvironmentVariable$1(`LANGCHAIN_${name}`); } let cachedCommitSHAs; /** * Get the Git commit SHA from common environment variables * used by different CI/CD platforms. * @returns {string | undefined} The Git commit SHA or undefined if not found. */ function getShas() { if (cachedCommitSHAs !== void 0) return cachedCommitSHAs; const common_release_envs = [ "VERCEL_GIT_COMMIT_SHA", "NEXT_PUBLIC_VERCEL_GIT_COMMIT_SHA", "COMMIT_REF", "RENDER_GIT_COMMIT", "CI_COMMIT_SHA", "CIRCLE_SHA1", "CF_PAGES_COMMIT_SHA", "REACT_APP_GIT_SHA", "SOURCE_VERSION", "GITHUB_SHA", "TRAVIS_COMMIT", "GIT_COMMIT", "BUILD_VCS_NUMBER", "bamboo_planRepository_revision", "Build.SourceVersion", "BITBUCKET_COMMIT", "DRONE_COMMIT_SHA", "SEMAPHORE_GIT_SHA", "BUILDKITE_COMMIT" ]; const shas = {}; for (const env of common_release_envs) { const envVar = getEnvironmentVariable$1(env); if (envVar !== void 0) shas[env] = envVar; } cachedCommitSHAs = shas; return shas; } //#endregion //#region node_modules/.pnpm/langsmith@0.3.30_openai@5.3.0_ws@8.18.2_zod@3.25.57_/node_modules/langsmith/dist/singletons/fetch.js const DEFAULT_FETCH_IMPLEMENTATION = (...args) => fetch(...args); const LANGSMITH_FETCH_IMPLEMENTATION_KEY = Symbol.for("ls:fetch_implementation"); const _globalFetchImplementationIsNodeFetch = () => { const fetchImpl = globalThis[LANGSMITH_FETCH_IMPLEMENTATION_KEY]; if (!fetchImpl) return false; return typeof fetchImpl === "function" && "Headers" in fetchImpl && "Request" in fetchImpl && "Response" in fetchImpl; }; /** * @internal */ const _getFetchImplementation = (debug$5) => { return async (...args) => { if (debug$5 || getLangSmithEnvironmentVariable("DEBUG") === "true") { const [url, options] = args; console.log(`→ ${options?.method || "GET"} ${url}`); } const res = await (globalThis[LANGSMITH_FETCH_IMPLEMENTATION_KEY] ?? DEFAULT_FETCH_IMPLEMENTATION)(...args); if (debug$5 || getLangSmithEnvironmentVariable("DEBUG") === "true") console.log(`← ${res.status} ${res.statusText} ${res.url}`); return res; }; }; //#endregion //#region node_modules/.pnpm/langsmith@0.3.30_openai@5.3.0_ws@8.18.2_zod@3.25.57_/node_modules/langsmith/dist/utils/async_caller.js var import_p_retry$2 = __toESM(require_p_retry(), 1); var import_dist$2 = __toESM(require_dist(), 1); const STATUS_NO_RETRY$1 = [ 400, 401, 403, 404, 405, 406, 407, 408 ]; const STATUS_IGNORE = [409]; /** * A class that can be used to make async calls with concurrency and retry logic. * * This is useful for making calls to any kind of "expensive" external resource, * be it because it's rate-limited, subject to network issues, etc. * * Concurrent calls are limited by the `maxConcurrency` parameter, which defaults * to `Infinity`. This means that by default, all calls will be made in parallel. * * Retries are limited by the `maxRetries` parameter, which defaults to 6. This * means that by default, each call will be retried up to 6 times, with an * exponential backoff between each attempt. 
*/ var AsyncCaller$1 = class { constructor(params) { Object.defineProperty(this, "maxConcurrency", { enumerable: true, configurable: true, writable: true, value: void 0 }); Object.defineProperty(this, "maxRetries", { enumerable: true, configurable: true, writable: true, value: void 0 }); Object.defineProperty(this, "queue", { enumerable: true, configurable: true, writable: true, value: void 0 }); Object.defineProperty(this, "onFailedResponseHook", { enumerable: true, configurable: true, writable: true, value: void 0 }); Object.defineProperty(this, "debug", { enumerable: true, configurable: true, writable: true, value: void 0 }); this.maxConcurrency = params.maxConcurrency ?? Infinity; this.maxRetries = params.maxRetries ?? 6; this.debug = params.debug; if ("default" in import_dist$2.default) this.queue = new import_dist$2.default.default({ concurrency: this.maxConcurrency }); else this.queue = new import_dist$2.default({ concurrency: this.maxConcurrency }); this.onFailedResponseHook = params?.onFailedResponseHook; } call(callable, ...args) { const onFailedResponseHook = this.onFailedResponseHook; return this.queue.add(() => (0, import_p_retry$2.default)(() => callable(...args).catch((error) => { if (error instanceof Error) throw error; else throw new Error(error); }), { async onFailedAttempt(error) { if (error.message.startsWith("Cancel") || error.message.startsWith("TimeoutError") || error.message.startsWith("AbortError")) throw error; if (error?.code === "ECONNABORTED") throw error; const response = error?.response; const status = response?.status; if (status) { if (STATUS_NO_RETRY$1.includes(+status)) throw error; else if (STATUS_IGNORE.includes(+status)) return; if (onFailedResponseHook) await onFailedResponseHook(response); } }, retries: this.maxRetries, randomize: true }), { throwOnTimeout: true }); } callWithOptions(options, callable, ...args) { if (options.signal) return Promise.race([this.call(callable, ...args), new Promise((_, reject) => { options.signal?.addEventListener("abort", () => { reject(new Error("AbortError")); }); })]); return this.call(callable, ...args); } fetch(...args) { return this.call(() => _getFetchImplementation(this.debug)(...args).then((res) => res.ok ? res : Promise.reject(res))); } }; //#endregion //#region node_modules/.pnpm/langsmith@0.3.30_openai@5.3.0_ws@8.18.2_zod@3.25.57_/node_modules/langsmith/dist/utils/messages.js function isLangChainMessage(message) { return typeof message?._getType === "function"; } function convertLangChainMessageToExample(message) { const converted = { type: message._getType(), data: { content: message.content } }; if (message?.additional_kwargs && Object.keys(message.additional_kwargs).length > 0) converted.data.additional_kwargs = { ...message.additional_kwargs }; return converted; } //#endregion //#region node_modules/.pnpm/langsmith@0.3.30_openai@5.3.0_ws@8.18.2_zod@3.25.57_/node_modules/langsmith/dist/utils/_uuid.js function assertUuid(str, which) { if (!validate_default(str)) { const msg = which !== void 0 ? 
`Invalid UUID for ${which}: ${str}` : `Invalid UUID: ${str}`; throw new Error(msg); } return str; } //#endregion //#region node_modules/.pnpm/langsmith@0.3.30_openai@5.3.0_ws@8.18.2_zod@3.25.57_/node_modules/langsmith/dist/utils/warn.js const warnedMessages = {}; function warnOnce(message) { if (!warnedMessages[message]) { console.warn(message); warnedMessages[message] = true; } } //#endregion //#region node_modules/.pnpm/semver@7.7.2/node_modules/semver/internal/constants.js var require_constants = __commonJS({ "node_modules/.pnpm/semver@7.7.2/node_modules/semver/internal/constants.js"(exports, module) { const SEMVER_SPEC_VERSION = "2.0.0"; const MAX_LENGTH$2 = 256; const MAX_SAFE_INTEGER$1 = Number.MAX_SAFE_INTEGER || 9007199254740991; const MAX_SAFE_COMPONENT_LENGTH$1 = 16; const MAX_SAFE_BUILD_LENGTH$1 = MAX_LENGTH$2 - 6; const RELEASE_TYPES = [ "major", "premajor", "minor", "preminor", "patch", "prepatch", "prerelease" ]; module.exports = { MAX_LENGTH: MAX_LENGTH$2, MAX_SAFE_COMPONENT_LENGTH: MAX_SAFE_COMPONENT_LENGTH$1, MAX_SAFE_BUILD_LENGTH: MAX_SAFE_BUILD_LENGTH$1, MAX_SAFE_INTEGER: MAX_SAFE_INTEGER$1, RELEASE_TYPES, SEMVER_SPEC_VERSION, FLAG_INCLUDE_PRERELEASE: 1, FLAG_LOOSE: 2 }; } }); //#endregion //#region node_modules/.pnpm/semver@7.7.2/node_modules/semver/internal/debug.js var require_debug = __commonJS({ "node_modules/.pnpm/semver@7.7.2/node_modules/semver/internal/debug.js"(exports, module) { const debug$4 = typeof process === "object" && process.env && process.env.NODE_DEBUG && /\bsemver\b/i.test(process.env.NODE_DEBUG) ? (...args) => console.error("SEMVER", ...args) : () => {}; module.exports = debug$4; } }); //#endregion //#region node_modules/.pnpm/semver@7.7.2/node_modules/semver/internal/re.js var require_re = __commonJS({ "node_modules/.pnpm/semver@7.7.2/node_modules/semver/internal/re.js"(exports, module) { const { MAX_SAFE_COMPONENT_LENGTH, MAX_SAFE_BUILD_LENGTH, MAX_LENGTH: MAX_LENGTH$1 } = require_constants(); const debug$3 = require_debug(); exports = module.exports = {}; const re$4 = exports.re = []; const safeRe = exports.safeRe = []; const src = exports.src = []; const safeSrc = exports.safeSrc = []; const t$4 = exports.t = {}; let R = 0; const LETTERDASHNUMBER = "[a-zA-Z0-9-]"; const safeRegexReplacements = [ ["\\s", 1], ["\\d", MAX_LENGTH$1], [LETTERDASHNUMBER, MAX_SAFE_BUILD_LENGTH] ]; const makeSafeRegex = (value) => { for (const [token, max] of safeRegexReplacements) value = value.split(`${token}*`).join(`${token}{0,${max}}`).split(`${token}+`).join(`${token}{1,${max}}`); return value; }; const createToken = (name, value, isGlobal) => { const safe = makeSafeRegex(value); const index = R++; debug$3(name, index, value); t$4[name] = index; src[index] = value; safeSrc[index] = safe; re$4[index] = new RegExp(value, isGlobal ? "g" : void 0); safeRe[index] = new RegExp(safe, isGlobal ? 
"g" : void 0); }; createToken("NUMERICIDENTIFIER", "0|[1-9]\\d*"); createToken("NUMERICIDENTIFIERLOOSE", "\\d+"); createToken("NONNUMERICIDENTIFIER", `\\d*[a-zA-Z-]${LETTERDASHNUMBER}*`); createToken("MAINVERSION", `(${src[t$4.NUMERICIDENTIFIER]})\\.(${src[t$4.NUMERICIDENTIFIER]})\\.(${src[t$4.NUMERICIDENTIFIER]})`); createToken("MAINVERSIONLOOSE", `(${src[t$4.NUMERICIDENTIFIERLOOSE]})\\.(${src[t$4.NUMERICIDENTIFIERLOOSE]})\\.(${src[t$4.NUMERICIDENTIFIERLOOSE]})`); createToken("PRERELEASEIDENTIFIER", `(?:${src[t$4.NONNUMERICIDENTIFIER]}|${src[t$4.NUMERICIDENTIFIER]})`); createToken("PRERELEASEIDENTIFIERLOOSE", `(?:${src[t$4.NONNUMERICIDENTIFIER]}|${src[t$4.NUMERICIDENTIFIERLOOSE]})`); createToken("PRERELEASE", `(?:-(${src[t$4.PRERELEASEIDENTIFIER]}(?:\\.${src[t$4.PRERELEASEIDENTIFIER]})*))`); createToken("PRERELEASELOOSE", `(?:-?(${src[t$4.PRERELEASEIDENTIFIERLOOSE]}(?:\\.${src[t$4.PRERELEASEIDENTIFIERLOOSE]})*))`); createToken("BUILDIDENTIFIER", `${LETTERDASHNUMBER}+`); createToken("BUILD", `(?:\\+(${src[t$4.BUILDIDENTIFIER]}(?:\\.${src[t$4.BUILDIDENTIFIER]})*))`); createToken("FULLPLAIN", `v?${src[t$4.MAINVERSION]}${src[t$4.PRERELEASE]}?${src[t$4.BUILD]}?`); createToken("FULL", `^${src[t$4.FULLPLAIN]}$`); createToken("LOOSEPLAIN", `[v=\\s]*${src[t$4.MAINVERSIONLOOSE]}${src[t$4.PRERELEASELOOSE]}?${src[t$4.BUILD]}?`); createToken("LOOSE", `^${src[t$4.LOOSEPLAIN]}$`); createToken("GTLT", "((?:<|>)?=?)"); createToken("XRANGEIDENTIFIERLOOSE", `${src[t$4.NUMERICIDENTIFIERLOOSE]}|x|X|\\*`); createToken("XRANGEIDENTIFIER", `${src[t$4.NUMERICIDENTIFIER]}|x|X|\\*`); createToken("XRANGEPLAIN", `[v=\\s]*(${src[t$4.XRANGEIDENTIFIER]})(?:\\.(${src[t$4.XRANGEIDENTIFIER]})(?:\\.(${src[t$4.XRANGEIDENTIFIER]})(?:${src[t$4.PRERELEASE]})?${src[t$4.BUILD]}?)?)?`); createToken("XRANGEPLAINLOOSE", `[v=\\s]*(${src[t$4.XRANGEIDENTIFIERLOOSE]})(?:\\.(${src[t$4.XRANGEIDENTIFIERLOOSE]})(?:\\.(${src[t$4.XRANGEIDENTIFIERLOOSE]})(?:${src[t$4.PRERELEASELOOSE]})?${src[t$4.BUILD]}?)?)?`); createToken("XRANGE", `^${src[t$4.GTLT]}\\s*${src[t$4.XRANGEPLAIN]}$`); createToken("XRANGELOOSE", `^${src[t$4.GTLT]}\\s*${src[t$4.XRANGEPLAINLOOSE]}$`); createToken("COERCEPLAIN", `(^|[^\\d])(\\d{1,${MAX_SAFE_COMPONENT_LENGTH}})(?:\\.(\\d{1,${MAX_SAFE_COMPONENT_LENGTH}}))?(?:\\.(\\d{1,${MAX_SAFE_COMPONENT_LENGTH}}))?`); createToken("COERCE", `${src[t$4.COERCEPLAIN]}(?:$|[^\\d])`); createToken("COERCEFULL", src[t$4.COERCEPLAIN] + `(?:${src[t$4.PRERELEASE]})?(?:${src[t$4.BUILD]})?(?:$|[^\\d])`); createToken("COERCERTL", src[t$4.COERCE], true); createToken("COERCERTLFULL", src[t$4.COERCEFULL], true); createToken("LONETILDE", "(?:~>?)"); createToken("TILDETRIM", `(\\s*)${src[t$4.LONETILDE]}\\s+`, true); exports.tildeTrimReplace = "$1~"; createToken("TILDE", `^${src[t$4.LONETILDE]}${src[t$4.XRANGEPLAIN]}$`); createToken("TILDELOOSE", `^${src[t$4.LONETILDE]}${src[t$4.XRANGEPLAINLOOSE]}$`); createToken("LONECARET", "(?:\\^)"); createToken("CARETTRIM", `(\\s*)${src[t$4.LONECARET]}\\s+`, true); exports.caretTrimReplace = "$1^"; createToken("CARET", `^${src[t$4.LONECARET]}${src[t$4.XRANGEPLAIN]}$`); createToken("CARETLOOSE", `^${src[t$4.LONECARET]}${src[t$4.XRANGEPLAINLOOSE]}$`); createToken("COMPARATORLOOSE", `^${src[t$4.GTLT]}\\s*(${src[t$4.LOOSEPLAIN]})$|^$`); createToken("COMPARATOR", `^${src[t$4.GTLT]}\\s*(${src[t$4.FULLPLAIN]})$|^$`); createToken("COMPARATORTRIM", `(\\s*)${src[t$4.GTLT]}\\s*(${src[t$4.LOOSEPLAIN]}|${src[t$4.XRANGEPLAIN]})`, true); exports.comparatorTrimReplace = "$1$2$3"; createToken("HYPHENRANGE", 
`^\\s*(${src[t$4.XRANGEPLAIN]})\\s+-\\s+(${src[t$4.XRANGEPLAIN]})\\s*$`); createToken("HYPHENRANGELOOSE", `^\\s*(${src[t$4.XRANGEPLAINLOOSE]})\\s+-\\s+(${src[t$4.XRANGEPLAINLOOSE]})\\s*$`); createToken("STAR", "(<|>)?=?\\s*\\*"); createToken("GTE0", "^\\s*>=\\s*0\\.0\\.0\\s*$"); createToken("GTE0PRE", "^\\s*>=\\s*0\\.0\\.0-0\\s*$"); } }); //#endregion //#region node_modules/.pnpm/semver@7.7.2/node_modules/semver/internal/parse-options.js var require_parse_options = __commonJS({ "node_modules/.pnpm/semver@7.7.2/node_modules/semver/internal/parse-options.js"(exports, module) { const looseOption = Object.freeze({ loose: true }); const emptyOpts = Object.freeze({}); const parseOptions$3 = (options) => { if (!options) return emptyOpts; if (typeof options !== "object") return looseOption; return options; }; module.exports = parseOptions$3; } }); //#endregion //#region node_modules/.pnpm/semver@7.7.2/node_modules/semver/internal/identifiers.js var require_identifiers = __commonJS({ "node_modules/.pnpm/semver@7.7.2/node_modules/semver/internal/identifiers.js"(exports, module) { const numeric = /^[0-9]+$/; const compareIdentifiers$1 = (a, b) => { const anum = numeric.test(a); const bnum = numeric.test(b); if (anum && bnum) { a = +a; b = +b; } return a === b ? 0 : anum && !bnum ? -1 : bnum && !anum ? 1 : a < b ? -1 : 1; }; const rcompareIdentifiers = (a, b) => compareIdentifiers$1(b, a); module.exports = { compareIdentifiers: compareIdentifiers$1, rcompareIdentifiers }; } }); //#endregion //#region node_modules/.pnpm/semver@7.7.2/node_modules/semver/classes/semver.js var require_semver$1 = __commonJS({ "node_modules/.pnpm/semver@7.7.2/node_modules/semver/classes/semver.js"(exports, module) { const debug$2 = require_debug(); const { MAX_LENGTH, MAX_SAFE_INTEGER } = require_constants(); const { safeRe: re$3, t: t$3 } = require_re(); const parseOptions$2 = require_parse_options(); const { compareIdentifiers } = require_identifiers(); var SemVer$15 = class SemVer$15 { constructor(version, options) { options = parseOptions$2(options); if (version instanceof SemVer$15) if (version.loose === !!options.loose && version.includePrerelease === !!options.includePrerelease) return version; else version = version.version; else if (typeof version !== "string") throw new TypeError(`Invalid version. Must be a string. Got type "${typeof version}".`); if (version.length > MAX_LENGTH) throw new TypeError(`version is longer than ${MAX_LENGTH} characters`); debug$2("SemVer", version, options); this.options = options; this.loose = !!options.loose; this.includePrerelease = !!options.includePrerelease; const m = version.trim().match(options.loose ? re$3[t$3.LOOSE] : re$3[t$3.FULL]); if (!m) throw new TypeError(`Invalid Version: ${version}`); this.raw = version; this.major = +m[1]; this.minor = +m[2]; this.patch = +m[3]; if (this.major > MAX_SAFE_INTEGER || this.major < 0) throw new TypeError("Invalid major version"); if (this.minor > MAX_SAFE_INTEGER || this.minor < 0) throw new TypeError("Invalid minor version"); if (this.patch > MAX_SAFE_INTEGER || this.patch < 0) throw new TypeError("Invalid patch version"); if (!m[4]) this.prerelease = []; else this.prerelease = m[4].split(".").map((id) => { if (/^[0-9]+$/.test(id)) { const num = +id; if (num >= 0 && num < MAX_SAFE_INTEGER) return num; } return id; }); this.build = m[5] ? 
m[5].split(".") : []; this.format(); } format() { this.version = `${this.major}.${this.minor}.${this.patch}`; if (this.prerelease.length) this.version += `-${this.prerelease.join(".")}`; return this.version; } toString() { return this.version; } compare(other) { debug$2("SemVer.compare", this.version, this.options, other); if (!(other instanceof SemVer$15)) { if (typeof other === "string" && other === this.version) return 0; other = new SemVer$15(other, this.options); } if (other.version === this.version) return 0; return this.compareMain(other) || this.comparePre(other); } compareMain(other) { if (!(other instanceof SemVer$15)) other = new SemVer$15(other, this.options); return compareIdentifiers(this.major, other.major) || compareIdentifiers(this.minor, other.minor) || compareIdentifiers(this.patch, other.patch); } comparePre(other) { if (!(other instanceof SemVer$15)) other = new SemVer$15(other, this.options); if (this.prerelease.length && !other.prerelease.length) return -1; else if (!this.prerelease.length && other.prerelease.length) return 1; else if (!this.prerelease.length && !other.prerelease.length) return 0; let i = 0; do { const a = this.prerelease[i]; const b = other.prerelease[i]; debug$2("prerelease compare", i, a, b); if (a === void 0 && b === void 0) return 0; else if (b === void 0) return 1; else if (a === void 0) return -1; else if (a === b) continue; else return compareIdentifiers(a, b); } while (++i); } compareBuild(other) { if (!(other instanceof SemVer$15)) other = new SemVer$15(other, this.options); let i = 0; do { const a = this.build[i]; const b = other.build[i]; debug$2("build compare", i, a, b); if (a === void 0 && b === void 0) return 0; else if (b === void 0) return 1; else if (a === void 0) return -1; else if (a === b) continue; else return compareIdentifiers(a, b); } while (++i); } inc(release, identifier, identifierBase) { if (release.startsWith("pre")) { if (!identifier && identifierBase === false) throw new Error("invalid increment argument: identifier is empty"); if (identifier) { const match = `-${identifier}`.match(this.options.loose ? re$3[t$3.PRERELEASELOOSE] : re$3[t$3.PRERELEASE]); if (!match || match[1] !== identifier) throw new Error(`invalid identifier: ${identifier}`); } } switch (release) { case "premajor": this.prerelease.length = 0; this.patch = 0; this.minor = 0; this.major++; this.inc("pre", identifier, identifierBase); break; case "preminor": this.prerelease.length = 0; this.patch = 0; this.minor++; this.inc("pre", identifier, identifierBase); break; case "prepatch": this.prerelease.length = 0; this.inc("patch", identifier, identifierBase); this.inc("pre", identifier, identifierBase); break; case "prerelease": if (this.prerelease.length === 0) this.inc("patch", identifier, identifierBase); this.inc("pre", identifier, identifierBase); break; case "release": if (this.prerelease.length === 0) throw new Error(`version ${this.raw} is not a prerelease`); this.prerelease.length = 0; break; case "major": if (this.minor !== 0 || this.patch !== 0 || this.prerelease.length === 0) this.major++; this.minor = 0; this.patch = 0; this.prerelease = []; break; case "minor": if (this.patch !== 0 || this.prerelease.length === 0) this.minor++; this.patch = 0; this.prerelease = []; break; case "patch": if (this.prerelease.length === 0) this.patch++; this.prerelease = []; break; case "pre": { const base = Number(identifierBase) ? 
1 : 0; if (this.prerelease.length === 0) this.prerelease = [base]; else { let i = this.prerelease.length; while (--i >= 0) if (typeof this.prerelease[i] === "number") { this.prerelease[i]++; i = -2; } if (i === -1) { if (identifier === this.prerelease.join(".") && identifierBase === false) throw new Error("invalid increment argument: identifier already exists"); this.prerelease.push(base); } } if (identifier) { let prerelease$2 = [identifier, base]; if (identifierBase === false) prerelease$2 = [identifier]; if (compareIdentifiers(this.prerelease[0], identifier) === 0) { if (isNaN(this.prerelease[1])) this.prerelease = prerelease$2; } else this.prerelease = prerelease$2; } break; } default: throw new Error(`invalid increment argument: ${release}`); } this.raw = this.format(); if (this.build.length) this.raw += `+${this.build.join(".")}`; return this; } }; module.exports = SemVer$15; } }); //#endregion //#region node_modules/.pnpm/semver@7.7.2/node_modules/semver/functions/parse.js var require_parse = __commonJS({ "node_modules/.pnpm/semver@7.7.2/node_modules/semver/functions/parse.js"(exports, module) { const SemVer$14 = require_semver$1(); const parse$6 = (version, options, throwErrors = false) => { if (version instanceof SemVer$14) return version; try { return new SemVer$14(version, options); } catch (er) { if (!throwErrors) return null; throw er; } }; module.exports = parse$6; } }); //#endregion //#region node_modules/.pnpm/semver@7.7.2/node_modules/semver/functions/valid.js var require_valid$1 = __commonJS({ "node_modules/.pnpm/semver@7.7.2/node_modules/semver/functions/valid.js"(exports, module) { const parse$5 = require_parse(); const valid$1 = (version, options) => { const v = parse$5(version, options); return v ? v.version : null; }; module.exports = valid$1; } }); //#endregion //#region node_modules/.pnpm/semver@7.7.2/node_modules/semver/functions/clean.js var require_clean = __commonJS({ "node_modules/.pnpm/semver@7.7.2/node_modules/semver/functions/clean.js"(exports, module) { const parse$4 = require_parse(); const clean$1 = (version, options) => { const s = parse$4(version.trim().replace(/^[=v]+/, ""), options); return s ? s.version : null; }; module.exports = clean$1; } }); //#endregion //#region node_modules/.pnpm/semver@7.7.2/node_modules/semver/functions/inc.js var require_inc = __commonJS({ "node_modules/.pnpm/semver@7.7.2/node_modules/semver/functions/inc.js"(exports, module) { const SemVer$13 = require_semver$1(); const inc$1 = (version, release, options, identifier, identifierBase) => { if (typeof options === "string") { identifierBase = identifier; identifier = options; options = void 0; } try { return new SemVer$13(version instanceof SemVer$13 ? version.version : version, options).inc(release, identifier, identifierBase).version; } catch (er) { return null; } }; module.exports = inc$1; } }); //#endregion //#region node_modules/.pnpm/semver@7.7.2/node_modules/semver/functions/diff.js var require_diff = __commonJS({ "node_modules/.pnpm/semver@7.7.2/node_modules/semver/functions/diff.js"(exports, module) { const parse$3 = require_parse(); const diff$1 = (version1, version2) => { const v1 = parse$3(version1, null, true); const v2 = parse$3(version2, null, true); const comparison = v1.compare(v2); if (comparison === 0) return null; const v1Higher = comparison > 0; const highVersion = v1Higher ? v1 : v2; const lowVersion = v1Higher ? 
v2 : v1; const highHasPre = !!highVersion.prerelease.length; const lowHasPre = !!lowVersion.prerelease.length; if (lowHasPre && !highHasPre) { if (!lowVersion.patch && !lowVersion.minor) return "major"; if (lowVersion.compareMain(highVersion) === 0) { if (lowVersion.minor && !lowVersion.patch) return "minor"; return "patch"; } } const prefix$1 = highHasPre ? "pre" : ""; if (v1.major !== v2.major) return prefix$1 + "major"; if (v1.minor !== v2.minor) return prefix$1 + "minor"; if (v1.patch !== v2.patch) return prefix$1 + "patch"; return "prerelease"; }; module.exports = diff$1; } }); //#endregion //#region node_modules/.pnpm/semver@7.7.2/node_modules/semver/functions/major.js var require_major = __commonJS({ "node_modules/.pnpm/semver@7.7.2/node_modules/semver/functions/major.js"(exports, module) { const SemVer$12 = require_semver$1(); const major$1 = (a, loose) => new SemVer$12(a, loose).major; module.exports = major$1; } }); //#endregion //#region node_modules/.pnpm/semver@7.7.2/node_modules/semver/functions/minor.js var require_minor = __commonJS({ "node_modules/.pnpm/semver@7.7.2/node_modules/semver/functions/minor.js"(exports, module) { const SemVer$11 = require_semver$1(); const minor$1 = (a, loose) => new SemVer$11(a, loose).minor; module.exports = minor$1; } }); //#endregion //#region node_modules/.pnpm/semver@7.7.2/node_modules/semver/functions/patch.js var require_patch = __commonJS({ "node_modules/.pnpm/semver@7.7.2/node_modules/semver/functions/patch.js"(exports, module) { const SemVer$10 = require_semver$1(); const patch$1 = (a, loose) => new SemVer$10(a, loose).patch; module.exports = patch$1; } }); //#endregion //#region node_modules/.pnpm/semver@7.7.2/node_modules/semver/functions/prerelease.js var require_prerelease = __commonJS({ "node_modules/.pnpm/semver@7.7.2/node_modules/semver/functions/prerelease.js"(exports, module) { const parse$2 = require_parse(); const prerelease$1 = (version, options) => { const parsed = parse$2(version, options); return parsed && parsed.prerelease.length ? 
parsed.prerelease : null; }; module.exports = prerelease$1; } }); //#endregion //#region node_modules/.pnpm/semver@7.7.2/node_modules/semver/functions/compare.js var require_compare = __commonJS({ "node_modules/.pnpm/semver@7.7.2/node_modules/semver/functions/compare.js"(exports, module) { const SemVer$9 = require_semver$1(); const compare$12 = (a, b, loose) => new SemVer$9(a, loose).compare(new SemVer$9(b, loose)); module.exports = compare$12; } }); //#endregion //#region node_modules/.pnpm/semver@7.7.2/node_modules/semver/functions/rcompare.js var require_rcompare = __commonJS({ "node_modules/.pnpm/semver@7.7.2/node_modules/semver/functions/rcompare.js"(exports, module) { const compare$11 = require_compare(); const rcompare$1 = (a, b, loose) => compare$11(b, a, loose); module.exports = rcompare$1; } }); //#endregion //#region node_modules/.pnpm/semver@7.7.2/node_modules/semver/functions/compare-loose.js var require_compare_loose = __commonJS({ "node_modules/.pnpm/semver@7.7.2/node_modules/semver/functions/compare-loose.js"(exports, module) { const compare$10 = require_compare(); const compareLoose$1 = (a, b) => compare$10(a, b, true); module.exports = compareLoose$1; } }); //#endregion //#region node_modules/.pnpm/semver@7.7.2/node_modules/semver/functions/compare-build.js var require_compare_build = __commonJS({ "node_modules/.pnpm/semver@7.7.2/node_modules/semver/functions/compare-build.js"(exports, module) { const SemVer$8 = require_semver$1(); const compareBuild$3 = (a, b, loose) => { const versionA = new SemVer$8(a, loose); const versionB = new SemVer$8(b, loose); return versionA.compare(versionB) || versionA.compareBuild(versionB); }; module.exports = compareBuild$3; } }); //#endregion //#region node_modules/.pnpm/semver@7.7.2/node_modules/semver/functions/sort.js var require_sort = __commonJS({ "node_modules/.pnpm/semver@7.7.2/node_modules/semver/functions/sort.js"(exports, module) { const compareBuild$2 = require_compare_build(); const sort$1 = (list, loose) => list.sort((a, b) => compareBuild$2(a, b, loose)); module.exports = sort$1; } }); //#endregion //#region node_modules/.pnpm/semver@7.7.2/node_modules/semver/functions/rsort.js var require_rsort = __commonJS({ "node_modules/.pnpm/semver@7.7.2/node_modules/semver/functions/rsort.js"(exports, module) { const compareBuild$1 = require_compare_build(); const rsort$1 = (list, loose) => list.sort((a, b) => compareBuild$1(b, a, loose)); module.exports = rsort$1; } }); //#endregion //#region node_modules/.pnpm/semver@7.7.2/node_modules/semver/functions/gt.js var require_gt = __commonJS({ "node_modules/.pnpm/semver@7.7.2/node_modules/semver/functions/gt.js"(exports, module) { const compare$9 = require_compare(); const gt$4 = (a, b, loose) => compare$9(a, b, loose) > 0; module.exports = gt$4; } }); //#endregion //#region node_modules/.pnpm/semver@7.7.2/node_modules/semver/functions/lt.js var require_lt = __commonJS({ "node_modules/.pnpm/semver@7.7.2/node_modules/semver/functions/lt.js"(exports, module) { const compare$8 = require_compare(); const lt$3 = (a, b, loose) => compare$8(a, b, loose) < 0; module.exports = lt$3; } }); //#endregion //#region node_modules/.pnpm/semver@7.7.2/node_modules/semver/functions/eq.js var require_eq = __commonJS({ "node_modules/.pnpm/semver@7.7.2/node_modules/semver/functions/eq.js"(exports, module) { const compare$7 = require_compare(); const eq$2 = (a, b, loose) => compare$7(a, b, loose) === 0; module.exports = eq$2; } }); //#endregion //#region 
node_modules/.pnpm/semver@7.7.2/node_modules/semver/functions/neq.js var require_neq = __commonJS({ "node_modules/.pnpm/semver@7.7.2/node_modules/semver/functions/neq.js"(exports, module) { const compare$6 = require_compare(); const neq$2 = (a, b, loose) => compare$6(a, b, loose) !== 0; module.exports = neq$2; } }); //#endregion //#region node_modules/.pnpm/semver@7.7.2/node_modules/semver/functions/gte.js var require_gte = __commonJS({ "node_modules/.pnpm/semver@7.7.2/node_modules/semver/functions/gte.js"(exports, module) { const compare$5 = require_compare(); const gte$3 = (a, b, loose) => compare$5(a, b, loose) >= 0; module.exports = gte$3; } }); //#endregion //#region node_modules/.pnpm/semver@7.7.2/node_modules/semver/functions/lte.js var require_lte = __commonJS({ "node_modules/.pnpm/semver@7.7.2/node_modules/semver/functions/lte.js"(exports, module) { const compare$4 = require_compare(); const lte$3 = (a, b, loose) => compare$4(a, b, loose) <= 0; module.exports = lte$3; } }); //#endregion //#region node_modules/.pnpm/semver@7.7.2/node_modules/semver/functions/cmp.js var require_cmp = __commonJS({ "node_modules/.pnpm/semver@7.7.2/node_modules/semver/functions/cmp.js"(exports, module) { const eq$1 = require_eq(); const neq$1 = require_neq(); const gt$3 = require_gt(); const gte$2 = require_gte(); const lt$2 = require_lt(); const lte$2 = require_lte(); const cmp$2 = (a, op, b, loose) => { switch (op) { case "===": if (typeof a === "object") a = a.version; if (typeof b === "object") b = b.version; return a === b; case "!==": if (typeof a === "object") a = a.version; if (typeof b === "object") b = b.version; return a !== b; case "": case "=": case "==": return eq$1(a, b, loose); case "!=": return neq$1(a, b, loose); case ">": return gt$3(a, b, loose); case ">=": return gte$2(a, b, loose); case "<": return lt$2(a, b, loose); case "<=": return lte$2(a, b, loose); default: throw new TypeError(`Invalid operator: ${op}`); } }; module.exports = cmp$2; } }); //#endregion //#region node_modules/.pnpm/semver@7.7.2/node_modules/semver/functions/coerce.js var require_coerce = __commonJS({ "node_modules/.pnpm/semver@7.7.2/node_modules/semver/functions/coerce.js"(exports, module) { const SemVer$7 = require_semver$1(); const parse$1 = require_parse(); const { safeRe: re$2, t: t$2 } = require_re(); const coerce$1 = (version, options) => { if (version instanceof SemVer$7) return version; if (typeof version === "number") version = String(version); if (typeof version !== "string") return null; options = options || {}; let match = null; if (!options.rtl) match = version.match(options.includePrerelease ? re$2[t$2.COERCEFULL] : re$2[t$2.COERCE]); else { const coerceRtlRegex = options.includePrerelease ? re$2[t$2.COERCERTLFULL] : re$2[t$2.COERCERTL]; let next; while ((next = coerceRtlRegex.exec(version)) && (!match || match.index + match[0].length !== version.length)) { if (!match || next.index + next[0].length !== match.index + match[0].length) match = next; coerceRtlRegex.lastIndex = next.index + next[1].length + next[2].length; } coerceRtlRegex.lastIndex = -1; } if (match === null) return null; const major$2 = match[2]; const minor$2 = match[3] || "0"; const patch$2 = match[4] || "0"; const prerelease$2 = options.includePrerelease && match[5] ? `-${match[5]}` : ""; const build = options.includePrerelease && match[6] ? 
`+${match[6]}` : ""; return parse$1(`${major$2}.${minor$2}.${patch$2}${prerelease$2}${build}`, options); }; module.exports = coerce$1; } }); //#endregion //#region node_modules/.pnpm/semver@7.7.2/node_modules/semver/internal/lrucache.js var require_lrucache = __commonJS({ "node_modules/.pnpm/semver@7.7.2/node_modules/semver/internal/lrucache.js"(exports, module) { var LRUCache = class { constructor() { this.max = 1e3; this.map = /* @__PURE__ */ new Map(); } get(key) { const value = this.map.get(key); if (value === void 0) return void 0; else { this.map.delete(key); this.map.set(key, value); return value; } } delete(key) { return this.map.delete(key); } set(key, value) { const deleted = this.delete(key); if (!deleted && value !== void 0) { if (this.map.size >= this.max) { const firstKey = this.map.keys().next().value; this.delete(firstKey); } this.map.set(key, value); } return this; } }; module.exports = LRUCache; } }); //#endregion //#region node_modules/.pnpm/semver@7.7.2/node_modules/semver/classes/range.js var require_range = __commonJS({ "node_modules/.pnpm/semver@7.7.2/node_modules/semver/classes/range.js"(exports, module) { const SPACE_CHARACTERS = /\s+/g; var Range$11 = class Range$11 { constructor(range, options) { options = parseOptions$1(options); if (range instanceof Range$11) if (range.loose === !!options.loose && range.includePrerelease === !!options.includePrerelease) return range; else return new Range$11(range.raw, options); if (range instanceof Comparator$4) { this.raw = range.value; this.set = [[range]]; this.formatted = void 0; return this; } this.options = options; this.loose = !!options.loose; this.includePrerelease = !!options.includePrerelease; this.raw = range.trim().replace(SPACE_CHARACTERS, " "); this.set = this.raw.split("||").map((r) => this.parseRange(r.trim())).filter((c) => c.length); if (!this.set.length) throw new TypeError(`Invalid SemVer Range: ${this.raw}`); if (this.set.length > 1) { const first = this.set[0]; this.set = this.set.filter((c) => !isNullSet(c[0])); if (this.set.length === 0) this.set = [first]; else if (this.set.length > 1) { for (const c of this.set) if (c.length === 1 && isAny(c[0])) { this.set = [c]; break; } } } this.formatted = void 0; } get range() { if (this.formatted === void 0) { this.formatted = ""; for (let i = 0; i < this.set.length; i++) { if (i > 0) this.formatted += "||"; const comps = this.set[i]; for (let k = 0; k < comps.length; k++) { if (k > 0) this.formatted += " "; this.formatted += comps[k].toString().trim(); } } } return this.formatted; } format() { return this.range; } toString() { return this.range; } parseRange(range) { const memoOpts = (this.options.includePrerelease && FLAG_INCLUDE_PRERELEASE) | (this.options.loose && FLAG_LOOSE); const memoKey = memoOpts + ":" + range; const cached = cache.get(memoKey); if (cached) return cached; const loose = this.options.loose; const hr = loose ? 
re$1[t$1.HYPHENRANGELOOSE] : re$1[t$1.HYPHENRANGE]; range = range.replace(hr, hyphenReplace(this.options.includePrerelease)); debug$1("hyphen replace", range); range = range.replace(re$1[t$1.COMPARATORTRIM], comparatorTrimReplace); debug$1("comparator trim", range); range = range.replace(re$1[t$1.TILDETRIM], tildeTrimReplace); debug$1("tilde trim", range); range = range.replace(re$1[t$1.CARETTRIM], caretTrimReplace); debug$1("caret trim", range); let rangeList = range.split(" ").map((comp) => parseComparator(comp, this.options)).join(" ").split(/\s+/).map((comp) => replaceGTE0(comp, this.options)); if (loose) rangeList = rangeList.filter((comp) => { debug$1("loose invalid filter", comp, this.options); return !!comp.match(re$1[t$1.COMPARATORLOOSE]); }); debug$1("range list", rangeList); const rangeMap = /* @__PURE__ */ new Map(); const comparators = rangeList.map((comp) => new Comparator$4(comp, this.options)); for (const comp of comparators) { if (isNullSet(comp)) return [comp]; rangeMap.set(comp.value, comp); } if (rangeMap.size > 1 && rangeMap.has("")) rangeMap.delete(""); const result = [...rangeMap.values()]; cache.set(memoKey, result); return result; } intersects(range, options) { if (!(range instanceof Range$11)) throw new TypeError("a Range is required"); return this.set.some((thisComparators) => { return isSatisfiable(thisComparators, options) && range.set.some((rangeComparators) => { return isSatisfiable(rangeComparators, options) && thisComparators.every((thisComparator) => { return rangeComparators.every((rangeComparator) => { return thisComparator.intersects(rangeComparator, options); }); }); }); }); } test(version) { if (!version) return false; if (typeof version === "string") try { version = new SemVer$6(version, this.options); } catch (er) { return false; } for (let i = 0; i < this.set.length; i++) if (testSet(this.set[i], version, this.options)) return true; return false; } }; module.exports = Range$11; const LRU = require_lrucache(); const cache = new LRU(); const parseOptions$1 = require_parse_options(); const Comparator$4 = require_comparator(); const debug$1 = require_debug(); const SemVer$6 = require_semver$1(); const { safeRe: re$1, t: t$1, comparatorTrimReplace, tildeTrimReplace, caretTrimReplace } = require_re(); const { FLAG_INCLUDE_PRERELEASE, FLAG_LOOSE } = require_constants(); const isNullSet = (c) => c.value === "<0.0.0-0"; const isAny = (c) => c.value === ""; const isSatisfiable = (comparators, options) => { let result = true; const remainingComparators = comparators.slice(); let testComparator = remainingComparators.pop(); while (result && remainingComparators.length) { result = remainingComparators.every((otherComparator) => { return testComparator.intersects(otherComparator, options); }); testComparator = remainingComparators.pop(); } return result; }; const parseComparator = (comp, options) => { debug$1("comp", comp, options); comp = replaceCarets(comp, options); debug$1("caret", comp); comp = replaceTildes(comp, options); debug$1("tildes", comp); comp = replaceXRanges(comp, options); debug$1("xrange", comp); comp = replaceStars(comp, options); debug$1("stars", comp); return comp; }; const isX = (id) => !id || id.toLowerCase() === "x" || id === "*"; const replaceTildes = (comp, options) => { return comp.trim().split(/\s+/).map((c) => replaceTilde(c, options)).join(" "); }; const replaceTilde = (comp, options) => { const r = options.loose ? 
re$1[t$1.TILDELOOSE] : re$1[t$1.TILDE]; return comp.replace(r, (_, M, m, p, pr) => { debug$1("tilde", comp, _, M, m, p, pr); let ret; if (isX(M)) ret = ""; else if (isX(m)) ret = `>=${M}.0.0 <${+M + 1}.0.0-0`; else if (isX(p)) ret = `>=${M}.${m}.0 <${M}.${+m + 1}.0-0`; else if (pr) { debug$1("replaceTilde pr", pr); ret = `>=${M}.${m}.${p}-${pr} <${M}.${+m + 1}.0-0`; } else ret = `>=${M}.${m}.${p} <${M}.${+m + 1}.0-0`; debug$1("tilde return", ret); return ret; }); }; const replaceCarets = (comp, options) => { return comp.trim().split(/\s+/).map((c) => replaceCaret(c, options)).join(" "); }; const replaceCaret = (comp, options) => { debug$1("caret", comp, options); const r = options.loose ? re$1[t$1.CARETLOOSE] : re$1[t$1.CARET]; const z = options.includePrerelease ? "-0" : ""; return comp.replace(r, (_, M, m, p, pr) => { debug$1("caret", comp, _, M, m, p, pr); let ret; if (isX(M)) ret = ""; else if (isX(m)) ret = `>=${M}.0.0${z} <${+M + 1}.0.0-0`; else if (isX(p)) if (M === "0") ret = `>=${M}.${m}.0${z} <${M}.${+m + 1}.0-0`; else ret = `>=${M}.${m}.0${z} <${+M + 1}.0.0-0`; else if (pr) { debug$1("replaceCaret pr", pr); if (M === "0") if (m === "0") ret = `>=${M}.${m}.${p}-${pr} <${M}.${m}.${+p + 1}-0`; else ret = `>=${M}.${m}.${p}-${pr} <${M}.${+m + 1}.0-0`; else ret = `>=${M}.${m}.${p}-${pr} <${+M + 1}.0.0-0`; } else { debug$1("no pr"); if (M === "0") if (m === "0") ret = `>=${M}.${m}.${p}${z} <${M}.${m}.${+p + 1}-0`; else ret = `>=${M}.${m}.${p}${z} <${M}.${+m + 1}.0-0`; else ret = `>=${M}.${m}.${p} <${+M + 1}.0.0-0`; } debug$1("caret return", ret); return ret; }); }; const replaceXRanges = (comp, options) => { debug$1("replaceXRanges", comp, options); return comp.split(/\s+/).map((c) => replaceXRange(c, options)).join(" "); }; const replaceXRange = (comp, options) => { comp = comp.trim(); const r = options.loose ? re$1[t$1.XRANGELOOSE] : re$1[t$1.XRANGE]; return comp.replace(r, (ret, gtlt, M, m, p, pr) => { debug$1("xRange", comp, ret, gtlt, M, m, p, pr); const xM = isX(M); const xm = xM || isX(m); const xp = xm || isX(p); const anyX = xp; if (gtlt === "=" && anyX) gtlt = ""; pr = options.includePrerelease ? "-0" : ""; if (xM) if (gtlt === ">" || gtlt === "<") ret = "<0.0.0-0"; else ret = "*"; else if (gtlt && anyX) { if (xm) m = 0; p = 0; if (gtlt === ">") { gtlt = ">="; if (xm) { M = +M + 1; m = 0; p = 0; } else { m = +m + 1; p = 0; } } else if (gtlt === "<=") { gtlt = "<"; if (xm) M = +M + 1; else m = +m + 1; } if (gtlt === "<") pr = "-0"; ret = `${gtlt + M}.${m}.${p}${pr}`; } else if (xm) ret = `>=${M}.0.0${pr} <${+M + 1}.0.0-0`; else if (xp) ret = `>=${M}.${m}.0${pr} <${M}.${+m + 1}.0-0`; debug$1("xRange return", ret); return ret; }); }; const replaceStars = (comp, options) => { debug$1("replaceStars", comp, options); return comp.trim().replace(re$1[t$1.STAR], ""); }; const replaceGTE0 = (comp, options) => { debug$1("replaceGTE0", comp, options); return comp.trim().replace(re$1[options.includePrerelease ? t$1.GTE0PRE : t$1.GTE0], ""); }; const hyphenReplace = (incPr) => ($0, from, fM, fm, fp, fpr, fb, to, tM, tm, tp, tpr) => { if (isX(fM)) from = ""; else if (isX(fm)) from = `>=${fM}.0.0${incPr ? "-0" : ""}`; else if (isX(fp)) from = `>=${fM}.${fm}.0${incPr ? "-0" : ""}`; else if (fpr) from = `>=${from}`; else from = `>=${from}${incPr ? 
"-0" : ""}`; if (isX(tM)) to = ""; else if (isX(tm)) to = `<${+tM + 1}.0.0-0`; else if (isX(tp)) to = `<${tM}.${+tm + 1}.0-0`; else if (tpr) to = `<=${tM}.${tm}.${tp}-${tpr}`; else if (incPr) to = `<${tM}.${tm}.${+tp + 1}-0`; else to = `<=${to}`; return `${from} ${to}`.trim(); }; const testSet = (set, version, options) => { for (let i = 0; i < set.length; i++) if (!set[i].test(version)) return false; if (version.prerelease.length && !options.includePrerelease) { for (let i = 0; i < set.length; i++) { debug$1(set[i].semver); if (set[i].semver === Comparator$4.ANY) continue; if (set[i].semver.prerelease.length > 0) { const allowed = set[i].semver; if (allowed.major === version.major && allowed.minor === version.minor && allowed.patch === version.patch) return true; } } return false; } return true; }; } }); //#endregion //#region node_modules/.pnpm/semver@7.7.2/node_modules/semver/classes/comparator.js var require_comparator = __commonJS({ "node_modules/.pnpm/semver@7.7.2/node_modules/semver/classes/comparator.js"(exports, module) { const ANY$2 = Symbol("SemVer ANY"); var Comparator$3 = class Comparator$3 { static get ANY() { return ANY$2; } constructor(comp, options) { options = parseOptions(options); if (comp instanceof Comparator$3) if (comp.loose === !!options.loose) return comp; else comp = comp.value; comp = comp.trim().split(/\s+/).join(" "); debug("comparator", comp, options); this.options = options; this.loose = !!options.loose; this.parse(comp); if (this.semver === ANY$2) this.value = ""; else this.value = this.operator + this.semver.version; debug("comp", this); } parse(comp) { const r = this.options.loose ? re[t.COMPARATORLOOSE] : re[t.COMPARATOR]; const m = comp.match(r); if (!m) throw new TypeError(`Invalid comparator: ${comp}`); this.operator = m[1] !== void 0 ? 
m[1] : ""; if (this.operator === "=") this.operator = ""; if (!m[2]) this.semver = ANY$2; else this.semver = new SemVer$5(m[2], this.options.loose); } toString() { return this.value; } test(version) { debug("Comparator.test", version, this.options.loose); if (this.semver === ANY$2 || version === ANY$2) return true; if (typeof version === "string") try { version = new SemVer$5(version, this.options); } catch (er) { return false; } return cmp$1(version, this.operator, this.semver, this.options); } intersects(comp, options) { if (!(comp instanceof Comparator$3)) throw new TypeError("a Comparator is required"); if (this.operator === "") { if (this.value === "") return true; return new Range$10(comp.value, options).test(this.value); } else if (comp.operator === "") { if (comp.value === "") return true; return new Range$10(this.value, options).test(comp.semver); } options = parseOptions(options); if (options.includePrerelease && (this.value === "<0.0.0-0" || comp.value === "<0.0.0-0")) return false; if (!options.includePrerelease && (this.value.startsWith("<0.0.0") || comp.value.startsWith("<0.0.0"))) return false; if (this.operator.startsWith(">") && comp.operator.startsWith(">")) return true; if (this.operator.startsWith("<") && comp.operator.startsWith("<")) return true; if (this.semver.version === comp.semver.version && this.operator.includes("=") && comp.operator.includes("=")) return true; if (cmp$1(this.semver, "<", comp.semver, options) && this.operator.startsWith(">") && comp.operator.startsWith("<")) return true; if (cmp$1(this.semver, ">", comp.semver, options) && this.operator.startsWith("<") && comp.operator.startsWith(">")) return true; return false; } }; module.exports = Comparator$3; const parseOptions = require_parse_options(); const { safeRe: re, t } = require_re(); const cmp$1 = require_cmp(); const debug = require_debug(); const SemVer$5 = require_semver$1(); const Range$10 = require_range(); } }); //#endregion //#region node_modules/.pnpm/semver@7.7.2/node_modules/semver/functions/satisfies.js var require_satisfies = __commonJS({ "node_modules/.pnpm/semver@7.7.2/node_modules/semver/functions/satisfies.js"(exports, module) { const Range$9 = require_range(); const satisfies$4 = (version, range, options) => { try { range = new Range$9(range, options); } catch (er) { return false; } return range.test(version); }; module.exports = satisfies$4; } }); //#endregion //#region node_modules/.pnpm/semver@7.7.2/node_modules/semver/ranges/to-comparators.js var require_to_comparators = __commonJS({ "node_modules/.pnpm/semver@7.7.2/node_modules/semver/ranges/to-comparators.js"(exports, module) { const Range$8 = require_range(); const toComparators$1 = (range, options) => new Range$8(range, options).set.map((comp) => comp.map((c) => c.value).join(" ").trim().split(" ")); module.exports = toComparators$1; } }); //#endregion //#region node_modules/.pnpm/semver@7.7.2/node_modules/semver/ranges/max-satisfying.js var require_max_satisfying = __commonJS({ "node_modules/.pnpm/semver@7.7.2/node_modules/semver/ranges/max-satisfying.js"(exports, module) { const SemVer$4 = require_semver$1(); const Range$7 = require_range(); const maxSatisfying$1 = (versions, range, options) => { let max = null; let maxSV = null; let rangeObj = null; try { rangeObj = new Range$7(range, options); } catch (er) { return null; } versions.forEach((v) => { if (rangeObj.test(v)) { if (!max || maxSV.compare(v) === -1) { max = v; maxSV = new SemVer$4(max, options); } } }); return max; }; module.exports = maxSatisfying$1; } }); 
//#endregion //#region node_modules/.pnpm/semver@7.7.2/node_modules/semver/ranges/min-satisfying.js var require_min_satisfying = __commonJS({ "node_modules/.pnpm/semver@7.7.2/node_modules/semver/ranges/min-satisfying.js"(exports, module) { const SemVer$3 = require_semver$1(); const Range$6 = require_range(); const minSatisfying$1 = (versions, range, options) => { let min = null; let minSV = null; let rangeObj = null; try { rangeObj = new Range$6(range, options); } catch (er) { return null; } versions.forEach((v) => { if (rangeObj.test(v)) { if (!min || minSV.compare(v) === 1) { min = v; minSV = new SemVer$3(min, options); } } }); return min; }; module.exports = minSatisfying$1; } }); //#endregion //#region node_modules/.pnpm/semver@7.7.2/node_modules/semver/ranges/min-version.js var require_min_version = __commonJS({ "node_modules/.pnpm/semver@7.7.2/node_modules/semver/ranges/min-version.js"(exports, module) { const SemVer$2 = require_semver$1(); const Range$5 = require_range(); const gt$2 = require_gt(); const minVersion$1 = (range, loose) => { range = new Range$5(range, loose); let minver = new SemVer$2("0.0.0"); if (range.test(minver)) return minver; minver = new SemVer$2("0.0.0-0"); if (range.test(minver)) return minver; minver = null; for (let i = 0; i < range.set.length; ++i) { const comparators = range.set[i]; let setMin = null; comparators.forEach((comparator) => { const compver = new SemVer$2(comparator.semver.version); switch (comparator.operator) { case ">": if (compver.prerelease.length === 0) compver.patch++; else compver.prerelease.push(0); compver.raw = compver.format(); case "": case ">=": if (!setMin || gt$2(compver, setMin)) setMin = compver; break; case "<": case "<=": break; default: throw new Error(`Unexpected operation: ${comparator.operator}`); } }); if (setMin && (!minver || gt$2(minver, setMin))) minver = setMin; } if (minver && range.test(minver)) return minver; return null; }; module.exports = minVersion$1; } }); //#endregion //#region node_modules/.pnpm/semver@7.7.2/node_modules/semver/ranges/valid.js var require_valid = __commonJS({ "node_modules/.pnpm/semver@7.7.2/node_modules/semver/ranges/valid.js"(exports, module) { const Range$4 = require_range(); const validRange$1 = (range, options) => { try { return new Range$4(range, options).range || "*"; } catch (er) { return null; } }; module.exports = validRange$1; } }); //#endregion //#region node_modules/.pnpm/semver@7.7.2/node_modules/semver/ranges/outside.js var require_outside = __commonJS({ "node_modules/.pnpm/semver@7.7.2/node_modules/semver/ranges/outside.js"(exports, module) { const SemVer$1 = require_semver$1(); const Comparator$2 = require_comparator(); const { ANY: ANY$1 } = Comparator$2; const Range$3 = require_range(); const satisfies$3 = require_satisfies(); const gt$1 = require_gt(); const lt$1 = require_lt(); const lte$1 = require_lte(); const gte$1 = require_gte(); const outside$3 = (version, range, hilo, options) => { version = new SemVer$1(version, options); range = new Range$3(range, options); let gtfn, ltefn, ltfn, comp, ecomp; switch (hilo) { case ">": gtfn = gt$1; ltefn = lte$1; ltfn = lt$1; comp = ">"; ecomp = ">="; break; case "<": gtfn = lt$1; ltefn = gte$1; ltfn = gt$1; comp = "<"; ecomp = "<="; break; default: throw new TypeError("Must provide a hilo val of \"<\" or \">\""); } if (satisfies$3(version, range, options)) return false; for (let i = 0; i < range.set.length; ++i) { const comparators = range.set[i]; let high = null; let low = null; comparators.forEach((comparator) => { if 
(comparator.semver === ANY$1) comparator = new Comparator$2(">=0.0.0"); high = high || comparator; low = low || comparator; if (gtfn(comparator.semver, high.semver, options)) high = comparator; else if (ltfn(comparator.semver, low.semver, options)) low = comparator; }); if (high.operator === comp || high.operator === ecomp) return false; if ((!low.operator || low.operator === comp) && ltefn(version, low.semver)) return false; else if (low.operator === ecomp && ltfn(version, low.semver)) return false; } return true; }; module.exports = outside$3; } }); //#endregion //#region node_modules/.pnpm/semver@7.7.2/node_modules/semver/ranges/gtr.js var require_gtr = __commonJS({ "node_modules/.pnpm/semver@7.7.2/node_modules/semver/ranges/gtr.js"(exports, module) { const outside$2 = require_outside(); const gtr$1 = (version, range, options) => outside$2(version, range, ">", options); module.exports = gtr$1; } }); //#endregion //#region node_modules/.pnpm/semver@7.7.2/node_modules/semver/ranges/ltr.js var require_ltr = __commonJS({ "node_modules/.pnpm/semver@7.7.2/node_modules/semver/ranges/ltr.js"(exports, module) { const outside$1 = require_outside(); const ltr$1 = (version, range, options) => outside$1(version, range, "<", options); module.exports = ltr$1; } }); //#endregion //#region node_modules/.pnpm/semver@7.7.2/node_modules/semver/ranges/intersects.js var require_intersects = __commonJS({ "node_modules/.pnpm/semver@7.7.2/node_modules/semver/ranges/intersects.js"(exports, module) { const Range$2 = require_range(); const intersects$1 = (r1, r2, options) => { r1 = new Range$2(r1, options); r2 = new Range$2(r2, options); return r1.intersects(r2, options); }; module.exports = intersects$1; } }); //#endregion //#region node_modules/.pnpm/semver@7.7.2/node_modules/semver/ranges/simplify.js var require_simplify = __commonJS({ "node_modules/.pnpm/semver@7.7.2/node_modules/semver/ranges/simplify.js"(exports, module) { const satisfies$2 = require_satisfies(); const compare$3 = require_compare(); module.exports = (versions, range, options) => { const set = []; let first = null; let prev = null; const v = versions.sort((a, b) => compare$3(a, b, options)); for (const version of v) { const included = satisfies$2(version, range, options); if (included) { prev = version; if (!first) first = version; } else { if (prev) set.push([first, prev]); prev = null; first = null; } } if (first) set.push([first, null]); const ranges = []; for (const [min, max] of set) if (min === max) ranges.push(min); else if (!max && min === v[0]) ranges.push("*"); else if (!max) ranges.push(`>=${min}`); else if (min === v[0]) ranges.push(`<=${max}`); else ranges.push(`${min} - ${max}`); const simplified = ranges.join(" || "); const original = typeof range.raw === "string" ? range.raw : String(range); return simplified.length < original.length ? 
simplified : range; }; } }); //#endregion //#region node_modules/.pnpm/semver@7.7.2/node_modules/semver/ranges/subset.js var require_subset = __commonJS({ "node_modules/.pnpm/semver@7.7.2/node_modules/semver/ranges/subset.js"(exports, module) { const Range$1 = require_range(); const Comparator$1 = require_comparator(); const { ANY } = Comparator$1; const satisfies$1 = require_satisfies(); const compare$2 = require_compare(); const subset$1 = (sub, dom, options = {}) => { if (sub === dom) return true; sub = new Range$1(sub, options); dom = new Range$1(dom, options); let sawNonNull = false; OUTER: for (const simpleSub of sub.set) { for (const simpleDom of dom.set) { const isSub = simpleSubset(simpleSub, simpleDom, options); sawNonNull = sawNonNull || isSub !== null; if (isSub) continue OUTER; } if (sawNonNull) return false; } return true; }; const minimumVersionWithPreRelease = [new Comparator$1(">=0.0.0-0")]; const minimumVersion = [new Comparator$1(">=0.0.0")]; const simpleSubset = (sub, dom, options) => { if (sub === dom) return true; if (sub.length === 1 && sub[0].semver === ANY) if (dom.length === 1 && dom[0].semver === ANY) return true; else if (options.includePrerelease) sub = minimumVersionWithPreRelease; else sub = minimumVersion; if (dom.length === 1 && dom[0].semver === ANY) if (options.includePrerelease) return true; else dom = minimumVersion; const eqSet = /* @__PURE__ */ new Set(); let gt$5, lt$4; for (const c of sub) if (c.operator === ">" || c.operator === ">=") gt$5 = higherGT(gt$5, c, options); else if (c.operator === "<" || c.operator === "<=") lt$4 = lowerLT(lt$4, c, options); else eqSet.add(c.semver); if (eqSet.size > 1) return null; let gtltComp; if (gt$5 && lt$4) { gtltComp = compare$2(gt$5.semver, lt$4.semver, options); if (gtltComp > 0) return null; else if (gtltComp === 0 && (gt$5.operator !== ">=" || lt$4.operator !== "<=")) return null; } for (const eq$3 of eqSet) { if (gt$5 && !satisfies$1(eq$3, String(gt$5), options)) return null; if (lt$4 && !satisfies$1(eq$3, String(lt$4), options)) return null; for (const c of dom) if (!satisfies$1(eq$3, String(c), options)) return false; return true; } let higher, lower; let hasDomLT, hasDomGT; let needDomLTPre = lt$4 && !options.includePrerelease && lt$4.semver.prerelease.length ? lt$4.semver : false; let needDomGTPre = gt$5 && !options.includePrerelease && gt$5.semver.prerelease.length ? 
gt$5.semver : false; if (needDomLTPre && needDomLTPre.prerelease.length === 1 && lt$4.operator === "<" && needDomLTPre.prerelease[0] === 0) needDomLTPre = false; for (const c of dom) { hasDomGT = hasDomGT || c.operator === ">" || c.operator === ">="; hasDomLT = hasDomLT || c.operator === "<" || c.operator === "<="; if (gt$5) { if (needDomGTPre) { if (c.semver.prerelease && c.semver.prerelease.length && c.semver.major === needDomGTPre.major && c.semver.minor === needDomGTPre.minor && c.semver.patch === needDomGTPre.patch) needDomGTPre = false; } if (c.operator === ">" || c.operator === ">=") { higher = higherGT(gt$5, c, options); if (higher === c && higher !== gt$5) return false; } else if (gt$5.operator === ">=" && !satisfies$1(gt$5.semver, String(c), options)) return false; } if (lt$4) { if (needDomLTPre) { if (c.semver.prerelease && c.semver.prerelease.length && c.semver.major === needDomLTPre.major && c.semver.minor === needDomLTPre.minor && c.semver.patch === needDomLTPre.patch) needDomLTPre = false; } if (c.operator === "<" || c.operator === "<=") { lower = lowerLT(lt$4, c, options); if (lower === c && lower !== lt$4) return false; } else if (lt$4.operator === "<=" && !satisfies$1(lt$4.semver, String(c), options)) return false; } if (!c.operator && (lt$4 || gt$5) && gtltComp !== 0) return false; } if (gt$5 && hasDomLT && !lt$4 && gtltComp !== 0) return false; if (lt$4 && hasDomGT && !gt$5 && gtltComp !== 0) return false; if (needDomGTPre || needDomLTPre) return false; return true; }; const higherGT = (a, b, options) => { if (!a) return b; const comp = compare$2(a.semver, b.semver, options); return comp > 0 ? a : comp < 0 ? b : b.operator === ">" && a.operator === ">=" ? b : a; }; const lowerLT = (a, b, options) => { if (!a) return b; const comp = compare$2(a.semver, b.semver, options); return comp < 0 ? a : comp > 0 ? b : b.operator === "<" && a.operator === "<=" ? 
b : a; }; module.exports = subset$1; } }); //#endregion //#region node_modules/.pnpm/semver@7.7.2/node_modules/semver/index.js var require_semver = __commonJS({ "node_modules/.pnpm/semver@7.7.2/node_modules/semver/index.js"(exports, module) { const internalRe = require_re(); const constants = require_constants(); const SemVer = require_semver$1(); const identifiers = require_identifiers(); const parse = require_parse(); const valid = require_valid$1(); const clean = require_clean(); const inc = require_inc(); const diff = require_diff(); const major = require_major(); const minor = require_minor(); const patch = require_patch(); const prerelease = require_prerelease(); const compare$1 = require_compare(); const rcompare = require_rcompare(); const compareLoose = require_compare_loose(); const compareBuild = require_compare_build(); const sort = require_sort(); const rsort = require_rsort(); const gt = require_gt(); const lt = require_lt(); const eq = require_eq(); const neq = require_neq(); const gte = require_gte(); const lte = require_lte(); const cmp = require_cmp(); const coerce = require_coerce(); const Comparator = require_comparator(); const Range = require_range(); const satisfies = require_satisfies(); const toComparators = require_to_comparators(); const maxSatisfying = require_max_satisfying(); const minSatisfying = require_min_satisfying(); const minVersion = require_min_version(); const validRange = require_valid(); const outside = require_outside(); const gtr = require_gtr(); const ltr = require_ltr(); const intersects = require_intersects(); const simplifyRange = require_simplify(); const subset = require_subset(); module.exports = { parse, valid, clean, inc, diff, major, minor, patch, prerelease, compare: compare$1, rcompare, compareLoose, compareBuild, sort, rsort, gt, lt, eq, neq, gte, lte, cmp, coerce, Comparator, Range, satisfies, toComparators, maxSatisfying, minSatisfying, minVersion, validRange, outside, gtr, ltr, intersects, simplifyRange, subset, SemVer, re: internalRe.re, src: internalRe.src, tokens: internalRe.t, SEMVER_SPEC_VERSION: constants.SEMVER_SPEC_VERSION, RELEASE_TYPES: constants.RELEASE_TYPES, compareIdentifiers: identifiers.compareIdentifiers, rcompareIdentifiers: identifiers.rcompareIdentifiers }; } }); //#endregion //#region node_modules/.pnpm/langsmith@0.3.30_openai@5.3.0_ws@8.18.2_zod@3.25.57_/node_modules/langsmith/dist/utils/prompts.js var import_semver = __toESM(require_semver(), 1); function parsePromptIdentifier(identifier) { if (!identifier || identifier.split("/").length > 2 || identifier.startsWith("/") || identifier.endsWith("/") || identifier.split(":").length > 2) throw new Error(`Invalid identifier format: ${identifier}`); const [ownerNamePart, commitPart] = identifier.split(":"); const commit = commitPart || "latest"; if (ownerNamePart.includes("/")) { const [owner, name] = ownerNamePart.split("/", 2); if (!owner || !name) throw new Error(`Invalid identifier format: ${identifier}`); return [ owner, name, commit ]; } else { if (!ownerNamePart) throw new Error(`Invalid identifier format: ${identifier}`); return [ "-", ownerNamePart, commit ]; } } //#endregion //#region node_modules/.pnpm/langsmith@0.3.30_openai@5.3.0_ws@8.18.2_zod@3.25.57_/node_modules/langsmith/dist/utils/error.js /** * LangSmithConflictError * * Represents an error that occurs when there's a conflict during an operation, * typically corresponding to HTTP 409 status code responses. 
* * This error is thrown when an attempt to create or modify a resource conflicts * with the current state of the resource on the server. Common scenarios include: * - Attempting to create a resource that already exists * - Trying to update a resource that has been modified by another process * - Violating a uniqueness constraint in the data * * @extends Error * * @example * try { * await createProject("existingProject"); * } catch (error) { * if (error instanceof LangSmithConflictError) { * console.log("A conflict occurred:", error.message); * // Handle the conflict, e.g., by suggesting a different project name * } else { * // Handle other types of errors * } * } * * @property {string} name - Always set to 'LangSmithConflictError' for easy identification * @property {string} message - Detailed error message including server response * * @see https://developer.mozilla.org/en-US/docs/Web/HTTP/Status/409 */ var LangSmithConflictError = class extends Error { constructor(message) { super(message); Object.defineProperty(this, "status", { enumerable: true, configurable: true, writable: true, value: void 0 }); this.name = "LangSmithConflictError"; this.status = 409; } }; /** * Throws an appropriate error based on the response status and body. * * @param response - The fetch Response object * @param context - Additional context to include in the error message (e.g., operation being performed) * @param consume - When true, the response body is read even for successful responses, consuming it * @throws {LangSmithConflictError} When the response status is 409 * @throws {Error} For all other non-ok responses */ async function raiseForStatus(response, context, consume) { let errorBody; if (response.ok) { if (consume) errorBody = await response.text(); return; } errorBody = await response.text(); const fullMessage = `Failed to ${context}. Received status [${response.status}]: ${response.statusText}. Server response: ${errorBody}`; if (response.status === 409) throw new LangSmithConflictError(fullMessage); const err = new Error(fullMessage); err.status = response.status; throw err; } //#endregion //#region node_modules/.pnpm/langsmith@0.3.30_openai@5.3.0_ws@8.18.2_zod@3.25.57_/node_modules/langsmith/dist/utils/fast-safe-stringify/index.js var LIMIT_REPLACE_NODE = "[...]"; var CIRCULAR_REPLACE_NODE = { result: "[Circular]" }; var arr = []; var replacerStack = []; const encoder = new TextEncoder(); function defaultOptions$1() { return { depthLimit: Number.MAX_SAFE_INTEGER, edgesLimit: Number.MAX_SAFE_INTEGER }; } function encodeString(str) { return encoder.encode(str); } function serialize(obj, errorContext, replacer, spacer, options) { try { const str = JSON.stringify(obj, replacer, spacer); return encodeString(str); } catch (e) { if (!e.message?.includes("Converting circular structure to JSON")) { console.warn(`[WARNING]: LangSmith received unserializable value.${errorContext ? `\nContext: ${errorContext}` : ""}`); return encodeString("[Unserializable]"); } console.warn(`[WARNING]: LangSmith received circular JSON. This will decrease tracer performance. ${errorContext ?
`\nContext: ${errorContext}` : ""}`); if (typeof options === "undefined") options = defaultOptions$1(); decirc(obj, "", 0, [], void 0, 0, options); let res; try { if (replacerStack.length === 0) res = JSON.stringify(obj, replacer, spacer); else res = JSON.stringify(obj, replaceGetterValues(replacer), spacer); } catch (_) { return encodeString("[unable to serialize, circular reference is too complex to analyze]"); } finally { while (arr.length !== 0) { const part = arr.pop(); if (part.length === 4) Object.defineProperty(part[0], part[1], part[3]); else part[0][part[1]] = part[2]; } } return encodeString(res); } } function setReplace(replace, val, k, parent) { var propertyDescriptor = Object.getOwnPropertyDescriptor(parent, k); if (propertyDescriptor.get !== void 0) if (propertyDescriptor.configurable) { Object.defineProperty(parent, k, { value: replace }); arr.push([ parent, k, val, propertyDescriptor ]); } else replacerStack.push([ val, k, replace ]); else { parent[k] = replace; arr.push([ parent, k, val ]); } } function decirc(val, k, edgeIndex, stack, parent, depth, options) { depth += 1; var i; if (typeof val === "object" && val !== null) { for (i = 0; i < stack.length; i++) if (stack[i] === val) { setReplace(CIRCULAR_REPLACE_NODE, val, k, parent); return; } if (typeof options.depthLimit !== "undefined" && depth > options.depthLimit) { setReplace(LIMIT_REPLACE_NODE, val, k, parent); return; } if (typeof options.edgesLimit !== "undefined" && edgeIndex + 1 > options.edgesLimit) { setReplace(LIMIT_REPLACE_NODE, val, k, parent); return; } stack.push(val); if (Array.isArray(val)) for (i = 0; i < val.length; i++) decirc(val[i], i, i, stack, val, depth, options); else { var keys = Object.keys(val); for (i = 0; i < keys.length; i++) { var key = keys[i]; decirc(val[key], key, i, stack, val, depth, options); } } stack.pop(); } } function replaceGetterValues(replacer) { replacer = typeof replacer !== "undefined" ? replacer : function(k, v) { return v; }; return function(key, val) { if (replacerStack.length > 0) for (var i = 0; i < replacerStack.length; i++) { var part = replacerStack[i]; if (part[1] === key && part[0] === val) { val = part[2]; replacerStack.splice(i, 1); break; } } return replacer.call(this, key, val); }; } //#endregion //#region node_modules/.pnpm/langsmith@0.3.30_openai@5.3.0_ws@8.18.2_zod@3.25.57_/node_modules/langsmith/dist/client.js function mergeRuntimeEnvIntoRunCreate(run) { const runtimeEnv = getRuntimeEnvironment$1(); const envVars = getLangChainEnvVarsMetadata(); const extra = run.extra ?? {}; const metadata = extra.metadata; run.extra = { ...extra, runtime: { ...runtimeEnv, ...extra?.runtime }, metadata: { ...envVars, ...envVars.revision_id || run.revision_id ? { revision_id: run.revision_id ?? envVars.revision_id } : {}, ...metadata } }; return run; } const getTracingSamplingRate = (configRate) => { const samplingRateStr = configRate?.toString() ?? getLangSmithEnvironmentVariable("TRACING_SAMPLING_RATE"); if (samplingRateStr === void 0) return void 0; const samplingRate = parseFloat(samplingRateStr); if (samplingRate < 0 || samplingRate > 1) throw new Error(`LANGSMITH_TRACING_SAMPLING_RATE must be between 0 and 1 if set. 
Got: ${samplingRate}`); return samplingRate; }; const isLocalhost = (url) => { const strippedUrl = url.replace("http://", "").replace("https://", ""); const hostname = strippedUrl.split("/")[0].split(":")[0]; return hostname === "localhost" || hostname === "127.0.0.1" || hostname === "::1"; }; async function toArray(iterable) { const result = []; for await (const item of iterable) result.push(item); return result; } function trimQuotes(str) { if (str === void 0) return void 0; return str.trim().replace(/^"(.*)"$/, "$1").replace(/^'(.*)'$/, "$1"); } const handle429 = async (response) => { if (response?.status === 429) { const retryAfter = parseInt(response.headers.get("retry-after") ?? "30", 10) * 1e3; if (retryAfter > 0) { await new Promise((resolve) => setTimeout(resolve, retryAfter)); return true; } } return false; }; function _formatFeedbackScore(score) { if (typeof score === "number") return Number(score.toFixed(4)); return score; } var AutoBatchQueue = class { constructor() { Object.defineProperty(this, "items", { enumerable: true, configurable: true, writable: true, value: [] }); Object.defineProperty(this, "sizeBytes", { enumerable: true, configurable: true, writable: true, value: 0 }); } peek() { return this.items[0]; } push(item) { let itemPromiseResolve; const itemPromise = new Promise((resolve) => { itemPromiseResolve = resolve; }); const size = serialize(item.item, `Serializing run with id: ${item.item.id}`).length; this.items.push({ action: item.action, payload: item.item, itemPromiseResolve, itemPromise, size }); this.sizeBytes += size; return itemPromise; } pop(upToSizeBytes) { if (upToSizeBytes < 1) throw new Error("Number of bytes to pop off may not be less than 1."); const popped = []; let poppedSizeBytes = 0; while (poppedSizeBytes + (this.peek()?.size ?? 
0) < upToSizeBytes && this.items.length > 0) { const item = this.items.shift(); if (item) { popped.push(item); poppedSizeBytes += item.size; this.sizeBytes -= item.size; } } if (popped.length === 0 && this.items.length > 0) { const item = this.items.shift(); popped.push(item); poppedSizeBytes += item.size; this.sizeBytes -= item.size; } return [popped.map((it) => ({ action: it.action, item: it.payload })), () => popped.forEach((it) => it.itemPromiseResolve())]; } }; const DEFAULT_BATCH_SIZE_LIMIT_BYTES = 20971520; const SERVER_INFO_REQUEST_TIMEOUT = 2500; var Client = class Client { constructor(config = {}) { Object.defineProperty(this, "apiKey", { enumerable: true, configurable: true, writable: true, value: void 0 }); Object.defineProperty(this, "apiUrl", { enumerable: true, configurable: true, writable: true, value: void 0 }); Object.defineProperty(this, "webUrl", { enumerable: true, configurable: true, writable: true, value: void 0 }); Object.defineProperty(this, "caller", { enumerable: true, configurable: true, writable: true, value: void 0 }); Object.defineProperty(this, "batchIngestCaller", { enumerable: true, configurable: true, writable: true, value: void 0 }); Object.defineProperty(this, "timeout_ms", { enumerable: true, configurable: true, writable: true, value: void 0 }); Object.defineProperty(this, "_tenantId", { enumerable: true, configurable: true, writable: true, value: null }); Object.defineProperty(this, "hideInputs", { enumerable: true, configurable: true, writable: true, value: void 0 }); Object.defineProperty(this, "hideOutputs", { enumerable: true, configurable: true, writable: true, value: void 0 }); Object.defineProperty(this, "tracingSampleRate", { enumerable: true, configurable: true, writable: true, value: void 0 }); Object.defineProperty(this, "filteredPostUuids", { enumerable: true, configurable: true, writable: true, value: /* @__PURE__ */ new Set() }); Object.defineProperty(this, "autoBatchTracing", { enumerable: true, configurable: true, writable: true, value: true }); Object.defineProperty(this, "autoBatchQueue", { enumerable: true, configurable: true, writable: true, value: new AutoBatchQueue() }); Object.defineProperty(this, "autoBatchTimeout", { enumerable: true, configurable: true, writable: true, value: void 0 }); Object.defineProperty(this, "autoBatchAggregationDelayMs", { enumerable: true, configurable: true, writable: true, value: 250 }); Object.defineProperty(this, "batchSizeBytesLimit", { enumerable: true, configurable: true, writable: true, value: void 0 }); Object.defineProperty(this, "fetchOptions", { enumerable: true, configurable: true, writable: true, value: void 0 }); Object.defineProperty(this, "settings", { enumerable: true, configurable: true, writable: true, value: void 0 }); Object.defineProperty(this, "blockOnRootRunFinalization", { enumerable: true, configurable: true, writable: true, value: getEnvironmentVariable$1("LANGSMITH_TRACING_BACKGROUND") === "false" }); Object.defineProperty(this, "traceBatchConcurrency", { enumerable: true, configurable: true, writable: true, value: 5 }); Object.defineProperty(this, "_serverInfo", { enumerable: true, configurable: true, writable: true, value: void 0 }); Object.defineProperty(this, "_getServerInfoPromise", { enumerable: true, configurable: true, writable: true, value: void 0 }); Object.defineProperty(this, "manualFlushMode", { enumerable: true, configurable: true, writable: true, value: false }); Object.defineProperty(this, "debug", { enumerable: true, configurable: true, writable: true, 
value: getEnvironmentVariable$1("LANGSMITH_DEBUG") === "true" }); const defaultConfig = Client.getDefaultClientConfig(); this.tracingSampleRate = getTracingSamplingRate(config.tracingSamplingRate); this.apiUrl = trimQuotes(config.apiUrl ?? defaultConfig.apiUrl) ?? ""; if (this.apiUrl.endsWith("/")) this.apiUrl = this.apiUrl.slice(0, -1); this.apiKey = trimQuotes(config.apiKey ?? defaultConfig.apiKey); this.webUrl = trimQuotes(config.webUrl ?? defaultConfig.webUrl); if (this.webUrl?.endsWith("/")) this.webUrl = this.webUrl.slice(0, -1); this.timeout_ms = config.timeout_ms ?? 9e4; this.caller = new AsyncCaller$1({ ...config.callerOptions ?? {}, debug: config.debug ?? this.debug }); this.traceBatchConcurrency = config.traceBatchConcurrency ?? this.traceBatchConcurrency; if (this.traceBatchConcurrency < 1) throw new Error("Trace batch concurrency must be positive."); this.debug = config.debug ?? this.debug; this.batchIngestCaller = new AsyncCaller$1({ maxRetries: 2, maxConcurrency: this.traceBatchConcurrency, ...config.callerOptions ?? {}, onFailedResponseHook: handle429, debug: config.debug ?? this.debug }); this.hideInputs = config.hideInputs ?? config.anonymizer ?? defaultConfig.hideInputs; this.hideOutputs = config.hideOutputs ?? config.anonymizer ?? defaultConfig.hideOutputs; this.autoBatchTracing = config.autoBatchTracing ?? this.autoBatchTracing; this.blockOnRootRunFinalization = config.blockOnRootRunFinalization ?? this.blockOnRootRunFinalization; this.batchSizeBytesLimit = config.batchSizeBytesLimit; this.fetchOptions = config.fetchOptions || {}; this.manualFlushMode = config.manualFlushMode ?? this.manualFlushMode; } static getDefaultClientConfig() { const apiKey = getLangSmithEnvironmentVariable("API_KEY"); const apiUrl = getLangSmithEnvironmentVariable("ENDPOINT") ?? 
"https://api.smith.langchain.com"; const hideInputs = getLangSmithEnvironmentVariable("HIDE_INPUTS") === "true"; const hideOutputs = getLangSmithEnvironmentVariable("HIDE_OUTPUTS") === "true"; return { apiUrl, apiKey, webUrl: void 0, hideInputs, hideOutputs }; } getHostUrl() { if (this.webUrl) return this.webUrl; else if (isLocalhost(this.apiUrl)) { this.webUrl = "http://localhost:3000"; return this.webUrl; } else if (this.apiUrl.endsWith("/api/v1")) { this.webUrl = this.apiUrl.replace("/api/v1", ""); return this.webUrl; } else if (this.apiUrl.includes("/api") && !this.apiUrl.split(".", 1)[0].endsWith("api")) { this.webUrl = this.apiUrl.replace("/api", ""); return this.webUrl; } else if (this.apiUrl.split(".", 1)[0].includes("dev")) { this.webUrl = "https://dev.smith.langchain.com"; return this.webUrl; } else if (this.apiUrl.split(".", 1)[0].includes("eu")) { this.webUrl = "https://eu.smith.langchain.com"; return this.webUrl; } else if (this.apiUrl.split(".", 1)[0].includes("beta")) { this.webUrl = "https://beta.smith.langchain.com"; return this.webUrl; } else { this.webUrl = "https://smith.langchain.com"; return this.webUrl; } } get headers() { const headers = { "User-Agent": `langsmith-js/${__version__}` }; if (this.apiKey) headers["x-api-key"] = `${this.apiKey}`; return headers; } async processInputs(inputs) { if (this.hideInputs === false) return inputs; if (this.hideInputs === true) return {}; if (typeof this.hideInputs === "function") return this.hideInputs(inputs); return inputs; } async processOutputs(outputs) { if (this.hideOutputs === false) return outputs; if (this.hideOutputs === true) return {}; if (typeof this.hideOutputs === "function") return this.hideOutputs(outputs); return outputs; } async prepareRunCreateOrUpdateInputs(run) { const runParams = { ...run }; if (runParams.inputs !== void 0) runParams.inputs = await this.processInputs(runParams.inputs); if (runParams.outputs !== void 0) runParams.outputs = await this.processOutputs(runParams.outputs); return runParams; } async _getResponse(path, queryParams) { const paramsString = queryParams?.toString() ?? ""; const url = `${this.apiUrl}${path}?${paramsString}`; const response = await this.caller.call(_getFetchImplementation(this.debug), url, { method: "GET", headers: this.headers, signal: AbortSignal.timeout(this.timeout_ms), ...this.fetchOptions }); await raiseForStatus(response, `Failed to fetch ${path}`); return response; } async _get(path, queryParams) { const response = await this._getResponse(path, queryParams); return response.json(); } async *_getPaginated(path, queryParams = new URLSearchParams(), transform) { let offset = Number(queryParams.get("offset")) || 0; const limit = Number(queryParams.get("limit")) || 100; while (true) { queryParams.set("offset", String(offset)); queryParams.set("limit", String(limit)); const url = `${this.apiUrl}${path}?${queryParams}`; const response = await this.caller.call(_getFetchImplementation(this.debug), url, { method: "GET", headers: this.headers, signal: AbortSignal.timeout(this.timeout_ms), ...this.fetchOptions }); await raiseForStatus(response, `Failed to fetch ${path}`); const items = transform ? transform(await response.json()) : await response.json(); if (items.length === 0) break; yield items; if (items.length < limit) break; offset += items.length; } } async *_getCursorPaginatedList(path, body = null, requestMethod = "POST", dataKey = "runs") { const bodyParams = body ? 
{ ...body } : {}; while (true) { const response = await this.caller.call(_getFetchImplementation(this.debug), `${this.apiUrl}${path}`, { method: requestMethod, headers: { ...this.headers, "Content-Type": "application/json" }, signal: AbortSignal.timeout(this.timeout_ms), ...this.fetchOptions, body: JSON.stringify(bodyParams) }); const responseBody = await response.json(); if (!responseBody) break; if (!responseBody[dataKey]) break; yield responseBody[dataKey]; const cursors = responseBody.cursors; if (!cursors) break; if (!cursors.next) break; bodyParams.cursor = cursors.next; } } _shouldSample() { if (this.tracingSampleRate === void 0) return true; return Math.random() < this.tracingSampleRate; } _filterForSampling(runs, patch$2 = false) { if (this.tracingSampleRate === void 0) return runs; if (patch$2) { const sampled = []; for (const run of runs) if (!this.filteredPostUuids.has(run.id)) sampled.push(run); else this.filteredPostUuids.delete(run.id); return sampled; } else { const sampled = []; for (const run of runs) { const traceId = run.trace_id ?? run.id; if (this.filteredPostUuids.has(traceId)) continue; if (run.id === traceId) if (this._shouldSample()) sampled.push(run); else this.filteredPostUuids.add(traceId); else sampled.push(run); } return sampled; } } async _getBatchSizeLimitBytes() { const serverInfo = await this._ensureServerInfo(); return this.batchSizeBytesLimit ?? serverInfo.batch_ingest_config?.size_limit_bytes ?? DEFAULT_BATCH_SIZE_LIMIT_BYTES; } async _getMultiPartSupport() { const serverInfo = await this._ensureServerInfo(); return serverInfo.instance_flags?.dataset_examples_multipart_enabled ?? false; } drainAutoBatchQueue(batchSizeLimit) { const promises = []; while (this.autoBatchQueue.items.length > 0) { const [batch, done] = this.autoBatchQueue.pop(batchSizeLimit); if (!batch.length) { done(); break; } const batchPromise = this._processBatch(batch, done).catch(console.error); promises.push(batchPromise); } return Promise.all(promises); } async _processBatch(batch, done) { if (!batch.length) { done(); return; } try { const ingestParams = { runCreates: batch.filter((item) => item.action === "create").map((item) => item.item), runUpdates: batch.filter((item) => item.action === "update").map((item) => item.item) }; const serverInfo = await this._ensureServerInfo(); if (serverInfo?.batch_ingest_config?.use_multipart_endpoint) await this.multipartIngestRuns(ingestParams); else await this.batchIngestRuns(ingestParams); } finally { done(); } } async processRunOperation(item) { clearTimeout(this.autoBatchTimeout); this.autoBatchTimeout = void 0; if (item.action === "create") item.item = mergeRuntimeEnvIntoRunCreate(item.item); const itemPromise = this.autoBatchQueue.push(item); if (this.manualFlushMode) return itemPromise; const sizeLimitBytes = await this._getBatchSizeLimitBytes(); if (this.autoBatchQueue.sizeBytes > sizeLimitBytes) this.drainAutoBatchQueue(sizeLimitBytes); if (this.autoBatchQueue.items.length > 0) this.autoBatchTimeout = setTimeout(() => { this.autoBatchTimeout = void 0; this.drainAutoBatchQueue(sizeLimitBytes); }, this.autoBatchAggregationDelayMs); return itemPromise; } async _getServerInfo() { const response = await this.caller.call(_getFetchImplementation(this.debug), `${this.apiUrl}/info`, { method: "GET", headers: { Accept: "application/json" }, signal: AbortSignal.timeout(SERVER_INFO_REQUEST_TIMEOUT), ...this.fetchOptions }); await raiseForStatus(response, "get server info"); const json = await response.json(); if (this.debug) console.log("\n=== 
LangSmith Server Configuration ===\n" + JSON.stringify(json, null, 2) + "\n"); return json; } async _ensureServerInfo() { if (this._getServerInfoPromise === void 0) this._getServerInfoPromise = (async () => { if (this._serverInfo === void 0) try { this._serverInfo = await this._getServerInfo(); } catch (e) { console.warn(`[WARNING]: LangSmith failed to fetch info on supported operations with status code ${e.status}. Falling back to batch operations and default limits.`); } return this._serverInfo ?? {}; })(); return this._getServerInfoPromise.then((serverInfo) => { if (this._serverInfo === void 0) this._getServerInfoPromise = void 0; return serverInfo; }); } async _getSettings() { if (!this.settings) this.settings = this._get("/settings"); return await this.settings; } /** * Flushes current queued traces. */ async flush() { const sizeLimitBytes = await this._getBatchSizeLimitBytes(); await this.drainAutoBatchQueue(sizeLimitBytes); } async createRun(run) { if (!this._filterForSampling([run]).length) return; const headers = { ...this.headers, "Content-Type": "application/json" }; const session_name = run.project_name; delete run.project_name; const runCreate = await this.prepareRunCreateOrUpdateInputs({ session_name, ...run, start_time: run.start_time ?? Date.now() }); if (this.autoBatchTracing && runCreate.trace_id !== void 0 && runCreate.dotted_order !== void 0) { this.processRunOperation({ action: "create", item: runCreate }).catch(console.error); return; } const mergedRunCreateParam = mergeRuntimeEnvIntoRunCreate(runCreate); const response = await this.caller.call(_getFetchImplementation(this.debug), `${this.apiUrl}/runs`, { method: "POST", headers, body: serialize(mergedRunCreateParam, `Creating run with id: ${mergedRunCreateParam.id}`), signal: AbortSignal.timeout(this.timeout_ms), ...this.fetchOptions }); await raiseForStatus(response, "create run", true); } /** * Batch ingest/upsert multiple runs in the Langsmith system. * @param runs */ async batchIngestRuns({ runCreates, runUpdates }) { if (runCreates === void 0 && runUpdates === void 0) return; let preparedCreateParams = await Promise.all(runCreates?.map((create) => this.prepareRunCreateOrUpdateInputs(create)) ?? []); let preparedUpdateParams = await Promise.all(runUpdates?.map((update) => this.prepareRunCreateOrUpdateInputs(update)) ?? 
[]); if (preparedCreateParams.length > 0 && preparedUpdateParams.length > 0) { const createById = preparedCreateParams.reduce((params, run) => { if (!run.id) return params; params[run.id] = run; return params; }, {}); const standaloneUpdates = []; for (const updateParam of preparedUpdateParams) if (updateParam.id !== void 0 && createById[updateParam.id]) createById[updateParam.id] = { ...createById[updateParam.id], ...updateParam }; else standaloneUpdates.push(updateParam); preparedCreateParams = Object.values(createById); preparedUpdateParams = standaloneUpdates; } const rawBatch = { post: preparedCreateParams, patch: preparedUpdateParams }; if (!rawBatch.post.length && !rawBatch.patch.length) return; const batchChunks = { post: [], patch: [] }; for (const k of ["post", "patch"]) { const key = k; const batchItems = rawBatch[key].reverse(); let batchItem = batchItems.pop(); while (batchItem !== void 0) { batchChunks[key].push(batchItem); batchItem = batchItems.pop(); } } if (batchChunks.post.length > 0 || batchChunks.patch.length > 0) { const runIds = batchChunks.post.map((item) => item.id).concat(batchChunks.patch.map((item) => item.id)).join(","); await this._postBatchIngestRuns(serialize(batchChunks, `Ingesting runs with ids: ${runIds}`)); } } async _postBatchIngestRuns(body) { const headers = { ...this.headers, "Content-Type": "application/json", Accept: "application/json" }; const response = await this.batchIngestCaller.call(_getFetchImplementation(this.debug), `${this.apiUrl}/runs/batch`, { method: "POST", headers, body, signal: AbortSignal.timeout(this.timeout_ms), ...this.fetchOptions }); await raiseForStatus(response, "batch create run", true); } /** * Batch ingest/upsert multiple runs in the Langsmith system. * @param runs */ async multipartIngestRuns({ runCreates, runUpdates }) { if (runCreates === void 0 && runUpdates === void 0) return; const allAttachments = {}; let preparedCreateParams = []; for (const create of runCreates ?? []) { const preparedCreate = await this.prepareRunCreateOrUpdateInputs(create); if (preparedCreate.id !== void 0 && preparedCreate.attachments !== void 0) allAttachments[preparedCreate.id] = preparedCreate.attachments; delete preparedCreate.attachments; preparedCreateParams.push(preparedCreate); } let preparedUpdateParams = []; for (const update of runUpdates ?? 
[]) preparedUpdateParams.push(await this.prepareRunCreateOrUpdateInputs(update)); const invalidRunCreate = preparedCreateParams.find((runCreate) => { return runCreate.trace_id === void 0 || runCreate.dotted_order === void 0; }); if (invalidRunCreate !== void 0) throw new Error(`Multipart ingest requires "trace_id" and "dotted_order" to be set when creating a run`); const invalidRunUpdate = preparedUpdateParams.find((runUpdate) => { return runUpdate.trace_id === void 0 || runUpdate.dotted_order === void 0; }); if (invalidRunUpdate !== void 0) throw new Error(`Multipart ingest requires "trace_id" and "dotted_order" to be set when updating a run`); if (preparedCreateParams.length > 0 && preparedUpdateParams.length > 0) { const createById = preparedCreateParams.reduce((params, run) => { if (!run.id) return params; params[run.id] = run; return params; }, {}); const standaloneUpdates = []; for (const updateParam of preparedUpdateParams) if (updateParam.id !== void 0 && createById[updateParam.id]) createById[updateParam.id] = { ...createById[updateParam.id], ...updateParam }; else standaloneUpdates.push(updateParam); preparedCreateParams = Object.values(createById); preparedUpdateParams = standaloneUpdates; } if (preparedCreateParams.length === 0 && preparedUpdateParams.length === 0) return; const accumulatedContext = []; const accumulatedParts = []; for (const [method, payloads] of [["post", preparedCreateParams], ["patch", preparedUpdateParams]]) for (const originalPayload of payloads) { const { inputs, outputs, events, attachments,...payload } = originalPayload; const fields = { inputs, outputs, events }; const stringifiedPayload = serialize(payload, `Serializing for multipart ingestion of run with id: ${payload.id}`); accumulatedParts.push({ name: `${method}.${payload.id}`, payload: new Blob([stringifiedPayload], { type: `application/json; length=${stringifiedPayload.length}` }) }); for (const [key, value] of Object.entries(fields)) { if (value === void 0) continue; const stringifiedValue = serialize(value, `Serializing ${key} for multipart ingestion of run with id: ${payload.id}`); accumulatedParts.push({ name: `${method}.${payload.id}.${key}`, payload: new Blob([stringifiedValue], { type: `application/json; length=${stringifiedValue.length}` }) }); } if (payload.id !== void 0) { const attachments$1 = allAttachments[payload.id]; if (attachments$1) { delete allAttachments[payload.id]; for (const [name, attachment] of Object.entries(attachments$1)) { let contentType; let content; if (Array.isArray(attachment)) [contentType, content] = attachment; else { contentType = attachment.mimeType; content = attachment.data; } if (name.includes(".")) { console.warn(`Skipping attachment '${name}' for run ${payload.id}: Invalid attachment name. Attachment names must not contain periods ('.'). 
Please rename the attachment and try again.`); continue; } accumulatedParts.push({ name: `attachment.${payload.id}.${name}`, payload: new Blob([content], { type: `${contentType}; length=${content.byteLength}` }) }); } } } accumulatedContext.push(`trace=${payload.trace_id},id=${payload.id}`); } await this._sendMultipartRequest(accumulatedParts, accumulatedContext.join("; ")); } async _createNodeFetchBody(parts, boundary) { const chunks = []; for (const part of parts) { chunks.push(new Blob([`--${boundary}\r\n`])); chunks.push(new Blob([`Content-Disposition: form-data; name="${part.name}"\r\n`, `Content-Type: ${part.payload.type}\r\n\r\n`])); chunks.push(part.payload); chunks.push(new Blob(["\r\n"])); } chunks.push(new Blob([`--${boundary}--\r\n`])); const body = new Blob(chunks); const arrayBuffer = await body.arrayBuffer(); return arrayBuffer; } async _createMultipartStream(parts, boundary) { const encoder$1 = new TextEncoder(); const stream = new ReadableStream({ async start(controller) { const writeChunk = async (chunk) => { if (typeof chunk === "string") controller.enqueue(encoder$1.encode(chunk)); else controller.enqueue(chunk); }; for (const part of parts) { await writeChunk(`--${boundary}\r\n`); await writeChunk(`Content-Disposition: form-data; name="${part.name}"\r\n`); await writeChunk(`Content-Type: ${part.payload.type}\r\n\r\n`); const payloadStream = part.payload.stream(); const reader = payloadStream.getReader(); try { let result; while (!(result = await reader.read()).done) controller.enqueue(result.value); } finally { reader.releaseLock(); } await writeChunk("\r\n"); } await writeChunk(`--${boundary}--\r\n`); controller.close(); } }); return stream; } async _sendMultipartRequest(parts, context) { try { const boundary = "----LangSmithFormBoundary" + Math.random().toString(36).slice(2); const body = await (_globalFetchImplementationIsNodeFetch() ? 
this._createNodeFetchBody(parts, boundary) : this._createMultipartStream(parts, boundary)); const res = await this.batchIngestCaller.call(_getFetchImplementation(this.debug), `${this.apiUrl}/runs/multipart`, { method: "POST", headers: { ...this.headers, "Content-Type": `multipart/form-data; boundary=${boundary}` }, body, duplex: "half", signal: AbortSignal.timeout(this.timeout_ms), ...this.fetchOptions }); await raiseForStatus(res, "ingest multipart runs", true); } catch (e) { console.warn(`${e.message.trim()}\n\nContext: ${context}`); } } async updateRun(runId, run) { assertUuid(runId); if (run.inputs) run.inputs = await this.processInputs(run.inputs); if (run.outputs) run.outputs = await this.processOutputs(run.outputs); const data = { ...run, id: runId }; if (!this._filterForSampling([data], true).length) return; if (this.autoBatchTracing && data.trace_id !== void 0 && data.dotted_order !== void 0) { if (run.end_time !== void 0 && data.parent_run_id === void 0 && this.blockOnRootRunFinalization && !this.manualFlushMode) { await this.processRunOperation({ action: "update", item: data }).catch(console.error); return; } else this.processRunOperation({ action: "update", item: data }).catch(console.error); return; } const headers = { ...this.headers, "Content-Type": "application/json" }; const response = await this.caller.call(_getFetchImplementation(this.debug), `${this.apiUrl}/runs/${runId}`, { method: "PATCH", headers, body: serialize(run, `Serializing payload to update run with id: ${runId}`), signal: AbortSignal.timeout(this.timeout_ms), ...this.fetchOptions }); await raiseForStatus(response, "update run", true); } async readRun(runId, { loadChildRuns } = { loadChildRuns: false }) { assertUuid(runId); let run = await this._get(`/runs/${runId}`); if (loadChildRuns) run = await this._loadChildRuns(run); return run; } async getRunUrl({ runId, run, projectOpts }) { if (run !== void 0) { let sessionId; if (run.session_id) sessionId = run.session_id; else if (projectOpts?.projectName) sessionId = (await this.readProject({ projectName: projectOpts?.projectName })).id; else if (projectOpts?.projectId) sessionId = projectOpts?.projectId; else { const project = await this.readProject({ projectName: getLangSmithEnvironmentVariable("PROJECT") || "default" }); sessionId = project.id; } const tenantId = await this._getTenantId(); return `${this.getHostUrl()}/o/${tenantId}/projects/p/${sessionId}/r/${run.id}?poll=true`; } else if (runId !== void 0) { const run_ = await this.readRun(runId); if (!run_.app_path) throw new Error(`Run ${runId} has no app_path`); const baseUrl = this.getHostUrl(); return `${baseUrl}${run_.app_path}`; } else throw new Error("Must provide either runId or run"); } async _loadChildRuns(run) { const childRuns = await toArray(this.listRuns({ isRoot: false, projectId: run.session_id, traceId: run.trace_id })); const treemap = {}; const runs = {}; childRuns.sort((a, b) => (a?.dotted_order ?? "").localeCompare(b?.dotted_order ?? "")); for (const childRun of childRuns) { if (childRun.parent_run_id === null || childRun.parent_run_id === void 0) throw new Error(`Child run ${childRun.id} has no parent`); if (childRun.dotted_order?.startsWith(run.dotted_order ?? 
"") && childRun.id !== run.id) { if (!(childRun.parent_run_id in treemap)) treemap[childRun.parent_run_id] = []; treemap[childRun.parent_run_id].push(childRun); runs[childRun.id] = childRun; } } run.child_runs = treemap[run.id] || []; for (const runId in treemap) if (runId !== run.id) runs[runId].child_runs = treemap[runId]; return run; } /** * List runs from the LangSmith server. * @param projectId - The ID of the project to filter by. * @param projectName - The name of the project to filter by. * @param parentRunId - The ID of the parent run to filter by. * @param traceId - The ID of the trace to filter by. * @param referenceExampleId - The ID of the reference example to filter by. * @param startTime - The start time to filter by. * @param isRoot - Indicates whether to only return root runs. * @param runType - The run type to filter by. * @param error - Indicates whether to filter by error runs. * @param id - The ID of the run to filter by. * @param query - The query string to filter by. * @param filter - The filter string to apply to the run spans. * @param traceFilter - The filter string to apply on the root run of the trace. * @param treeFilter - The filter string to apply on other runs in the trace. * @param limit - The maximum number of runs to retrieve. * @returns {AsyncIterable} - The runs. * * @example * // List all runs in a project * const projectRuns = client.listRuns({ projectName: "" }); * * @example * // List LLM and Chat runs in the last 24 hours * const todaysLLMRuns = client.listRuns({ * projectName: "", * start_time: new Date(Date.now() - 24 * 60 * 60 * 1000), * run_type: "llm", * }); * * @example * // List traces in a project * const rootRuns = client.listRuns({ * projectName: "", * execution_order: 1, * }); * * @example * // List runs without errors * const correctRuns = client.listRuns({ * projectName: "", * error: false, * }); * * @example * // List runs by run ID * const runIds = [ * "a36092d2-4ad5-4fb4-9c0d-0dba9a2ed836", * "9398e6be-964f-4aa4-8ae9-ad78cd4b7074", * ]; * const selectedRuns = client.listRuns({ run_ids: runIds }); * * @example * // List all "chain" type runs that took more than 10 seconds and had `total_tokens` greater than 5000 * const chainRuns = client.listRuns({ * projectName: "", * filter: 'and(eq(run_type, "chain"), gt(latency, 10), gt(total_tokens, 5000))', * }); * * @example * // List all runs called "extractor" whose root of the trace was assigned feedback "user_score" score of 1 * const goodExtractorRuns = client.listRuns({ * projectName: "", * filter: 'eq(name, "extractor")', * traceFilter: 'and(eq(feedback_key, "user_score"), eq(feedback_score, 1))', * }); * * @example * // List all runs that started after a specific timestamp and either have "error" not equal to null or a "Correctness" feedback score equal to 0 * const complexRuns = client.listRuns({ * projectName: "", * filter: 'and(gt(start_time, "2023-07-15T12:34:56Z"), or(neq(error, null), and(eq(feedback_key, "Correctness"), eq(feedback_score, 0.0))))', * }); * * @example * // List all runs where `tags` include "experimental" or "beta" and `latency` is greater than 2 seconds * const taggedRuns = client.listRuns({ * projectName: "", * filter: 'and(or(has(tags, "experimental"), has(tags, "beta")), gt(latency, 2))', * }); */ async *listRuns(props) { const { projectId, projectName, parentRunId, traceId, referenceExampleId, startTime, executionOrder, isRoot, runType, error, id, query, filter, traceFilter, treeFilter, limit, select, order } = props; let projectIds = []; if (projectId) 
projectIds = Array.isArray(projectId) ? projectId : [projectId]; if (projectName) { const projectNames = Array.isArray(projectName) ? projectName : [projectName]; const projectIds_ = await Promise.all(projectNames.map((name) => this.readProject({ projectName: name }).then((project) => project.id))); projectIds.push(...projectIds_); } const default_select = [ "app_path", "completion_cost", "completion_tokens", "dotted_order", "end_time", "error", "events", "extra", "feedback_stats", "first_token_time", "id", "inputs", "name", "outputs", "parent_run_id", "parent_run_ids", "prompt_cost", "prompt_tokens", "reference_example_id", "run_type", "session_id", "start_time", "status", "tags", "total_cost", "total_tokens", "trace_id" ]; const body = { session: projectIds.length ? projectIds : null, run_type: runType, reference_example: referenceExampleId, query, filter, trace_filter: traceFilter, tree_filter: treeFilter, execution_order: executionOrder, parent_run: parentRunId, start_time: startTime ? startTime.toISOString() : null, error, id, limit, trace: traceId, select: select ? select : default_select, is_root: isRoot, order }; let runsYielded = 0; for await (const runs of this._getCursorPaginatedList("/runs/query", body)) if (limit) { if (runsYielded >= limit) break; if (runs.length + runsYielded > limit) { const newRuns = runs.slice(0, limit - runsYielded); yield* newRuns; break; } runsYielded += runs.length; yield* runs; } else yield* runs; } async *listGroupRuns(props) { const { projectId, projectName, groupBy, filter, startTime, endTime, limit, offset } = props; const sessionId = projectId || (await this.readProject({ projectName })).id; const baseBody = { session_id: sessionId, group_by: groupBy, filter, start_time: startTime ? startTime.toISOString() : null, end_time: endTime ? 
endTime.toISOString() : null, limit: Number(limit) || 100 }; let currentOffset = Number(offset) || 0; const path = "/runs/group"; const url = `${this.apiUrl}${path}`; while (true) { const currentBody = { ...baseBody, offset: currentOffset }; const filteredPayload = Object.fromEntries(Object.entries(currentBody).filter(([_, value]) => value !== void 0)); const response = await this.caller.call(_getFetchImplementation(), url, { method: "POST", headers: { ...this.headers, "Content-Type": "application/json" }, body: JSON.stringify(filteredPayload), signal: AbortSignal.timeout(this.timeout_ms), ...this.fetchOptions }); await raiseForStatus(response, `Failed to fetch ${path}`); const items = await response.json(); const { groups, total } = items; if (groups.length === 0) break; for (const thread of groups) yield thread; currentOffset += groups.length; if (currentOffset >= total) break; } } async getRunStats({ id, trace, parentRun, runType, projectNames, projectIds, referenceExampleIds, startTime, endTime, error, query, filter, traceFilter, treeFilter, isRoot, dataSourceType }) { let projectIds_ = projectIds || []; if (projectNames) projectIds_ = [...projectIds || [], ...await Promise.all(projectNames.map((name) => this.readProject({ projectName: name }).then((project) => project.id)))]; const payload = { id, trace, parent_run: parentRun, run_type: runType, session: projectIds_, reference_example: referenceExampleIds, start_time: startTime, end_time: endTime, error, query, filter, trace_filter: traceFilter, tree_filter: treeFilter, is_root: isRoot, data_source_type: dataSourceType }; const filteredPayload = Object.fromEntries(Object.entries(payload).filter(([_, value]) => value !== void 0)); const response = await this.caller.call(_getFetchImplementation(this.debug), `${this.apiUrl}/runs/stats`, { method: "POST", headers: this.headers, body: JSON.stringify(filteredPayload), signal: AbortSignal.timeout(this.timeout_ms), ...this.fetchOptions }); const result = await response.json(); return result; } async shareRun(runId, { shareId } = {}) { const data = { run_id: runId, share_token: shareId || v4_default() }; assertUuid(runId); const response = await this.caller.call(_getFetchImplementation(this.debug), `${this.apiUrl}/runs/${runId}/share`, { method: "PUT", headers: this.headers, body: JSON.stringify(data), signal: AbortSignal.timeout(this.timeout_ms), ...this.fetchOptions }); const result = await response.json(); if (result === null || !("share_token" in result)) throw new Error("Invalid response from server"); return `${this.getHostUrl()}/public/${result["share_token"]}/r`; } async unshareRun(runId) { assertUuid(runId); const response = await this.caller.call(_getFetchImplementation(this.debug), `${this.apiUrl}/runs/${runId}/share`, { method: "DELETE", headers: this.headers, signal: AbortSignal.timeout(this.timeout_ms), ...this.fetchOptions }); await raiseForStatus(response, "unshare run", true); } async readRunSharedLink(runId) { assertUuid(runId); const response = await this.caller.call(_getFetchImplementation(this.debug), `${this.apiUrl}/runs/${runId}/share`, { method: "GET", headers: this.headers, signal: AbortSignal.timeout(this.timeout_ms), ...this.fetchOptions }); const result = await response.json(); if (result === null || !("share_token" in result)) return void 0; return `${this.getHostUrl()}/public/${result["share_token"]}/r`; } async listSharedRuns(shareToken, { runIds } = {}) { const queryParams = new URLSearchParams({ share_token: shareToken }); if (runIds !== void 0) for (const 
runId of runIds) queryParams.append("id", runId); assertUuid(shareToken); const response = await this.caller.call(_getFetchImplementation(this.debug), `${this.apiUrl}/public/${shareToken}/runs?${queryParams}`, { method: "GET", headers: this.headers, signal: AbortSignal.timeout(this.timeout_ms), ...this.fetchOptions }); const runs = await response.json(); return runs; } async readDatasetSharedSchema(datasetId, datasetName) { if (!datasetId && !datasetName) throw new Error("Either datasetId or datasetName must be given"); if (!datasetId) { const dataset = await this.readDataset({ datasetName }); datasetId = dataset.id; } assertUuid(datasetId); const response = await this.caller.call(_getFetchImplementation(this.debug), `${this.apiUrl}/datasets/${datasetId}/share`, { method: "GET", headers: this.headers, signal: AbortSignal.timeout(this.timeout_ms), ...this.fetchOptions }); const shareSchema = await response.json(); shareSchema.url = `${this.getHostUrl()}/public/${shareSchema.share_token}/d`; return shareSchema; } async shareDataset(datasetId, datasetName) { if (!datasetId && !datasetName) throw new Error("Either datasetId or datasetName must be given"); if (!datasetId) { const dataset = await this.readDataset({ datasetName }); datasetId = dataset.id; } const data = { dataset_id: datasetId }; assertUuid(datasetId); const response = await this.caller.call(_getFetchImplementation(this.debug), `${this.apiUrl}/datasets/${datasetId}/share`, { method: "PUT", headers: this.headers, body: JSON.stringify(data), signal: AbortSignal.timeout(this.timeout_ms), ...this.fetchOptions }); const shareSchema = await response.json(); shareSchema.url = `${this.getHostUrl()}/public/${shareSchema.share_token}/d`; return shareSchema; } async unshareDataset(datasetId) { assertUuid(datasetId); const response = await this.caller.call(_getFetchImplementation(this.debug), `${this.apiUrl}/datasets/${datasetId}/share`, { method: "DELETE", headers: this.headers, signal: AbortSignal.timeout(this.timeout_ms), ...this.fetchOptions }); await raiseForStatus(response, "unshare dataset", true); } async readSharedDataset(shareToken) { assertUuid(shareToken); const response = await this.caller.call(_getFetchImplementation(this.debug), `${this.apiUrl}/public/${shareToken}/datasets`, { method: "GET", headers: this.headers, signal: AbortSignal.timeout(this.timeout_ms), ...this.fetchOptions }); const dataset = await response.json(); return dataset; } /** * Get shared examples. * * @param {string} shareToken The share token to get examples for. A share token is the UUID (or LangSmith URL, including UUID) generated when explicitly marking an example as public. * @param {Object} [options] Additional options for listing the examples. * @param {string[] | undefined} [options.exampleIds] A list of example IDs to filter by. * @returns {Promise} The shared examples.
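* @example
* // Minimal usage sketch; the share token and example ID below are placeholder UUIDs
* // (reused from the other examples in this file), not real values:
* const sharedExamples = await client.listSharedExamples(
*   "123e4567-e89b-12d3-a456-426614174000",
*   { exampleIds: ["9398e6be-964f-4aa4-8ae9-ad78cd4b7074"] },
* );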
*/ async listSharedExamples(shareToken, options) { const params = {}; if (options?.exampleIds) params.id = options.exampleIds; const urlParams = new URLSearchParams(); Object.entries(params).forEach(([key, value]) => { if (Array.isArray(value)) value.forEach((v) => urlParams.append(key, v)); else urlParams.append(key, value); }); const response = await this.caller.call(_getFetchImplementation(this.debug), `${this.apiUrl}/public/${shareToken}/examples?${urlParams.toString()}`, { method: "GET", headers: this.headers, signal: AbortSignal.timeout(this.timeout_ms), ...this.fetchOptions }); const result = await response.json(); if (!response.ok) { if ("detail" in result) throw new Error(`Failed to list shared examples.\nStatus: ${response.status}\nMessage: ${Array.isArray(result.detail) ? result.detail.join("\n") : "Unspecified error"}`); throw new Error(`Failed to list shared examples: ${response.status} ${response.statusText}`); } return result.map((example) => ({ ...example, _hostUrl: this.getHostUrl() })); } async createProject({ projectName, description = null, metadata = null, upsert = false, projectExtra = null, referenceDatasetId = null }) { const upsert_ = upsert ? `?upsert=true` : ""; const endpoint = `${this.apiUrl}/sessions${upsert_}`; const extra = projectExtra || {}; if (metadata) extra["metadata"] = metadata; const body = { name: projectName, extra, description }; if (referenceDatasetId !== null) body["reference_dataset_id"] = referenceDatasetId; const response = await this.caller.call(_getFetchImplementation(this.debug), endpoint, { method: "POST", headers: { ...this.headers, "Content-Type": "application/json" }, body: JSON.stringify(body), signal: AbortSignal.timeout(this.timeout_ms), ...this.fetchOptions }); await raiseForStatus(response, "create project"); const result = await response.json(); return result; } async updateProject(projectId, { name = null, description = null, metadata = null, projectExtra = null, endTime = null }) { const endpoint = `${this.apiUrl}/sessions/${projectId}`; let extra = projectExtra; if (metadata) extra = { ...extra || {}, metadata }; const body = { name, extra, description, end_time: endTime ? 
new Date(endTime).toISOString() : null }; const response = await this.caller.call(_getFetchImplementation(this.debug), endpoint, { method: "PATCH", headers: { ...this.headers, "Content-Type": "application/json" }, body: JSON.stringify(body), signal: AbortSignal.timeout(this.timeout_ms), ...this.fetchOptions }); await raiseForStatus(response, "update project"); const result = await response.json(); return result; } async hasProject({ projectId, projectName }) { let path = "/sessions"; const params = new URLSearchParams(); if (projectId !== void 0 && projectName !== void 0) throw new Error("Must provide either projectName or projectId, not both"); else if (projectId !== void 0) { assertUuid(projectId); path += `/${projectId}`; } else if (projectName !== void 0) params.append("name", projectName); else throw new Error("Must provide projectName or projectId"); const response = await this.caller.call(_getFetchImplementation(this.debug), `${this.apiUrl}${path}?${params}`, { method: "GET", headers: this.headers, signal: AbortSignal.timeout(this.timeout_ms), ...this.fetchOptions }); try { const result = await response.json(); if (!response.ok) return false; if (Array.isArray(result)) return result.length > 0; return true; } catch (e) { return false; } } async readProject({ projectId, projectName, includeStats }) { let path = "/sessions"; const params = new URLSearchParams(); if (projectId !== void 0 && projectName !== void 0) throw new Error("Must provide either projectName or projectId, not both"); else if (projectId !== void 0) { assertUuid(projectId); path += `/${projectId}`; } else if (projectName !== void 0) params.append("name", projectName); else throw new Error("Must provide projectName or projectId"); if (includeStats !== void 0) params.append("include_stats", includeStats.toString()); const response = await this._get(path, params); let result; if (Array.isArray(response)) { if (response.length === 0) throw new Error(`Project[id=${projectId}, name=${projectName}] not found`); result = response[0]; } else result = response; return result; } async getProjectUrl({ projectId, projectName }) { if (projectId === void 0 && projectName === void 0) throw new Error("Must provide either projectName or projectId"); const project = await this.readProject({ projectId, projectName }); const tenantId = await this._getTenantId(); return `${this.getHostUrl()}/o/${tenantId}/projects/p/${project.id}`; } async getDatasetUrl({ datasetId, datasetName }) { if (datasetId === void 0 && datasetName === void 0) throw new Error("Must provide either datasetName or datasetId"); const dataset = await this.readDataset({ datasetId, datasetName }); const tenantId = await this._getTenantId(); return `${this.getHostUrl()}/o/${tenantId}/datasets/${dataset.id}`; } async _getTenantId() { if (this._tenantId !== null) return this._tenantId; const queryParams = new URLSearchParams({ limit: "1" }); for await (const projects of this._getPaginated("/sessions", queryParams)) { this._tenantId = projects[0].tenant_id; return projects[0].tenant_id; } throw new Error("No projects found to resolve tenant."); } async *listProjects({ projectIds, name, nameContains, referenceDatasetId, referenceDatasetName, referenceFree, metadata } = {}) { const params = new URLSearchParams(); if (projectIds !== void 0) for (const projectId of projectIds) params.append("id", projectId); if (name !== void 0) params.append("name", name); if (nameContains !== void 0) params.append("name_contains", nameContains); if (referenceDatasetId !== void 0) 
params.append("reference_dataset", referenceDatasetId); else if (referenceDatasetName !== void 0) { const dataset = await this.readDataset({ datasetName: referenceDatasetName }); params.append("reference_dataset", dataset.id); } if (referenceFree !== void 0) params.append("reference_free", referenceFree.toString()); if (metadata !== void 0) params.append("metadata", JSON.stringify(metadata)); for await (const projects of this._getPaginated("/sessions", params)) yield* projects; } async deleteProject({ projectId, projectName }) { let projectId_; if (projectId === void 0 && projectName === void 0) throw new Error("Must provide projectName or projectId"); else if (projectId !== void 0 && projectName !== void 0) throw new Error("Must provide either projectName or projectId, not both"); else if (projectId === void 0) projectId_ = (await this.readProject({ projectName })).id; else projectId_ = projectId; assertUuid(projectId_); const response = await this.caller.call(_getFetchImplementation(this.debug), `${this.apiUrl}/sessions/${projectId_}`, { method: "DELETE", headers: this.headers, signal: AbortSignal.timeout(this.timeout_ms), ...this.fetchOptions }); await raiseForStatus(response, `delete session ${projectId_} (${projectName})`, true); } async uploadCsv({ csvFile, fileName, inputKeys, outputKeys, description, dataType, name }) { const url = `${this.apiUrl}/datasets/upload`; const formData = new FormData(); formData.append("file", csvFile, fileName); inputKeys.forEach((key) => { formData.append("input_keys", key); }); outputKeys.forEach((key) => { formData.append("output_keys", key); }); if (description) formData.append("description", description); if (dataType) formData.append("data_type", dataType); if (name) formData.append("name", name); const response = await this.caller.call(_getFetchImplementation(this.debug), url, { method: "POST", headers: this.headers, body: formData, signal: AbortSignal.timeout(this.timeout_ms), ...this.fetchOptions }); await raiseForStatus(response, "upload CSV"); const result = await response.json(); return result; } async createDataset(name, { description, dataType, inputsSchema, outputsSchema, metadata } = {}) { const body = { name, description, extra: metadata ? 
{ metadata } : void 0 }; if (dataType) body.data_type = dataType; if (inputsSchema) body.inputs_schema_definition = inputsSchema; if (outputsSchema) body.outputs_schema_definition = outputsSchema; const response = await this.caller.call(_getFetchImplementation(this.debug), `${this.apiUrl}/datasets`, { method: "POST", headers: { ...this.headers, "Content-Type": "application/json" }, body: JSON.stringify(body), signal: AbortSignal.timeout(this.timeout_ms), ...this.fetchOptions }); await raiseForStatus(response, "create dataset"); const result = await response.json(); return result; } async readDataset({ datasetId, datasetName }) { let path = "/datasets"; const params = new URLSearchParams({ limit: "1" }); if (datasetId !== void 0 && datasetName !== void 0) throw new Error("Must provide either datasetName or datasetId, not both"); else if (datasetId !== void 0) { assertUuid(datasetId); path += `/${datasetId}`; } else if (datasetName !== void 0) params.append("name", datasetName); else throw new Error("Must provide datasetName or datasetId"); const response = await this._get(path, params); let result; if (Array.isArray(response)) { if (response.length === 0) throw new Error(`Dataset[id=${datasetId}, name=${datasetName}] not found`); result = response[0]; } else result = response; return result; } async hasDataset({ datasetId, datasetName }) { try { await this.readDataset({ datasetId, datasetName }); return true; } catch (e) { if (e instanceof Error && e.message.toLocaleLowerCase().includes("not found")) return false; throw e; } } async diffDatasetVersions({ datasetId, datasetName, fromVersion, toVersion }) { let datasetId_ = datasetId; if (datasetId_ === void 0 && datasetName === void 0) throw new Error("Must provide either datasetName or datasetId"); else if (datasetId_ !== void 0 && datasetName !== void 0) throw new Error("Must provide either datasetName or datasetId, not both"); else if (datasetId_ === void 0) { const dataset = await this.readDataset({ datasetName }); datasetId_ = dataset.id; } const urlParams = new URLSearchParams({ from_version: typeof fromVersion === "string" ? fromVersion : fromVersion.toISOString(), to_version: typeof toVersion === "string" ? 
toVersion : toVersion.toISOString() }); const response = await this._get(`/datasets/${datasetId_}/versions/diff`, urlParams); return response; } async readDatasetOpenaiFinetuning({ datasetId, datasetName }) { const path = "/datasets"; if (datasetId !== void 0) {} else if (datasetName !== void 0) datasetId = (await this.readDataset({ datasetName })).id; else throw new Error("Must provide either datasetName or datasetId"); const response = await this._getResponse(`${path}/${datasetId}/openai_ft`); const datasetText = await response.text(); const dataset = datasetText.trim().split("\n").map((line) => JSON.parse(line)); return dataset; } async *listDatasets({ limit = 100, offset = 0, datasetIds, datasetName, datasetNameContains, metadata } = {}) { const path = "/datasets"; const params = new URLSearchParams({ limit: limit.toString(), offset: offset.toString() }); if (datasetIds !== void 0) for (const id_ of datasetIds) params.append("id", id_); if (datasetName !== void 0) params.append("name", datasetName); if (datasetNameContains !== void 0) params.append("name_contains", datasetNameContains); if (metadata !== void 0) params.append("metadata", JSON.stringify(metadata)); for await (const datasets of this._getPaginated(path, params)) yield* datasets; } /** * Update a dataset * @param props The dataset details to update * @returns The updated dataset */ async updateDataset(props) { const { datasetId, datasetName,...update } = props; if (!datasetId && !datasetName) throw new Error("Must provide either datasetName or datasetId"); const _datasetId = datasetId ?? (await this.readDataset({ datasetName })).id; assertUuid(_datasetId); const response = await this.caller.call(_getFetchImplementation(this.debug), `${this.apiUrl}/datasets/${_datasetId}`, { method: "PATCH", headers: { ...this.headers, "Content-Type": "application/json" }, body: JSON.stringify(update), signal: AbortSignal.timeout(this.timeout_ms), ...this.fetchOptions }); await raiseForStatus(response, "update dataset"); return await response.json(); } /** * Updates a tag on a dataset. * * If the tag is already assigned to a different version of this dataset, * the tag will be moved to the new version. The as_of parameter is used to * determine which version of the dataset to apply the new tags to. * * It must be an exact version of the dataset to succeed. You can * use the "readDatasetVersion" method to find the exact version * to apply the tags to. * @param params.datasetId The ID of the dataset to update. Must be provided if "datasetName" is not provided. * @param params.datasetName The name of the dataset to update. Must be provided if "datasetId" is not provided. * @param params.asOf The timestamp of the dataset to apply the new tags to. * @param params.tag The new tag to apply to the dataset. */ async updateDatasetTag(props) { const { datasetId, datasetName, asOf, tag } = props; if (!datasetId && !datasetName) throw new Error("Must provide either datasetName or datasetId"); const _datasetId = datasetId ?? (await this.readDataset({ datasetName })).id; assertUuid(_datasetId); const response = await this.caller.call(_getFetchImplementation(this.debug), `${this.apiUrl}/datasets/${_datasetId}/tags`, { method: "PUT", headers: { ...this.headers, "Content-Type": "application/json" }, body: JSON.stringify({ as_of: typeof asOf === "string" ? 
asOf : asOf.toISOString(), tag }), signal: AbortSignal.timeout(this.timeout_ms), ...this.fetchOptions }); await raiseForStatus(response, "update dataset tags"); } async deleteDataset({ datasetId, datasetName }) { let path = "/datasets"; let datasetId_ = datasetId; if (datasetId !== void 0 && datasetName !== void 0) throw new Error("Must provide either datasetName or datasetId, not both"); else if (datasetName !== void 0) { const dataset = await this.readDataset({ datasetName }); datasetId_ = dataset.id; } if (datasetId_ !== void 0) { assertUuid(datasetId_); path += `/${datasetId_}`; } else throw new Error("Must provide datasetName or datasetId"); const response = await this.caller.call(_getFetchImplementation(this.debug), this.apiUrl + path, { method: "DELETE", headers: this.headers, signal: AbortSignal.timeout(this.timeout_ms), ...this.fetchOptions }); await raiseForStatus(response, `delete ${path}`); await response.json(); } async indexDataset({ datasetId, datasetName, tag }) { let datasetId_ = datasetId; if (!datasetId_ && !datasetName) throw new Error("Must provide either datasetName or datasetId"); else if (datasetId_ && datasetName) throw new Error("Must provide either datasetName or datasetId, not both"); else if (!datasetId_) { const dataset = await this.readDataset({ datasetName }); datasetId_ = dataset.id; } assertUuid(datasetId_); const data = { tag }; const response = await this.caller.call(_getFetchImplementation(this.debug), `${this.apiUrl}/datasets/${datasetId_}/index`, { method: "POST", headers: { ...this.headers, "Content-Type": "application/json" }, body: JSON.stringify(data), signal: AbortSignal.timeout(this.timeout_ms), ...this.fetchOptions }); await raiseForStatus(response, "index dataset"); await response.json(); } /** * Lets you run a similarity search query on a dataset. * * Requires the dataset to be indexed. Please see the `indexDataset` method to set up indexing. * * @param inputs The input on which to run the similarity search. Must have the * same schema as the dataset. * * @param datasetId The dataset to search for similar examples. * * @param limit The maximum number of examples to return. Will return the top `limit` most * similar examples in order of most similar to least similar. If no similar * examples are found, random examples will be returned. * * @param filter A filter string to apply to the search. Only examples will be returned that * match the filter string. Some examples of filters * * - eq(metadata.mykey, "value") * - and(neq(metadata.my.nested.key, "value"), neq(metadata.mykey, "value")) * - or(eq(metadata.mykey, "value"), eq(metadata.mykey, "othervalue")) * * @returns A list of similar examples. 
* * * @example * dataset_id = "123e4567-e89b-12d3-a456-426614174000" * inputs = {"text": "How many people live in Berlin?"} * limit = 5 * examples = await client.similarExamples(inputs, dataset_id, limit) */ async similarExamples(inputs, datasetId, limit, { filter } = {}) { const data = { limit, inputs }; if (filter !== void 0) data["filter"] = filter; assertUuid(datasetId); const response = await this.caller.call(_getFetchImplementation(this.debug), `${this.apiUrl}/datasets/${datasetId}/search`, { method: "POST", headers: { ...this.headers, "Content-Type": "application/json" }, body: JSON.stringify(data), signal: AbortSignal.timeout(this.timeout_ms), ...this.fetchOptions }); await raiseForStatus(response, "fetch similar examples"); const result = await response.json(); return result["examples"]; } async createExample(inputsOrUpdate, outputs, options) { if (isExampleCreate(inputsOrUpdate)) { if (outputs !== void 0 || options !== void 0) throw new Error("Cannot provide outputs or options when using ExampleCreate object"); } let datasetId_ = outputs ? options?.datasetId : inputsOrUpdate.dataset_id; const datasetName_ = outputs ? options?.datasetName : inputsOrUpdate.dataset_name; if (datasetId_ === void 0 && datasetName_ === void 0) throw new Error("Must provide either datasetName or datasetId"); else if (datasetId_ !== void 0 && datasetName_ !== void 0) throw new Error("Must provide either datasetName or datasetId, not both"); else if (datasetId_ === void 0) { const dataset = await this.readDataset({ datasetName: datasetName_ }); datasetId_ = dataset.id; } const createdAt_ = (outputs ? options?.createdAt : inputsOrUpdate.created_at) || /* @__PURE__ */ new Date(); let data; if (!isExampleCreate(inputsOrUpdate)) data = { inputs: inputsOrUpdate, outputs, created_at: createdAt_?.toISOString(), id: options?.exampleId, metadata: options?.metadata, split: options?.split, source_run_id: options?.sourceRunId, use_source_run_io: options?.useSourceRunIO, use_source_run_attachments: options?.useSourceRunAttachments, attachments: options?.attachments }; else data = inputsOrUpdate; const response = await this._uploadExamplesMultipart(datasetId_, [data]); const example = await this.readExample(response.example_ids?.[0] ?? 
v4_default()); return example; } async createExamples(propsOrUploads) { if (Array.isArray(propsOrUploads)) { if (propsOrUploads.length === 0) return []; const uploads = propsOrUploads; let datasetId_$1 = uploads[0].dataset_id; const datasetName_$1 = uploads[0].dataset_name; if (datasetId_$1 === void 0 && datasetName_$1 === void 0) throw new Error("Must provide either datasetName or datasetId"); else if (datasetId_$1 !== void 0 && datasetName_$1 !== void 0) throw new Error("Must provide either datasetName or datasetId, not both"); else if (datasetId_$1 === void 0) { const dataset = await this.readDataset({ datasetName: datasetName_$1 }); datasetId_$1 = dataset.id; } const response$1 = await this._uploadExamplesMultipart(datasetId_$1, uploads); const examples$1 = await Promise.all(response$1.example_ids.map((id) => this.readExample(id))); return examples$1; } const { inputs, outputs, metadata, splits, sourceRunIds, useSourceRunIOs, useSourceRunAttachments, attachments, exampleIds, datasetId, datasetName } = propsOrUploads; if (inputs === void 0) throw new Error("Must provide inputs when using legacy parameters"); let datasetId_ = datasetId; const datasetName_ = datasetName; if (datasetId_ === void 0 && datasetName_ === void 0) throw new Error("Must provide either datasetName or datasetId"); else if (datasetId_ !== void 0 && datasetName_ !== void 0) throw new Error("Must provide either datasetName or datasetId, not both"); else if (datasetId_ === void 0) { const dataset = await this.readDataset({ datasetName: datasetName_ }); datasetId_ = dataset.id; } const formattedExamples = inputs.map((input, idx) => { return { dataset_id: datasetId_, inputs: input, outputs: outputs?.[idx], metadata: metadata?.[idx], split: splits?.[idx], id: exampleIds?.[idx], attachments: attachments?.[idx], source_run_id: sourceRunIds?.[idx], use_source_run_io: useSourceRunIOs?.[idx], use_source_run_attachments: useSourceRunAttachments?.[idx] }; }); const response = await this._uploadExamplesMultipart(datasetId_, formattedExamples); const examples = await Promise.all(response.example_ids.map((id) => this.readExample(id))); return examples; } async createLLMExample(input, generation, options) { return this.createExample({ input }, { output: generation }, options); } async createChatExample(input, generations, options) { const finalInput = input.map((message) => { if (isLangChainMessage(message)) return convertLangChainMessageToExample(message); return message; }); const finalOutput = isLangChainMessage(generations) ? 
convertLangChainMessageToExample(generations) : generations; return this.createExample({ input: finalInput }, { output: finalOutput }, options); } async readExample(exampleId) { assertUuid(exampleId); const path = `/examples/${exampleId}`; const rawExample = await this._get(path); const { attachment_urls,...rest } = rawExample; const example = rest; if (attachment_urls) example.attachments = Object.entries(attachment_urls).reduce((acc, [key, value]) => { acc[key.slice(11)] = { presigned_url: value.presigned_url, mime_type: value.mime_type }; return acc; }, {}); return example; } async *listExamples({ datasetId, datasetName, exampleIds, asOf, splits, inlineS3Urls, metadata, limit, offset, filter, includeAttachments } = {}) { let datasetId_; if (datasetId !== void 0 && datasetName !== void 0) throw new Error("Must provide either datasetName or datasetId, not both"); else if (datasetId !== void 0) datasetId_ = datasetId; else if (datasetName !== void 0) { const dataset = await this.readDataset({ datasetName }); datasetId_ = dataset.id; } else throw new Error("Must provide a datasetName or datasetId"); const params = new URLSearchParams({ dataset: datasetId_ }); const dataset_version = asOf ? typeof asOf === "string" ? asOf : asOf?.toISOString() : void 0; if (dataset_version) params.append("as_of", dataset_version); const inlineS3Urls_ = inlineS3Urls ?? true; params.append("inline_s3_urls", inlineS3Urls_.toString()); if (exampleIds !== void 0) for (const id_ of exampleIds) params.append("id", id_); if (splits !== void 0) for (const split of splits) params.append("splits", split); if (metadata !== void 0) { const serializedMetadata = JSON.stringify(metadata); params.append("metadata", serializedMetadata); } if (limit !== void 0) params.append("limit", limit.toString()); if (offset !== void 0) params.append("offset", offset.toString()); if (filter !== void 0) params.append("filter", filter); if (includeAttachments === true) [ "attachment_urls", "outputs", "metadata" ].forEach((field) => params.append("select", field)); let i = 0; for await (const rawExamples of this._getPaginated("/examples", params)) { for (const rawExample of rawExamples) { const { attachment_urls,...rest } = rawExample; const example = rest; if (attachment_urls) example.attachments = Object.entries(attachment_urls).reduce((acc, [key, value]) => { acc[key.slice(11)] = { presigned_url: value.presigned_url, mime_type: value.mime_type || void 0 }; return acc; }, {}); yield example; i++; } if (limit !== void 0 && i >= limit) break; } } async deleteExample(exampleId) { assertUuid(exampleId); const path = `/examples/${exampleId}`; const response = await this.caller.call(_getFetchImplementation(this.debug), this.apiUrl + path, { method: "DELETE", headers: this.headers, signal: AbortSignal.timeout(this.timeout_ms), ...this.fetchOptions }); await raiseForStatus(response, `delete ${path}`); await response.json(); } async updateExample(exampleIdOrUpdate, update) { let exampleId; if (update) exampleId = exampleIdOrUpdate; else exampleId = exampleIdOrUpdate.id; assertUuid(exampleId); let updateToUse; if (update) updateToUse = { id: exampleId, ...update }; else updateToUse = exampleIdOrUpdate; let datasetId; if (updateToUse.dataset_id !== void 0) datasetId = updateToUse.dataset_id; else { const example = await this.readExample(exampleId); datasetId = example.dataset_id; } return this._updateExamplesMultipart(datasetId, [updateToUse]); } async updateExamples(update) { let datasetId; if (update[0].dataset_id === void 0) { const example = 
await this.readExample(update[0].id); datasetId = example.dataset_id; } else datasetId = update[0].dataset_id; return this._updateExamplesMultipart(datasetId, update); } /** * Get dataset version by closest date or exact tag. * * Use this to resolve the nearest version to a given timestamp or for a given tag. * * @param options The options for getting the dataset version * @param options.datasetId The ID of the dataset * @param options.datasetName The name of the dataset * @param options.asOf The timestamp of the dataset to retrieve * @param options.tag The tag of the dataset to retrieve * @returns The dataset version */ async readDatasetVersion({ datasetId, datasetName, asOf, tag }) { let resolvedDatasetId; if (!datasetId) { const dataset = await this.readDataset({ datasetName }); resolvedDatasetId = dataset.id; } else resolvedDatasetId = datasetId; assertUuid(resolvedDatasetId); if (asOf && tag || !asOf && !tag) throw new Error("Exactly one of asOf and tag must be specified."); const params = new URLSearchParams(); if (asOf !== void 0) params.append("as_of", typeof asOf === "string" ? asOf : asOf.toISOString()); if (tag !== void 0) params.append("tag", tag); const response = await this.caller.call(_getFetchImplementation(this.debug), `${this.apiUrl}/datasets/${resolvedDatasetId}/version?${params.toString()}`, { method: "GET", headers: { ...this.headers }, signal: AbortSignal.timeout(this.timeout_ms), ...this.fetchOptions }); await raiseForStatus(response, "read dataset version"); return await response.json(); } async listDatasetSplits({ datasetId, datasetName, asOf }) { let datasetId_; if (datasetId === void 0 && datasetName === void 0) throw new Error("Must provide dataset name or ID"); else if (datasetId !== void 0 && datasetName !== void 0) throw new Error("Must provide either datasetName or datasetId, not both"); else if (datasetId === void 0) { const dataset = await this.readDataset({ datasetName }); datasetId_ = dataset.id; } else datasetId_ = datasetId; assertUuid(datasetId_); const params = new URLSearchParams(); const dataset_version = asOf ? typeof asOf === "string" ? asOf : asOf?.toISOString() : void 0; if (dataset_version) params.append("as_of", dataset_version); const response = await this._get(`/datasets/${datasetId_}/splits`, params); return response; } async updateDatasetSplits({ datasetId, datasetName, splitName, exampleIds, remove = false }) { let datasetId_; if (datasetId === void 0 && datasetName === void 0) throw new Error("Must provide dataset name or ID"); else if (datasetId !== void 0 && datasetName !== void 0) throw new Error("Must provide either datasetName or datasetId, not both"); else if (datasetId === void 0) { const dataset = await this.readDataset({ datasetName }); datasetId_ = dataset.id; } else datasetId_ = datasetId; assertUuid(datasetId_); const data = { split_name: splitName, examples: exampleIds.map((id) => { assertUuid(id); return id; }), remove }; const response = await this.caller.call(_getFetchImplementation(this.debug), `${this.apiUrl}/datasets/${datasetId_}/splits`, { method: "PUT", headers: { ...this.headers, "Content-Type": "application/json" }, body: JSON.stringify(data), signal: AbortSignal.timeout(this.timeout_ms), ...this.fetchOptions }); await raiseForStatus(response, "update dataset splits", true); } /** * @deprecated This method is deprecated and will be removed in future LangSmith versions, use `evaluate` from `langsmith/evaluation` instead. 
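* @example
* // Rough migration sketch only; `myTarget`, `myEvaluator`, and the dataset name are
* // illustrative placeholders, not values defined in this module:
* import { evaluate } from "langsmith/evaluation";
* await evaluate(myTarget, { data: "my-dataset", evaluators: [myEvaluator] });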
*/ async evaluateRun(run, evaluator, { sourceInfo, loadChildRuns, referenceExample } = { loadChildRuns: false }) { warnOnce("This method is deprecated and will be removed in future LangSmith versions, use `evaluate` from `langsmith/evaluation` instead."); let run_; if (typeof run === "string") run_ = await this.readRun(run, { loadChildRuns }); else if (typeof run === "object" && "id" in run) run_ = run; else throw new Error(`Invalid run type: ${typeof run}`); if (run_.reference_example_id !== null && run_.reference_example_id !== void 0) referenceExample = await this.readExample(run_.reference_example_id); const feedbackResult = await evaluator.evaluateRun(run_, referenceExample); const [_, feedbacks] = await this._logEvaluationFeedback(feedbackResult, run_, sourceInfo); return feedbacks[0]; } async createFeedback(runId, key, { score, value, correction, comment, sourceInfo, feedbackSourceType = "api", sourceRunId, feedbackId, feedbackConfig, projectId, comparativeExperimentId }) { if (!runId && !projectId) throw new Error("One of runId or projectId must be provided"); if (runId && projectId) throw new Error("Only one of runId or projectId can be provided"); const feedback_source = { type: feedbackSourceType ?? "api", metadata: sourceInfo ?? {} }; if (sourceRunId !== void 0 && feedback_source?.metadata !== void 0 && !feedback_source.metadata["__run"]) feedback_source.metadata["__run"] = { run_id: sourceRunId }; if (feedback_source?.metadata !== void 0 && feedback_source.metadata["__run"]?.run_id !== void 0) assertUuid(feedback_source.metadata["__run"].run_id); const feedback = { id: feedbackId ?? v4_default(), run_id: runId, key, score: _formatFeedbackScore(score), value, correction, comment, feedback_source, comparative_experiment_id: comparativeExperimentId, feedbackConfig, session_id: projectId }; const url = `${this.apiUrl}/feedback`; const response = await this.caller.call(_getFetchImplementation(this.debug), url, { method: "POST", headers: { ...this.headers, "Content-Type": "application/json" }, body: JSON.stringify(feedback), signal: AbortSignal.timeout(this.timeout_ms), ...this.fetchOptions }); await raiseForStatus(response, "create feedback", true); return feedback; } async updateFeedback(feedbackId, { score, value, correction, comment }) { const feedbackUpdate = {}; if (score !== void 0 && score !== null) feedbackUpdate["score"] = _formatFeedbackScore(score); if (value !== void 0 && value !== null) feedbackUpdate["value"] = value; if (correction !== void 0 && correction !== null) feedbackUpdate["correction"] = correction; if (comment !== void 0 && comment !== null) feedbackUpdate["comment"] = comment; assertUuid(feedbackId); const response = await this.caller.call(_getFetchImplementation(this.debug), `${this.apiUrl}/feedback/${feedbackId}`, { method: "PATCH", headers: { ...this.headers, "Content-Type": "application/json" }, body: JSON.stringify(feedbackUpdate), signal: AbortSignal.timeout(this.timeout_ms), ...this.fetchOptions }); await raiseForStatus(response, "update feedback", true); } async readFeedback(feedbackId) { assertUuid(feedbackId); const path = `/feedback/${feedbackId}`; const response = await this._get(path); return response; } async deleteFeedback(feedbackId) { assertUuid(feedbackId); const path = `/feedback/${feedbackId}`; const response = await this.caller.call(_getFetchImplementation(this.debug), this.apiUrl + path, { method: "DELETE", headers: this.headers, signal: AbortSignal.timeout(this.timeout_ms), ...this.fetchOptions }); await raiseForStatus(response, 
`delete ${path}`); await response.json(); } async *listFeedback({ runIds, feedbackKeys, feedbackSourceTypes } = {}) { const queryParams = new URLSearchParams(); if (runIds) queryParams.append("run", runIds.join(",")); if (feedbackKeys) for (const key of feedbackKeys) queryParams.append("key", key); if (feedbackSourceTypes) for (const type of feedbackSourceTypes) queryParams.append("source", type); for await (const feedbacks of this._getPaginated("/feedback", queryParams)) yield* feedbacks; } /** * Creates a presigned feedback token and URL. * * The token can be used to authorize feedback metrics without * needing an API key. This is useful for giving browser-based * applications the ability to submit feedback without needing * to expose an API key. * * @param runId The ID of the run. * @param feedbackKey The feedback key. * @param options Additional options for the token. * @param options.expiration The expiration time for the token. * * @returns A promise that resolves to a FeedbackIngestToken. */ async createPresignedFeedbackToken(runId, feedbackKey, { expiration, feedbackConfig } = {}) { const body = { run_id: runId, feedback_key: feedbackKey, feedback_config: feedbackConfig }; if (expiration) { if (typeof expiration === "string") body["expires_at"] = expiration; else if (expiration?.hours || expiration?.minutes || expiration?.days) body["expires_in"] = expiration; } else body["expires_in"] = { hours: 3 }; const response = await this.caller.call(_getFetchImplementation(this.debug), `${this.apiUrl}/feedback/tokens`, { method: "POST", headers: { ...this.headers, "Content-Type": "application/json" }, body: JSON.stringify(body), signal: AbortSignal.timeout(this.timeout_ms), ...this.fetchOptions }); const result = await response.json(); return result; } async createComparativeExperiment({ name, experimentIds, referenceDatasetId, createdAt, description, metadata, id }) { if (experimentIds.length === 0) throw new Error("At least one experiment is required"); if (!referenceDatasetId) referenceDatasetId = (await this.readProject({ projectId: experimentIds[0] })).reference_dataset_id; if (referenceDatasetId == null) throw new Error("A reference dataset is required"); const body = { id, name, experiment_ids: experimentIds, reference_dataset_id: referenceDatasetId, description, created_at: (createdAt ?? /* @__PURE__ */ new Date())?.toISOString(), extra: {} }; if (metadata) body.extra["metadata"] = metadata; const response = await this.caller.call(_getFetchImplementation(this.debug), `${this.apiUrl}/datasets/comparative`, { method: "POST", headers: { ...this.headers, "Content-Type": "application/json" }, body: JSON.stringify(body), signal: AbortSignal.timeout(this.timeout_ms), ...this.fetchOptions }); return await response.json(); } /** * Retrieves a list of presigned feedback tokens for a given run ID. * @param runId The ID of the run. * @returns An async iterable of FeedbackIngestToken objects.
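* @example
* // Usage sketch; `runId` is assumed to be the UUID of an existing run:
* for await (const token of client.listPresignedFeedbackTokens(runId)) {
*   console.log(token);
* }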
*/ async *listPresignedFeedbackTokens(runId) { assertUuid(runId); const params = new URLSearchParams({ run_id: runId }); for await (const tokens of this._getPaginated("/feedback/tokens", params)) yield* tokens; } _selectEvalResults(results) { let results_; if ("results" in results) results_ = results.results; else if (Array.isArray(results)) results_ = results; else results_ = [results]; return results_; } async _logEvaluationFeedback(evaluatorResponse, run, sourceInfo) { const evalResults = this._selectEvalResults(evaluatorResponse); const feedbacks = []; for (const res of evalResults) { let sourceInfo_ = sourceInfo || {}; if (res.evaluatorInfo) sourceInfo_ = { ...res.evaluatorInfo, ...sourceInfo_ }; let runId_ = null; if (res.targetRunId) runId_ = res.targetRunId; else if (run) runId_ = run.id; feedbacks.push(await this.createFeedback(runId_, res.key, { score: res.score, value: res.value, comment: res.comment, correction: res.correction, sourceInfo: sourceInfo_, sourceRunId: res.sourceRunId, feedbackConfig: res.feedbackConfig, feedbackSourceType: "model" })); } return [evalResults, feedbacks]; } async logEvaluationFeedback(evaluatorResponse, run, sourceInfo) { const [results] = await this._logEvaluationFeedback(evaluatorResponse, run, sourceInfo); return results; } /** * API for managing annotation queues */ /** * List the annotation queues on the LangSmith API. * @param options - The options for listing annotation queues * @param options.queueIds - The IDs of the queues to filter by * @param options.name - The name of the queue to filter by * @param options.nameContains - The substring that the queue name should contain * @param options.limit - The maximum number of queues to return * @returns An iterator of AnnotationQueue objects */ async *listAnnotationQueues(options = {}) { const { queueIds, name, nameContains, limit } = options; const params = new URLSearchParams(); if (queueIds) queueIds.forEach((id, i) => { assertUuid(id, `queueIds[${i}]`); params.append("ids", id); }); if (name) params.append("name", name); if (nameContains) params.append("name_contains", nameContains); params.append("limit", (limit !== void 0 ? Math.min(limit, 100) : 100).toString()); let count = 0; for await (const queues of this._getPaginated("/annotation-queues", params)) { yield* queues; count++; if (limit !== void 0 && count >= limit) break; } } /** * Create an annotation queue on the LangSmith API. * @param options - The options for creating an annotation queue * @param options.name - The name of the annotation queue * @param options.description - The description of the annotation queue * @param options.queueId - The ID of the annotation queue * @returns The created AnnotationQueue object */ async createAnnotationQueue(options) { const { name, description, queueId, rubricInstructions } = options; const body = { name, description, id: queueId || v4_default(), rubric_instructions: rubricInstructions }; const response = await this.caller.call(_getFetchImplementation(this.debug), `${this.apiUrl}/annotation-queues`, { method: "POST", headers: { ...this.headers, "Content-Type": "application/json" }, body: JSON.stringify(Object.fromEntries(Object.entries(body).filter(([_, v]) => v !== void 0))), signal: AbortSignal.timeout(this.timeout_ms), ...this.fetchOptions }); await raiseForStatus(response, "create annotation queue"); const data = await response.json(); return data; } /** * Read an annotation queue with the specified queue ID. 
* @param queueId - The ID of the annotation queue to read * @returns The AnnotationQueueWithDetails object */ async readAnnotationQueue(queueId) { const response = await this.caller.call(_getFetchImplementation(this.debug), `${this.apiUrl}/annotation-queues/${assertUuid(queueId, "queueId")}`, { method: "GET", headers: this.headers, signal: AbortSignal.timeout(this.timeout_ms), ...this.fetchOptions }); await raiseForStatus(response, "read annotation queue"); const data = await response.json(); return data; } /** * Update an annotation queue with the specified queue ID. * @param queueId - The ID of the annotation queue to update * @param options - The options for updating the annotation queue * @param options.name - The new name for the annotation queue * @param options.description - The new description for the annotation queue */ async updateAnnotationQueue(queueId, options) { const { name, description, rubricInstructions } = options; const response = await this.caller.call(_getFetchImplementation(this.debug), `${this.apiUrl}/annotation-queues/${assertUuid(queueId, "queueId")}`, { method: "PATCH", headers: { ...this.headers, "Content-Type": "application/json" }, body: JSON.stringify({ name, description, rubric_instructions: rubricInstructions }), signal: AbortSignal.timeout(this.timeout_ms), ...this.fetchOptions }); await raiseForStatus(response, "update annotation queue"); } /** * Delete an annotation queue with the specified queue ID. * @param queueId - The ID of the annotation queue to delete */ async deleteAnnotationQueue(queueId) { const response = await this.caller.call(_getFetchImplementation(this.debug), `${this.apiUrl}/annotation-queues/${assertUuid(queueId, "queueId")}`, { method: "DELETE", headers: { ...this.headers, Accept: "application/json" }, signal: AbortSignal.timeout(this.timeout_ms), ...this.fetchOptions }); await raiseForStatus(response, "delete annotation queue"); } /** * Add runs to an annotation queue with the specified queue ID. * @param queueId - The ID of the annotation queue * @param runIds - The IDs of the runs to be added to the annotation queue */ async addRunsToAnnotationQueue(queueId, runIds) { const response = await this.caller.call(_getFetchImplementation(this.debug), `${this.apiUrl}/annotation-queues/${assertUuid(queueId, "queueId")}/runs`, { method: "POST", headers: { ...this.headers, "Content-Type": "application/json" }, body: JSON.stringify(runIds.map((id, i) => assertUuid(id, `runIds[${i}]`).toString())), signal: AbortSignal.timeout(this.timeout_ms), ...this.fetchOptions }); await raiseForStatus(response, "add runs to annotation queue"); } /** * Get a run from an annotation queue at the specified index. * @param queueId - The ID of the annotation queue * @param index - The index of the run to retrieve * @returns A Promise that resolves to a RunWithAnnotationQueueInfo object * @throws {Error} If the run is not found at the given index or for other API-related errors */ async getRunFromAnnotationQueue(queueId, index) { const baseUrl = `/annotation-queues/${assertUuid(queueId, "queueId")}/run`; const response = await this.caller.call(_getFetchImplementation(this.debug), `${this.apiUrl}${baseUrl}/${index}`, { method: "GET", headers: this.headers, signal: AbortSignal.timeout(this.timeout_ms), ...this.fetchOptions }); await raiseForStatus(response, "get run from annotation queue"); return await response.json(); } /** * Delete a run from an annotation queue.
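* @example
* An illustrative sketch (not from the source); both identifiers are placeholders
* for real UUIDs returned by the API.
* ```
* import { Client } from "langsmith";
*
* const client = new Client();
* const queueId = "...";    // placeholder: annotation queue UUID
* const queueRunId = "..."; // placeholder: UUID of the queued run to remove
* await client.deleteRunFromAnnotationQueue(queueId, queueRunId);
* ```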
* @param queueId - The ID of the annotation queue to delete the run from * @param queueRunId - The ID of the run to delete from the annotation queue */ async deleteRunFromAnnotationQueue(queueId, queueRunId) { const response = await this.caller.call(_getFetchImplementation(this.debug), `${this.apiUrl}/annotation-queues/${assertUuid(queueId, "queueId")}/runs/${assertUuid(queueRunId, "queueRunId")}`, { method: "DELETE", headers: { ...this.headers, Accept: "application/json" }, signal: AbortSignal.timeout(this.timeout_ms), ...this.fetchOptions }); await raiseForStatus(response, "delete run from annotation queue"); } /** * Get the size of an annotation queue. * @param queueId - The ID of the annotation queue */ async getSizeFromAnnotationQueue(queueId) { const response = await this.caller.call(_getFetchImplementation(this.debug), `${this.apiUrl}/annotation-queues/${assertUuid(queueId, "queueId")}/size`, { method: "GET", headers: this.headers, signal: AbortSignal.timeout(this.timeout_ms), ...this.fetchOptions }); await raiseForStatus(response, "get size from annotation queue"); return await response.json(); } async _currentTenantIsOwner(owner) { const settings = await this._getSettings(); return owner == "-" || settings.tenant_handle === owner; } async _ownerConflictError(action, owner) { const settings = await this._getSettings(); return new Error(`Cannot ${action} for another tenant.\n Current tenant: ${settings.tenant_handle}\n Requested tenant: ${owner}`); } async _getLatestCommitHash(promptOwnerAndName) { const res = await this.caller.call(_getFetchImplementation(this.debug), `${this.apiUrl}/commits/${promptOwnerAndName}/?limit=1&offset=0`, { method: "GET", headers: this.headers, signal: AbortSignal.timeout(this.timeout_ms), ...this.fetchOptions }); const json = await res.json(); if (!res.ok) { const detail = typeof json.detail === "string" ? json.detail : JSON.stringify(json.detail); const error = new Error(`Error ${res.status}: ${res.statusText}\n${detail}`); error.statusCode = res.status; throw error; } if (json.commits.length === 0) return void 0; return json.commits[0].commit_hash; } async _likeOrUnlikePrompt(promptIdentifier, like) { const [owner, promptName, _] = parsePromptIdentifier(promptIdentifier); const response = await this.caller.call(_getFetchImplementation(this.debug), `${this.apiUrl}/likes/${owner}/${promptName}`, { method: "POST", body: JSON.stringify({ like }), headers: { ...this.headers, "Content-Type": "application/json" }, signal: AbortSignal.timeout(this.timeout_ms), ...this.fetchOptions }); await raiseForStatus(response, `${like ? 
"like" : "unlike"} prompt`); return await response.json(); } async _getPromptUrl(promptIdentifier) { const [owner, promptName, commitHash] = parsePromptIdentifier(promptIdentifier); if (!await this._currentTenantIsOwner(owner)) if (commitHash !== "latest") return `${this.getHostUrl()}/hub/${owner}/${promptName}/${commitHash.substring(0, 8)}`; else return `${this.getHostUrl()}/hub/${owner}/${promptName}`; else { const settings = await this._getSettings(); if (commitHash !== "latest") return `${this.getHostUrl()}/prompts/${promptName}/${commitHash.substring(0, 8)}?organizationId=${settings.id}`; else return `${this.getHostUrl()}/prompts/${promptName}?organizationId=${settings.id}`; } } async promptExists(promptIdentifier) { const prompt = await this.getPrompt(promptIdentifier); return !!prompt; } async likePrompt(promptIdentifier) { return this._likeOrUnlikePrompt(promptIdentifier, true); } async unlikePrompt(promptIdentifier) { return this._likeOrUnlikePrompt(promptIdentifier, false); } async *listCommits(promptOwnerAndName) { for await (const commits of this._getPaginated(`/commits/${promptOwnerAndName}/`, new URLSearchParams(), (res) => res.commits)) yield* commits; } async *listPrompts(options) { const params = new URLSearchParams(); params.append("sort_field", options?.sortField ?? "updated_at"); params.append("sort_direction", "desc"); params.append("is_archived", (!!options?.isArchived).toString()); if (options?.isPublic !== void 0) params.append("is_public", options.isPublic.toString()); if (options?.query) params.append("query", options.query); for await (const prompts of this._getPaginated("/repos", params, (res) => res.repos)) yield* prompts; } async getPrompt(promptIdentifier) { const [owner, promptName, _] = parsePromptIdentifier(promptIdentifier); const response = await this.caller.call(_getFetchImplementation(this.debug), `${this.apiUrl}/repos/${owner}/${promptName}`, { method: "GET", headers: this.headers, signal: AbortSignal.timeout(this.timeout_ms), ...this.fetchOptions }); if (response.status === 404) return null; await raiseForStatus(response, "get prompt"); const result = await response.json(); if (result.repo) return result.repo; else return null; } async createPrompt(promptIdentifier, options) { const settings = await this._getSettings(); if (options?.isPublic && !settings.tenant_handle) throw new Error(`Cannot create a public prompt without first\n creating a LangChain Hub handle. 
You can add a handle by creating a public prompt at:\n https://smith.langchain.com/prompts`); const [owner, promptName, _] = parsePromptIdentifier(promptIdentifier); if (!await this._currentTenantIsOwner(owner)) throw await this._ownerConflictError("create a prompt", owner); const data = { repo_handle: promptName, ...options?.description && { description: options.description }, ...options?.readme && { readme: options.readme }, ...options?.tags && { tags: options.tags }, is_public: !!options?.isPublic }; const response = await this.caller.call(_getFetchImplementation(this.debug), `${this.apiUrl}/repos/`, { method: "POST", headers: { ...this.headers, "Content-Type": "application/json" }, body: JSON.stringify(data), signal: AbortSignal.timeout(this.timeout_ms), ...this.fetchOptions }); await raiseForStatus(response, "create prompt"); const { repo } = await response.json(); return repo; } async createCommit(promptIdentifier, object, options) { if (!await this.promptExists(promptIdentifier)) throw new Error("Prompt does not exist, you must create it first."); const [owner, promptName, _] = parsePromptIdentifier(promptIdentifier); const resolvedParentCommitHash = options?.parentCommitHash === "latest" || !options?.parentCommitHash ? await this._getLatestCommitHash(`${owner}/${promptName}`) : options?.parentCommitHash; const payload = { manifest: JSON.parse(JSON.stringify(object)), parent_commit: resolvedParentCommitHash }; const response = await this.caller.call(_getFetchImplementation(this.debug), `${this.apiUrl}/commits/${owner}/${promptName}`, { method: "POST", headers: { ...this.headers, "Content-Type": "application/json" }, body: JSON.stringify(payload), signal: AbortSignal.timeout(this.timeout_ms), ...this.fetchOptions }); await raiseForStatus(response, "create commit"); const result = await response.json(); return this._getPromptUrl(`${owner}/${promptName}${result.commit_hash ? `:${result.commit_hash}` : ""}`); } /** * Update examples with attachments using multipart form data. 
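* @example
* A minimal sketch (not from the source); the dataset and example IDs are placeholders
* and the field values are purely illustrative.
* ```
* import { Client } from "langsmith";
*
* const client = new Client();
* const datasetId = "..."; // placeholder: UUID of the dataset
* const exampleId = "..."; // placeholder: UUID of the example to update
* await client.updateExamplesMultipart(datasetId, [{
*   id: exampleId,
*   inputs: { question: "What is 2 + 2?" },
*   outputs: { answer: "4" },
*   split: "train",
* }]);
* ```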
* @param updates List of ExampleUpdateWithAttachments objects to upsert * @returns Promise with the update response */ async updateExamplesMultipart(datasetId, updates = []) { return this._updateExamplesMultipart(datasetId, updates); } async _updateExamplesMultipart(datasetId, updates = []) { if (!await this._getMultiPartSupport()) throw new Error("Your LangSmith deployment does not allow using the multipart examples endpoint, please upgrade your deployment to the latest version."); const formData = new FormData(); for (const example of updates) { const exampleId = example.id; const exampleBody = { ...example.metadata && { metadata: example.metadata }, ...example.split && { split: example.split } }; const stringifiedExample = serialize(exampleBody, `Serializing body for example with id: ${exampleId}`); const exampleBlob = new Blob([stringifiedExample], { type: "application/json" }); formData.append(exampleId, exampleBlob); if (example.inputs) { const stringifiedInputs = serialize(example.inputs, `Serializing inputs for example with id: ${exampleId}`); const inputsBlob = new Blob([stringifiedInputs], { type: "application/json" }); formData.append(`${exampleId}.inputs`, inputsBlob); } if (example.outputs) { const stringifiedOutputs = serialize(example.outputs, `Serializing outputs while updating example with id: ${exampleId}`); const outputsBlob = new Blob([stringifiedOutputs], { type: "application/json" }); formData.append(`${exampleId}.outputs`, outputsBlob); } if (example.attachments) for (const [name, attachment] of Object.entries(example.attachments)) { let mimeType; let data; if (Array.isArray(attachment)) [mimeType, data] = attachment; else { mimeType = attachment.mimeType; data = attachment.data; } const attachmentBlob = new Blob([data], { type: `${mimeType}; length=${data.byteLength}` }); formData.append(`${exampleId}.attachment.${name}`, attachmentBlob); } if (example.attachments_operations) { const stringifiedAttachmentsOperations = serialize(example.attachments_operations, `Serializing attachments while updating example with id: ${exampleId}`); const attachmentsOperationsBlob = new Blob([stringifiedAttachmentsOperations], { type: "application/json" }); formData.append(`${exampleId}.attachments_operations`, attachmentsOperationsBlob); } } const datasetIdToUse = datasetId ?? updates[0]?.dataset_id; const response = await this.caller.call(_getFetchImplementation(this.debug), `${this.apiUrl}/v1/platform/datasets/${datasetIdToUse}/examples`, { method: "PATCH", headers: this.headers, body: formData }); const result = await response.json(); return result; } /** * Upload examples with attachments using multipart form data. * @param uploads List of ExampleUploadWithAttachments objects to upload * @returns Promise with the upload response * @deprecated This method is deprecated and will be removed in future LangSmith versions, please use `createExamples` instead */ async uploadExamplesMultipart(datasetId, uploads = []) { return this._uploadExamplesMultipart(datasetId, uploads); } async _uploadExamplesMultipart(datasetId, uploads = []) { if (!await this._getMultiPartSupport()) throw new Error("Your LangSmith deployment does not allow using the multipart examples endpoint, please upgrade your deployment to the latest version."); const formData = new FormData(); for (const example of uploads) { const exampleId = (example.id ??
v4_default()).toString(); const exampleBody = { created_at: example.created_at, ...example.metadata && { metadata: example.metadata }, ...example.split && { split: example.split }, ...example.source_run_id && { source_run_id: example.source_run_id }, ...example.use_source_run_io && { use_source_run_io: example.use_source_run_io }, ...example.use_source_run_attachments && { use_source_run_attachments: example.use_source_run_attachments } }; const stringifiedExample = serialize(exampleBody, `Serializing body for uploaded example with id: ${exampleId}`); const exampleBlob = new Blob([stringifiedExample], { type: "application/json" }); formData.append(exampleId, exampleBlob); if (example.inputs) { const stringifiedInputs = serialize(example.inputs, `Serializing inputs for uploaded example with id: ${exampleId}`); const inputsBlob = new Blob([stringifiedInputs], { type: "application/json" }); formData.append(`${exampleId}.inputs`, inputsBlob); } if (example.outputs) { const stringifiedOutputs = serialize(example.outputs, `Serializing outputs for uploaded example with id: ${exampleId}`); const outputsBlob = new Blob([stringifiedOutputs], { type: "application/json" }); formData.append(`${exampleId}.outputs`, outputsBlob); } if (example.attachments) for (const [name, attachment] of Object.entries(example.attachments)) { let mimeType; let data; if (Array.isArray(attachment)) [mimeType, data] = attachment; else { mimeType = attachment.mimeType; data = attachment.data; } const attachmentBlob = new Blob([data], { type: `${mimeType}; length=${data.byteLength}` }); formData.append(`${exampleId}.attachment.${name}`, attachmentBlob); } } const response = await this.caller.call(_getFetchImplementation(this.debug), `${this.apiUrl}/v1/platform/datasets/${datasetId}/examples`, { method: "POST", headers: this.headers, body: formData }); await raiseForStatus(response, "upload examples"); const result = await response.json(); return result; } async updatePrompt(promptIdentifier, options) { if (!await this.promptExists(promptIdentifier)) throw new Error("Prompt does not exist, you must create it first."); const [owner, promptName] = parsePromptIdentifier(promptIdentifier); if (!await this._currentTenantIsOwner(owner)) throw await this._ownerConflictError("update a prompt", owner); const payload = {}; if (options?.description !== void 0) payload.description = options.description; if (options?.readme !== void 0) payload.readme = options.readme; if (options?.tags !== void 0) payload.tags = options.tags; if (options?.isPublic !== void 0) payload.is_public = options.isPublic; if (options?.isArchived !== void 0) payload.is_archived = options.isArchived; if (Object.keys(payload).length === 0) throw new Error("No valid update options provided"); const response = await this.caller.call(_getFetchImplementation(this.debug), `${this.apiUrl}/repos/${owner}/${promptName}`, { method: "PATCH", body: JSON.stringify(payload), headers: { ...this.headers, "Content-Type": "application/json" }, signal: AbortSignal.timeout(this.timeout_ms), ...this.fetchOptions }); await raiseForStatus(response, "update prompt"); return response.json(); } async deletePrompt(promptIdentifier) { if (!await this.promptExists(promptIdentifier)) throw new Error("Prompt does not exist, you must create it first."); const [owner, promptName, _] = parsePromptIdentifier(promptIdentifier); if (!await this._currentTenantIsOwner(owner)) throw await this._ownerConflictError("delete a prompt", owner); const response = await 
this.caller.call(_getFetchImplementation(this.debug), `${this.apiUrl}/repos/${owner}/${promptName}`, { method: "DELETE", headers: this.headers, signal: AbortSignal.timeout(this.timeout_ms), ...this.fetchOptions }); return await response.json(); } async pullPromptCommit(promptIdentifier, options) { const [owner, promptName, commitHash] = parsePromptIdentifier(promptIdentifier); const response = await this.caller.call(_getFetchImplementation(this.debug), `${this.apiUrl}/commits/${owner}/${promptName}/${commitHash}${options?.includeModel ? "?include_model=true" : ""}`, { method: "GET", headers: this.headers, signal: AbortSignal.timeout(this.timeout_ms), ...this.fetchOptions }); await raiseForStatus(response, "pull prompt commit"); const result = await response.json(); return { owner, repo: promptName, commit_hash: result.commit_hash, manifest: result.manifest, examples: result.examples }; } /** * This method should not be used directly, use `import { pull } from "langchain/hub"` instead. * Using this method directly returns the JSON string of the prompt rather than a LangChain object. * @private */ async _pullPrompt(promptIdentifier, options) { const promptObject = await this.pullPromptCommit(promptIdentifier, { includeModel: options?.includeModel }); const prompt = JSON.stringify(promptObject.manifest); return prompt; } async pushPrompt(promptIdentifier, options) { if (await this.promptExists(promptIdentifier)) { if (options && Object.keys(options).some((key) => key !== "object")) await this.updatePrompt(promptIdentifier, { description: options?.description, readme: options?.readme, tags: options?.tags, isPublic: options?.isPublic }); } else await this.createPrompt(promptIdentifier, { description: options?.description, readme: options?.readme, tags: options?.tags, isPublic: options?.isPublic }); if (!options?.object) return await this._getPromptUrl(promptIdentifier); const url = await this.createCommit(promptIdentifier, options?.object, { parentCommitHash: options?.parentCommitHash }); return url; } /** * Clone a public dataset to your own langsmith tenant. * This operation is idempotent. If you already have a dataset with the given name, * this function will do nothing. * @param {string} tokenOrUrl The token of the public dataset to clone. * @param {Object} [options] Additional options for cloning the dataset. * @param {string} [options.sourceApiUrl] The URL of the langsmith server where the data is hosted. Defaults to the API URL of your current client. * @param {string} [options.datasetName] The name of the dataset to create in your tenant. Defaults to the name of the public dataset. * @returns {Promise} */ async clonePublicDataset(tokenOrUrl, options = {}) { const { sourceApiUrl = this.apiUrl, datasetName } = options; const [parsedApiUrl, tokenUuid] = this.parseTokenOrUrl(tokenOrUrl, sourceApiUrl); const sourceClient = new Client({ apiUrl: parsedApiUrl, apiKey: "placeholder" }); const ds = await sourceClient.readSharedDataset(tokenUuid); const finalDatasetName = datasetName || ds.name; try { if (await this.hasDataset({ datasetId: finalDatasetName })) { console.log(`Dataset ${finalDatasetName} already exists in your tenant. Skipping.`); return; } } catch (_) {} const examples = await sourceClient.listSharedExamples(tokenUuid); const dataset = await this.createDataset(finalDatasetName, { description: ds.description, dataType: ds.data_type || "kv", inputsSchema: ds.inputs_schema_definition ?? void 0, outputsSchema: ds.outputs_schema_definition ?? 
void 0 }); try { await this.createExamples({ inputs: examples.map((e) => e.inputs), outputs: examples.flatMap((e) => e.outputs ? [e.outputs] : []), datasetId: dataset.id }); } catch (e) { console.error(`An error occurred while creating dataset ${finalDatasetName}. You should delete it manually.`); throw e; } } parseTokenOrUrl(urlOrToken, apiUrl, numParts = 2, kind = "dataset") { try { assertUuid(urlOrToken); return [apiUrl, urlOrToken]; } catch (_) {} try { const parsedUrl = new URL(urlOrToken); const pathParts = parsedUrl.pathname.split("/").filter((part) => part !== ""); if (pathParts.length >= numParts) { const tokenUuid = pathParts[pathParts.length - numParts]; return [apiUrl, tokenUuid]; } else throw new Error(`Invalid public ${kind} URL: ${urlOrToken}`); } catch (error) { throw new Error(`Invalid public ${kind} URL or token: ${urlOrToken}`); } } /** * Awaits all pending trace batches. Useful for environments where * you need to be sure that all tracing requests finish before execution ends, * such as serverless environments. * * @example * ``` * import { Client } from "langsmith"; * * const client = new Client(); * * try { * // Tracing happens here * ... * } finally { * await client.awaitPendingTraceBatches(); * } * ``` * * @returns A promise that resolves once all currently pending traces have sent. */ awaitPendingTraceBatches() { if (this.manualFlushMode) { console.warn("[WARNING]: When tracing in manual flush mode, you must call `await client.flush()` manually to submit trace batches."); return Promise.resolve(); } return Promise.all([...this.autoBatchQueue.items.map(({ itemPromise }) => itemPromise), this.batchIngestCaller.queue.onIdle()]); } }; function isExampleCreate(input) { return "dataset_id" in input || "dataset_name" in input; } //#endregion //#region node_modules/.pnpm/langsmith@0.3.30_openai@5.3.0_ws@8.18.2_zod@3.25.57_/node_modules/langsmith/dist/env.js const isTracingEnabled$1 = (tracingEnabled) => { if (tracingEnabled !== void 0) return tracingEnabled; const envVars = ["TRACING_V2", "TRACING"]; return !!envVars.find((envVar) => getLangSmithEnvironmentVariable(envVar) === "true"); }; //#endregion //#region node_modules/.pnpm/langsmith@0.3.30_openai@5.3.0_ws@8.18.2_zod@3.25.57_/node_modules/langsmith/dist/singletons/constants.js const _LC_CONTEXT_VARIABLES_KEY = Symbol.for("lc:context_variables"); //#endregion //#region node_modules/.pnpm/langsmith@0.3.30_openai@5.3.0_ws@8.18.2_zod@3.25.57_/node_modules/langsmith/dist/run_trees.js function stripNonAlphanumeric$1(input) { return input.replace(/[-:.]/g, ""); } function convertToDottedOrderFormat$1(epoch, runId, executionOrder = 1) { const paddedOrder = executionOrder.toFixed(0).slice(0, 3).padStart(3, "0"); return stripNonAlphanumeric$1(`${new Date(epoch).toISOString().slice(0, -1)}${paddedOrder}Z`) + runId; } /** * Baggage header information */ var Baggage = class Baggage { constructor(metadata, tags, project_name) { Object.defineProperty(this, "metadata", { enumerable: true, configurable: true, writable: true, value: void 0 }); Object.defineProperty(this, "tags", { enumerable: true, configurable: true, writable: true, value: void 0 }); Object.defineProperty(this, "project_name", { enumerable: true, configurable: true, writable: true, value: void 0 }); this.metadata = metadata; this.tags = tags; this.project_name = project_name; } static fromHeader(value) { const items = value.split(","); let metadata = {}; let tags = []; let project_name; for (const item of items) { const [key, uriValue] = item.split("="); const 
value$1 = decodeURIComponent(uriValue); if (key === "langsmith-metadata") metadata = JSON.parse(value$1); else if (key === "langsmith-tags") tags = value$1.split(","); else if (key === "langsmith-project") project_name = value$1; } return new Baggage(metadata, tags, project_name); } toHeader() { const items = []; if (this.metadata && Object.keys(this.metadata).length > 0) items.push(`langsmith-metadata=${encodeURIComponent(JSON.stringify(this.metadata))}`); if (this.tags && this.tags.length > 0) items.push(`langsmith-tags=${encodeURIComponent(this.tags.join(","))}`); if (this.project_name) items.push(`langsmith-project=${encodeURIComponent(this.project_name)}`); return items.join(","); } }; var RunTree = class RunTree { constructor(originalConfig) { Object.defineProperty(this, "id", { enumerable: true, configurable: true, writable: true, value: void 0 }); Object.defineProperty(this, "name", { enumerable: true, configurable: true, writable: true, value: void 0 }); Object.defineProperty(this, "run_type", { enumerable: true, configurable: true, writable: true, value: void 0 }); Object.defineProperty(this, "project_name", { enumerable: true, configurable: true, writable: true, value: void 0 }); Object.defineProperty(this, "parent_run", { enumerable: true, configurable: true, writable: true, value: void 0 }); Object.defineProperty(this, "child_runs", { enumerable: true, configurable: true, writable: true, value: void 0 }); Object.defineProperty(this, "start_time", { enumerable: true, configurable: true, writable: true, value: void 0 }); Object.defineProperty(this, "end_time", { enumerable: true, configurable: true, writable: true, value: void 0 }); Object.defineProperty(this, "extra", { enumerable: true, configurable: true, writable: true, value: void 0 }); Object.defineProperty(this, "tags", { enumerable: true, configurable: true, writable: true, value: void 0 }); Object.defineProperty(this, "error", { enumerable: true, configurable: true, writable: true, value: void 0 }); Object.defineProperty(this, "serialized", { enumerable: true, configurable: true, writable: true, value: void 0 }); Object.defineProperty(this, "inputs", { enumerable: true, configurable: true, writable: true, value: void 0 }); Object.defineProperty(this, "outputs", { enumerable: true, configurable: true, writable: true, value: void 0 }); Object.defineProperty(this, "reference_example_id", { enumerable: true, configurable: true, writable: true, value: void 0 }); Object.defineProperty(this, "client", { enumerable: true, configurable: true, writable: true, value: void 0 }); Object.defineProperty(this, "events", { enumerable: true, configurable: true, writable: true, value: void 0 }); Object.defineProperty(this, "trace_id", { enumerable: true, configurable: true, writable: true, value: void 0 }); Object.defineProperty(this, "dotted_order", { enumerable: true, configurable: true, writable: true, value: void 0 }); Object.defineProperty(this, "tracingEnabled", { enumerable: true, configurable: true, writable: true, value: void 0 }); Object.defineProperty(this, "execution_order", { enumerable: true, configurable: true, writable: true, value: void 0 }); Object.defineProperty(this, "child_execution_order", { enumerable: true, configurable: true, writable: true, value: void 0 }); /** * Attachments associated with the run. 
* Each entry is a tuple of [mime_type, bytes] */ Object.defineProperty(this, "attachments", { enumerable: true, configurable: true, writable: true, value: void 0 }); if (isRunTree(originalConfig)) { Object.assign(this, { ...originalConfig }); return; } const defaultConfig = RunTree.getDefaultConfig(); const { metadata,...config } = originalConfig; const client$1 = config.client ?? RunTree.getSharedClient(); const dedupedMetadata = { ...metadata, ...config?.extra?.metadata }; config.extra = { ...config.extra, metadata: dedupedMetadata }; Object.assign(this, { ...defaultConfig, ...config, client: client$1 }); if (!this.trace_id) if (this.parent_run) this.trace_id = this.parent_run.trace_id ?? this.id; else this.trace_id = this.id; this.execution_order ??= 1; this.child_execution_order ??= 1; if (!this.dotted_order) { const currentDottedOrder = convertToDottedOrderFormat$1(this.start_time, this.id, this.execution_order); if (this.parent_run) this.dotted_order = this.parent_run.dotted_order + "." + currentDottedOrder; else this.dotted_order = currentDottedOrder; } } set metadata(metadata) { this.extra = { ...this.extra, metadata: { ...this.extra?.metadata, ...metadata } }; } get metadata() { return this.extra?.metadata; } static getDefaultConfig() { return { id: v4_default(), run_type: "chain", project_name: getLangSmithEnvironmentVariable("PROJECT") ?? getEnvironmentVariable$1("LANGCHAIN_SESSION") ?? "default", child_runs: [], api_url: getEnvironmentVariable$1("LANGCHAIN_ENDPOINT") ?? "http://localhost:1984", api_key: getEnvironmentVariable$1("LANGCHAIN_API_KEY"), caller_options: {}, start_time: Date.now(), serialized: {}, inputs: {}, extra: {} }; } static getSharedClient() { if (!RunTree.sharedClient) RunTree.sharedClient = new Client(); return RunTree.sharedClient; } createChild(config) { const child_execution_order = this.child_execution_order + 1; const child = new RunTree({ ...config, parent_run: this, project_name: this.project_name, client: this.client, tracingEnabled: this.tracingEnabled, execution_order: child_execution_order, child_execution_order }); if (_LC_CONTEXT_VARIABLES_KEY in this) child[_LC_CONTEXT_VARIABLES_KEY] = this[_LC_CONTEXT_VARIABLES_KEY]; const LC_CHILD = Symbol.for("lc:child_config"); const presentConfig = config.extra?.[LC_CHILD] ?? this.extra[LC_CHILD]; if (isRunnableConfigLike(presentConfig)) { const newConfig = { ...presentConfig }; const callbacks = isCallbackManagerLike(newConfig.callbacks) ? newConfig.callbacks.copy?.() : void 0; if (callbacks) { Object.assign(callbacks, { _parentRunId: child.id }); callbacks.handlers?.find(isLangChainTracerLike)?.updateFromRunTree?.(child); newConfig.callbacks = callbacks; } child.extra[LC_CHILD] = newConfig; } const visited = /* @__PURE__ */ new Set(); let current = this; while (current != null && !visited.has(current.id)) { visited.add(current.id); current.child_execution_order = Math.max(current.child_execution_order, child_execution_order); current = current.parent_run; } this.child_runs.push(child); return child; } async end(outputs, error, endTime = Date.now(), metadata) { this.outputs = this.outputs ?? outputs; this.error = this.error ?? error; this.end_time = this.end_time ?? endTime; if (metadata && Object.keys(metadata).length > 0) this.extra = this.extra ? { ...this.extra, metadata: { ...this.extra.metadata, ...metadata } } : { metadata }; } _convertToCreate(run, runtimeEnv, excludeChildRuns = true) { const runExtra = run.extra ?? 
{}; if (!runExtra.runtime) runExtra.runtime = {}; if (runtimeEnv) { for (const [k, v] of Object.entries(runtimeEnv)) if (!runExtra.runtime[k]) runExtra.runtime[k] = v; } let child_runs; let parent_run_id; if (!excludeChildRuns) { child_runs = run.child_runs.map((child_run) => this._convertToCreate(child_run, runtimeEnv, excludeChildRuns)); parent_run_id = void 0; } else { parent_run_id = run.parent_run?.id; child_runs = []; } const persistedRun = { id: run.id, name: run.name, start_time: run.start_time, end_time: run.end_time, run_type: run.run_type, reference_example_id: run.reference_example_id, extra: runExtra, serialized: run.serialized, error: run.error, inputs: run.inputs, outputs: run.outputs, session_name: run.project_name, child_runs, parent_run_id, trace_id: run.trace_id, dotted_order: run.dotted_order, tags: run.tags, attachments: run.attachments }; return persistedRun; } async postRun(excludeChildRuns = true) { try { const runtimeEnv = getRuntimeEnvironment$1(); const runCreate = await this._convertToCreate(this, runtimeEnv, true); await this.client.createRun(runCreate); if (!excludeChildRuns) { warnOnce("Posting with excludeChildRuns=false is deprecated and will be removed in a future version."); for (const childRun of this.child_runs) await childRun.postRun(false); } } catch (error) { console.error(`Error in postRun for run ${this.id}:`, error); } } async patchRun() { try { const runUpdate = { end_time: this.end_time, error: this.error, inputs: this.inputs, outputs: this.outputs, parent_run_id: this.parent_run?.id, reference_example_id: this.reference_example_id, extra: this.extra, events: this.events, dotted_order: this.dotted_order, trace_id: this.trace_id, tags: this.tags, attachments: this.attachments, session_name: this.project_name }; await this.client.updateRun(this.id, runUpdate); } catch (error) { console.error(`Error in patchRun for run ${this.id}`, error); } } toJSON() { return this._convertToCreate(this, void 0, false); } /** * Add an event to the run tree. * @param event - A single event or string to add */ addEvent(event) { if (!this.events) this.events = []; if (typeof event === "string") this.events.push({ name: "event", time: (/* @__PURE__ */ new Date()).toISOString(), message: event }); else this.events.push({ ...event, time: event.time ?? (/* @__PURE__ */ new Date()).toISOString() }); } static fromRunnableConfig(parentConfig, props) { const callbackManager = parentConfig?.callbacks; let parentRun; let projectName; let client$1; let tracingEnabled = isTracingEnabled$1(); if (callbackManager) { const parentRunId = callbackManager?.getParentRunId?.() ?? ""; const langChainTracer = callbackManager?.handlers?.find((handler) => handler?.name == "langchain_tracer"); parentRun = langChainTracer?.getRun?.(parentRunId); projectName = langChainTracer?.projectName; client$1 = langChainTracer?.client; tracingEnabled = tracingEnabled || !!langChainTracer; } if (!parentRun) return new RunTree({ ...props, client: client$1, tracingEnabled, project_name: projectName }); const parentRunTree = new RunTree({ name: parentRun.name, id: parentRun.id, trace_id: parentRun.trace_id, dotted_order: parentRun.dotted_order, client: client$1, tracingEnabled, project_name: projectName, tags: [...new Set((parentRun?.tags ?? []).concat(parentConfig?.tags ?? 
[]))], extra: { metadata: { ...parentRun?.extra?.metadata, ...parentConfig?.metadata } } }); return parentRunTree.createChild(props); } static fromDottedOrder(dottedOrder) { return this.fromHeaders({ "langsmith-trace": dottedOrder }); } static fromHeaders(headers, inheritArgs) { const rawHeaders = "get" in headers && typeof headers.get === "function" ? { "langsmith-trace": headers.get("langsmith-trace"), baggage: headers.get("baggage") } : headers; const headerTrace = rawHeaders["langsmith-trace"]; if (!headerTrace || typeof headerTrace !== "string") return void 0; const parentDottedOrder = headerTrace.trim(); const parsedDottedOrder = parentDottedOrder.split(".").map((part) => { const [strTime, uuid] = part.split("Z"); return { strTime, time: Date.parse(strTime + "Z"), uuid }; }); const traceId = parsedDottedOrder[0].uuid; const config = { ...inheritArgs, name: inheritArgs?.["name"] ?? "parent", run_type: inheritArgs?.["run_type"] ?? "chain", start_time: inheritArgs?.["start_time"] ?? Date.now(), id: parsedDottedOrder.at(-1)?.uuid, trace_id: traceId, dotted_order: parentDottedOrder }; if (rawHeaders["baggage"] && typeof rawHeaders["baggage"] === "string") { const baggage = Baggage.fromHeader(rawHeaders["baggage"]); config.metadata = baggage.metadata; config.tags = baggage.tags; config.project_name = baggage.project_name; } return new RunTree(config); } toHeaders(headers) { const result = { "langsmith-trace": this.dotted_order, baggage: new Baggage(this.extra?.metadata, this.tags, this.project_name).toHeader() }; if (headers) for (const [key, value] of Object.entries(result)) headers.set(key, value); return result; } }; Object.defineProperty(RunTree, "sharedClient", { enumerable: true, configurable: true, writable: true, value: null }); function isRunTree(x) { return x !== void 0 && typeof x.createChild === "function" && typeof x.postRun === "function"; } function isLangChainTracerLike(x) { return typeof x === "object" && x != null && typeof x.name === "string" && x.name === "langchain_tracer"; } function containsLangChainTracerLike(x) { return Array.isArray(x) && x.some((callback) => isLangChainTracerLike(callback)); } function isCallbackManagerLike(x) { return typeof x === "object" && x != null && Array.isArray(x.handlers); } function isRunnableConfigLike(x) { return x !== void 0 && typeof x.callbacks === "object" && (containsLangChainTracerLike(x.callbacks?.handlers) || containsLangChainTracerLike(x.callbacks)); } //#endregion //#region node_modules/.pnpm/langsmith@0.3.30_openai@5.3.0_ws@8.18.2_zod@3.25.57_/node_modules/langsmith/dist/index.js const __version__ = "0.3.30"; //#endregion //#region node_modules/.pnpm/@langchain+core@0.3.57_openai@5.3.0_ws@8.18.2_zod@3.25.57_/node_modules/@langchain/core/dist/singletons/async_local_storage/globals.js const TRACING_ALS_KEY$1 = Symbol.for("ls:tracing_async_local_storage"); const _CONTEXT_VARIABLES_KEY = Symbol.for("lc:context_variables"); const setGlobalAsyncLocalStorageInstance = (instance) => { globalThis[TRACING_ALS_KEY$1] = instance; }; const getGlobalAsyncLocalStorageInstance = () => { return globalThis[TRACING_ALS_KEY$1]; }; //#endregion //#region node_modules/.pnpm/decamelize@1.2.0/node_modules/decamelize/index.js var require_decamelize = __commonJS({ "node_modules/.pnpm/decamelize@1.2.0/node_modules/decamelize/index.js"(exports, module) { module.exports = function(str, sep) { if (typeof str !== "string") throw new TypeError("Expected a string"); sep = typeof sep === "undefined" ? 
"_" : sep; return str.replace(/([a-z\d])([A-Z])/g, "$1" + sep + "$2").replace(/([A-Z]+)([A-Z][a-z\d]+)/g, "$1" + sep + "$2").toLowerCase(); }; } }); //#endregion //#region node_modules/.pnpm/camelcase@6.3.0/node_modules/camelcase/index.js var require_camelcase = __commonJS({ "node_modules/.pnpm/camelcase@6.3.0/node_modules/camelcase/index.js"(exports, module) { const UPPERCASE = /[\p{Lu}]/u; const LOWERCASE = /[\p{Ll}]/u; const LEADING_CAPITAL = /^[\p{Lu}](?![\p{Lu}])/gu; const IDENTIFIER = /([\p{Alpha}\p{N}_]|$)/u; const SEPARATORS = /[_.\- ]+/; const LEADING_SEPARATORS = new RegExp("^" + SEPARATORS.source); const SEPARATORS_AND_IDENTIFIER = new RegExp(SEPARATORS.source + IDENTIFIER.source, "gu"); const NUMBERS_AND_IDENTIFIER = new RegExp("\\d+" + IDENTIFIER.source, "gu"); const preserveCamelCase = (string, toLowerCase, toUpperCase) => { let isLastCharLower = false; let isLastCharUpper = false; let isLastLastCharUpper = false; for (let i = 0; i < string.length; i++) { const character = string[i]; if (isLastCharLower && UPPERCASE.test(character)) { string = string.slice(0, i) + "-" + string.slice(i); isLastCharLower = false; isLastLastCharUpper = isLastCharUpper; isLastCharUpper = true; i++; } else if (isLastCharUpper && isLastLastCharUpper && LOWERCASE.test(character)) { string = string.slice(0, i - 1) + "-" + string.slice(i - 1); isLastLastCharUpper = isLastCharUpper; isLastCharUpper = false; isLastCharLower = true; } else { isLastCharLower = toLowerCase(character) === character && toUpperCase(character) !== character; isLastLastCharUpper = isLastCharUpper; isLastCharUpper = toUpperCase(character) === character && toLowerCase(character) !== character; } } return string; }; const preserveConsecutiveUppercase = (input, toLowerCase) => { LEADING_CAPITAL.lastIndex = 0; return input.replace(LEADING_CAPITAL, (m1) => toLowerCase(m1)); }; const postProcess = (input, toUpperCase) => { SEPARATORS_AND_IDENTIFIER.lastIndex = 0; NUMBERS_AND_IDENTIFIER.lastIndex = 0; return input.replace(SEPARATORS_AND_IDENTIFIER, (_, identifier) => toUpperCase(identifier)).replace(NUMBERS_AND_IDENTIFIER, (m) => toUpperCase(m)); }; const camelCase$1 = (input, options) => { if (!(typeof input === "string" || Array.isArray(input))) throw new TypeError("Expected the input to be `string | string[]`"); options = { pascalCase: false, preserveConsecutiveUppercase: false, ...options }; if (Array.isArray(input)) input = input.map((x) => x.trim()).filter((x) => x.length).join("-"); else input = input.trim(); if (input.length === 0) return ""; const toLowerCase = options.locale === false ? (string) => string.toLowerCase() : (string) => string.toLocaleLowerCase(options.locale); const toUpperCase = options.locale === false ? (string) => string.toUpperCase() : (string) => string.toLocaleUpperCase(options.locale); if (input.length === 1) return options.pascalCase ? 
toUpperCase(input) : toLowerCase(input); const hasUpperCase = input !== toLowerCase(input); if (hasUpperCase) input = preserveCamelCase(input, toLowerCase, toUpperCase); input = input.replace(LEADING_SEPARATORS, ""); if (options.preserveConsecutiveUppercase) input = preserveConsecutiveUppercase(input, toLowerCase); else input = toLowerCase(input); if (options.pascalCase) input = toUpperCase(input.charAt(0)) + input.slice(1); return postProcess(input, toUpperCase); }; module.exports = camelCase$1; module.exports.default = camelCase$1; } }); //#endregion //#region node_modules/.pnpm/@langchain+core@0.3.57_openai@5.3.0_ws@8.18.2_zod@3.25.57_/node_modules/@langchain/core/dist/load/map_keys.js var import_decamelize = __toESM(require_decamelize(), 1); var import_camelcase = __toESM(require_camelcase(), 1); function keyToJson(key, map) { return map?.[key] || (0, import_decamelize.default)(key); } function keyFromJson(key, map) { return map?.[key] || (0, import_camelcase.default)(key); } function mapKeys(fields, mapper, map) { const mapped = {}; for (const key in fields) if (Object.hasOwn(fields, key)) mapped[mapper(key, map)] = fields[key]; return mapped; } //#endregion //#region node_modules/.pnpm/@langchain+core@0.3.57_openai@5.3.0_ws@8.18.2_zod@3.25.57_/node_modules/@langchain/core/dist/load/serializable.js var serializable_exports = {}; __export(serializable_exports, { Serializable: () => Serializable, get_lc_unique_name: () => get_lc_unique_name }); function shallowCopy(obj) { return Array.isArray(obj) ? [...obj] : { ...obj }; } function replaceSecrets(root, secretsMap) { const result = shallowCopy(root); for (const [path, secretId] of Object.entries(secretsMap)) { const [last, ...partsReverse] = path.split(".").reverse(); let current = result; for (const part of partsReverse.reverse()) { if (current[part] === void 0) break; current[part] = shallowCopy(current[part]); current = current[part]; } if (current[last] !== void 0) current[last] = { lc: 1, type: "secret", id: [secretId] }; } return result; } /** * Get a unique name for the module, rather than parent class implementations. * Should not be subclassed, subclass lc_name above instead. */ function get_lc_unique_name(serializableClass) { const parentClass = Object.getPrototypeOf(serializableClass); const lcNameIsSubclassed = typeof serializableClass.lc_name === "function" && (typeof parentClass.lc_name !== "function" || serializableClass.lc_name() !== parentClass.lc_name()); if (lcNameIsSubclassed) return serializableClass.lc_name(); else return serializableClass.name; } var Serializable = class Serializable { /** * The name of the serializable. Override to provide an alias or * to preserve the serialized module name in minified environments. * * Implemented as a static method to support loading logic. */ static lc_name() { return this.name; } /** * The final serialized identifier for the module. */ get lc_id() { return [...this.lc_namespace, get_lc_unique_name(this.constructor)]; } /** * A map of secrets, which will be omitted from serialization. * Keys are paths to the secret in constructor args, e.g. "foo.bar.baz". * Values are the secret ids, which will be used when deserializing. */ get lc_secrets() { return void 0; } /** * A map of additional attributes to merge with constructor args. * Keys are the attribute names, e.g. "foo". * Values are the attribute values, which will be serialized. * These attributes need to be accepted by the constructor as arguments. 
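* @example
* A hedged sketch of a subclass that exposes an extra constructor argument through
* `lc_attributes`; the class, namespace, and field names are invented for illustration.
* ```
* import { Serializable } from "@langchain/core/load/serializable";
*
* class MyComponent extends Serializable {
*   lc_serializable = true;
*   lc_namespace = ["my_app", "components"];
*
*   get lc_attributes() {
*     // Merged into the serialized kwargs alongside the constructor args.
*     return { verbose: this.verbose };
*   }
*
*   constructor(fields) {
*     super(fields);
*     this.verbose = fields?.verbose ?? false;
*   }
* }
* ```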
*/ get lc_attributes() { return void 0; } /** * A map of aliases for constructor args. * Keys are the attribute names, e.g. "foo". * Values are the alias that will replace the key in serialization. * This is used to eg. make argument names match Python. */ get lc_aliases() { return void 0; } /** * A manual list of keys that should be serialized. * If not overridden, all fields passed into the constructor will be serialized. */ get lc_serializable_keys() { return void 0; } constructor(kwargs, ..._args) { Object.defineProperty(this, "lc_serializable", { enumerable: true, configurable: true, writable: true, value: false }); Object.defineProperty(this, "lc_kwargs", { enumerable: true, configurable: true, writable: true, value: void 0 }); if (this.lc_serializable_keys !== void 0) this.lc_kwargs = Object.fromEntries(Object.entries(kwargs || {}).filter(([key]) => this.lc_serializable_keys?.includes(key))); else this.lc_kwargs = kwargs ?? {}; } toJSON() { if (!this.lc_serializable) return this.toJSONNotImplemented(); if (this.lc_kwargs instanceof Serializable || typeof this.lc_kwargs !== "object" || Array.isArray(this.lc_kwargs)) return this.toJSONNotImplemented(); const aliases = {}; const secrets = {}; const kwargs = Object.keys(this.lc_kwargs).reduce((acc, key) => { acc[key] = key in this ? this[key] : this.lc_kwargs[key]; return acc; }, {}); for (let current = Object.getPrototypeOf(this); current; current = Object.getPrototypeOf(current)) { Object.assign(aliases, Reflect.get(current, "lc_aliases", this)); Object.assign(secrets, Reflect.get(current, "lc_secrets", this)); Object.assign(kwargs, Reflect.get(current, "lc_attributes", this)); } Object.keys(secrets).forEach((keyPath) => { let read = this; let write = kwargs; const [last, ...partsReverse] = keyPath.split(".").reverse(); for (const key of partsReverse.reverse()) { if (!(key in read) || read[key] === void 0) return; if (!(key in write) || write[key] === void 0) { if (typeof read[key] === "object" && read[key] != null) write[key] = {}; else if (Array.isArray(read[key])) write[key] = []; } read = read[key]; write = write[key]; } if (last in read && read[last] !== void 0) write[last] = write[last] || read[last]; }); return { lc: 1, type: "constructor", id: this.lc_id, kwargs: mapKeys(Object.keys(secrets).length ? 
replaceSecrets(kwargs, secrets) : kwargs, keyToJson, aliases) }; } toJSONNotImplemented() { return { lc: 1, type: "not_implemented", id: this.lc_id }; } }; //#endregion //#region node_modules/.pnpm/@langchain+core@0.3.57_openai@5.3.0_ws@8.18.2_zod@3.25.57_/node_modules/@langchain/core/dist/utils/env.js var env_exports = {}; __export(env_exports, { getEnv: () => getEnv, getEnvironmentVariable: () => getEnvironmentVariable, getRuntimeEnvironment: () => getRuntimeEnvironment, isBrowser: () => isBrowser, isDeno: () => isDeno, isJsDom: () => isJsDom, isNode: () => isNode, isWebWorker: () => isWebWorker }); const isBrowser = () => typeof window !== "undefined" && typeof window.document !== "undefined"; const isWebWorker = () => typeof globalThis === "object" && globalThis.constructor && globalThis.constructor.name === "DedicatedWorkerGlobalScope"; const isJsDom = () => typeof window !== "undefined" && window.name === "nodejs" || typeof navigator !== "undefined" && (navigator.userAgent.includes("Node.js") || navigator.userAgent.includes("jsdom")); const isDeno = () => typeof Deno !== "undefined"; const isNode = () => typeof process !== "undefined" && typeof process.versions !== "undefined" && typeof process.versions.node !== "undefined" && !isDeno(); const getEnv = () => { let env; if (isBrowser()) env = "browser"; else if (isNode()) env = "node"; else if (isWebWorker()) env = "webworker"; else if (isJsDom()) env = "jsdom"; else if (isDeno()) env = "deno"; else env = "other"; return env; }; let runtimeEnvironment; async function getRuntimeEnvironment() { if (runtimeEnvironment === void 0) { const env = getEnv(); runtimeEnvironment = { library: "langchain-js", runtime: env }; } return runtimeEnvironment; } function getEnvironmentVariable(name) { try { if (typeof process !== "undefined") return process.env?.[name]; else if (isDeno()) return Deno?.env.get(name); else return void 0; } catch (e) { return void 0; } } //#endregion //#region node_modules/.pnpm/@langchain+core@0.3.57_openai@5.3.0_ws@8.18.2_zod@3.25.57_/node_modules/@langchain/core/dist/callbacks/base.js var base_exports$1 = {}; __export(base_exports$1, { BaseCallbackHandler: () => BaseCallbackHandler, callbackHandlerPrefersStreaming: () => callbackHandlerPrefersStreaming, isBaseCallbackHandler: () => isBaseCallbackHandler }); /** * Abstract class that provides a set of optional methods that can be * overridden in derived classes to handle various events during the * execution of a LangChain application. */ var BaseCallbackHandlerMethodsClass = class {}; function callbackHandlerPrefersStreaming(x) { return "lc_prefer_streaming" in x && x.lc_prefer_streaming; } /** * Abstract base class for creating callback handlers in the LangChain * framework. It provides a set of optional methods that can be overridden * in derived classes to handle various events during the execution of a * LangChain application. */ var BaseCallbackHandler = class extends BaseCallbackHandlerMethodsClass { get lc_namespace() { return [ "langchain_core", "callbacks", this.name ]; } get lc_secrets() { return void 0; } get lc_attributes() { return void 0; } get lc_aliases() { return void 0; } get lc_serializable_keys() { return void 0; } /** * The name of the serializable. Override to provide an alias or * to preserve the serialized module name in minified environments. * * Implemented as a static method to support loading logic. */ static lc_name() { return this.name; } /** * The final serialized identifier for the module. 
*/ get lc_id() { return [...this.lc_namespace, get_lc_unique_name(this.constructor)]; } constructor(input) { super(); Object.defineProperty(this, "lc_serializable", { enumerable: true, configurable: true, writable: true, value: false }); Object.defineProperty(this, "lc_kwargs", { enumerable: true, configurable: true, writable: true, value: void 0 }); Object.defineProperty(this, "ignoreLLM", { enumerable: true, configurable: true, writable: true, value: false }); Object.defineProperty(this, "ignoreChain", { enumerable: true, configurable: true, writable: true, value: false }); Object.defineProperty(this, "ignoreAgent", { enumerable: true, configurable: true, writable: true, value: false }); Object.defineProperty(this, "ignoreRetriever", { enumerable: true, configurable: true, writable: true, value: false }); Object.defineProperty(this, "ignoreCustomEvent", { enumerable: true, configurable: true, writable: true, value: false }); Object.defineProperty(this, "raiseError", { enumerable: true, configurable: true, writable: true, value: false }); Object.defineProperty(this, "awaitHandlers", { enumerable: true, configurable: true, writable: true, value: getEnvironmentVariable("LANGCHAIN_CALLBACKS_BACKGROUND") === "false" }); this.lc_kwargs = input || {}; if (input) { this.ignoreLLM = input.ignoreLLM ?? this.ignoreLLM; this.ignoreChain = input.ignoreChain ?? this.ignoreChain; this.ignoreAgent = input.ignoreAgent ?? this.ignoreAgent; this.ignoreRetriever = input.ignoreRetriever ?? this.ignoreRetriever; this.ignoreCustomEvent = input.ignoreCustomEvent ?? this.ignoreCustomEvent; this.raiseError = input.raiseError ?? this.raiseError; this.awaitHandlers = this.raiseError || (input._awaitHandler ?? this.awaitHandlers); } } copy() { return new this.constructor(this); } toJSON() { return Serializable.prototype.toJSON.call(this); } toJSONNotImplemented() { return Serializable.prototype.toJSONNotImplemented.call(this); } static fromMethods(methods) { class Handler extends BaseCallbackHandler { constructor() { super(); Object.defineProperty(this, "name", { enumerable: true, configurable: true, writable: true, value: v4_default() }); Object.assign(this, methods); } } return new Handler(); } }; const isBaseCallbackHandler = (x) => { const callbackHandler = x; return callbackHandler !== void 0 && typeof callbackHandler.copy === "function" && typeof callbackHandler.name === "string" && typeof callbackHandler.awaitHandlers === "boolean"; }; //#endregion //#region node_modules/.pnpm/ansi-styles@5.2.0/node_modules/ansi-styles/index.js var require_ansi_styles = __commonJS({ "node_modules/.pnpm/ansi-styles@5.2.0/node_modules/ansi-styles/index.js"(exports, module) { const ANSI_BACKGROUND_OFFSET = 10; const wrapAnsi256 = (offset = 0) => (code) => `\u001B[${38 + offset};5;${code}m`; const wrapAnsi16m = (offset = 0) => (red, green, blue) => `\u001B[${38 + offset};2;${red};${green};${blue}m`; function assembleStyles() { const codes = /* @__PURE__ */ new Map(); const styles$1 = { modifier: { reset: [0, 0], bold: [1, 22], dim: [2, 22], italic: [3, 23], underline: [4, 24], overline: [53, 55], inverse: [7, 27], hidden: [8, 28], strikethrough: [9, 29] }, color: { black: [30, 39], red: [31, 39], green: [32, 39], yellow: [33, 39], blue: [34, 39], magenta: [35, 39], cyan: [36, 39], white: [37, 39], blackBright: [90, 39], redBright: [91, 39], greenBright: [92, 39], yellowBright: [93, 39], blueBright: [94, 39], magentaBright: [95, 39], cyanBright: [96, 39], whiteBright: [97, 39] }, bgColor: { bgBlack: [40, 49], bgRed: [41, 49], 
bgGreen: [42, 49], bgYellow: [43, 49], bgBlue: [44, 49], bgMagenta: [45, 49], bgCyan: [46, 49], bgWhite: [47, 49], bgBlackBright: [100, 49], bgRedBright: [101, 49], bgGreenBright: [102, 49], bgYellowBright: [103, 49], bgBlueBright: [104, 49], bgMagentaBright: [105, 49], bgCyanBright: [106, 49], bgWhiteBright: [107, 49] } }; styles$1.color.gray = styles$1.color.blackBright; styles$1.bgColor.bgGray = styles$1.bgColor.bgBlackBright; styles$1.color.grey = styles$1.color.blackBright; styles$1.bgColor.bgGrey = styles$1.bgColor.bgBlackBright; for (const [groupName, group] of Object.entries(styles$1)) { for (const [styleName, style] of Object.entries(group)) { styles$1[styleName] = { open: `\u001B[${style[0]}m`, close: `\u001B[${style[1]}m` }; group[styleName] = styles$1[styleName]; codes.set(style[0], style[1]); } Object.defineProperty(styles$1, groupName, { value: group, enumerable: false }); } Object.defineProperty(styles$1, "codes", { value: codes, enumerable: false }); styles$1.color.close = "\x1B[39m"; styles$1.bgColor.close = "\x1B[49m"; styles$1.color.ansi256 = wrapAnsi256(); styles$1.color.ansi16m = wrapAnsi16m(); styles$1.bgColor.ansi256 = wrapAnsi256(ANSI_BACKGROUND_OFFSET); styles$1.bgColor.ansi16m = wrapAnsi16m(ANSI_BACKGROUND_OFFSET); Object.defineProperties(styles$1, { rgbToAnsi256: { value: (red, green, blue) => { if (red === green && green === blue) { if (red < 8) return 16; if (red > 248) return 231; return Math.round((red - 8) / 247 * 24) + 232; } return 16 + 36 * Math.round(red / 255 * 5) + 6 * Math.round(green / 255 * 5) + Math.round(blue / 255 * 5); }, enumerable: false }, hexToRgb: { value: (hex) => { const matches = /(?<colorString>[a-f\d]{6}|[a-f\d]{3})/i.exec(hex.toString(16)); if (!matches) return [ 0, 0, 0 ]; let { colorString } = matches.groups; if (colorString.length === 3) colorString = colorString.split("").map((character) => character + character).join(""); const integer = Number.parseInt(colorString, 16); return [ integer >> 16 & 255, integer >> 8 & 255, integer & 255 ]; }, enumerable: false }, hexToAnsi256: { value: (hex) => styles$1.rgbToAnsi256(...styles$1.hexToRgb(hex)), enumerable: false } }); return styles$1; } Object.defineProperty(module, "exports", { enumerable: true, get: assembleStyles }); } }); //#endregion //#region node_modules/.pnpm/@langchain+core@0.3.57_openai@5.3.0_ws@8.18.2_zod@3.25.57_/node_modules/@langchain/core/dist/tracers/base.js var base_exports = {}; __export(base_exports, { BaseTracer: () => BaseTracer, isBaseTracer: () => isBaseTracer }); function _coerceToDict$2(value, defaultKey) { return value && !Array.isArray(value) && typeof value === "object" ? value : { [defaultKey]: value }; } function stripNonAlphanumeric(input) { return input.replace(/[-:.]/g, ""); } function convertToDottedOrderFormat(epoch, runId, executionOrder) { const paddedOrder = executionOrder.toFixed(0).slice(0, 3).padStart(3, "0"); return stripNonAlphanumeric(`${new Date(epoch).toISOString().slice(0, -1)}${paddedOrder}Z`) + runId; } function isBaseTracer(x) { return typeof x._addRunToRunMap === "function"; } var BaseTracer = class extends BaseCallbackHandler { constructor(_fields) { super(...arguments); Object.defineProperty(this, "runMap", { enumerable: true, configurable: true, writable: true, value: /* @__PURE__ */ new Map() }); } copy() { return this; } stringifyError(error) { if (error instanceof Error) return error.message + (error?.stack ?
`\n\n${error.stack}` : ""); if (typeof error === "string") return error; return `${error}`; } _addChildRun(parentRun, childRun) { parentRun.child_runs.push(childRun); } _addRunToRunMap(run) { const currentDottedOrder = convertToDottedOrderFormat(run.start_time, run.id, run.execution_order); const storedRun = { ...run }; if (storedRun.parent_run_id !== void 0) { const parentRun = this.runMap.get(storedRun.parent_run_id); if (parentRun) { this._addChildRun(parentRun, storedRun); parentRun.child_execution_order = Math.max(parentRun.child_execution_order, storedRun.child_execution_order); storedRun.trace_id = parentRun.trace_id; if (parentRun.dotted_order !== void 0) storedRun.dotted_order = [parentRun.dotted_order, currentDottedOrder].join("."); } } else { storedRun.trace_id = storedRun.id; storedRun.dotted_order = currentDottedOrder; } this.runMap.set(storedRun.id, storedRun); return storedRun; } async _endTrace(run) { const parentRun = run.parent_run_id !== void 0 && this.runMap.get(run.parent_run_id); if (parentRun) parentRun.child_execution_order = Math.max(parentRun.child_execution_order, run.child_execution_order); else await this.persistRun(run); this.runMap.delete(run.id); await this.onRunUpdate?.(run); } _getExecutionOrder(parentRunId) { const parentRun = parentRunId !== void 0 && this.runMap.get(parentRunId); if (!parentRun) return 1; return parentRun.child_execution_order + 1; } /** * Create and add a run to the run map for LLM start events. * This must sometimes be done synchronously to avoid race conditions * when callbacks are backgrounded, so we expose it as a separate method here. */ _createRunForLLMStart(llm, prompts, runId, parentRunId, extraParams, tags, metadata, name) { const execution_order = this._getExecutionOrder(parentRunId); const start_time = Date.now(); const finalExtraParams = metadata ? { ...extraParams, metadata } : extraParams; const run = { id: runId, name: name ?? llm.id[llm.id.length - 1], parent_run_id: parentRunId, start_time, serialized: llm, events: [{ name: "start", time: new Date(start_time).toISOString() }], inputs: { prompts }, execution_order, child_runs: [], child_execution_order: execution_order, run_type: "llm", extra: finalExtraParams ?? {}, tags: tags || [] }; return this._addRunToRunMap(run); } async handleLLMStart(llm, prompts, runId, parentRunId, extraParams, tags, metadata, name) { const run = this.runMap.get(runId) ?? this._createRunForLLMStart(llm, prompts, runId, parentRunId, extraParams, tags, metadata, name); await this.onRunCreate?.(run); await this.onLLMStart?.(run); return run; } /** * Create and add a run to the run map for chat model start events. * This must sometimes be done synchronously to avoid race conditions * when callbacks are backgrounded, so we expose it as a separate method here. */ _createRunForChatModelStart(llm, messages, runId, parentRunId, extraParams, tags, metadata, name) { const execution_order = this._getExecutionOrder(parentRunId); const start_time = Date.now(); const finalExtraParams = metadata ? { ...extraParams, metadata } : extraParams; const run = { id: runId, name: name ?? llm.id[llm.id.length - 1], parent_run_id: parentRunId, start_time, serialized: llm, events: [{ name: "start", time: new Date(start_time).toISOString() }], inputs: { messages }, execution_order, child_runs: [], child_execution_order: execution_order, run_type: "llm", extra: finalExtraParams ?? 
{}, tags: tags || [] }; return this._addRunToRunMap(run); } async handleChatModelStart(llm, messages, runId, parentRunId, extraParams, tags, metadata, name) { const run = this.runMap.get(runId) ?? this._createRunForChatModelStart(llm, messages, runId, parentRunId, extraParams, tags, metadata, name); await this.onRunCreate?.(run); await this.onLLMStart?.(run); return run; } async handleLLMEnd(output, runId, _parentRunId, _tags, extraParams) { const run = this.runMap.get(runId); if (!run || run?.run_type !== "llm") throw new Error("No LLM run to end."); run.end_time = Date.now(); run.outputs = output; run.events.push({ name: "end", time: new Date(run.end_time).toISOString() }); run.extra = { ...run.extra, ...extraParams }; await this.onLLMEnd?.(run); await this._endTrace(run); return run; } async handleLLMError(error, runId, _parentRunId, _tags, extraParams) { const run = this.runMap.get(runId); if (!run || run?.run_type !== "llm") throw new Error("No LLM run to end."); run.end_time = Date.now(); run.error = this.stringifyError(error); run.events.push({ name: "error", time: new Date(run.end_time).toISOString() }); run.extra = { ...run.extra, ...extraParams }; await this.onLLMError?.(run); await this._endTrace(run); return run; } /** * Create and add a run to the run map for chain start events. * This must sometimes be done synchronously to avoid race conditions * when callbacks are backgrounded, so we expose it as a separate method here. */ _createRunForChainStart(chain, inputs, runId, parentRunId, tags, metadata, runType, name) { const execution_order = this._getExecutionOrder(parentRunId); const start_time = Date.now(); const run = { id: runId, name: name ?? chain.id[chain.id.length - 1], parent_run_id: parentRunId, start_time, serialized: chain, events: [{ name: "start", time: new Date(start_time).toISOString() }], inputs, execution_order, child_execution_order: execution_order, run_type: runType ?? "chain", child_runs: [], extra: metadata ? { metadata } : {}, tags: tags || [] }; return this._addRunToRunMap(run); } async handleChainStart(chain, inputs, runId, parentRunId, tags, metadata, runType, name) { const run = this.runMap.get(runId) ?? this._createRunForChainStart(chain, inputs, runId, parentRunId, tags, metadata, runType, name); await this.onRunCreate?.(run); await this.onChainStart?.(run); return run; } async handleChainEnd(outputs, runId, _parentRunId, _tags, kwargs) { const run = this.runMap.get(runId); if (!run) throw new Error("No chain run to end."); run.end_time = Date.now(); run.outputs = _coerceToDict$2(outputs, "output"); run.events.push({ name: "end", time: new Date(run.end_time).toISOString() }); if (kwargs?.inputs !== void 0) run.inputs = _coerceToDict$2(kwargs.inputs, "input"); await this.onChainEnd?.(run); await this._endTrace(run); return run; } async handleChainError(error, runId, _parentRunId, _tags, kwargs) { const run = this.runMap.get(runId); if (!run) throw new Error("No chain run to end."); run.end_time = Date.now(); run.error = this.stringifyError(error); run.events.push({ name: "error", time: new Date(run.end_time).toISOString() }); if (kwargs?.inputs !== void 0) run.inputs = _coerceToDict$2(kwargs.inputs, "input"); await this.onChainError?.(run); await this._endTrace(run); return run; } /** * Create and add a run to the run map for tool start events. * This must sometimes be done synchronously to avoid race conditions * when callbacks are backgrounded, so we expose it as a separate method here. 
*/ _createRunForToolStart(tool, input, runId, parentRunId, tags, metadata, name) { const execution_order = this._getExecutionOrder(parentRunId); const start_time = Date.now(); const run = { id: runId, name: name ?? tool.id[tool.id.length - 1], parent_run_id: parentRunId, start_time, serialized: tool, events: [{ name: "start", time: new Date(start_time).toISOString() }], inputs: { input }, execution_order, child_execution_order: execution_order, run_type: "tool", child_runs: [], extra: metadata ? { metadata } : {}, tags: tags || [] }; return this._addRunToRunMap(run); } async handleToolStart(tool, input, runId, parentRunId, tags, metadata, name) { const run = this.runMap.get(runId) ?? this._createRunForToolStart(tool, input, runId, parentRunId, tags, metadata, name); await this.onRunCreate?.(run); await this.onToolStart?.(run); return run; } async handleToolEnd(output, runId) { const run = this.runMap.get(runId); if (!run || run?.run_type !== "tool") throw new Error("No tool run to end"); run.end_time = Date.now(); run.outputs = { output }; run.events.push({ name: "end", time: new Date(run.end_time).toISOString() }); await this.onToolEnd?.(run); await this._endTrace(run); return run; } async handleToolError(error, runId) { const run = this.runMap.get(runId); if (!run || run?.run_type !== "tool") throw new Error("No tool run to end"); run.end_time = Date.now(); run.error = this.stringifyError(error); run.events.push({ name: "error", time: new Date(run.end_time).toISOString() }); await this.onToolError?.(run); await this._endTrace(run); return run; } async handleAgentAction(action, runId) { const run = this.runMap.get(runId); if (!run || run?.run_type !== "chain") return; const agentRun = run; agentRun.actions = agentRun.actions || []; agentRun.actions.push(action); agentRun.events.push({ name: "agent_action", time: (/* @__PURE__ */ new Date()).toISOString(), kwargs: { action } }); await this.onAgentAction?.(run); } async handleAgentEnd(action, runId) { const run = this.runMap.get(runId); if (!run || run?.run_type !== "chain") return; run.events.push({ name: "agent_end", time: (/* @__PURE__ */ new Date()).toISOString(), kwargs: { action } }); await this.onAgentEnd?.(run); } /** * Create and add a run to the run map for retriever start events. * This must sometimes be done synchronously to avoid race conditions * when callbacks are backgrounded, so we expose it as a separate method here. */ _createRunForRetrieverStart(retriever, query, runId, parentRunId, tags, metadata, name) { const execution_order = this._getExecutionOrder(parentRunId); const start_time = Date.now(); const run = { id: runId, name: name ?? retriever.id[retriever.id.length - 1], parent_run_id: parentRunId, start_time, serialized: retriever, events: [{ name: "start", time: new Date(start_time).toISOString() }], inputs: { query }, execution_order, child_execution_order: execution_order, run_type: "retriever", child_runs: [], extra: metadata ? { metadata } : {}, tags: tags || [] }; return this._addRunToRunMap(run); } async handleRetrieverStart(retriever, query, runId, parentRunId, tags, metadata, name) { const run = this.runMap.get(runId) ?? 
this._createRunForRetrieverStart(retriever, query, runId, parentRunId, tags, metadata, name); await this.onRunCreate?.(run); await this.onRetrieverStart?.(run); return run; } async handleRetrieverEnd(documents, runId) { const run = this.runMap.get(runId); if (!run || run?.run_type !== "retriever") throw new Error("No retriever run to end"); run.end_time = Date.now(); run.outputs = { documents }; run.events.push({ name: "end", time: new Date(run.end_time).toISOString() }); await this.onRetrieverEnd?.(run); await this._endTrace(run); return run; } async handleRetrieverError(error, runId) { const run = this.runMap.get(runId); if (!run || run?.run_type !== "retriever") throw new Error("No retriever run to end"); run.end_time = Date.now(); run.error = this.stringifyError(error); run.events.push({ name: "error", time: new Date(run.end_time).toISOString() }); await this.onRetrieverError?.(run); await this._endTrace(run); return run; } async handleText(text, runId) { const run = this.runMap.get(runId); if (!run || run?.run_type !== "chain") return; run.events.push({ name: "text", time: (/* @__PURE__ */ new Date()).toISOString(), kwargs: { text } }); await this.onText?.(run); } async handleLLMNewToken(token, idx, runId, _parentRunId, _tags, fields) { const run = this.runMap.get(runId); if (!run || run?.run_type !== "llm") throw new Error(`Invalid "runId" provided to "handleLLMNewToken" callback.`); run.events.push({ name: "new_token", time: (/* @__PURE__ */ new Date()).toISOString(), kwargs: { token, idx, chunk: fields?.chunk } }); await this.onLLMNewToken?.(run, token, { chunk: fields?.chunk }); return run; } }; //#endregion //#region node_modules/.pnpm/@langchain+core@0.3.57_openai@5.3.0_ws@8.18.2_zod@3.25.57_/node_modules/@langchain/core/dist/tracers/console.js var console_exports = {}; __export(console_exports, { ConsoleCallbackHandler: () => ConsoleCallbackHandler }); var import_ansi_styles = __toESM(require_ansi_styles(), 1); function wrap(style, text) { return `${style.open}${text}${style.close}`; } function tryJsonStringify(obj, fallback) { try { return JSON.stringify(obj, null, 2); } catch (err) { return fallback; } } function formatKVMapItem(value) { if (typeof value === "string") return value.trim(); if (value === null || value === void 0) return value; return tryJsonStringify(value, value.toString()); } function elapsed(run) { if (!run.end_time) return ""; const elapsed$1 = run.end_time - run.start_time; if (elapsed$1 < 1e3) return `${elapsed$1}ms`; return `${(elapsed$1 / 1e3).toFixed(2)}s`; } const { color } = import_ansi_styles.default; /** * A tracer that logs all events to the console. It extends from the * `BaseTracer` class and overrides its methods to provide custom logging * functionality. * @example * ```typescript * * const llm = new ChatAnthropic({ * temperature: 0, * tags: ["example", "callbacks", "constructor"], * callbacks: [new ConsoleCallbackHandler()], * }); * * ``` */ var ConsoleCallbackHandler = class extends BaseTracer { constructor() { super(...arguments); Object.defineProperty(this, "name", { enumerable: true, configurable: true, writable: true, value: "console_callback_handler" }); } /** * Method used to persist the run. In this case, it simply returns a * resolved promise as there's no persistence logic. * @param _run The run to persist. * @returns A resolved promise. */ persistRun(_run) { return Promise.resolve(); } /** * Method used to get all the parent runs of a given run. * @param run The run whose parents are to be retrieved. 
* @returns An array of parent runs. */ getParents(run) { const parents = []; let currentRun = run; while (currentRun.parent_run_id) { const parent = this.runMap.get(currentRun.parent_run_id); if (parent) { parents.push(parent); currentRun = parent; } else break; } return parents; } /** * Method used to get a string representation of the run's lineage, which * is used in logging. * @param run The run whose lineage is to be retrieved. * @returns A string representation of the run's lineage. */ getBreadcrumbs(run) { const parents = this.getParents(run).reverse(); const string = [...parents, run].map((parent, i, arr$1) => { const name = `${parent.execution_order}:${parent.run_type}:${parent.name}`; return i === arr$1.length - 1 ? wrap(import_ansi_styles.default.bold, name) : name; }).join(" > "); return wrap(color.grey, string); } /** * Method used to log the start of a chain run. * @param run The chain run that has started. * @returns void */ onChainStart(run) { const crumbs = this.getBreadcrumbs(run); console.log(`${wrap(color.green, "[chain/start]")} [${crumbs}] Entering Chain run with input: ${tryJsonStringify(run.inputs, "[inputs]")}`); } /** * Method used to log the end of a chain run. * @param run The chain run that has ended. * @returns void */ onChainEnd(run) { const crumbs = this.getBreadcrumbs(run); console.log(`${wrap(color.cyan, "[chain/end]")} [${crumbs}] [${elapsed(run)}] Exiting Chain run with output: ${tryJsonStringify(run.outputs, "[outputs]")}`); } /** * Method used to log any errors of a chain run. * @param run The chain run that has errored. * @returns void */ onChainError(run) { const crumbs = this.getBreadcrumbs(run); console.log(`${wrap(color.red, "[chain/error]")} [${crumbs}] [${elapsed(run)}] Chain run errored with error: ${tryJsonStringify(run.error, "[error]")}`); } /** * Method used to log the start of an LLM run. * @param run The LLM run that has started. * @returns void */ onLLMStart(run) { const crumbs = this.getBreadcrumbs(run); const inputs = "prompts" in run.inputs ? { prompts: run.inputs.prompts.map((p) => p.trim()) } : run.inputs; console.log(`${wrap(color.green, "[llm/start]")} [${crumbs}] Entering LLM run with input: ${tryJsonStringify(inputs, "[inputs]")}`); } /** * Method used to log the end of an LLM run. * @param run The LLM run that has ended. * @returns void */ onLLMEnd(run) { const crumbs = this.getBreadcrumbs(run); console.log(`${wrap(color.cyan, "[llm/end]")} [${crumbs}] [${elapsed(run)}] Exiting LLM run with output: ${tryJsonStringify(run.outputs, "[response]")}`); } /** * Method used to log any errors of an LLM run. * @param run The LLM run that has errored. * @returns void */ onLLMError(run) { const crumbs = this.getBreadcrumbs(run); console.log(`${wrap(color.red, "[llm/error]")} [${crumbs}] [${elapsed(run)}] LLM run errored with error: ${tryJsonStringify(run.error, "[error]")}`); } /** * Method used to log the start of a tool run. * @param run The tool run that has started. * @returns void */ onToolStart(run) { const crumbs = this.getBreadcrumbs(run); console.log(`${wrap(color.green, "[tool/start]")} [${crumbs}] Entering Tool run with input: "${formatKVMapItem(run.inputs.input)}"`); } /** * Method used to log the end of a tool run. * @param run The tool run that has ended. 
* @returns void */ onToolEnd(run) { const crumbs = this.getBreadcrumbs(run); console.log(`${wrap(color.cyan, "[tool/end]")} [${crumbs}] [${elapsed(run)}] Exiting Tool run with output: "${formatKVMapItem(run.outputs?.output)}"`); } /** * Method used to log any errors of a tool run. * @param run The tool run that has errored. * @returns void */ onToolError(run) { const crumbs = this.getBreadcrumbs(run); console.log(`${wrap(color.red, "[tool/error]")} [${crumbs}] [${elapsed(run)}] Tool run errored with error: ${tryJsonStringify(run.error, "[error]")}`); } /** * Method used to log the start of a retriever run. * @param run The retriever run that has started. * @returns void */ onRetrieverStart(run) { const crumbs = this.getBreadcrumbs(run); console.log(`${wrap(color.green, "[retriever/start]")} [${crumbs}] Entering Retriever run with input: ${tryJsonStringify(run.inputs, "[inputs]")}`); } /** * Method used to log the end of a retriever run. * @param run The retriever run that has ended. * @returns void */ onRetrieverEnd(run) { const crumbs = this.getBreadcrumbs(run); console.log(`${wrap(color.cyan, "[retriever/end]")} [${crumbs}] [${elapsed(run)}] Exiting Retriever run with output: ${tryJsonStringify(run.outputs, "[outputs]")}`); } /** * Method used to log any errors of a retriever run. * @param run The retriever run that has errored. * @returns void */ onRetrieverError(run) { const crumbs = this.getBreadcrumbs(run); console.log(`${wrap(color.red, "[retriever/error]")} [${crumbs}] [${elapsed(run)}] Retriever run errored with error: ${tryJsonStringify(run.error, "[error]")}`); } /** * Method used to log the action selected by the agent. * @param run The run in which the agent action occurred. * @returns void */ onAgentAction(run) { const agentRun = run; const crumbs = this.getBreadcrumbs(run); console.log(`${wrap(color.blue, "[agent/action]")} [${crumbs}] Agent selected action: ${tryJsonStringify(agentRun.actions[agentRun.actions.length - 1], "[action]")}`); } }; //#endregion //#region node_modules/.pnpm/@langchain+core@0.3.57_openai@5.3.0_ws@8.18.2_zod@3.25.57_/node_modules/@langchain/core/dist/errors/index.js function addLangChainErrorFields(error, lc_error_code) { error.lc_error_code = lc_error_code; error.message = `${error.message}\n\nTroubleshooting URL: https://js.langchain.com/docs/troubleshooting/errors/${lc_error_code}/\n`; return error; } //#endregion //#region node_modules/.pnpm/@langchain+core@0.3.57_openai@5.3.0_ws@8.18.2_zod@3.25.57_/node_modules/@langchain/core/dist/tools/utils.js function _isToolCall(toolCall) { return !!(toolCall && typeof toolCall === "object" && "type" in toolCall && toolCall.type === "tool_call"); } function _configHasToolCallId(config) { return !!(config && typeof config === "object" && "toolCall" in config && config.toolCall != null && typeof config.toolCall === "object" && "id" in config.toolCall && typeof config.toolCall.id === "string"); } /** * Custom error class used to handle exceptions related to tool input parsing. * It extends the built-in `Error` class and adds an optional `output` * property that can hold the output that caused the exception. 
*/ var ToolInputParsingException = class extends Error { constructor(message, output) { super(message); Object.defineProperty(this, "output", { enumerable: true, configurable: true, writable: true, value: void 0 }); this.output = output; } }; //#endregion //#region node_modules/.pnpm/@langchain+core@0.3.57_openai@5.3.0_ws@8.18.2_zod@3.25.57_/node_modules/@langchain/core/dist/utils/json.js function parseJsonMarkdown(s, parser = parsePartialJson) { s = s.trim(); const firstFenceIndex = s.indexOf("```"); if (firstFenceIndex === -1) return parser(s); let contentAfterFence = s.substring(firstFenceIndex + 3); if (contentAfterFence.startsWith("json\n")) contentAfterFence = contentAfterFence.substring(5); else if (contentAfterFence.startsWith("json")) contentAfterFence = contentAfterFence.substring(4); else if (contentAfterFence.startsWith("\n")) contentAfterFence = contentAfterFence.substring(1); const closingFenceIndex = contentAfterFence.indexOf("```"); let finalContent = contentAfterFence; if (closingFenceIndex !== -1) finalContent = contentAfterFence.substring(0, closingFenceIndex); return parser(finalContent.trim()); } function parsePartialJson(s) { if (typeof s === "undefined") return null; try { return JSON.parse(s); } catch (error) {} let new_s = ""; const stack = []; let isInsideString = false; let escaped = false; for (let char of s) { if (isInsideString) if (char === "\"" && !escaped) isInsideString = false; else if (char === "\n" && !escaped) char = "\\n"; else if (char === "\\") escaped = !escaped; else escaped = false; else if (char === "\"") { isInsideString = true; escaped = false; } else if (char === "{") stack.push("}"); else if (char === "[") stack.push("]"); else if (char === "}" || char === "]") if (stack && stack[stack.length - 1] === char) stack.pop(); else return null; new_s += char; } if (isInsideString) new_s += "\""; for (let i = stack.length - 1; i >= 0; i -= 1) new_s += stack[i]; try { return JSON.parse(new_s); } catch (error) { return null; } } //#endregion //#region node_modules/.pnpm/@langchain+core@0.3.57_openai@5.3.0_ws@8.18.2_zod@3.25.57_/node_modules/@langchain/core/dist/messages/content_blocks.js function isDataContentBlock(content_block) { return typeof content_block === "object" && content_block !== null && "type" in content_block && typeof content_block.type === "string" && "source_type" in content_block && (content_block.source_type === "url" || content_block.source_type === "base64" || content_block.source_type === "text" || content_block.source_type === "id"); } function isURLContentBlock(content_block) { return isDataContentBlock(content_block) && content_block.source_type === "url" && "url" in content_block && typeof content_block.url === "string"; } function isBase64ContentBlock(content_block) { return isDataContentBlock(content_block) && content_block.source_type === "base64" && "data" in content_block && typeof content_block.data === "string"; } function isPlainTextContentBlock(content_block) { return isDataContentBlock(content_block) && content_block.source_type === "text" && "text" in content_block && typeof content_block.text === "string"; } function isIDContentBlock(content_block) { return isDataContentBlock(content_block) && content_block.source_type === "id" && "id" in content_block && typeof content_block.id === "string"; } function convertToOpenAIImageBlock(content_block) { if (isDataContentBlock(content_block)) { if (content_block.source_type === "url") return { type: "image_url", image_url: { url: content_block.url } }; if 
(content_block.source_type === "base64") { if (!content_block.mime_type) throw new Error("mime_type key is required for base64 data."); const mime_type = content_block.mime_type; return { type: "image_url", image_url: { url: `data:${mime_type};base64,${content_block.data}` } }; } } throw new Error("Unsupported source type. Only 'url' and 'base64' are supported."); } /** * Utility function for ChatModelProviders. Parses a mime type into a type, subtype, and parameters. * * @param mime_type - The mime type to parse. * @returns An object containing the type, subtype, and parameters. */ function parseMimeType(mime_type) { const parts = mime_type.split(";")[0].split("/"); if (parts.length !== 2) throw new Error(`Invalid mime type: "${mime_type}" - does not match type/subtype format.`); const type = parts[0].trim(); const subtype = parts[1].trim(); if (type === "" || subtype === "") throw new Error(`Invalid mime type: "${mime_type}" - type or subtype is empty.`); const parameters = {}; for (const parameterKvp of mime_type.split(";").slice(1)) { const parameterParts = parameterKvp.split("="); if (parameterParts.length !== 2) throw new Error(`Invalid parameter syntax in mime type: "${mime_type}".`); const key = parameterParts[0].trim(); const value = parameterParts[1].trim(); if (key === "") throw new Error(`Invalid parameter syntax in mime type: "${mime_type}".`); parameters[key] = value; } return { type, subtype, parameters }; } /** * Utility function for ChatModelProviders. Parses a base64 data URL into a typed array or string. * * @param dataUrl - The base64 data URL to parse. * @param asTypedArray - Whether to return the data as a typed array. * @returns The parsed data and mime type, or undefined if the data URL is invalid. */ function parseBase64DataUrl({ dataUrl: data_url, asTypedArray = false }) { const formatMatch = data_url.match(/^data:(\w+\/\w+);base64,([A-Za-z0-9+/]+=*)$/); let mime_type; if (formatMatch) { mime_type = formatMatch[1].toLowerCase(); const data = asTypedArray ? Uint8Array.from(atob(formatMatch[2]), (c) => c.charCodeAt(0)) : formatMatch[2]; return { mime_type, data }; } return void 0; } /** * Convert from a standard data content block to a provider's proprietary data content block format. * * Don't override this method. Instead, override the more specific conversion methods and use this * method unmodified. * * @param block - The standard data content block to convert. * @returns The provider data content block. * @throws An error if the standard data content block type is not supported. 
*/ function convertToProviderContentBlock(block, converter) { if (block.type === "text") { if (!converter.fromStandardTextBlock) throw new Error(`Converter for ${converter.providerName} does not implement \`fromStandardTextBlock\` method.`); return converter.fromStandardTextBlock(block); } if (block.type === "image") { if (!converter.fromStandardImageBlock) throw new Error(`Converter for ${converter.providerName} does not implement \`fromStandardImageBlock\` method.`); return converter.fromStandardImageBlock(block); } if (block.type === "audio") { if (!converter.fromStandardAudioBlock) throw new Error(`Converter for ${converter.providerName} does not implement \`fromStandardAudioBlock\` method.`); return converter.fromStandardAudioBlock(block); } if (block.type === "file") { if (!converter.fromStandardFileBlock) throw new Error(`Converter for ${converter.providerName} does not implement \`fromStandardFileBlock\` method.`); return converter.fromStandardFileBlock(block); } throw new Error(`Unable to convert content block type '${block.type}' to provider-specific format: not recognized.`); } //#endregion //#region node_modules/.pnpm/@langchain+core@0.3.57_openai@5.3.0_ws@8.18.2_zod@3.25.57_/node_modules/@langchain/core/dist/messages/base.js function mergeContent(firstContent, secondContent) { if (typeof firstContent === "string") { if (firstContent === "") return secondContent; if (typeof secondContent === "string") return firstContent + secondContent; else if (Array.isArray(secondContent) && secondContent.some((c) => isDataContentBlock(c))) return [{ type: "text", source_type: "text", text: firstContent }, ...secondContent]; else return [{ type: "text", text: firstContent }, ...secondContent]; } else if (Array.isArray(secondContent)) return _mergeLists(firstContent, secondContent) ?? [...firstContent, ...secondContent]; else if (secondContent === "") return firstContent; else if (Array.isArray(firstContent) && firstContent.some((c) => isDataContentBlock(c))) return [...firstContent, { type: "text", source_type: "text", text: secondContent }]; else return [...firstContent, { type: "text", text: secondContent }]; } /** * 'Merge' two statuses. If either value passed is 'error', it will return 'error'. Else * it will return 'success'. * * @param {"success" | "error" | undefined} left The existing value to 'merge' with the new value. * @param {"success" | "error" | undefined} right The new value to 'merge' with the existing value * @returns {"success" | "error"} The 'merged' value. */ function _mergeStatus(left, right) { if (left === "error" || right === "error") return "error"; return "success"; } function stringifyWithDepthLimit(obj, depthLimit) { function helper(obj$1, currentDepth) { if (typeof obj$1 !== "object" || obj$1 === null || obj$1 === void 0) return obj$1; if (currentDepth >= depthLimit) { if (Array.isArray(obj$1)) return "[Array]"; return "[Object]"; } if (Array.isArray(obj$1)) return obj$1.map((item) => helper(item, currentDepth + 1)); const result = {}; for (const key of Object.keys(obj$1)) result[key] = helper(obj$1[key], currentDepth + 1); return result; } return JSON.stringify(helper(obj, 0), null, 2); } /** * Base class for all types of messages in a conversation. It includes * properties like `content`, `name`, and `additional_kwargs`. It also * includes methods like `toDict()` and `_getType()`.
*/ var BaseMessage = class extends Serializable { get lc_aliases() { return { additional_kwargs: "additional_kwargs", response_metadata: "response_metadata" }; } /** * Get text content of the message. */ get text() { if (typeof this.content === "string") return this.content; if (!Array.isArray(this.content)) return ""; return this.content.map((c) => { if (typeof c === "string") return c; if (c.type === "text") return c.text; return ""; }).join(""); } /** The type of the message. */ getType() { return this._getType(); } constructor(fields, kwargs) { if (typeof fields === "string") fields = { content: fields, additional_kwargs: kwargs, response_metadata: {} }; if (!fields.additional_kwargs) fields.additional_kwargs = {}; if (!fields.response_metadata) fields.response_metadata = {}; super(fields); Object.defineProperty(this, "lc_namespace", { enumerable: true, configurable: true, writable: true, value: ["langchain_core", "messages"] }); Object.defineProperty(this, "lc_serializable", { enumerable: true, configurable: true, writable: true, value: true }); /** The content of the message. */ Object.defineProperty(this, "content", { enumerable: true, configurable: true, writable: true, value: void 0 }); /** The name of the message sender in a multi-user chat. */ Object.defineProperty(this, "name", { enumerable: true, configurable: true, writable: true, value: void 0 }); /** Additional keyword arguments */ Object.defineProperty(this, "additional_kwargs", { enumerable: true, configurable: true, writable: true, value: void 0 }); /** Response metadata. For example: response headers, logprobs, token counts. */ Object.defineProperty(this, "response_metadata", { enumerable: true, configurable: true, writable: true, value: void 0 }); /** * An optional unique identifier for the message. This should ideally be * provided by the provider/model which created the message. 
*/ Object.defineProperty(this, "id", { enumerable: true, configurable: true, writable: true, value: void 0 }); this.name = fields.name; this.content = fields.content; this.additional_kwargs = fields.additional_kwargs; this.response_metadata = fields.response_metadata; this.id = fields.id; } toDict() { return { type: this._getType(), data: this.toJSON().kwargs }; } static lc_name() { return "BaseMessage"; } get _printableFields() { return { id: this.id, content: this.content, name: this.name, additional_kwargs: this.additional_kwargs, response_metadata: this.response_metadata }; } _updateId(value) { this.id = value; this.lc_kwargs.id = value; } get [Symbol.toStringTag]() { return this.constructor.lc_name(); } [Symbol.for("nodejs.util.inspect.custom")](depth) { if (depth === null) return this; const printable = stringifyWithDepthLimit(this._printableFields, Math.max(4, depth)); return `${this.constructor.lc_name()} ${printable}`; } }; function isOpenAIToolCallArray(value) { return Array.isArray(value) && value.every((v) => typeof v.index === "number"); } function _mergeDicts(left, right) { const merged = { ...left }; for (const [key, value] of Object.entries(right)) if (merged[key] == null) merged[key] = value; else if (value == null) continue; else if (typeof merged[key] !== typeof value || Array.isArray(merged[key]) !== Array.isArray(value)) throw new Error(`field[${key}] already exists in the message chunk, but with a different type.`); else if (typeof merged[key] === "string") { if (key === "type") continue; merged[key] += value; } else if (typeof merged[key] === "object" && !Array.isArray(merged[key])) merged[key] = _mergeDicts(merged[key], value); else if (Array.isArray(merged[key])) merged[key] = _mergeLists(merged[key], value); else if (merged[key] === value) continue; else console.warn(`field[${key}] already exists in this message chunk and value has unsupported type.`); return merged; } function _mergeLists(left, right) { if (left === void 0 && right === void 0) return void 0; else if (left === void 0 || right === void 0) return left || right; else { const merged = [...left]; for (const item of right) if (typeof item === "object" && "index" in item && typeof item.index === "number") { const toMerge = merged.findIndex((leftItem) => leftItem.index === item.index); if (toMerge !== -1) merged[toMerge] = _mergeDicts(merged[toMerge], item); else merged.push(item); } else if (typeof item === "object" && "text" in item && item.text === "") continue; else merged.push(item); return merged; } } function _mergeObj(left, right) { if (!left && !right) throw new Error("Cannot merge two undefined objects."); if (!left || !right) return left || right; else if (typeof left !== typeof right) throw new Error(`Cannot merge objects of different types.\nLeft ${typeof left}\nRight ${typeof right}`); else if (typeof left === "string" && typeof right === "string") return left + right; else if (Array.isArray(left) && Array.isArray(right)) return _mergeLists(left, right); else if (typeof left === "object" && typeof right === "object") return _mergeDicts(left, right); else if (left === right) return left; else throw new Error(`Can not merge objects of different types.\nLeft ${left}\nRight ${right}`); } /** * Represents a chunk of a message, which can be concatenated with other * message chunks. It includes a method `_merge_kwargs_dict()` for merging * additional keyword arguments from another `BaseMessageChunk` into this * one. 
It also overrides the `__add__()` method to support concatenation * of `BaseMessageChunk` instances. */ var BaseMessageChunk = class extends BaseMessage {}; function _isMessageFieldWithRole(x) { return typeof x.role === "string"; } function isBaseMessage(messageLike) { return typeof messageLike?._getType === "function"; } function isBaseMessageChunk(messageLike) { return isBaseMessage(messageLike) && typeof messageLike.concat === "function"; } //#endregion //#region node_modules/.pnpm/@langchain+core@0.3.57_openai@5.3.0_ws@8.18.2_zod@3.25.57_/node_modules/@langchain/core/dist/messages/tool.js function isDirectToolOutput(x) { return x != null && typeof x === "object" && "lc_direct_tool_output" in x && x.lc_direct_tool_output === true; } /** * Represents a tool message in a conversation. */ var ToolMessage = class extends BaseMessage { static lc_name() { return "ToolMessage"; } get lc_aliases() { return { tool_call_id: "tool_call_id" }; } constructor(fields, tool_call_id, name) { if (typeof fields === "string") fields = { content: fields, name, tool_call_id }; super(fields); Object.defineProperty(this, "lc_direct_tool_output", { enumerable: true, configurable: true, writable: true, value: true }); /** * Status of the tool invocation. * @version 0.2.19 */ Object.defineProperty(this, "status", { enumerable: true, configurable: true, writable: true, value: void 0 }); Object.defineProperty(this, "tool_call_id", { enumerable: true, configurable: true, writable: true, value: void 0 }); /** * Artifact of the Tool execution which is not meant to be sent to the model. * * Should only be specified if it is different from the message content, e.g. if only * a subset of the full tool output is being passed as message content but the full * output is needed in other parts of the code. */ Object.defineProperty(this, "artifact", { enumerable: true, configurable: true, writable: true, value: void 0 }); this.tool_call_id = fields.tool_call_id; this.artifact = fields.artifact; this.status = fields.status; } _getType() { return "tool"; } static isInstance(message) { return message._getType() === "tool"; } get _printableFields() { return { ...super._printableFields, tool_call_id: this.tool_call_id, artifact: this.artifact }; } }; /** * Represents a chunk of a tool message, which can be concatenated * with other tool message chunks. */ var ToolMessageChunk = class ToolMessageChunk extends BaseMessageChunk { constructor(fields) { super(fields); Object.defineProperty(this, "tool_call_id", { enumerable: true, configurable: true, writable: true, value: void 0 }); /** * Status of the tool invocation. * @version 0.2.19 */ Object.defineProperty(this, "status", { enumerable: true, configurable: true, writable: true, value: void 0 }); /** * Artifact of the Tool execution which is not meant to be sent to the model. * * Should only be specified if it is different from the message content, e.g. if only * a subset of the full tool output is being passed as message content but the full * output is needed in other parts of the code. 
*/ Object.defineProperty(this, "artifact", { enumerable: true, configurable: true, writable: true, value: void 0 }); this.tool_call_id = fields.tool_call_id; this.artifact = fields.artifact; this.status = fields.status; } static lc_name() { return "ToolMessageChunk"; } _getType() { return "tool"; } concat(chunk) { return new ToolMessageChunk({ content: mergeContent(this.content, chunk.content), additional_kwargs: _mergeDicts(this.additional_kwargs, chunk.additional_kwargs), response_metadata: _mergeDicts(this.response_metadata, chunk.response_metadata), artifact: _mergeObj(this.artifact, chunk.artifact), tool_call_id: this.tool_call_id, id: this.id ?? chunk.id, status: _mergeStatus(this.status, chunk.status) }); } get _printableFields() { return { ...super._printableFields, tool_call_id: this.tool_call_id, artifact: this.artifact }; } }; function defaultToolCallParser(rawToolCalls) { const toolCalls = []; const invalidToolCalls = []; for (const toolCall of rawToolCalls) if (!toolCall.function) continue; else { const functionName = toolCall.function.name; try { const functionArgs = JSON.parse(toolCall.function.arguments); const parsed = { name: functionName || "", args: functionArgs || {}, id: toolCall.id }; toolCalls.push(parsed); } catch (error) { invalidToolCalls.push({ name: functionName, args: toolCall.function.arguments, id: toolCall.id, error: "Malformed args." }); } } return [toolCalls, invalidToolCalls]; } function isToolMessage(x) { return x._getType() === "tool"; } function isToolMessageChunk(x) { return x._getType() === "tool"; } //#endregion //#region node_modules/.pnpm/@langchain+core@0.3.57_openai@5.3.0_ws@8.18.2_zod@3.25.57_/node_modules/@langchain/core/dist/messages/ai.js /** * Represents an AI message in a conversation. */ var AIMessage = class extends BaseMessage { get lc_aliases() { return { ...super.lc_aliases, tool_calls: "tool_calls", invalid_tool_calls: "invalid_tool_calls" }; } constructor(fields, kwargs) { let initParams; if (typeof fields === "string") initParams = { content: fields, tool_calls: [], invalid_tool_calls: [], additional_kwargs: kwargs ?? {} }; else { initParams = fields; const rawToolCalls = initParams.additional_kwargs?.tool_calls; const toolCalls = initParams.tool_calls; if (!(rawToolCalls == null) && rawToolCalls.length > 0 && (toolCalls === void 0 || toolCalls.length === 0)) console.warn([ "New LangChain packages are available that more efficiently handle", "tool calling.\n\nPlease upgrade your packages to versions that set", "message tool calls. e.g., `yarn add @langchain/anthropic`,", "yarn add @langchain/openai`, etc." ].join(" ")); try { if (!(rawToolCalls == null) && toolCalls === void 0) { const [toolCalls$1, invalidToolCalls] = defaultToolCallParser(rawToolCalls); initParams.tool_calls = toolCalls$1 ?? []; initParams.invalid_tool_calls = invalidToolCalls ?? []; } else { initParams.tool_calls = initParams.tool_calls ?? []; initParams.invalid_tool_calls = initParams.invalid_tool_calls ?? []; } } catch (e) { initParams.tool_calls = []; initParams.invalid_tool_calls = []; } } super(initParams); Object.defineProperty(this, "tool_calls", { enumerable: true, configurable: true, writable: true, value: [] }); Object.defineProperty(this, "invalid_tool_calls", { enumerable: true, configurable: true, writable: true, value: [] }); /** * If provided, token usage information associated with the message. 
*/ Object.defineProperty(this, "usage_metadata", { enumerable: true, configurable: true, writable: true, value: void 0 }); if (typeof initParams !== "string") { this.tool_calls = initParams.tool_calls ?? this.tool_calls; this.invalid_tool_calls = initParams.invalid_tool_calls ?? this.invalid_tool_calls; } this.usage_metadata = initParams.usage_metadata; } static lc_name() { return "AIMessage"; } _getType() { return "ai"; } get _printableFields() { return { ...super._printableFields, tool_calls: this.tool_calls, invalid_tool_calls: this.invalid_tool_calls, usage_metadata: this.usage_metadata }; } }; function isAIMessage(x) { return x._getType() === "ai"; } function isAIMessageChunk(x) { return x._getType() === "ai"; } /** * Represents a chunk of an AI message, which can be concatenated with * other AI message chunks. */ var AIMessageChunk = class AIMessageChunk extends BaseMessageChunk { constructor(fields) { let initParams; if (typeof fields === "string") initParams = { content: fields, tool_calls: [], invalid_tool_calls: [], tool_call_chunks: [] }; else if (fields.tool_call_chunks === void 0) initParams = { ...fields, tool_calls: fields.tool_calls ?? [], invalid_tool_calls: [], tool_call_chunks: [], usage_metadata: fields.usage_metadata !== void 0 ? fields.usage_metadata : void 0 }; else { const toolCalls = []; const invalidToolCalls = []; for (const toolCallChunk of fields.tool_call_chunks) { let parsedArgs = {}; try { parsedArgs = parsePartialJson(toolCallChunk.args || "{}"); if (parsedArgs === null || typeof parsedArgs !== "object" || Array.isArray(parsedArgs)) throw new Error("Malformed tool call chunk args."); toolCalls.push({ name: toolCallChunk.name ?? "", args: parsedArgs, id: toolCallChunk.id, type: "tool_call" }); } catch (e) { invalidToolCalls.push({ name: toolCallChunk.name, args: toolCallChunk.args, id: toolCallChunk.id, error: "Malformed args.", type: "invalid_tool_call" }); } } initParams = { ...fields, tool_calls: toolCalls, invalid_tool_calls: invalidToolCalls, usage_metadata: fields.usage_metadata !== void 0 ? fields.usage_metadata : void 0 }; } super(initParams); Object.defineProperty(this, "tool_calls", { enumerable: true, configurable: true, writable: true, value: [] }); Object.defineProperty(this, "invalid_tool_calls", { enumerable: true, configurable: true, writable: true, value: [] }); Object.defineProperty(this, "tool_call_chunks", { enumerable: true, configurable: true, writable: true, value: [] }); /** * If provided, token usage information associated with the message. */ Object.defineProperty(this, "usage_metadata", { enumerable: true, configurable: true, writable: true, value: void 0 }); this.tool_call_chunks = initParams.tool_call_chunks ?? this.tool_call_chunks; this.tool_calls = initParams.tool_calls ?? this.tool_calls; this.invalid_tool_calls = initParams.invalid_tool_calls ?? 
this.invalid_tool_calls; this.usage_metadata = initParams.usage_metadata; } get lc_aliases() { return { ...super.lc_aliases, tool_calls: "tool_calls", invalid_tool_calls: "invalid_tool_calls", tool_call_chunks: "tool_call_chunks" }; } static lc_name() { return "AIMessageChunk"; } _getType() { return "ai"; } get _printableFields() { return { ...super._printableFields, tool_calls: this.tool_calls, tool_call_chunks: this.tool_call_chunks, invalid_tool_calls: this.invalid_tool_calls, usage_metadata: this.usage_metadata }; } concat(chunk) { const combinedFields = { content: mergeContent(this.content, chunk.content), additional_kwargs: _mergeDicts(this.additional_kwargs, chunk.additional_kwargs), response_metadata: _mergeDicts(this.response_metadata, chunk.response_metadata), tool_call_chunks: [], id: this.id ?? chunk.id }; if (this.tool_call_chunks !== void 0 || chunk.tool_call_chunks !== void 0) { const rawToolCalls = _mergeLists(this.tool_call_chunks, chunk.tool_call_chunks); if (rawToolCalls !== void 0 && rawToolCalls.length > 0) combinedFields.tool_call_chunks = rawToolCalls; } if (this.usage_metadata !== void 0 || chunk.usage_metadata !== void 0) { const inputTokenDetails = { ...(this.usage_metadata?.input_token_details?.audio !== void 0 || chunk.usage_metadata?.input_token_details?.audio !== void 0) && { audio: (this.usage_metadata?.input_token_details?.audio ?? 0) + (chunk.usage_metadata?.input_token_details?.audio ?? 0) }, ...(this.usage_metadata?.input_token_details?.cache_read !== void 0 || chunk.usage_metadata?.input_token_details?.cache_read !== void 0) && { cache_read: (this.usage_metadata?.input_token_details?.cache_read ?? 0) + (chunk.usage_metadata?.input_token_details?.cache_read ?? 0) }, ...(this.usage_metadata?.input_token_details?.cache_creation !== void 0 || chunk.usage_metadata?.input_token_details?.cache_creation !== void 0) && { cache_creation: (this.usage_metadata?.input_token_details?.cache_creation ?? 0) + (chunk.usage_metadata?.input_token_details?.cache_creation ?? 0) } }; const outputTokenDetails = { ...(this.usage_metadata?.output_token_details?.audio !== void 0 || chunk.usage_metadata?.output_token_details?.audio !== void 0) && { audio: (this.usage_metadata?.output_token_details?.audio ?? 0) + (chunk.usage_metadata?.output_token_details?.audio ?? 0) }, ...(this.usage_metadata?.output_token_details?.reasoning !== void 0 || chunk.usage_metadata?.output_token_details?.reasoning !== void 0) && { reasoning: (this.usage_metadata?.output_token_details?.reasoning ?? 0) + (chunk.usage_metadata?.output_token_details?.reasoning ?? 0) } }; const left = this.usage_metadata ?? { input_tokens: 0, output_tokens: 0, total_tokens: 0 }; const right = chunk.usage_metadata ?? { input_tokens: 0, output_tokens: 0, total_tokens: 0 }; const usage_metadata = { input_tokens: left.input_tokens + right.input_tokens, output_tokens: left.output_tokens + right.output_tokens, total_tokens: left.total_tokens + right.total_tokens, ...Object.keys(inputTokenDetails).length > 0 && { input_token_details: inputTokenDetails }, ...Object.keys(outputTokenDetails).length > 0 && { output_token_details: outputTokenDetails } }; combinedFields.usage_metadata = usage_metadata; } return new AIMessageChunk(combinedFields); } }; //#endregion //#region node_modules/.pnpm/@langchain+core@0.3.57_openai@5.3.0_ws@8.18.2_zod@3.25.57_/node_modules/@langchain/core/dist/messages/chat.js /** * Represents a chat message in a conversation. 
*/ var ChatMessage = class ChatMessage extends BaseMessage { static lc_name() { return "ChatMessage"; } static _chatMessageClass() { return ChatMessage; } constructor(fields, role) { if (typeof fields === "string") fields = { content: fields, role }; super(fields); Object.defineProperty(this, "role", { enumerable: true, configurable: true, writable: true, value: void 0 }); this.role = fields.role; } _getType() { return "generic"; } static isInstance(message) { return message._getType() === "generic"; } get _printableFields() { return { ...super._printableFields, role: this.role }; } }; /** * Represents a chunk of a chat message, which can be concatenated with * other chat message chunks. */ var ChatMessageChunk = class ChatMessageChunk extends BaseMessageChunk { static lc_name() { return "ChatMessageChunk"; } constructor(fields, role) { if (typeof fields === "string") fields = { content: fields, role }; super(fields); Object.defineProperty(this, "role", { enumerable: true, configurable: true, writable: true, value: void 0 }); this.role = fields.role; } _getType() { return "generic"; } concat(chunk) { return new ChatMessageChunk({ content: mergeContent(this.content, chunk.content), additional_kwargs: _mergeDicts(this.additional_kwargs, chunk.additional_kwargs), response_metadata: _mergeDicts(this.response_metadata, chunk.response_metadata), role: this.role, id: this.id ?? chunk.id }); } get _printableFields() { return { ...super._printableFields, role: this.role }; } }; function isChatMessage(x) { return x._getType() === "generic"; } function isChatMessageChunk(x) { return x._getType() === "generic"; } //#endregion //#region node_modules/.pnpm/@langchain+core@0.3.57_openai@5.3.0_ws@8.18.2_zod@3.25.57_/node_modules/@langchain/core/dist/messages/function.js /** * Represents a function message in a conversation. */ var FunctionMessage = class extends BaseMessage { static lc_name() { return "FunctionMessage"; } constructor(fields, name) { if (typeof fields === "string") fields = { content: fields, name }; super(fields); } _getType() { return "function"; } }; /** * Represents a chunk of a function message, which can be concatenated * with other function message chunks. */ var FunctionMessageChunk = class FunctionMessageChunk extends BaseMessageChunk { static lc_name() { return "FunctionMessageChunk"; } _getType() { return "function"; } concat(chunk) { return new FunctionMessageChunk({ content: mergeContent(this.content, chunk.content), additional_kwargs: _mergeDicts(this.additional_kwargs, chunk.additional_kwargs), response_metadata: _mergeDicts(this.response_metadata, chunk.response_metadata), name: this.name ?? "", id: this.id ?? chunk.id }); } }; function isFunctionMessage(x) { return x._getType() === "function"; } function isFunctionMessageChunk(x) { return x._getType() === "function"; } //#endregion //#region node_modules/.pnpm/@langchain+core@0.3.57_openai@5.3.0_ws@8.18.2_zod@3.25.57_/node_modules/@langchain/core/dist/messages/human.js /** * Represents a human message in a conversation. */ var HumanMessage = class extends BaseMessage { static lc_name() { return "HumanMessage"; } _getType() { return "human"; } constructor(fields, kwargs) { super(fields, kwargs); } }; /** * Represents a chunk of a human message, which can be concatenated with * other human message chunks. 
*/ var HumanMessageChunk = class HumanMessageChunk extends BaseMessageChunk { static lc_name() { return "HumanMessageChunk"; } _getType() { return "human"; } constructor(fields, kwargs) { super(fields, kwargs); } concat(chunk) { return new HumanMessageChunk({ content: mergeContent(this.content, chunk.content), additional_kwargs: _mergeDicts(this.additional_kwargs, chunk.additional_kwargs), response_metadata: _mergeDicts(this.response_metadata, chunk.response_metadata), id: this.id ?? chunk.id }); } }; function isHumanMessage(x) { return x.getType() === "human"; } function isHumanMessageChunk(x) { return x.getType() === "human"; } //#endregion //#region node_modules/.pnpm/@langchain+core@0.3.57_openai@5.3.0_ws@8.18.2_zod@3.25.57_/node_modules/@langchain/core/dist/messages/system.js /** * Represents a system message in a conversation. */ var SystemMessage = class extends BaseMessage { static lc_name() { return "SystemMessage"; } _getType() { return "system"; } constructor(fields, kwargs) { super(fields, kwargs); } }; /** * Represents a chunk of a system message, which can be concatenated with * other system message chunks. */ var SystemMessageChunk = class SystemMessageChunk extends BaseMessageChunk { static lc_name() { return "SystemMessageChunk"; } _getType() { return "system"; } constructor(fields, kwargs) { super(fields, kwargs); } concat(chunk) { return new SystemMessageChunk({ content: mergeContent(this.content, chunk.content), additional_kwargs: _mergeDicts(this.additional_kwargs, chunk.additional_kwargs), response_metadata: _mergeDicts(this.response_metadata, chunk.response_metadata), id: this.id ?? chunk.id }); } }; function isSystemMessage(x) { return x._getType() === "system"; } function isSystemMessageChunk(x) { return x._getType() === "system"; } //#endregion //#region node_modules/.pnpm/@langchain+core@0.3.57_openai@5.3.0_ws@8.18.2_zod@3.25.57_/node_modules/@langchain/core/dist/messages/utils.js function _coerceToolCall(toolCall) { if (_isToolCall(toolCall)) return toolCall; else if (typeof toolCall.id === "string" && toolCall.type === "function" && typeof toolCall.function === "object" && toolCall.function !== null && "arguments" in toolCall.function && typeof toolCall.function.arguments === "string" && "name" in toolCall.function && typeof toolCall.function.name === "string") return { id: toolCall.id, args: JSON.parse(toolCall.function.arguments), name: toolCall.function.name, type: "tool_call" }; else return toolCall; } function isSerializedConstructor(x) { return typeof x === "object" && x != null && x.lc === 1 && Array.isArray(x.id) && x.kwargs != null && typeof x.kwargs === "object"; } function _constructMessageFromParams(params) { let type; let rest; if (isSerializedConstructor(params)) { const className = params.id.at(-1); if (className === "HumanMessage" || className === "HumanMessageChunk") type = "user"; else if (className === "AIMessage" || className === "AIMessageChunk") type = "assistant"; else if (className === "SystemMessage" || className === "SystemMessageChunk") type = "system"; else if (className === "FunctionMessage" || className === "FunctionMessageChunk") type = "function"; else if (className === "ToolMessage" || className === "ToolMessageChunk") type = "tool"; else type = "unknown"; rest = params.kwargs; } else { const { type: extractedType,...otherParams } = params; type = extractedType; rest = otherParams; } if (type === "human" || type === "user") return new HumanMessage(rest); else if (type === "ai" || type === "assistant") { const { tool_calls: 
rawToolCalls,...other } = rest; if (!Array.isArray(rawToolCalls)) return new AIMessage(rest); const tool_calls = rawToolCalls.map(_coerceToolCall); return new AIMessage({ ...other, tool_calls }); } else if (type === "system") return new SystemMessage(rest); else if (type === "developer") return new SystemMessage({ ...rest, additional_kwargs: { ...rest.additional_kwargs, __openai_role__: "developer" } }); else if (type === "tool" && "tool_call_id" in rest) return new ToolMessage({ ...rest, content: rest.content, tool_call_id: rest.tool_call_id, name: rest.name }); else { const error = addLangChainErrorFields(new Error(`Unable to coerce message from array: only human, AI, system, developer, or tool message coercion is currently supported.\n\nReceived: ${JSON.stringify(params, null, 2)}`), "MESSAGE_COERCION_FAILURE"); throw error; } } function coerceMessageLikeToMessage(messageLike) { if (typeof messageLike === "string") return new HumanMessage(messageLike); else if (isBaseMessage(messageLike)) return messageLike; if (Array.isArray(messageLike)) { const [type, content] = messageLike; return _constructMessageFromParams({ type, content }); } else if (_isMessageFieldWithRole(messageLike)) { const { role: type,...rest } = messageLike; return _constructMessageFromParams({ ...rest, type }); } else return _constructMessageFromParams(messageLike); } /** * This function is used by memory classes to get a string representation * of the chat message history, based on the message content and role. */ function getBufferString(messages, humanPrefix = "Human", aiPrefix = "AI") { const string_messages = []; for (const m of messages) { let role; if (m._getType() === "human") role = humanPrefix; else if (m._getType() === "ai") role = aiPrefix; else if (m._getType() === "system") role = "System"; else if (m._getType() === "function") role = "Function"; else if (m._getType() === "tool") role = "Tool"; else if (m._getType() === "generic") role = m.role; else throw new Error(`Got unsupported message type: ${m._getType()}`); const nameStr = m.name ? `${m.name}, ` : ""; const readableContent = typeof m.content === "string" ? m.content : JSON.stringify(m.content, null, 2); string_messages.push(`${role}: ${nameStr}${readableContent}`); } return string_messages.join("\n"); } /** * Maps messages from an older format (V1) to the current `StoredMessage` * format. If the message is already in the `StoredMessage` format, it is * returned as is. Otherwise, it transforms the V1 message into a * `StoredMessage`. This function is important for maintaining * compatibility with older message formats. 
*/ function mapV1MessageToStoredMessage(message) { if (message.data !== void 0) return message; else { const v1Message = message; return { type: v1Message.type, data: { content: v1Message.text, role: v1Message.role, name: void 0, tool_call_id: void 0 } }; } } function mapStoredMessageToChatMessage(message) { const storedMessage = mapV1MessageToStoredMessage(message); switch (storedMessage.type) { case "human": return new HumanMessage(storedMessage.data); case "ai": return new AIMessage(storedMessage.data); case "system": return new SystemMessage(storedMessage.data); case "function": if (storedMessage.data.name === void 0) throw new Error("Name must be defined for function messages"); return new FunctionMessage(storedMessage.data); case "tool": if (storedMessage.data.tool_call_id === void 0) throw new Error("Tool call ID must be defined for tool messages"); return new ToolMessage(storedMessage.data); case "generic": { if (storedMessage.data.role === void 0) throw new Error("Role must be defined for chat messages"); return new ChatMessage(storedMessage.data); } default: throw new Error(`Got unexpected type: ${storedMessage.type}`); } } /** * Transforms an array of `StoredMessage` instances into an array of * `BaseMessage` instances. It uses the `mapV1MessageToStoredMessage` * function to ensure all messages are in the `StoredMessage` format, then * creates new instances of the appropriate `BaseMessage` subclass based * on the type of each message. This function is used to prepare stored * messages for use in a chat context. */ function mapStoredMessagesToChatMessages(messages) { return messages.map(mapStoredMessageToChatMessage); } /** * Transforms an array of `BaseMessage` instances into an array of * `StoredMessage` instances. It does this by calling the `toDict` method * on each `BaseMessage`, which returns a `StoredMessage`. This function * is used to prepare chat messages for storage. */ function mapChatMessagesToStoredMessages(messages) { return messages.map((message) => message.toDict()); } function convertToChunk(message) { const type = message._getType(); if (type === "human") return new HumanMessageChunk({ ...message }); else if (type === "ai") { let aiChunkFields = { ...message }; if ("tool_calls" in aiChunkFields) aiChunkFields = { ...aiChunkFields, tool_call_chunks: aiChunkFields.tool_calls?.map((tc) => ({ ...tc, type: "tool_call_chunk", index: void 0, args: JSON.stringify(tc.args) })) }; return new AIMessageChunk({ ...aiChunkFields }); } else if (type === "system") return new SystemMessageChunk({ ...message }); else if (type === "function") return new FunctionMessageChunk({ ...message }); else if (ChatMessage.isInstance(message)) return new ChatMessageChunk({ ...message }); else throw new Error("Unknown message type."); } //#endregion //#region node_modules/.pnpm/langsmith@0.3.30_openai@5.3.0_ws@8.18.2_zod@3.25.57_/node_modules/langsmith/dist/singletons/traceable.js var MockAsyncLocalStorage$1 = class { getStore() { return void 0; } run(_, callback) { return callback(); } }; const TRACING_ALS_KEY = Symbol.for("ls:tracing_async_local_storage"); const mockAsyncLocalStorage$1 = new MockAsyncLocalStorage$1(); var AsyncLocalStorageProvider$1 = class { getInstance() { return globalThis[TRACING_ALS_KEY] ?? 
mockAsyncLocalStorage$1; } initializeGlobalInstance(instance) { if (globalThis[TRACING_ALS_KEY] === void 0) globalThis[TRACING_ALS_KEY] = instance; } }; const AsyncLocalStorageProviderSingleton$1 = new AsyncLocalStorageProvider$1(); function getCurrentRunTree(permitAbsentRunTree = false) { const runTree = AsyncLocalStorageProviderSingleton$1.getInstance().getStore(); if (!permitAbsentRunTree && !isRunTree(runTree)) throw new Error("Could not get the current run tree.\n\nPlease make sure you are calling this method within a traceable function and that tracing is enabled."); return runTree; } const ROOT = Symbol.for("langsmith:traceable:root"); function isTraceableFunction(x) { return typeof x === "function" && "langsmith:traceable" in x; } //#endregion //#region node_modules/.pnpm/@langchain+core@0.3.57_openai@5.3.0_ws@8.18.2_zod@3.25.57_/node_modules/@langchain/core/dist/singletons/tracer.js let client; const getDefaultLangChainClientSingleton = () => { if (client === void 0) { const clientParams = getEnvironmentVariable("LANGCHAIN_CALLBACKS_BACKGROUND") === "false" ? { blockOnRootRunFinalization: true } : {}; client = new Client(clientParams); } return client; }; //#endregion //#region node_modules/.pnpm/@langchain+core@0.3.57_openai@5.3.0_ws@8.18.2_zod@3.25.57_/node_modules/@langchain/core/dist/tracers/tracer_langchain.js var tracer_langchain_exports = {}; __export(tracer_langchain_exports, { LangChainTracer: () => LangChainTracer }); var LangChainTracer = class LangChainTracer extends BaseTracer { constructor(fields = {}) { super(fields); Object.defineProperty(this, "name", { enumerable: true, configurable: true, writable: true, value: "langchain_tracer" }); Object.defineProperty(this, "projectName", { enumerable: true, configurable: true, writable: true, value: void 0 }); Object.defineProperty(this, "exampleId", { enumerable: true, configurable: true, writable: true, value: void 0 }); Object.defineProperty(this, "client", { enumerable: true, configurable: true, writable: true, value: void 0 }); const { exampleId, projectName, client: client$1 } = fields; this.projectName = projectName ?? getEnvironmentVariable("LANGCHAIN_PROJECT") ?? getEnvironmentVariable("LANGCHAIN_SESSION"); this.exampleId = exampleId; this.client = client$1 ?? getDefaultLangChainClientSingleton(); const traceableTree = LangChainTracer.getTraceableRunTree(); if (traceableTree) this.updateFromRunTree(traceableTree); } async _convertToCreate(run, example_id = void 0) { return { ...run, extra: { ...run.extra, runtime: await getRuntimeEnvironment() }, child_runs: void 0, session_name: this.projectName, reference_example_id: run.parent_run_id ? 
void 0 : example_id }; } async persistRun(_run) {} async onRunCreate(run) { const persistedRun = await this._convertToCreate(run, this.exampleId); await this.client.createRun(persistedRun); } async onRunUpdate(run) { const runUpdate = { end_time: run.end_time, error: run.error, outputs: run.outputs, events: run.events, inputs: run.inputs, trace_id: run.trace_id, dotted_order: run.dotted_order, parent_run_id: run.parent_run_id, extra: run.extra, session_name: this.projectName }; await this.client.updateRun(run.id, runUpdate); } getRun(id) { return this.runMap.get(id); } updateFromRunTree(runTree) { let rootRun = runTree; const visited = /* @__PURE__ */ new Set(); while (rootRun.parent_run) { if (visited.has(rootRun.id)) break; visited.add(rootRun.id); if (!rootRun.parent_run) break; rootRun = rootRun.parent_run; } visited.clear(); const queue$1 = [rootRun]; while (queue$1.length > 0) { const current = queue$1.shift(); if (!current || visited.has(current.id)) continue; visited.add(current.id); this.runMap.set(current.id, current); if (current.child_runs) queue$1.push(...current.child_runs); } this.client = runTree.client ?? this.client; this.projectName = runTree.project_name ?? this.projectName; this.exampleId = runTree.reference_example_id ?? this.exampleId; } convertToRunTree(id) { const runTreeMap = {}; const runTreeList = []; for (const [id$1, run] of this.runMap) { const runTree = new RunTree({ ...run, child_runs: [], parent_run: void 0, client: this.client, project_name: this.projectName, reference_example_id: this.exampleId, tracingEnabled: true }); runTreeMap[id$1] = runTree; runTreeList.push([id$1, run.dotted_order]); } runTreeList.sort((a, b) => { if (!a[1] || !b[1]) return 0; return a[1].localeCompare(b[1]); }); for (const [id$1] of runTreeList) { const run = this.runMap.get(id$1); const runTree = runTreeMap[id$1]; if (!run || !runTree) continue; if (run.parent_run_id) { const parentRunTree = runTreeMap[run.parent_run_id]; if (parentRunTree) { parentRunTree.child_runs.push(runTree); runTree.parent_run = parentRunTree; } } } return runTreeMap[id]; } static getTraceableRunTree() { try { return getCurrentRunTree(true); } catch { return void 0; } } }; //#endregion //#region node_modules/.pnpm/@langchain+core@0.3.57_openai@5.3.0_ws@8.18.2_zod@3.25.57_/node_modules/@langchain/core/dist/singletons/callbacks.js var import_dist$1 = __toESM(require_dist(), 1); let queue; /** * Creates a queue using the p-queue library. The queue is configured to * auto-start and has a concurrency of 1, meaning it will process tasks * one at a time. */ function createQueue() { const PQueue$1 = "default" in import_dist$1.default ? 
import_dist$1.default.default : import_dist$1.default; return new PQueue$1({ autoStart: true, concurrency: 1 }); } function getQueue() { if (typeof queue === "undefined") queue = createQueue(); return queue; } /** * Consume a promise, either adding it to the queue or waiting for it to resolve * @param promiseFn Promise to consume * @param wait Whether to wait for the promise to resolve or resolve immediately */ async function consumeCallback(promiseFn, wait) { if (wait === true) { const asyncLocalStorageInstance = getGlobalAsyncLocalStorageInstance(); if (asyncLocalStorageInstance !== void 0) await asyncLocalStorageInstance.run(void 0, async () => promiseFn()); else await promiseFn(); } else { queue = getQueue(); queue.add(async () => { const asyncLocalStorageInstance = getGlobalAsyncLocalStorageInstance(); if (asyncLocalStorageInstance !== void 0) await asyncLocalStorageInstance.run(void 0, async () => promiseFn()); else await promiseFn(); }); } } /** * Waits for all promises in the queue to resolve. If the queue is * undefined, it immediately resolves a promise. */ function awaitAllCallbacks() { return typeof queue !== "undefined" ? queue.onIdle() : Promise.resolve(); } //#endregion //#region node_modules/.pnpm/@langchain+core@0.3.57_openai@5.3.0_ws@8.18.2_zod@3.25.57_/node_modules/@langchain/core/dist/callbacks/promises.js var promises_exports = {}; __export(promises_exports, { awaitAllCallbacks: () => awaitAllCallbacks, consumeCallback: () => consumeCallback }); //#endregion //#region node_modules/.pnpm/@langchain+core@0.3.57_openai@5.3.0_ws@8.18.2_zod@3.25.57_/node_modules/@langchain/core/dist/utils/callbacks.js const isTracingEnabled = (tracingEnabled) => { if (tracingEnabled !== void 0) return tracingEnabled; const envVars = [ "LANGSMITH_TRACING_V2", "LANGCHAIN_TRACING_V2", "LANGSMITH_TRACING", "LANGCHAIN_TRACING" ]; return !!envVars.find((envVar) => getEnvironmentVariable(envVar) === "true"); }; //#endregion //#region node_modules/.pnpm/@langchain+core@0.3.57_openai@5.3.0_ws@8.18.2_zod@3.25.57_/node_modules/@langchain/core/dist/singletons/async_local_storage/context.js /** * Get the value of a previously set context variable. Context variables * are scoped to any child runnables called by the current runnable, * or globally if set outside of any runnable. * * @remarks * This function is only supported in environments that support AsyncLocalStorage, * including Node.js, Deno, and Cloudflare Workers. * * @example * ```ts * import { RunnableLambda } from "@langchain/core/runnables"; * import { * getContextVariable, * setContextVariable * } from "@langchain/core/context"; * * const nested = RunnableLambda.from(() => { * // "bar" because it was set by a parent * console.log(getContextVariable("foo")); * * // Override to "baz", but only for child runnables * setContextVariable("foo", "baz"); * * // Now "baz", but only for child runnables * return getContextVariable("foo"); * }); * * const runnable = RunnableLambda.from(async () => { * // Set a context variable named "foo" * setContextVariable("foo", "bar"); * * const res = await nested.invoke({}); * * // Still "bar" since child changes do not affect parents * console.log(getContextVariable("foo")); * * return res; * }); * * // undefined, because context variable has not been set yet * console.log(getContextVariable("foo")); * * // Final return value is "baz" * const result = await runnable.invoke({}); * ``` * * @param name The name of the context variable. 
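 * @returns The variable's value, or `undefined` when it was never set or when
 * no global AsyncLocalStorage instance is available in the current environment.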
*/ function getContextVariable(name) { const asyncLocalStorageInstance = getGlobalAsyncLocalStorageInstance(); if (asyncLocalStorageInstance === void 0) return void 0; const runTree = asyncLocalStorageInstance.getStore(); return runTree?.[_CONTEXT_VARIABLES_KEY]?.[name]; } const LC_CONFIGURE_HOOKS_KEY = Symbol("lc:configure_hooks"); const _getConfigureHooks = () => getContextVariable(LC_CONFIGURE_HOOKS_KEY) || []; //#endregion //#region node_modules/.pnpm/@langchain+core@0.3.57_openai@5.3.0_ws@8.18.2_zod@3.25.57_/node_modules/@langchain/core/dist/callbacks/manager.js var manager_exports = {}; __export(manager_exports, { BaseCallbackManager: () => BaseCallbackManager, BaseRunManager: () => BaseRunManager, CallbackManager: () => CallbackManager, CallbackManagerForChainRun: () => CallbackManagerForChainRun, CallbackManagerForLLMRun: () => CallbackManagerForLLMRun, CallbackManagerForRetrieverRun: () => CallbackManagerForRetrieverRun, CallbackManagerForToolRun: () => CallbackManagerForToolRun, TraceGroup: () => TraceGroup, ensureHandler: () => ensureHandler, parseCallbackConfigArg: () => parseCallbackConfigArg, traceAsGroup: () => traceAsGroup }); function parseCallbackConfigArg(arg) { if (!arg) return {}; else if (Array.isArray(arg) || "name" in arg) return { callbacks: arg }; else return arg; } /** * Manage callbacks from different components of LangChain. */ var BaseCallbackManager = class { setHandler(handler) { return this.setHandlers([handler]); } }; /** * Base class for run manager in LangChain. */ var BaseRunManager = class { constructor(runId, handlers, inheritableHandlers, tags, inheritableTags, metadata, inheritableMetadata, _parentRunId) { Object.defineProperty(this, "runId", { enumerable: true, configurable: true, writable: true, value: runId }); Object.defineProperty(this, "handlers", { enumerable: true, configurable: true, writable: true, value: handlers }); Object.defineProperty(this, "inheritableHandlers", { enumerable: true, configurable: true, writable: true, value: inheritableHandlers }); Object.defineProperty(this, "tags", { enumerable: true, configurable: true, writable: true, value: tags }); Object.defineProperty(this, "inheritableTags", { enumerable: true, configurable: true, writable: true, value: inheritableTags }); Object.defineProperty(this, "metadata", { enumerable: true, configurable: true, writable: true, value: metadata }); Object.defineProperty(this, "inheritableMetadata", { enumerable: true, configurable: true, writable: true, value: inheritableMetadata }); Object.defineProperty(this, "_parentRunId", { enumerable: true, configurable: true, writable: true, value: _parentRunId }); } get parentRunId() { return this._parentRunId; } async handleText(text) { await Promise.all(this.handlers.map((handler) => consumeCallback(async () => { try { await handler.handleText?.(text, this.runId, this._parentRunId, this.tags); } catch (err) { const logFunction = handler.raiseError ? console.error : console.warn; logFunction(`Error in handler ${handler.constructor.name}, handleText: ${err}`); if (handler.raiseError) throw err; } }, handler.awaitHandlers))); } async handleCustomEvent(eventName, data, _runId, _tags, _metadata) { await Promise.all(this.handlers.map((handler) => consumeCallback(async () => { try { await handler.handleCustomEvent?.(eventName, data, this.runId, this.tags, this.metadata); } catch (err) { const logFunction = handler.raiseError ? 
console.error : console.warn; logFunction(`Error in handler ${handler.constructor.name}, handleCustomEvent: ${err}`); if (handler.raiseError) throw err; } }, handler.awaitHandlers))); } }; /** * Manages callbacks for retriever runs. */ var CallbackManagerForRetrieverRun = class extends BaseRunManager { getChild(tag) { const manager = new CallbackManager(this.runId); manager.setHandlers(this.inheritableHandlers); manager.addTags(this.inheritableTags); manager.addMetadata(this.inheritableMetadata); if (tag) manager.addTags([tag], false); return manager; } async handleRetrieverEnd(documents) { await Promise.all(this.handlers.map((handler) => consumeCallback(async () => { if (!handler.ignoreRetriever) try { await handler.handleRetrieverEnd?.(documents, this.runId, this._parentRunId, this.tags); } catch (err) { const logFunction = handler.raiseError ? console.error : console.warn; logFunction(`Error in handler ${handler.constructor.name}, handleRetriever`); if (handler.raiseError) throw err; } }, handler.awaitHandlers))); } async handleRetrieverError(err) { await Promise.all(this.handlers.map((handler) => consumeCallback(async () => { if (!handler.ignoreRetriever) try { await handler.handleRetrieverError?.(err, this.runId, this._parentRunId, this.tags); } catch (error) { const logFunction = handler.raiseError ? console.error : console.warn; logFunction(`Error in handler ${handler.constructor.name}, handleRetrieverError: ${error}`); if (handler.raiseError) throw err; } }, handler.awaitHandlers))); } }; var CallbackManagerForLLMRun = class extends BaseRunManager { async handleLLMNewToken(token, idx, _runId, _parentRunId, _tags, fields) { await Promise.all(this.handlers.map((handler) => consumeCallback(async () => { if (!handler.ignoreLLM) try { await handler.handleLLMNewToken?.(token, idx ?? { prompt: 0, completion: 0 }, this.runId, this._parentRunId, this.tags, fields); } catch (err) { const logFunction = handler.raiseError ? console.error : console.warn; logFunction(`Error in handler ${handler.constructor.name}, handleLLMNewToken: ${err}`); if (handler.raiseError) throw err; } }, handler.awaitHandlers))); } async handleLLMError(err, _runId, _parentRunId, _tags, extraParams) { await Promise.all(this.handlers.map((handler) => consumeCallback(async () => { if (!handler.ignoreLLM) try { await handler.handleLLMError?.(err, this.runId, this._parentRunId, this.tags, extraParams); } catch (err$1) { const logFunction = handler.raiseError ? console.error : console.warn; logFunction(`Error in handler ${handler.constructor.name}, handleLLMError: ${err$1}`); if (handler.raiseError) throw err$1; } }, handler.awaitHandlers))); } async handleLLMEnd(output, _runId, _parentRunId, _tags, extraParams) { await Promise.all(this.handlers.map((handler) => consumeCallback(async () => { if (!handler.ignoreLLM) try { await handler.handleLLMEnd?.(output, this.runId, this._parentRunId, this.tags, extraParams); } catch (err) { const logFunction = handler.raiseError ? 
console.error : console.warn; logFunction(`Error in handler ${handler.constructor.name}, handleLLMEnd: ${err}`); if (handler.raiseError) throw err; } }, handler.awaitHandlers))); } }; var CallbackManagerForChainRun = class extends BaseRunManager { getChild(tag) { const manager = new CallbackManager(this.runId); manager.setHandlers(this.inheritableHandlers); manager.addTags(this.inheritableTags); manager.addMetadata(this.inheritableMetadata); if (tag) manager.addTags([tag], false); return manager; } async handleChainError(err, _runId, _parentRunId, _tags, kwargs) { await Promise.all(this.handlers.map((handler) => consumeCallback(async () => { if (!handler.ignoreChain) try { await handler.handleChainError?.(err, this.runId, this._parentRunId, this.tags, kwargs); } catch (err$1) { const logFunction = handler.raiseError ? console.error : console.warn; logFunction(`Error in handler ${handler.constructor.name}, handleChainError: ${err$1}`); if (handler.raiseError) throw err$1; } }, handler.awaitHandlers))); } async handleChainEnd(output, _runId, _parentRunId, _tags, kwargs) { await Promise.all(this.handlers.map((handler) => consumeCallback(async () => { if (!handler.ignoreChain) try { await handler.handleChainEnd?.(output, this.runId, this._parentRunId, this.tags, kwargs); } catch (err) { const logFunction = handler.raiseError ? console.error : console.warn; logFunction(`Error in handler ${handler.constructor.name}, handleChainEnd: ${err}`); if (handler.raiseError) throw err; } }, handler.awaitHandlers))); } async handleAgentAction(action) { await Promise.all(this.handlers.map((handler) => consumeCallback(async () => { if (!handler.ignoreAgent) try { await handler.handleAgentAction?.(action, this.runId, this._parentRunId, this.tags); } catch (err) { const logFunction = handler.raiseError ? console.error : console.warn; logFunction(`Error in handler ${handler.constructor.name}, handleAgentAction: ${err}`); if (handler.raiseError) throw err; } }, handler.awaitHandlers))); } async handleAgentEnd(action) { await Promise.all(this.handlers.map((handler) => consumeCallback(async () => { if (!handler.ignoreAgent) try { await handler.handleAgentEnd?.(action, this.runId, this._parentRunId, this.tags); } catch (err) { const logFunction = handler.raiseError ? console.error : console.warn; logFunction(`Error in handler ${handler.constructor.name}, handleAgentEnd: ${err}`); if (handler.raiseError) throw err; } }, handler.awaitHandlers))); } }; var CallbackManagerForToolRun = class extends BaseRunManager { getChild(tag) { const manager = new CallbackManager(this.runId); manager.setHandlers(this.inheritableHandlers); manager.addTags(this.inheritableTags); manager.addMetadata(this.inheritableMetadata); if (tag) manager.addTags([tag], false); return manager; } async handleToolError(err) { await Promise.all(this.handlers.map((handler) => consumeCallback(async () => { if (!handler.ignoreAgent) try { await handler.handleToolError?.(err, this.runId, this._parentRunId, this.tags); } catch (err$1) { const logFunction = handler.raiseError ? console.error : console.warn; logFunction(`Error in handler ${handler.constructor.name}, handleToolError: ${err$1}`); if (handler.raiseError) throw err$1; } }, handler.awaitHandlers))); } async handleToolEnd(output) { await Promise.all(this.handlers.map((handler) => consumeCallback(async () => { if (!handler.ignoreAgent) try { await handler.handleToolEnd?.(output, this.runId, this._parentRunId, this.tags); } catch (err) { const logFunction = handler.raiseError ? 
console.error : console.warn; logFunction(`Error in handler ${handler.constructor.name}, handleToolEnd: ${err}`); if (handler.raiseError) throw err; } }, handler.awaitHandlers))); } }; /** * @example * ```typescript * const prompt = PromptTemplate.fromTemplate("What is the answer to {question}?"); * * // Example of using LLMChain with OpenAI and a simple prompt * const chain = new LLMChain({ * llm: new ChatOpenAI({ temperature: 0.9 }), * prompt, * }); * * // Running the chain with a single question * const result = await chain.call({ * question: "What is the airspeed velocity of an unladen swallow?", * }); * console.log("The answer is:", result); * ``` */ var CallbackManager = class CallbackManager extends BaseCallbackManager { constructor(parentRunId, options) { super(); Object.defineProperty(this, "handlers", { enumerable: true, configurable: true, writable: true, value: [] }); Object.defineProperty(this, "inheritableHandlers", { enumerable: true, configurable: true, writable: true, value: [] }); Object.defineProperty(this, "tags", { enumerable: true, configurable: true, writable: true, value: [] }); Object.defineProperty(this, "inheritableTags", { enumerable: true, configurable: true, writable: true, value: [] }); Object.defineProperty(this, "metadata", { enumerable: true, configurable: true, writable: true, value: {} }); Object.defineProperty(this, "inheritableMetadata", { enumerable: true, configurable: true, writable: true, value: {} }); Object.defineProperty(this, "name", { enumerable: true, configurable: true, writable: true, value: "callback_manager" }); Object.defineProperty(this, "_parentRunId", { enumerable: true, configurable: true, writable: true, value: void 0 }); this.handlers = options?.handlers ?? this.handlers; this.inheritableHandlers = options?.inheritableHandlers ?? this.inheritableHandlers; this.tags = options?.tags ?? this.tags; this.inheritableTags = options?.inheritableTags ?? this.inheritableTags; this.metadata = options?.metadata ?? this.metadata; this.inheritableMetadata = options?.inheritableMetadata ?? this.inheritableMetadata; this._parentRunId = parentRunId; } /** * Gets the parent run ID, if any. * * @returns The parent run ID. */ getParentRunId() { return this._parentRunId; } async handleLLMStart(llm, prompts, runId = void 0, _parentRunId = void 0, extraParams = void 0, _tags = void 0, _metadata = void 0, runName = void 0) { return Promise.all(prompts.map(async (prompt, idx) => { const runId_ = idx === 0 && runId ? runId : v4_default(); await Promise.all(this.handlers.map((handler) => { if (handler.ignoreLLM) return; if (isBaseTracer(handler)) handler._createRunForLLMStart(llm, [prompt], runId_, this._parentRunId, extraParams, this.tags, this.metadata, runName); return consumeCallback(async () => { try { await handler.handleLLMStart?.(llm, [prompt], runId_, this._parentRunId, extraParams, this.tags, this.metadata, runName); } catch (err) { const logFunction = handler.raiseError ? 
console.error : console.warn; logFunction(`Error in handler ${handler.constructor.name}, handleLLMStart: ${err}`); if (handler.raiseError) throw err; } }, handler.awaitHandlers); })); return new CallbackManagerForLLMRun(runId_, this.handlers, this.inheritableHandlers, this.tags, this.inheritableTags, this.metadata, this.inheritableMetadata, this._parentRunId); })); } async handleChatModelStart(llm, messages, runId = void 0, _parentRunId = void 0, extraParams = void 0, _tags = void 0, _metadata = void 0, runName = void 0) { return Promise.all(messages.map(async (messageGroup, idx) => { const runId_ = idx === 0 && runId ? runId : v4_default(); await Promise.all(this.handlers.map((handler) => { if (handler.ignoreLLM) return; if (isBaseTracer(handler)) handler._createRunForChatModelStart(llm, [messageGroup], runId_, this._parentRunId, extraParams, this.tags, this.metadata, runName); return consumeCallback(async () => { try { if (handler.handleChatModelStart) await handler.handleChatModelStart?.(llm, [messageGroup], runId_, this._parentRunId, extraParams, this.tags, this.metadata, runName); else if (handler.handleLLMStart) { const messageString = getBufferString(messageGroup); await handler.handleLLMStart?.(llm, [messageString], runId_, this._parentRunId, extraParams, this.tags, this.metadata, runName); } } catch (err) { const logFunction = handler.raiseError ? console.error : console.warn; logFunction(`Error in handler ${handler.constructor.name}, handleLLMStart: ${err}`); if (handler.raiseError) throw err; } }, handler.awaitHandlers); })); return new CallbackManagerForLLMRun(runId_, this.handlers, this.inheritableHandlers, this.tags, this.inheritableTags, this.metadata, this.inheritableMetadata, this._parentRunId); })); } async handleChainStart(chain, inputs, runId = v4_default(), runType = void 0, _tags = void 0, _metadata = void 0, runName = void 0) { await Promise.all(this.handlers.map((handler) => { if (handler.ignoreChain) return; if (isBaseTracer(handler)) handler._createRunForChainStart(chain, inputs, runId, this._parentRunId, this.tags, this.metadata, runType, runName); return consumeCallback(async () => { try { await handler.handleChainStart?.(chain, inputs, runId, this._parentRunId, this.tags, this.metadata, runType, runName); } catch (err) { const logFunction = handler.raiseError ? console.error : console.warn; logFunction(`Error in handler ${handler.constructor.name}, handleChainStart: ${err}`); if (handler.raiseError) throw err; } }, handler.awaitHandlers); })); return new CallbackManagerForChainRun(runId, this.handlers, this.inheritableHandlers, this.tags, this.inheritableTags, this.metadata, this.inheritableMetadata, this._parentRunId); } async handleToolStart(tool, input, runId = v4_default(), _parentRunId = void 0, _tags = void 0, _metadata = void 0, runName = void 0) { await Promise.all(this.handlers.map((handler) => { if (handler.ignoreAgent) return; if (isBaseTracer(handler)) handler._createRunForToolStart(tool, input, runId, this._parentRunId, this.tags, this.metadata, runName); return consumeCallback(async () => { try { await handler.handleToolStart?.(tool, input, runId, this._parentRunId, this.tags, this.metadata, runName); } catch (err) { const logFunction = handler.raiseError ? 
console.error : console.warn; logFunction(`Error in handler ${handler.constructor.name}, handleToolStart: ${err}`); if (handler.raiseError) throw err; } }, handler.awaitHandlers); })); return new CallbackManagerForToolRun(runId, this.handlers, this.inheritableHandlers, this.tags, this.inheritableTags, this.metadata, this.inheritableMetadata, this._parentRunId); } async handleRetrieverStart(retriever, query, runId = v4_default(), _parentRunId = void 0, _tags = void 0, _metadata = void 0, runName = void 0) { await Promise.all(this.handlers.map((handler) => { if (handler.ignoreRetriever) return; if (isBaseTracer(handler)) handler._createRunForRetrieverStart(retriever, query, runId, this._parentRunId, this.tags, this.metadata, runName); return consumeCallback(async () => { try { await handler.handleRetrieverStart?.(retriever, query, runId, this._parentRunId, this.tags, this.metadata, runName); } catch (err) { const logFunction = handler.raiseError ? console.error : console.warn; logFunction(`Error in handler ${handler.constructor.name}, handleRetrieverStart: ${err}`); if (handler.raiseError) throw err; } }, handler.awaitHandlers); })); return new CallbackManagerForRetrieverRun(runId, this.handlers, this.inheritableHandlers, this.tags, this.inheritableTags, this.metadata, this.inheritableMetadata, this._parentRunId); } async handleCustomEvent(eventName, data, runId, _tags, _metadata) { await Promise.all(this.handlers.map((handler) => consumeCallback(async () => { if (!handler.ignoreCustomEvent) try { await handler.handleCustomEvent?.(eventName, data, runId, this.tags, this.metadata); } catch (err) { const logFunction = handler.raiseError ? console.error : console.warn; logFunction(`Error in handler ${handler.constructor.name}, handleCustomEvent: ${err}`); if (handler.raiseError) throw err; } }, handler.awaitHandlers))); } addHandler(handler, inherit = true) { this.handlers.push(handler); if (inherit) this.inheritableHandlers.push(handler); } removeHandler(handler) { this.handlers = this.handlers.filter((_handler) => _handler !== handler); this.inheritableHandlers = this.inheritableHandlers.filter((_handler) => _handler !== handler); } setHandlers(handlers, inherit = true) { this.handlers = []; this.inheritableHandlers = []; for (const handler of handlers) this.addHandler(handler, inherit); } addTags(tags, inherit = true) { this.removeTags(tags); this.tags.push(...tags); if (inherit) this.inheritableTags.push(...tags); } removeTags(tags) { this.tags = this.tags.filter((tag) => !tags.includes(tag)); this.inheritableTags = this.inheritableTags.filter((tag) => !tags.includes(tag)); } addMetadata(metadata, inherit = true) { this.metadata = { ...this.metadata, ...metadata }; if (inherit) this.inheritableMetadata = { ...this.inheritableMetadata, ...metadata }; } removeMetadata(metadata) { for (const key of Object.keys(metadata)) { delete this.metadata[key]; delete this.inheritableMetadata[key]; } } copy(additionalHandlers = [], inherit = true) { const manager = new CallbackManager(this._parentRunId); for (const handler of this.handlers) { const inheritable = this.inheritableHandlers.includes(handler); manager.addHandler(handler, inheritable); } for (const tag of this.tags) { const inheritable = this.inheritableTags.includes(tag); manager.addTags([tag], inheritable); } for (const key of Object.keys(this.metadata)) { const inheritable = Object.keys(this.inheritableMetadata).includes(key); manager.addMetadata({ [key]: this.metadata[key] }, inheritable); } for (const handler of additionalHandlers) { if 
(manager.handlers.filter((h) => h.name === "console_callback_handler").some((h) => h.name === handler.name)) continue; manager.addHandler(handler, inherit); } return manager; } static fromHandlers(handlers) { class Handler extends BaseCallbackHandler { constructor() { super(); Object.defineProperty(this, "name", { enumerable: true, configurable: true, writable: true, value: v4_default() }); Object.assign(this, handlers); } } const manager = new this(); manager.addHandler(new Handler()); return manager; } static configure(inheritableHandlers, localHandlers, inheritableTags, localTags, inheritableMetadata, localMetadata, options) { return this._configureSync(inheritableHandlers, localHandlers, inheritableTags, localTags, inheritableMetadata, localMetadata, options); } static _configureSync(inheritableHandlers, localHandlers, inheritableTags, localTags, inheritableMetadata, localMetadata, options) { let callbackManager; if (inheritableHandlers || localHandlers) { if (Array.isArray(inheritableHandlers) || !inheritableHandlers) { callbackManager = new CallbackManager(); callbackManager.setHandlers(inheritableHandlers?.map(ensureHandler) ?? [], true); } else callbackManager = inheritableHandlers; callbackManager = callbackManager.copy(Array.isArray(localHandlers) ? localHandlers.map(ensureHandler) : localHandlers?.handlers, false); } const verboseEnabled = getEnvironmentVariable("LANGCHAIN_VERBOSE") === "true" || options?.verbose; const tracingV2Enabled = LangChainTracer.getTraceableRunTree()?.tracingEnabled || isTracingEnabled(); const tracingEnabled = tracingV2Enabled || (getEnvironmentVariable("LANGCHAIN_TRACING") ?? false); if (verboseEnabled || tracingEnabled) { if (!callbackManager) callbackManager = new CallbackManager(); if (verboseEnabled && !callbackManager.handlers.some((handler) => handler.name === ConsoleCallbackHandler.prototype.name)) { const consoleHandler = new ConsoleCallbackHandler(); callbackManager.addHandler(consoleHandler, true); } if (tracingEnabled && !callbackManager.handlers.some((handler) => handler.name === "langchain_tracer")) { if (tracingV2Enabled) { const tracerV2 = new LangChainTracer(); callbackManager.addHandler(tracerV2, true); callbackManager._parentRunId = LangChainTracer.getTraceableRunTree()?.id ?? callbackManager._parentRunId; } } } for (const { contextVar, inheritable = true, handlerClass, envVar } of _getConfigureHooks()) { const createIfNotInContext = envVar && getEnvironmentVariable(envVar) === "true" && handlerClass; let handler; const contextVarValue = contextVar !== void 0 ? getContextVariable(contextVar) : void 0; if (contextVarValue && isBaseCallbackHandler(contextVarValue)) handler = contextVarValue; else if (createIfNotInContext) handler = new handlerClass({}); if (handler !== void 0) { if (!callbackManager) callbackManager = new CallbackManager(); if (!callbackManager.handlers.some((h) => h.name === handler.name)) callbackManager.addHandler(handler, inheritable); } } if (inheritableTags || localTags) { if (callbackManager) { callbackManager.addTags(inheritableTags ?? []); callbackManager.addTags(localTags ?? [], false); } } if (inheritableMetadata || localMetadata) { if (callbackManager) { callbackManager.addMetadata(inheritableMetadata ?? {}); callbackManager.addMetadata(localMetadata ?? 
{}, false); } } return callbackManager; } }; function ensureHandler(handler) { if ("name" in handler) return handler; return BaseCallbackHandler.fromMethods(handler); } /** * @deprecated Use [`traceable`](https://docs.smith.langchain.com/observability/how_to_guides/tracing/annotate_code) * from "langsmith" instead. */ var TraceGroup = class { constructor(groupName, options) { Object.defineProperty(this, "groupName", { enumerable: true, configurable: true, writable: true, value: groupName }); Object.defineProperty(this, "options", { enumerable: true, configurable: true, writable: true, value: options }); Object.defineProperty(this, "runManager", { enumerable: true, configurable: true, writable: true, value: void 0 }); } async getTraceGroupCallbackManager(group_name, inputs, options) { const cb = new LangChainTracer(options); const cm = await CallbackManager.configure([cb]); const runManager = await cm?.handleChainStart({ lc: 1, type: "not_implemented", id: [ "langchain", "callbacks", "groups", group_name ] }, inputs ?? {}); if (!runManager) throw new Error("Failed to create run group callback manager."); return runManager; } async start(inputs) { if (!this.runManager) this.runManager = await this.getTraceGroupCallbackManager(this.groupName, inputs, this.options); return this.runManager.getChild(); } async error(err) { if (this.runManager) { await this.runManager.handleChainError(err); this.runManager = void 0; } } async end(output) { if (this.runManager) { await this.runManager.handleChainEnd(output ?? {}); this.runManager = void 0; } } }; function _coerceToDict$1(value, defaultKey) { return value && !Array.isArray(value) && typeof value === "object" ? value : { [defaultKey]: value }; } async function traceAsGroup(groupOptions, enclosedCode, ...args) { const traceGroup = new TraceGroup(groupOptions.name, groupOptions); const callbackManager = await traceGroup.start({ ...args }); try { const result = await enclosedCode(callbackManager, ...args); await traceGroup.end(_coerceToDict$1(result, "output")); return result; } catch (err) { await traceGroup.error(err); throw err; } } //#endregion //#region node_modules/.pnpm/@langchain+core@0.3.57_openai@5.3.0_ws@8.18.2_zod@3.25.57_/node_modules/@langchain/core/dist/singletons/async_local_storage/index.js var MockAsyncLocalStorage = class { getStore() { return void 0; } run(_store, callback) { return callback(); } enterWith(_store) { return void 0; } }; const mockAsyncLocalStorage = new MockAsyncLocalStorage(); const LC_CHILD_KEY = Symbol.for("lc:child_config"); var AsyncLocalStorageProvider = class { getInstance() { return getGlobalAsyncLocalStorageInstance() ?? 
mockAsyncLocalStorage; } getRunnableConfig() { const storage = this.getInstance(); return storage.getStore()?.extra?.[LC_CHILD_KEY]; } runWithConfig(config, callback, avoidCreatingRootRunTree) { const callbackManager = CallbackManager._configureSync(config?.callbacks, void 0, config?.tags, void 0, config?.metadata); const storage = this.getInstance(); const previousValue = storage.getStore(); const parentRunId = callbackManager?.getParentRunId(); const langChainTracer = callbackManager?.handlers?.find((handler) => handler?.name === "langchain_tracer"); let runTree; if (langChainTracer && parentRunId) runTree = langChainTracer.convertToRunTree(parentRunId); else if (!avoidCreatingRootRunTree) runTree = new RunTree({ name: "", tracingEnabled: false }); if (runTree) runTree.extra = { ...runTree.extra, [LC_CHILD_KEY]: config }; if (previousValue !== void 0 && previousValue[_CONTEXT_VARIABLES_KEY] !== void 0) { if (runTree === void 0) runTree = {}; runTree[_CONTEXT_VARIABLES_KEY] = previousValue[_CONTEXT_VARIABLES_KEY]; } return storage.run(runTree, callback); } initializeGlobalInstance(instance) { if (getGlobalAsyncLocalStorageInstance() === void 0) setGlobalAsyncLocalStorageInstance(instance); } }; const AsyncLocalStorageProviderSingleton = new AsyncLocalStorageProvider(); //#endregion //#region node_modules/.pnpm/zod@3.25.57/node_modules/zod/dist/esm/v3/helpers/util.js var util; (function(util$1) { util$1.assertEqual = (_) => {}; function assertIs(_arg) {} util$1.assertIs = assertIs; function assertNever(_x) { throw new Error(); } util$1.assertNever = assertNever; util$1.arrayToEnum = (items) => { const obj = {}; for (const item of items) obj[item] = item; return obj; }; util$1.getValidEnumValues = (obj) => { const validKeys = util$1.objectKeys(obj).filter((k) => typeof obj[obj[k]] !== "number"); const filtered = {}; for (const k of validKeys) filtered[k] = obj[k]; return util$1.objectValues(filtered); }; util$1.objectValues = (obj) => { return util$1.objectKeys(obj).map(function(e) { return obj[e]; }); }; util$1.objectKeys = typeof Object.keys === "function" ? (obj) => Object.keys(obj) : (object) => { const keys = []; for (const key in object) if (Object.prototype.hasOwnProperty.call(object, key)) keys.push(key); return keys; }; util$1.find = (arr$1, checker) => { for (const item of arr$1) if (checker(item)) return item; return void 0; }; util$1.isInteger = typeof Number.isInteger === "function" ? (val) => Number.isInteger(val) : (val) => typeof val === "number" && Number.isFinite(val) && Math.floor(val) === val; function joinValues(array, separator = " | ") { return array.map((val) => typeof val === "string" ? `'${val}'` : val).join(separator); } util$1.joinValues = joinValues; util$1.jsonStringifyReplacer = (_, value) => { if (typeof value === "bigint") return value.toString(); return value; }; })(util || (util = {})); var objectUtil; (function(objectUtil$1) { objectUtil$1.mergeShapes = (first, second) => { return { ...first, ...second }; }; })(objectUtil || (objectUtil = {})); const ZodParsedType = util.arrayToEnum([ "string", "nan", "number", "integer", "float", "boolean", "date", "bigint", "symbol", "function", "undefined", "null", "array", "object", "unknown", "promise", "void", "never", "map", "set" ]); const getParsedType = (data) => { const t$5 = typeof data; switch (t$5) { case "undefined": return ZodParsedType.undefined; case "string": return ZodParsedType.string; case "number": return Number.isNaN(data) ? 
ZodParsedType.nan : ZodParsedType.number; case "boolean": return ZodParsedType.boolean; case "function": return ZodParsedType.function; case "bigint": return ZodParsedType.bigint; case "symbol": return ZodParsedType.symbol; case "object": if (Array.isArray(data)) return ZodParsedType.array; if (data === null) return ZodParsedType.null; if (data.then && typeof data.then === "function" && data.catch && typeof data.catch === "function") return ZodParsedType.promise; if (typeof Map !== "undefined" && data instanceof Map) return ZodParsedType.map; if (typeof Set !== "undefined" && data instanceof Set) return ZodParsedType.set; if (typeof Date !== "undefined" && data instanceof Date) return ZodParsedType.date; return ZodParsedType.object; default: return ZodParsedType.unknown; } }; //#endregion //#region node_modules/.pnpm/zod@3.25.57/node_modules/zod/dist/esm/v3/ZodError.js const ZodIssueCode = util.arrayToEnum([ "invalid_type", "invalid_literal", "custom", "invalid_union", "invalid_union_discriminator", "invalid_enum_value", "unrecognized_keys", "invalid_arguments", "invalid_return_type", "invalid_date", "invalid_string", "too_small", "too_big", "invalid_intersection_types", "not_multiple_of", "not_finite" ]); var ZodError = class ZodError extends Error { get errors() { return this.issues; } constructor(issues) { super(); this.issues = []; this.addIssue = (sub) => { this.issues = [...this.issues, sub]; }; this.addIssues = (subs = []) => { this.issues = [...this.issues, ...subs]; }; const actualProto = new.target.prototype; if (Object.setPrototypeOf) Object.setPrototypeOf(this, actualProto); else this.__proto__ = actualProto; this.name = "ZodError"; this.issues = issues; } format(_mapper) { const mapper = _mapper || function(issue) { return issue.message; }; const fieldErrors = { _errors: [] }; const processError = (error) => { for (const issue of error.issues) if (issue.code === "invalid_union") issue.unionErrors.map(processError); else if (issue.code === "invalid_return_type") processError(issue.returnTypeError); else if (issue.code === "invalid_arguments") processError(issue.argumentsError); else if (issue.path.length === 0) fieldErrors._errors.push(mapper(issue)); else { let curr = fieldErrors; let i = 0; while (i < issue.path.length) { const el = issue.path[i]; const terminal = i === issue.path.length - 1; if (!terminal) curr[el] = curr[el] || { _errors: [] }; else { curr[el] = curr[el] || { _errors: [] }; curr[el]._errors.push(mapper(issue)); } curr = curr[el]; i++; } } }; processError(this); return fieldErrors; } static assert(value) { if (!(value instanceof ZodError)) throw new Error(`Not a ZodError: ${value}`); } toString() { return this.message; } get message() { return JSON.stringify(this.issues, util.jsonStringifyReplacer, 2); } get isEmpty() { return this.issues.length === 0; } flatten(mapper = (issue) => issue.message) { const fieldErrors = {}; const formErrors = []; for (const sub of this.issues) if (sub.path.length > 0) { fieldErrors[sub.path[0]] = fieldErrors[sub.path[0]] || []; fieldErrors[sub.path[0]].push(mapper(sub)); } else formErrors.push(mapper(sub)); return { formErrors, fieldErrors }; } get formErrors() { return this.flatten(); } }; ZodError.create = (issues) => { const error = new ZodError(issues); return error; }; //#endregion //#region node_modules/.pnpm/zod@3.25.57/node_modules/zod/dist/esm/v3/locales/en.js const errorMap = (issue, _ctx) => { let message; switch (issue.code) { case ZodIssueCode.invalid_type: if (issue.received === ZodParsedType.undefined) message = 
"Required"; else message = `Expected ${issue.expected}, received ${issue.received}`; break; case ZodIssueCode.invalid_literal: message = `Invalid literal value, expected ${JSON.stringify(issue.expected, util.jsonStringifyReplacer)}`; break; case ZodIssueCode.unrecognized_keys: message = `Unrecognized key(s) in object: ${util.joinValues(issue.keys, ", ")}`; break; case ZodIssueCode.invalid_union: message = `Invalid input`; break; case ZodIssueCode.invalid_union_discriminator: message = `Invalid discriminator value. Expected ${util.joinValues(issue.options)}`; break; case ZodIssueCode.invalid_enum_value: message = `Invalid enum value. Expected ${util.joinValues(issue.options)}, received '${issue.received}'`; break; case ZodIssueCode.invalid_arguments: message = `Invalid function arguments`; break; case ZodIssueCode.invalid_return_type: message = `Invalid function return type`; break; case ZodIssueCode.invalid_date: message = `Invalid date`; break; case ZodIssueCode.invalid_string: if (typeof issue.validation === "object") if ("includes" in issue.validation) { message = `Invalid input: must include "${issue.validation.includes}"`; if (typeof issue.validation.position === "number") message = `${message} at one or more positions greater than or equal to ${issue.validation.position}`; } else if ("startsWith" in issue.validation) message = `Invalid input: must start with "${issue.validation.startsWith}"`; else if ("endsWith" in issue.validation) message = `Invalid input: must end with "${issue.validation.endsWith}"`; else util.assertNever(issue.validation); else if (issue.validation !== "regex") message = `Invalid ${issue.validation}`; else message = "Invalid"; break; case ZodIssueCode.too_small: if (issue.type === "array") message = `Array must contain ${issue.exact ? "exactly" : issue.inclusive ? `at least` : `more than`} ${issue.minimum} element(s)`; else if (issue.type === "string") message = `String must contain ${issue.exact ? "exactly" : issue.inclusive ? `at least` : `over`} ${issue.minimum} character(s)`; else if (issue.type === "number") message = `Number must be ${issue.exact ? `exactly equal to ` : issue.inclusive ? `greater than or equal to ` : `greater than `}${issue.minimum}`; else if (issue.type === "date") message = `Date must be ${issue.exact ? `exactly equal to ` : issue.inclusive ? `greater than or equal to ` : `greater than `}${new Date(Number(issue.minimum))}`; else message = "Invalid input"; break; case ZodIssueCode.too_big: if (issue.type === "array") message = `Array must contain ${issue.exact ? `exactly` : issue.inclusive ? `at most` : `less than`} ${issue.maximum} element(s)`; else if (issue.type === "string") message = `String must contain ${issue.exact ? `exactly` : issue.inclusive ? `at most` : `under`} ${issue.maximum} character(s)`; else if (issue.type === "number") message = `Number must be ${issue.exact ? `exactly` : issue.inclusive ? `less than or equal to` : `less than`} ${issue.maximum}`; else if (issue.type === "bigint") message = `BigInt must be ${issue.exact ? `exactly` : issue.inclusive ? `less than or equal to` : `less than`} ${issue.maximum}`; else if (issue.type === "date") message = `Date must be ${issue.exact ? `exactly` : issue.inclusive ? 
`smaller than or equal to` : `smaller than`} ${new Date(Number(issue.maximum))}`; else message = "Invalid input"; break; case ZodIssueCode.custom: message = `Invalid input`; break; case ZodIssueCode.invalid_intersection_types: message = `Intersection results could not be merged`; break; case ZodIssueCode.not_multiple_of: message = `Number must be a multiple of ${issue.multipleOf}`; break; case ZodIssueCode.not_finite: message = "Number must be finite"; break; default: message = _ctx.defaultError; util.assertNever(issue); } return { message }; }; var en_default = errorMap; //#endregion //#region node_modules/.pnpm/zod@3.25.57/node_modules/zod/dist/esm/v3/errors.js let overrideErrorMap = en_default; function getErrorMap() { return overrideErrorMap; } //#endregion //#region node_modules/.pnpm/zod@3.25.57/node_modules/zod/dist/esm/v3/helpers/parseUtil.js const makeIssue = (params) => { const { data, path, errorMaps, issueData } = params; const fullPath = [...path, ...issueData.path || []]; const fullIssue = { ...issueData, path: fullPath }; if (issueData.message !== void 0) return { ...issueData, path: fullPath, message: issueData.message }; let errorMessage = ""; const maps = errorMaps.filter((m) => !!m).slice().reverse(); for (const map of maps) errorMessage = map(fullIssue, { data, defaultError: errorMessage }).message; return { ...issueData, path: fullPath, message: errorMessage }; }; function addIssueToContext(ctx, issueData) { const overrideMap = getErrorMap(); const issue = makeIssue({ issueData, data: ctx.data, path: ctx.path, errorMaps: [ ctx.common.contextualErrorMap, ctx.schemaErrorMap, overrideMap, overrideMap === en_default ? void 0 : en_default ].filter((x) => !!x) }); ctx.common.issues.push(issue); } var ParseStatus = class ParseStatus { constructor() { this.value = "valid"; } dirty() { if (this.value === "valid") this.value = "dirty"; } abort() { if (this.value !== "aborted") this.value = "aborted"; } static mergeArray(status, results) { const arrayValue = []; for (const s of results) { if (s.status === "aborted") return INVALID; if (s.status === "dirty") status.dirty(); arrayValue.push(s.value); } return { status: status.value, value: arrayValue }; } static async mergeObjectAsync(status, pairs) { const syncPairs = []; for (const pair of pairs) { const key = await pair.key; const value = await pair.value; syncPairs.push({ key, value }); } return ParseStatus.mergeObjectSync(status, syncPairs); } static mergeObjectSync(status, pairs) { const finalObject = {}; for (const pair of pairs) { const { key, value } = pair; if (key.status === "aborted") return INVALID; if (value.status === "aborted") return INVALID; if (key.status === "dirty") status.dirty(); if (value.status === "dirty") status.dirty(); if (key.value !== "__proto__" && (typeof value.value !== "undefined" || pair.alwaysSet)) finalObject[key.value] = value.value; } return { status: status.value, value: finalObject }; } }; const INVALID = Object.freeze({ status: "aborted" }); const DIRTY = (value) => ({ status: "dirty", value }); const OK = (value) => ({ status: "valid", value }); const isAborted = (x) => x.status === "aborted"; const isDirty = (x) => x.status === "dirty"; const isValid = (x) => x.status === "valid"; const isAsync = (x) => typeof Promise !== "undefined" && x instanceof Promise; //#endregion //#region node_modules/.pnpm/zod@3.25.57/node_modules/zod/dist/esm/v3/helpers/errorUtil.js var errorUtil; (function(errorUtil$1) { errorUtil$1.errToObj = (message) => typeof message === "string" ? 
{ message } : message || {}; errorUtil$1.toString = (message) => typeof message === "string" ? message : message?.message; })(errorUtil || (errorUtil = {})); //#endregion //#region node_modules/.pnpm/zod@3.25.57/node_modules/zod/dist/esm/v3/types.js var ParseInputLazyPath = class { constructor(parent, value, path, key) { this._cachedPath = []; this.parent = parent; this.data = value; this._path = path; this._key = key; } get path() { if (!this._cachedPath.length) if (Array.isArray(this._key)) this._cachedPath.push(...this._path, ...this._key); else this._cachedPath.push(...this._path, this._key); return this._cachedPath; } }; const handleResult = (ctx, result) => { if (isValid(result)) return { success: true, data: result.value }; else { if (!ctx.common.issues.length) throw new Error("Validation failed but no issues detected."); return { success: false, get error() { if (this._error) return this._error; const error = new ZodError(ctx.common.issues); this._error = error; return this._error; } }; } }; function processCreateParams(params) { if (!params) return {}; const { errorMap: errorMap$1, invalid_type_error, required_error, description } = params; if (errorMap$1 && (invalid_type_error || required_error)) throw new Error(`Can't use "invalid_type_error" or "required_error" in conjunction with custom error map.`); if (errorMap$1) return { errorMap: errorMap$1, description }; const customMap = (iss, ctx) => { const { message } = params; if (iss.code === "invalid_enum_value") return { message: message ?? ctx.defaultError }; if (typeof ctx.data === "undefined") return { message: message ?? required_error ?? ctx.defaultError }; if (iss.code !== "invalid_type") return { message: ctx.defaultError }; return { message: message ?? invalid_type_error ?? ctx.defaultError }; }; return { errorMap: customMap, description }; } var ZodType = class { get description() { return this._def.description; } _getType(input) { return getParsedType(input.data); } _getOrReturnCtx(input, ctx) { return ctx || { common: input.parent.common, data: input.data, parsedType: getParsedType(input.data), schemaErrorMap: this._def.errorMap, path: input.path, parent: input.parent }; } _processInputParams(input) { return { status: new ParseStatus(), ctx: { common: input.parent.common, data: input.data, parsedType: getParsedType(input.data), schemaErrorMap: this._def.errorMap, path: input.path, parent: input.parent } }; } _parseSync(input) { const result = this._parse(input); if (isAsync(result)) throw new Error("Synchronous parse encountered promise."); return result; } _parseAsync(input) { const result = this._parse(input); return Promise.resolve(result); } parse(data, params) { const result = this.safeParse(data, params); if (result.success) return result.data; throw result.error; } safeParse(data, params) { const ctx = { common: { issues: [], async: params?.async ?? false, contextualErrorMap: params?.errorMap }, path: params?.path || [], schemaErrorMap: this._def.errorMap, parent: null, data, parsedType: getParsedType(data) }; const result = this._parseSync({ data, path: ctx.path, parent: ctx }); return handleResult(ctx, result); } "~validate"(data) { const ctx = { common: { issues: [], async: !!this["~standard"].async }, path: [], schemaErrorMap: this._def.errorMap, parent: null, data, parsedType: getParsedType(data) }; if (!this["~standard"].async) try { const result = this._parseSync({ data, path: [], parent: ctx }); return isValid(result) ? 
{ value: result.value } : { issues: ctx.common.issues }; } catch (err) { if (err?.message?.toLowerCase()?.includes("encountered")) this["~standard"].async = true; ctx.common = { issues: [], async: true }; } return this._parseAsync({ data, path: [], parent: ctx }).then((result) => isValid(result) ? { value: result.value } : { issues: ctx.common.issues }); } async parseAsync(data, params) { const result = await this.safeParseAsync(data, params); if (result.success) return result.data; throw result.error; } async safeParseAsync(data, params) { const ctx = { common: { issues: [], contextualErrorMap: params?.errorMap, async: true }, path: params?.path || [], schemaErrorMap: this._def.errorMap, parent: null, data, parsedType: getParsedType(data) }; const maybeAsyncResult = this._parse({ data, path: ctx.path, parent: ctx }); const result = await (isAsync(maybeAsyncResult) ? maybeAsyncResult : Promise.resolve(maybeAsyncResult)); return handleResult(ctx, result); } refine(check, message) { const getIssueProperties = (val) => { if (typeof message === "string" || typeof message === "undefined") return { message }; else if (typeof message === "function") return message(val); else return message; }; return this._refinement((val, ctx) => { const result = check(val); const setError = () => ctx.addIssue({ code: ZodIssueCode.custom, ...getIssueProperties(val) }); if (typeof Promise !== "undefined" && result instanceof Promise) return result.then((data) => { if (!data) { setError(); return false; } else return true; }); if (!result) { setError(); return false; } else return true; }); } refinement(check, refinementData) { return this._refinement((val, ctx) => { if (!check(val)) { ctx.addIssue(typeof refinementData === "function" ? refinementData(val, ctx) : refinementData); return false; } else return true; }); } _refinement(refinement) { return new ZodEffects({ schema: this, typeName: ZodFirstPartyTypeKind.ZodEffects, effect: { type: "refinement", refinement } }); } superRefine(refinement) { return this._refinement(refinement); } constructor(def) { /** Alias of safeParseAsync */ this.spa = this.safeParseAsync; this._def = def; this.parse = this.parse.bind(this); this.safeParse = this.safeParse.bind(this); this.parseAsync = this.parseAsync.bind(this); this.safeParseAsync = this.safeParseAsync.bind(this); this.spa = this.spa.bind(this); this.refine = this.refine.bind(this); this.refinement = this.refinement.bind(this); this.superRefine = this.superRefine.bind(this); this.optional = this.optional.bind(this); this.nullable = this.nullable.bind(this); this.nullish = this.nullish.bind(this); this.array = this.array.bind(this); this.promise = this.promise.bind(this); this.or = this.or.bind(this); this.and = this.and.bind(this); this.transform = this.transform.bind(this); this.brand = this.brand.bind(this); this.default = this.default.bind(this); this.catch = this.catch.bind(this); this.describe = this.describe.bind(this); this.pipe = this.pipe.bind(this); this.readonly = this.readonly.bind(this); this.isNullable = this.isNullable.bind(this); this.isOptional = this.isOptional.bind(this); this["~standard"] = { version: 1, vendor: "zod", validate: (data) => this["~validate"](data) }; } optional() { return ZodOptional.create(this, this._def); } nullable() { return ZodNullable.create(this, this._def); } nullish() { return this.nullable().optional(); } array() { return ZodArray.create(this); } promise() { return ZodPromise.create(this, this._def); } or(option) { return ZodUnion.create([this, option], this._def); } 
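/**
 * Chainable helpers such as `optional()`, `nullable()`, `or()`, `and()`,
 * `transform()`, `default()`, `catch()`, `pipe()`, and `readonly()` each
 * return a new schema that wraps `this` (ZodOptional, ZodNullable, ZodUnion,
 * ZodIntersection, ZodEffects, ZodDefault, ZodCatch, ZodPipeline, ZodReadonly)
 * rather than mutating it. A minimal usage sketch; the `z` namespace and the
 * `min` check come from zod's public API and are not defined in this chunk:
 *
 * ```ts
 * const name = z.string().min(1).optional().default("anonymous");
 * name.parse(undefined); // => "anonymous"
 * name.parse("Ada"); // => "Ada"
 * ```
 */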
and(incoming) { return ZodIntersection.create(this, incoming, this._def); } transform(transform) { return new ZodEffects({ ...processCreateParams(this._def), schema: this, typeName: ZodFirstPartyTypeKind.ZodEffects, effect: { type: "transform", transform } }); } default(def) { const defaultValueFunc = typeof def === "function" ? def : () => def; return new ZodDefault({ ...processCreateParams(this._def), innerType: this, defaultValue: defaultValueFunc, typeName: ZodFirstPartyTypeKind.ZodDefault }); } brand() { return new ZodBranded({ typeName: ZodFirstPartyTypeKind.ZodBranded, type: this, ...processCreateParams(this._def) }); } catch(def) { const catchValueFunc = typeof def === "function" ? def : () => def; return new ZodCatch({ ...processCreateParams(this._def), innerType: this, catchValue: catchValueFunc, typeName: ZodFirstPartyTypeKind.ZodCatch }); } describe(description) { const This = this.constructor; return new This({ ...this._def, description }); } pipe(target) { return ZodPipeline.create(this, target); } readonly() { return ZodReadonly.create(this); } isOptional() { return this.safeParse(void 0).success; } isNullable() { return this.safeParse(null).success; } }; const cuidRegex = /^c[^\s-]{8,}$/i; const cuid2Regex = /^[0-9a-z]+$/; const ulidRegex = /^[0-9A-HJKMNP-TV-Z]{26}$/i; const uuidRegex = /^[0-9a-fA-F]{8}\b-[0-9a-fA-F]{4}\b-[0-9a-fA-F]{4}\b-[0-9a-fA-F]{4}\b-[0-9a-fA-F]{12}$/i; const nanoidRegex = /^[a-z0-9_-]{21}$/i; const jwtRegex = /^[A-Za-z0-9-_]+\.[A-Za-z0-9-_]+\.[A-Za-z0-9-_]*$/; const durationRegex = /^[-+]?P(?!$)(?:(?:[-+]?\d+Y)|(?:[-+]?\d+[.,]\d+Y$))?(?:(?:[-+]?\d+M)|(?:[-+]?\d+[.,]\d+M$))?(?:(?:[-+]?\d+W)|(?:[-+]?\d+[.,]\d+W$))?(?:(?:[-+]?\d+D)|(?:[-+]?\d+[.,]\d+D$))?(?:T(?=[\d+-])(?:(?:[-+]?\d+H)|(?:[-+]?\d+[.,]\d+H$))?(?:(?:[-+]?\d+M)|(?:[-+]?\d+[.,]\d+M$))?(?:[-+]?\d+(?:[.,]\d+)?S)?)??$/; const emailRegex = /^(?!\.)(?!.*\.\.)([A-Z0-9_'+\-\.]*)[A-Z0-9_+-]@([A-Z0-9][A-Z0-9\-]*\.)+[A-Z]{2,}$/i; const _emojiRegex = `^(\\p{Extended_Pictographic}|\\p{Emoji_Component})+$`; let emojiRegex$1; const ipv4Regex = /^(?:(?:25[0-5]|2[0-4][0-9]|1[0-9][0-9]|[1-9][0-9]|[0-9])\.){3}(?:25[0-5]|2[0-4][0-9]|1[0-9][0-9]|[1-9][0-9]|[0-9])$/; const ipv4CidrRegex = /^(?:(?:25[0-5]|2[0-4][0-9]|1[0-9][0-9]|[1-9][0-9]|[0-9])\.){3}(?:25[0-5]|2[0-4][0-9]|1[0-9][0-9]|[1-9][0-9]|[0-9])\/(3[0-2]|[12]?[0-9])$/; const ipv6Regex = /^(([0-9a-fA-F]{1,4}:){7,7}[0-9a-fA-F]{1,4}|([0-9a-fA-F]{1,4}:){1,7}:|([0-9a-fA-F]{1,4}:){1,6}:[0-9a-fA-F]{1,4}|([0-9a-fA-F]{1,4}:){1,5}(:[0-9a-fA-F]{1,4}){1,2}|([0-9a-fA-F]{1,4}:){1,4}(:[0-9a-fA-F]{1,4}){1,3}|([0-9a-fA-F]{1,4}:){1,3}(:[0-9a-fA-F]{1,4}){1,4}|([0-9a-fA-F]{1,4}:){1,2}(:[0-9a-fA-F]{1,4}){1,5}|[0-9a-fA-F]{1,4}:((:[0-9a-fA-F]{1,4}){1,6})|:((:[0-9a-fA-F]{1,4}){1,7}|:)|fe80:(:[0-9a-fA-F]{0,4}){0,4}%[0-9a-zA-Z]{1,}|::(ffff(:0{1,4}){0,1}:){0,1}((25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9])\.){3,3}(25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9])|([0-9a-fA-F]{1,4}:){1,4}:((25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9])\.){3,3}(25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9]))$/; const ipv6CidrRegex = 
/^(([0-9a-fA-F]{1,4}:){7,7}[0-9a-fA-F]{1,4}|([0-9a-fA-F]{1,4}:){1,7}:|([0-9a-fA-F]{1,4}:){1,6}:[0-9a-fA-F]{1,4}|([0-9a-fA-F]{1,4}:){1,5}(:[0-9a-fA-F]{1,4}){1,2}|([0-9a-fA-F]{1,4}:){1,4}(:[0-9a-fA-F]{1,4}){1,3}|([0-9a-fA-F]{1,4}:){1,3}(:[0-9a-fA-F]{1,4}){1,4}|([0-9a-fA-F]{1,4}:){1,2}(:[0-9a-fA-F]{1,4}){1,5}|[0-9a-fA-F]{1,4}:((:[0-9a-fA-F]{1,4}){1,6})|:((:[0-9a-fA-F]{1,4}){1,7}|:)|fe80:(:[0-9a-fA-F]{0,4}){0,4}%[0-9a-zA-Z]{1,}|::(ffff(:0{1,4}){0,1}:){0,1}((25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9])\.){3,3}(25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9])|([0-9a-fA-F]{1,4}:){1,4}:((25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9])\.){3,3}(25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9]))\/(12[0-8]|1[01][0-9]|[1-9]?[0-9])$/; const base64Regex = /^([0-9a-zA-Z+/]{4})*(([0-9a-zA-Z+/]{2}==)|([0-9a-zA-Z+/]{3}=))?$/; const base64urlRegex = /^([0-9a-zA-Z-_]{4})*(([0-9a-zA-Z-_]{2}(==)?)|([0-9a-zA-Z-_]{3}(=)?))?$/; const dateRegexSource = `((\\d\\d[2468][048]|\\d\\d[13579][26]|\\d\\d0[48]|[02468][048]00|[13579][26]00)-02-29|\\d{4}-((0[13578]|1[02])-(0[1-9]|[12]\\d|3[01])|(0[469]|11)-(0[1-9]|[12]\\d|30)|(02)-(0[1-9]|1\\d|2[0-8])))`; const dateRegex = new RegExp(`^${dateRegexSource}$`); function timeRegexSource(args) { let secondsRegexSource = `[0-5]\\d`; if (args.precision) secondsRegexSource = `${secondsRegexSource}\\.\\d{${args.precision}}`; else if (args.precision == null) secondsRegexSource = `${secondsRegexSource}(\\.\\d+)?`; const secondsQuantifier = args.precision ? "+" : "?"; return `([01]\\d|2[0-3]):[0-5]\\d(:${secondsRegexSource})${secondsQuantifier}`; } function timeRegex(args) { return new RegExp(`^${timeRegexSource(args)}$`); } function datetimeRegex(args) { let regex = `${dateRegexSource}T${timeRegexSource(args)}`; const opts = []; opts.push(args.local ? `Z?` : `Z`); if (args.offset) opts.push(`([+-]\\d{2}:?\\d{2})`); regex = `${regex}(${opts.join("|")})`; return new RegExp(`^${regex}$`); } function isValidIP(ip, version) { if ((version === "v4" || !version) && ipv4Regex.test(ip)) return true; if ((version === "v6" || !version) && ipv6Regex.test(ip)) return true; return false; } function isValidJWT(jwt, alg) { if (!jwtRegex.test(jwt)) return false; try { const [header] = jwt.split("."); const base64 = header.replace(/-/g, "+").replace(/_/g, "/").padEnd(header.length + (4 - header.length % 4) % 4, "="); const decoded = JSON.parse(atob(base64)); if (typeof decoded !== "object" || decoded === null) return false; if ("typ" in decoded && decoded?.typ !== "JWT") return false; if (!decoded.alg) return false; if (alg && decoded.alg !== alg) return false; return true; } catch { return false; } } function isValidCidr(ip, version) { if ((version === "v4" || !version) && ipv4CidrRegex.test(ip)) return true; if ((version === "v6" || !version) && ipv6CidrRegex.test(ip)) return true; return false; } var ZodString = class ZodString extends ZodType { _parse(input) { if (this._def.coerce) input.data = String(input.data); const parsedType = this._getType(input); if (parsedType !== ZodParsedType.string) { const ctx$1 = this._getOrReturnCtx(input); addIssueToContext(ctx$1, { code: ZodIssueCode.invalid_type, expected: ZodParsedType.string, received: ctx$1.parsedType }); return INVALID; } const status = new ParseStatus(); let ctx = void 0; for (const check of this._def.checks) if (check.kind === "min") { if (input.data.length < check.value) { ctx = this._getOrReturnCtx(input, ctx); addIssueToContext(ctx, { code: ZodIssueCode.too_small, minimum: check.value, type: "string", inclusive: true, exact: false, message: check.message }); 
status.dirty(); } } else if (check.kind === "max") { if (input.data.length > check.value) { ctx = this._getOrReturnCtx(input, ctx); addIssueToContext(ctx, { code: ZodIssueCode.too_big, maximum: check.value, type: "string", inclusive: true, exact: false, message: check.message }); status.dirty(); } } else if (check.kind === "length") { const tooBig = input.data.length > check.value; const tooSmall = input.data.length < check.value; if (tooBig || tooSmall) { ctx = this._getOrReturnCtx(input, ctx); if (tooBig) addIssueToContext(ctx, { code: ZodIssueCode.too_big, maximum: check.value, type: "string", inclusive: true, exact: true, message: check.message }); else if (tooSmall) addIssueToContext(ctx, { code: ZodIssueCode.too_small, minimum: check.value, type: "string", inclusive: true, exact: true, message: check.message }); status.dirty(); } } else if (check.kind === "email") { if (!emailRegex.test(input.data)) { ctx = this._getOrReturnCtx(input, ctx); addIssueToContext(ctx, { validation: "email", code: ZodIssueCode.invalid_string, message: check.message }); status.dirty(); } } else if (check.kind === "emoji") { if (!emojiRegex$1) emojiRegex$1 = new RegExp(_emojiRegex, "u"); if (!emojiRegex$1.test(input.data)) { ctx = this._getOrReturnCtx(input, ctx); addIssueToContext(ctx, { validation: "emoji", code: ZodIssueCode.invalid_string, message: check.message }); status.dirty(); } } else if (check.kind === "uuid") { if (!uuidRegex.test(input.data)) { ctx = this._getOrReturnCtx(input, ctx); addIssueToContext(ctx, { validation: "uuid", code: ZodIssueCode.invalid_string, message: check.message }); status.dirty(); } } else if (check.kind === "nanoid") { if (!nanoidRegex.test(input.data)) { ctx = this._getOrReturnCtx(input, ctx); addIssueToContext(ctx, { validation: "nanoid", code: ZodIssueCode.invalid_string, message: check.message }); status.dirty(); } } else if (check.kind === "cuid") { if (!cuidRegex.test(input.data)) { ctx = this._getOrReturnCtx(input, ctx); addIssueToContext(ctx, { validation: "cuid", code: ZodIssueCode.invalid_string, message: check.message }); status.dirty(); } } else if (check.kind === "cuid2") { if (!cuid2Regex.test(input.data)) { ctx = this._getOrReturnCtx(input, ctx); addIssueToContext(ctx, { validation: "cuid2", code: ZodIssueCode.invalid_string, message: check.message }); status.dirty(); } } else if (check.kind === "ulid") { if (!ulidRegex.test(input.data)) { ctx = this._getOrReturnCtx(input, ctx); addIssueToContext(ctx, { validation: "ulid", code: ZodIssueCode.invalid_string, message: check.message }); status.dirty(); } } else if (check.kind === "url") try { new URL(input.data); } catch { ctx = this._getOrReturnCtx(input, ctx); addIssueToContext(ctx, { validation: "url", code: ZodIssueCode.invalid_string, message: check.message }); status.dirty(); } else if (check.kind === "regex") { check.regex.lastIndex = 0; const testResult = check.regex.test(input.data); if (!testResult) { ctx = this._getOrReturnCtx(input, ctx); addIssueToContext(ctx, { validation: "regex", code: ZodIssueCode.invalid_string, message: check.message }); status.dirty(); } } else if (check.kind === "trim") input.data = input.data.trim(); else if (check.kind === "includes") { if (!input.data.includes(check.value, check.position)) { ctx = this._getOrReturnCtx(input, ctx); addIssueToContext(ctx, { code: ZodIssueCode.invalid_string, validation: { includes: check.value, position: check.position }, message: check.message }); status.dirty(); } } else if (check.kind === "toLowerCase") input.data = 
input.data.toLowerCase(); else if (check.kind === "toUpperCase") input.data = input.data.toUpperCase(); else if (check.kind === "startsWith") { if (!input.data.startsWith(check.value)) { ctx = this._getOrReturnCtx(input, ctx); addIssueToContext(ctx, { code: ZodIssueCode.invalid_string, validation: { startsWith: check.value }, message: check.message }); status.dirty(); } } else if (check.kind === "endsWith") { if (!input.data.endsWith(check.value)) { ctx = this._getOrReturnCtx(input, ctx); addIssueToContext(ctx, { code: ZodIssueCode.invalid_string, validation: { endsWith: check.value }, message: check.message }); status.dirty(); } } else if (check.kind === "datetime") { const regex = datetimeRegex(check); if (!regex.test(input.data)) { ctx = this._getOrReturnCtx(input, ctx); addIssueToContext(ctx, { code: ZodIssueCode.invalid_string, validation: "datetime", message: check.message }); status.dirty(); } } else if (check.kind === "date") { const regex = dateRegex; if (!regex.test(input.data)) { ctx = this._getOrReturnCtx(input, ctx); addIssueToContext(ctx, { code: ZodIssueCode.invalid_string, validation: "date", message: check.message }); status.dirty(); } } else if (check.kind === "time") { const regex = timeRegex(check); if (!regex.test(input.data)) { ctx = this._getOrReturnCtx(input, ctx); addIssueToContext(ctx, { code: ZodIssueCode.invalid_string, validation: "time", message: check.message }); status.dirty(); } } else if (check.kind === "duration") { if (!durationRegex.test(input.data)) { ctx = this._getOrReturnCtx(input, ctx); addIssueToContext(ctx, { validation: "duration", code: ZodIssueCode.invalid_string, message: check.message }); status.dirty(); } } else if (check.kind === "ip") { if (!isValidIP(input.data, check.version)) { ctx = this._getOrReturnCtx(input, ctx); addIssueToContext(ctx, { validation: "ip", code: ZodIssueCode.invalid_string, message: check.message }); status.dirty(); } } else if (check.kind === "jwt") { if (!isValidJWT(input.data, check.alg)) { ctx = this._getOrReturnCtx(input, ctx); addIssueToContext(ctx, { validation: "jwt", code: ZodIssueCode.invalid_string, message: check.message }); status.dirty(); } } else if (check.kind === "cidr") { if (!isValidCidr(input.data, check.version)) { ctx = this._getOrReturnCtx(input, ctx); addIssueToContext(ctx, { validation: "cidr", code: ZodIssueCode.invalid_string, message: check.message }); status.dirty(); } } else if (check.kind === "base64") { if (!base64Regex.test(input.data)) { ctx = this._getOrReturnCtx(input, ctx); addIssueToContext(ctx, { validation: "base64", code: ZodIssueCode.invalid_string, message: check.message }); status.dirty(); } } else if (check.kind === "base64url") { if (!base64urlRegex.test(input.data)) { ctx = this._getOrReturnCtx(input, ctx); addIssueToContext(ctx, { validation: "base64url", code: ZodIssueCode.invalid_string, message: check.message }); status.dirty(); } } else util.assertNever(check); return { status: status.value, value: input.data }; } _regex(regex, validation, message) { return this.refinement((data) => regex.test(data), { validation, code: ZodIssueCode.invalid_string, ...errorUtil.errToObj(message) }); } _addCheck(check) { return new ZodString({ ...this._def, checks: [...this._def.checks, check] }); } email(message) { return this._addCheck({ kind: "email", ...errorUtil.errToObj(message) }); } url(message) { return this._addCheck({ kind: "url", ...errorUtil.errToObj(message) }); } emoji(message) { return this._addCheck({ kind: "emoji", ...errorUtil.errToObj(message) }); } uuid(message) 
{ return this._addCheck({ kind: "uuid", ...errorUtil.errToObj(message) }); } nanoid(message) { return this._addCheck({ kind: "nanoid", ...errorUtil.errToObj(message) }); } cuid(message) { return this._addCheck({ kind: "cuid", ...errorUtil.errToObj(message) }); } cuid2(message) { return this._addCheck({ kind: "cuid2", ...errorUtil.errToObj(message) }); } ulid(message) { return this._addCheck({ kind: "ulid", ...errorUtil.errToObj(message) }); } base64(message) { return this._addCheck({ kind: "base64", ...errorUtil.errToObj(message) }); } base64url(message) { return this._addCheck({ kind: "base64url", ...errorUtil.errToObj(message) }); } jwt(options) { return this._addCheck({ kind: "jwt", ...errorUtil.errToObj(options) }); } ip(options) { return this._addCheck({ kind: "ip", ...errorUtil.errToObj(options) }); } cidr(options) { return this._addCheck({ kind: "cidr", ...errorUtil.errToObj(options) }); } datetime(options) { if (typeof options === "string") return this._addCheck({ kind: "datetime", precision: null, offset: false, local: false, message: options }); return this._addCheck({ kind: "datetime", precision: typeof options?.precision === "undefined" ? null : options?.precision, offset: options?.offset ?? false, local: options?.local ?? false, ...errorUtil.errToObj(options?.message) }); } date(message) { return this._addCheck({ kind: "date", message }); } time(options) { if (typeof options === "string") return this._addCheck({ kind: "time", precision: null, message: options }); return this._addCheck({ kind: "time", precision: typeof options?.precision === "undefined" ? null : options?.precision, ...errorUtil.errToObj(options?.message) }); } duration(message) { return this._addCheck({ kind: "duration", ...errorUtil.errToObj(message) }); } regex(regex, message) { return this._addCheck({ kind: "regex", regex, ...errorUtil.errToObj(message) }); } includes(value, options) { return this._addCheck({ kind: "includes", value, position: options?.position, ...errorUtil.errToObj(options?.message) }); } startsWith(value, message) { return this._addCheck({ kind: "startsWith", value, ...errorUtil.errToObj(message) }); } endsWith(value, message) { return this._addCheck({ kind: "endsWith", value, ...errorUtil.errToObj(message) }); } min(minLength, message) { return this._addCheck({ kind: "min", value: minLength, ...errorUtil.errToObj(message) }); } max(maxLength, message) { return this._addCheck({ kind: "max", value: maxLength, ...errorUtil.errToObj(message) }); } length(len, message) { return this._addCheck({ kind: "length", value: len, ...errorUtil.errToObj(message) }); } /** * Equivalent to `.min(1)` */ nonempty(message) { return this.min(1, errorUtil.errToObj(message)); } trim() { return new ZodString({ ...this._def, checks: [...this._def.checks, { kind: "trim" }] }); } toLowerCase() { return new ZodString({ ...this._def, checks: [...this._def.checks, { kind: "toLowerCase" }] }); } toUpperCase() { return new ZodString({ ...this._def, checks: [...this._def.checks, { kind: "toUpperCase" }] }); } get isDatetime() { return !!this._def.checks.find((ch) => ch.kind === "datetime"); } get isDate() { return !!this._def.checks.find((ch) => ch.kind === "date"); } get isTime() { return !!this._def.checks.find((ch) => ch.kind === "time"); } get isDuration() { return !!this._def.checks.find((ch) => ch.kind === "duration"); } get isEmail() { return !!this._def.checks.find((ch) => ch.kind === "email"); } get isURL() { return !!this._def.checks.find((ch) => ch.kind === "url"); } get isEmoji() { return 
!!this._def.checks.find((ch) => ch.kind === "emoji"); } get isUUID() { return !!this._def.checks.find((ch) => ch.kind === "uuid"); } get isNANOID() { return !!this._def.checks.find((ch) => ch.kind === "nanoid"); } get isCUID() { return !!this._def.checks.find((ch) => ch.kind === "cuid"); } get isCUID2() { return !!this._def.checks.find((ch) => ch.kind === "cuid2"); } get isULID() { return !!this._def.checks.find((ch) => ch.kind === "ulid"); } get isIP() { return !!this._def.checks.find((ch) => ch.kind === "ip"); } get isCIDR() { return !!this._def.checks.find((ch) => ch.kind === "cidr"); } get isBase64() { return !!this._def.checks.find((ch) => ch.kind === "base64"); } get isBase64url() { return !!this._def.checks.find((ch) => ch.kind === "base64url"); } get minLength() { let min = null; for (const ch of this._def.checks) if (ch.kind === "min") { if (min === null || ch.value > min) min = ch.value; } return min; } get maxLength() { let max = null; for (const ch of this._def.checks) if (ch.kind === "max") { if (max === null || ch.value < max) max = ch.value; } return max; } }; ZodString.create = (params) => { return new ZodString({ checks: [], typeName: ZodFirstPartyTypeKind.ZodString, coerce: params?.coerce ?? false, ...processCreateParams(params) }); }; function floatSafeRemainder(val, step) { const valDecCount = (val.toString().split(".")[1] || "").length; const stepDecCount = (step.toString().split(".")[1] || "").length; const decCount = valDecCount > stepDecCount ? valDecCount : stepDecCount; const valInt = Number.parseInt(val.toFixed(decCount).replace(".", "")); const stepInt = Number.parseInt(step.toFixed(decCount).replace(".", "")); return valInt % stepInt / 10 ** decCount; } var ZodNumber = class ZodNumber extends ZodType { constructor() { super(...arguments); this.min = this.gte; this.max = this.lte; this.step = this.multipleOf; } _parse(input) { if (this._def.coerce) input.data = Number(input.data); const parsedType = this._getType(input); if (parsedType !== ZodParsedType.number) { const ctx$1 = this._getOrReturnCtx(input); addIssueToContext(ctx$1, { code: ZodIssueCode.invalid_type, expected: ZodParsedType.number, received: ctx$1.parsedType }); return INVALID; } let ctx = void 0; const status = new ParseStatus(); for (const check of this._def.checks) if (check.kind === "int") { if (!util.isInteger(input.data)) { ctx = this._getOrReturnCtx(input, ctx); addIssueToContext(ctx, { code: ZodIssueCode.invalid_type, expected: "integer", received: "float", message: check.message }); status.dirty(); } } else if (check.kind === "min") { const tooSmall = check.inclusive ? input.data < check.value : input.data <= check.value; if (tooSmall) { ctx = this._getOrReturnCtx(input, ctx); addIssueToContext(ctx, { code: ZodIssueCode.too_small, minimum: check.value, type: "number", inclusive: check.inclusive, exact: false, message: check.message }); status.dirty(); } } else if (check.kind === "max") { const tooBig = check.inclusive ? 
input.data > check.value : input.data >= check.value; if (tooBig) { ctx = this._getOrReturnCtx(input, ctx); addIssueToContext(ctx, { code: ZodIssueCode.too_big, maximum: check.value, type: "number", inclusive: check.inclusive, exact: false, message: check.message }); status.dirty(); } } else if (check.kind === "multipleOf") { if (floatSafeRemainder(input.data, check.value) !== 0) { ctx = this._getOrReturnCtx(input, ctx); addIssueToContext(ctx, { code: ZodIssueCode.not_multiple_of, multipleOf: check.value, message: check.message }); status.dirty(); } } else if (check.kind === "finite") { if (!Number.isFinite(input.data)) { ctx = this._getOrReturnCtx(input, ctx); addIssueToContext(ctx, { code: ZodIssueCode.not_finite, message: check.message }); status.dirty(); } } else util.assertNever(check); return { status: status.value, value: input.data }; } gte(value, message) { return this.setLimit("min", value, true, errorUtil.toString(message)); } gt(value, message) { return this.setLimit("min", value, false, errorUtil.toString(message)); } lte(value, message) { return this.setLimit("max", value, true, errorUtil.toString(message)); } lt(value, message) { return this.setLimit("max", value, false, errorUtil.toString(message)); } setLimit(kind, value, inclusive, message) { return new ZodNumber({ ...this._def, checks: [...this._def.checks, { kind, value, inclusive, message: errorUtil.toString(message) }] }); } _addCheck(check) { return new ZodNumber({ ...this._def, checks: [...this._def.checks, check] }); } int(message) { return this._addCheck({ kind: "int", message: errorUtil.toString(message) }); } positive(message) { return this._addCheck({ kind: "min", value: 0, inclusive: false, message: errorUtil.toString(message) }); } negative(message) { return this._addCheck({ kind: "max", value: 0, inclusive: false, message: errorUtil.toString(message) }); } nonpositive(message) { return this._addCheck({ kind: "max", value: 0, inclusive: true, message: errorUtil.toString(message) }); } nonnegative(message) { return this._addCheck({ kind: "min", value: 0, inclusive: true, message: errorUtil.toString(message) }); } multipleOf(value, message) { return this._addCheck({ kind: "multipleOf", value, message: errorUtil.toString(message) }); } finite(message) { return this._addCheck({ kind: "finite", message: errorUtil.toString(message) }); } safe(message) { return this._addCheck({ kind: "min", inclusive: true, value: Number.MIN_SAFE_INTEGER, message: errorUtil.toString(message) })._addCheck({ kind: "max", inclusive: true, value: Number.MAX_SAFE_INTEGER, message: errorUtil.toString(message) }); } get minValue() { let min = null; for (const ch of this._def.checks) if (ch.kind === "min") { if (min === null || ch.value > min) min = ch.value; } return min; } get maxValue() { let max = null; for (const ch of this._def.checks) if (ch.kind === "max") { if (max === null || ch.value < max) max = ch.value; } return max; } get isInt() { return !!this._def.checks.find((ch) => ch.kind === "int" || ch.kind === "multipleOf" && util.isInteger(ch.value)); } get isFinite() { let max = null; let min = null; for (const ch of this._def.checks) if (ch.kind === "finite" || ch.kind === "int" || ch.kind === "multipleOf") return true; else if (ch.kind === "min") { if (min === null || ch.value > min) min = ch.value; } else if (ch.kind === "max") { if (max === null || ch.value < max) max = ch.value; } return Number.isFinite(min) && Number.isFinite(max); } }; ZodNumber.create = (params) => { return new ZodNumber({ checks: [], typeName: 
ZodFirstPartyTypeKind.ZodNumber, coerce: params?.coerce || false, ...processCreateParams(params) }); }; var ZodBigInt = class ZodBigInt extends ZodType { constructor() { super(...arguments); this.min = this.gte; this.max = this.lte; } _parse(input) { if (this._def.coerce) try { input.data = BigInt(input.data); } catch { return this._getInvalidInput(input); } const parsedType = this._getType(input); if (parsedType !== ZodParsedType.bigint) return this._getInvalidInput(input); let ctx = void 0; const status = new ParseStatus(); for (const check of this._def.checks) if (check.kind === "min") { const tooSmall = check.inclusive ? input.data < check.value : input.data <= check.value; if (tooSmall) { ctx = this._getOrReturnCtx(input, ctx); addIssueToContext(ctx, { code: ZodIssueCode.too_small, type: "bigint", minimum: check.value, inclusive: check.inclusive, message: check.message }); status.dirty(); } } else if (check.kind === "max") { const tooBig = check.inclusive ? input.data > check.value : input.data >= check.value; if (tooBig) { ctx = this._getOrReturnCtx(input, ctx); addIssueToContext(ctx, { code: ZodIssueCode.too_big, type: "bigint", maximum: check.value, inclusive: check.inclusive, message: check.message }); status.dirty(); } } else if (check.kind === "multipleOf") { if (input.data % check.value !== BigInt(0)) { ctx = this._getOrReturnCtx(input, ctx); addIssueToContext(ctx, { code: ZodIssueCode.not_multiple_of, multipleOf: check.value, message: check.message }); status.dirty(); } } else util.assertNever(check); return { status: status.value, value: input.data }; } _getInvalidInput(input) { const ctx = this._getOrReturnCtx(input); addIssueToContext(ctx, { code: ZodIssueCode.invalid_type, expected: ZodParsedType.bigint, received: ctx.parsedType }); return INVALID; } gte(value, message) { return this.setLimit("min", value, true, errorUtil.toString(message)); } gt(value, message) { return this.setLimit("min", value, false, errorUtil.toString(message)); } lte(value, message) { return this.setLimit("max", value, true, errorUtil.toString(message)); } lt(value, message) { return this.setLimit("max", value, false, errorUtil.toString(message)); } setLimit(kind, value, inclusive, message) { return new ZodBigInt({ ...this._def, checks: [...this._def.checks, { kind, value, inclusive, message: errorUtil.toString(message) }] }); } _addCheck(check) { return new ZodBigInt({ ...this._def, checks: [...this._def.checks, check] }); } positive(message) { return this._addCheck({ kind: "min", value: BigInt(0), inclusive: false, message: errorUtil.toString(message) }); } negative(message) { return this._addCheck({ kind: "max", value: BigInt(0), inclusive: false, message: errorUtil.toString(message) }); } nonpositive(message) { return this._addCheck({ kind: "max", value: BigInt(0), inclusive: true, message: errorUtil.toString(message) }); } nonnegative(message) { return this._addCheck({ kind: "min", value: BigInt(0), inclusive: true, message: errorUtil.toString(message) }); } multipleOf(value, message) { return this._addCheck({ kind: "multipleOf", value, message: errorUtil.toString(message) }); } get minValue() { let min = null; for (const ch of this._def.checks) if (ch.kind === "min") { if (min === null || ch.value > min) min = ch.value; } return min; } get maxValue() { let max = null; for (const ch of this._def.checks) if (ch.kind === "max") { if (max === null || ch.value < max) max = ch.value; } return max; } }; ZodBigInt.create = (params) => { return new ZodBigInt({ checks: [], typeName: 
ZodFirstPartyTypeKind.ZodBigInt, coerce: params?.coerce ?? false, ...processCreateParams(params) }); }; var ZodBoolean = class extends ZodType { _parse(input) { if (this._def.coerce) input.data = Boolean(input.data); const parsedType = this._getType(input); if (parsedType !== ZodParsedType.boolean) { const ctx = this._getOrReturnCtx(input); addIssueToContext(ctx, { code: ZodIssueCode.invalid_type, expected: ZodParsedType.boolean, received: ctx.parsedType }); return INVALID; } return OK(input.data); } }; ZodBoolean.create = (params) => { return new ZodBoolean({ typeName: ZodFirstPartyTypeKind.ZodBoolean, coerce: params?.coerce || false, ...processCreateParams(params) }); }; var ZodDate = class ZodDate extends ZodType { _parse(input) { if (this._def.coerce) input.data = new Date(input.data); const parsedType = this._getType(input); if (parsedType !== ZodParsedType.date) { const ctx$1 = this._getOrReturnCtx(input); addIssueToContext(ctx$1, { code: ZodIssueCode.invalid_type, expected: ZodParsedType.date, received: ctx$1.parsedType }); return INVALID; } if (Number.isNaN(input.data.getTime())) { const ctx$1 = this._getOrReturnCtx(input); addIssueToContext(ctx$1, { code: ZodIssueCode.invalid_date }); return INVALID; } const status = new ParseStatus(); let ctx = void 0; for (const check of this._def.checks) if (check.kind === "min") { if (input.data.getTime() < check.value) { ctx = this._getOrReturnCtx(input, ctx); addIssueToContext(ctx, { code: ZodIssueCode.too_small, message: check.message, inclusive: true, exact: false, minimum: check.value, type: "date" }); status.dirty(); } } else if (check.kind === "max") { if (input.data.getTime() > check.value) { ctx = this._getOrReturnCtx(input, ctx); addIssueToContext(ctx, { code: ZodIssueCode.too_big, message: check.message, inclusive: true, exact: false, maximum: check.value, type: "date" }); status.dirty(); } } else util.assertNever(check); return { status: status.value, value: new Date(input.data.getTime()) }; } _addCheck(check) { return new ZodDate({ ...this._def, checks: [...this._def.checks, check] }); } min(minDate, message) { return this._addCheck({ kind: "min", value: minDate.getTime(), message: errorUtil.toString(message) }); } max(maxDate, message) { return this._addCheck({ kind: "max", value: maxDate.getTime(), message: errorUtil.toString(message) }); } get minDate() { let min = null; for (const ch of this._def.checks) if (ch.kind === "min") { if (min === null || ch.value > min) min = ch.value; } return min != null ? new Date(min) : null; } get maxDate() { let max = null; for (const ch of this._def.checks) if (ch.kind === "max") { if (max === null || ch.value < max) max = ch.value; } return max != null ? 
new Date(max) : null; } }; ZodDate.create = (params) => { return new ZodDate({ checks: [], coerce: params?.coerce || false, typeName: ZodFirstPartyTypeKind.ZodDate, ...processCreateParams(params) }); }; var ZodSymbol = class extends ZodType { _parse(input) { const parsedType = this._getType(input); if (parsedType !== ZodParsedType.symbol) { const ctx = this._getOrReturnCtx(input); addIssueToContext(ctx, { code: ZodIssueCode.invalid_type, expected: ZodParsedType.symbol, received: ctx.parsedType }); return INVALID; } return OK(input.data); } }; ZodSymbol.create = (params) => { return new ZodSymbol({ typeName: ZodFirstPartyTypeKind.ZodSymbol, ...processCreateParams(params) }); }; var ZodUndefined = class extends ZodType { _parse(input) { const parsedType = this._getType(input); if (parsedType !== ZodParsedType.undefined) { const ctx = this._getOrReturnCtx(input); addIssueToContext(ctx, { code: ZodIssueCode.invalid_type, expected: ZodParsedType.undefined, received: ctx.parsedType }); return INVALID; } return OK(input.data); } }; ZodUndefined.create = (params) => { return new ZodUndefined({ typeName: ZodFirstPartyTypeKind.ZodUndefined, ...processCreateParams(params) }); }; var ZodNull = class extends ZodType { _parse(input) { const parsedType = this._getType(input); if (parsedType !== ZodParsedType.null) { const ctx = this._getOrReturnCtx(input); addIssueToContext(ctx, { code: ZodIssueCode.invalid_type, expected: ZodParsedType.null, received: ctx.parsedType }); return INVALID; } return OK(input.data); } }; ZodNull.create = (params) => { return new ZodNull({ typeName: ZodFirstPartyTypeKind.ZodNull, ...processCreateParams(params) }); }; var ZodAny = class extends ZodType { constructor() { super(...arguments); this._any = true; } _parse(input) { return OK(input.data); } }; ZodAny.create = (params) => { return new ZodAny({ typeName: ZodFirstPartyTypeKind.ZodAny, ...processCreateParams(params) }); }; var ZodUnknown = class extends ZodType { constructor() { super(...arguments); this._unknown = true; } _parse(input) { return OK(input.data); } }; ZodUnknown.create = (params) => { return new ZodUnknown({ typeName: ZodFirstPartyTypeKind.ZodUnknown, ...processCreateParams(params) }); }; var ZodNever = class extends ZodType { _parse(input) { const ctx = this._getOrReturnCtx(input); addIssueToContext(ctx, { code: ZodIssueCode.invalid_type, expected: ZodParsedType.never, received: ctx.parsedType }); return INVALID; } }; ZodNever.create = (params) => { return new ZodNever({ typeName: ZodFirstPartyTypeKind.ZodNever, ...processCreateParams(params) }); }; var ZodVoid = class extends ZodType { _parse(input) { const parsedType = this._getType(input); if (parsedType !== ZodParsedType.undefined) { const ctx = this._getOrReturnCtx(input); addIssueToContext(ctx, { code: ZodIssueCode.invalid_type, expected: ZodParsedType.void, received: ctx.parsedType }); return INVALID; } return OK(input.data); } }; ZodVoid.create = (params) => { return new ZodVoid({ typeName: ZodFirstPartyTypeKind.ZodVoid, ...processCreateParams(params) }); }; var ZodArray = class ZodArray extends ZodType { _parse(input) { const { ctx, status } = this._processInputParams(input); const def = this._def; if (ctx.parsedType !== ZodParsedType.array) { addIssueToContext(ctx, { code: ZodIssueCode.invalid_type, expected: ZodParsedType.array, received: ctx.parsedType }); return INVALID; } if (def.exactLength !== null) { const tooBig = ctx.data.length > def.exactLength.value; const tooSmall = ctx.data.length < def.exactLength.value; if (tooBig || tooSmall) { 
addIssueToContext(ctx, { code: tooBig ? ZodIssueCode.too_big : ZodIssueCode.too_small, minimum: tooSmall ? def.exactLength.value : void 0, maximum: tooBig ? def.exactLength.value : void 0, type: "array", inclusive: true, exact: true, message: def.exactLength.message }); status.dirty(); } } if (def.minLength !== null) { if (ctx.data.length < def.minLength.value) { addIssueToContext(ctx, { code: ZodIssueCode.too_small, minimum: def.minLength.value, type: "array", inclusive: true, exact: false, message: def.minLength.message }); status.dirty(); } } if (def.maxLength !== null) { if (ctx.data.length > def.maxLength.value) { addIssueToContext(ctx, { code: ZodIssueCode.too_big, maximum: def.maxLength.value, type: "array", inclusive: true, exact: false, message: def.maxLength.message }); status.dirty(); } } if (ctx.common.async) return Promise.all([...ctx.data].map((item, i) => { return def.type._parseAsync(new ParseInputLazyPath(ctx, item, ctx.path, i)); })).then((result$1) => { return ParseStatus.mergeArray(status, result$1); }); const result = [...ctx.data].map((item, i) => { return def.type._parseSync(new ParseInputLazyPath(ctx, item, ctx.path, i)); }); return ParseStatus.mergeArray(status, result); } get element() { return this._def.type; } min(minLength, message) { return new ZodArray({ ...this._def, minLength: { value: minLength, message: errorUtil.toString(message) } }); } max(maxLength, message) { return new ZodArray({ ...this._def, maxLength: { value: maxLength, message: errorUtil.toString(message) } }); } length(len, message) { return new ZodArray({ ...this._def, exactLength: { value: len, message: errorUtil.toString(message) } }); } nonempty(message) { return this.min(1, message); } }; ZodArray.create = (schema, params) => { return new ZodArray({ type: schema, minLength: null, maxLength: null, exactLength: null, typeName: ZodFirstPartyTypeKind.ZodArray, ...processCreateParams(params) }); }; function deepPartialify(schema) { if (schema instanceof ZodObject) { const newShape = {}; for (const key in schema.shape) { const fieldSchema = schema.shape[key]; newShape[key] = ZodOptional.create(deepPartialify(fieldSchema)); } return new ZodObject({ ...schema._def, shape: () => newShape }); } else if (schema instanceof ZodArray) return new ZodArray({ ...schema._def, type: deepPartialify(schema.element) }); else if (schema instanceof ZodOptional) return ZodOptional.create(deepPartialify(schema.unwrap())); else if (schema instanceof ZodNullable) return ZodNullable.create(deepPartialify(schema.unwrap())); else if (schema instanceof ZodTuple) return ZodTuple.create(schema.items.map((item) => deepPartialify(item))); else return schema; } var ZodObject = class ZodObject extends ZodType { constructor() { super(...arguments); this._cached = null; /** * @deprecated In most cases, this is no longer needed - unknown properties are now silently stripped. * If you want to pass through unknown properties, use `.passthrough()` instead. 
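* (Illustrative sketch; the object shapes below are assumptions.) By default unknown keys are
* stripped, while `.passthrough()` keeps them:
*   z.object({ a: z.string() }).parse({ a: "x", extra: 1 });               // -> { a: "x" }
*   z.object({ a: z.string() }).passthrough().parse({ a: "x", extra: 1 }); // -> { a: "x", extra: 1 }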
*/ this.nonstrict = this.passthrough; /** * @deprecated Use `.extend` instead * */ this.augment = this.extend; } _getCached() { if (this._cached !== null) return this._cached; const shape = this._def.shape(); const keys = util.objectKeys(shape); this._cached = { shape, keys }; return this._cached; } _parse(input) { const parsedType = this._getType(input); if (parsedType !== ZodParsedType.object) { const ctx$1 = this._getOrReturnCtx(input); addIssueToContext(ctx$1, { code: ZodIssueCode.invalid_type, expected: ZodParsedType.object, received: ctx$1.parsedType }); return INVALID; } const { status, ctx } = this._processInputParams(input); const { shape, keys: shapeKeys } = this._getCached(); const extraKeys = []; if (!(this._def.catchall instanceof ZodNever && this._def.unknownKeys === "strip")) { for (const key in ctx.data) if (!shapeKeys.includes(key)) extraKeys.push(key); } const pairs = []; for (const key of shapeKeys) { const keyValidator = shape[key]; const value = ctx.data[key]; pairs.push({ key: { status: "valid", value: key }, value: keyValidator._parse(new ParseInputLazyPath(ctx, value, ctx.path, key)), alwaysSet: key in ctx.data }); } if (this._def.catchall instanceof ZodNever) { const unknownKeys = this._def.unknownKeys; if (unknownKeys === "passthrough") for (const key of extraKeys) pairs.push({ key: { status: "valid", value: key }, value: { status: "valid", value: ctx.data[key] } }); else if (unknownKeys === "strict") { if (extraKeys.length > 0) { addIssueToContext(ctx, { code: ZodIssueCode.unrecognized_keys, keys: extraKeys }); status.dirty(); } } else if (unknownKeys === "strip") {} else throw new Error(`Internal ZodObject error: invalid unknownKeys value.`); } else { const catchall = this._def.catchall; for (const key of extraKeys) { const value = ctx.data[key]; pairs.push({ key: { status: "valid", value: key }, value: catchall._parse(new ParseInputLazyPath(ctx, value, ctx.path, key)), alwaysSet: key in ctx.data }); } } if (ctx.common.async) return Promise.resolve().then(async () => { const syncPairs = []; for (const pair of pairs) { const key = await pair.key; const value = await pair.value; syncPairs.push({ key, value, alwaysSet: pair.alwaysSet }); } return syncPairs; }).then((syncPairs) => { return ParseStatus.mergeObjectSync(status, syncPairs); }); else return ParseStatus.mergeObjectSync(status, pairs); } get shape() { return this._def.shape(); } strict(message) { errorUtil.errToObj; return new ZodObject({ ...this._def, unknownKeys: "strict", ...message !== void 0 ? { errorMap: (issue, ctx) => { const defaultError = this._def.errorMap?.(issue, ctx).message ?? ctx.defaultError; if (issue.code === "unrecognized_keys") return { message: errorUtil.errToObj(message).message ?? defaultError }; return { message: defaultError }; } } : {} }); } strip() { return new ZodObject({ ...this._def, unknownKeys: "strip" }); } passthrough() { return new ZodObject({ ...this._def, unknownKeys: "passthrough" }); } extend(augmentation) { return new ZodObject({ ...this._def, shape: () => ({ ...this._def.shape(), ...augmentation }) }); } /** * Prior to zod@1.0.12 there was a bug in the * inferred type of merged objects. Please * upgrade if you are experiencing issues. 
*/ merge(merging) { const merged = new ZodObject({ unknownKeys: merging._def.unknownKeys, catchall: merging._def.catchall, shape: () => ({ ...this._def.shape(), ...merging._def.shape() }), typeName: ZodFirstPartyTypeKind.ZodObject }); return merged; } setKey(key, schema) { return this.augment({ [key]: schema }); } catchall(index) { return new ZodObject({ ...this._def, catchall: index }); } pick(mask) { const shape = {}; for (const key of util.objectKeys(mask)) if (mask[key] && this.shape[key]) shape[key] = this.shape[key]; return new ZodObject({ ...this._def, shape: () => shape }); } omit(mask) { const shape = {}; for (const key of util.objectKeys(this.shape)) if (!mask[key]) shape[key] = this.shape[key]; return new ZodObject({ ...this._def, shape: () => shape }); } /** * @deprecated */ deepPartial() { return deepPartialify(this); } partial(mask) { const newShape = {}; for (const key of util.objectKeys(this.shape)) { const fieldSchema = this.shape[key]; if (mask && !mask[key]) newShape[key] = fieldSchema; else newShape[key] = fieldSchema.optional(); } return new ZodObject({ ...this._def, shape: () => newShape }); } required(mask) { const newShape = {}; for (const key of util.objectKeys(this.shape)) if (mask && !mask[key]) newShape[key] = this.shape[key]; else { const fieldSchema = this.shape[key]; let newField = fieldSchema; while (newField instanceof ZodOptional) newField = newField._def.innerType; newShape[key] = newField; } return new ZodObject({ ...this._def, shape: () => newShape }); } keyof() { return createZodEnum(util.objectKeys(this.shape)); } }; ZodObject.create = (shape, params) => { return new ZodObject({ shape: () => shape, unknownKeys: "strip", catchall: ZodNever.create(), typeName: ZodFirstPartyTypeKind.ZodObject, ...processCreateParams(params) }); }; ZodObject.strictCreate = (shape, params) => { return new ZodObject({ shape: () => shape, unknownKeys: "strict", catchall: ZodNever.create(), typeName: ZodFirstPartyTypeKind.ZodObject, ...processCreateParams(params) }); }; ZodObject.lazycreate = (shape, params) => { return new ZodObject({ shape, unknownKeys: "strip", catchall: ZodNever.create(), typeName: ZodFirstPartyTypeKind.ZodObject, ...processCreateParams(params) }); }; var ZodUnion = class extends ZodType { _parse(input) { const { ctx } = this._processInputParams(input); const options = this._def.options; function handleResults(results) { for (const result of results) if (result.result.status === "valid") return result.result; for (const result of results) if (result.result.status === "dirty") { ctx.common.issues.push(...result.ctx.common.issues); return result.result; } const unionErrors = results.map((result) => new ZodError(result.ctx.common.issues)); addIssueToContext(ctx, { code: ZodIssueCode.invalid_union, unionErrors }); return INVALID; } if (ctx.common.async) return Promise.all(options.map(async (option) => { const childCtx = { ...ctx, common: { ...ctx.common, issues: [] }, parent: null }; return { result: await option._parseAsync({ data: ctx.data, path: ctx.path, parent: childCtx }), ctx: childCtx }; })).then(handleResults); else { let dirty = void 0; const issues = []; for (const option of options) { const childCtx = { ...ctx, common: { ...ctx.common, issues: [] }, parent: null }; const result = option._parseSync({ data: ctx.data, path: ctx.path, parent: childCtx }); if (result.status === "valid") return result; else if (result.status === "dirty" && !dirty) dirty = { result, ctx: childCtx }; if (childCtx.common.issues.length) issues.push(childCtx.common.issues); 
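// Issues from each failed option are collected here so that, if no option parses as valid,
// they can be reported together below as a single invalid_union error (a "dirty" result,
// if one was found, takes precedence over reporting the union error).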
} if (dirty) { ctx.common.issues.push(...dirty.ctx.common.issues); return dirty.result; } const unionErrors = issues.map((issues$1) => new ZodError(issues$1)); addIssueToContext(ctx, { code: ZodIssueCode.invalid_union, unionErrors }); return INVALID; } } get options() { return this._def.options; } }; ZodUnion.create = (types, params) => { return new ZodUnion({ options: types, typeName: ZodFirstPartyTypeKind.ZodUnion, ...processCreateParams(params) }); }; const getDiscriminator = (type) => { if (type instanceof ZodLazy) return getDiscriminator(type.schema); else if (type instanceof ZodEffects) return getDiscriminator(type.innerType()); else if (type instanceof ZodLiteral) return [type.value]; else if (type instanceof ZodEnum) return type.options; else if (type instanceof ZodNativeEnum) return util.objectValues(type.enum); else if (type instanceof ZodDefault) return getDiscriminator(type._def.innerType); else if (type instanceof ZodUndefined) return [void 0]; else if (type instanceof ZodNull) return [null]; else if (type instanceof ZodOptional) return [void 0, ...getDiscriminator(type.unwrap())]; else if (type instanceof ZodNullable) return [null, ...getDiscriminator(type.unwrap())]; else if (type instanceof ZodBranded) return getDiscriminator(type.unwrap()); else if (type instanceof ZodReadonly) return getDiscriminator(type.unwrap()); else if (type instanceof ZodCatch) return getDiscriminator(type._def.innerType); else return []; }; var ZodDiscriminatedUnion = class ZodDiscriminatedUnion extends ZodType { _parse(input) { const { ctx } = this._processInputParams(input); if (ctx.parsedType !== ZodParsedType.object) { addIssueToContext(ctx, { code: ZodIssueCode.invalid_type, expected: ZodParsedType.object, received: ctx.parsedType }); return INVALID; } const discriminator = this.discriminator; const discriminatorValue = ctx.data[discriminator]; const option = this.optionsMap.get(discriminatorValue); if (!option) { addIssueToContext(ctx, { code: ZodIssueCode.invalid_union_discriminator, options: Array.from(this.optionsMap.keys()), path: [discriminator] }); return INVALID; } if (ctx.common.async) return option._parseAsync({ data: ctx.data, path: ctx.path, parent: ctx }); else return option._parseSync({ data: ctx.data, path: ctx.path, parent: ctx }); } get discriminator() { return this._def.discriminator; } get options() { return this._def.options; } get optionsMap() { return this._def.optionsMap; } /** * The constructor of the discriminated union schema. Its behaviour is very similar to that of the normal z.union() constructor. * However, it only allows a union of objects, all of which need to share a discriminator property. This property must * have a different value for each object in the union. 
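* @example
* // A usage sketch; the option shapes are illustrative (z.discriminatedUnion, z.object,
* // z.literal and z.number are the public creators exported elsewhere in this bundle):
* const Shape = z.discriminatedUnion("kind", [
*   z.object({ kind: z.literal("circle"), radius: z.number() }),
*   z.object({ kind: z.literal("square"), side: z.number() }),
* ]);
* Shape.parse({ kind: "circle", radius: 1 }); // dispatched to the "circle" option via optionsMap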
* @param discriminator the name of the discriminator property * @param types an array of object schemas * @param params */ static create(discriminator, options, params) { const optionsMap = /* @__PURE__ */ new Map(); for (const type of options) { const discriminatorValues = getDiscriminator(type.shape[discriminator]); if (!discriminatorValues.length) throw new Error(`A discriminator value for key \`${discriminator}\` could not be extracted from all schema options`); for (const value of discriminatorValues) { if (optionsMap.has(value)) throw new Error(`Discriminator property ${String(discriminator)} has duplicate value ${String(value)}`); optionsMap.set(value, type); } } return new ZodDiscriminatedUnion({ typeName: ZodFirstPartyTypeKind.ZodDiscriminatedUnion, discriminator, options, optionsMap, ...processCreateParams(params) }); } }; function mergeValues(a, b) { const aType = getParsedType(a); const bType = getParsedType(b); if (a === b) return { valid: true, data: a }; else if (aType === ZodParsedType.object && bType === ZodParsedType.object) { const bKeys = util.objectKeys(b); const sharedKeys = util.objectKeys(a).filter((key) => bKeys.indexOf(key) !== -1); const newObj = { ...a, ...b }; for (const key of sharedKeys) { const sharedValue = mergeValues(a[key], b[key]); if (!sharedValue.valid) return { valid: false }; newObj[key] = sharedValue.data; } return { valid: true, data: newObj }; } else if (aType === ZodParsedType.array && bType === ZodParsedType.array) { if (a.length !== b.length) return { valid: false }; const newArray = []; for (let index = 0; index < a.length; index++) { const itemA = a[index]; const itemB = b[index]; const sharedValue = mergeValues(itemA, itemB); if (!sharedValue.valid) return { valid: false }; newArray.push(sharedValue.data); } return { valid: true, data: newArray }; } else if (aType === ZodParsedType.date && bType === ZodParsedType.date && +a === +b) return { valid: true, data: a }; else return { valid: false }; } var ZodIntersection = class extends ZodType { _parse(input) { const { status, ctx } = this._processInputParams(input); const handleParsed = (parsedLeft, parsedRight) => { if (isAborted(parsedLeft) || isAborted(parsedRight)) return INVALID; const merged = mergeValues(parsedLeft.value, parsedRight.value); if (!merged.valid) { addIssueToContext(ctx, { code: ZodIssueCode.invalid_intersection_types }); return INVALID; } if (isDirty(parsedLeft) || isDirty(parsedRight)) status.dirty(); return { status: status.value, value: merged.data }; }; if (ctx.common.async) return Promise.all([this._def.left._parseAsync({ data: ctx.data, path: ctx.path, parent: ctx }), this._def.right._parseAsync({ data: ctx.data, path: ctx.path, parent: ctx })]).then(([left, right]) => handleParsed(left, right)); else return handleParsed(this._def.left._parseSync({ data: ctx.data, path: ctx.path, parent: ctx }), this._def.right._parseSync({ data: ctx.data, path: ctx.path, parent: ctx })); } }; ZodIntersection.create = (left, right, params) => { return new ZodIntersection({ left, right, typeName: ZodFirstPartyTypeKind.ZodIntersection, ...processCreateParams(params) }); }; var ZodTuple = class ZodTuple extends ZodType { _parse(input) { const { status, ctx } = this._processInputParams(input); if (ctx.parsedType !== ZodParsedType.array) { addIssueToContext(ctx, { code: ZodIssueCode.invalid_type, expected: ZodParsedType.array, received: ctx.parsedType }); return INVALID; } if (ctx.data.length < this._def.items.length) { addIssueToContext(ctx, { code: ZodIssueCode.too_small, minimum: 
this._def.items.length, inclusive: true, exact: false, type: "array" }); return INVALID; } const rest = this._def.rest; if (!rest && ctx.data.length > this._def.items.length) { addIssueToContext(ctx, { code: ZodIssueCode.too_big, maximum: this._def.items.length, inclusive: true, exact: false, type: "array" }); status.dirty(); } const items = [...ctx.data].map((item, itemIndex) => { const schema = this._def.items[itemIndex] || this._def.rest; if (!schema) return null; return schema._parse(new ParseInputLazyPath(ctx, item, ctx.path, itemIndex)); }).filter((x) => !!x); if (ctx.common.async) return Promise.all(items).then((results) => { return ParseStatus.mergeArray(status, results); }); else return ParseStatus.mergeArray(status, items); } get items() { return this._def.items; } rest(rest) { return new ZodTuple({ ...this._def, rest }); } }; ZodTuple.create = (schemas, params) => { if (!Array.isArray(schemas)) throw new Error("You must pass an array of schemas to z.tuple([ ... ])"); return new ZodTuple({ items: schemas, typeName: ZodFirstPartyTypeKind.ZodTuple, rest: null, ...processCreateParams(params) }); }; var ZodRecord = class ZodRecord extends ZodType { get keySchema() { return this._def.keyType; } get valueSchema() { return this._def.valueType; } _parse(input) { const { status, ctx } = this._processInputParams(input); if (ctx.parsedType !== ZodParsedType.object) { addIssueToContext(ctx, { code: ZodIssueCode.invalid_type, expected: ZodParsedType.object, received: ctx.parsedType }); return INVALID; } const pairs = []; const keyType = this._def.keyType; const valueType = this._def.valueType; for (const key in ctx.data) pairs.push({ key: keyType._parse(new ParseInputLazyPath(ctx, key, ctx.path, key)), value: valueType._parse(new ParseInputLazyPath(ctx, ctx.data[key], ctx.path, key)), alwaysSet: key in ctx.data }); if (ctx.common.async) return ParseStatus.mergeObjectAsync(status, pairs); else return ParseStatus.mergeObjectSync(status, pairs); } get element() { return this._def.valueType; } static create(first, second, third) { if (second instanceof ZodType) return new ZodRecord({ keyType: first, valueType: second, typeName: ZodFirstPartyTypeKind.ZodRecord, ...processCreateParams(third) }); return new ZodRecord({ keyType: ZodString.create(), valueType: first, typeName: ZodFirstPartyTypeKind.ZodRecord, ...processCreateParams(second) }); } }; var ZodMap = class extends ZodType { get keySchema() { return this._def.keyType; } get valueSchema() { return this._def.valueType; } _parse(input) { const { status, ctx } = this._processInputParams(input); if (ctx.parsedType !== ZodParsedType.map) { addIssueToContext(ctx, { code: ZodIssueCode.invalid_type, expected: ZodParsedType.map, received: ctx.parsedType }); return INVALID; } const keyType = this._def.keyType; const valueType = this._def.valueType; const pairs = [...ctx.data.entries()].map(([key, value], index) => { return { key: keyType._parse(new ParseInputLazyPath(ctx, key, ctx.path, [index, "key"])), value: valueType._parse(new ParseInputLazyPath(ctx, value, ctx.path, [index, "value"])) }; }); if (ctx.common.async) { const finalMap = /* @__PURE__ */ new Map(); return Promise.resolve().then(async () => { for (const pair of pairs) { const key = await pair.key; const value = await pair.value; if (key.status === "aborted" || value.status === "aborted") return INVALID; if (key.status === "dirty" || value.status === "dirty") status.dirty(); finalMap.set(key.value, value.value); } return { status: status.value, value: finalMap }; }); } else { const 
finalMap = /* @__PURE__ */ new Map(); for (const pair of pairs) { const key = pair.key; const value = pair.value; if (key.status === "aborted" || value.status === "aborted") return INVALID; if (key.status === "dirty" || value.status === "dirty") status.dirty(); finalMap.set(key.value, value.value); } return { status: status.value, value: finalMap }; } } }; ZodMap.create = (keyType, valueType, params) => { return new ZodMap({ valueType, keyType, typeName: ZodFirstPartyTypeKind.ZodMap, ...processCreateParams(params) }); }; var ZodSet = class ZodSet extends ZodType { _parse(input) { const { status, ctx } = this._processInputParams(input); if (ctx.parsedType !== ZodParsedType.set) { addIssueToContext(ctx, { code: ZodIssueCode.invalid_type, expected: ZodParsedType.set, received: ctx.parsedType }); return INVALID; } const def = this._def; if (def.minSize !== null) { if (ctx.data.size < def.minSize.value) { addIssueToContext(ctx, { code: ZodIssueCode.too_small, minimum: def.minSize.value, type: "set", inclusive: true, exact: false, message: def.minSize.message }); status.dirty(); } } if (def.maxSize !== null) { if (ctx.data.size > def.maxSize.value) { addIssueToContext(ctx, { code: ZodIssueCode.too_big, maximum: def.maxSize.value, type: "set", inclusive: true, exact: false, message: def.maxSize.message }); status.dirty(); } } const valueType = this._def.valueType; function finalizeSet(elements$1) { const parsedSet = /* @__PURE__ */ new Set(); for (const element of elements$1) { if (element.status === "aborted") return INVALID; if (element.status === "dirty") status.dirty(); parsedSet.add(element.value); } return { status: status.value, value: parsedSet }; } const elements = [...ctx.data.values()].map((item, i) => valueType._parse(new ParseInputLazyPath(ctx, item, ctx.path, i))); if (ctx.common.async) return Promise.all(elements).then((elements$1) => finalizeSet(elements$1)); else return finalizeSet(elements); } min(minSize, message) { return new ZodSet({ ...this._def, minSize: { value: minSize, message: errorUtil.toString(message) } }); } max(maxSize, message) { return new ZodSet({ ...this._def, maxSize: { value: maxSize, message: errorUtil.toString(message) } }); } size(size, message) { return this.min(size, message).max(size, message); } nonempty(message) { return this.min(1, message); } }; ZodSet.create = (valueType, params) => { return new ZodSet({ valueType, minSize: null, maxSize: null, typeName: ZodFirstPartyTypeKind.ZodSet, ...processCreateParams(params) }); }; var ZodFunction = class ZodFunction extends ZodType { constructor() { super(...arguments); this.validate = this.implement; } _parse(input) { const { ctx } = this._processInputParams(input); if (ctx.parsedType !== ZodParsedType.function) { addIssueToContext(ctx, { code: ZodIssueCode.invalid_type, expected: ZodParsedType.function, received: ctx.parsedType }); return INVALID; } function makeArgsIssue(args, error) { return makeIssue({ data: args, path: ctx.path, errorMaps: [ ctx.common.contextualErrorMap, ctx.schemaErrorMap, getErrorMap(), en_default ].filter((x) => !!x), issueData: { code: ZodIssueCode.invalid_arguments, argumentsError: error } }); } function makeReturnsIssue(returns, error) { return makeIssue({ data: returns, path: ctx.path, errorMaps: [ ctx.common.contextualErrorMap, ctx.schemaErrorMap, getErrorMap(), en_default ].filter((x) => !!x), issueData: { code: ZodIssueCode.invalid_return_type, returnTypeError: error } }); } const params = { errorMap: ctx.common.contextualErrorMap }; const fn = ctx.data; if 
(this._def.returns instanceof ZodPromise) { const me = this; return OK(async function(...args) { const error = new ZodError([]); const parsedArgs = await me._def.args.parseAsync(args, params).catch((e) => { error.addIssue(makeArgsIssue(args, e)); throw error; }); const result = await Reflect.apply(fn, this, parsedArgs); const parsedReturns = await me._def.returns._def.type.parseAsync(result, params).catch((e) => { error.addIssue(makeReturnsIssue(result, e)); throw error; }); return parsedReturns; }); } else { const me = this; return OK(function(...args) { const parsedArgs = me._def.args.safeParse(args, params); if (!parsedArgs.success) throw new ZodError([makeArgsIssue(args, parsedArgs.error)]); const result = Reflect.apply(fn, this, parsedArgs.data); const parsedReturns = me._def.returns.safeParse(result, params); if (!parsedReturns.success) throw new ZodError([makeReturnsIssue(result, parsedReturns.error)]); return parsedReturns.data; }); } } parameters() { return this._def.args; } returnType() { return this._def.returns; } args(...items) { return new ZodFunction({ ...this._def, args: ZodTuple.create(items).rest(ZodUnknown.create()) }); } returns(returnType) { return new ZodFunction({ ...this._def, returns: returnType }); } implement(func) { const validatedFunc = this.parse(func); return validatedFunc; } strictImplement(func) { const validatedFunc = this.parse(func); return validatedFunc; } static create(args, returns, params) { return new ZodFunction({ args: args ? args : ZodTuple.create([]).rest(ZodUnknown.create()), returns: returns || ZodUnknown.create(), typeName: ZodFirstPartyTypeKind.ZodFunction, ...processCreateParams(params) }); } }; var ZodLazy = class extends ZodType { get schema() { return this._def.getter(); } _parse(input) { const { ctx } = this._processInputParams(input); const lazySchema = this._def.getter(); return lazySchema._parse({ data: ctx.data, path: ctx.path, parent: ctx }); } }; ZodLazy.create = (getter, params) => { return new ZodLazy({ getter, typeName: ZodFirstPartyTypeKind.ZodLazy, ...processCreateParams(params) }); }; var ZodLiteral = class extends ZodType { _parse(input) { if (input.data !== this._def.value) { const ctx = this._getOrReturnCtx(input); addIssueToContext(ctx, { received: ctx.data, code: ZodIssueCode.invalid_literal, expected: this._def.value }); return INVALID; } return { status: "valid", value: input.data }; } get value() { return this._def.value; } }; ZodLiteral.create = (value, params) => { return new ZodLiteral({ value, typeName: ZodFirstPartyTypeKind.ZodLiteral, ...processCreateParams(params) }); }; function createZodEnum(values, params) { return new ZodEnum({ values, typeName: ZodFirstPartyTypeKind.ZodEnum, ...processCreateParams(params) }); } var ZodEnum = class ZodEnum extends ZodType { _parse(input) { if (typeof input.data !== "string") { const ctx = this._getOrReturnCtx(input); const expectedValues = this._def.values; addIssueToContext(ctx, { expected: util.joinValues(expectedValues), received: ctx.parsedType, code: ZodIssueCode.invalid_type }); return INVALID; } if (!this._cache) this._cache = new Set(this._def.values); if (!this._cache.has(input.data)) { const ctx = this._getOrReturnCtx(input); const expectedValues = this._def.values; addIssueToContext(ctx, { received: ctx.data, code: ZodIssueCode.invalid_enum_value, options: expectedValues }); return INVALID; } return OK(input.data); } get options() { return this._def.values; } get enum() { const enumValues = {}; for (const val of this._def.values) enumValues[val] = val; return 
enumValues; } get Values() { const enumValues = {}; for (const val of this._def.values) enumValues[val] = val; return enumValues; } get Enum() { const enumValues = {}; for (const val of this._def.values) enumValues[val] = val; return enumValues; } extract(values, newDef = this._def) { return ZodEnum.create(values, { ...this._def, ...newDef }); } exclude(values, newDef = this._def) { return ZodEnum.create(this.options.filter((opt) => !values.includes(opt)), { ...this._def, ...newDef }); } }; ZodEnum.create = createZodEnum; var ZodNativeEnum = class extends ZodType { _parse(input) { const nativeEnumValues = util.getValidEnumValues(this._def.values); const ctx = this._getOrReturnCtx(input); if (ctx.parsedType !== ZodParsedType.string && ctx.parsedType !== ZodParsedType.number) { const expectedValues = util.objectValues(nativeEnumValues); addIssueToContext(ctx, { expected: util.joinValues(expectedValues), received: ctx.parsedType, code: ZodIssueCode.invalid_type }); return INVALID; } if (!this._cache) this._cache = new Set(util.getValidEnumValues(this._def.values)); if (!this._cache.has(input.data)) { const expectedValues = util.objectValues(nativeEnumValues); addIssueToContext(ctx, { received: ctx.data, code: ZodIssueCode.invalid_enum_value, options: expectedValues }); return INVALID; } return OK(input.data); } get enum() { return this._def.values; } }; ZodNativeEnum.create = (values, params) => { return new ZodNativeEnum({ values, typeName: ZodFirstPartyTypeKind.ZodNativeEnum, ...processCreateParams(params) }); }; var ZodPromise = class extends ZodType { unwrap() { return this._def.type; } _parse(input) { const { ctx } = this._processInputParams(input); if (ctx.parsedType !== ZodParsedType.promise && ctx.common.async === false) { addIssueToContext(ctx, { code: ZodIssueCode.invalid_type, expected: ZodParsedType.promise, received: ctx.parsedType }); return INVALID; } const promisified = ctx.parsedType === ZodParsedType.promise ? ctx.data : Promise.resolve(ctx.data); return OK(promisified.then((data) => { return this._def.type.parseAsync(data, { path: ctx.path, errorMap: ctx.common.contextualErrorMap }); })); } }; ZodPromise.create = (schema, params) => { return new ZodPromise({ type: schema, typeName: ZodFirstPartyTypeKind.ZodPromise, ...processCreateParams(params) }); }; var ZodEffects = class extends ZodType { innerType() { return this._def.schema; } sourceType() { return this._def.schema._def.typeName === ZodFirstPartyTypeKind.ZodEffects ? 
this._def.schema.sourceType() : this._def.schema; } _parse(input) { const { status, ctx } = this._processInputParams(input); const effect = this._def.effect || null; const checkCtx = { addIssue: (arg) => { addIssueToContext(ctx, arg); if (arg.fatal) status.abort(); else status.dirty(); }, get path() { return ctx.path; } }; checkCtx.addIssue = checkCtx.addIssue.bind(checkCtx); if (effect.type === "preprocess") { const processed = effect.transform(ctx.data, checkCtx); if (ctx.common.async) return Promise.resolve(processed).then(async (processed$1) => { if (status.value === "aborted") return INVALID; const result = await this._def.schema._parseAsync({ data: processed$1, path: ctx.path, parent: ctx }); if (result.status === "aborted") return INVALID; if (result.status === "dirty") return DIRTY(result.value); if (status.value === "dirty") return DIRTY(result.value); return result; }); else { if (status.value === "aborted") return INVALID; const result = this._def.schema._parseSync({ data: processed, path: ctx.path, parent: ctx }); if (result.status === "aborted") return INVALID; if (result.status === "dirty") return DIRTY(result.value); if (status.value === "dirty") return DIRTY(result.value); return result; } } if (effect.type === "refinement") { const executeRefinement = (acc) => { const result = effect.refinement(acc, checkCtx); if (ctx.common.async) return Promise.resolve(result); if (result instanceof Promise) throw new Error("Async refinement encountered during synchronous parse operation. Use .parseAsync instead."); return acc; }; if (ctx.common.async === false) { const inner = this._def.schema._parseSync({ data: ctx.data, path: ctx.path, parent: ctx }); if (inner.status === "aborted") return INVALID; if (inner.status === "dirty") status.dirty(); executeRefinement(inner.value); return { status: status.value, value: inner.value }; } else return this._def.schema._parseAsync({ data: ctx.data, path: ctx.path, parent: ctx }).then((inner) => { if (inner.status === "aborted") return INVALID; if (inner.status === "dirty") status.dirty(); return executeRefinement(inner.value).then(() => { return { status: status.value, value: inner.value }; }); }); } if (effect.type === "transform") if (ctx.common.async === false) { const base = this._def.schema._parseSync({ data: ctx.data, path: ctx.path, parent: ctx }); if (!isValid(base)) return INVALID; const result = effect.transform(base.value, checkCtx); if (result instanceof Promise) throw new Error(`Asynchronous transform encountered during synchronous parse operation. 
Use .parseAsync instead.`); return { status: status.value, value: result }; } else return this._def.schema._parseAsync({ data: ctx.data, path: ctx.path, parent: ctx }).then((base) => { if (!isValid(base)) return INVALID; return Promise.resolve(effect.transform(base.value, checkCtx)).then((result) => ({ status: status.value, value: result })); }); util.assertNever(effect); } }; ZodEffects.create = (schema, effect, params) => { return new ZodEffects({ schema, typeName: ZodFirstPartyTypeKind.ZodEffects, effect, ...processCreateParams(params) }); }; ZodEffects.createWithPreprocess = (preprocess, schema, params) => { return new ZodEffects({ schema, effect: { type: "preprocess", transform: preprocess }, typeName: ZodFirstPartyTypeKind.ZodEffects, ...processCreateParams(params) }); }; var ZodOptional = class extends ZodType { _parse(input) { const parsedType = this._getType(input); if (parsedType === ZodParsedType.undefined) return OK(void 0); return this._def.innerType._parse(input); } unwrap() { return this._def.innerType; } }; ZodOptional.create = (type, params) => { return new ZodOptional({ innerType: type, typeName: ZodFirstPartyTypeKind.ZodOptional, ...processCreateParams(params) }); }; var ZodNullable = class extends ZodType { _parse(input) { const parsedType = this._getType(input); if (parsedType === ZodParsedType.null) return OK(null); return this._def.innerType._parse(input); } unwrap() { return this._def.innerType; } }; ZodNullable.create = (type, params) => { return new ZodNullable({ innerType: type, typeName: ZodFirstPartyTypeKind.ZodNullable, ...processCreateParams(params) }); }; var ZodDefault = class extends ZodType { _parse(input) { const { ctx } = this._processInputParams(input); let data = ctx.data; if (ctx.parsedType === ZodParsedType.undefined) data = this._def.defaultValue(); return this._def.innerType._parse({ data, path: ctx.path, parent: ctx }); } removeDefault() { return this._def.innerType; } }; ZodDefault.create = (type, params) => { return new ZodDefault({ innerType: type, typeName: ZodFirstPartyTypeKind.ZodDefault, defaultValue: typeof params.default === "function" ? params.default : () => params.default, ...processCreateParams(params) }); }; var ZodCatch = class extends ZodType { _parse(input) { const { ctx } = this._processInputParams(input); const newCtx = { ...ctx, common: { ...ctx.common, issues: [] } }; const result = this._def.innerType._parse({ data: newCtx.data, path: newCtx.path, parent: { ...newCtx } }); if (isAsync(result)) return result.then((result$1) => { return { status: "valid", value: result$1.status === "valid" ? result$1.value : this._def.catchValue({ get error() { return new ZodError(newCtx.common.issues); }, input: newCtx.data }) }; }); else return { status: "valid", value: result.status === "valid" ? result.value : this._def.catchValue({ get error() { return new ZodError(newCtx.common.issues); }, input: newCtx.data }) }; } removeCatch() { return this._def.innerType; } }; ZodCatch.create = (type, params) => { return new ZodCatch({ innerType: type, typeName: ZodFirstPartyTypeKind.ZodCatch, catchValue: typeof params.catch === "function" ? 
params.catch : () => params.catch, ...processCreateParams(params) }); }; var ZodNaN = class extends ZodType { _parse(input) { const parsedType = this._getType(input); if (parsedType !== ZodParsedType.nan) { const ctx = this._getOrReturnCtx(input); addIssueToContext(ctx, { code: ZodIssueCode.invalid_type, expected: ZodParsedType.nan, received: ctx.parsedType }); return INVALID; } return { status: "valid", value: input.data }; } }; ZodNaN.create = (params) => { return new ZodNaN({ typeName: ZodFirstPartyTypeKind.ZodNaN, ...processCreateParams(params) }); }; const BRAND = Symbol("zod_brand"); var ZodBranded = class extends ZodType { _parse(input) { const { ctx } = this._processInputParams(input); const data = ctx.data; return this._def.type._parse({ data, path: ctx.path, parent: ctx }); } unwrap() { return this._def.type; } }; var ZodPipeline = class ZodPipeline extends ZodType { _parse(input) { const { status, ctx } = this._processInputParams(input); if (ctx.common.async) { const handleAsync = async () => { const inResult = await this._def.in._parseAsync({ data: ctx.data, path: ctx.path, parent: ctx }); if (inResult.status === "aborted") return INVALID; if (inResult.status === "dirty") { status.dirty(); return DIRTY(inResult.value); } else return this._def.out._parseAsync({ data: inResult.value, path: ctx.path, parent: ctx }); }; return handleAsync(); } else { const inResult = this._def.in._parseSync({ data: ctx.data, path: ctx.path, parent: ctx }); if (inResult.status === "aborted") return INVALID; if (inResult.status === "dirty") { status.dirty(); return { status: "dirty", value: inResult.value }; } else return this._def.out._parseSync({ data: inResult.value, path: ctx.path, parent: ctx }); } } static create(a, b) { return new ZodPipeline({ in: a, out: b, typeName: ZodFirstPartyTypeKind.ZodPipeline }); } }; var ZodReadonly = class extends ZodType { _parse(input) { const result = this._def.innerType._parse(input); const freeze = (data) => { if (isValid(data)) data.value = Object.freeze(data.value); return data; }; return isAsync(result) ? result.then((data) => freeze(data)) : freeze(result); } unwrap() { return this._def.innerType; } }; ZodReadonly.create = (type, params) => { return new ZodReadonly({ innerType: type, typeName: ZodFirstPartyTypeKind.ZodReadonly, ...processCreateParams(params) }); }; function cleanParams(params, data) { const p = typeof params === "function" ? params(data) : typeof params === "string" ? { message: params } : params; const p2 = typeof p === "string" ? { message: p } : p; return p2; } function custom(check, _params = {}, fatal) { if (check) return ZodAny.create().superRefine((data, ctx) => { const r = check(data); if (r instanceof Promise) return r.then((r$1) => { if (!r$1) { const params = cleanParams(_params, data); const _fatal = params.fatal ?? fatal ?? true; ctx.addIssue({ code: "custom", ...params, fatal: _fatal }); } }); if (!r) { const params = cleanParams(_params, data); const _fatal = params.fatal ?? fatal ?? 
true; ctx.addIssue({ code: "custom", ...params, fatal: _fatal }); } return; }); return ZodAny.create(); } const late = { object: ZodObject.lazycreate }; var ZodFirstPartyTypeKind; (function(ZodFirstPartyTypeKind$1) { ZodFirstPartyTypeKind$1["ZodString"] = "ZodString"; ZodFirstPartyTypeKind$1["ZodNumber"] = "ZodNumber"; ZodFirstPartyTypeKind$1["ZodNaN"] = "ZodNaN"; ZodFirstPartyTypeKind$1["ZodBigInt"] = "ZodBigInt"; ZodFirstPartyTypeKind$1["ZodBoolean"] = "ZodBoolean"; ZodFirstPartyTypeKind$1["ZodDate"] = "ZodDate"; ZodFirstPartyTypeKind$1["ZodSymbol"] = "ZodSymbol"; ZodFirstPartyTypeKind$1["ZodUndefined"] = "ZodUndefined"; ZodFirstPartyTypeKind$1["ZodNull"] = "ZodNull"; ZodFirstPartyTypeKind$1["ZodAny"] = "ZodAny"; ZodFirstPartyTypeKind$1["ZodUnknown"] = "ZodUnknown"; ZodFirstPartyTypeKind$1["ZodNever"] = "ZodNever"; ZodFirstPartyTypeKind$1["ZodVoid"] = "ZodVoid"; ZodFirstPartyTypeKind$1["ZodArray"] = "ZodArray"; ZodFirstPartyTypeKind$1["ZodObject"] = "ZodObject"; ZodFirstPartyTypeKind$1["ZodUnion"] = "ZodUnion"; ZodFirstPartyTypeKind$1["ZodDiscriminatedUnion"] = "ZodDiscriminatedUnion"; ZodFirstPartyTypeKind$1["ZodIntersection"] = "ZodIntersection"; ZodFirstPartyTypeKind$1["ZodTuple"] = "ZodTuple"; ZodFirstPartyTypeKind$1["ZodRecord"] = "ZodRecord"; ZodFirstPartyTypeKind$1["ZodMap"] = "ZodMap"; ZodFirstPartyTypeKind$1["ZodSet"] = "ZodSet"; ZodFirstPartyTypeKind$1["ZodFunction"] = "ZodFunction"; ZodFirstPartyTypeKind$1["ZodLazy"] = "ZodLazy"; ZodFirstPartyTypeKind$1["ZodLiteral"] = "ZodLiteral"; ZodFirstPartyTypeKind$1["ZodEnum"] = "ZodEnum"; ZodFirstPartyTypeKind$1["ZodEffects"] = "ZodEffects"; ZodFirstPartyTypeKind$1["ZodNativeEnum"] = "ZodNativeEnum"; ZodFirstPartyTypeKind$1["ZodOptional"] = "ZodOptional"; ZodFirstPartyTypeKind$1["ZodNullable"] = "ZodNullable"; ZodFirstPartyTypeKind$1["ZodDefault"] = "ZodDefault"; ZodFirstPartyTypeKind$1["ZodCatch"] = "ZodCatch"; ZodFirstPartyTypeKind$1["ZodPromise"] = "ZodPromise"; ZodFirstPartyTypeKind$1["ZodBranded"] = "ZodBranded"; ZodFirstPartyTypeKind$1["ZodPipeline"] = "ZodPipeline"; ZodFirstPartyTypeKind$1["ZodReadonly"] = "ZodReadonly"; })(ZodFirstPartyTypeKind || (ZodFirstPartyTypeKind = {})); const stringType = ZodString.create; const numberType = ZodNumber.create; const nanType = ZodNaN.create; const bigIntType = ZodBigInt.create; const booleanType = ZodBoolean.create; const dateType = ZodDate.create; const symbolType = ZodSymbol.create; const undefinedType = ZodUndefined.create; const nullType = ZodNull.create; const anyType = ZodAny.create; const unknownType = ZodUnknown.create; const neverType = ZodNever.create; const voidType = ZodVoid.create; const arrayType = ZodArray.create; const objectType = ZodObject.create; const strictObjectType = ZodObject.strictCreate; const unionType = ZodUnion.create; const discriminatedUnionType = ZodDiscriminatedUnion.create; const intersectionType = ZodIntersection.create; const tupleType = ZodTuple.create; const recordType = ZodRecord.create; const mapType = ZodMap.create; const setType = ZodSet.create; const functionType = ZodFunction.create; const lazyType = ZodLazy.create; const literalType = ZodLiteral.create; const enumType = ZodEnum.create; const nativeEnumType = ZodNativeEnum.create; const promiseType = ZodPromise.create; const effectsType = ZodEffects.create; const optionalType = ZodOptional.create; const nullableType = ZodNullable.create; const preprocessType = ZodEffects.createWithPreprocess; const pipelineType = ZodPipeline.create; //#endregion //#region 
node_modules/.pnpm/@langchain+core@0.3.57_openai@5.3.0_ws@8.18.2_zod@3.25.57_/node_modules/@langchain/core/dist/utils/fast-json-patch/src/helpers.js /*! * https://github.com/Starcounter-Jack/JSON-Patch * (c) 2017-2022 Joachim Wester * MIT licensed */ const _hasOwnProperty = Object.prototype.hasOwnProperty; function hasOwnProperty(obj, key) { return _hasOwnProperty.call(obj, key); } function _objectKeys(obj) { if (Array.isArray(obj)) { const keys$1 = new Array(obj.length); for (let k = 0; k < keys$1.length; k++) keys$1[k] = "" + k; return keys$1; } if (Object.keys) return Object.keys(obj); let keys = []; for (let i in obj) if (hasOwnProperty(obj, i)) keys.push(i); return keys; } /** * Deeply clone the object. * https://jsperf.com/deep-copy-vs-json-stringify-json-parse/25 (recursiveDeepCopy) * @param {any} obj value to clone * @return {any} cloned obj */ function _deepClone(obj) { switch (typeof obj) { case "object": return JSON.parse(JSON.stringify(obj)); case "undefined": return null; default: return obj; } } function isInteger(str) { let i = 0; const len = str.length; let charCode; while (i < len) { charCode = str.charCodeAt(i); if (charCode >= 48 && charCode <= 57) { i++; continue; } return false; } return true; } /** * Escapes a json pointer path * @param path The raw pointer * @return the Escaped path */ function escapePathComponent(path) { if (path.indexOf("/") === -1 && path.indexOf("~") === -1) return path; return path.replace(/~/g, "~0").replace(/\//g, "~1"); } /** * Unescapes a json pointer path * @param path The escaped pointer * @return The unescaped path */ function unescapePathComponent(path) { return path.replace(/~1/g, "/").replace(/~0/g, "~"); } /** * Recursively checks whether an object has any undefined values inside. */ function hasUndefined(obj) { if (obj === void 0) return true; if (obj) { if (Array.isArray(obj)) { for (let i$1 = 0, len = obj.length; i$1 < len; i$1++) if (hasUndefined(obj[i$1])) return true; } else if (typeof obj === "object") { const objKeys = _objectKeys(obj); const objKeysLength = objKeys.length; for (var i = 0; i < objKeysLength; i++) if (hasUndefined(obj[objKeys[i]])) return true; } } return false; } function patchErrorMessageFormatter(message, args) { const messageParts = [message]; for (const key in args) { const value = typeof args[key] === "object" ? 
JSON.stringify(args[key], null, 2) : args[key]; if (typeof value !== "undefined") messageParts.push(`${key}: ${value}`); } return messageParts.join("\n"); } var PatchError = class extends Error { constructor(message, name, index, operation, tree) { super(patchErrorMessageFormatter(message, { name, index, operation, tree })); Object.defineProperty(this, "name", { enumerable: true, configurable: true, writable: true, value: name }); Object.defineProperty(this, "index", { enumerable: true, configurable: true, writable: true, value: index }); Object.defineProperty(this, "operation", { enumerable: true, configurable: true, writable: true, value: operation }); Object.defineProperty(this, "tree", { enumerable: true, configurable: true, writable: true, value: tree }); Object.setPrototypeOf(this, new.target.prototype); this.message = patchErrorMessageFormatter(message, { name, index, operation, tree }); } }; //#endregion //#region node_modules/.pnpm/@langchain+core@0.3.57_openai@5.3.0_ws@8.18.2_zod@3.25.57_/node_modules/@langchain/core/dist/utils/fast-json-patch/src/core.js var core_exports = {}; __export(core_exports, { JsonPatchError: () => JsonPatchError, _areEquals: () => _areEquals, applyOperation: () => applyOperation, applyPatch: () => applyPatch, applyReducer: () => applyReducer, deepClone: () => deepClone, getValueByPointer: () => getValueByPointer, validate: () => validate, validator: () => validator }); const JsonPatchError = PatchError; const deepClone = _deepClone; const objOps = { add: function(obj, key, document) { obj[key] = this.value; return { newDocument: document }; }, remove: function(obj, key, document) { var removed = obj[key]; delete obj[key]; return { newDocument: document, removed }; }, replace: function(obj, key, document) { var removed = obj[key]; obj[key] = this.value; return { newDocument: document, removed }; }, move: function(obj, key, document) { let removed = getValueByPointer(document, this.path); if (removed) removed = _deepClone(removed); const originalValue = applyOperation(document, { op: "remove", path: this.from }).removed; applyOperation(document, { op: "add", path: this.path, value: originalValue }); return { newDocument: document, removed }; }, copy: function(obj, key, document) { const valueToCopy = getValueByPointer(document, this.from); applyOperation(document, { op: "add", path: this.path, value: _deepClone(valueToCopy) }); return { newDocument: document }; }, test: function(obj, key, document) { return { newDocument: document, test: _areEquals(obj[key], this.value) }; }, _get: function(obj, key, document) { this.value = obj[key]; return { newDocument: document }; } }; var arrOps = { add: function(arr$1, i, document) { if (isInteger(i)) arr$1.splice(i, 0, this.value); else arr$1[i] = this.value; return { newDocument: document, index: i }; }, remove: function(arr$1, i, document) { var removedList = arr$1.splice(i, 1); return { newDocument: document, removed: removedList[0] }; }, replace: function(arr$1, i, document) { var removed = arr$1[i]; arr$1[i] = this.value; return { newDocument: document, removed }; }, move: objOps.move, copy: objOps.copy, test: objOps.test, _get: objOps._get }; /** * Retrieves a value from a JSON document by a JSON pointer. * Returns the value. 
* * @param document The document to get the value from * @param pointer an escaped JSON pointer * @return The retrieved value */ function getValueByPointer(document, pointer) { if (pointer == "") return document; var getOriginalDestination = { op: "_get", path: pointer }; applyOperation(document, getOriginalDestination); return getOriginalDestination.value; } /** * Apply a single JSON Patch Operation on a JSON document. * Returns the {newDocument, result} of the operation. * It modifies the `document` and `operation` objects - it gets the values by reference. * If you would like to avoid touching your values, clone them: * `jsonpatch.applyOperation(document, jsonpatch._deepClone(operation))`. * * @param document The document to patch * @param operation The operation to apply * @param validateOperation `false` is without validation, `true` to use default jsonpatch's validation, or you can pass a `validateOperation` callback to be used for validation. * @param mutateDocument Whether to mutate the original document or clone it before applying * @param banPrototypeModifications Whether to ban modifications to `__proto__`, defaults to `true`. * @return `{newDocument, result}` after the operation */ function applyOperation(document, operation, validateOperation = false, mutateDocument = true, banPrototypeModifications = true, index = 0) { if (validateOperation) if (typeof validateOperation == "function") validateOperation(operation, 0, document, operation.path); else validator(operation, 0); if (operation.path === "") { let returnValue = { newDocument: document }; if (operation.op === "add") { returnValue.newDocument = operation.value; return returnValue; } else if (operation.op === "replace") { returnValue.newDocument = operation.value; returnValue.removed = document; return returnValue; } else if (operation.op === "move" || operation.op === "copy") { returnValue.newDocument = getValueByPointer(document, operation.from); if (operation.op === "move") returnValue.removed = document; return returnValue; } else if (operation.op === "test") { returnValue.test = _areEquals(document, operation.value); if (returnValue.test === false) throw new JsonPatchError("Test operation failed", "TEST_OPERATION_FAILED", index, operation, document); returnValue.newDocument = document; return returnValue; } else if (operation.op === "remove") { returnValue.removed = document; returnValue.newDocument = null; return returnValue; } else if (operation.op === "_get") { operation.value = document; return returnValue; } else if (validateOperation) throw new JsonPatchError("Operation `op` property is not one of operations defined in RFC-6902", "OPERATION_OP_INVALID", index, operation, document); else return returnValue; } else { if (!mutateDocument) document = _deepClone(document); const path = operation.path || ""; const keys = path.split("/"); let obj = document; let t$5 = 1; let len = keys.length; let existingPathFragment = void 0; let key; let validateFunction; if (typeof validateOperation == "function") validateFunction = validateOperation; else validateFunction = validator; while (true) { key = keys[t$5]; if (key && key.indexOf("~") != -1) key = unescapePathComponent(key); if (banPrototypeModifications && (key == "__proto__" || key == "prototype" && t$5 > 0 && keys[t$5 - 1] == "constructor")) throw new TypeError("JSON-Patch: modifying `__proto__` or `constructor/prototype` prop is banned for security reasons, if this was on purpose, please set `banPrototypeModifications` flag false and pass it to this function. 
More info in fast-json-patch README"); if (validateOperation) { if (existingPathFragment === void 0) { if (obj[key] === void 0) existingPathFragment = keys.slice(0, t$5).join("/"); else if (t$5 == len - 1) existingPathFragment = operation.path; if (existingPathFragment !== void 0) validateFunction(operation, 0, document, existingPathFragment); } } t$5++; if (Array.isArray(obj)) { if (key === "-") key = obj.length; else if (validateOperation && !isInteger(key)) throw new JsonPatchError("Expected an unsigned base-10 integer value, making the new referenced value the array element with the zero-based index", "OPERATION_PATH_ILLEGAL_ARRAY_INDEX", index, operation, document); else if (isInteger(key)) key = ~~key; if (t$5 >= len) { if (validateOperation && operation.op === "add" && key > obj.length) throw new JsonPatchError("The specified index MUST NOT be greater than the number of elements in the array", "OPERATION_VALUE_OUT_OF_BOUNDS", index, operation, document); const returnValue = arrOps[operation.op].call(operation, obj, key, document); if (returnValue.test === false) throw new JsonPatchError("Test operation failed", "TEST_OPERATION_FAILED", index, operation, document); return returnValue; } } else if (t$5 >= len) { const returnValue = objOps[operation.op].call(operation, obj, key, document); if (returnValue.test === false) throw new JsonPatchError("Test operation failed", "TEST_OPERATION_FAILED", index, operation, document); return returnValue; } obj = obj[key]; if (validateOperation && t$5 < len && (!obj || typeof obj !== "object")) throw new JsonPatchError("Cannot perform operation at the desired path", "OPERATION_PATH_UNRESOLVABLE", index, operation, document); } } } /** * Apply a full JSON Patch array on a JSON document. * Returns the {newDocument, result} of the patch. * It modifies the `document` object and `patch` - it gets the values by reference. * If you would like to avoid touching your values, clone them: * `jsonpatch.applyPatch(document, jsonpatch._deepClone(patch))`. * * @param document The document to patch * @param patch The patch to apply * @param validateOperation `false` is without validation, `true` to use default jsonpatch's validation, or you can pass a `validateOperation` callback to be used for validation. * @param mutateDocument Whether to mutate the original document or clone it before applying * @param banPrototypeModifications Whether to ban modifications to `__proto__`, defaults to `true`. * @return An array of `{newDocument, result}` after the patch */ function applyPatch(document, patch$2, validateOperation, mutateDocument = true, banPrototypeModifications = true) { if (validateOperation) { if (!Array.isArray(patch$2)) throw new JsonPatchError("Patch sequence must be an array", "SEQUENCE_NOT_AN_ARRAY"); } if (!mutateDocument) document = _deepClone(document); const results = new Array(patch$2.length); for (let i = 0, length = patch$2.length; i < length; i++) { results[i] = applyOperation(document, patch$2[i], validateOperation, true, banPrototypeModifications, i); document = results[i].newDocument; } results.newDocument = document; return results; } /** * Apply a single JSON Patch Operation on a JSON document. * Returns the updated document. * Suitable as a reducer. 
* * @param document The document to patch * @param operation The operation to apply * @return The updated document */ function applyReducer(document, operation, index) { const operationResult = applyOperation(document, operation); if (operationResult.test === false) throw new JsonPatchError("Test operation failed", "TEST_OPERATION_FAILED", index, operation, document); return operationResult.newDocument; } /** * Validates a single operation. Called from `jsonpatch.validate`. Throws `JsonPatchError` in case of an error. * @param {object} operation - operation object (patch) * @param {number} index - index of operation in the sequence * @param {object} [document] - object where the operation is supposed to be applied * @param {string} [existingPathFragment] - comes along with `document` */ function validator(operation, index, document, existingPathFragment) { if (typeof operation !== "object" || operation === null || Array.isArray(operation)) throw new JsonPatchError("Operation is not an object", "OPERATION_NOT_AN_OBJECT", index, operation, document); else if (!objOps[operation.op]) throw new JsonPatchError("Operation `op` property is not one of operations defined in RFC-6902", "OPERATION_OP_INVALID", index, operation, document); else if (typeof operation.path !== "string") throw new JsonPatchError("Operation `path` property is not a string", "OPERATION_PATH_INVALID", index, operation, document); else if (operation.path.indexOf("/") !== 0 && operation.path.length > 0) throw new JsonPatchError("Operation `path` property must start with \"/\"", "OPERATION_PATH_INVALID", index, operation, document); else if ((operation.op === "move" || operation.op === "copy") && typeof operation.from !== "string") throw new JsonPatchError("Operation `from` property is not present (applicable in `move` and `copy` operations)", "OPERATION_FROM_REQUIRED", index, operation, document); else if ((operation.op === "add" || operation.op === "replace" || operation.op === "test") && operation.value === void 0) throw new JsonPatchError("Operation `value` property is not present (applicable in `add`, `replace` and `test` operations)", "OPERATION_VALUE_REQUIRED", index, operation, document); else if ((operation.op === "add" || operation.op === "replace" || operation.op === "test") && hasUndefined(operation.value)) throw new JsonPatchError("Operation `value` property is not present (applicable in `add`, `replace` and `test` operations)", "OPERATION_VALUE_CANNOT_CONTAIN_UNDEFINED", index, operation, document); else if (document) { if (operation.op == "add") { var pathLen = operation.path.split("/").length; var existingPathLen = existingPathFragment.split("/").length; if (pathLen !== existingPathLen + 1 && pathLen !== existingPathLen) throw new JsonPatchError("Cannot perform an `add` operation at the desired path", "OPERATION_PATH_CANNOT_ADD", index, operation, document); } else if (operation.op === "replace" || operation.op === "remove" || operation.op === "_get") { if (operation.path !== existingPathFragment) throw new JsonPatchError("Cannot perform the operation at a path that does not exist", "OPERATION_PATH_UNRESOLVABLE", index, operation, document); } else if (operation.op === "move" || operation.op === "copy") { var existingValue = { op: "_get", path: operation.from, value: void 0 }; var error = validate([existingValue], document); if (error && error.name === "OPERATION_PATH_UNRESOLVABLE") throw new JsonPatchError("Cannot perform the operation from a path that does not exist", "OPERATION_FROM_UNRESOLVABLE", index, 
operation, document); } } } /** * Validates a sequence of operations. If `document` parameter is provided, the sequence is additionally validated against the object document. * If error is encountered, returns a JsonPatchError object * @param sequence * @param document * @returns {JsonPatchError|undefined} */ function validate(sequence, document, externalValidator) { try { if (!Array.isArray(sequence)) throw new JsonPatchError("Patch sequence must be an array", "SEQUENCE_NOT_AN_ARRAY"); if (document) applyPatch(_deepClone(document), _deepClone(sequence), externalValidator || true); else { externalValidator = externalValidator || validator; for (var i = 0; i < sequence.length; i++) externalValidator(sequence[i], i, document, void 0); } } catch (e) { if (e instanceof JsonPatchError) return e; else throw e; } } function _areEquals(a, b) { if (a === b) return true; if (a && b && typeof a == "object" && typeof b == "object") { var arrA = Array.isArray(a), arrB = Array.isArray(b), i, length, key; if (arrA && arrB) { length = a.length; if (length != b.length) return false; for (i = length; i-- !== 0;) if (!_areEquals(a[i], b[i])) return false; return true; } if (arrA != arrB) return false; var keys = Object.keys(a); length = keys.length; if (length !== Object.keys(b).length) return false; for (i = length; i-- !== 0;) if (!b.hasOwnProperty(keys[i])) return false; for (i = length; i-- !== 0;) { key = keys[i]; if (!_areEquals(a[key], b[key])) return false; } return true; } return a !== a && b !== b; } //#endregion //#region node_modules/.pnpm/@langchain+core@0.3.57_openai@5.3.0_ws@8.18.2_zod@3.25.57_/node_modules/@langchain/core/dist/utils/fast-json-patch/src/duplex.js function _generate(mirror, obj, patches, path, invertible) { if (obj === mirror) return; if (typeof obj.toJSON === "function") obj = obj.toJSON(); var newKeys = _objectKeys(obj); var oldKeys = _objectKeys(mirror); var changed = false; var deleted = false; for (var t$5 = oldKeys.length - 1; t$5 >= 0; t$5--) { var key = oldKeys[t$5]; var oldVal = mirror[key]; if (hasOwnProperty(obj, key) && !(obj[key] === void 0 && oldVal !== void 0 && Array.isArray(obj) === false)) { var newVal = obj[key]; if (typeof oldVal == "object" && oldVal != null && typeof newVal == "object" && newVal != null && Array.isArray(oldVal) === Array.isArray(newVal)) _generate(oldVal, newVal, patches, path + "/" + escapePathComponent(key), invertible); else if (oldVal !== newVal) { changed = true; if (invertible) patches.push({ op: "test", path: path + "/" + escapePathComponent(key), value: _deepClone(oldVal) }); patches.push({ op: "replace", path: path + "/" + escapePathComponent(key), value: _deepClone(newVal) }); } } else if (Array.isArray(mirror) === Array.isArray(obj)) { if (invertible) patches.push({ op: "test", path: path + "/" + escapePathComponent(key), value: _deepClone(oldVal) }); patches.push({ op: "remove", path: path + "/" + escapePathComponent(key) }); deleted = true; } else { if (invertible) patches.push({ op: "test", path, value: mirror }); patches.push({ op: "replace", path, value: obj }); changed = true; } } if (!deleted && newKeys.length == oldKeys.length) return; for (var t$5 = 0; t$5 < newKeys.length; t$5++) { var key = newKeys[t$5]; if (!hasOwnProperty(mirror, key) && obj[key] !== void 0) patches.push({ op: "add", path: path + "/" + escapePathComponent(key), value: _deepClone(obj[key]) }); } } /** * Create an array of patches from the differences in two objects */ function compare(tree1, tree2, invertible = false) { var patches = []; 
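/*
 * Hedged usage sketch, not part of the original bundle: `compare` walks both trees via
 * `_generate` below and returns an RFC 6902 patch that turns `tree1` into `tree2`.
 * `before`/`after`/`updated` are illustrative names; only `compare`, `applyPatch` and
 * `_deepClone` defined above are assumed. Roughly (op order may vary):
 *
 *   const before = { name: "a", tags: ["x"] };
 *   const after = { name: "b", tags: ["x", "y"] };
 *   const patch = compare(before, after);
 *   // e.g. [{ op: "add", path: "/tags/1", value: "y" },
 *   //       { op: "replace", path: "/name", value: "b" }]
 *   const updated = applyPatch(_deepClone(before), patch).newDocument;
 *
 * With `invertible: true`, each change is preceded by a `test` op capturing the old value,
 * so the patch can be validated (or inverted) against the original document.
 */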
_generate(tree1, tree2, patches, "", invertible); return patches; } //#endregion //#region node_modules/.pnpm/@langchain+core@0.3.57_openai@5.3.0_ws@8.18.2_zod@3.25.57_/node_modules/@langchain/core/dist/utils/fast-json-patch/index.js var fast_json_patch_default = { ...core_exports, JsonPatchError: PatchError, deepClone: _deepClone, escapePathComponent, unescapePathComponent }; //#endregion //#region node_modules/.pnpm/@langchain+core@0.3.57_openai@5.3.0_ws@8.18.2_zod@3.25.57_/node_modules/@langchain/core/dist/runnables/config.js const DEFAULT_RECURSION_LIMIT = 25; async function getCallbackManagerForConfig(config) { return CallbackManager._configureSync(config?.callbacks, void 0, config?.tags, void 0, config?.metadata); } function mergeConfigs(...configs) { const copy = {}; for (const options of configs.filter((c) => !!c)) for (const key of Object.keys(options)) if (key === "metadata") copy[key] = { ...copy[key], ...options[key] }; else if (key === "tags") { const baseKeys = copy[key] ?? []; copy[key] = [...new Set(baseKeys.concat(options[key] ?? []))]; } else if (key === "configurable") copy[key] = { ...copy[key], ...options[key] }; else if (key === "timeout") { if (copy.timeout === void 0) copy.timeout = options.timeout; else if (options.timeout !== void 0) copy.timeout = Math.min(copy.timeout, options.timeout); } else if (key === "signal") { if (copy.signal === void 0) copy.signal = options.signal; else if (options.signal !== void 0) if ("any" in AbortSignal) copy.signal = AbortSignal.any([copy.signal, options.signal]); else copy.signal = options.signal; } else if (key === "callbacks") { const baseCallbacks = copy.callbacks; const providedCallbacks = options.callbacks; if (Array.isArray(providedCallbacks)) if (!baseCallbacks) copy.callbacks = providedCallbacks; else if (Array.isArray(baseCallbacks)) copy.callbacks = baseCallbacks.concat(providedCallbacks); else { const manager = baseCallbacks.copy(); for (const callback of providedCallbacks) manager.addHandler(ensureHandler(callback), true); copy.callbacks = manager; } else if (providedCallbacks) if (!baseCallbacks) copy.callbacks = providedCallbacks; else if (Array.isArray(baseCallbacks)) { const manager = providedCallbacks.copy(); for (const callback of baseCallbacks) manager.addHandler(ensureHandler(callback), true); copy.callbacks = manager; } else copy.callbacks = new CallbackManager(providedCallbacks._parentRunId, { handlers: baseCallbacks.handlers.concat(providedCallbacks.handlers), inheritableHandlers: baseCallbacks.inheritableHandlers.concat(providedCallbacks.inheritableHandlers), tags: Array.from(new Set(baseCallbacks.tags.concat(providedCallbacks.tags))), inheritableTags: Array.from(new Set(baseCallbacks.inheritableTags.concat(providedCallbacks.inheritableTags))), metadata: { ...baseCallbacks.metadata, ...providedCallbacks.metadata } }); } else { const typedKey = key; copy[typedKey] = options[typedKey] ?? copy[typedKey]; } return copy; } const PRIMITIVES = new Set([ "string", "number", "boolean" ]); /** * Ensure that a passed config is an object with all required keys present. 
*/ function ensureConfig(config) { const implicitConfig = AsyncLocalStorageProviderSingleton.getRunnableConfig(); let empty$1 = { tags: [], metadata: {}, recursionLimit: 25, runId: void 0 }; if (implicitConfig) { const { runId, runName,...rest } = implicitConfig; empty$1 = Object.entries(rest).reduce((currentConfig, [key, value]) => { if (value !== void 0) currentConfig[key] = value; return currentConfig; }, empty$1); } if (config) empty$1 = Object.entries(config).reduce((currentConfig, [key, value]) => { if (value !== void 0) currentConfig[key] = value; return currentConfig; }, empty$1); if (empty$1?.configurable) { for (const key of Object.keys(empty$1.configurable)) if (PRIMITIVES.has(typeof empty$1.configurable[key]) && !empty$1.metadata?.[key]) { if (!empty$1.metadata) empty$1.metadata = {}; empty$1.metadata[key] = empty$1.configurable[key]; } } if (empty$1.timeout !== void 0) { if (empty$1.timeout <= 0) throw new Error("Timeout must be a positive number"); const timeoutSignal = AbortSignal.timeout(empty$1.timeout); if (empty$1.signal !== void 0) { if ("any" in AbortSignal) empty$1.signal = AbortSignal.any([empty$1.signal, timeoutSignal]); } else empty$1.signal = timeoutSignal; delete empty$1.timeout; } return empty$1; } /** * Helper function that patches runnable configs with updated properties. */ function patchConfig(config = {}, { callbacks, maxConcurrency, recursionLimit, runName, configurable, runId } = {}) { const newConfig = ensureConfig(config); if (callbacks !== void 0) { /** * If we're replacing callbacks we need to unset runName * since that should apply only to the same run as the original callbacks */ delete newConfig.runName; newConfig.callbacks = callbacks; } if (recursionLimit !== void 0) newConfig.recursionLimit = recursionLimit; if (maxConcurrency !== void 0) newConfig.maxConcurrency = maxConcurrency; if (runName !== void 0) newConfig.runName = runName; if (configurable !== void 0) newConfig.configurable = { ...newConfig.configurable, ...configurable }; if (runId !== void 0) delete newConfig.runId; return newConfig; } function pickRunnableConfigKeys(config) { return config ? 
{ configurable: config.configurable, recursionLimit: config.recursionLimit, callbacks: config.callbacks, tags: config.tags, metadata: config.metadata, maxConcurrency: config.maxConcurrency, timeout: config.timeout, signal: config.signal } : void 0; } //#endregion //#region node_modules/.pnpm/@langchain+core@0.3.57_openai@5.3.0_ws@8.18.2_zod@3.25.57_/node_modules/@langchain/core/dist/utils/signal.js async function raceWithSignal(promise, signal) { if (signal === void 0) return promise; let listener; return Promise.race([promise.catch((err) => { if (!signal?.aborted) throw err; else return void 0; }), new Promise((_, reject) => { listener = () => { reject(new Error("Aborted")); }; signal.addEventListener("abort", listener); if (signal.aborted) reject(new Error("Aborted")); })]).finally(() => signal.removeEventListener("abort", listener)); } //#endregion //#region node_modules/.pnpm/@langchain+core@0.3.57_openai@5.3.0_ws@8.18.2_zod@3.25.57_/node_modules/@langchain/core/dist/utils/stream.js var stream_exports = {}; __export(stream_exports, { AsyncGeneratorWithSetup: () => AsyncGeneratorWithSetup, IterableReadableStream: () => IterableReadableStream, atee: () => atee, concat: () => concat, pipeGeneratorWithSetup: () => pipeGeneratorWithSetup }); var IterableReadableStream = class IterableReadableStream extends ReadableStream { constructor() { super(...arguments); Object.defineProperty(this, "reader", { enumerable: true, configurable: true, writable: true, value: void 0 }); } ensureReader() { if (!this.reader) this.reader = this.getReader(); } async next() { this.ensureReader(); try { const result = await this.reader.read(); if (result.done) { this.reader.releaseLock(); return { done: true, value: void 0 }; } else return { done: false, value: result.value }; } catch (e) { this.reader.releaseLock(); throw e; } } async return() { this.ensureReader(); if (this.locked) { const cancelPromise = this.reader.cancel(); this.reader.releaseLock(); await cancelPromise; } return { done: true, value: void 0 }; } async throw(e) { this.ensureReader(); if (this.locked) { const cancelPromise = this.reader.cancel(); this.reader.releaseLock(); await cancelPromise; } throw e; } [Symbol.asyncIterator]() { return this; } async [Symbol.asyncDispose]() { await this.return(); } static fromReadableStream(stream) { const reader = stream.getReader(); return new IterableReadableStream({ start(controller) { return pump(); function pump() { return reader.read().then(({ done, value }) => { if (done) { controller.close(); return; } controller.enqueue(value); return pump(); }); } }, cancel() { reader.releaseLock(); } }); } static fromAsyncGenerator(generator) { return new IterableReadableStream({ async pull(controller) { const { value, done } = await generator.next(); if (done) controller.close(); controller.enqueue(value); }, async cancel(reason) { await generator.return(reason); } }); } }; function atee(iter, length = 2) { const buffers = Array.from({ length }, () => []); return buffers.map(async function* makeIter(buffer) { while (true) if (buffer.length === 0) { const result = await iter.next(); for (const buffer$1 of buffers) buffer$1.push(result); } else if (buffer[0].done) return; else yield buffer.shift().value; }); } function concat(first, second) { if (Array.isArray(first) && Array.isArray(second)) return first.concat(second); else if (typeof first === "string" && typeof second === "string") return first + second; else if (typeof first === "number" && typeof second === "number") return first + second; else if ("concat" 
in first && typeof first.concat === "function") return first.concat(second); else if (typeof first === "object" && typeof second === "object") { const chunk = { ...first }; for (const [key, value] of Object.entries(second)) if (key in chunk && !Array.isArray(chunk[key])) chunk[key] = concat(chunk[key], value); else chunk[key] = value; return chunk; } else throw new Error(`Cannot concat ${typeof first} and ${typeof second}`); } var AsyncGeneratorWithSetup = class { constructor(params) { Object.defineProperty(this, "generator", { enumerable: true, configurable: true, writable: true, value: void 0 }); Object.defineProperty(this, "setup", { enumerable: true, configurable: true, writable: true, value: void 0 }); Object.defineProperty(this, "config", { enumerable: true, configurable: true, writable: true, value: void 0 }); Object.defineProperty(this, "signal", { enumerable: true, configurable: true, writable: true, value: void 0 }); Object.defineProperty(this, "firstResult", { enumerable: true, configurable: true, writable: true, value: void 0 }); Object.defineProperty(this, "firstResultUsed", { enumerable: true, configurable: true, writable: true, value: false }); this.generator = params.generator; this.config = params.config; this.signal = params.signal ?? this.config?.signal; this.setup = new Promise((resolve, reject) => { AsyncLocalStorageProviderSingleton.runWithConfig(pickRunnableConfigKeys(params.config), async () => { this.firstResult = params.generator.next(); if (params.startSetup) this.firstResult.then(params.startSetup).then(resolve, reject); else this.firstResult.then((_result) => resolve(void 0), reject); }, true); }); } async next(...args) { this.signal?.throwIfAborted(); if (!this.firstResultUsed) { this.firstResultUsed = true; return this.firstResult; } return AsyncLocalStorageProviderSingleton.runWithConfig(pickRunnableConfigKeys(this.config), this.signal ? async () => { return raceWithSignal(this.generator.next(...args), this.signal); } : async () => { return this.generator.next(...args); }, true); } async return(value) { return this.generator.return(value); } async throw(e) { return this.generator.throw(e); } [Symbol.asyncIterator]() { return this; } async [Symbol.asyncDispose]() { await this.return(); } }; async function pipeGeneratorWithSetup(to, generator, startSetup, signal, ...args) { const gen = new AsyncGeneratorWithSetup({ generator, startSetup, signal }); const setup = await gen.setup; return { output: to(gen, setup, ...args), setup }; } //#endregion //#region node_modules/.pnpm/@langchain+core@0.3.57_openai@5.3.0_ws@8.18.2_zod@3.25.57_/node_modules/@langchain/core/dist/tracers/log_stream.js var log_stream_exports = {}; __export(log_stream_exports, { LogStreamCallbackHandler: () => LogStreamCallbackHandler, RunLog: () => RunLog, RunLogPatch: () => RunLogPatch, isLogStreamHandler: () => isLogStreamHandler }); /** * List of jsonpatch JSONPatchOperations, which describe how to create the run state * from an empty dict. This is the minimal representation of the log, designed to * be serialized as JSON and sent over the wire to reconstruct the log on the other * side. Reconstruction of the state can be done with any jsonpatch-compliant library, * see https://jsonpatch.com for more information. */ var RunLogPatch = class { constructor(fields) { Object.defineProperty(this, "ops", { enumerable: true, configurable: true, writable: true, value: void 0 }); this.ops = fields.ops ?? 
[]; } concat(other) { const ops = this.ops.concat(other.ops); const states = applyPatch({}, ops); return new RunLog({ ops, state: states[states.length - 1].newDocument }); } }; var RunLog = class RunLog extends RunLogPatch { constructor(fields) { super(fields); Object.defineProperty(this, "state", { enumerable: true, configurable: true, writable: true, value: void 0 }); this.state = fields.state; } concat(other) { const ops = this.ops.concat(other.ops); const states = applyPatch(this.state, other.ops); return new RunLog({ ops, state: states[states.length - 1].newDocument }); } static fromRunLogPatch(patch$2) { const states = applyPatch({}, patch$2.ops); return new RunLog({ ops: patch$2.ops, state: states[states.length - 1].newDocument }); } }; const isLogStreamHandler = (handler) => handler.name === "log_stream_tracer"; /** * Extract standardized inputs from a run. * * Standardizes the inputs based on the type of the runnable used. * * @param run - Run object * @param schemaFormat - The schema format to use. * * @returns Valid inputs are only dict. By conventions, inputs always represented * invocation using named arguments. * A null means that the input is not yet known! */ async function _getStandardizedInputs(run, schemaFormat) { if (schemaFormat === "original") throw new Error("Do not assign inputs with original schema drop the key for now. When inputs are added to streamLog they should be added with standardized schema for streaming events."); const { inputs } = run; if ([ "retriever", "llm", "prompt" ].includes(run.run_type)) return inputs; if (Object.keys(inputs).length === 1 && inputs?.input === "") return void 0; return inputs.input; } async function _getStandardizedOutputs(run, schemaFormat) { const { outputs } = run; if (schemaFormat === "original") return outputs; if ([ "retriever", "llm", "prompt" ].includes(run.run_type)) return outputs; if (outputs !== void 0 && Object.keys(outputs).length === 1 && outputs?.output !== void 0) return outputs.output; return outputs; } function isChatGenerationChunk(x) { return x !== void 0 && x.message !== void 0; } /** * Class that extends the `BaseTracer` class from the * `langchain.callbacks.tracers.base` module. It represents a callback * handler that logs the execution of runs and emits `RunLog` instances to a * `RunLogStream`. 
*/ var LogStreamCallbackHandler = class extends BaseTracer { constructor(fields) { super({ _awaitHandler: true, ...fields }); Object.defineProperty(this, "autoClose", { enumerable: true, configurable: true, writable: true, value: true }); Object.defineProperty(this, "includeNames", { enumerable: true, configurable: true, writable: true, value: void 0 }); Object.defineProperty(this, "includeTypes", { enumerable: true, configurable: true, writable: true, value: void 0 }); Object.defineProperty(this, "includeTags", { enumerable: true, configurable: true, writable: true, value: void 0 }); Object.defineProperty(this, "excludeNames", { enumerable: true, configurable: true, writable: true, value: void 0 }); Object.defineProperty(this, "excludeTypes", { enumerable: true, configurable: true, writable: true, value: void 0 }); Object.defineProperty(this, "excludeTags", { enumerable: true, configurable: true, writable: true, value: void 0 }); Object.defineProperty(this, "_schemaFormat", { enumerable: true, configurable: true, writable: true, value: "original" }); Object.defineProperty(this, "rootId", { enumerable: true, configurable: true, writable: true, value: void 0 }); Object.defineProperty(this, "keyMapByRunId", { enumerable: true, configurable: true, writable: true, value: {} }); Object.defineProperty(this, "counterMapByRunName", { enumerable: true, configurable: true, writable: true, value: {} }); Object.defineProperty(this, "transformStream", { enumerable: true, configurable: true, writable: true, value: void 0 }); Object.defineProperty(this, "writer", { enumerable: true, configurable: true, writable: true, value: void 0 }); Object.defineProperty(this, "receiveStream", { enumerable: true, configurable: true, writable: true, value: void 0 }); Object.defineProperty(this, "name", { enumerable: true, configurable: true, writable: true, value: "log_stream_tracer" }); Object.defineProperty(this, "lc_prefer_streaming", { enumerable: true, configurable: true, writable: true, value: true }); this.autoClose = fields?.autoClose ?? true; this.includeNames = fields?.includeNames; this.includeTypes = fields?.includeTypes; this.includeTags = fields?.includeTags; this.excludeNames = fields?.excludeNames; this.excludeTypes = fields?.excludeTypes; this.excludeTags = fields?.excludeTags; this._schemaFormat = fields?._schemaFormat ?? this._schemaFormat; this.transformStream = new TransformStream(); this.writer = this.transformStream.writable.getWriter(); this.receiveStream = IterableReadableStream.fromReadableStream(this.transformStream.readable); } [Symbol.asyncIterator]() { return this.receiveStream; } async persistRun(_run) {} _includeRun(run) { if (run.id === this.rootId) return false; const runTags = run.tags ?? 
[]; let include = this.includeNames === void 0 && this.includeTags === void 0 && this.includeTypes === void 0; if (this.includeNames !== void 0) include = include || this.includeNames.includes(run.name); if (this.includeTypes !== void 0) include = include || this.includeTypes.includes(run.run_type); if (this.includeTags !== void 0) include = include || runTags.find((tag) => this.includeTags?.includes(tag)) !== void 0; if (this.excludeNames !== void 0) include = include && !this.excludeNames.includes(run.name); if (this.excludeTypes !== void 0) include = include && !this.excludeTypes.includes(run.run_type); if (this.excludeTags !== void 0) include = include && runTags.every((tag) => !this.excludeTags?.includes(tag)); return include; } async *tapOutputIterable(runId, output) { for await (const chunk of output) { if (runId !== this.rootId) { const key = this.keyMapByRunId[runId]; if (key) await this.writer.write(new RunLogPatch({ ops: [{ op: "add", path: `/logs/${key}/streamed_output/-`, value: chunk }] })); } yield chunk; } } async onRunCreate(run) { if (this.rootId === void 0) { this.rootId = run.id; await this.writer.write(new RunLogPatch({ ops: [{ op: "replace", path: "", value: { id: run.id, name: run.name, type: run.run_type, streamed_output: [], final_output: void 0, logs: {} } }] })); } if (!this._includeRun(run)) return; if (this.counterMapByRunName[run.name] === void 0) this.counterMapByRunName[run.name] = 0; this.counterMapByRunName[run.name] += 1; const count = this.counterMapByRunName[run.name]; this.keyMapByRunId[run.id] = count === 1 ? run.name : `${run.name}:${count}`; const logEntry = { id: run.id, name: run.name, type: run.run_type, tags: run.tags ?? [], metadata: run.extra?.metadata ?? {}, start_time: new Date(run.start_time).toISOString(), streamed_output: [], streamed_output_str: [], final_output: void 0, end_time: void 0 }; if (this._schemaFormat === "streaming_events") logEntry.inputs = await _getStandardizedInputs(run, this._schemaFormat); await this.writer.write(new RunLogPatch({ ops: [{ op: "add", path: `/logs/${this.keyMapByRunId[run.id]}`, value: logEntry }] })); } async onRunUpdate(run) { try { const runName = this.keyMapByRunId[run.id]; if (runName === void 0) return; const ops = []; if (this._schemaFormat === "streaming_events") ops.push({ op: "replace", path: `/logs/${runName}/inputs`, value: await _getStandardizedInputs(run, this._schemaFormat) }); ops.push({ op: "add", path: `/logs/${runName}/final_output`, value: await _getStandardizedOutputs(run, this._schemaFormat) }); if (run.end_time !== void 0) ops.push({ op: "add", path: `/logs/${runName}/end_time`, value: new Date(run.end_time).toISOString() }); const patch$2 = new RunLogPatch({ ops }); await this.writer.write(patch$2); } finally { if (run.id === this.rootId) { const patch$2 = new RunLogPatch({ ops: [{ op: "replace", path: "/final_output", value: await _getStandardizedOutputs(run, this._schemaFormat) }] }); await this.writer.write(patch$2); if (this.autoClose) await this.writer.close(); } } } async onLLMNewToken(run, token, kwargs) { const runName = this.keyMapByRunId[run.id]; if (runName === void 0) return; const isChatModel = run.inputs.messages !== void 0; let streamedOutputValue; if (isChatModel) if (isChatGenerationChunk(kwargs?.chunk)) streamedOutputValue = kwargs?.chunk; else streamedOutputValue = new AIMessageChunk({ id: `run-${run.id}`, content: token }); else streamedOutputValue = token; const patch$2 = new RunLogPatch({ ops: [{ op: "add", path: `/logs/${runName}/streamed_output_str/-`, 
value: token }, { op: "add", path: `/logs/${runName}/streamed_output/-`, value: streamedOutputValue }] }); await this.writer.write(patch$2); } }; //#endregion //#region node_modules/.pnpm/@langchain+core@0.3.57_openai@5.3.0_ws@8.18.2_zod@3.25.57_/node_modules/@langchain/core/dist/outputs.js var outputs_exports = {}; __export(outputs_exports, { ChatGenerationChunk: () => ChatGenerationChunk, GenerationChunk: () => GenerationChunk, RUN_KEY: () => RUN_KEY }); const RUN_KEY = "__run"; /** * Chunk of a single generation. Used for streaming. */ var GenerationChunk = class GenerationChunk { constructor(fields) { Object.defineProperty(this, "text", { enumerable: true, configurable: true, writable: true, value: void 0 }); Object.defineProperty(this, "generationInfo", { enumerable: true, configurable: true, writable: true, value: void 0 }); this.text = fields.text; this.generationInfo = fields.generationInfo; } concat(chunk) { return new GenerationChunk({ text: this.text + chunk.text, generationInfo: { ...this.generationInfo, ...chunk.generationInfo } }); } }; var ChatGenerationChunk = class ChatGenerationChunk extends GenerationChunk { constructor(fields) { super(fields); Object.defineProperty(this, "message", { enumerable: true, configurable: true, writable: true, value: void 0 }); this.message = fields.message; } concat(chunk) { return new ChatGenerationChunk({ text: this.text + chunk.text, generationInfo: { ...this.generationInfo, ...chunk.generationInfo }, message: this.message.concat(chunk.message) }); } }; //#endregion //#region node_modules/.pnpm/@langchain+core@0.3.57_openai@5.3.0_ws@8.18.2_zod@3.25.57_/node_modules/@langchain/core/dist/tracers/event_stream.js function assignName({ name, serialized }) { if (name !== void 0) return name; if (serialized?.name !== void 0) return serialized.name; else if (serialized?.id !== void 0 && Array.isArray(serialized?.id)) return serialized.id[serialized.id.length - 1]; return "Unnamed"; } const isStreamEventsHandler = (handler) => handler.name === "event_stream_tracer"; /** * Class that extends the `BaseTracer` class from the * `langchain.callbacks.tracers.base` module. It represents a callback * handler that logs the execution of runs and emits `RunLog` instances to a * `RunLogStream`. 
*/ var EventStreamCallbackHandler = class extends BaseTracer { constructor(fields) { super({ _awaitHandler: true, ...fields }); Object.defineProperty(this, "autoClose", { enumerable: true, configurable: true, writable: true, value: true }); Object.defineProperty(this, "includeNames", { enumerable: true, configurable: true, writable: true, value: void 0 }); Object.defineProperty(this, "includeTypes", { enumerable: true, configurable: true, writable: true, value: void 0 }); Object.defineProperty(this, "includeTags", { enumerable: true, configurable: true, writable: true, value: void 0 }); Object.defineProperty(this, "excludeNames", { enumerable: true, configurable: true, writable: true, value: void 0 }); Object.defineProperty(this, "excludeTypes", { enumerable: true, configurable: true, writable: true, value: void 0 }); Object.defineProperty(this, "excludeTags", { enumerable: true, configurable: true, writable: true, value: void 0 }); Object.defineProperty(this, "runInfoMap", { enumerable: true, configurable: true, writable: true, value: /* @__PURE__ */ new Map() }); Object.defineProperty(this, "tappedPromises", { enumerable: true, configurable: true, writable: true, value: /* @__PURE__ */ new Map() }); Object.defineProperty(this, "transformStream", { enumerable: true, configurable: true, writable: true, value: void 0 }); Object.defineProperty(this, "writer", { enumerable: true, configurable: true, writable: true, value: void 0 }); Object.defineProperty(this, "receiveStream", { enumerable: true, configurable: true, writable: true, value: void 0 }); Object.defineProperty(this, "name", { enumerable: true, configurable: true, writable: true, value: "event_stream_tracer" }); Object.defineProperty(this, "lc_prefer_streaming", { enumerable: true, configurable: true, writable: true, value: true }); this.autoClose = fields?.autoClose ?? true; this.includeNames = fields?.includeNames; this.includeTypes = fields?.includeTypes; this.includeTags = fields?.includeTags; this.excludeNames = fields?.excludeNames; this.excludeTypes = fields?.excludeTypes; this.excludeTags = fields?.excludeTags; this.transformStream = new TransformStream(); this.writer = this.transformStream.writable.getWriter(); this.receiveStream = IterableReadableStream.fromReadableStream(this.transformStream.readable); } [Symbol.asyncIterator]() { return this.receiveStream; } async persistRun(_run) {} _includeRun(run) { const runTags = run.tags ?? 
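/* Filtering semantics: with no include filters configured every run passes; a matching
   include filter also lets a run through, while exclude filters always remove matches. */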
[]; let include = this.includeNames === void 0 && this.includeTags === void 0 && this.includeTypes === void 0; if (this.includeNames !== void 0) include = include || this.includeNames.includes(run.name); if (this.includeTypes !== void 0) include = include || this.includeTypes.includes(run.runType); if (this.includeTags !== void 0) include = include || runTags.find((tag) => this.includeTags?.includes(tag)) !== void 0; if (this.excludeNames !== void 0) include = include && !this.excludeNames.includes(run.name); if (this.excludeTypes !== void 0) include = include && !this.excludeTypes.includes(run.runType); if (this.excludeTags !== void 0) include = include && runTags.every((tag) => !this.excludeTags?.includes(tag)); return include; } async *tapOutputIterable(runId, outputStream) { const firstChunk = await outputStream.next(); if (firstChunk.done) return; const runInfo = this.runInfoMap.get(runId); if (runInfo === void 0) { yield firstChunk.value; return; } function _formatOutputChunk(eventType, data) { if (eventType === "llm" && typeof data === "string") return new GenerationChunk({ text: data }); return data; } let tappedPromise = this.tappedPromises.get(runId); if (tappedPromise === void 0) { let tappedPromiseResolver; tappedPromise = new Promise((resolve) => { tappedPromiseResolver = resolve; }); this.tappedPromises.set(runId, tappedPromise); try { const event = { event: `on_${runInfo.runType}_stream`, run_id: runId, name: runInfo.name, tags: runInfo.tags, metadata: runInfo.metadata, data: {} }; await this.send({ ...event, data: { chunk: _formatOutputChunk(runInfo.runType, firstChunk.value) } }, runInfo); yield firstChunk.value; for await (const chunk of outputStream) { if (runInfo.runType !== "tool" && runInfo.runType !== "retriever") await this.send({ ...event, data: { chunk: _formatOutputChunk(runInfo.runType, chunk) } }, runInfo); yield chunk; } } finally { tappedPromiseResolver(); } } else { yield firstChunk.value; for await (const chunk of outputStream) yield chunk; } } async send(payload, run) { if (this._includeRun(run)) await this.writer.write(payload); } async sendEndEvent(payload, run) { const tappedPromise = this.tappedPromises.get(payload.run_id); if (tappedPromise !== void 0) tappedPromise.then(() => { this.send(payload, run); }); else await this.send(payload, run); } async onLLMStart(run) { const runName = assignName(run); const runType = run.inputs.messages !== void 0 ? "chat_model" : "llm"; const runInfo = { tags: run.tags ?? [], metadata: run.extra?.metadata ?? {}, name: runName, runType, inputs: run.inputs }; this.runInfoMap.set(run.id, runInfo); const eventName = `on_${runType}_start`; await this.send({ event: eventName, data: { input: run.inputs }, name: runName, tags: run.tags ?? [], run_id: run.id, metadata: run.extra?.metadata ?? 
{} }, runInfo); } async onLLMNewToken(run, token, kwargs) { const runInfo = this.runInfoMap.get(run.id); let chunk; let eventName; if (runInfo === void 0) throw new Error(`onLLMNewToken: Run ID ${run.id} not found in run map.`); if (this.runInfoMap.size === 1) return; if (runInfo.runType === "chat_model") { eventName = "on_chat_model_stream"; if (kwargs?.chunk === void 0) chunk = new AIMessageChunk({ content: token, id: `run-${run.id}` }); else chunk = kwargs.chunk.message; } else if (runInfo.runType === "llm") { eventName = "on_llm_stream"; if (kwargs?.chunk === void 0) chunk = new GenerationChunk({ text: token }); else chunk = kwargs.chunk; } else throw new Error(`Unexpected run type ${runInfo.runType}`); await this.send({ event: eventName, data: { chunk }, run_id: run.id, name: runInfo.name, tags: runInfo.tags, metadata: runInfo.metadata }, runInfo); } async onLLMEnd(run) { const runInfo = this.runInfoMap.get(run.id); this.runInfoMap.delete(run.id); let eventName; if (runInfo === void 0) throw new Error(`onLLMEnd: Run ID ${run.id} not found in run map.`); const generations = run.outputs?.generations; let output; if (runInfo.runType === "chat_model") { for (const generation of generations ?? []) { if (output !== void 0) break; output = generation[0]?.message; } eventName = "on_chat_model_end"; } else if (runInfo.runType === "llm") { output = { generations: generations?.map((generation) => { return generation.map((chunk) => { return { text: chunk.text, generationInfo: chunk.generationInfo }; }); }), llmOutput: run.outputs?.llmOutput ?? {} }; eventName = "on_llm_end"; } else throw new Error(`onLLMEnd: Unexpected run type: ${runInfo.runType}`); await this.sendEndEvent({ event: eventName, data: { output, input: runInfo.inputs }, run_id: run.id, name: runInfo.name, tags: runInfo.tags, metadata: runInfo.metadata }, runInfo); } async onChainStart(run) { const runName = assignName(run); const runType = run.run_type ?? "chain"; const runInfo = { tags: run.tags ?? [], metadata: run.extra?.metadata ?? {}, name: runName, runType: run.run_type }; let eventData = {}; if (run.inputs.input === "" && Object.keys(run.inputs).length === 1) { eventData = {}; runInfo.inputs = {}; } else if (run.inputs.input !== void 0) { eventData.input = run.inputs.input; runInfo.inputs = run.inputs.input; } else { eventData.input = run.inputs; runInfo.inputs = run.inputs; } this.runInfoMap.set(run.id, runInfo); await this.send({ event: `on_${runType}_start`, data: eventData, name: runName, tags: run.tags ?? [], run_id: run.id, metadata: run.extra?.metadata ?? {} }, runInfo); } async onChainEnd(run) { const runInfo = this.runInfoMap.get(run.id); this.runInfoMap.delete(run.id); if (runInfo === void 0) throw new Error(`onChainEnd: Run ID ${run.id} not found in run map.`); const eventName = `on_${run.run_type}_end`; const inputs = run.inputs ?? runInfo.inputs ?? {}; const outputs = run.outputs?.output ?? run.outputs; const data = { output: outputs, input: inputs }; if (inputs.input && Object.keys(inputs).length === 1) { data.input = inputs.input; runInfo.inputs = inputs.input; } await this.sendEndEvent({ event: eventName, data, run_id: run.id, name: runInfo.name, tags: runInfo.tags, metadata: runInfo.metadata ?? {} }, runInfo); } async onToolStart(run) { const runName = assignName(run); const runInfo = { tags: run.tags ?? [], metadata: run.extra?.metadata ?? {}, name: runName, runType: "tool", inputs: run.inputs ?? 
{} }; this.runInfoMap.set(run.id, runInfo); await this.send({ event: "on_tool_start", data: { input: run.inputs ?? {} }, name: runName, run_id: run.id, tags: run.tags ?? [], metadata: run.extra?.metadata ?? {} }, runInfo); } async onToolEnd(run) { const runInfo = this.runInfoMap.get(run.id); this.runInfoMap.delete(run.id); if (runInfo === void 0) throw new Error(`onToolEnd: Run ID ${run.id} not found in run map.`); if (runInfo.inputs === void 0) throw new Error(`onToolEnd: Run ID ${run.id} is a tool call, and is expected to have traced inputs.`); const output = run.outputs?.output === void 0 ? run.outputs : run.outputs.output; await this.sendEndEvent({ event: "on_tool_end", data: { output, input: runInfo.inputs }, run_id: run.id, name: runInfo.name, tags: runInfo.tags, metadata: runInfo.metadata }, runInfo); } async onRetrieverStart(run) { const runName = assignName(run); const runType = "retriever"; const runInfo = { tags: run.tags ?? [], metadata: run.extra?.metadata ?? {}, name: runName, runType, inputs: { query: run.inputs.query } }; this.runInfoMap.set(run.id, runInfo); await this.send({ event: "on_retriever_start", data: { input: { query: run.inputs.query } }, name: runName, tags: run.tags ?? [], run_id: run.id, metadata: run.extra?.metadata ?? {} }, runInfo); } async onRetrieverEnd(run) { const runInfo = this.runInfoMap.get(run.id); this.runInfoMap.delete(run.id); if (runInfo === void 0) throw new Error(`onRetrieverEnd: Run ID ${run.id} not found in run map.`); await this.sendEndEvent({ event: "on_retriever_end", data: { output: run.outputs?.documents ?? run.outputs, input: runInfo.inputs }, run_id: run.id, name: runInfo.name, tags: runInfo.tags, metadata: runInfo.metadata }, runInfo); } async handleCustomEvent(eventName, data, runId) { const runInfo = this.runInfoMap.get(runId); if (runInfo === void 0) throw new Error(`handleCustomEvent: Run ID ${runId} not found in run map.`); await this.send({ event: "on_custom_event", run_id: runId, name: eventName, tags: runInfo.tags, metadata: runInfo.metadata, data }, runInfo); } async finish() { const pendingPromises = [...this.tappedPromises.values()]; Promise.all(pendingPromises).finally(() => { this.writer.close(); }); } }; //#endregion //#region node_modules/.pnpm/@langchain+core@0.3.57_openai@5.3.0_ws@8.18.2_zod@3.25.57_/node_modules/@langchain/core/dist/utils/async_caller.js var async_caller_exports = {}; __export(async_caller_exports, { AsyncCaller: () => AsyncCaller }); var import_p_retry$1 = __toESM(require_p_retry(), 1); var import_dist = __toESM(require_dist(), 1); const STATUS_NO_RETRY = [ 400, 401, 402, 403, 404, 405, 406, 407, 409 ]; const defaultFailedAttemptHandler = (error) => { if (error.message.startsWith("Cancel") || error.message.startsWith("AbortError") || error.name === "AbortError") throw error; if (error?.code === "ECONNABORTED") throw error; const status = error?.response?.status ?? error?.status; if (status && STATUS_NO_RETRY.includes(+status)) throw error; if (error?.error?.code === "insufficient_quota") { const err = new Error(error?.message); err.name = "InsufficientQuotaError"; throw err; } }; /** * A class that can be used to make async calls with concurrency and retry logic. * * This is useful for making calls to any kind of "expensive" external resource, * be it because it's rate-limited, subject to network issues, etc. * * Concurrent calls are limited by the `maxConcurrency` parameter, which defaults * to `Infinity`. This means that by default, all calls will be made in parallel. 
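* 
* A minimal usage sketch (illustrative only; it relies on the `call`/`fetch` methods
* defined on this class, and the argument values are made up):
* 
*   const caller = new AsyncCaller({ maxConcurrency: 2, maxRetries: 3 });
*   // At most 2 tasks run at once; failed attempts are retried with randomized
*   // exponential backoff, except for the non-retryable 4xx statuses listed above.
*   const res = await caller.fetch("https://example.com/api");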
* * Retries are limited by the `maxRetries` parameter, which defaults to 6. This * means that by default, each call will be retried up to 6 times, with an * exponential backoff between each attempt. */ var AsyncCaller = class { constructor(params) { Object.defineProperty(this, "maxConcurrency", { enumerable: true, configurable: true, writable: true, value: void 0 }); Object.defineProperty(this, "maxRetries", { enumerable: true, configurable: true, writable: true, value: void 0 }); Object.defineProperty(this, "onFailedAttempt", { enumerable: true, configurable: true, writable: true, value: void 0 }); Object.defineProperty(this, "queue", { enumerable: true, configurable: true, writable: true, value: void 0 }); this.maxConcurrency = params.maxConcurrency ?? Infinity; this.maxRetries = params.maxRetries ?? 6; this.onFailedAttempt = params.onFailedAttempt ?? defaultFailedAttemptHandler; const PQueue$1 = "default" in import_dist.default ? import_dist.default.default : import_dist.default; this.queue = new PQueue$1({ concurrency: this.maxConcurrency }); } call(callable, ...args) { return this.queue.add(() => (0, import_p_retry$1.default)(() => callable(...args).catch((error) => { if (error instanceof Error) throw error; else throw new Error(error); }), { onFailedAttempt: this.onFailedAttempt, retries: this.maxRetries, randomize: true }), { throwOnTimeout: true }); } callWithOptions(options, callable, ...args) { if (options.signal) return Promise.race([this.call(callable, ...args), new Promise((_, reject) => { options.signal?.addEventListener("abort", () => { reject(new Error("AbortError")); }); })]); return this.call(callable, ...args); } fetch(...args) { return this.call(() => fetch(...args).then((res) => res.ok ? res : Promise.reject(res))); } }; //#endregion //#region node_modules/.pnpm/@langchain+core@0.3.57_openai@5.3.0_ws@8.18.2_zod@3.25.57_/node_modules/@langchain/core/dist/tracers/root_listener.js var RootListenersTracer = class extends BaseTracer { constructor({ config, onStart, onEnd, onError }) { super({ _awaitHandler: true }); Object.defineProperty(this, "name", { enumerable: true, configurable: true, writable: true, value: "RootListenersTracer" }); /** The Run's ID. 
Type UUID */ Object.defineProperty(this, "rootId", { enumerable: true, configurable: true, writable: true, value: void 0 }); Object.defineProperty(this, "config", { enumerable: true, configurable: true, writable: true, value: void 0 }); Object.defineProperty(this, "argOnStart", { enumerable: true, configurable: true, writable: true, value: void 0 }); Object.defineProperty(this, "argOnEnd", { enumerable: true, configurable: true, writable: true, value: void 0 }); Object.defineProperty(this, "argOnError", { enumerable: true, configurable: true, writable: true, value: void 0 }); this.config = config; this.argOnStart = onStart; this.argOnEnd = onEnd; this.argOnError = onError; } /** * This is a legacy method only called once for an entire run tree * therefore not useful here * @param {Run} _ Not used */ persistRun(_) { return Promise.resolve(); } async onRunCreate(run) { if (this.rootId) return; this.rootId = run.id; if (this.argOnStart) await this.argOnStart(run, this.config); } async onRunUpdate(run) { if (run.id !== this.rootId) return; if (!run.error) { if (this.argOnEnd) await this.argOnEnd(run, this.config); } else if (this.argOnError) await this.argOnError(run, this.config); } }; //#endregion //#region node_modules/.pnpm/@langchain+core@0.3.57_openai@5.3.0_ws@8.18.2_zod@3.25.57_/node_modules/@langchain/core/dist/runnables/utils.js function isRunnableInterface(thing) { return thing ? thing.lc_runnable : false; } /** * Utility to filter the root event in the streamEvents implementation. * This is simply binding the arguments to the namespace to make save on * a bit of typing in the streamEvents implementation. * * TODO: Refactor and remove. */ var _RootEventFilter = class { constructor(fields) { Object.defineProperty(this, "includeNames", { enumerable: true, configurable: true, writable: true, value: void 0 }); Object.defineProperty(this, "includeTypes", { enumerable: true, configurable: true, writable: true, value: void 0 }); Object.defineProperty(this, "includeTags", { enumerable: true, configurable: true, writable: true, value: void 0 }); Object.defineProperty(this, "excludeNames", { enumerable: true, configurable: true, writable: true, value: void 0 }); Object.defineProperty(this, "excludeTypes", { enumerable: true, configurable: true, writable: true, value: void 0 }); Object.defineProperty(this, "excludeTags", { enumerable: true, configurable: true, writable: true, value: void 0 }); this.includeNames = fields.includeNames; this.includeTypes = fields.includeTypes; this.includeTags = fields.includeTags; this.excludeNames = fields.excludeNames; this.excludeTypes = fields.excludeTypes; this.excludeTags = fields.excludeTags; } includeEvent(event, rootType) { let include = this.includeNames === void 0 && this.includeTypes === void 0 && this.includeTags === void 0; const eventTags = event.tags ?? 
[]; if (this.includeNames !== void 0) include = include || this.includeNames.includes(event.name); if (this.includeTypes !== void 0) include = include || this.includeTypes.includes(rootType); if (this.includeTags !== void 0) include = include || eventTags.some((tag) => this.includeTags?.includes(tag)); if (this.excludeNames !== void 0) include = include && !this.excludeNames.includes(event.name); if (this.excludeTypes !== void 0) include = include && !this.excludeTypes.includes(rootType); if (this.excludeTags !== void 0) include = include && eventTags.every((tag) => !this.excludeTags?.includes(tag)); return include; } }; //#endregion //#region node_modules/.pnpm/zod-to-json-schema@3.24.5_zod@3.25.57/node_modules/zod-to-json-schema/dist/esm/Options.js const ignoreOverride = Symbol("Let zodToJsonSchema decide on which parser to use"); const defaultOptions = { name: void 0, $refStrategy: "root", basePath: ["#"], effectStrategy: "input", pipeStrategy: "all", dateStrategy: "format:date-time", mapStrategy: "entries", removeAdditionalStrategy: "passthrough", allowedAdditionalProperties: true, rejectedAdditionalProperties: false, definitionPath: "definitions", target: "jsonSchema7", strictUnions: false, definitions: {}, errorMessages: false, markdownDescription: false, patternStrategy: "escape", applyRegexFlags: false, emailStrategy: "format:email", base64Strategy: "contentEncoding:base64", nameStrategy: "ref" }; const getDefaultOptions = (options) => typeof options === "string" ? { ...defaultOptions, name: options } : { ...defaultOptions, ...options }; //#endregion //#region node_modules/.pnpm/zod-to-json-schema@3.24.5_zod@3.25.57/node_modules/zod-to-json-schema/dist/esm/Refs.js const getRefs = (options) => { const _options = getDefaultOptions(options); const currentPath = _options.name !== void 0 ? 
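/* When a name is supplied, the schema is parsed under basePath/definitionPath/name,
   which is also the path the root $ref produced by zodToJsonSchema points at. */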
[ ..._options.basePath, _options.definitionPath, _options.name ] : _options.basePath; return { ..._options, currentPath, propertyPath: void 0, seen: new Map(Object.entries(_options.definitions).map(([name, def]) => [def._def, { def: def._def, path: [ ..._options.basePath, _options.definitionPath, name ], jsonSchema: void 0 }])) }; }; //#endregion //#region node_modules/.pnpm/zod-to-json-schema@3.24.5_zod@3.25.57/node_modules/zod-to-json-schema/dist/esm/errorMessages.js function addErrorMessage(res, key, errorMessage, refs) { if (!refs?.errorMessages) return; if (errorMessage) res.errorMessage = { ...res.errorMessage, [key]: errorMessage }; } function setResponseValueAndErrors(res, key, value, errorMessage, refs) { res[key] = value; addErrorMessage(res, key, errorMessage, refs); } //#endregion //#region node_modules/.pnpm/zod-to-json-schema@3.24.5_zod@3.25.57/node_modules/zod-to-json-schema/dist/esm/parsers/any.js function parseAnyDef() { return {}; } //#endregion //#region node_modules/.pnpm/zod-to-json-schema@3.24.5_zod@3.25.57/node_modules/zod-to-json-schema/dist/esm/parsers/array.js function parseArrayDef(def, refs) { const res = { type: "array" }; if (def.type?._def && def.type?._def?.typeName !== ZodFirstPartyTypeKind.ZodAny) res.items = parseDef(def.type._def, { ...refs, currentPath: [...refs.currentPath, "items"] }); if (def.minLength) setResponseValueAndErrors(res, "minItems", def.minLength.value, def.minLength.message, refs); if (def.maxLength) setResponseValueAndErrors(res, "maxItems", def.maxLength.value, def.maxLength.message, refs); if (def.exactLength) { setResponseValueAndErrors(res, "minItems", def.exactLength.value, def.exactLength.message, refs); setResponseValueAndErrors(res, "maxItems", def.exactLength.value, def.exactLength.message, refs); } return res; } //#endregion //#region node_modules/.pnpm/zod-to-json-schema@3.24.5_zod@3.25.57/node_modules/zod-to-json-schema/dist/esm/parsers/bigint.js function parseBigintDef(def, refs) { const res = { type: "integer", format: "int64" }; if (!def.checks) return res; for (const check of def.checks) switch (check.kind) { case "min": if (refs.target === "jsonSchema7") if (check.inclusive) setResponseValueAndErrors(res, "minimum", check.value, check.message, refs); else setResponseValueAndErrors(res, "exclusiveMinimum", check.value, check.message, refs); else { if (!check.inclusive) res.exclusiveMinimum = true; setResponseValueAndErrors(res, "minimum", check.value, check.message, refs); } break; case "max": if (refs.target === "jsonSchema7") if (check.inclusive) setResponseValueAndErrors(res, "maximum", check.value, check.message, refs); else setResponseValueAndErrors(res, "exclusiveMaximum", check.value, check.message, refs); else { if (!check.inclusive) res.exclusiveMaximum = true; setResponseValueAndErrors(res, "maximum", check.value, check.message, refs); } break; case "multipleOf": setResponseValueAndErrors(res, "multipleOf", check.value, check.message, refs); break; } return res; } //#endregion //#region node_modules/.pnpm/zod-to-json-schema@3.24.5_zod@3.25.57/node_modules/zod-to-json-schema/dist/esm/parsers/boolean.js function parseBooleanDef() { return { type: "boolean" }; } //#endregion //#region node_modules/.pnpm/zod-to-json-schema@3.24.5_zod@3.25.57/node_modules/zod-to-json-schema/dist/esm/parsers/branded.js function parseBrandedDef(_def, refs) { return parseDef(_def.type._def, refs); } //#endregion //#region node_modules/.pnpm/zod-to-json-schema@3.24.5_zod@3.25.57/node_modules/zod-to-json-schema/dist/esm/parsers/catch.js 
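/* z.catch() schemas serialize as their inner type; the fallback value itself has no
   JSON Schema representation. */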
const parseCatchDef = (def, refs) => { return parseDef(def.innerType._def, refs); }; //#endregion //#region node_modules/.pnpm/zod-to-json-schema@3.24.5_zod@3.25.57/node_modules/zod-to-json-schema/dist/esm/parsers/date.js function parseDateDef(def, refs, overrideDateStrategy) { const strategy = overrideDateStrategy ?? refs.dateStrategy; if (Array.isArray(strategy)) return { anyOf: strategy.map((item, i) => parseDateDef(def, refs, item)) }; switch (strategy) { case "string": case "format:date-time": return { type: "string", format: "date-time" }; case "format:date": return { type: "string", format: "date" }; case "integer": return integerDateParser(def, refs); } } const integerDateParser = (def, refs) => { const res = { type: "integer", format: "unix-time" }; if (refs.target === "openApi3") return res; for (const check of def.checks) switch (check.kind) { case "min": setResponseValueAndErrors(res, "minimum", check.value, check.message, refs); break; case "max": setResponseValueAndErrors(res, "maximum", check.value, check.message, refs); break; } return res; }; //#endregion //#region node_modules/.pnpm/zod-to-json-schema@3.24.5_zod@3.25.57/node_modules/zod-to-json-schema/dist/esm/parsers/default.js function parseDefaultDef(_def, refs) { return { ...parseDef(_def.innerType._def, refs), default: _def.defaultValue() }; } //#endregion //#region node_modules/.pnpm/zod-to-json-schema@3.24.5_zod@3.25.57/node_modules/zod-to-json-schema/dist/esm/parsers/effects.js function parseEffectsDef(_def, refs) { return refs.effectStrategy === "input" ? parseDef(_def.schema._def, refs) : {}; } //#endregion //#region node_modules/.pnpm/zod-to-json-schema@3.24.5_zod@3.25.57/node_modules/zod-to-json-schema/dist/esm/parsers/enum.js function parseEnumDef(def) { return { type: "string", enum: Array.from(def.values) }; } //#endregion //#region node_modules/.pnpm/zod-to-json-schema@3.24.5_zod@3.25.57/node_modules/zod-to-json-schema/dist/esm/parsers/intersection.js const isJsonSchema7AllOfType = (type) => { if ("type" in type && type.type === "string") return false; return "allOf" in type; }; function parseIntersectionDef(def, refs) { const allOf = [parseDef(def.left._def, { ...refs, currentPath: [ ...refs.currentPath, "allOf", "0" ] }), parseDef(def.right._def, { ...refs, currentPath: [ ...refs.currentPath, "allOf", "1" ] })].filter((x) => !!x); let unevaluatedProperties = refs.target === "jsonSchema2019-09" ? { unevaluatedProperties: false } : void 0; const mergedAllOf = []; allOf.forEach((schema) => { if (isJsonSchema7AllOfType(schema)) { mergedAllOf.push(...schema.allOf); if (schema.unevaluatedProperties === void 0) unevaluatedProperties = void 0; } else { let nestedSchema = schema; if ("additionalProperties" in schema && schema.additionalProperties === false) { const { additionalProperties,...rest } = schema; nestedSchema = rest; } else unevaluatedProperties = void 0; mergedAllOf.push(nestedSchema); } }); return mergedAllOf.length ? { allOf: mergedAllOf, ...unevaluatedProperties } : void 0; } //#endregion //#region node_modules/.pnpm/zod-to-json-schema@3.24.5_zod@3.25.57/node_modules/zod-to-json-schema/dist/esm/parsers/literal.js function parseLiteralDef(def, refs) { const parsedType = typeof def.value; if (parsedType !== "bigint" && parsedType !== "number" && parsedType !== "boolean" && parsedType !== "string") return { type: Array.isArray(def.value) ? "array" : "object" }; if (refs.target === "openApi3") return { type: parsedType === "bigint" ? 
"integer" : parsedType, enum: [def.value] }; return { type: parsedType === "bigint" ? "integer" : parsedType, const: def.value }; } //#endregion //#region node_modules/.pnpm/zod-to-json-schema@3.24.5_zod@3.25.57/node_modules/zod-to-json-schema/dist/esm/parsers/string.js let emojiRegex = void 0; /** * Generated from the regular expressions found here as of 2024-05-22: * https://github.com/colinhacks/zod/blob/master/src/types.ts. * * Expressions with /i flag have been changed accordingly. */ const zodPatterns = { cuid: /^[cC][^\s-]{8,}$/, cuid2: /^[0-9a-z]+$/, ulid: /^[0-9A-HJKMNP-TV-Z]{26}$/, email: /^(?!\.)(?!.*\.\.)([a-zA-Z0-9_'+\-\.]*)[a-zA-Z0-9_+-]@([a-zA-Z0-9][a-zA-Z0-9\-]*\.)+[a-zA-Z]{2,}$/, emoji: () => { if (emojiRegex === void 0) emojiRegex = RegExp("^(\\p{Extended_Pictographic}|\\p{Emoji_Component})+$", "u"); return emojiRegex; }, uuid: /^[0-9a-fA-F]{8}\b-[0-9a-fA-F]{4}\b-[0-9a-fA-F]{4}\b-[0-9a-fA-F]{4}\b-[0-9a-fA-F]{12}$/, ipv4: /^(?:(?:25[0-5]|2[0-4][0-9]|1[0-9][0-9]|[1-9][0-9]|[0-9])\.){3}(?:25[0-5]|2[0-4][0-9]|1[0-9][0-9]|[1-9][0-9]|[0-9])$/, ipv4Cidr: /^(?:(?:25[0-5]|2[0-4][0-9]|1[0-9][0-9]|[1-9][0-9]|[0-9])\.){3}(?:25[0-5]|2[0-4][0-9]|1[0-9][0-9]|[1-9][0-9]|[0-9])\/(3[0-2]|[12]?[0-9])$/, ipv6: /^(([a-f0-9]{1,4}:){7}|::([a-f0-9]{1,4}:){0,6}|([a-f0-9]{1,4}:){1}:([a-f0-9]{1,4}:){0,5}|([a-f0-9]{1,4}:){2}:([a-f0-9]{1,4}:){0,4}|([a-f0-9]{1,4}:){3}:([a-f0-9]{1,4}:){0,3}|([a-f0-9]{1,4}:){4}:([a-f0-9]{1,4}:){0,2}|([a-f0-9]{1,4}:){5}:([a-f0-9]{1,4}:){0,1})([a-f0-9]{1,4}|(((25[0-5])|(2[0-4][0-9])|(1[0-9]{2})|([0-9]{1,2}))\.){3}((25[0-5])|(2[0-4][0-9])|(1[0-9]{2})|([0-9]{1,2})))$/, ipv6Cidr: /^(([0-9a-fA-F]{1,4}:){7,7}[0-9a-fA-F]{1,4}|([0-9a-fA-F]{1,4}:){1,7}:|([0-9a-fA-F]{1,4}:){1,6}:[0-9a-fA-F]{1,4}|([0-9a-fA-F]{1,4}:){1,5}(:[0-9a-fA-F]{1,4}){1,2}|([0-9a-fA-F]{1,4}:){1,4}(:[0-9a-fA-F]{1,4}){1,3}|([0-9a-fA-F]{1,4}:){1,3}(:[0-9a-fA-F]{1,4}){1,4}|([0-9a-fA-F]{1,4}:){1,2}(:[0-9a-fA-F]{1,4}){1,5}|[0-9a-fA-F]{1,4}:((:[0-9a-fA-F]{1,4}){1,6})|:((:[0-9a-fA-F]{1,4}){1,7}|:)|fe80:(:[0-9a-fA-F]{0,4}){0,4}%[0-9a-zA-Z]{1,}|::(ffff(:0{1,4}){0,1}:){0,1}((25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9])\.){3,3}(25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9])|([0-9a-fA-F]{1,4}:){1,4}:((25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9])\.){3,3}(25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9]))\/(12[0-8]|1[01][0-9]|[1-9]?[0-9])$/, base64: /^([0-9a-zA-Z+/]{4})*(([0-9a-zA-Z+/]{2}==)|([0-9a-zA-Z+/]{3}=))?$/, base64url: /^([0-9a-zA-Z-_]{4})*(([0-9a-zA-Z-_]{2}(==)?)|([0-9a-zA-Z-_]{3}(=)?))?$/, nanoid: /^[a-zA-Z0-9_-]{21}$/, jwt: /^[A-Za-z0-9-_]+\.[A-Za-z0-9-_]+\.[A-Za-z0-9-_]*$/ }; function parseStringDef(def, refs) { const res = { type: "string" }; if (def.checks) for (const check of def.checks) switch (check.kind) { case "min": setResponseValueAndErrors(res, "minLength", typeof res.minLength === "number" ? Math.max(res.minLength, check.value) : check.value, check.message, refs); break; case "max": setResponseValueAndErrors(res, "maxLength", typeof res.maxLength === "number" ? 
Math.min(res.maxLength, check.value) : check.value, check.message, refs); break; case "email": switch (refs.emailStrategy) { case "format:email": addFormat(res, "email", check.message, refs); break; case "format:idn-email": addFormat(res, "idn-email", check.message, refs); break; case "pattern:zod": addPattern(res, zodPatterns.email, check.message, refs); break; } break; case "url": addFormat(res, "uri", check.message, refs); break; case "uuid": addFormat(res, "uuid", check.message, refs); break; case "regex": addPattern(res, check.regex, check.message, refs); break; case "cuid": addPattern(res, zodPatterns.cuid, check.message, refs); break; case "cuid2": addPattern(res, zodPatterns.cuid2, check.message, refs); break; case "startsWith": addPattern(res, RegExp(`^${escapeLiteralCheckValue(check.value, refs)}`), check.message, refs); break; case "endsWith": addPattern(res, RegExp(`${escapeLiteralCheckValue(check.value, refs)}$`), check.message, refs); break; case "datetime": addFormat(res, "date-time", check.message, refs); break; case "date": addFormat(res, "date", check.message, refs); break; case "time": addFormat(res, "time", check.message, refs); break; case "duration": addFormat(res, "duration", check.message, refs); break; case "length": setResponseValueAndErrors(res, "minLength", typeof res.minLength === "number" ? Math.max(res.minLength, check.value) : check.value, check.message, refs); setResponseValueAndErrors(res, "maxLength", typeof res.maxLength === "number" ? Math.min(res.maxLength, check.value) : check.value, check.message, refs); break; case "includes": { addPattern(res, RegExp(escapeLiteralCheckValue(check.value, refs)), check.message, refs); break; } case "ip": { if (check.version !== "v6") addFormat(res, "ipv4", check.message, refs); if (check.version !== "v4") addFormat(res, "ipv6", check.message, refs); break; } case "base64url": addPattern(res, zodPatterns.base64url, check.message, refs); break; case "jwt": addPattern(res, zodPatterns.jwt, check.message, refs); break; case "cidr": { if (check.version !== "v6") addPattern(res, zodPatterns.ipv4Cidr, check.message, refs); if (check.version !== "v4") addPattern(res, zodPatterns.ipv6Cidr, check.message, refs); break; } case "emoji": addPattern(res, zodPatterns.emoji(), check.message, refs); break; case "ulid": { addPattern(res, zodPatterns.ulid, check.message, refs); break; } case "base64": { switch (refs.base64Strategy) { case "format:binary": { addFormat(res, "binary", check.message, refs); break; } case "contentEncoding:base64": { setResponseValueAndErrors(res, "contentEncoding", "base64", check.message, refs); break; } case "pattern:zod": { addPattern(res, zodPatterns.base64, check.message, refs); break; } } break; } case "nanoid": addPattern(res, zodPatterns.nanoid, check.message, refs); case "toLowerCase": case "toUpperCase": case "trim": break; default: /* c8 ignore next */ ((_) => {})(check); } return res; } function escapeLiteralCheckValue(literal, refs) { return refs.patternStrategy === "escape" ? 
escapeNonAlphaNumeric(literal) : literal; } const ALPHA_NUMERIC = new Set("ABCDEFGHIJKLMNOPQRSTUVXYZabcdefghijklmnopqrstuvxyz0123456789"); function escapeNonAlphaNumeric(source) { let result = ""; for (let i = 0; i < source.length; i++) { if (!ALPHA_NUMERIC.has(source[i])) result += "\\"; result += source[i]; } return result; } function addFormat(schema, value, message, refs) { if (schema.format || schema.anyOf?.some((x) => x.format)) { if (!schema.anyOf) schema.anyOf = []; if (schema.format) { schema.anyOf.push({ format: schema.format, ...schema.errorMessage && refs.errorMessages && { errorMessage: { format: schema.errorMessage.format } } }); delete schema.format; if (schema.errorMessage) { delete schema.errorMessage.format; if (Object.keys(schema.errorMessage).length === 0) delete schema.errorMessage; } } schema.anyOf.push({ format: value, ...message && refs.errorMessages && { errorMessage: { format: message } } }); } else setResponseValueAndErrors(schema, "format", value, message, refs); } function addPattern(schema, regex, message, refs) { if (schema.pattern || schema.allOf?.some((x) => x.pattern)) { if (!schema.allOf) schema.allOf = []; if (schema.pattern) { schema.allOf.push({ pattern: schema.pattern, ...schema.errorMessage && refs.errorMessages && { errorMessage: { pattern: schema.errorMessage.pattern } } }); delete schema.pattern; if (schema.errorMessage) { delete schema.errorMessage.pattern; if (Object.keys(schema.errorMessage).length === 0) delete schema.errorMessage; } } schema.allOf.push({ pattern: stringifyRegExpWithFlags(regex, refs), ...message && refs.errorMessages && { errorMessage: { pattern: message } } }); } else setResponseValueAndErrors(schema, "pattern", stringifyRegExpWithFlags(regex, refs), message, refs); } function stringifyRegExpWithFlags(regex, refs) { if (!refs.applyRegexFlags || !regex.flags) return regex.source; const flags = { i: regex.flags.includes("i"), m: regex.flags.includes("m"), s: regex.flags.includes("s") }; const source = flags.i ? regex.source.toLowerCase() : regex.source; let pattern = ""; let isEscaped = false; let inCharGroup = false; let inCharRange = false; for (let i = 0; i < source.length; i++) { if (isEscaped) { pattern += source[i]; isEscaped = false; continue; } if (flags.i) { if (inCharGroup) { if (source[i].match(/[a-z]/)) { if (inCharRange) { pattern += source[i]; pattern += `${source[i - 2]}-${source[i]}`.toUpperCase(); inCharRange = false; } else if (source[i + 1] === "-" && source[i + 2]?.match(/[a-z]/)) { pattern += source[i]; inCharRange = true; } else pattern += `${source[i]}${source[i].toUpperCase()}`; continue; } } else if (source[i].match(/[a-z]/)) { pattern += `[${source[i]}${source[i].toUpperCase()}]`; continue; } } if (flags.m) { if (source[i] === "^") { pattern += `(^|(?<=[\r\n]))`; continue; } else if (source[i] === "$") { pattern += `($|(?=[\r\n]))`; continue; } } if (flags.s && source[i] === ".") { pattern += inCharGroup ? `${source[i]}\r\n` : `[${source[i]}\r\n]`; continue; } pattern += source[i]; if (source[i] === "\\") isEscaped = true; else if (inCharGroup && source[i] === "]") inCharGroup = false; else if (!inCharGroup && source[i] === "[") inCharGroup = true; } try { new RegExp(pattern); } catch { console.warn(`Could not convert regex pattern at ${refs.currentPath.join("/")} to a flag-independent form! 
Falling back to the flag-ignorant source`); return regex.source; } return pattern; } //#endregion //#region node_modules/.pnpm/zod-to-json-schema@3.24.5_zod@3.25.57/node_modules/zod-to-json-schema/dist/esm/parsers/record.js function parseRecordDef(def, refs) { if (refs.target === "openAi") console.warn("Warning: OpenAI may not support records in schemas! Try an array of key-value pairs instead."); if (refs.target === "openApi3" && def.keyType?._def.typeName === ZodFirstPartyTypeKind.ZodEnum) return { type: "object", required: def.keyType._def.values, properties: def.keyType._def.values.reduce((acc, key) => ({ ...acc, [key]: parseDef(def.valueType._def, { ...refs, currentPath: [ ...refs.currentPath, "properties", key ] }) ?? {} }), {}), additionalProperties: refs.rejectedAdditionalProperties }; const schema = { type: "object", additionalProperties: parseDef(def.valueType._def, { ...refs, currentPath: [...refs.currentPath, "additionalProperties"] }) ?? refs.allowedAdditionalProperties }; if (refs.target === "openApi3") return schema; if (def.keyType?._def.typeName === ZodFirstPartyTypeKind.ZodString && def.keyType._def.checks?.length) { const { type,...keyType } = parseStringDef(def.keyType._def, refs); return { ...schema, propertyNames: keyType }; } else if (def.keyType?._def.typeName === ZodFirstPartyTypeKind.ZodEnum) return { ...schema, propertyNames: { enum: def.keyType._def.values } }; else if (def.keyType?._def.typeName === ZodFirstPartyTypeKind.ZodBranded && def.keyType._def.type._def.typeName === ZodFirstPartyTypeKind.ZodString && def.keyType._def.type._def.checks?.length) { const { type,...keyType } = parseBrandedDef(def.keyType._def, refs); return { ...schema, propertyNames: keyType }; } return schema; } //#endregion //#region node_modules/.pnpm/zod-to-json-schema@3.24.5_zod@3.25.57/node_modules/zod-to-json-schema/dist/esm/parsers/map.js function parseMapDef(def, refs) { if (refs.mapStrategy === "record") return parseRecordDef(def, refs); const keys = parseDef(def.keyType._def, { ...refs, currentPath: [ ...refs.currentPath, "items", "items", "0" ] }) || {}; const values = parseDef(def.valueType._def, { ...refs, currentPath: [ ...refs.currentPath, "items", "items", "1" ] }) || {}; return { type: "array", maxItems: 125, items: { type: "array", items: [keys, values], minItems: 2, maxItems: 2 } }; } //#endregion //#region node_modules/.pnpm/zod-to-json-schema@3.24.5_zod@3.25.57/node_modules/zod-to-json-schema/dist/esm/parsers/nativeEnum.js function parseNativeEnumDef(def) { const object = def.values; const actualKeys = Object.keys(def.values).filter((key) => { return typeof object[object[key]] !== "number"; }); const actualValues = actualKeys.map((key) => object[key]); const parsedTypes = Array.from(new Set(actualValues.map((values) => typeof values))); return { type: parsedTypes.length === 1 ? parsedTypes[0] === "string" ? "string" : "number" : ["string", "number"], enum: actualValues }; } //#endregion //#region node_modules/.pnpm/zod-to-json-schema@3.24.5_zod@3.25.57/node_modules/zod-to-json-schema/dist/esm/parsers/never.js function parseNeverDef() { return { not: {} }; } //#endregion //#region node_modules/.pnpm/zod-to-json-schema@3.24.5_zod@3.25.57/node_modules/zod-to-json-schema/dist/esm/parsers/null.js function parseNullDef(refs) { return refs.target === "openApi3" ? 
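/* OpenAPI 3.0 has no "null" type, so null is expressed as an enum plus nullable: true. */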
{ enum: ["null"], nullable: true } : { type: "null" }; } //#endregion //#region node_modules/.pnpm/zod-to-json-schema@3.24.5_zod@3.25.57/node_modules/zod-to-json-schema/dist/esm/parsers/union.js const primitiveMappings = { ZodString: "string", ZodNumber: "number", ZodBigInt: "integer", ZodBoolean: "boolean", ZodNull: "null" }; function parseUnionDef(def, refs) { if (refs.target === "openApi3") return asAnyOf(def, refs); const options = def.options instanceof Map ? Array.from(def.options.values()) : def.options; if (options.every((x) => x._def.typeName in primitiveMappings && (!x._def.checks || !x._def.checks.length))) { const types = options.reduce((types$1, x) => { const type = primitiveMappings[x._def.typeName]; return type && !types$1.includes(type) ? [...types$1, type] : types$1; }, []); return { type: types.length > 1 ? types : types[0] }; } else if (options.every((x) => x._def.typeName === "ZodLiteral" && !x.description)) { const types = options.reduce((acc, x) => { const type = typeof x._def.value; switch (type) { case "string": case "number": case "boolean": return [...acc, type]; case "bigint": return [...acc, "integer"]; case "object": if (x._def.value === null) return [...acc, "null"]; case "symbol": case "undefined": case "function": default: return acc; } }, []); if (types.length === options.length) { const uniqueTypes = types.filter((x, i, a) => a.indexOf(x) === i); return { type: uniqueTypes.length > 1 ? uniqueTypes : uniqueTypes[0], enum: options.reduce((acc, x) => { return acc.includes(x._def.value) ? acc : [...acc, x._def.value]; }, []) }; } } else if (options.every((x) => x._def.typeName === "ZodEnum")) return { type: "string", enum: options.reduce((acc, x) => [...acc, ...x._def.values.filter((x$1) => !acc.includes(x$1))], []) }; return asAnyOf(def, refs); } const asAnyOf = (def, refs) => { const anyOf = (def.options instanceof Map ? Array.from(def.options.values()) : def.options).map((x, i) => parseDef(x._def, { ...refs, currentPath: [ ...refs.currentPath, "anyOf", `${i}` ] })).filter((x) => !!x && (!refs.strictUnions || typeof x === "object" && Object.keys(x).length > 0)); return anyOf.length ? 
{ anyOf } : void 0; }; //#endregion //#region node_modules/.pnpm/zod-to-json-schema@3.24.5_zod@3.25.57/node_modules/zod-to-json-schema/dist/esm/parsers/nullable.js function parseNullableDef(def, refs) { if ([ "ZodString", "ZodNumber", "ZodBigInt", "ZodBoolean", "ZodNull" ].includes(def.innerType._def.typeName) && (!def.innerType._def.checks || !def.innerType._def.checks.length)) { if (refs.target === "openApi3") return { type: primitiveMappings[def.innerType._def.typeName], nullable: true }; return { type: [primitiveMappings[def.innerType._def.typeName], "null"] }; } if (refs.target === "openApi3") { const base$1 = parseDef(def.innerType._def, { ...refs, currentPath: [...refs.currentPath] }); if (base$1 && "$ref" in base$1) return { allOf: [base$1], nullable: true }; return base$1 && { ...base$1, nullable: true }; } const base = parseDef(def.innerType._def, { ...refs, currentPath: [ ...refs.currentPath, "anyOf", "0" ] }); return base && { anyOf: [base, { type: "null" }] }; } //#endregion //#region node_modules/.pnpm/zod-to-json-schema@3.24.5_zod@3.25.57/node_modules/zod-to-json-schema/dist/esm/parsers/number.js function parseNumberDef(def, refs) { const res = { type: "number" }; if (!def.checks) return res; for (const check of def.checks) switch (check.kind) { case "int": res.type = "integer"; addErrorMessage(res, "type", check.message, refs); break; case "min": if (refs.target === "jsonSchema7") if (check.inclusive) setResponseValueAndErrors(res, "minimum", check.value, check.message, refs); else setResponseValueAndErrors(res, "exclusiveMinimum", check.value, check.message, refs); else { if (!check.inclusive) res.exclusiveMinimum = true; setResponseValueAndErrors(res, "minimum", check.value, check.message, refs); } break; case "max": if (refs.target === "jsonSchema7") if (check.inclusive) setResponseValueAndErrors(res, "maximum", check.value, check.message, refs); else setResponseValueAndErrors(res, "exclusiveMaximum", check.value, check.message, refs); else { if (!check.inclusive) res.exclusiveMaximum = true; setResponseValueAndErrors(res, "maximum", check.value, check.message, refs); } break; case "multipleOf": setResponseValueAndErrors(res, "multipleOf", check.value, check.message, refs); break; } return res; } //#endregion //#region node_modules/.pnpm/zod-to-json-schema@3.24.5_zod@3.25.57/node_modules/zod-to-json-schema/dist/esm/parsers/object.js function parseObjectDef(def, refs) { const forceOptionalIntoNullable = refs.target === "openAi"; const result = { type: "object", properties: {} }; const required = []; const shape = def.shape(); for (const propName in shape) { let propDef = shape[propName]; if (propDef === void 0 || propDef._def === void 0) continue; let propOptional = safeIsOptional(propDef); if (propOptional && forceOptionalIntoNullable) { if (propDef instanceof ZodOptional) propDef = propDef._def.innerType; if (!propDef.isNullable()) propDef = propDef.nullable(); propOptional = false; } const parsedDef = parseDef(propDef._def, { ...refs, currentPath: [ ...refs.currentPath, "properties", propName ], propertyPath: [ ...refs.currentPath, "properties", propName ] }); if (parsedDef === void 0) continue; result.properties[propName] = parsedDef; if (!propOptional) required.push(propName); } if (required.length) result.required = required; const additionalProperties = decideAdditionalProperties(def, refs); if (additionalProperties !== void 0) result.additionalProperties = additionalProperties; return result; } function decideAdditionalProperties(def, refs) { if 
(def.catchall._def.typeName !== "ZodNever") return parseDef(def.catchall._def, { ...refs, currentPath: [...refs.currentPath, "additionalProperties"] }); switch (def.unknownKeys) { case "passthrough": return refs.allowedAdditionalProperties; case "strict": return refs.rejectedAdditionalProperties; case "strip": return refs.removeAdditionalStrategy === "strict" ? refs.allowedAdditionalProperties : refs.rejectedAdditionalProperties; } } function safeIsOptional(schema) { try { return schema.isOptional(); } catch { return true; } } //#endregion //#region node_modules/.pnpm/zod-to-json-schema@3.24.5_zod@3.25.57/node_modules/zod-to-json-schema/dist/esm/parsers/optional.js const parseOptionalDef = (def, refs) => { if (refs.currentPath.toString() === refs.propertyPath?.toString()) return parseDef(def.innerType._def, refs); const innerSchema = parseDef(def.innerType._def, { ...refs, currentPath: [ ...refs.currentPath, "anyOf", "1" ] }); return innerSchema ? { anyOf: [{ not: {} }, innerSchema] } : {}; }; //#endregion //#region node_modules/.pnpm/zod-to-json-schema@3.24.5_zod@3.25.57/node_modules/zod-to-json-schema/dist/esm/parsers/pipeline.js const parsePipelineDef = (def, refs) => { if (refs.pipeStrategy === "input") return parseDef(def.in._def, refs); else if (refs.pipeStrategy === "output") return parseDef(def.out._def, refs); const a = parseDef(def.in._def, { ...refs, currentPath: [ ...refs.currentPath, "allOf", "0" ] }); const b = parseDef(def.out._def, { ...refs, currentPath: [ ...refs.currentPath, "allOf", a ? "1" : "0" ] }); return { allOf: [a, b].filter((x) => x !== void 0) }; }; //#endregion //#region node_modules/.pnpm/zod-to-json-schema@3.24.5_zod@3.25.57/node_modules/zod-to-json-schema/dist/esm/parsers/promise.js function parsePromiseDef(def, refs) { return parseDef(def.type._def, refs); } //#endregion //#region node_modules/.pnpm/zod-to-json-schema@3.24.5_zod@3.25.57/node_modules/zod-to-json-schema/dist/esm/parsers/set.js function parseSetDef(def, refs) { const items = parseDef(def.valueType._def, { ...refs, currentPath: [...refs.currentPath, "items"] }); const schema = { type: "array", uniqueItems: true, items }; if (def.minSize) setResponseValueAndErrors(schema, "minItems", def.minSize.value, def.minSize.message, refs); if (def.maxSize) setResponseValueAndErrors(schema, "maxItems", def.maxSize.value, def.maxSize.message, refs); return schema; } //#endregion //#region node_modules/.pnpm/zod-to-json-schema@3.24.5_zod@3.25.57/node_modules/zod-to-json-schema/dist/esm/parsers/tuple.js function parseTupleDef(def, refs) { if (def.rest) return { type: "array", minItems: def.items.length, items: def.items.map((x, i) => parseDef(x._def, { ...refs, currentPath: [ ...refs.currentPath, "items", `${i}` ] })).reduce((acc, x) => x === void 0 ? acc : [...acc, x], []), additionalItems: parseDef(def.rest._def, { ...refs, currentPath: [...refs.currentPath, "additionalItems"] }) }; else return { type: "array", minItems: def.items.length, maxItems: def.items.length, items: def.items.map((x, i) => parseDef(x._def, { ...refs, currentPath: [ ...refs.currentPath, "items", `${i}` ] })).reduce((acc, x) => x === void 0 ? 
acc : [...acc, x], []) }; } //#endregion //#region node_modules/.pnpm/zod-to-json-schema@3.24.5_zod@3.25.57/node_modules/zod-to-json-schema/dist/esm/parsers/undefined.js function parseUndefinedDef() { return { not: {} }; } //#endregion //#region node_modules/.pnpm/zod-to-json-schema@3.24.5_zod@3.25.57/node_modules/zod-to-json-schema/dist/esm/parsers/unknown.js function parseUnknownDef() { return {}; } //#endregion //#region node_modules/.pnpm/zod-to-json-schema@3.24.5_zod@3.25.57/node_modules/zod-to-json-schema/dist/esm/parsers/readonly.js const parseReadonlyDef = (def, refs) => { return parseDef(def.innerType._def, refs); }; //#endregion //#region node_modules/.pnpm/zod-to-json-schema@3.24.5_zod@3.25.57/node_modules/zod-to-json-schema/dist/esm/selectParser.js const selectParser = (def, typeName, refs) => { switch (typeName) { case ZodFirstPartyTypeKind.ZodString: return parseStringDef(def, refs); case ZodFirstPartyTypeKind.ZodNumber: return parseNumberDef(def, refs); case ZodFirstPartyTypeKind.ZodObject: return parseObjectDef(def, refs); case ZodFirstPartyTypeKind.ZodBigInt: return parseBigintDef(def, refs); case ZodFirstPartyTypeKind.ZodBoolean: return parseBooleanDef(); case ZodFirstPartyTypeKind.ZodDate: return parseDateDef(def, refs); case ZodFirstPartyTypeKind.ZodUndefined: return parseUndefinedDef(); case ZodFirstPartyTypeKind.ZodNull: return parseNullDef(refs); case ZodFirstPartyTypeKind.ZodArray: return parseArrayDef(def, refs); case ZodFirstPartyTypeKind.ZodUnion: case ZodFirstPartyTypeKind.ZodDiscriminatedUnion: return parseUnionDef(def, refs); case ZodFirstPartyTypeKind.ZodIntersection: return parseIntersectionDef(def, refs); case ZodFirstPartyTypeKind.ZodTuple: return parseTupleDef(def, refs); case ZodFirstPartyTypeKind.ZodRecord: return parseRecordDef(def, refs); case ZodFirstPartyTypeKind.ZodLiteral: return parseLiteralDef(def, refs); case ZodFirstPartyTypeKind.ZodEnum: return parseEnumDef(def); case ZodFirstPartyTypeKind.ZodNativeEnum: return parseNativeEnumDef(def); case ZodFirstPartyTypeKind.ZodNullable: return parseNullableDef(def, refs); case ZodFirstPartyTypeKind.ZodOptional: return parseOptionalDef(def, refs); case ZodFirstPartyTypeKind.ZodMap: return parseMapDef(def, refs); case ZodFirstPartyTypeKind.ZodSet: return parseSetDef(def, refs); case ZodFirstPartyTypeKind.ZodLazy: return () => def.getter()._def; case ZodFirstPartyTypeKind.ZodPromise: return parsePromiseDef(def, refs); case ZodFirstPartyTypeKind.ZodNaN: case ZodFirstPartyTypeKind.ZodNever: return parseNeverDef(); case ZodFirstPartyTypeKind.ZodEffects: return parseEffectsDef(def, refs); case ZodFirstPartyTypeKind.ZodAny: return parseAnyDef(); case ZodFirstPartyTypeKind.ZodUnknown: return parseUnknownDef(); case ZodFirstPartyTypeKind.ZodDefault: return parseDefaultDef(def, refs); case ZodFirstPartyTypeKind.ZodBranded: return parseBrandedDef(def, refs); case ZodFirstPartyTypeKind.ZodReadonly: return parseReadonlyDef(def, refs); case ZodFirstPartyTypeKind.ZodCatch: return parseCatchDef(def, refs); case ZodFirstPartyTypeKind.ZodPipeline: return parsePipelineDef(def, refs); case ZodFirstPartyTypeKind.ZodFunction: case ZodFirstPartyTypeKind.ZodVoid: case ZodFirstPartyTypeKind.ZodSymbol: return void 0; default: /* c8 ignore next */ return ((_) => void 0)(typeName); } }; //#endregion //#region node_modules/.pnpm/zod-to-json-schema@3.24.5_zod@3.25.57/node_modules/zod-to-json-schema/dist/esm/parseDef.js function parseDef(def, refs, forceResolution = false) { const seenItem = refs.seen.get(def); if (refs.override) { 
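/* A user-provided override hook may return a custom schema for this def; returning the
   ignoreOverride sentinel falls through to the built-in parsers below. */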
const overrideResult = refs.override?.(def, refs, seenItem, forceResolution); if (overrideResult !== ignoreOverride) return overrideResult; } if (seenItem && !forceResolution) { const seenSchema = get$ref(seenItem, refs); if (seenSchema !== void 0) return seenSchema; } const newItem = { def, path: refs.currentPath, jsonSchema: void 0 }; refs.seen.set(def, newItem); const jsonSchemaOrGetter = selectParser(def, def.typeName, refs); const jsonSchema = typeof jsonSchemaOrGetter === "function" ? parseDef(jsonSchemaOrGetter(), refs) : jsonSchemaOrGetter; if (jsonSchema) addMeta(def, refs, jsonSchema); if (refs.postProcess) { const postProcessResult = refs.postProcess(jsonSchema, def, refs); newItem.jsonSchema = jsonSchema; return postProcessResult; } newItem.jsonSchema = jsonSchema; return jsonSchema; } const get$ref = (item, refs) => { switch (refs.$refStrategy) { case "root": return { $ref: item.path.join("/") }; case "relative": return { $ref: getRelativePath(refs.currentPath, item.path) }; case "none": case "seen": { if (item.path.length < refs.currentPath.length && item.path.every((value, index) => refs.currentPath[index] === value)) { console.warn(`Recursive reference detected at ${refs.currentPath.join("/")}! Defaulting to any`); return {}; } return refs.$refStrategy === "seen" ? {} : void 0; } } }; const getRelativePath = (pathA, pathB) => { let i = 0; for (; i < pathA.length && i < pathB.length; i++) if (pathA[i] !== pathB[i]) break; return [(pathA.length - i).toString(), ...pathB.slice(i)].join("/"); }; const addMeta = (def, refs, jsonSchema) => { if (def.description) { jsonSchema.description = def.description; if (refs.markdownDescription) jsonSchema.markdownDescription = def.description; } return jsonSchema; }; //#endregion //#region node_modules/.pnpm/zod-to-json-schema@3.24.5_zod@3.25.57/node_modules/zod-to-json-schema/dist/esm/zodToJsonSchema.js const zodToJsonSchema = (schema, options) => { const refs = getRefs(options); const definitions = typeof options === "object" && options.definitions ? Object.entries(options.definitions).reduce((acc, [name$1, schema$1]) => ({ ...acc, [name$1]: parseDef(schema$1._def, { ...refs, currentPath: [ ...refs.basePath, refs.definitionPath, name$1 ] }, true) ?? {} }), {}) : void 0; const name = typeof options === "string" ? options : options?.nameStrategy === "title" ? void 0 : options?.name; const main = parseDef(schema._def, name === void 0 ? refs : { ...refs, currentPath: [ ...refs.basePath, refs.definitionPath, name ] }, false) ?? {}; const title = typeof options === "object" && options.name !== void 0 && options.nameStrategy === "title" ? options.name : void 0; if (title !== void 0) main.title = title; const combined = name === void 0 ? definitions ? { ...main, [refs.definitionPath]: definitions } : main : { $ref: [ ...refs.$refStrategy === "relative" ? [] : refs.basePath, refs.definitionPath, name ].join("/"), [refs.definitionPath]: { ...definitions, [name]: main } }; if (refs.target === "jsonSchema7") combined.$schema = "http://json-schema.org/draft-07/schema#"; else if (refs.target === "jsonSchema2019-09" || refs.target === "openAi") combined.$schema = "https://json-schema.org/draft/2019-09/schema#"; if (refs.target === "openAi" && ("anyOf" in combined || "oneOf" in combined || "allOf" in combined || "type" in combined && Array.isArray(combined.type))) console.warn("Warning: OpenAI may not support schemas with unions as roots! 
Try wrapping it in an object property."); return combined; }; //#endregion //#region node_modules/.pnpm/@langchain+core@0.3.57_openai@5.3.0_ws@8.18.2_zod@3.25.57_/node_modules/@langchain/core/dist/runnables/graph_mermaid.js function _escapeNodeLabel(nodeLabel) { return nodeLabel.replace(/[^a-zA-Z-_0-9]/g, "_"); } const MARKDOWN_SPECIAL_CHARS = [ "*", "_", "`" ]; function _generateMermaidGraphStyles(nodeColors) { let styles$1 = ""; for (const [className, color$1] of Object.entries(nodeColors)) styles$1 += `\tclassDef ${className} ${color$1};\n`; return styles$1; } /** * Draws a Mermaid graph using the provided graph data */ function drawMermaid(nodes, edges, config) { const { firstNode, lastNode, nodeColors, withStyles = true, curveStyle = "linear", wrapLabelNWords = 9 } = config ?? {}; let mermaidGraph = withStyles ? `%%{init: {'flowchart': {'curve': '${curveStyle}'}}}%%\ngraph TD;\n` : "graph TD;\n"; if (withStyles) { const defaultClassLabel = "default"; const formatDict = { [defaultClassLabel]: "{0}({1})" }; if (firstNode !== void 0) formatDict[firstNode] = "{0}([{1}]):::first"; if (lastNode !== void 0) formatDict[lastNode] = "{0}([{1}]):::last"; for (const [key, node] of Object.entries(nodes)) { const nodeName = node.name.split(":").pop() ?? ""; const label = MARKDOWN_SPECIAL_CHARS.some((char) => nodeName.startsWith(char) && nodeName.endsWith(char)) ? `

<p>${nodeName}</p>
` : nodeName; let finalLabel = label; if (Object.keys(node.metadata ?? {}).length) finalLabel += `
${Object.entries(node.metadata ?? {}).map(([k, v]) => `${k} = ${v}`).join("\n")}`; const nodeLabel = (formatDict[key] ?? formatDict[defaultClassLabel]).replace("{0}", _escapeNodeLabel(key)).replace("{1}", finalLabel); mermaidGraph += `\t${nodeLabel}\n`; } } const edgeGroups = {}; for (const edge of edges) { const srcParts = edge.source.split(":"); const tgtParts = edge.target.split(":"); const commonPrefix = srcParts.filter((src$1, i) => src$1 === tgtParts[i]).join(":"); if (!edgeGroups[commonPrefix]) edgeGroups[commonPrefix] = []; edgeGroups[commonPrefix].push(edge); } const seenSubgraphs = /* @__PURE__ */ new Set(); function addSubgraph(edges$1, prefix$1) { const selfLoop = edges$1.length === 1 && edges$1[0].source === edges$1[0].target; if (prefix$1 && !selfLoop) { const subgraph = prefix$1.split(":").pop(); if (seenSubgraphs.has(subgraph)) throw new Error(`Found duplicate subgraph '${subgraph}' -- this likely means that you're reusing a subgraph node with the same name. Please adjust your graph to have subgraph nodes with unique names.`); seenSubgraphs.add(subgraph); mermaidGraph += `\tsubgraph ${subgraph}\n`; } for (const edge of edges$1) { const { source, target, data, conditional } = edge; let edgeLabel = ""; if (data !== void 0) { let edgeData = data; const words = edgeData.split(" "); if (words.length > wrapLabelNWords) edgeData = Array.from({ length: Math.ceil(words.length / wrapLabelNWords) }, (_, i) => words.slice(i * wrapLabelNWords, (i + 1) * wrapLabelNWords).join(" ")).join(" 
 "); edgeLabel = conditional ? ` -.  ${edgeData}  .-> ` : ` --  ${edgeData}  --> `; } else edgeLabel = conditional ? " -.-> " : " --> "; mermaidGraph += `\t${_escapeNodeLabel(source)}${edgeLabel}${_escapeNodeLabel(target)};\n`; } for (const nestedPrefix in edgeGroups) if (nestedPrefix.startsWith(`${prefix$1}:`) && nestedPrefix !== prefix$1) addSubgraph(edgeGroups[nestedPrefix], nestedPrefix); if (prefix$1 && !selfLoop) mermaidGraph += " end\n"; } addSubgraph(edgeGroups[""] ?? [], ""); for (const prefix$1 in edgeGroups) if (!prefix$1.includes(":") && prefix$1 !== "") addSubgraph(edgeGroups[prefix$1], prefix$1); if (withStyles) mermaidGraph += _generateMermaidGraphStyles(nodeColors ?? {}); return mermaidGraph; } /** * Renders Mermaid graph using the Mermaid.INK API. */ async function drawMermaidPng(mermaidSyntax, config) { let { backgroundColor = "white" } = config ?? {}; const mermaidSyntaxEncoded = btoa(mermaidSyntax); if (backgroundColor !== void 0) { const hexColorPattern = /^#(?:[0-9a-fA-F]{3}){1,2}$/; if (!hexColorPattern.test(backgroundColor)) backgroundColor = `!${backgroundColor}`; } const imageUrl = `https://mermaid.ink/img/${mermaidSyntaxEncoded}?bgColor=${backgroundColor}`; const res = await fetch(imageUrl); if (!res.ok) throw new Error([ `Failed to render the graph using the Mermaid.INK API.`, `Status code: ${res.status}`, `Status text: ${res.statusText}` ].join("\n")); const content = await res.blob(); return content; } //#endregion //#region node_modules/.pnpm/@langchain+core@0.3.57_openai@5.3.0_ws@8.18.2_zod@3.25.57_/node_modules/@langchain/core/dist/runnables/graph.js function nodeDataStr(id, data) { if (id !== void 0 && !validate_default(id)) return id; else if (isRunnableInterface(data)) try { let dataStr = data.getName(); dataStr = dataStr.startsWith("Runnable") ? dataStr.slice(8) : dataStr; return dataStr; } catch (error) { return data.getName(); } else return data.name ?? "UnknownSchema"; } function nodeDataJson(node) { if (isRunnableInterface(node.data)) return { type: "runnable", data: { id: node.data.lc_id, name: node.data.getName() } }; else return { type: "schema", data: { ...zodToJsonSchema(node.data.schema), title: node.data.name } }; } var Graph = class Graph { constructor(params) { Object.defineProperty(this, "nodes", { enumerable: true, configurable: true, writable: true, value: {} }); Object.defineProperty(this, "edges", { enumerable: true, configurable: true, writable: true, value: [] }); this.nodes = params?.nodes ?? this.nodes; this.edges = params?.edges ?? this.edges; } toJSON() { const stableNodeIds = {}; Object.values(this.nodes).forEach((node, i) => { stableNodeIds[node.id] = validate_default(node.id) ? i : node.id; }); return { nodes: Object.values(this.nodes).map((node) => ({ id: stableNodeIds[node.id], ...nodeDataJson(node) })), edges: this.edges.map((edge) => { const item = { source: stableNodeIds[edge.source], target: stableNodeIds[edge.target] }; if (typeof edge.data !== "undefined") item.data = edge.data; if (typeof edge.conditional !== "undefined") item.conditional = edge.conditional; return item; }) }; } addNode(data, id, metadata) { if (id !== void 0 && this.nodes[id] !== void 0) throw new Error(`Node with id ${id} already exists`); const nodeId = id ?? 
v4_default(); const node = { id: nodeId, data, name: nodeDataStr(id, data), metadata }; this.nodes[nodeId] = node; return node; } removeNode(node) { delete this.nodes[node.id]; this.edges = this.edges.filter((edge) => edge.source !== node.id && edge.target !== node.id); } addEdge(source, target, data, conditional) { if (this.nodes[source.id] === void 0) throw new Error(`Source node ${source.id} not in graph`); if (this.nodes[target.id] === void 0) throw new Error(`Target node ${target.id} not in graph`); const edge = { source: source.id, target: target.id, data, conditional }; this.edges.push(edge); return edge; } firstNode() { return _firstNode(this); } lastNode() { return _lastNode(this); } /** * Add all nodes and edges from another graph. * Note this doesn't check for duplicates, nor does it connect the graphs. */ extend(graph, prefix$1 = "") { let finalPrefix = prefix$1; const nodeIds = Object.values(graph.nodes).map((node) => node.id); if (nodeIds.every(validate_default)) finalPrefix = ""; const prefixed = (id) => { return finalPrefix ? `${finalPrefix}:${id}` : id; }; Object.entries(graph.nodes).forEach(([key, value]) => { this.nodes[prefixed(key)] = { ...value, id: prefixed(key) }; }); const newEdges = graph.edges.map((edge) => { return { ...edge, source: prefixed(edge.source), target: prefixed(edge.target) }; }); this.edges = [...this.edges, ...newEdges]; const first = graph.firstNode(); const last = graph.lastNode(); return [first ? { id: prefixed(first.id), data: first.data } : void 0, last ? { id: prefixed(last.id), data: last.data } : void 0]; } trimFirstNode() { const firstNode = this.firstNode(); if (firstNode && _firstNode(this, [firstNode.id])) this.removeNode(firstNode); } trimLastNode() { const lastNode = this.lastNode(); if (lastNode && _lastNode(this, [lastNode.id])) this.removeNode(lastNode); } /** * Return a new graph with all nodes re-identified, * using their unique, readable names where possible. */ reid() { const nodeLabels = Object.fromEntries(Object.values(this.nodes).map((node) => [node.id, node.name])); const nodeLabelCounts = /* @__PURE__ */ new Map(); Object.values(nodeLabels).forEach((label) => { nodeLabelCounts.set(label, (nodeLabelCounts.get(label) || 0) + 1); }); const getNodeId = (nodeId) => { const label = nodeLabels[nodeId]; if (validate_default(nodeId) && nodeLabelCounts.get(label) === 1) return label; else return nodeId; }; return new Graph({ nodes: Object.fromEntries(Object.entries(this.nodes).map(([id, node]) => [getNodeId(id), { ...node, id: getNodeId(id) }])), edges: this.edges.map((edge) => ({ ...edge, source: getNodeId(edge.source), target: getNodeId(edge.target) })) }); } drawMermaid(params) { const { withStyles, curveStyle, nodeColors = { default: "fill:#f2f0ff,line-height:1.2", first: "fill-opacity:0", last: "fill:#bfb6fc" }, wrapLabelNWords } = params ?? {}; const graph = this.reid(); const firstNode = graph.firstNode(); const lastNode = graph.lastNode(); return drawMermaid(graph.nodes, graph.edges, { firstNode: firstNode?.id, lastNode: lastNode?.id, withStyles, curveStyle, nodeColors, wrapLabelNWords }); } async drawMermaidPng(params) { const mermaidSyntax = this.drawMermaid(params); return drawMermaidPng(mermaidSyntax, { backgroundColor: params?.backgroundColor }); } }; /** * Find the single node that is not a target of any edge. * Exclude nodes/sources with ids in the exclude list. * If there is no such node, or there are multiple, return undefined. * When drawing the graph, this node would be the origin. 
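 *
 * Illustrative sketch (hypothetical node ids, added for orientation; assumes
 * the `Graph` export from "@langchain/core/runnables/graph" and `z.any()` as
 * a stand-in schema): in a chain a -> b -> c only "a" never appears as an
 * edge target, so it is the origin; a graph with two independent sources has
 * no unique origin and undefined is returned.
 * @example
 * ```typescript
 * import { z } from "zod";
 * import { Graph } from "@langchain/core/runnables/graph";
 *
 * const g = new Graph();
 * const a = g.addNode({ name: "a", schema: z.any() }, "a");
 * const b = g.addNode({ name: "b", schema: z.any() }, "b");
 * const c = g.addNode({ name: "c", schema: z.any() }, "c");
 * g.addEdge(a, b);
 * g.addEdge(b, c);
 * g.firstNode(); // node "a" (delegates to _firstNode)
 * g.lastNode();  // node "c" (delegates to _lastNode below)
 * ```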
*/ function _firstNode(graph, exclude = []) { const targets = new Set(graph.edges.filter((edge) => !exclude.includes(edge.source)).map((edge) => edge.target)); const found = []; for (const node of Object.values(graph.nodes)) if (!exclude.includes(node.id) && !targets.has(node.id)) found.push(node); return found.length === 1 ? found[0] : void 0; } /** * Find the single node that is not a source of any edge. * Exclude nodes/targets with ids in the exclude list. * If there is no such node, or there are multiple, return undefined. * When drawing the graph, this node would be the destination. */ function _lastNode(graph, exclude = []) { const sources = new Set(graph.edges.filter((edge) => !exclude.includes(edge.target)).map((edge) => edge.source)); const found = []; for (const node of Object.values(graph.nodes)) if (!exclude.includes(node.id) && !sources.has(node.id)) found.push(node); return found.length === 1 ? found[0] : void 0; } //#endregion //#region node_modules/.pnpm/@langchain+core@0.3.57_openai@5.3.0_ws@8.18.2_zod@3.25.57_/node_modules/@langchain/core/dist/runnables/wrappers.js function convertToHttpEventStream(stream) { const encoder$1 = new TextEncoder(); const finalStream = new ReadableStream({ async start(controller) { for await (const chunk of stream) controller.enqueue(encoder$1.encode(`event: data\ndata: ${JSON.stringify(chunk)}\n\n`)); controller.enqueue(encoder$1.encode("event: end\n\n")); controller.close(); } }); return IterableReadableStream.fromReadableStream(finalStream); } //#endregion //#region node_modules/.pnpm/@langchain+core@0.3.57_openai@5.3.0_ws@8.18.2_zod@3.25.57_/node_modules/@langchain/core/dist/runnables/iter.js function isIterableIterator(thing) { return typeof thing === "object" && thing !== null && typeof thing[Symbol.iterator] === "function" && typeof thing.next === "function"; } const isIterator = (x) => x != null && typeof x === "object" && "next" in x && typeof x.next === "function"; function isAsyncIterable(thing) { return typeof thing === "object" && thing !== null && typeof thing[Symbol.asyncIterator] === "function"; } function* consumeIteratorInContext(context, iter) { while (true) { const { value, done } = AsyncLocalStorageProviderSingleton.runWithConfig(pickRunnableConfigKeys(context), iter.next.bind(iter), true); if (done) break; else yield value; } } async function* consumeAsyncIterableInContext(context, iter) { const iterator = iter[Symbol.asyncIterator](); while (true) { const { value, done } = await AsyncLocalStorageProviderSingleton.runWithConfig(pickRunnableConfigKeys(context), iterator.next.bind(iter), true); if (done) break; else yield value; } } //#endregion //#region node_modules/.pnpm/@langchain+core@0.3.57_openai@5.3.0_ws@8.18.2_zod@3.25.57_/node_modules/@langchain/core/dist/runnables/base.js var import_p_retry = __toESM(require_p_retry(), 1); function _coerceToDict(value, defaultKey) { return value && !Array.isArray(value) && !(value instanceof Date) && typeof value === "object" ? value : { [defaultKey]: value }; } /** * A Runnable is a generic unit of work that can be invoked, batched, streamed, and/or * transformed. */ var Runnable = class extends Serializable { constructor() { super(...arguments); Object.defineProperty(this, "lc_runnable", { enumerable: true, configurable: true, writable: true, value: true }); Object.defineProperty(this, "name", { enumerable: true, configurable: true, writable: true, value: void 0 }); } getName(suffix) { const name = this.name ?? this.constructor.lc_name() ?? 
this.constructor.name; return suffix ? `${name}${suffix}` : name; } /** * Bind arguments to a Runnable, returning a new Runnable. * @param kwargs * @returns A new RunnableBinding that, when invoked, will apply the bound args. * * @deprecated Use {@link withConfig} instead. This will be removed in the next breaking release. */ bind(kwargs) { return new RunnableBinding({ bound: this, kwargs, config: {} }); } /** * Return a new Runnable that maps a list of inputs to a list of outputs, * by calling invoke() with each input. * * @deprecated This will be removed in the next breaking release. */ map() { return new RunnableEach({ bound: this }); } /** * Add retry logic to an existing runnable. * @param fields.stopAfterAttempt The number of attempts to retry. * @param fields.onFailedAttempt A function that is called when a retry fails. * @returns A new RunnableRetry that, when invoked, will retry according to the parameters. */ withRetry(fields) { return new RunnableRetry({ bound: this, kwargs: {}, config: {}, maxAttemptNumber: fields?.stopAfterAttempt, ...fields }); } /** * Bind config to a Runnable, returning a new Runnable. * @param config New configuration parameters to attach to the new runnable. * @returns A new RunnableBinding with a config matching what's passed. */ withConfig(config) { return new RunnableBinding({ bound: this, config, kwargs: {} }); } /** * Create a new runnable from the current one that will try invoking * other passed fallback runnables if the initial invocation fails. * @param fields.fallbacks Other runnables to call if the runnable errors. * @returns A new RunnableWithFallbacks. */ withFallbacks(fields) { const fallbacks = Array.isArray(fields) ? fields : fields.fallbacks; return new RunnableWithFallbacks({ runnable: this, fallbacks }); } _getOptionsList(options, length = 0) { if (Array.isArray(options) && options.length !== length) throw new Error(`Passed "options" must be an array with the same length as the inputs, but got ${options.length} options for ${length} inputs`); if (Array.isArray(options)) return options.map(ensureConfig); if (length > 1 && !Array.isArray(options) && options.runId) { console.warn("Provided runId will be used only for the first element of the batch."); const subsequent = Object.fromEntries(Object.entries(options).filter(([key]) => key !== "runId")); return Array.from({ length }, (_, i) => ensureConfig(i === 0 ? options : subsequent)); } return Array.from({ length }, () => ensureConfig(options)); } async batch(inputs, options, batchOptions) { const configList = this._getOptionsList(options ?? {}, inputs.length); const maxConcurrency = configList[0]?.maxConcurrency ?? batchOptions?.maxConcurrency; const caller = new AsyncCaller({ maxConcurrency, onFailedAttempt: (e) => { throw e; } }); const batchCalls = inputs.map((input, i) => caller.call(async () => { try { const result = await this.invoke(input, configList[i]); return result; } catch (e) { if (batchOptions?.returnExceptions) return e; throw e; } })); return Promise.all(batchCalls); } /** * Default streaming implementation. * Subclasses should override this method if they support streaming output. * @param input * @param options */ async *_streamIterator(input, options) { yield this.invoke(input, options); } /** * Stream output in chunks. * @param input * @param options * @returns A readable stream that is also an iterable. 
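 *
 * Illustrative usage sketch (added; relies only on the RunnableLambda and
 * IterableReadableStream behavior defined elsewhere in this bundle): the
 * returned stream is async-iterable, so it can be consumed with `for await`.
 * @example
 * ```typescript
 * import { RunnableLambda } from "@langchain/core/runnables";
 *
 * const echo = RunnableLambda.from(async (input: string) => `Echo: ${input}`);
 * const stream = await echo.stream("hello");
 * for await (const chunk of stream) {
 *   // The default _streamIterator yields the invoke() result as one chunk.
 *   console.log(chunk); // "Echo: hello"
 * }
 * ```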
*/ async stream(input, options) { const config = ensureConfig(options); const wrappedGenerator = new AsyncGeneratorWithSetup({ generator: this._streamIterator(input, config), config }); await wrappedGenerator.setup; return IterableReadableStream.fromAsyncGenerator(wrappedGenerator); } _separateRunnableConfigFromCallOptions(options) { let runnableConfig; if (options === void 0) runnableConfig = ensureConfig(options); else runnableConfig = ensureConfig({ callbacks: options.callbacks, tags: options.tags, metadata: options.metadata, runName: options.runName, configurable: options.configurable, recursionLimit: options.recursionLimit, maxConcurrency: options.maxConcurrency, runId: options.runId, timeout: options.timeout, signal: options.signal }); const callOptions = { ...options }; delete callOptions.callbacks; delete callOptions.tags; delete callOptions.metadata; delete callOptions.runName; delete callOptions.configurable; delete callOptions.recursionLimit; delete callOptions.maxConcurrency; delete callOptions.runId; delete callOptions.timeout; delete callOptions.signal; return [runnableConfig, callOptions]; } async _callWithConfig(func, input, options) { const config = ensureConfig(options); const callbackManager_ = await getCallbackManagerForConfig(config); const runManager = await callbackManager_?.handleChainStart(this.toJSON(), _coerceToDict(input, "input"), config.runId, config?.runType, void 0, void 0, config?.runName ?? this.getName()); delete config.runId; let output; try { const promise = func.call(this, input, config, runManager); output = await raceWithSignal(promise, options?.signal); } catch (e) { await runManager?.handleChainError(e); throw e; } await runManager?.handleChainEnd(_coerceToDict(output, "output")); return output; } /** * Internal method that handles batching and configuration for a runnable * It takes a function, input values, and optional configuration, and * returns a promise that resolves to the output values. * @param func The function to be executed for each input value. * @param input The input values to be processed. * @param config Optional configuration for the function execution. * @returns A promise that resolves to the output values. */ async _batchWithConfig(func, inputs, options, batchOptions) { const optionsList = this._getOptionsList(options ?? {}, inputs.length); const callbackManagers = await Promise.all(optionsList.map(getCallbackManagerForConfig)); const runManagers = await Promise.all(callbackManagers.map(async (callbackManager, i) => { const handleStartRes = await callbackManager?.handleChainStart(this.toJSON(), _coerceToDict(inputs[i], "input"), optionsList[i].runId, optionsList[i].runType, void 0, void 0, optionsList[i].runName ?? this.getName()); delete optionsList[i].runId; return handleStartRes; })); let outputs; try { const promise = func.call(this, inputs, optionsList, runManagers, batchOptions); outputs = await raceWithSignal(promise, optionsList?.[0]?.signal); } catch (e) { await Promise.all(runManagers.map((runManager) => runManager?.handleChainError(e))); throw e; } await Promise.all(runManagers.map((runManager) => runManager?.handleChainEnd(_coerceToDict(outputs, "output")))); return outputs; } /** * Helper method to transform an Iterator of Input values into an Iterator of * Output values, with callbacks. * Use this to implement `stream()` or `transform()` in Runnable subclasses. 
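 *
 * Sketch of the intended wiring (hypothetical subclass; mirrors how
 * RunnableMap and RunnableLambda below route their own _transform through
 * this helper):
 * @example
 * ```typescript
 * import { Runnable, type RunnableConfig } from "@langchain/core/runnables";
 *
 * class Uppercase extends Runnable<string, string> {
 *   lc_namespace = ["example"];
 *   async invoke(input: string) {
 *     return input.toUpperCase();
 *   }
 *   async *_upper(generator: AsyncGenerator<string>) {
 *     for await (const chunk of generator) yield chunk.toUpperCase();
 *   }
 *   transform(generator: AsyncGenerator<string>, options?: Partial<RunnableConfig>) {
 *     return this._transformStreamWithConfig(generator, this._upper.bind(this), options);
 *   }
 * }
 * ```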
*/ async *_transformStreamWithConfig(inputGenerator, transformer, options) { let finalInput; let finalInputSupported = true; let finalOutput; let finalOutputSupported = true; const config = ensureConfig(options); const callbackManager_ = await getCallbackManagerForConfig(config); async function* wrapInputForTracing() { for await (const chunk of inputGenerator) { if (finalInputSupported) if (finalInput === void 0) finalInput = chunk; else try { finalInput = concat(finalInput, chunk); } catch { finalInput = void 0; finalInputSupported = false; } yield chunk; } } let runManager; try { const pipe = await pipeGeneratorWithSetup(transformer.bind(this), wrapInputForTracing(), async () => callbackManager_?.handleChainStart(this.toJSON(), { input: "" }, config.runId, config.runType, void 0, void 0, config.runName ?? this.getName()), options?.signal, config); delete config.runId; runManager = pipe.setup; const streamEventsHandler = runManager?.handlers.find(isStreamEventsHandler); let iterator = pipe.output; if (streamEventsHandler !== void 0 && runManager !== void 0) iterator = streamEventsHandler.tapOutputIterable(runManager.runId, iterator); const streamLogHandler = runManager?.handlers.find(isLogStreamHandler); if (streamLogHandler !== void 0 && runManager !== void 0) iterator = streamLogHandler.tapOutputIterable(runManager.runId, iterator); for await (const chunk of iterator) { yield chunk; if (finalOutputSupported) if (finalOutput === void 0) finalOutput = chunk; else try { finalOutput = concat(finalOutput, chunk); } catch { finalOutput = void 0; finalOutputSupported = false; } } } catch (e) { await runManager?.handleChainError(e, void 0, void 0, void 0, { inputs: _coerceToDict(finalInput, "input") }); throw e; } await runManager?.handleChainEnd(finalOutput ?? {}, void 0, void 0, void 0, { inputs: _coerceToDict(finalInput, "input") }); } getGraph(_) { const graph = new Graph(); const inputNode = graph.addNode({ name: `${this.getName()}Input`, schema: anyType() }); const runnableNode = graph.addNode(this); const outputNode = graph.addNode({ name: `${this.getName()}Output`, schema: anyType() }); graph.addEdge(inputNode, runnableNode); graph.addEdge(runnableNode, outputNode); return graph; } /** * Create a new runnable sequence that runs each individual runnable in series, * piping the output of one runnable into another runnable or runnable-like. * @param coerceable A runnable, function, or object whose values are functions or runnables. * @returns A new runnable sequence. */ pipe(coerceable) { return new RunnableSequence({ first: this, last: _coerceToRunnable(coerceable) }); } /** * Pick keys from the dict output of this runnable. Returns a new runnable. */ pick(keys) { return this.pipe(new RunnablePick(keys)); } /** * Assigns new fields to the dict output of this runnable. Returns a new runnable. */ assign(mapping) { return this.pipe(new RunnableAssign(new RunnableMap({ steps: mapping }))); } /** * Default implementation of transform, which buffers input and then calls stream. * Subclasses should override this method if they can start producing output while * input is still being generated. * @param generator * @param options */ async *transform(generator, options) { let finalChunk; for await (const chunk of generator) if (finalChunk === void 0) finalChunk = chunk; else finalChunk = concat(finalChunk, chunk); yield* this._streamIterator(finalChunk, ensureConfig(options)); } /** * Stream all output from a runnable, as reported to the callback system. 
* This includes all inner runs of LLMs, Retrievers, Tools, etc. * Output is streamed as Log objects, which include a list of * jsonpatch ops that describe how the state of the run has changed in each * step, and the final state of the run. * The jsonpatch ops can be applied in order to construct state. * @param input * @param options * @param streamOptions */ async *streamLog(input, options, streamOptions) { const logStreamCallbackHandler = new LogStreamCallbackHandler({ ...streamOptions, autoClose: false, _schemaFormat: "original" }); const config = ensureConfig(options); yield* this._streamLog(input, logStreamCallbackHandler, config); } async *_streamLog(input, logStreamCallbackHandler, config) { const { callbacks } = config; if (callbacks === void 0) config.callbacks = [logStreamCallbackHandler]; else if (Array.isArray(callbacks)) config.callbacks = callbacks.concat([logStreamCallbackHandler]); else { const copiedCallbacks = callbacks.copy(); copiedCallbacks.addHandler(logStreamCallbackHandler, true); config.callbacks = copiedCallbacks; } const runnableStreamPromise = this.stream(input, config); async function consumeRunnableStream() { try { const runnableStream = await runnableStreamPromise; for await (const chunk of runnableStream) { const patch$2 = new RunLogPatch({ ops: [{ op: "add", path: "/streamed_output/-", value: chunk }] }); await logStreamCallbackHandler.writer.write(patch$2); } } finally { await logStreamCallbackHandler.writer.close(); } } const runnableStreamConsumePromise = consumeRunnableStream(); try { for await (const log of logStreamCallbackHandler) yield log; } finally { await runnableStreamConsumePromise; } } streamEvents(input, options, streamOptions) { let stream; if (options.version === "v1") stream = this._streamEventsV1(input, options, streamOptions); else if (options.version === "v2") stream = this._streamEventsV2(input, options, streamOptions); else throw new Error(`Only versions "v1" and "v2" of the schema are currently supported.`); if (options.encoding === "text/event-stream") return convertToHttpEventStream(stream); else return IterableReadableStream.fromAsyncGenerator(stream); } async *_streamEventsV2(input, options, streamOptions) { const eventStreamer = new EventStreamCallbackHandler({ ...streamOptions, autoClose: false }); const config = ensureConfig(options); const runId = config.runId ?? 
v4_default(); config.runId = runId; const callbacks = config.callbacks; if (callbacks === void 0) config.callbacks = [eventStreamer]; else if (Array.isArray(callbacks)) config.callbacks = callbacks.concat(eventStreamer); else { const copiedCallbacks = callbacks.copy(); copiedCallbacks.addHandler(eventStreamer, true); config.callbacks = copiedCallbacks; } const abortController = new AbortController(); const outerThis = this; async function consumeRunnableStream() { try { let signal; if (options?.signal) if ("any" in AbortSignal) signal = AbortSignal.any([abortController.signal, options.signal]); else { signal = options.signal; options.signal.addEventListener("abort", () => { abortController.abort(); }, { once: true }); } else signal = abortController.signal; const runnableStream = await outerThis.stream(input, { ...config, signal }); const tappedStream = eventStreamer.tapOutputIterable(runId, runnableStream); for await (const _ of tappedStream) if (abortController.signal.aborted) break; } finally { await eventStreamer.finish(); } } const runnableStreamConsumePromise = consumeRunnableStream(); let firstEventSent = false; let firstEventRunId; try { for await (const event of eventStreamer) { if (!firstEventSent) { event.data.input = input; firstEventSent = true; firstEventRunId = event.run_id; yield event; continue; } if (event.run_id === firstEventRunId && event.event.endsWith("_end")) { if (event.data?.input) delete event.data.input; } yield event; } } finally { abortController.abort(); await runnableStreamConsumePromise; } } async *_streamEventsV1(input, options, streamOptions) { let runLog; let hasEncounteredStartEvent = false; const config = ensureConfig(options); const rootTags = config.tags ?? []; const rootMetadata = config.metadata ?? {}; const rootName = config.runName ?? this.getName(); const logStreamCallbackHandler = new LogStreamCallbackHandler({ ...streamOptions, autoClose: false, _schemaFormat: "streaming_events" }); const rootEventFilter = new _RootEventFilter({ ...streamOptions }); const logStream = this._streamLog(input, logStreamCallbackHandler, config); for await (const log of logStream) { if (!runLog) runLog = RunLog.fromRunLogPatch(log); else runLog = runLog.concat(log); if (runLog.state === void 0) throw new Error(`Internal error: "streamEvents" state is missing. Please open a bug report.`); if (!hasEncounteredStartEvent) { hasEncounteredStartEvent = true; const state$2 = { ...runLog.state }; const event = { run_id: state$2.id, event: `on_${state$2.type}_start`, name: rootName, tags: rootTags, metadata: rootMetadata, data: { input } }; if (rootEventFilter.includeEvent(event, state$2.type)) yield event; } const paths = log.ops.filter((op) => op.path.startsWith("/logs/")).map((op) => op.path.split("/")[2]); const dedupedPaths = [...new Set(paths)]; for (const path of dedupedPaths) { let eventType; let data = {}; const logEntry = runLog.state.logs[path]; if (logEntry.end_time === void 0) if (logEntry.streamed_output.length > 0) eventType = "stream"; else eventType = "start"; else eventType = "end"; if (eventType === "start") { if (logEntry.inputs !== void 0) data.input = logEntry.inputs; } else if (eventType === "end") { if (logEntry.inputs !== void 0) data.input = logEntry.inputs; data.output = logEntry.final_output; } else if (eventType === "stream") { const chunkCount = logEntry.streamed_output.length; if (chunkCount !== 1) throw new Error(`Expected exactly one chunk of streamed output, got ${chunkCount} instead. 
Encountered in: "${logEntry.name}"`); data = { chunk: logEntry.streamed_output[0] }; logEntry.streamed_output = []; } yield { event: `on_${logEntry.type}_${eventType}`, name: logEntry.name, run_id: logEntry.id, tags: logEntry.tags, metadata: logEntry.metadata, data }; } const { state: state$1 } = runLog; if (state$1.streamed_output.length > 0) { const chunkCount = state$1.streamed_output.length; if (chunkCount !== 1) throw new Error(`Expected exactly one chunk of streamed output, got ${chunkCount} instead. Encountered in: "${state$1.name}"`); const data = { chunk: state$1.streamed_output[0] }; state$1.streamed_output = []; const event = { event: `on_${state$1.type}_stream`, run_id: state$1.id, tags: rootTags, metadata: rootMetadata, name: rootName, data }; if (rootEventFilter.includeEvent(event, state$1.type)) yield event; } } const state = runLog?.state; if (state !== void 0) { const event = { event: `on_${state.type}_end`, name: rootName, run_id: state.id, tags: rootTags, metadata: rootMetadata, data: { output: state.final_output } }; if (rootEventFilter.includeEvent(event, state.type)) yield event; } } static isRunnable(thing) { return isRunnableInterface(thing); } /** * Bind lifecycle listeners to a Runnable, returning a new Runnable. * The Run object contains information about the run, including its id, * type, input, output, error, startTime, endTime, and any tags or metadata * added to the run. * * @param {Object} params - The object containing the callback functions. * @param {(run: Run) => void} params.onStart - Called before the runnable starts running, with the Run object. * @param {(run: Run) => void} params.onEnd - Called after the runnable finishes running, with the Run object. * @param {(run: Run) => void} params.onError - Called if the runnable throws an error, with the Run object. */ withListeners({ onStart, onEnd, onError }) { return new RunnableBinding({ bound: this, config: {}, configFactories: [(config) => ({ callbacks: [new RootListenersTracer({ config, onStart, onEnd, onError })] })] }); } /** * Convert a runnable to a tool. Return a new instance of `RunnableToolLike` * which contains the runnable, name, description and schema. * * @template {T extends RunInput = RunInput} RunInput - The input type of the runnable. Should be the same as the `RunInput` type of the runnable. * * @param fields * @param {string | undefined} [fields.name] The name of the tool. If not provided, it will default to the name of the runnable. * @param {string | undefined} [fields.description] The description of the tool. Falls back to the description on the Zod schema if not provided, or undefined if neither are provided. * @param {z.ZodType} [fields.schema] The Zod schema for the input of the tool. Infers the Zod type from the input type of the runnable. * @returns {RunnableToolLike, RunOutput>} An instance of `RunnableToolLike` which is a runnable that can be used as a tool. */ asTool(fields) { return convertRunnableToTool(this, fields); } }; /** * Wraps a runnable and applies partial config upon invocation. 
* * @example * ```typescript * import { * type RunnableConfig, * RunnableLambda, * } from "@langchain/core/runnables"; * * const enhanceProfile = ( * profile: Record, * config?: RunnableConfig * ) => { * if (config?.configurable?.role) { * return { ...profile, role: config.configurable.role }; * } * return profile; * }; * * const runnable = RunnableLambda.from(enhanceProfile); * * // Bind configuration to the runnable to set the user's role dynamically * const adminRunnable = runnable.bind({ configurable: { role: "Admin" } }); * const userRunnable = runnable.bind({ configurable: { role: "User" } }); * * const result1 = await adminRunnable.invoke({ * name: "Alice", * email: "alice@example.com" * }); * * // { name: "Alice", email: "alice@example.com", role: "Admin" } * * const result2 = await userRunnable.invoke({ * name: "Bob", * email: "bob@example.com" * }); * * // { name: "Bob", email: "bob@example.com", role: "User" } * ``` */ var RunnableBinding = class RunnableBinding extends Runnable { static lc_name() { return "RunnableBinding"; } constructor(fields) { super(fields); Object.defineProperty(this, "lc_namespace", { enumerable: true, configurable: true, writable: true, value: ["langchain_core", "runnables"] }); Object.defineProperty(this, "lc_serializable", { enumerable: true, configurable: true, writable: true, value: true }); Object.defineProperty(this, "bound", { enumerable: true, configurable: true, writable: true, value: void 0 }); Object.defineProperty(this, "config", { enumerable: true, configurable: true, writable: true, value: void 0 }); Object.defineProperty(this, "kwargs", { enumerable: true, configurable: true, writable: true, value: void 0 }); Object.defineProperty(this, "configFactories", { enumerable: true, configurable: true, writable: true, value: void 0 }); this.bound = fields.bound; this.kwargs = fields.kwargs; this.config = fields.config; this.configFactories = fields.configFactories; } getName(suffix) { return this.bound.getName(suffix); } async _mergeConfig(...options) { const config = mergeConfigs(this.config, ...options); return mergeConfigs(config, ...this.configFactories ? await Promise.all(this.configFactories.map(async (configFactory) => await configFactory(config))) : []); } /** * Binds the runnable with the specified arguments. * @param kwargs The arguments to bind the runnable with. * @returns A new instance of the `RunnableBinding` class that is bound with the specified arguments. * * @deprecated Use {@link withConfig} instead. This will be removed in the next breaking release. */ bind(kwargs) { return new this.constructor({ bound: this.bound, kwargs: { ...this.kwargs, ...kwargs }, config: this.config }); } withConfig(config) { return new this.constructor({ bound: this.bound, kwargs: this.kwargs, config: { ...this.config, ...config } }); } withRetry(fields) { return new RunnableRetry({ bound: this.bound, kwargs: this.kwargs, config: this.config, maxAttemptNumber: fields?.stopAfterAttempt, ...fields }); } async invoke(input, options) { return this.bound.invoke(input, await this._mergeConfig(ensureConfig(options), this.kwargs)); } async batch(inputs, options, batchOptions) { const mergedOptions = Array.isArray(options) ? 
await Promise.all(options.map(async (individualOption) => this._mergeConfig(ensureConfig(individualOption), this.kwargs))) : await this._mergeConfig(ensureConfig(options), this.kwargs); return this.bound.batch(inputs, mergedOptions, batchOptions); } async *_streamIterator(input, options) { yield* this.bound._streamIterator(input, await this._mergeConfig(ensureConfig(options), this.kwargs)); } async stream(input, options) { return this.bound.stream(input, await this._mergeConfig(ensureConfig(options), this.kwargs)); } async *transform(generator, options) { yield* this.bound.transform(generator, await this._mergeConfig(ensureConfig(options), this.kwargs)); } streamEvents(input, options, streamOptions) { const outerThis = this; const generator = async function* () { yield* outerThis.bound.streamEvents(input, { ...await outerThis._mergeConfig(ensureConfig(options), outerThis.kwargs), version: options.version }, streamOptions); }; return IterableReadableStream.fromAsyncGenerator(generator()); } static isRunnableBinding(thing) { return thing.bound && Runnable.isRunnable(thing.bound); } /** * Bind lifecycle listeners to a Runnable, returning a new Runnable. * The Run object contains information about the run, including its id, * type, input, output, error, startTime, endTime, and any tags or metadata * added to the run. * * @param {Object} params - The object containing the callback functions. * @param {(run: Run) => void} params.onStart - Called before the runnable starts running, with the Run object. * @param {(run: Run) => void} params.onEnd - Called after the runnable finishes running, with the Run object. * @param {(run: Run) => void} params.onError - Called if the runnable throws an error, with the Run object. */ withListeners({ onStart, onEnd, onError }) { return new RunnableBinding({ bound: this.bound, kwargs: this.kwargs, config: this.config, configFactories: [(config) => ({ callbacks: [new RootListenersTracer({ config, onStart, onEnd, onError })] })] }); } }; /** * A runnable that delegates calls to another runnable * with each element of the input sequence. * @example * ```typescript * import { RunnableEach, RunnableLambda } from "@langchain/core/runnables"; * * const toUpperCase = (input: string): string => input.toUpperCase(); * const addGreeting = (input: string): string => `Hello, ${input}!`; * * const upperCaseLambda = RunnableLambda.from(toUpperCase); * const greetingLambda = RunnableLambda.from(addGreeting); * * const chain = new RunnableEach({ * bound: upperCaseLambda.pipe(greetingLambda), * }); * * const result = await chain.invoke(["alice", "bob", "carol"]) * * // ["Hello, ALICE!", "Hello, BOB!", "Hello, CAROL!"] * ``` * * @deprecated This will be removed in the next breaking release. */ var RunnableEach = class RunnableEach extends Runnable { static lc_name() { return "RunnableEach"; } constructor(fields) { super(fields); Object.defineProperty(this, "lc_serializable", { enumerable: true, configurable: true, writable: true, value: true }); Object.defineProperty(this, "lc_namespace", { enumerable: true, configurable: true, writable: true, value: ["langchain_core", "runnables"] }); Object.defineProperty(this, "bound", { enumerable: true, configurable: true, writable: true, value: void 0 }); this.bound = fields.bound; } /** * Binds the runnable with the specified arguments. * @param kwargs The arguments to bind the runnable with. * @returns A new instance of the `RunnableEach` class that is bound with the specified arguments. * * @deprecated Use {@link withConfig} instead. 
This will be removed in the next breaking release. */ bind(kwargs) { return new RunnableEach({ bound: this.bound.bind(kwargs) }); } /** * Invokes the runnable with the specified input and configuration. * @param input The input to invoke the runnable with. * @param config The configuration to invoke the runnable with. * @returns A promise that resolves to the output of the runnable. */ async invoke(inputs, config) { return this._callWithConfig(this._invoke.bind(this), inputs, config); } /** * A helper method that is used to invoke the runnable with the specified input and configuration. * @param input The input to invoke the runnable with. * @param config The configuration to invoke the runnable with. * @returns A promise that resolves to the output of the runnable. */ async _invoke(inputs, config, runManager) { return this.bound.batch(inputs, patchConfig(config, { callbacks: runManager?.getChild() })); } /** * Bind lifecycle listeners to a Runnable, returning a new Runnable. * The Run object contains information about the run, including its id, * type, input, output, error, startTime, endTime, and any tags or metadata * added to the run. * * @param {Object} params - The object containing the callback functions. * @param {(run: Run) => void} params.onStart - Called before the runnable starts running, with the Run object. * @param {(run: Run) => void} params.onEnd - Called after the runnable finishes running, with the Run object. * @param {(run: Run) => void} params.onError - Called if the runnable throws an error, with the Run object. */ withListeners({ onStart, onEnd, onError }) { return new RunnableEach({ bound: this.bound.withListeners({ onStart, onEnd, onError }) }); } }; /** * Base class for runnables that can be retried a * specified number of times. * @example * ```typescript * import { * RunnableLambda, * RunnableRetry, * } from "@langchain/core/runnables"; * * // Simulate an API call that fails * const simulateApiCall = (input: string): string => { * console.log(`Attempting API call with input: ${input}`); * throw new Error("API call failed due to network issue"); * }; * * const apiCallLambda = RunnableLambda.from(simulateApiCall); * * // Apply retry logic using the .withRetry() method * const apiCallWithRetry = apiCallLambda.withRetry({ stopAfterAttempt: 3 }); * * // Alternatively, create a RunnableRetry instance manually * const manualRetry = new RunnableRetry({ * bound: apiCallLambda, * maxAttemptNumber: 3, * config: {}, * }); * * // Example invocation using the .withRetry() method * const res = await apiCallWithRetry * .invoke("Request 1") * .catch((error) => { * console.error("Failed after multiple retries:", error.message); * }); * * // Example invocation using the manual retry instance * const res2 = await manualRetry * .invoke("Request 2") * .catch((error) => { * console.error("Failed after multiple retries:", error.message); * }); * ``` */ var RunnableRetry = class extends RunnableBinding { static lc_name() { return "RunnableRetry"; } constructor(fields) { super(fields); Object.defineProperty(this, "lc_namespace", { enumerable: true, configurable: true, writable: true, value: ["langchain_core", "runnables"] }); Object.defineProperty(this, "maxAttemptNumber", { enumerable: true, configurable: true, writable: true, value: 3 }); Object.defineProperty(this, "onFailedAttempt", { enumerable: true, configurable: true, writable: true, value: () => {} }); this.maxAttemptNumber = fields.maxAttemptNumber ?? this.maxAttemptNumber; this.onFailedAttempt = fields.onFailedAttempt ?? 
this.onFailedAttempt; } _patchConfigForRetry(attempt, config, runManager) { const tag = attempt > 1 ? `retry:attempt:${attempt}` : void 0; return patchConfig(config, { callbacks: runManager?.getChild(tag) }); } async _invoke(input, config, runManager) { return (0, import_p_retry.default)((attemptNumber) => super.invoke(input, this._patchConfigForRetry(attemptNumber, config, runManager)), { onFailedAttempt: (error) => this.onFailedAttempt(error, input), retries: Math.max(this.maxAttemptNumber - 1, 0), randomize: true }); } /** * Method that invokes the runnable with the specified input, run manager, * and config. It handles the retry logic by catching any errors and * recursively invoking itself with the updated config for the next retry * attempt. * @param input The input for the runnable. * @param runManager The run manager for the runnable. * @param config The config for the runnable. * @returns A promise that resolves to the output of the runnable. */ async invoke(input, config) { return this._callWithConfig(this._invoke.bind(this), input, config); } async _batch(inputs, configs, runManagers, batchOptions) { const resultsMap = {}; try { await (0, import_p_retry.default)(async (attemptNumber) => { const remainingIndexes = inputs.map((_, i) => i).filter((i) => resultsMap[i.toString()] === void 0 || resultsMap[i.toString()] instanceof Error); const remainingInputs = remainingIndexes.map((i) => inputs[i]); const patchedConfigs = remainingIndexes.map((i) => this._patchConfigForRetry(attemptNumber, configs?.[i], runManagers?.[i])); const results = await super.batch(remainingInputs, patchedConfigs, { ...batchOptions, returnExceptions: true }); let firstException; for (let i = 0; i < results.length; i += 1) { const result = results[i]; const resultMapIndex = remainingIndexes[i]; if (result instanceof Error) { if (firstException === void 0) { firstException = result; firstException.input = remainingInputs[i]; } } resultsMap[resultMapIndex.toString()] = result; } if (firstException) throw firstException; return results; }, { onFailedAttempt: (error) => this.onFailedAttempt(error, error.input), retries: Math.max(this.maxAttemptNumber - 1, 0), randomize: true }); } catch (e) { if (batchOptions?.returnExceptions !== true) throw e; } return Object.keys(resultsMap).sort((a, b) => parseInt(a, 10) - parseInt(b, 10)).map((key) => resultsMap[parseInt(key, 10)]); } async batch(inputs, options, batchOptions) { return this._batchWithConfig(this._batch.bind(this), inputs, options, batchOptions); } }; /** * A sequence of runnables, where the output of each is the input of the next. 
* @example * ```typescript * const promptTemplate = PromptTemplate.fromTemplate( * "Tell me a joke about {topic}", * ); * const chain = RunnableSequence.from([promptTemplate, new ChatOpenAI({})]); * const result = await chain.invoke({ topic: "bears" }); * ``` */ var RunnableSequence = class RunnableSequence extends Runnable { static lc_name() { return "RunnableSequence"; } constructor(fields) { super(fields); Object.defineProperty(this, "first", { enumerable: true, configurable: true, writable: true, value: void 0 }); Object.defineProperty(this, "middle", { enumerable: true, configurable: true, writable: true, value: [] }); Object.defineProperty(this, "last", { enumerable: true, configurable: true, writable: true, value: void 0 }); Object.defineProperty(this, "omitSequenceTags", { enumerable: true, configurable: true, writable: true, value: false }); Object.defineProperty(this, "lc_serializable", { enumerable: true, configurable: true, writable: true, value: true }); Object.defineProperty(this, "lc_namespace", { enumerable: true, configurable: true, writable: true, value: ["langchain_core", "runnables"] }); this.first = fields.first; this.middle = fields.middle ?? this.middle; this.last = fields.last; this.name = fields.name; this.omitSequenceTags = fields.omitSequenceTags ?? this.omitSequenceTags; } get steps() { return [ this.first, ...this.middle, this.last ]; } async invoke(input, options) { const config = ensureConfig(options); const callbackManager_ = await getCallbackManagerForConfig(config); const runManager = await callbackManager_?.handleChainStart(this.toJSON(), _coerceToDict(input, "input"), config.runId, void 0, void 0, void 0, config?.runName); delete config.runId; let nextStepInput = input; let finalOutput; try { const initialSteps = [this.first, ...this.middle]; for (let i = 0; i < initialSteps.length; i += 1) { const step = initialSteps[i]; const promise = step.invoke(nextStepInput, patchConfig(config, { callbacks: runManager?.getChild(this.omitSequenceTags ? void 0 : `seq:step:${i + 1}`) })); nextStepInput = await raceWithSignal(promise, options?.signal); } if (options?.signal?.aborted) throw new Error("Aborted"); finalOutput = await this.last.invoke(nextStepInput, patchConfig(config, { callbacks: runManager?.getChild(this.omitSequenceTags ? void 0 : `seq:step:${this.steps.length}`) })); } catch (e) { await runManager?.handleChainError(e); throw e; } await runManager?.handleChainEnd(_coerceToDict(finalOutput, "output")); return finalOutput; } async batch(inputs, options, batchOptions) { const configList = this._getOptionsList(options ?? {}, inputs.length); const callbackManagers = await Promise.all(configList.map(getCallbackManagerForConfig)); const runManagers = await Promise.all(callbackManagers.map(async (callbackManager, i) => { const handleStartRes = await callbackManager?.handleChainStart(this.toJSON(), _coerceToDict(inputs[i], "input"), configList[i].runId, void 0, void 0, void 0, configList[i].runName); delete configList[i].runId; return handleStartRes; })); let nextStepInputs = inputs; try { for (let i = 0; i < this.steps.length; i += 1) { const step = this.steps[i]; const promise = step.batch(nextStepInputs, runManagers.map((runManager, j) => { const childRunManager = runManager?.getChild(this.omitSequenceTags ? 
void 0 : `seq:step:${i + 1}`); return patchConfig(configList[j], { callbacks: childRunManager }); }), batchOptions); nextStepInputs = await raceWithSignal(promise, configList[0]?.signal); } } catch (e) { await Promise.all(runManagers.map((runManager) => runManager?.handleChainError(e))); throw e; } await Promise.all(runManagers.map((runManager) => runManager?.handleChainEnd(_coerceToDict(nextStepInputs, "output")))); return nextStepInputs; } async *_streamIterator(input, options) { const callbackManager_ = await getCallbackManagerForConfig(options); const { runId,...otherOptions } = options ?? {}; const runManager = await callbackManager_?.handleChainStart(this.toJSON(), _coerceToDict(input, "input"), runId, void 0, void 0, void 0, otherOptions?.runName); const steps = [ this.first, ...this.middle, this.last ]; let concatSupported = true; let finalOutput; async function* inputGenerator() { yield input; } try { let finalGenerator = steps[0].transform(inputGenerator(), patchConfig(otherOptions, { callbacks: runManager?.getChild(this.omitSequenceTags ? void 0 : `seq:step:1`) })); for (let i = 1; i < steps.length; i += 1) { const step = steps[i]; finalGenerator = await step.transform(finalGenerator, patchConfig(otherOptions, { callbacks: runManager?.getChild(this.omitSequenceTags ? void 0 : `seq:step:${i + 1}`) })); } for await (const chunk of finalGenerator) { options?.signal?.throwIfAborted(); yield chunk; if (concatSupported) if (finalOutput === void 0) finalOutput = chunk; else try { finalOutput = concat(finalOutput, chunk); } catch (e) { finalOutput = void 0; concatSupported = false; } } } catch (e) { await runManager?.handleChainError(e); throw e; } await runManager?.handleChainEnd(_coerceToDict(finalOutput, "output")); } getGraph(config) { const graph = new Graph(); let currentLastNode = null; this.steps.forEach((step, index) => { const stepGraph = step.getGraph(config); if (index !== 0) stepGraph.trimFirstNode(); if (index !== this.steps.length - 1) stepGraph.trimLastNode(); graph.extend(stepGraph); const stepFirstNode = stepGraph.firstNode(); if (!stepFirstNode) throw new Error(`Runnable ${step} has no first node`); if (currentLastNode) graph.addEdge(currentLastNode, stepFirstNode); currentLastNode = stepGraph.lastNode(); }); return graph; } pipe(coerceable) { if (RunnableSequence.isRunnableSequence(coerceable)) return new RunnableSequence({ first: this.first, middle: this.middle.concat([ this.last, coerceable.first, ...coerceable.middle ]), last: coerceable.last, name: this.name ?? coerceable.name }); else return new RunnableSequence({ first: this.first, middle: [...this.middle, this.last], last: _coerceToRunnable(coerceable), name: this.name }); } static isRunnableSequence(thing) { return Array.isArray(thing.middle) && Runnable.isRunnable(thing); } static from([first, ...runnables], nameOrFields) { let extra = {}; if (typeof nameOrFields === "string") extra.name = nameOrFields; else if (nameOrFields !== void 0) extra = nameOrFields; return new RunnableSequence({ ...extra, first: _coerceToRunnable(first), middle: runnables.slice(0, -1).map(_coerceToRunnable), last: _coerceToRunnable(runnables[runnables.length - 1]) }); } }; /** * A runnable that runs a mapping of runnables in parallel, * and returns a mapping of their outputs. 
* @example * ```typescript * const mapChain = RunnableMap.from({ * joke: PromptTemplate.fromTemplate("Tell me a joke about {topic}").pipe( * new ChatAnthropic({}), * ), * poem: PromptTemplate.fromTemplate("write a 2-line poem about {topic}").pipe( * new ChatAnthropic({}), * ), * }); * const result = await mapChain.invoke({ topic: "bear" }); * ``` */ var RunnableMap = class RunnableMap extends Runnable { static lc_name() { return "RunnableMap"; } getStepsKeys() { return Object.keys(this.steps); } constructor(fields) { super(fields); Object.defineProperty(this, "lc_namespace", { enumerable: true, configurable: true, writable: true, value: ["langchain_core", "runnables"] }); Object.defineProperty(this, "lc_serializable", { enumerable: true, configurable: true, writable: true, value: true }); Object.defineProperty(this, "steps", { enumerable: true, configurable: true, writable: true, value: void 0 }); this.steps = {}; for (const [key, value] of Object.entries(fields.steps)) this.steps[key] = _coerceToRunnable(value); } static from(steps) { return new RunnableMap({ steps }); } async invoke(input, options) { const config = ensureConfig(options); const callbackManager_ = await getCallbackManagerForConfig(config); const runManager = await callbackManager_?.handleChainStart(this.toJSON(), { input }, config.runId, void 0, void 0, void 0, config?.runName); delete config.runId; const output = {}; try { const promises = Object.entries(this.steps).map(async ([key, runnable]) => { output[key] = await runnable.invoke(input, patchConfig(config, { callbacks: runManager?.getChild(`map:key:${key}`) })); }); await raceWithSignal(Promise.all(promises), options?.signal); } catch (e) { await runManager?.handleChainError(e); throw e; } await runManager?.handleChainEnd(output); return output; } async *_transform(generator, runManager, options) { const steps = { ...this.steps }; const inputCopies = atee(generator, Object.keys(steps).length); const tasks = new Map(Object.entries(steps).map(([key, runnable], i) => { const gen = runnable.transform(inputCopies[i], patchConfig(options, { callbacks: runManager?.getChild(`map:key:${key}`) })); return [key, gen.next().then((result) => ({ key, gen, result }))]; })); while (tasks.size) { const promise = Promise.race(tasks.values()); const { key, result, gen } = await raceWithSignal(promise, options?.signal); tasks.delete(key); if (!result.done) { yield { [key]: result.value }; tasks.set(key, gen.next().then((result$1) => ({ key, gen, result: result$1 }))); } } } transform(generator, options) { return this._transformStreamWithConfig(generator, this._transform.bind(this), options); } async stream(input, options) { async function* generator() { yield input; } const config = ensureConfig(options); const wrappedGenerator = new AsyncGeneratorWithSetup({ generator: this.transform(generator(), config), config }); await wrappedGenerator.setup; return IterableReadableStream.fromAsyncGenerator(wrappedGenerator); } }; /** * A runnable that wraps a traced LangSmith function. 
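 *
 * Illustrative sketch (the "langsmith/traceable" entrypoint and its `name`
 * option are assumptions, not part of this bundle): a function wrapped with
 * `traceable` satisfies isTraceableFunction, so RunnableLambda.from() hands
 * it to RunnableTraceable.from() (see the RunnableLambda constructor below).
 * @example
 * ```typescript
 * import { RunnableLambda } from "@langchain/core/runnables";
 * import { traceable } from "langsmith/traceable";
 *
 * const tracedFn = traceable(async (text: string) => text.length, {
 *   name: "measureLength",
 * });
 * // Returns a RunnableTraceable rather than a plain RunnableLambda.
 * const runnable = RunnableLambda.from(tracedFn);
 * ```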
*/ var RunnableTraceable = class RunnableTraceable extends Runnable { constructor(fields) { super(fields); Object.defineProperty(this, "lc_serializable", { enumerable: true, configurable: true, writable: true, value: false }); Object.defineProperty(this, "lc_namespace", { enumerable: true, configurable: true, writable: true, value: ["langchain_core", "runnables"] }); Object.defineProperty(this, "func", { enumerable: true, configurable: true, writable: true, value: void 0 }); if (!isTraceableFunction(fields.func)) throw new Error("RunnableTraceable requires a function that is wrapped in traceable higher-order function"); this.func = fields.func; } async invoke(input, options) { const [config] = this._getOptionsList(options ?? {}, 1); const callbacks = await getCallbackManagerForConfig(config); const promise = this.func(patchConfig(config, { callbacks }), input); return raceWithSignal(promise, config?.signal); } async *_streamIterator(input, options) { const [config] = this._getOptionsList(options ?? {}, 1); const result = await this.invoke(input, options); if (isAsyncIterable(result)) { for await (const item of result) { config?.signal?.throwIfAborted(); yield item; } return; } if (isIterator(result)) { while (true) { config?.signal?.throwIfAborted(); const state = result.next(); if (state.done) break; yield state.value; } return; } yield result; } static from(func) { return new RunnableTraceable({ func }); } }; function assertNonTraceableFunction(func) { if (isTraceableFunction(func)) throw new Error("RunnableLambda requires a function that is not wrapped in traceable higher-order function. This shouldn't happen."); } /** * A runnable that wraps an arbitrary function that takes a single argument. * @example * ```typescript * import { RunnableLambda } from "@langchain/core/runnables"; * * const add = (input: { x: number; y: number }) => input.x + input.y; * * const multiply = (input: { value: number; multiplier: number }) => * input.value * input.multiplier; * * // Create runnables for the functions * const addLambda = RunnableLambda.from(add); * const multiplyLambda = RunnableLambda.from(multiply); * * // Chain the lambdas for a mathematical operation * const chainedLambda = addLambda.pipe((result) => * multiplyLambda.invoke({ value: result, multiplier: 2 }) * ); * * // Example invocation of the chainedLambda * const result = await chainedLambda.invoke({ x: 2, y: 3 }); * * // Will log "10" (since (2 + 3) * 2 = 10) * ``` */ var RunnableLambda = class RunnableLambda extends Runnable { static lc_name() { return "RunnableLambda"; } constructor(fields) { if (isTraceableFunction(fields.func)) return RunnableTraceable.from(fields.func); super(fields); Object.defineProperty(this, "lc_namespace", { enumerable: true, configurable: true, writable: true, value: ["langchain_core", "runnables"] }); Object.defineProperty(this, "func", { enumerable: true, configurable: true, writable: true, value: void 0 }); assertNonTraceableFunction(fields.func); this.func = fields.func; } static from(func) { return new RunnableLambda({ func }); } async _invoke(input, config, runManager) { return new Promise((resolve, reject) => { const childConfig = patchConfig(config, { callbacks: runManager?.getChild(), recursionLimit: (config?.recursionLimit ?? 
DEFAULT_RECURSION_LIMIT) - 1 }); AsyncLocalStorageProviderSingleton.runWithConfig(pickRunnableConfigKeys(childConfig), async () => { try { let output = await this.func(input, { ...childConfig }); if (output && Runnable.isRunnable(output)) { if (config?.recursionLimit === 0) throw new Error("Recursion limit reached."); output = await output.invoke(input, { ...childConfig, recursionLimit: (childConfig.recursionLimit ?? DEFAULT_RECURSION_LIMIT) - 1 }); } else if (isAsyncIterable(output)) { let finalOutput; for await (const chunk of consumeAsyncIterableInContext(childConfig, output)) { config?.signal?.throwIfAborted(); if (finalOutput === void 0) finalOutput = chunk; else try { finalOutput = concat(finalOutput, chunk); } catch (e) { finalOutput = chunk; } } output = finalOutput; } else if (isIterableIterator(output)) { let finalOutput; for (const chunk of consumeIteratorInContext(childConfig, output)) { config?.signal?.throwIfAborted(); if (finalOutput === void 0) finalOutput = chunk; else try { finalOutput = concat(finalOutput, chunk); } catch (e) { finalOutput = chunk; } } output = finalOutput; } resolve(output); } catch (e) { reject(e); } }); }); } async invoke(input, options) { return this._callWithConfig(this._invoke.bind(this), input, options); } async *_transform(generator, runManager, config) { let finalChunk; for await (const chunk of generator) if (finalChunk === void 0) finalChunk = chunk; else try { finalChunk = concat(finalChunk, chunk); } catch (e) { finalChunk = chunk; } const childConfig = patchConfig(config, { callbacks: runManager?.getChild(), recursionLimit: (config?.recursionLimit ?? DEFAULT_RECURSION_LIMIT) - 1 }); const output = await new Promise((resolve, reject) => { AsyncLocalStorageProviderSingleton.runWithConfig(pickRunnableConfigKeys(childConfig), async () => { try { const res = await this.func(finalChunk, { ...childConfig, config: childConfig }); resolve(res); } catch (e) { reject(e); } }); }); if (output && Runnable.isRunnable(output)) { if (config?.recursionLimit === 0) throw new Error("Recursion limit reached."); const stream = await output.stream(finalChunk, childConfig); for await (const chunk of stream) yield chunk; } else if (isAsyncIterable(output)) for await (const chunk of consumeAsyncIterableInContext(childConfig, output)) { config?.signal?.throwIfAborted(); yield chunk; } else if (isIterableIterator(output)) for (const chunk of consumeIteratorInContext(childConfig, output)) { config?.signal?.throwIfAborted(); yield chunk; } else yield output; } transform(generator, options) { return this._transformStreamWithConfig(generator, this._transform.bind(this), options); } async stream(input, options) { async function* generator() { yield input; } const config = ensureConfig(options); const wrappedGenerator = new AsyncGeneratorWithSetup({ generator: this.transform(generator(), config), config }); await wrappedGenerator.setup; return IterableReadableStream.fromAsyncGenerator(wrappedGenerator); } }; /** * A runnable that runs a mapping of runnables in parallel, * and returns a mapping of their outputs. 
* @example * ```typescript * import { * RunnableLambda, * RunnableParallel, * } from "@langchain/core/runnables"; * * const addYears = (age: number): number => age + 5; * const yearsToFifty = (age: number): number => 50 - age; * const yearsToHundred = (age: number): number => 100 - age; * * const addYearsLambda = RunnableLambda.from(addYears); * const milestoneFiftyLambda = RunnableLambda.from(yearsToFifty); * const milestoneHundredLambda = RunnableLambda.from(yearsToHundred); * * // Pipe will coerce objects into RunnableParallel by default, but we * // explicitly instantiate one here to demonstrate * const sequence = addYearsLambda.pipe( * RunnableParallel.from({ * years_to_fifty: milestoneFiftyLambda, * years_to_hundred: milestoneHundredLambda, * }) * ); * * // Invoke the sequence with a single age input * const res = await sequence.invoke(25); * * // { years_to_fifty: 20, years_to_hundred: 70 } * ``` */ var RunnableParallel = class extends RunnableMap {}; /** * A Runnable that can fall back to other Runnables if it fails. * External APIs (e.g., APIs for a language model) may at times experience * degraded performance or even downtime. * * In these cases, it can be useful to have a fallback Runnable that can be * used in place of the original Runnable (e.g., fallback to another LLM provider). * * Fallbacks can be defined at the level of a single Runnable, or at the level * of a chain of Runnables. Fallbacks are tried in order until one succeeds or * all fail. * * While you can instantiate a `RunnableWithFallbacks` directly, it is usually * more convenient to use the `withFallbacks` method on an existing Runnable. * * When streaming, fallbacks will only be called on failures during the initial * stream creation. Errors that occur after a stream starts will not fall back * to the next Runnable. 
* * @example * ```typescript * import { * RunnableLambda, * RunnableWithFallbacks, * } from "@langchain/core/runnables"; * * const primaryOperation = (input: string): string => { * if (input !== "safe") { * throw new Error("Primary operation failed due to unsafe input"); * } * return `Processed: ${input}`; * }; * * // Define a fallback operation that processes the input differently * const fallbackOperation = (input: string): string => * `Fallback processed: ${input}`; * * const primaryRunnable = RunnableLambda.from(primaryOperation); * const fallbackRunnable = RunnableLambda.from(fallbackOperation); * * // Apply the fallback logic using the .withFallbacks() method * const runnableWithFallback = primaryRunnable.withFallbacks([fallbackRunnable]); * * // Alternatively, create a RunnableWithFallbacks instance manually * const manualFallbackChain = new RunnableWithFallbacks({ * runnable: primaryRunnable, * fallbacks: [fallbackRunnable], * }); * * // Example invocation using .withFallbacks() * const res = await runnableWithFallback * .invoke("unsafe input") * .catch((error) => { * console.error("Failed after all attempts:", error.message); * }); * * // "Fallback processed: unsafe input" * * // Example invocation using manual instantiation * const res = await manualFallbackChain * .invoke("safe") * .catch((error) => { * console.error("Failed after all attempts:", error.message); * }); * * // "Processed: safe" * ``` */ var RunnableWithFallbacks = class extends Runnable { static lc_name() { return "RunnableWithFallbacks"; } constructor(fields) { super(fields); Object.defineProperty(this, "lc_namespace", { enumerable: true, configurable: true, writable: true, value: ["langchain_core", "runnables"] }); Object.defineProperty(this, "lc_serializable", { enumerable: true, configurable: true, writable: true, value: true }); Object.defineProperty(this, "runnable", { enumerable: true, configurable: true, writable: true, value: void 0 }); Object.defineProperty(this, "fallbacks", { enumerable: true, configurable: true, writable: true, value: void 0 }); this.runnable = fields.runnable; this.fallbacks = fields.fallbacks; } *runnables() { yield this.runnable; for (const fallback of this.fallbacks) yield fallback; } async invoke(input, options) { const config = ensureConfig(options); const callbackManager_ = await getCallbackManagerForConfig(config); const { runId,...otherConfigFields } = config; const runManager = await callbackManager_?.handleChainStart(this.toJSON(), _coerceToDict(input, "input"), runId, void 0, void 0, void 0, otherConfigFields?.runName); const childConfig = patchConfig(otherConfigFields, { callbacks: runManager?.getChild() }); const res = await AsyncLocalStorageProviderSingleton.runWithConfig(childConfig, async () => { let firstError; for (const runnable of this.runnables()) { config?.signal?.throwIfAborted(); try { const output = await runnable.invoke(input, childConfig); await runManager?.handleChainEnd(_coerceToDict(output, "output")); return output; } catch (e) { if (firstError === void 0) firstError = e; } } if (firstError === void 0) throw new Error("No error stored at end of fallback."); await runManager?.handleChainError(firstError); throw firstError; }); return res; } async *_streamIterator(input, options) { const config = ensureConfig(options); const callbackManager_ = await getCallbackManagerForConfig(config); const { runId,...otherConfigFields } = config; const runManager = await callbackManager_?.handleChainStart(this.toJSON(), _coerceToDict(input, "input"), runId, void 0, void 
0, void 0, otherConfigFields?.runName); let firstError; let stream; for (const runnable of this.runnables()) { config?.signal?.throwIfAborted(); const childConfig = patchConfig(otherConfigFields, { callbacks: runManager?.getChild() }); try { const originalStream = await runnable.stream(input, childConfig); stream = consumeAsyncIterableInContext(childConfig, originalStream); break; } catch (e) { if (firstError === void 0) firstError = e; } } if (stream === void 0) { const error = firstError ?? new Error("No error stored at end of fallback."); await runManager?.handleChainError(error); throw error; } let output; try { for await (const chunk of stream) { yield chunk; try { output = output === void 0 ? output : concat(output, chunk); } catch (e) { output = void 0; } } } catch (e) { await runManager?.handleChainError(e); throw e; } await runManager?.handleChainEnd(_coerceToDict(output, "output")); } async batch(inputs, options, batchOptions) { if (batchOptions?.returnExceptions) throw new Error("Not implemented."); const configList = this._getOptionsList(options ?? {}, inputs.length); const callbackManagers = await Promise.all(configList.map((config) => getCallbackManagerForConfig(config))); const runManagers = await Promise.all(callbackManagers.map(async (callbackManager, i) => { const handleStartRes = await callbackManager?.handleChainStart(this.toJSON(), _coerceToDict(inputs[i], "input"), configList[i].runId, void 0, void 0, void 0, configList[i].runName); delete configList[i].runId; return handleStartRes; })); let firstError; for (const runnable of this.runnables()) { configList[0].signal?.throwIfAborted(); try { const outputs = await runnable.batch(inputs, runManagers.map((runManager, j) => patchConfig(configList[j], { callbacks: runManager?.getChild() })), batchOptions); await Promise.all(runManagers.map((runManager, i) => runManager?.handleChainEnd(_coerceToDict(outputs[i], "output")))); return outputs; } catch (e) { if (firstError === void 0) firstError = e; } } if (!firstError) throw new Error("No error stored at end of fallbacks."); await Promise.all(runManagers.map((runManager) => runManager?.handleChainError(firstError))); throw firstError; } }; function _coerceToRunnable(coerceable) { if (typeof coerceable === "function") return new RunnableLambda({ func: coerceable }); else if (Runnable.isRunnable(coerceable)) return coerceable; else if (!Array.isArray(coerceable) && typeof coerceable === "object") { const runnables = {}; for (const [key, value] of Object.entries(coerceable)) runnables[key] = _coerceToRunnable(value); return new RunnableMap({ steps: runnables }); } else throw new Error(`Expected a Runnable, function or object.\nInstead got an unsupported type.`); } /** * A runnable that assigns key-value pairs to inputs of type `Record`. 
* @example * ```typescript * import { * RunnableAssign, * RunnableLambda, * RunnableParallel, * } from "@langchain/core/runnables"; * * const calculateAge = (x: { birthYear: number }): { age: number } => { * const currentYear = new Date().getFullYear(); * return { age: currentYear - x.birthYear }; * }; * * const createGreeting = (x: { name: string }): { greeting: string } => { * return { greeting: `Hello, ${x.name}!` }; * }; * * const mapper = RunnableParallel.from({ * age_step: RunnableLambda.from(calculateAge), * greeting_step: RunnableLambda.from(createGreeting), * }); * * const runnableAssign = new RunnableAssign({ mapper }); * * const res = await runnableAssign.invoke({ name: "Alice", birthYear: 1990 }); * * // { name: "Alice", birthYear: 1990, age_step: { age: 34 }, greeting_step: { greeting: "Hello, Alice!" } } * ``` */ var RunnableAssign = class extends Runnable { static lc_name() { return "RunnableAssign"; } constructor(fields) { if (fields instanceof RunnableMap) fields = { mapper: fields }; super(fields); Object.defineProperty(this, "lc_namespace", { enumerable: true, configurable: true, writable: true, value: ["langchain_core", "runnables"] }); Object.defineProperty(this, "lc_serializable", { enumerable: true, configurable: true, writable: true, value: true }); Object.defineProperty(this, "mapper", { enumerable: true, configurable: true, writable: true, value: void 0 }); this.mapper = fields.mapper; } async invoke(input, options) { const mapperResult = await this.mapper.invoke(input, options); return { ...input, ...mapperResult }; } async *_transform(generator, runManager, options) { const mapperKeys = this.mapper.getStepsKeys(); const [forPassthrough, forMapper] = atee(generator); const mapperOutput = this.mapper.transform(forMapper, patchConfig(options, { callbacks: runManager?.getChild() })); const firstMapperChunkPromise = mapperOutput.next(); for await (const chunk of forPassthrough) { if (typeof chunk !== "object" || Array.isArray(chunk)) throw new Error(`RunnableAssign can only be used with objects as input, got ${typeof chunk}`); const filtered = Object.fromEntries(Object.entries(chunk).filter(([key]) => !mapperKeys.includes(key))); if (Object.keys(filtered).length > 0) yield filtered; } yield (await firstMapperChunkPromise).value; for await (const chunk of mapperOutput) yield chunk; } transform(generator, options) { return this._transformStreamWithConfig(generator, this._transform.bind(this), options); } async stream(input, options) { async function* generator() { yield input; } const config = ensureConfig(options); const wrappedGenerator = new AsyncGeneratorWithSetup({ generator: this.transform(generator(), config), config }); await wrappedGenerator.setup; return IterableReadableStream.fromAsyncGenerator(wrappedGenerator); } }; /** * A runnable that assigns key-value pairs to inputs of type `Record`. * Useful for streaming, can be automatically created and chained by calling `runnable.pick();`. 
* @example * ```typescript * import { RunnablePick } from "@langchain/core/runnables"; * * const inputData = { * name: "John", * age: 30, * city: "New York", * country: "USA", * email: "john.doe@example.com", * phone: "+1234567890", * }; * * const basicInfoRunnable = new RunnablePick(["name", "city"]); * * // Example invocation * const res = await basicInfoRunnable.invoke(inputData); * * // { name: 'John', city: 'New York' } * ``` */ var RunnablePick = class extends Runnable { static lc_name() { return "RunnablePick"; } constructor(fields) { if (typeof fields === "string" || Array.isArray(fields)) fields = { keys: fields }; super(fields); Object.defineProperty(this, "lc_namespace", { enumerable: true, configurable: true, writable: true, value: ["langchain_core", "runnables"] }); Object.defineProperty(this, "lc_serializable", { enumerable: true, configurable: true, writable: true, value: true }); Object.defineProperty(this, "keys", { enumerable: true, configurable: true, writable: true, value: void 0 }); this.keys = fields.keys; } async _pick(input) { if (typeof this.keys === "string") return input[this.keys]; else { const picked = this.keys.map((key) => [key, input[key]]).filter((v) => v[1] !== void 0); return picked.length === 0 ? void 0 : Object.fromEntries(picked); } } async invoke(input, options) { return this._callWithConfig(this._pick.bind(this), input, options); } async *_transform(generator) { for await (const chunk of generator) { const picked = await this._pick(chunk); if (picked !== void 0) yield picked; } } transform(generator, options) { return this._transformStreamWithConfig(generator, this._transform.bind(this), options); } async stream(input, options) { async function* generator() { yield input; } const config = ensureConfig(options); const wrappedGenerator = new AsyncGeneratorWithSetup({ generator: this.transform(generator(), config), config }); await wrappedGenerator.setup; return IterableReadableStream.fromAsyncGenerator(wrappedGenerator); } }; var RunnableToolLike = class extends RunnableBinding { constructor(fields) { const sequence = RunnableSequence.from([RunnableLambda.from(async (input) => { let toolInput; if (_isToolCall(input)) try { toolInput = await this.schema.parseAsync(input.args); } catch (e) { throw new ToolInputParsingException(`Received tool input did not match expected schema`, JSON.stringify(input.args)); } else toolInput = input; return toolInput; }).withConfig({ runName: `${fields.name}:parse_input` }), fields.bound]).withConfig({ runName: fields.name }); super({ bound: sequence, config: fields.config ?? {} }); Object.defineProperty(this, "name", { enumerable: true, configurable: true, writable: true, value: void 0 }); Object.defineProperty(this, "description", { enumerable: true, configurable: true, writable: true, value: void 0 }); Object.defineProperty(this, "schema", { enumerable: true, configurable: true, writable: true, value: void 0 }); this.name = fields.name; this.description = fields.description; this.schema = fields.schema; } static lc_name() { return "RunnableToolLike"; } }; /** * Given a runnable and a Zod schema, convert the runnable to a tool. * * @template RunInput The input type for the runnable. * @template RunOutput The output type for the runnable. * * @param {Runnable} runnable The runnable to convert to a tool. * @param fields * @param {string | undefined} [fields.name] The name of the tool. If not provided, it will default to the name of the runnable. 
* @param {string | undefined} [fields.description] The description of the tool. Falls back to the description on the Zod schema if not provided, or undefined if neither are provided. * @param {z.ZodType} [fields.schema] The Zod schema for the input of the tool. Infers the Zod type from the input type of the runnable. * @returns {RunnableToolLike<z.ZodType<RunInput>, RunOutput>} An instance of `RunnableToolLike` which is a runnable that can be used as a tool. */ function convertRunnableToTool(runnable, fields) { const name = fields.name ?? runnable.getName(); const description = fields.description ?? fields.schema?.description; if (fields.schema.constructor === ZodString) return new RunnableToolLike({ name, description, schema: objectType({ input: stringType() }).transform((input) => input.input), bound: runnable }); return new RunnableToolLike({ name, description, schema: fields.schema, bound: runnable }); } //#endregion //#region node_modules/.pnpm/@langchain+core@0.3.57_openai@5.3.0_ws@8.18.2_zod@3.25.57_/node_modules/@langchain/core/dist/prompt_values.js var prompt_values_exports = {}; __export(prompt_values_exports, { BasePromptValue: () => BasePromptValue, ChatPromptValue: () => ChatPromptValue, ImagePromptValue: () => ImagePromptValue, StringPromptValue: () => StringPromptValue }); /** * Base PromptValue class. All prompt values should extend this class. */ var BasePromptValue = class extends Serializable {}; /** * Represents a prompt value as a string. It extends the BasePromptValue * class and overrides the toString and toChatMessages methods. */ var StringPromptValue = class extends BasePromptValue { static lc_name() { return "StringPromptValue"; } constructor(value) { super({ value }); Object.defineProperty(this, "lc_namespace", { enumerable: true, configurable: true, writable: true, value: ["langchain_core", "prompt_values"] }); Object.defineProperty(this, "lc_serializable", { enumerable: true, configurable: true, writable: true, value: true }); Object.defineProperty(this, "value", { enumerable: true, configurable: true, writable: true, value: void 0 }); this.value = value; } toString() { return this.value; } toChatMessages() { return [new HumanMessage(this.value)]; } }; /** * Class that represents a chat prompt value. It extends the * BasePromptValue and includes an array of BaseMessage instances. */ var ChatPromptValue = class extends BasePromptValue { static lc_name() { return "ChatPromptValue"; } constructor(fields) { if (Array.isArray(fields)) fields = { messages: fields }; super(fields); Object.defineProperty(this, "lc_namespace", { enumerable: true, configurable: true, writable: true, value: ["langchain_core", "prompt_values"] }); Object.defineProperty(this, "lc_serializable", { enumerable: true, configurable: true, writable: true, value: true }); Object.defineProperty(this, "messages", { enumerable: true, configurable: true, writable: true, value: void 0 }); this.messages = fields.messages; } toString() { return getBufferString(this.messages); } toChatMessages() { return this.messages; } }; /** * Class that represents an image prompt value. It extends the * BasePromptValue and includes an ImageURL instance. 
*/ var ImagePromptValue = class extends BasePromptValue { static lc_name() { return "ImagePromptValue"; } constructor(fields) { if (!("imageUrl" in fields)) fields = { imageUrl: fields }; super(fields); Object.defineProperty(this, "lc_namespace", { enumerable: true, configurable: true, writable: true, value: ["langchain_core", "prompt_values"] }); Object.defineProperty(this, "lc_serializable", { enumerable: true, configurable: true, writable: true, value: true }); Object.defineProperty(this, "imageUrl", { enumerable: true, configurable: true, writable: true, value: void 0 }); /** @ignore */ Object.defineProperty(this, "value", { enumerable: true, configurable: true, writable: true, value: void 0 }); this.imageUrl = fields.imageUrl; } toString() { return this.imageUrl.url; } toChatMessages() { return [new HumanMessage({ content: [{ type: "image_url", image_url: { detail: this.imageUrl.detail, url: this.imageUrl.url } }] })]; } }; //#endregion //#region node_modules/.pnpm/@langchain+core@0.3.57_openai@5.3.0_ws@8.18.2_zod@3.25.57_/node_modules/@langchain/core/dist/prompts/base.js /** * Base class for prompt templates. Exposes a format method that returns a * string prompt given a set of input values. */ var BasePromptTemplate = class extends Runnable { get lc_attributes() { return { partialVariables: void 0 }; } constructor(input) { super(input); Object.defineProperty(this, "lc_serializable", { enumerable: true, configurable: true, writable: true, value: true }); Object.defineProperty(this, "lc_namespace", { enumerable: true, configurable: true, writable: true, value: [ "langchain_core", "prompts", this._getPromptType() ] }); Object.defineProperty(this, "inputVariables", { enumerable: true, configurable: true, writable: true, value: void 0 }); Object.defineProperty(this, "outputParser", { enumerable: true, configurable: true, writable: true, value: void 0 }); Object.defineProperty(this, "partialVariables", { enumerable: true, configurable: true, writable: true, value: void 0 }); /** * Metadata to be used for tracing. */ Object.defineProperty(this, "metadata", { enumerable: true, configurable: true, writable: true, value: void 0 }); /** Tags to be used for tracing. */ Object.defineProperty(this, "tags", { enumerable: true, configurable: true, writable: true, value: void 0 }); const { inputVariables } = input; if (inputVariables.includes("stop")) throw new Error("Cannot have an input variable named 'stop', as it is used internally, please rename."); Object.assign(this, input); } /** * Merges partial variables and user variables. * @param userVariables The user variables to merge with the partial variables. * @returns A Promise that resolves to an object containing the merged variables. */ async mergePartialAndUserVariables(userVariables) { const partialVariables = this.partialVariables ?? {}; const partialValues = {}; for (const [key, value] of Object.entries(partialVariables)) if (typeof value === "string") partialValues[key] = value; else partialValues[key] = await value(); const allKwargs = { ...partialValues, ...userVariables }; return allKwargs; } /** * Invokes the prompt template with the given input and options. * @param input The input to invoke the prompt template with. * @param options Optional configuration for the callback. * @returns A Promise that resolves to the output of the prompt template. */ async invoke(input, options) { const metadata = { ...this.metadata, ...options?.metadata }; const tags = [...this.tags ?? [], ...options?.tags ?? 
[]]; return this._callWithConfig((input$1) => this.formatPromptValue(input$1), input, { ...options, tags, metadata, runType: "prompt" }); } /** * Return a json-like object representing this prompt template. * @deprecated */ serialize() { throw new Error("Use .toJSON() instead"); } /** * @deprecated * Load a prompt template from a json-like object describing it. * * @remarks * Deserializing needs to be async because templates (e.g. {@link FewShotPromptTemplate}) can * reference remote resources that we read asynchronously with a web * request. */ static async deserialize(data) { switch (data._type) { case "prompt": { const { PromptTemplate: PromptTemplate$1 } = await import("./prompt-DoC83bWw.mjs"); return PromptTemplate$1.deserialize(data); } case void 0: { const { PromptTemplate: PromptTemplate$1 } = await import("./prompt-DoC83bWw.mjs"); return PromptTemplate$1.deserialize({ ...data, _type: "prompt" }); } case "few_shot": { const { FewShotPromptTemplate } = await import("./few_shot-C9MZ9K-V.mjs"); return FewShotPromptTemplate.deserialize(data); } default: throw new Error(`Invalid prompt type in config: ${data._type}`); } } }; //#endregion //#region node_modules/.pnpm/@langchain+core@0.3.57_openai@5.3.0_ws@8.18.2_zod@3.25.57_/node_modules/@langchain/core/dist/prompts/string.js /** * Base class for string prompt templates. It extends the * BasePromptTemplate class and overrides the formatPromptValue method to * return a StringPromptValue. */ var BaseStringPromptTemplate = class extends BasePromptTemplate { /** * Formats the prompt given the input values and returns a formatted * prompt value. * @param values The input values to format the prompt. * @returns A Promise that resolves to a formatted prompt value. */ async formatPromptValue(values) { const formattedPrompt = await this.format(values); return new StringPromptValue(formattedPrompt); } }; //#endregion //#region node_modules/.pnpm/mustache@4.2.0/node_modules/mustache/mustache.mjs /*! * mustache.js - Logic-less {{mustache}} templates with JavaScript * http://github.com/janl/mustache.js */ var objectToString = Object.prototype.toString; var isArray = Array.isArray || function isArrayPolyfill(object) { return objectToString.call(object) === "[object Array]"; }; function isFunction(object) { return typeof object === "function"; } /** * More correct typeof string handling array * which normally returns typeof 'object' */ function typeStr(obj) { return isArray(obj) ? 
"array" : typeof obj; } function escapeRegExp(string) { return string.replace(/[\-\[\]{}()*+?.,\\\^$|#\s]/g, "\\$&"); } /** * Null safe way of checking whether or not an object, * including its prototype, has a given property */ function hasProperty(obj, propName) { return obj != null && typeof obj === "object" && propName in obj; } /** * Safe way of detecting whether or not the given thing is a primitive and * whether it has the given property */ function primitiveHasOwnProperty(primitive, propName) { return primitive != null && typeof primitive !== "object" && primitive.hasOwnProperty && primitive.hasOwnProperty(propName); } var regExpTest = RegExp.prototype.test; function testRegExp(re$5, string) { return regExpTest.call(re$5, string); } var nonSpaceRe = /\S/; function isWhitespace(string) { return !testRegExp(nonSpaceRe, string); } var entityMap = { "&": "&", "<": "<", ">": ">", "\"": """, "'": "'", "/": "/", "`": "`", "=": "=" }; function escapeHtml(string) { return String(string).replace(/[&<>"'`=\/]/g, function fromEntityMap(s) { return entityMap[s]; }); } var whiteRe = /\s*/; var spaceRe = /\s+/; var equalsRe = /\s*=/; var curlyRe = /\s*\}/; var tagRe = /#|\^|\/|>|\{|&|=|!/; /** * Breaks up the given `template` string into a tree of tokens. If the `tags` * argument is given here it must be an array with two string values: the * opening and closing tags used in the template (e.g. [ "<%", "%>" ]). Of * course, the default is to use mustaches (i.e. mustache.tags). * * A token is an array with at least 4 elements. The first element is the * mustache symbol that was used inside the tag, e.g. "#" or "&". If the tag * did not contain a symbol (i.e. {{myValue}}) this element is "name". For * all text that appears outside a symbol this element is "text". * * The second element of a token is its "value". For mustache tags this is * whatever else was inside the tag besides the opening symbol. For text tokens * this is the text itself. * * The third and fourth elements of the token are the start and end indices, * respectively, of the token in the original template. * * Tokens that are the root node of a subtree contain two more elements: 1) an * array of tokens in the subtree and 2) the index in the original template at * which the closing tag for that section begins. * * Tokens for partials also contain two more elements: 1) a string value of * indendation prior to that tag and 2) the index of that tag on that line - * eg a value of 2 indicates the partial is the third tag on this line. 
*/ function parseTemplate$1(template, tags) { if (!template) return []; var lineHasNonSpace = false; var sections = []; var tokens = []; var spaces = []; var hasTag = false; var nonSpace = false; var indentation = ""; var tagIndex = 0; function stripSpace() { if (hasTag && !nonSpace) while (spaces.length) delete tokens[spaces.pop()]; else spaces = []; hasTag = false; nonSpace = false; } var openingTagRe, closingTagRe, closingCurlyRe; function compileTags(tagsToCompile) { if (typeof tagsToCompile === "string") tagsToCompile = tagsToCompile.split(spaceRe, 2); if (!isArray(tagsToCompile) || tagsToCompile.length !== 2) throw new Error("Invalid tags: " + tagsToCompile); openingTagRe = new RegExp(escapeRegExp(tagsToCompile[0]) + "\\s*"); closingTagRe = new RegExp("\\s*" + escapeRegExp(tagsToCompile[1])); closingCurlyRe = new RegExp("\\s*" + escapeRegExp("}" + tagsToCompile[1])); } compileTags(tags || mustache.tags); var scanner = new Scanner(template); var start, type, value, chr, token, openSection; while (!scanner.eos()) { start = scanner.pos; value = scanner.scanUntil(openingTagRe); if (value) for (var i = 0, valueLength = value.length; i < valueLength; ++i) { chr = value.charAt(i); if (isWhitespace(chr)) { spaces.push(tokens.length); indentation += chr; } else { nonSpace = true; lineHasNonSpace = true; indentation += " "; } tokens.push([ "text", chr, start, start + 1 ]); start += 1; if (chr === "\n") { stripSpace(); indentation = ""; tagIndex = 0; lineHasNonSpace = false; } } if (!scanner.scan(openingTagRe)) break; hasTag = true; type = scanner.scan(tagRe) || "name"; scanner.scan(whiteRe); if (type === "=") { value = scanner.scanUntil(equalsRe); scanner.scan(equalsRe); scanner.scanUntil(closingTagRe); } else if (type === "{") { value = scanner.scanUntil(closingCurlyRe); scanner.scan(curlyRe); scanner.scanUntil(closingTagRe); type = "&"; } else value = scanner.scanUntil(closingTagRe); if (!scanner.scan(closingTagRe)) throw new Error("Unclosed tag at " + scanner.pos); if (type == ">") token = [ type, value, start, scanner.pos, indentation, tagIndex, lineHasNonSpace ]; else token = [ type, value, start, scanner.pos ]; tagIndex++; tokens.push(token); if (type === "#" || type === "^") sections.push(token); else if (type === "/") { openSection = sections.pop(); if (!openSection) throw new Error("Unopened section \"" + value + "\" at " + start); if (openSection[1] !== value) throw new Error("Unclosed section \"" + openSection[1] + "\" at " + start); } else if (type === "name" || type === "{" || type === "&") nonSpace = true; else if (type === "=") compileTags(value); } stripSpace(); openSection = sections.pop(); if (openSection) throw new Error("Unclosed section \"" + openSection[1] + "\" at " + scanner.pos); return nestTokens(squashTokens(tokens)); } /** * Combines the values of consecutive text tokens in the given `tokens` array * to a single token. */ function squashTokens(tokens) { var squashedTokens = []; var token, lastToken; for (var i = 0, numTokens = tokens.length; i < numTokens; ++i) { token = tokens[i]; if (token) if (token[0] === "text" && lastToken && lastToken[0] === "text") { lastToken[1] += token[1]; lastToken[3] = token[3]; } else { squashedTokens.push(token); lastToken = token; } } return squashedTokens; } /** * Forms the given array of `tokens` into a nested tree structure where * tokens that represent a section have two additional items: 1) an array of * all tokens that appear in that section and 2) the index in the original * template that represents the end of that section. 
*/ function nestTokens(tokens) { var nestedTokens = []; var collector = nestedTokens; var sections = []; var token, section; for (var i = 0, numTokens = tokens.length; i < numTokens; ++i) { token = tokens[i]; switch (token[0]) { case "#": case "^": collector.push(token); sections.push(token); collector = token[4] = []; break; case "/": section = sections.pop(); section[5] = token[2]; collector = sections.length > 0 ? sections[sections.length - 1][4] : nestedTokens; break; default: collector.push(token); } } return nestedTokens; } /** * A simple string scanner that is used by the template parser to find * tokens in template strings. */ function Scanner(string) { this.string = string; this.tail = string; this.pos = 0; } /** * Returns `true` if the tail is empty (end of string). */ Scanner.prototype.eos = function eos() { return this.tail === ""; }; /** * Tries to match the given regular expression at the current position. * Returns the matched text if it can match, the empty string otherwise. */ Scanner.prototype.scan = function scan(re$5) { var match = this.tail.match(re$5); if (!match || match.index !== 0) return ""; var string = match[0]; this.tail = this.tail.substring(string.length); this.pos += string.length; return string; }; /** * Skips all text until the given regular expression can be matched. Returns * the skipped string, which is the entire tail if no match can be made. */ Scanner.prototype.scanUntil = function scanUntil(re$5) { var index = this.tail.search(re$5), match; switch (index) { case -1: match = this.tail; this.tail = ""; break; case 0: match = ""; break; default: match = this.tail.substring(0, index); this.tail = this.tail.substring(index); } this.pos += match.length; return match; }; /** * Represents a rendering context by wrapping a view object and * maintaining a reference to the parent context. */ function Context(view, parentContext) { this.view = view; this.cache = { ".": this.view }; this.parent = parentContext; } /** * Creates a new context using the given view with this context * as the parent. */ Context.prototype.push = function push(view) { return new Context(view, this); }; /** * Returns the value of the given name in this context, traversing * up the context hierarchy if the value is absent in this context's view. */ Context.prototype.lookup = function lookup(name) { var cache$1 = this.cache; var value; if (cache$1.hasOwnProperty(name)) value = cache$1[name]; else { var context = this, intermediateValue, names, index, lookupHit = false; while (context) { if (name.indexOf(".") > 0) { intermediateValue = context.view; names = name.split("."); index = 0; /** * Using the dot notion path in `name`, we descend through the * nested objects. * * To be certain that the lookup has been successful, we have to * check if the last object in the path actually has the property * we are looking for. We store the result in `lookupHit`. * * This is specially necessary for when the value has been set to * `undefined` and we want to avoid looking up parent contexts. * * In the case where dot notation is used, we consider the lookup * to be successful even if the last "object" in the path is * not actually an object but a primitive (e.g., a string, or an * integer), because it is sometimes useful to access a property * of an autoboxed primitive, such as the length of a string. 
**/ while (intermediateValue != null && index < names.length) { if (index === names.length - 1) lookupHit = hasProperty(intermediateValue, names[index]) || primitiveHasOwnProperty(intermediateValue, names[index]); intermediateValue = intermediateValue[names[index++]]; } } else { intermediateValue = context.view[name]; /** * Only checking against `hasProperty`, which always returns `false` if * `context.view` is not an object. Deliberately omitting the check * against `primitiveHasOwnProperty` if dot notation is not used. * * Consider this example: * ``` * Mustache.render("The length of a football field is {{#length}}{{length}}{{/length}}.", {length: "100 yards"}) * ``` * * If we were to check also against `primitiveHasOwnProperty`, as we do * in the dot notation case, then render call would return: * * "The length of a football field is 9." * * rather than the expected: * * "The length of a football field is 100 yards." **/ lookupHit = hasProperty(context.view, name); } if (lookupHit) { value = intermediateValue; break; } context = context.parent; } cache$1[name] = value; } if (isFunction(value)) value = value.call(this.view); return value; }; /** * A Writer knows how to take a stream of tokens and render them to a * string, given a context. It also maintains a cache of templates to * avoid the need to parse the same template twice. */ function Writer() { this.templateCache = { _cache: {}, set: function set(key, value) { this._cache[key] = value; }, get: function get(key) { return this._cache[key]; }, clear: function clear() { this._cache = {}; } }; } /** * Clears all cached templates in this writer. */ Writer.prototype.clearCache = function clearCache() { if (typeof this.templateCache !== "undefined") this.templateCache.clear(); }; /** * Parses and caches the given `template` according to the given `tags` or * `mustache.tags` if `tags` is omitted, and returns the array of tokens * that is generated from the parse. */ Writer.prototype.parse = function parse$7(template, tags) { var cache$1 = this.templateCache; var cacheKey = template + ":" + (tags || mustache.tags).join(":"); var isCacheEnabled = typeof cache$1 !== "undefined"; var tokens = isCacheEnabled ? cache$1.get(cacheKey) : void 0; if (tokens == void 0) { tokens = parseTemplate$1(template, tags); isCacheEnabled && cache$1.set(cacheKey, tokens); } return tokens; }; /** * High-level method that is used to render the given `template` with * the given `view`. * * The optional `partials` argument may be an object that contains the * names and templates of partials that are used in the template. It may * also be a function that is used to load partial templates on the fly * that takes a single argument: the name of the partial. * * If the optional `config` argument is given here, then it should be an * object with a `tags` attribute or an `escape` attribute or both. * If an array is passed, then it will be interpreted the same way as * a `tags` attribute on a `config` object. * * The `tags` attribute of a `config` object must be an array with two * string values: the opening and closing tags used in the template (e.g. * [ "<%", "%>" ]). The default is to mustache.tags. * * The `escape` attribute of a `config` object must be a function which * accepts a string as input and outputs a safely escaped string. * If an `escape` function is not provided, then an HTML-safe string * escaping function is used as the default. 
*/ Writer.prototype.render = function render(template, view, partials, config) { var tags = this.getConfigTags(config); var tokens = this.parse(template, tags); var context = view instanceof Context ? view : new Context(view, void 0); return this.renderTokens(tokens, context, partials, template, config); }; /** * Low-level method that renders the given array of `tokens` using * the given `context` and `partials`. * * Note: The `originalTemplate` is only ever used to extract the portion * of the original template that was contained in a higher-order section. * If the template doesn't use higher-order sections, this argument may * be omitted. */ Writer.prototype.renderTokens = function renderTokens(tokens, context, partials, originalTemplate, config) { var buffer = ""; var token, symbol, value; for (var i = 0, numTokens = tokens.length; i < numTokens; ++i) { value = void 0; token = tokens[i]; symbol = token[0]; if (symbol === "#") value = this.renderSection(token, context, partials, originalTemplate, config); else if (symbol === "^") value = this.renderInverted(token, context, partials, originalTemplate, config); else if (symbol === ">") value = this.renderPartial(token, context, partials, config); else if (symbol === "&") value = this.unescapedValue(token, context); else if (symbol === "name") value = this.escapedValue(token, context, config); else if (symbol === "text") value = this.rawValue(token); if (value !== void 0) buffer += value; } return buffer; }; Writer.prototype.renderSection = function renderSection(token, context, partials, originalTemplate, config) { var self = this; var buffer = ""; var value = context.lookup(token[1]); function subRender(template) { return self.render(template, context, partials, config); } if (!value) return; if (isArray(value)) for (var j = 0, valueLength = value.length; j < valueLength; ++j) buffer += this.renderTokens(token[4], context.push(value[j]), partials, originalTemplate, config); else if (typeof value === "object" || typeof value === "string" || typeof value === "number") buffer += this.renderTokens(token[4], context.push(value), partials, originalTemplate, config); else if (isFunction(value)) { if (typeof originalTemplate !== "string") throw new Error("Cannot use higher-order sections without the original template"); value = value.call(context.view, originalTemplate.slice(token[3], token[5]), subRender); if (value != null) buffer += value; } else buffer += this.renderTokens(token[4], context, partials, originalTemplate, config); return buffer; }; Writer.prototype.renderInverted = function renderInverted(token, context, partials, originalTemplate, config) { var value = context.lookup(token[1]); if (!value || isArray(value) && value.length === 0) return this.renderTokens(token[4], context, partials, originalTemplate, config); }; Writer.prototype.indentPartial = function indentPartial(partial, indentation, lineHasNonSpace) { var filteredIndentation = indentation.replace(/[^ \t]/g, ""); var partialByNl = partial.split("\n"); for (var i = 0; i < partialByNl.length; i++) if (partialByNl[i].length && (i > 0 || !lineHasNonSpace)) partialByNl[i] = filteredIndentation + partialByNl[i]; return partialByNl.join("\n"); }; Writer.prototype.renderPartial = function renderPartial(token, context, partials, config) { if (!partials) return; var tags = this.getConfigTags(config); var value = isFunction(partials) ? 
partials(token[1]) : partials[token[1]]; if (value != null) { var lineHasNonSpace = token[6]; var tagIndex = token[5]; var indentation = token[4]; var indentedValue = value; if (tagIndex == 0 && indentation) indentedValue = this.indentPartial(value, indentation, lineHasNonSpace); var tokens = this.parse(indentedValue, tags); return this.renderTokens(tokens, context, partials, indentedValue, config); } }; Writer.prototype.unescapedValue = function unescapedValue(token, context) { var value = context.lookup(token[1]); if (value != null) return value; }; Writer.prototype.escapedValue = function escapedValue(token, context, config) { var escape = this.getConfigEscape(config) || mustache.escape; var value = context.lookup(token[1]); if (value != null) return typeof value === "number" && escape === mustache.escape ? String(value) : escape(value); }; Writer.prototype.rawValue = function rawValue(token) { return token[1]; }; Writer.prototype.getConfigTags = function getConfigTags(config) { if (isArray(config)) return config; else if (config && typeof config === "object") return config.tags; else return void 0; }; Writer.prototype.getConfigEscape = function getConfigEscape(config) { if (config && typeof config === "object" && !isArray(config)) return config.escape; else return void 0; }; var mustache = { name: "mustache.js", version: "4.2.0", tags: ["{{", "}}"], clearCache: void 0, escape: void 0, parse: void 0, render: void 0, Scanner: void 0, Context: void 0, Writer: void 0, set templateCache(cache$1) { defaultWriter.templateCache = cache$1; }, get templateCache() { return defaultWriter.templateCache; } }; var defaultWriter = new Writer(); /** * Clears all cached templates in the default writer. */ mustache.clearCache = function clearCache() { return defaultWriter.clearCache(); }; /** * Parses and caches the given template in the default writer and returns the * array of tokens it contains. Doing this ahead of time avoids the need to * parse templates on the fly as they are rendered. */ mustache.parse = function parse$7(template, tags) { return defaultWriter.parse(template, tags); }; /** * Renders the `template` with the given `view`, `partials`, and `config` * using the default writer. */ mustache.render = function render(template, view, partials, config) { if (typeof template !== "string") throw new TypeError("Invalid template! 
Template should be a \"string\" but \"" + typeStr(template) + "\" was given as the first argument for mustache#render(template, view, partials)"); return defaultWriter.render(template, view, partials, config); }; mustache.escape = escapeHtml; mustache.Scanner = Scanner; mustache.Context = Context; mustache.Writer = Writer; var mustache_default = mustache; //#endregion //#region node_modules/.pnpm/@langchain+core@0.3.57_openai@5.3.0_ws@8.18.2_zod@3.25.57_/node_modules/@langchain/core/dist/prompts/template.js function configureMustache() { mustache_default.escape = (text) => text; } const parseFString = (template) => { const chars = template.split(""); const nodes = []; const nextBracket = (bracket, start) => { for (let i$1 = start; i$1 < chars.length; i$1 += 1) if (bracket.includes(chars[i$1])) return i$1; return -1; }; let i = 0; while (i < chars.length) if (chars[i] === "{" && i + 1 < chars.length && chars[i + 1] === "{") { nodes.push({ type: "literal", text: "{" }); i += 2; } else if (chars[i] === "}" && i + 1 < chars.length && chars[i + 1] === "}") { nodes.push({ type: "literal", text: "}" }); i += 2; } else if (chars[i] === "{") { const j = nextBracket("}", i); if (j < 0) throw new Error("Unclosed '{' in template."); nodes.push({ type: "variable", name: chars.slice(i + 1, j).join("") }); i = j + 1; } else if (chars[i] === "}") throw new Error("Single '}' in template."); else { const next = nextBracket("{}", i); const text = (next < 0 ? chars.slice(i) : chars.slice(i, next)).join(""); nodes.push({ type: "literal", text }); i = next < 0 ? chars.length : next; } return nodes; }; /** * Convert the result of mustache.parse into an array of ParsedTemplateNode, * to make it compatible with other LangChain string parsing template formats. * * @param {mustache.TemplateSpans} template The result of parsing a mustache template with the mustache.js library. * @returns {ParsedTemplateNode[]} */ const mustacheTemplateToNodes = (template) => template.map((temp) => { if (temp[0] === "name") { const name = temp[1].includes(".") ? temp[1].split(".")[0] : temp[1]; return { type: "variable", name }; } else if ([ "#", "&", "^", ">" ].includes(temp[0])) return { type: "variable", name: temp[1] }; else return { type: "literal", text: temp[1] }; }); const parseMustache = (template) => { configureMustache(); const parsed = mustache_default.parse(template); return mustacheTemplateToNodes(parsed); }; const interpolateFString = (template, values) => { return parseFString(template).reduce((res, node) => { if (node.type === "variable") { if (node.name in values) { const stringValue = typeof values[node.name] === "string" ? 
values[node.name] : JSON.stringify(values[node.name]); return res + stringValue; } throw new Error(`(f-string) Missing value for input ${node.name}`); } return res + node.text; }, ""); }; const interpolateMustache = (template, values) => { configureMustache(); return mustache_default.render(template, values); }; const DEFAULT_FORMATTER_MAPPING = { "f-string": interpolateFString, mustache: interpolateMustache }; const DEFAULT_PARSER_MAPPING = { "f-string": parseFString, mustache: parseMustache }; const renderTemplate = (template, templateFormat, inputValues) => { try { return DEFAULT_FORMATTER_MAPPING[templateFormat](template, inputValues); } catch (e) { const error = addLangChainErrorFields(e, "INVALID_PROMPT_INPUT"); throw error; } }; const parseTemplate = (template, templateFormat) => DEFAULT_PARSER_MAPPING[templateFormat](template); const checkValidTemplate = (template, templateFormat, inputVariables) => { if (!(templateFormat in DEFAULT_FORMATTER_MAPPING)) { const validFormats = Object.keys(DEFAULT_FORMATTER_MAPPING); throw new Error(`Invalid template format. Got \`${templateFormat}\`; should be one of ${validFormats}`); } try { const dummyInputs = inputVariables.reduce((acc, v) => { acc[v] = "foo"; return acc; }, {}); if (Array.isArray(template)) template.forEach((message) => { if (message.type === "text") renderTemplate(message.text, templateFormat, dummyInputs); else if (message.type === "image_url") if (typeof message.image_url === "string") renderTemplate(message.image_url, templateFormat, dummyInputs); else { const imageUrl = message.image_url.url; renderTemplate(imageUrl, templateFormat, dummyInputs); } else throw new Error(`Invalid message template received. ${JSON.stringify(message, null, 2)}`); }); else renderTemplate(template, templateFormat, dummyInputs); } catch (e) { throw new Error(`Invalid prompt schema: ${e.message}`); } }; //#endregion //#region node_modules/.pnpm/@langchain+core@0.3.57_openai@5.3.0_ws@8.18.2_zod@3.25.57_/node_modules/@langchain/core/dist/prompts/prompt.js /** * Schema to represent a basic prompt for an LLM. * @augments BasePromptTemplate * @augments PromptTemplateInput * * @example * ```ts * import { PromptTemplate } from "langchain/prompts"; * * const prompt = new PromptTemplate({ * inputVariables: ["foo"], * template: "Say {foo}", * }); * ``` */ var PromptTemplate = class PromptTemplate extends BaseStringPromptTemplate { static lc_name() { return "PromptTemplate"; } constructor(input) { super(input); Object.defineProperty(this, "template", { enumerable: true, configurable: true, writable: true, value: void 0 }); Object.defineProperty(this, "templateFormat", { enumerable: true, configurable: true, writable: true, value: "f-string" }); Object.defineProperty(this, "validateTemplate", { enumerable: true, configurable: true, writable: true, value: true }); /** * Additional fields which should be included inside * the message content array if using a complex message * content. 
*/ Object.defineProperty(this, "additionalContentFields", { enumerable: true, configurable: true, writable: true, value: void 0 }); if (input.templateFormat === "mustache" && input.validateTemplate === void 0) this.validateTemplate = false; Object.assign(this, input); if (this.validateTemplate) { if (this.templateFormat === "mustache") throw new Error("Mustache templates cannot be validated."); let totalInputVariables = this.inputVariables; if (this.partialVariables) totalInputVariables = totalInputVariables.concat(Object.keys(this.partialVariables)); checkValidTemplate(this.template, this.templateFormat, totalInputVariables); } } _getPromptType() { return "prompt"; } /** * Formats the prompt template with the provided values. * @param values The values to be used to format the prompt template. * @returns A promise that resolves to a string which is the formatted prompt. */ async format(values) { const allValues = await this.mergePartialAndUserVariables(values); return renderTemplate(this.template, this.templateFormat, allValues); } /** * Take examples in list format with prefix and suffix to create a prompt. * * Intended to be used a a way to dynamically create a prompt from examples. * * @param examples - List of examples to use in the prompt. * @param suffix - String to go after the list of examples. Should generally set up the user's input. * @param inputVariables - A list of variable names the final prompt template will expect * @param exampleSeparator - The separator to use in between examples * @param prefix - String that should go before any examples. Generally includes examples. * * @returns The final prompt template generated. */ static fromExamples(examples, suffix, inputVariables, exampleSeparator = "\n\n", prefix$1 = "") { const template = [ prefix$1, ...examples, suffix ].join(exampleSeparator); return new PromptTemplate({ inputVariables, template }); } static fromTemplate(template, options) { const { templateFormat = "f-string",...rest } = options ?? {}; const names = /* @__PURE__ */ new Set(); parseTemplate(template, templateFormat).forEach((node) => { if (node.type === "variable") names.add(node.name); }); return new PromptTemplate({ inputVariables: [...names], templateFormat, template, ...rest }); } /** * Partially applies values to the prompt template. * @param values The values to be partially applied to the prompt template. * @returns A new instance of PromptTemplate with the partially applied values. */ async partial(values) { const newInputVariables = this.inputVariables.filter((iv) => !(iv in values)); const newPartialVariables = { ...this.partialVariables ?? 
{}, ...values }; const promptDict = { ...this, inputVariables: newInputVariables, partialVariables: newPartialVariables }; return new PromptTemplate(promptDict); } serialize() { if (this.outputParser !== void 0) throw new Error("Cannot serialize a prompt template with an output parser"); return { _type: this._getPromptType(), input_variables: this.inputVariables, template: this.template, template_format: this.templateFormat }; } static async deserialize(data) { if (!data.template) throw new Error("Prompt template must have a template"); const res = new PromptTemplate({ inputVariables: data.input_variables, template: data.template, templateFormat: data.template_format }); return res; } }; //#endregion export { AIMessage, AIMessageChunk, AsyncCaller, AsyncLocalStorageProviderSingleton, BaseCallbackHandler, BaseMessage, BaseMessageChunk, BasePromptTemplate, BaseStringPromptTemplate, BaseTracer, CallbackManager, ChatGenerationChunk, ChatMessage, ChatMessageChunk, ChatPromptValue, DEFAULT_FORMATTER_MAPPING, DEFAULT_PARSER_MAPPING, FunctionMessage, FunctionMessageChunk, GenerationChunk, Graph, HumanMessage, HumanMessageChunk, ImagePromptValue, IterableReadableStream, LangChainTracer, PromptTemplate, RUN_KEY, Runnable, RunnableAssign, RunnableBinding, RunnableEach, RunnableLambda, RunnableMap, RunnableParallel, RunnablePick, RunnableRetry, RunnableSequence, RunnableToolLike, RunnableWithFallbacks, Serializable, StringPromptValue, SystemMessage, SystemMessageChunk, ToolInputParsingException, ToolMessage, ToolMessageChunk, ZodFirstPartyTypeKind, _coerceToDict, _coerceToRunnable, _configHasToolCallId, _isMessageFieldWithRole, _isToolCall, _mergeDicts, _mergeLists, _mergeObj, _mergeStatus, addLangChainErrorFields, anyType, applyPatch, arrayType, async_caller_exports, base_exports, base_exports$1, booleanType, callbackHandlerPrefersStreaming, checkValidTemplate, coerceMessageLikeToMessage, compare, concat, console_exports, convertToChunk, convertToOpenAIImageBlock, convertToProviderContentBlock, custom, ensureConfig, enumType, env_exports, getBufferString, getCallbackManagerForConfig, getEnvironmentVariable, get_lc_unique_name, interpolateFString, interpolateMustache, isAIMessage, isAIMessageChunk, isBase64ContentBlock, isBaseMessage, isBaseMessageChunk, isChatMessage, isChatMessageChunk, isDataContentBlock, isDirectToolOutput, isFunctionMessage, isFunctionMessageChunk, isHumanMessage, isHumanMessageChunk, isIDContentBlock, isOpenAIToolCallArray, isPlainTextContentBlock, isSystemMessage, isSystemMessageChunk, isToolMessage, isToolMessageChunk, isURLContentBlock, keyFromJson, log_stream_exports, manager_exports, mapChatMessagesToStoredMessages, mapKeys, mapStoredMessageToChatMessage, mapStoredMessagesToChatMessages, mergeConfigs, mergeContent, numberType, objectType, outputs_exports, parseBase64DataUrl, parseCallbackConfigArg, parseFString, parseJsonMarkdown, parseMimeType, parseMustache, parsePartialJson, parseTemplate, patchConfig, pickRunnableConfigKeys, promises_exports, prompt_values_exports, renderTemplate, rng, serializable_exports, stream_exports, stringType, tracer_langchain_exports, unsafeStringify, v4_default, validate_default, zodToJsonSchema };
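/**
 * A minimal usage sketch of the prompt and runnable primitives in this chunk, assuming it is
 * consumed through the public `@langchain/core` entry points that re-export `PromptTemplate`
 * and `RunnableLambda` (the import paths below refer to those entry points, not to this chunk
 * directly), and substituting a plain echo function for a model call so the sketch stays
 * self-contained.
 * @example
 * ```typescript
 * import { PromptTemplate } from "@langchain/core/prompts";
 * import { RunnableLambda } from "@langchain/core/runnables";
 *
 * // An f-string template with a single input variable, as parsed by parseFString above.
 * const prompt = PromptTemplate.fromTemplate("Say {foo}");
 *
 * // Stand-in "model" step: echoes the formatted prompt value back as a string.
 * const echo = RunnableLambda.from(async (value: { toString(): string }) => value.toString());
 *
 * const chain = prompt.pipe(echo);
 * const res = await chain.invoke({ foo: "hello" });
 *
 * // "Say hello"
 * ```
 */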