Merge remote-tracking branch 'origin/next' into telemetry

Ralph Khreish
2025-05-16 18:16:58 +02:00
40 changed files with 387 additions and 229 deletions


@@ -1,16 +1,26 @@
-import { createOpenAI, openai } from '@ai-sdk/openai'; // Using openai provider from Vercel AI SDK
-import { generateText, streamText, generateObject } from 'ai'; // Import necessary functions from 'ai'
+import { createOpenAI } from '@ai-sdk/openai'; // Using openai provider from Vercel AI SDK
+import { generateObject } from 'ai'; // Import necessary functions from 'ai'
import { log } from '../../scripts/modules/utils.js';
+function getClient(apiKey, baseUrl) {
+if (!apiKey) {
+throw new Error('OpenAI API key is required.');
+}
+return createOpenAI({
+apiKey: apiKey,
+...(baseUrl && { baseURL: baseUrl })
+});
+}
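A minimal usage sketch of the new helper, not part of the diff: when baseUrl is omitted the conditional spread adds nothing and the stock OpenAI endpoint is used; when provided, it is forwarded to createOpenAI as baseURL. The environment variable and proxy URL below are illustrative assumptions.

// Sketch: default endpoint; no baseUrl, so the conditional spread is a no-op
const defaultClient = getClient(process.env.OPENAI_API_KEY);

// Sketch: an OpenAI-compatible proxy; the URL is hypothetical
const proxiedClient = getClient(process.env.OPENAI_API_KEY, 'https://llm-proxy.example.com/v1');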
/**
* Generates text using OpenAI models via Vercel AI SDK.
*
- * @param {object} params - Parameters including apiKey, modelId, messages, maxTokens, temperature.
+ * @param {object} params - Parameters including apiKey, modelId, messages, maxTokens, temperature, baseUrl.
* @returns {Promise<object>} The generated text content and usage.
* @throws {Error} If API call fails.
*/
export async function generateOpenAIText(params) {
-const { apiKey, modelId, messages, maxTokens, temperature } = params;
+const { apiKey, modelId, messages, maxTokens, temperature, baseUrl } = params;
log('debug', `generateOpenAIText called with model: ${modelId}`);
if (!apiKey) {
@@ -23,7 +33,7 @@ export async function generateOpenAIText(params) {
throw new Error('Invalid or empty messages array provided for OpenAI.');
}
-const openaiClient = createOpenAI({ apiKey });
+const openaiClient = getClient(apiKey, baseUrl);
try {
const result = await generateText({
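For callers, the visible change is the extra optional baseUrl field in the params object. A hedged sketch of the call shape: the model id, message, and environment variables are illustrative, and the result is left undestructured because only the JSDoc ('text content and usage') describes its shape.

// Sketch: baseUrl is optional; leaving it undefined keeps the default OpenAI endpoint
const result = await generateOpenAIText({
	apiKey: process.env.OPENAI_API_KEY,
	modelId: 'gpt-4o-mini',
	messages: [{ role: 'user', content: 'Summarize the telemetry changes.' }],
	maxTokens: 256,
	temperature: 0.2,
	baseUrl: process.env.OPENAI_BASE_URL
});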
@@ -67,12 +77,12 @@ export async function generateOpenAIText(params) {
/**
* Streams text using OpenAI models via Vercel AI SDK.
*
- * @param {object} params - Parameters including apiKey, modelId, messages, maxTokens, temperature.
+ * @param {object} params - Parameters including apiKey, modelId, messages, maxTokens, temperature, baseUrl.
* @returns {Promise<ReadableStream>} A readable stream of text deltas.
* @throws {Error} If API call fails.
*/
export async function streamOpenAIText(params) {
-const { apiKey, modelId, messages, maxTokens, temperature } = params;
+const { apiKey, modelId, messages, maxTokens, temperature, baseUrl } = params;
log('debug', `streamOpenAIText called with model: ${modelId}`);
if (!apiKey) {
@@ -87,7 +97,7 @@ export async function streamOpenAIText(params) {
);
}
-const openaiClient = createOpenAI({ apiKey });
+const openaiClient = getClient(apiKey, baseUrl);
try {
const stream = await openaiClient.chat.stream(messages, {
@@ -116,7 +126,7 @@ export async function streamOpenAIText(params) {
/**
* Generates structured objects using OpenAI models via Vercel AI SDK.
*
- * @param {object} params - Parameters including apiKey, modelId, messages, schema, objectName, maxTokens, temperature.
+ * @param {object} params - Parameters including apiKey, modelId, messages, schema, objectName, maxTokens, temperature, baseUrl.
* @returns {Promise<object>} The generated object matching the schema and usage.
* @throws {Error} If API call fails or object generation fails.
*/
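The same pattern applies to object generation. Below is a sketch of a call, assuming a zod schema (which the AI SDK's generateObject accepts); the schema fields, model id, and environment variables are hypothetical, and the return value is not destructured since its exact shape is only loosely described by the JSDoc.

import { z } from 'zod';

// Hypothetical schema purely for illustration
const taskSchema = z.object({
	title: z.string(),
	priority: z.enum(['low', 'medium', 'high'])
});

const result = await generateOpenAIObject({
	apiKey: process.env.OPENAI_API_KEY,
	modelId: 'gpt-4o',
	messages: [{ role: 'user', content: 'Draft one task for wiring up telemetry.' }],
	schema: taskSchema,
	objectName: 'task',
	maxTokens: 500,
	temperature: 0.2,
	baseUrl: process.env.OPENAI_BASE_URL // optional; omit or leave undefined for the default endpoint
});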
@@ -128,7 +138,8 @@ export async function generateOpenAIObject(params) {
schema,
objectName,
maxTokens,
-temperature
+temperature,
+baseUrl
} = params;
log(
'debug',
@@ -144,7 +155,7 @@ export async function generateOpenAIObject(params) {
if (!objectName)
throw new Error('Object name is required for OpenAI object generation.');
-const openaiClient = createOpenAI({ apiKey });
+const openaiClient = getClient(apiKey, baseUrl);
try {
const result = await generateObject({