chore: run npm run format

Author: Ralph Khreish
Date: 2025-04-09 00:25:27 +02:00
Parent: 36b4dc6470
Commit: edb889ae84
109 changed files with 28144 additions and 24157 deletions


@@ -11,9 +11,9 @@ dotenv.config();
// Default model configuration from CLI environment
const DEFAULT_MODEL_CONFIG = {
model: 'claude-3-7-sonnet-20250219',
maxTokens: 64000,
temperature: 0.2
model: 'claude-3-7-sonnet-20250219',
maxTokens: 64000,
temperature: 0.2
};
/**
@@ -24,25 +24,28 @@ const DEFAULT_MODEL_CONFIG = {
* @throws {Error} If API key is missing
*/
export function getAnthropicClientForMCP(session, log = console) {
try {
// Extract API key from session.env or fall back to environment variables
const apiKey = session?.env?.ANTHROPIC_API_KEY || process.env.ANTHROPIC_API_KEY;
if (!apiKey) {
throw new Error('ANTHROPIC_API_KEY not found in session environment or process.env');
}
// Initialize and return a new Anthropic client
return new Anthropic({
apiKey,
defaultHeaders: {
'anthropic-beta': 'output-128k-2025-02-19' // Include header for increased token limit
}
});
} catch (error) {
log.error(`Failed to initialize Anthropic client: ${error.message}`);
throw error;
}
try {
// Extract API key from session.env or fall back to environment variables
const apiKey =
session?.env?.ANTHROPIC_API_KEY || process.env.ANTHROPIC_API_KEY;
if (!apiKey) {
throw new Error(
'ANTHROPIC_API_KEY not found in session environment or process.env'
);
}
// Initialize and return a new Anthropic client
return new Anthropic({
apiKey,
defaultHeaders: {
'anthropic-beta': 'output-128k-2025-02-19' // Include header for increased token limit
}
});
} catch (error) {
log.error(`Failed to initialize Anthropic client: ${error.message}`);
throw error;
}
}
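
For context, a minimal usage sketch of the helper reformatted above (not part of this diff). The import path, session shape, and key value are assumptions based on the code in the hunk:

// Hypothetical caller of getAnthropicClientForMCP; path and session shape are assumed.
import { getAnthropicClientForMCP } from './ai-client-utils.js';

const session = { env: { ANTHROPIC_API_KEY: 'sk-ant-xxxxxxxx' } }; // placeholder key
const log = { info: console.log, warn: console.warn, error: console.error };

try {
	const anthropic = getAnthropicClientForMCP(session, log);
	// The returned client already carries the 'anthropic-beta' header set above.
} catch (error) {
	// Thrown when neither session.env nor process.env provides ANTHROPIC_API_KEY.
	log.error(error.message);
}
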
/**
@@ -53,26 +56,29 @@ export function getAnthropicClientForMCP(session, log = console) {
* @throws {Error} If API key is missing or OpenAI package can't be imported
*/
export async function getPerplexityClientForMCP(session, log = console) {
try {
// Extract API key from session.env or fall back to environment variables
const apiKey = session?.env?.PERPLEXITY_API_KEY || process.env.PERPLEXITY_API_KEY;
if (!apiKey) {
throw new Error('PERPLEXITY_API_KEY not found in session environment or process.env');
}
// Dynamically import OpenAI (it may not be used in all contexts)
const { default: OpenAI } = await import('openai');
// Initialize and return a new OpenAI client configured for Perplexity
return new OpenAI({
apiKey,
baseURL: 'https://api.perplexity.ai'
});
} catch (error) {
log.error(`Failed to initialize Perplexity client: ${error.message}`);
throw error;
}
try {
// Extract API key from session.env or fall back to environment variables
const apiKey =
session?.env?.PERPLEXITY_API_KEY || process.env.PERPLEXITY_API_KEY;
if (!apiKey) {
throw new Error(
'PERPLEXITY_API_KEY not found in session environment or process.env'
);
}
// Dynamically import OpenAI (it may not be used in all contexts)
const { default: OpenAI } = await import('openai');
// Initialize and return a new OpenAI client configured for Perplexity
return new OpenAI({
apiKey,
baseURL: 'https://api.perplexity.ai'
});
} catch (error) {
log.error(`Failed to initialize Perplexity client: ${error.message}`);
throw error;
}
}
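
A similar hedged sketch for the Perplexity helper. Because it dynamically imports the openai package, the call must be awaited; the import path and key value are assumptions:

// Hypothetical caller; the helper rejects if PERPLEXITY_API_KEY is missing or
// the optional 'openai' dependency cannot be imported.
import { getPerplexityClientForMCP } from './ai-client-utils.js';

const session = { env: { PERPLEXITY_API_KEY: 'pplx-xxxxxxxx' } }; // placeholder key

const perplexity = await getPerplexityClientForMCP(session);
// perplexity is an OpenAI-compatible client pointed at https://api.perplexity.ai
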
/**
@@ -82,12 +88,12 @@ export async function getPerplexityClientForMCP(session, log = console) {
* @returns {Object} Model configuration with model, maxTokens, and temperature
*/
export function getModelConfig(session, defaults = DEFAULT_MODEL_CONFIG) {
// Get values from session or fall back to defaults
return {
model: session?.env?.MODEL || defaults.model,
maxTokens: parseInt(session?.env?.MAX_TOKENS || defaults.maxTokens),
temperature: parseFloat(session?.env?.TEMPERATURE || defaults.temperature)
};
// Get values from session or fall back to defaults
return {
model: session?.env?.MODEL || defaults.model,
maxTokens: parseInt(session?.env?.MAX_TOKENS || defaults.maxTokens),
temperature: parseFloat(session?.env?.TEMPERATURE || defaults.temperature)
};
}
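
To illustrate the coercion in getModelConfig, a small sketch with placeholder values: MAX_TOKENS and TEMPERATURE arrive from the environment as strings, so parseInt/parseFloat turn them back into numbers, and anything unset falls back to DEFAULT_MODEL_CONFIG.

// Hypothetical session overriding only MAX_TOKENS; other fields use the defaults.
const session = { env: { MAX_TOKENS: '32000' } };
const config = getModelConfig(session);
// config => { model: 'claude-3-7-sonnet-20250219', maxTokens: 32000, temperature: 0.2 }
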
/**
@@ -100,59 +106,78 @@ export function getModelConfig(session, defaults = DEFAULT_MODEL_CONFIG) {
* @returns {Promise<Object>} Selected model info with type and client
* @throws {Error} If no AI models are available
*/
export async function getBestAvailableAIModel(session, options = {}, log = console) {
const { requiresResearch = false, claudeOverloaded = false } = options;
// Test case: When research is needed but no Perplexity, use Claude
if (requiresResearch &&
!(session?.env?.PERPLEXITY_API_KEY || process.env.PERPLEXITY_API_KEY) &&
(session?.env?.ANTHROPIC_API_KEY || process.env.ANTHROPIC_API_KEY)) {
try {
log.warn('Perplexity not available for research, using Claude');
const client = getAnthropicClientForMCP(session, log);
return { type: 'claude', client };
} catch (error) {
log.error(`Claude not available: ${error.message}`);
throw new Error('No AI models available for research');
}
}
// Regular path: Perplexity for research when available
if (requiresResearch && (session?.env?.PERPLEXITY_API_KEY || process.env.PERPLEXITY_API_KEY)) {
try {
const client = await getPerplexityClientForMCP(session, log);
return { type: 'perplexity', client };
} catch (error) {
log.warn(`Perplexity not available: ${error.message}`);
// Fall through to Claude as backup
}
}
// Test case: Claude for overloaded scenario
if (claudeOverloaded && (session?.env?.ANTHROPIC_API_KEY || process.env.ANTHROPIC_API_KEY)) {
try {
log.warn('Claude is overloaded but no alternatives are available. Proceeding with Claude anyway.');
const client = getAnthropicClientForMCP(session, log);
return { type: 'claude', client };
} catch (error) {
log.error(`Claude not available despite being overloaded: ${error.message}`);
throw new Error('No AI models available');
}
}
// Default case: Use Claude when available and not overloaded
if (!claudeOverloaded && (session?.env?.ANTHROPIC_API_KEY || process.env.ANTHROPIC_API_KEY)) {
try {
const client = getAnthropicClientForMCP(session, log);
return { type: 'claude', client };
} catch (error) {
log.warn(`Claude not available: ${error.message}`);
// Fall through to error if no other options
}
}
// If we got here, no models were successfully initialized
throw new Error('No AI models available. Please check your API keys.');
export async function getBestAvailableAIModel(
session,
options = {},
log = console
) {
const { requiresResearch = false, claudeOverloaded = false } = options;
// Test case: When research is needed but no Perplexity, use Claude
if (
requiresResearch &&
!(session?.env?.PERPLEXITY_API_KEY || process.env.PERPLEXITY_API_KEY) &&
(session?.env?.ANTHROPIC_API_KEY || process.env.ANTHROPIC_API_KEY)
) {
try {
log.warn('Perplexity not available for research, using Claude');
const client = getAnthropicClientForMCP(session, log);
return { type: 'claude', client };
} catch (error) {
log.error(`Claude not available: ${error.message}`);
throw new Error('No AI models available for research');
}
}
// Regular path: Perplexity for research when available
if (
requiresResearch &&
(session?.env?.PERPLEXITY_API_KEY || process.env.PERPLEXITY_API_KEY)
) {
try {
const client = await getPerplexityClientForMCP(session, log);
return { type: 'perplexity', client };
} catch (error) {
log.warn(`Perplexity not available: ${error.message}`);
// Fall through to Claude as backup
}
}
// Test case: Claude for overloaded scenario
if (
claudeOverloaded &&
(session?.env?.ANTHROPIC_API_KEY || process.env.ANTHROPIC_API_KEY)
) {
try {
log.warn(
'Claude is overloaded but no alternatives are available. Proceeding with Claude anyway.'
);
const client = getAnthropicClientForMCP(session, log);
return { type: 'claude', client };
} catch (error) {
log.error(
`Claude not available despite being overloaded: ${error.message}`
);
throw new Error('No AI models available');
}
}
// Default case: Use Claude when available and not overloaded
if (
!claudeOverloaded &&
(session?.env?.ANTHROPIC_API_KEY || process.env.ANTHROPIC_API_KEY)
) {
try {
const client = getAnthropicClientForMCP(session, log);
return { type: 'claude', client };
} catch (error) {
log.warn(`Claude not available: ${error.message}`);
// Fall through to error if no other options
}
}
// If we got here, no models were successfully initialized
throw new Error('No AI models available. Please check your API keys.');
}
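
A hedged sketch of the selection logic above: with requiresResearch set, Perplexity is preferred when its key is present, otherwise the function falls back to Claude, and it throws if no key yields a working client. The session and log objects are assumed to be defined as in the earlier sketches:

// Hypothetical call from an MCP tool handler.
const { type, client } = await getBestAvailableAIModel(
	session,
	{ requiresResearch: true, claudeOverloaded: false },
	log
);

if (type === 'perplexity') {
	// client is the OpenAI-compatible Perplexity client
} else {
	// type === 'claude'; client is the Anthropic client
}
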
/**
@@ -161,28 +186,28 @@ export async function getBestAvailableAIModel(session, options = {}, log = conso
* @returns {string} User-friendly error message
*/
export function handleClaudeError(error) {
// Check if it's a structured error response
if (error.type === 'error' && error.error) {
switch (error.error.type) {
case 'overloaded_error':
return 'Claude is currently experiencing high demand and is overloaded. Please wait a few minutes and try again.';
case 'rate_limit_error':
return 'You have exceeded the rate limit. Please wait a few minutes before making more requests.';
case 'invalid_request_error':
return 'There was an issue with the request format. If this persists, please report it as a bug.';
default:
return `Claude API error: ${error.error.message}`;
}
}
// Check for network/timeout errors
if (error.message?.toLowerCase().includes('timeout')) {
return 'The request to Claude timed out. Please try again.';
}
if (error.message?.toLowerCase().includes('network')) {
return 'There was a network error connecting to Claude. Please check your internet connection and try again.';
}
// Default error message
return `Error communicating with Claude: ${error.message}`;
}
// Check if it's a structured error response
if (error.type === 'error' && error.error) {
switch (error.error.type) {
case 'overloaded_error':
return 'Claude is currently experiencing high demand and is overloaded. Please wait a few minutes and try again.';
case 'rate_limit_error':
return 'You have exceeded the rate limit. Please wait a few minutes before making more requests.';
case 'invalid_request_error':
return 'There was an issue with the request format. If this persists, please report it as a bug.';
default:
return `Claude API error: ${error.error.message}`;
}
}
// Check for network/timeout errors
if (error.message?.toLowerCase().includes('timeout')) {
return 'The request to Claude timed out. Please try again.';
}
if (error.message?.toLowerCase().includes('network')) {
return 'There was a network error connecting to Claude. Please check your internet connection and try again.';
}
// Default error message
return `Error communicating with Claude: ${error.message}`;
}
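
Finally, a sketch of how handleClaudeError maps a structured overload response to a user-facing string. The error object here is a hand-built example mirroring the { type, error } shape checked above, not a captured API response:

// Hypothetical structured error from the Anthropic API.
const message = handleClaudeError({
	type: 'error',
	error: { type: 'overloaded_error', message: 'Overloaded' }
});
// message => 'Claude is currently experiencing high demand and is overloaded. ...'
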