fix: add validation for resourceLocator properties in AI model nodes

This fixes a critical validation gap that allowed AI agents to create invalid
configurations for nodes using resourceLocator properties (primarily AI model
nodes such as OpenAI Chat Model v1.2+, Anthropic, Cohere, and others).

Before this fix, AI agents could incorrectly pass a string value like:
  model: "gpt-4o-mini"

Instead of the required object format:
  model: { mode: "list", value: "gpt-4o-mini" }

These invalid configs would pass validation but fail at runtime in n8n.

Changes:
- Added resourceLocator type validation in config-validator.ts (lines 237-274)
- Validates value is an object with required 'mode' and 'value' properties
- Provides helpful error messages with exact fix suggestions
- Added 10 comprehensive test cases (100% passing)
- Updated version to 2.17.3
- Added CHANGELOG entry

Affected nodes: OpenAI Chat Model (v1.2+), Anthropic, Cohere, DeepSeek,
Groq, Mistral, OpenRouter, xAI Grok Chat Models, and embeddings nodes.
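
To illustrate the shape of the new check, here is a minimal sketch of the kind of
resourceLocator validation described above. The function name, error interface, and
exact messages are illustrative assumptions, not the actual config-validator.ts code:

  interface ValidationError {
    type: 'invalid_type' | 'missing_required';
    property: string;
    message: string;
    fix?: string;
  }

  // Sketch only: reject non-objects, then require a string 'mode' and a 'value' key.
  function validateResourceLocator(name: string, value: unknown): ValidationError[] {
    const errors: ValidationError[] = [];

    // Strings, numbers, null, and arrays are all invalid here.
    if (typeof value !== 'object' || value === null || Array.isArray(value)) {
      const example = typeof value === 'string' ? value : '...';
      errors.push({
        type: 'invalid_type',
        property: name,
        message: `'${name}' must be an object with 'mode' and 'value' properties`,
        fix: `Use { mode: "list", value: "${example}" } or { mode: "id", value: "${example}" }`,
      });
      return errors;
    }

    const rl = value as Record<string, unknown>;

    if (!('mode' in rl)) {
      errors.push({
        type: 'missing_required',
        property: `${name}.mode`,
        message: `'${name}' is missing required property 'mode'`,
      });
    } else if (typeof rl.mode !== 'string') {
      errors.push({
        type: 'invalid_type',
        property: `${name}.mode`,
        message: `'${name}.mode' must be a string (e.g. "list" or "id")`,
      });
    }

    if (!('value' in rl)) {
      errors.push({
        type: 'missing_required',
        property: `${name}.value`,
        message: `'${name}' is missing required property 'value'`,
      });
    }

    return errors;
  }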

🤖 Generated with [Claude Code](https://claude.com/claude-code)

Co-Authored-By: Claude <noreply@anthropic.com>
Author: czlonkowski
Date:   2025-10-07 16:54:29 +02:00
Parent: 3332eb09fc
Commit: e95ac7c335
6 changed files with 314 additions and 3 deletions

@@ -439,4 +439,243 @@ describe('ConfigValidator - Basic Validation', () => {
      expect(result.suggestions.length).toBeGreaterThanOrEqual(0);
    });
  });

  describe('resourceLocator validation', () => {
    it('should reject string value when resourceLocator object is required', () => {
      const nodeType = '@n8n/n8n-nodes-langchain.lmChatOpenAi';
      const config = {
        model: 'gpt-4o-mini' // Wrong - should be object with mode and value
      };
      const properties = [
        {
          name: 'model',
          displayName: 'Model',
          type: 'resourceLocator',
          required: true,
          default: { mode: 'list', value: 'gpt-4o-mini' }
        }
      ];
      const result = ConfigValidator.validate(nodeType, config, properties);
      expect(result.valid).toBe(false);
      expect(result.errors).toHaveLength(1);
      expect(result.errors[0]).toMatchObject({
        type: 'invalid_type',
        property: 'model',
        message: expect.stringContaining('must be an object with \'mode\' and \'value\' properties')
      });
      expect(result.errors[0].fix).toContain('mode');
      expect(result.errors[0].fix).toContain('value');
    });

    it('should accept valid resourceLocator with mode and value', () => {
      const nodeType = '@n8n/n8n-nodes-langchain.lmChatOpenAi';
      const config = {
        model: {
          mode: 'list',
          value: 'gpt-4o-mini'
        }
      };
      const properties = [
        {
          name: 'model',
          displayName: 'Model',
          type: 'resourceLocator',
          required: true,
          default: { mode: 'list', value: 'gpt-4o-mini' }
        }
      ];
      const result = ConfigValidator.validate(nodeType, config, properties);
      expect(result.valid).toBe(true);
      expect(result.errors).toHaveLength(0);
    });

    it('should reject null value for resourceLocator', () => {
      const nodeType = '@n8n/n8n-nodes-langchain.lmChatOpenAi';
      const config = {
        model: null
      };
      const properties = [
        {
          name: 'model',
          type: 'resourceLocator',
          required: true
        }
      ];
      const result = ConfigValidator.validate(nodeType, config, properties);
      expect(result.valid).toBe(false);
      expect(result.errors.some(e =>
        e.property === 'model' &&
        e.type === 'invalid_type'
      )).toBe(true);
    });

    it('should reject array value for resourceLocator', () => {
      const nodeType = '@n8n/n8n-nodes-langchain.lmChatOpenAi';
      const config = {
        model: ['gpt-4o-mini']
      };
      const properties = [
        {
          name: 'model',
          type: 'resourceLocator',
          required: true
        }
      ];
      const result = ConfigValidator.validate(nodeType, config, properties);
      expect(result.valid).toBe(false);
      expect(result.errors.some(e =>
        e.property === 'model' &&
        e.type === 'invalid_type' &&
        e.message.includes('must be an object')
      )).toBe(true);
    });

    it('should detect missing mode property in resourceLocator', () => {
      const nodeType = '@n8n/n8n-nodes-langchain.lmChatOpenAi';
      const config = {
        model: {
          value: 'gpt-4o-mini'
          // Missing mode property
        }
      };
      const properties = [
        {
          name: 'model',
          type: 'resourceLocator',
          required: true
        }
      ];
      const result = ConfigValidator.validate(nodeType, config, properties);
      expect(result.valid).toBe(false);
      expect(result.errors.some(e =>
        e.property === 'model.mode' &&
        e.type === 'missing_required' &&
        e.message.includes('missing required property \'mode\'')
      )).toBe(true);
    });

    it('should detect missing value property in resourceLocator', () => {
      const nodeType = '@n8n/n8n-nodes-langchain.lmChatOpenAi';
      const config = {
        model: {
          mode: 'list'
          // Missing value property
        }
      };
      const properties = [
        {
          name: 'model',
          displayName: 'Model',
          type: 'resourceLocator',
          required: true
        }
      ];
      const result = ConfigValidator.validate(nodeType, config, properties);
      expect(result.valid).toBe(false);
      expect(result.errors.some(e =>
        e.property === 'model.value' &&
        e.type === 'missing_required' &&
        e.message.includes('missing required property \'value\'')
      )).toBe(true);
    });

    it('should detect invalid mode type in resourceLocator', () => {
      const nodeType = '@n8n/n8n-nodes-langchain.lmChatOpenAi';
      const config = {
        model: {
          mode: 123, // Should be string
          value: 'gpt-4o-mini'
        }
      };
      const properties = [
        {
          name: 'model',
          type: 'resourceLocator',
          required: true
        }
      ];
      const result = ConfigValidator.validate(nodeType, config, properties);
      expect(result.valid).toBe(false);
      expect(result.errors.some(e =>
        e.property === 'model.mode' &&
        e.type === 'invalid_type' &&
        e.message.includes('must be a string')
      )).toBe(true);
    });

    it('should accept resourceLocator with mode "id"', () => {
      const nodeType = '@n8n/n8n-nodes-langchain.lmChatOpenAi';
      const config = {
        model: {
          mode: 'id',
          value: 'gpt-4o-2024-11-20'
        }
      };
      const properties = [
        {
          name: 'model',
          type: 'resourceLocator',
          required: true
        }
      ];
      const result = ConfigValidator.validate(nodeType, config, properties);
      expect(result.valid).toBe(true);
      expect(result.errors).toHaveLength(0);
    });

    it('should reject number value when resourceLocator is required', () => {
      const nodeType = '@n8n/n8n-nodes-langchain.lmChatOpenAi';
      const config = {
        model: 12345 // Wrong type
      };
      const properties = [
        {
          name: 'model',
          type: 'resourceLocator',
          required: true
        }
      ];
      const result = ConfigValidator.validate(nodeType, config, properties);
      expect(result.valid).toBe(false);
      expect(result.errors[0].type).toBe('invalid_type');
      expect(result.errors[0].message).toContain('must be an object');
    });

    it('should provide helpful fix suggestion for string to resourceLocator conversion', () => {
      const nodeType = '@n8n/n8n-nodes-langchain.lmChatOpenAi';
      const config = {
        model: 'gpt-4o-mini'
      };
      const properties = [
        {
          name: 'model',
          type: 'resourceLocator',
          required: true
        }
      ];
      const result = ConfigValidator.validate(nodeType, config, properties);
      expect(result.errors[0].fix).toContain('{ mode: "list", value: "gpt-4o-mini" }');
      expect(result.errors[0].fix).toContain('{ mode: "id", value: "gpt-4o-mini" }');
    });
  });
});