diff --git a/Dockerfile b/Dockerfile
index e949638..a7b59f3 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -74,6 +74,10 @@ USER nodejs
 
 # Set Docker environment flag
 ENV IS_DOCKER=true
 
+# Telemetry: Anonymous usage statistics are ENABLED by default
+# To opt-out, uncomment the following line:
+# ENV N8N_MCP_TELEMETRY_DISABLED=true
+
 # Expose HTTP port
 EXPOSE 3000
diff --git a/PRIVACY.md b/PRIVACY.md
new file mode 100644
index 0000000..c1febc3
--- /dev/null
+++ b/PRIVACY.md
@@ -0,0 +1,69 @@
+# Privacy Policy for n8n-mcp Telemetry
+
+## Overview
+n8n-mcp collects anonymous usage statistics to help improve the tool. This data collection is designed to respect user privacy while providing valuable insights into how the tool is used.
+
+## What We Collect
+- **Anonymous User ID**: A hashed identifier derived from your machine characteristics (no personal information)
+- **Tool Usage**: Which MCP tools are used and their performance metrics
+- **Workflow Patterns**: Sanitized workflow structures (all sensitive data removed)
+- **Error Types**: Categories of errors encountered (no error messages with user data)
+- **System Information**: Platform, architecture, Node.js version, and n8n-mcp version
+
+## What We DON'T Collect
+- Personal information or usernames
+- API keys, tokens, or credentials
+- URLs, endpoints, or hostnames
+- Email addresses or contact information
+- File paths or directory structures
+- Actual workflow data or parameters
+- Database connection strings
+- Any authentication information
+
+## Data Sanitization
+All collected data undergoes automatic sanitization:
+- URLs are replaced with `[URL]` or `[REDACTED]`
+- Long alphanumeric strings (potential keys) are replaced with `[KEY]`
+- Email addresses are replaced with `[EMAIL]`
+- Authentication-related fields are completely removed
+
+## Data Storage
+- Data is stored securely using Supabase
+- Anonymous users have write-only access (cannot read data back)
+- Row Level Security (RLS) policies prevent data access by anonymous users
+
+## Opt-Out
+You can disable telemetry at any time:
+```bash
+npx n8n-mcp telemetry disable
+```
+
+To re-enable:
+```bash
+npx n8n-mcp telemetry enable
+```
+
+To check status:
+```bash
+npx n8n-mcp telemetry status
+```
+
+## Data Usage
+Collected data is used solely to:
+- Understand which features are most used
+- Identify common error patterns
+- Improve tool performance and reliability
+- Guide development priorities
+
+## Data Retention
+- Data is retained for analysis purposes
+- No personal identification is possible from the collected data
+
+## Changes to This Policy
+We may update this privacy policy from time to time. Updates will be reflected in this document.
+
+## Contact
+For questions about telemetry or privacy, please open an issue on GitHub:
+https://github.com/czlonkowski/n8n-mcp/issues
+
+Last updated: 2025-09-25
\ No newline at end of file
diff --git a/README.md b/README.md
index a84f67b..27424ce 100644
--- a/README.md
+++ b/README.md
@@ -211,6 +211,51 @@ Add to Claude Desktop config:
 
 **Restart Claude Desktop after updating configuration** - That's it! 🎉
 
+## 🔐 Privacy & Telemetry
+
+n8n-mcp collects anonymous usage statistics to improve the tool. [View our privacy policy](./PRIVACY.md).
+
+### Opting Out
+
+**For npx users:**
+```bash
+npx n8n-mcp telemetry disable
+```
+
+**For Docker users:**
+Add the following environment variable to your Docker configuration:
+```json
+"-e", "N8N_MCP_TELEMETRY_DISABLED=true"
+```
+
+Example in Claude Desktop config:
+```json
+{
+  "mcpServers": {
+    "n8n-mcp": {
+      "command": "docker",
+      "args": [
+        "run",
+        "-i",
+        "--rm",
+        "--init",
+        "-e", "MCP_MODE=stdio",
+        "-e", "LOG_LEVEL=error",
+        "-e", "N8N_MCP_TELEMETRY_DISABLED=true",
+        "ghcr.io/czlonkowski/n8n-mcp:latest"
+      ]
+    }
+  }
+}
+```
+
+**For docker-compose users:**
+Set in your environment file or docker-compose.yml:
+```yaml
+environment:
+  N8N_MCP_TELEMETRY_DISABLED: "true"
+```
+
 ## 💖 Support This Project
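A quick way to confirm the opt-out override is being picked up is to combine the environment variable with the documented status command. This is a minimal sketch assuming n8n-mcp is available through npx on the host; the expected status line comes from the config manager's `getStatus()` output in this change set:

```bash
# Minimal check (assumes a local npx install of n8n-mcp)
N8N_MCP_TELEMETRY_DISABLED=true npx n8n-mcp telemetry status
# Should report: Telemetry Status: DISABLED (via environment variable)
```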
diff --git a/docker-compose.yml b/docker-compose.yml
index 7aad1ff..b4fe7a5 100644
--- a/docker-compose.yml
+++ b/docker-compose.yml
@@ -23,7 +23,11 @@ services:
       # Database
       NODE_DB_PATH: ${NODE_DB_PATH:-/app/data/nodes.db}
       REBUILD_ON_START: ${REBUILD_ON_START:-false}
-
+
+      # Telemetry: Anonymous usage statistics are ENABLED by default
+      # To opt-out, uncomment and set to 'true':
+      # N8N_MCP_TELEMETRY_DISABLED: ${N8N_MCP_TELEMETRY_DISABLED:-true}
+
       # Optional: n8n API configuration (enables 16 additional management tools)
       # Uncomment and configure to enable n8n workflow management
       # N8N_API_URL: ${N8N_API_URL}
diff --git a/scripts/test-telemetry-direct.ts b/scripts/test-telemetry-direct.ts
new file mode 100644
index 0000000..d1373f4
--- /dev/null
+++ b/scripts/test-telemetry-direct.ts
@@ -0,0 +1,46 @@
+#!/usr/bin/env npx tsx
+/**
+ * Direct telemetry test with hardcoded credentials
+ */
+
+import { createClient } from '@supabase/supabase-js';
+
+const TELEMETRY_BACKEND = {
+  URL: 'https://ydyufsohxdfpopqbubwk.supabase.co',
+  ANON_KEY: 'eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJpc3MiOiJzdXBhYmFzZSIsInJlZiI6InlkeXVmc29oeGRmcG9wcWJ1YndrIiwicm9sZSI6ImFub24iLCJpYXQiOjE3Mzc2MzAxMDgsImV4cCI6MjA1MzIwNjEwOH0.LsUTx9OsNtnqg-jxXaJPc84aBHVDehHiMaFoF2Ir8s0'
+};
+
+async function testDirect() {
+  console.log('🧪 Direct Telemetry Test\n');
+
+  const supabase = createClient(TELEMETRY_BACKEND.URL, TELEMETRY_BACKEND.ANON_KEY, {
+    auth: {
+      persistSession: false,
+      autoRefreshToken: false,
+    }
+  });
+
+  const testEvent = {
+    user_id: 'direct-test-' + Date.now(),
+    event: 'direct_test',
+    properties: {
+      source: 'test-telemetry-direct.ts',
+      timestamp: new Date().toISOString()
+    }
+  };
+
+  console.log('Sending event:', testEvent);
+
+  const { data, error } = await supabase
+    .from('telemetry_events')
+    .insert([testEvent]);
+
+  if (error) {
+    console.error('❌ Failed:', error);
+  } else {
+    console.log('✅ Success! 
Event sent directly to Supabase'); + console.log('Response:', data); + } +} + +testDirect().catch(console.error); diff --git a/scripts/test-telemetry-env.ts b/scripts/test-telemetry-env.ts new file mode 100644 index 0000000..59c845f --- /dev/null +++ b/scripts/test-telemetry-env.ts @@ -0,0 +1,62 @@ +#!/usr/bin/env npx tsx +/** + * Test telemetry environment variable override + */ + +import { TelemetryConfigManager } from '../src/telemetry/config-manager'; +import { telemetry } from '../src/telemetry/telemetry-manager'; + +async function testEnvOverride() { + console.log('๐Ÿงช Testing Telemetry Environment Variable Override\n'); + + const configManager = TelemetryConfigManager.getInstance(); + + // Test 1: Check current status without env var + console.log('Test 1: Without environment variable'); + console.log('Is Enabled:', configManager.isEnabled()); + console.log('Status:', configManager.getStatus()); + + // Test 2: Set environment variable and check again + console.log('\nโ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”\n'); + console.log('Test 2: With N8N_MCP_TELEMETRY_DISABLED=true'); + process.env.N8N_MCP_TELEMETRY_DISABLED = 'true'; + + // Force reload by creating new instance (for testing) + const newConfigManager = TelemetryConfigManager.getInstance(); + console.log('Is Enabled:', newConfigManager.isEnabled()); + console.log('Status:', newConfigManager.getStatus()); + + // Test 3: Try tracking with env disabled + console.log('\nโ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”\n'); + console.log('Test 3: Attempting to track with telemetry disabled'); + telemetry.trackToolUsage('test_tool', true, 100); + console.log('Tool usage tracking attempted (should be ignored)'); + + // Test 4: Alternative env vars + console.log('\nโ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”\n'); + console.log('Test 4: Alternative environment variables'); + + delete process.env.N8N_MCP_TELEMETRY_DISABLED; + process.env.TELEMETRY_DISABLED = 'true'; + console.log('With TELEMETRY_DISABLED=true:', newConfigManager.isEnabled()); + + delete process.env.TELEMETRY_DISABLED; + process.env.DISABLE_TELEMETRY = 'true'; + console.log('With DISABLE_TELEMETRY=true:', newConfigManager.isEnabled()); + + // Test 5: Env var takes precedence over config + console.log('\nโ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”\n'); + console.log('Test 5: Environment variable precedence'); + + // Enable via config + newConfigManager.enable(); + console.log('After enabling via config:', newConfigManager.isEnabled()); + + // But env var should still override + process.env.N8N_MCP_TELEMETRY_DISABLED = 'true'; + console.log('With env var set (should override config):', newConfigManager.isEnabled()); + + console.log('\nโœ… All tests completed!'); +} + +testEnvOverride().catch(console.error); \ No newline at end of file diff --git a/scripts/test-telemetry-integration.ts b/scripts/test-telemetry-integration.ts index b4a7e38..d08c327 100644 --- a/scripts/test-telemetry-integration.ts +++ b/scripts/test-telemetry-integration.ts @@ -1,106 +1,73 @@ #!/usr/bin/env npx tsx /** - * Test script for telemetry integration - * Verifies that telemetry data can be sent to Supabase + * Integration test for the telemetry manager */ -import { telemetry } from '../src/telemetry'; -import { 
WorkflowSanitizer } from '../src/telemetry/workflow-sanitizer'; -import dotenv from 'dotenv'; +import { telemetry } from '../src/telemetry/telemetry-manager'; -// Load environment variables -dotenv.config(); +async function testIntegration() { + console.log('๐Ÿงช Testing Telemetry Manager Integration\n'); -async function testTelemetryIntegration() { - console.log('๐Ÿงช Testing Telemetry Integration with Supabase\n'); + // Check status + console.log('Status:', telemetry.getStatus()); - // Check environment variables - const supabaseUrl = process.env.SUPABASE_URL; - const supabaseKey = process.env.SUPABASE_ANON_KEY; + // Track session start + console.log('\nTracking session start...'); + telemetry.trackSessionStart(); - if (!supabaseUrl || !supabaseKey) { - console.error('โŒ Missing SUPABASE_URL or SUPABASE_ANON_KEY in .env file'); - process.exit(1); - } + // Track tool usage + console.log('Tracking tool usage...'); + telemetry.trackToolUsage('search_nodes', true, 150); + telemetry.trackToolUsage('get_node_info', true, 75); + telemetry.trackToolUsage('validate_workflow', false, 200); - console.log('โœ… Environment variables configured'); - console.log(` Supabase URL: ${supabaseUrl}`); - console.log(` Anon Key: ${supabaseKey.substring(0, 20)}...`); - - // Test 1: Track tool usage - console.log('\n๐Ÿ“Š Test 1: Tracking tool usage...'); - telemetry.trackToolUsage('search_nodes', true, 1250); - telemetry.trackToolUsage('get_node_info', true, 850); - telemetry.trackToolUsage('validate_workflow', false, 2000); - console.log(' โœ“ Tool usage events queued'); - - // Test 2: Track errors - console.log('\n๐Ÿ› Test 2: Tracking errors...'); + // Track errors + console.log('Tracking errors...'); telemetry.trackError('ValidationError', 'workflow_validation', 'validate_workflow'); - telemetry.trackError('NetworkError', 'api_call', 'n8n_create_workflow'); - console.log(' โœ“ Error events queued'); - // Test 3: Track workflow creation - console.log('\n๐Ÿ”ง Test 3: Tracking workflow creation...'); + // Track a test workflow + console.log('Tracking workflow creation...'); const testWorkflow = { - name: 'Test Workflow', nodes: [ { id: '1', - name: 'Webhook', type: 'n8n-nodes-base.webhook', - position: [100, 100], + name: 'Webhook', + position: [0, 0], parameters: { - path: 'test-webhook', - webhookUrl: 'https://n8n.example.com/webhook/abc-123-def', - method: 'POST', - authentication: 'none' - }, - credentials: { - webhookAuth: { - id: 'cred-123', - name: 'My Webhook Auth' - } + path: '/test-webhook', + httpMethod: 'POST' } }, { id: '2', - name: 'HTTP Request', type: 'n8n-nodes-base.httpRequest', - position: [300, 100], + name: 'HTTP Request', + position: [250, 0], parameters: { url: 'https://api.example.com/endpoint', method: 'POST', authentication: 'genericCredentialType', genericAuthType: 'httpHeaderAuth', - httpHeaders: { + sendHeaders: true, + headerParameters: { parameters: [ { name: 'Authorization', - value: 'Bearer sk-1234567890abcdef1234567890abcdef' + value: 'Bearer sk-1234567890abcdef' } ] - }, - options: { - timeout: 10000 } } }, { id: '3', - name: 'Slack', type: 'n8n-nodes-base.slack', - position: [500, 100], + name: 'Slack', + position: [500, 0], parameters: { - channel: 'general', - text: 'Message sent!', - authentication: 'accessToken' - }, - credentials: { - slackApi: { - id: 'cred-456', - name: 'My Slack' - } + channel: '#notifications', + text: 'Workflow completed!' 
} } ], @@ -111,72 +78,17 @@ async function testTelemetryIntegration() { '2': { main: [[{ node: '3', type: 'main', index: 0 }]] } - }, - settings: { - errorWorkflow: 'error-workflow-id', - saveDataErrorExecution: 'all', - saveDataSuccessExecution: 'none', - saveExecutionProgress: true, - saveManualExecutions: true, - timezone: 'America/New_York' - }, - staticData: { some: 'data' }, - pinData: { node1: 'pinned' }, - ownedBy: 'user-123', - createdBy: 'user-123', - updatedBy: 'user-456' + } }; - // Track successful workflow - await telemetry.trackWorkflowCreation(testWorkflow, true); - console.log(' โœ“ Workflow creation tracked'); + telemetry.trackWorkflowCreation(testWorkflow, true); - // Test workflow sanitization - console.log('\n๐Ÿ”’ Test 4: Verifying workflow sanitization...'); - const sanitized = WorkflowSanitizer.sanitizeWorkflow(testWorkflow); - - // Verify sensitive data was removed - const sanitizedStr = JSON.stringify(sanitized); - const hasSensitiveData = - sanitizedStr.includes('sk-1234567890abcdef') || - sanitizedStr.includes('cred-123') || - sanitizedStr.includes('cred-456') || - sanitizedStr.includes('user-123'); - - if (hasSensitiveData) { - console.error(' โŒ Sensitive data found in sanitized workflow!'); - } else { - console.log(' โœ“ All sensitive data removed'); - } - - console.log(' โœ“ Workflow hash:', sanitized.workflowHash); - console.log(' โœ“ Node count:', sanitized.nodeCount); - console.log(' โœ“ Node types:', sanitized.nodeTypes); - console.log(' โœ“ Complexity:', sanitized.complexity); - - // Test 5: Track session start - console.log('\n๐Ÿš€ Test 5: Tracking session start...'); - telemetry.trackSessionStart(); - console.log(' โœ“ Session start tracked'); - - // Flush all events - console.log('\n๐Ÿ’พ Flushing telemetry data to Supabase...'); + // Force flush + console.log('\nFlushing telemetry data...'); await telemetry.flush(); - console.log(' โœ“ Data flushed to Supabase'); - // Test 6: Verify data in Supabase - console.log('\n๐Ÿ” Test 6: Verifying data in Supabase...'); - console.log(' Please check your Supabase dashboard to verify:'); - console.log(' - telemetry_events table has new records'); - console.log(' - telemetry_workflows table has the test workflow'); - console.log(' - Views show aggregated data'); - console.log('\n Dashboard URL: https://supabase.com/dashboard/project/ydyufsohxdfpopqbubwk/editor'); - - console.log('\nโœจ Telemetry integration test completed!'); + console.log('\nโœ… Telemetry integration test completed!'); + console.log('Check your Supabase dashboard for the telemetry data.'); } -// Run the test -testTelemetryIntegration().catch(error => { - console.error('โŒ Test failed:', error); - process.exit(1); -}); \ No newline at end of file +testIntegration().catch(console.error); diff --git a/scripts/test-telemetry-security.ts b/scripts/test-telemetry-security.ts new file mode 100644 index 0000000..dac9ca0 --- /dev/null +++ b/scripts/test-telemetry-security.ts @@ -0,0 +1,87 @@ +#!/usr/bin/env npx tsx +/** + * Test that RLS properly protects data + */ + +import { createClient } from '@supabase/supabase-js'; +import dotenv from 'dotenv'; + +dotenv.config(); + +async function testSecurity() { + const supabaseUrl = process.env.SUPABASE_URL!; + const supabaseAnonKey = process.env.SUPABASE_ANON_KEY!; + + console.log('๐Ÿ”’ Testing Telemetry Security (RLS)\n'); + + const supabase = createClient(supabaseUrl, supabaseAnonKey, { + auth: { + persistSession: false, + autoRefreshToken: false, + } + }); + + // Test 1: Verify anon can INSERT + 
console.log('Test 1: Anonymous INSERT (should succeed)...'); + const testData = { + user_id: 'security-test-' + Date.now(), + event: 'security_test', + properties: { test: true } + }; + + const { error: insertError } = await supabase + .from('telemetry_events') + .insert([testData]); + + if (insertError) { + console.error('โŒ Insert failed:', insertError.message); + } else { + console.log('โœ… Insert succeeded (as expected)'); + } + + // Test 2: Verify anon CANNOT SELECT + console.log('\nTest 2: Anonymous SELECT (should fail)...'); + const { data, error: selectError } = await supabase + .from('telemetry_events') + .select('*') + .limit(1); + + if (selectError) { + console.log('โœ… Select blocked by RLS (as expected):', selectError.message); + } else if (data && data.length > 0) { + console.error('โŒ SECURITY ISSUE: Anon can read data!', data); + } else if (data && data.length === 0) { + console.log('โš ๏ธ Select returned empty array (might be RLS working)'); + } + + // Test 3: Verify anon CANNOT UPDATE + console.log('\nTest 3: Anonymous UPDATE (should fail)...'); + const { error: updateError } = await supabase + .from('telemetry_events') + .update({ event: 'hacked' }) + .eq('user_id', 'test'); + + if (updateError) { + console.log('โœ… Update blocked (as expected):', updateError.message); + } else { + console.error('โŒ SECURITY ISSUE: Anon can update data!'); + } + + // Test 4: Verify anon CANNOT DELETE + console.log('\nTest 4: Anonymous DELETE (should fail)...'); + const { error: deleteError } = await supabase + .from('telemetry_events') + .delete() + .eq('user_id', 'test'); + + if (deleteError) { + console.log('โœ… Delete blocked (as expected):', deleteError.message); + } else { + console.error('โŒ SECURITY ISSUE: Anon can delete data!'); + } + + console.log('\nโœจ Security test completed!'); + console.log('Summary: Anonymous users can INSERT (for telemetry) but cannot READ/UPDATE/DELETE'); +} + +testSecurity().catch(console.error); \ No newline at end of file diff --git a/scripts/test-workflow-insert.ts b/scripts/test-workflow-insert.ts new file mode 100644 index 0000000..3cd0cb2 --- /dev/null +++ b/scripts/test-workflow-insert.ts @@ -0,0 +1,55 @@ +#!/usr/bin/env npx tsx +/** + * Test direct workflow insert to Supabase + */ + +import { createClient } from '@supabase/supabase-js'; + +const TELEMETRY_BACKEND = { + URL: 'https://ydyufsohxdfpopqbubwk.supabase.co', + ANON_KEY: 'eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJpc3MiOiJzdXBhYmFzZSIsInJlZiI6InlkeXVmc29oeGRmcG9wcWJ1YndrIiwicm9sZSI6ImFub24iLCJpYXQiOjE3NTg3OTYyMDAsImV4cCI6MjA3NDM3MjIwMH0.xESphg6h5ozaDsm4Vla3QnDJGc6Nc_cpfoqTHRynkCk' +}; + +async function testWorkflowInsert() { + const supabase = createClient(TELEMETRY_BACKEND.URL, TELEMETRY_BACKEND.ANON_KEY, { + auth: { + persistSession: false, + autoRefreshToken: false, + } + }); + + const testWorkflow = { + user_id: 'direct-test-' + Date.now(), + workflow_hash: 'hash-direct-' + Date.now(), + node_count: 2, + node_types: ['webhook', 'http'], + has_trigger: true, + has_webhook: true, + complexity: 'simple' as const, + sanitized_workflow: { + nodes: [ + { id: '1', type: 'webhook', parameters: {} }, + { id: '2', type: 'http', parameters: {} } + ], + connections: {} + } + }; + + console.log('Attempting direct insert to telemetry_workflows...'); + console.log('Data:', JSON.stringify(testWorkflow, null, 2)); + + const { data, error } = await supabase + .from('telemetry_workflows') + .insert([testWorkflow]); + + if (error) { + console.error('\nโŒ Error:', error); + } else { + 
console.log('\nโœ… Success! Workflow inserted'); + if (data) { + console.log('Response:', data); + } + } +} + +testWorkflowInsert().catch(console.error); \ No newline at end of file diff --git a/scripts/test-workflow-sanitizer.ts b/scripts/test-workflow-sanitizer.ts new file mode 100644 index 0000000..3d3615b --- /dev/null +++ b/scripts/test-workflow-sanitizer.ts @@ -0,0 +1,67 @@ +#!/usr/bin/env npx tsx +/** + * Test workflow sanitizer + */ + +import { WorkflowSanitizer } from '../src/telemetry/workflow-sanitizer'; + +const testWorkflow = { + nodes: [ + { + id: 'webhook1', + type: 'n8n-nodes-base.webhook', + name: 'Webhook', + position: [0, 0], + parameters: { + path: '/test-webhook', + httpMethod: 'POST' + } + }, + { + id: 'http1', + type: 'n8n-nodes-base.httpRequest', + name: 'HTTP Request', + position: [250, 0], + parameters: { + url: 'https://api.example.com/endpoint', + method: 'GET', + authentication: 'genericCredentialType', + sendHeaders: true, + headerParameters: { + parameters: [ + { + name: 'Authorization', + value: 'Bearer sk-1234567890abcdef' + } + ] + } + } + } + ], + connections: { + 'webhook1': { + main: [[{ node: 'http1', type: 'main', index: 0 }]] + } + } +}; + +console.log('๐Ÿงช Testing Workflow Sanitizer\n'); +console.log('Original workflow has', testWorkflow.nodes.length, 'nodes'); + +try { + const sanitized = WorkflowSanitizer.sanitizeWorkflow(testWorkflow); + + console.log('\nโœ… Sanitization successful!'); + console.log('\nSanitized output:'); + console.log(JSON.stringify(sanitized, null, 2)); + + console.log('\n๐Ÿ“Š Metrics:'); + console.log('- Workflow Hash:', sanitized.workflowHash); + console.log('- Node Count:', sanitized.nodeCount); + console.log('- Node Types:', sanitized.nodeTypes); + console.log('- Has Trigger:', sanitized.hasTrigger); + console.log('- Has Webhook:', sanitized.hasWebhook); + console.log('- Complexity:', sanitized.complexity); +} catch (error) { + console.error('โŒ Sanitization failed:', error); +} diff --git a/scripts/test-workflow-tracking-debug.ts b/scripts/test-workflow-tracking-debug.ts new file mode 100644 index 0000000..6de3e94 --- /dev/null +++ b/scripts/test-workflow-tracking-debug.ts @@ -0,0 +1,71 @@ +#!/usr/bin/env npx tsx +/** + * Debug workflow tracking in telemetry manager + */ + +import { TelemetryManager } from '../src/telemetry/telemetry-manager'; + +// Get the singleton instance +const telemetry = TelemetryManager.getInstance(); + +const testWorkflow = { + nodes: [ + { + id: 'webhook1', + type: 'n8n-nodes-base.webhook', + name: 'Webhook', + position: [0, 0], + parameters: { + path: '/test-' + Date.now(), + httpMethod: 'POST' + } + }, + { + id: 'http1', + type: 'n8n-nodes-base.httpRequest', + name: 'HTTP Request', + position: [250, 0], + parameters: { + url: 'https://api.example.com/data', + method: 'GET' + } + }, + { + id: 'slack1', + type: 'n8n-nodes-base.slack', + name: 'Slack', + position: [500, 0], + parameters: { + channel: '#general', + text: 'Workflow complete!' 
+ } + } + ], + connections: { + 'webhook1': { + main: [[{ node: 'http1', type: 'main', index: 0 }]] + }, + 'http1': { + main: [[{ node: 'slack1', type: 'main', index: 0 }]] + } + } +}; + +console.log('๐Ÿงช Testing Workflow Tracking\n'); +console.log('Workflow has', testWorkflow.nodes.length, 'nodes'); + +// Track the workflow +console.log('Calling trackWorkflowCreation...'); +telemetry.trackWorkflowCreation(testWorkflow, true); + +console.log('Waiting for async processing...'); + +// Wait for setImmediate to process +setTimeout(async () => { + console.log('\nForcing flush...'); + await telemetry.flush(); + console.log('โœ… Flush complete!'); + + console.log('\nWorkflow should now be in the telemetry_workflows table.'); + console.log('Check with: SELECT * FROM telemetry_workflows ORDER BY created_at DESC LIMIT 1;'); +}, 2000); diff --git a/src/mcp/index.ts b/src/mcp/index.ts index 22596c9..b792a44 100644 --- a/src/mcp/index.ts +++ b/src/mcp/index.ts @@ -50,7 +50,7 @@ Commands: disable Disable anonymous telemetry status Show current telemetry status -Learn more: https://github.com/czlonkowski/n8n-mcp/privacy +Learn more: https://github.com/czlonkowski/n8n-mcp/blob/main/PRIVACY.md `); process.exit(args[1] ? 1 : 0); } diff --git a/src/telemetry/config-manager.ts b/src/telemetry/config-manager.ts index e7a2853..e0694be 100644 --- a/src/telemetry/config-manager.ts +++ b/src/telemetry/config-manager.ts @@ -4,7 +4,7 @@ */ import { existsSync, readFileSync, writeFileSync, mkdirSync } from 'fs'; -import { join } from 'path'; +import { join, resolve, dirname } from 'path'; import { homedir } from 'os'; import { createHash } from 'crypto'; import { hostname, platform, arch } from 'os'; @@ -53,15 +53,24 @@ export class TelemetryConfigManager { if (!existsSync(this.configPath)) { // First run - create default config + const version = this.getPackageVersion(); + + // Check if telemetry is disabled via environment variable + const envDisabled = this.isDisabledByEnvironment(); + this.config = { - enabled: true, + enabled: !envDisabled, // Respect env var on first run userId: this.generateUserId(), firstRun: new Date().toISOString(), - version: require('../../package.json').version + version }; this.saveConfig(); - this.showFirstRunNotice(); + + // Only show notice if not disabled via environment + if (!envDisabled) { + this.showFirstRunNotice(); + } return this.config; } @@ -107,12 +116,51 @@ export class TelemetryConfigManager { /** * Check if telemetry is enabled + * Priority: Environment variable > Config file > Default (true) */ isEnabled(): boolean { + // Check environment variables first (for Docker users) + if (this.isDisabledByEnvironment()) { + return false; + } + const config = this.loadConfig(); return config.enabled; } + /** + * Check if telemetry is disabled via environment variable + */ + private isDisabledByEnvironment(): boolean { + const envVars = [ + 'N8N_MCP_TELEMETRY_DISABLED', + 'TELEMETRY_DISABLED', + 'DISABLE_TELEMETRY' + ]; + + for (const varName of envVars) { + const value = process.env[varName]; + if (value !== undefined) { + const normalized = value.toLowerCase().trim(); + + // Warn about invalid values + if (!['true', 'false', '1', '0', ''].includes(normalized)) { + console.warn( + `โš ๏ธ Invalid telemetry environment variable value: ${varName}="${value}"\n` + + ` Use "true" to disable or "false" to enable telemetry.` + ); + } + + // Accept common truthy values + if (normalized === 'true' || normalized === '1') { + return true; + } + } + } + + return false; + } + /** * Get the 
anonymous user ID */ @@ -155,14 +203,25 @@ export class TelemetryConfigManager { */ getStatus(): string { const config = this.loadConfig(); + + // Check if disabled by environment + const envDisabled = this.isDisabledByEnvironment(); + + let status = config.enabled ? 'ENABLED' : 'DISABLED'; + if (envDisabled) { + status = 'DISABLED (via environment variable)'; + } + return ` -Telemetry Status: ${config.enabled ? 'ENABLED' : 'DISABLED'} +Telemetry Status: ${status} Anonymous ID: ${config.userId} First Run: ${config.firstRun || 'Unknown'} Config Path: ${this.configPath} To opt-out: npx n8n-mcp telemetry disable To opt-in: npx n8n-mcp telemetry enable + +For Docker: Set N8N_MCP_TELEMETRY_DISABLED=true `; } @@ -199,9 +258,44 @@ To opt-in: npx n8n-mcp telemetry enable โ•‘ npx n8n-mcp telemetry disable โ•‘ โ•‘ โ•‘ โ•‘ Learn more: โ•‘ -โ•‘ https://github.com/czlonkowski/n8n-mcp/privacy โ•‘ +โ•‘ https://github.com/czlonkowski/n8n-mcp/blob/main/PRIVACY.md โ•‘ โ•‘ โ•‘ โ•šโ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ• `); } + + /** + * Get package version safely + */ + private getPackageVersion(): string { + try { + // Try multiple approaches to find package.json + const possiblePaths = [ + resolve(__dirname, '..', '..', 'package.json'), + resolve(process.cwd(), 'package.json'), + resolve(__dirname, '..', '..', '..', 'package.json') + ]; + + for (const packagePath of possiblePaths) { + if (existsSync(packagePath)) { + const packageJson = JSON.parse(readFileSync(packagePath, 'utf-8')); + if (packageJson.version) { + return packageJson.version; + } + } + } + + // Fallback: try require (works in some environments) + try { + const packageJson = require('../../package.json'); + return packageJson.version || 'unknown'; + } catch { + // Ignore require error + } + + return 'unknown'; + } catch (error) { + return 'unknown'; + } + } } \ No newline at end of file diff --git a/src/telemetry/telemetry-manager.ts b/src/telemetry/telemetry-manager.ts index b43c652..a019308 100644 --- a/src/telemetry/telemetry-manager.ts +++ b/src/telemetry/telemetry-manager.ts @@ -7,6 +7,8 @@ import { createClient, SupabaseClient } from '@supabase/supabase-js'; import { TelemetryConfigManager } from './config-manager'; import { WorkflowSanitizer } from './workflow-sanitizer'; import { logger } from '../utils/logger'; +import { resolve } from 'path'; +import { existsSync, readFileSync } from 'fs'; interface TelemetryEvent { user_id: string; @@ -27,6 +29,28 @@ interface WorkflowTelemetry { created_at?: string; } +// Configuration constants +const TELEMETRY_CONFIG = { + BATCH_FLUSH_INTERVAL: 5000, // 5 seconds - reduced for multi-process + EVENT_QUEUE_THRESHOLD: 1, // Immediate flush for multi-process compatibility + WORKFLOW_QUEUE_THRESHOLD: 1, // Immediate flush for multi-process compatibility + MAX_RETRIES: 3, + RETRY_DELAY: 1000, // 1 second + OPERATION_TIMEOUT: 5000, // 5 seconds +} as const; + +// Hardcoded telemetry backend configuration +// IMPORTANT: This is intentionally hardcoded for zero-configuration telemetry +// The anon key is PUBLIC and SAFE to expose because: +// 1. It only allows INSERT operations (write-only) +// 2. Row Level Security (RLS) policies prevent reading/updating/deleting data +// 3. This is standard practice for anonymous telemetry collection +// 4. 
No sensitive user data is ever sent +const TELEMETRY_BACKEND = { + URL: 'https://ydyufsohxdfpopqbubwk.supabase.co', + ANON_KEY: 'eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJpc3MiOiJzdXBhYmFzZSIsInJlZiI6InlkeXVmc29oeGRmcG9wcWJ1YndrIiwicm9sZSI6ImFub24iLCJpYXQiOjE3NTg3OTYyMDAsImV4cCI6MjA3NDM3MjIwMH0.xESphg6h5ozaDsm4Vla3QnDJGc6Nc_cpfoqTHRynkCk' +} as const; + export class TelemetryManager { private static instance: TelemetryManager; private supabase: SupabaseClient | null = null; @@ -35,6 +59,7 @@ export class TelemetryManager { private workflowQueue: WorkflowTelemetry[] = []; private flushTimer?: NodeJS.Timeout; private isInitialized: boolean = false; + private isFlushingWorkflows: boolean = false; private constructor() { this.configManager = TelemetryConfigManager.getInstance(); @@ -57,13 +82,10 @@ export class TelemetryManager { return; } - const supabaseUrl = process.env.SUPABASE_URL; - const supabaseAnonKey = process.env.SUPABASE_ANON_KEY; - - if (!supabaseUrl || !supabaseAnonKey) { - logger.debug('Telemetry not configured: missing SUPABASE_URL or SUPABASE_ANON_KEY'); - return; - } + // Use hardcoded credentials for zero-configuration telemetry + // Environment variables can override for development/testing + const supabaseUrl = process.env.SUPABASE_URL || TELEMETRY_BACKEND.URL; + const supabaseAnonKey = process.env.SUPABASE_ANON_KEY || TELEMETRY_BACKEND.ANON_KEY; try { this.supabase = createClient(supabaseUrl, supabaseAnonKey, { @@ -116,9 +138,9 @@ export class TelemetryManager { } /** - * Track workflow creation + * Track workflow creation (fire-and-forget) */ - async trackWorkflowCreation(workflow: any, validationPassed: boolean): Promise { + trackWorkflowCreation(workflow: any, validationPassed: boolean): void { if (!this.isEnabled()) return; // Only store workflows that pass validation @@ -129,41 +151,67 @@ export class TelemetryManager { return; } - try { - const sanitized = WorkflowSanitizer.sanitizeWorkflow(workflow); + // Process asynchronously without blocking + setImmediate(async () => { + try { + const sanitized = WorkflowSanitizer.sanitizeWorkflow(workflow); - const telemetryData: WorkflowTelemetry = { - user_id: this.configManager.getUserId(), - workflow_hash: sanitized.workflowHash, - node_count: sanitized.nodeCount, - node_types: sanitized.nodeTypes, - has_trigger: sanitized.hasTrigger, - has_webhook: sanitized.hasWebhook, - complexity: sanitized.complexity, - sanitized_workflow: { - nodes: sanitized.nodes, - connections: sanitized.connections, - }, - }; + const telemetryData: WorkflowTelemetry = { + user_id: this.configManager.getUserId(), + workflow_hash: sanitized.workflowHash, + node_count: sanitized.nodeCount, + node_types: sanitized.nodeTypes, + has_trigger: sanitized.hasTrigger, + has_webhook: sanitized.hasWebhook, + complexity: sanitized.complexity, + sanitized_workflow: { + nodes: sanitized.nodes, + connections: sanitized.connections, + }, + }; - this.workflowQueue.push(telemetryData); + // Add to queue synchronously to avoid race conditions + const queueLength = this.addToWorkflowQueue(telemetryData); - // Also track as event - this.trackEvent('workflow_created', { - nodeCount: sanitized.nodeCount, - nodeTypes: sanitized.nodeTypes.length, - complexity: sanitized.complexity, - hasTrigger: sanitized.hasTrigger, - hasWebhook: sanitized.hasWebhook, - }); + // Also track as event + this.trackEvent('workflow_created', { + nodeCount: sanitized.nodeCount, + nodeTypes: sanitized.nodeTypes.length, + complexity: sanitized.complexity, + hasTrigger: sanitized.hasTrigger, + 
hasWebhook: sanitized.hasWebhook, + }); - // Flush if queue is getting large - if (this.workflowQueue.length >= 5) { - await this.flush(); + // Flush if queue reached threshold + if (queueLength >= TELEMETRY_CONFIG.WORKFLOW_QUEUE_THRESHOLD) { + await this.flush(); + } + } catch (error) { + logger.debug('Failed to track workflow creation:', error); } - } catch (error) { - logger.debug('Failed to track workflow creation:', error); + }); + } + + /** + * Thread-safe method to add workflow to queue + * Returns the new queue length after adding + */ + private addToWorkflowQueue(telemetryData: WorkflowTelemetry): number { + // Don't add to queue if we're currently flushing workflows + // This prevents race conditions where items are added during flush + if (this.isFlushingWorkflows) { + // Queue the flush for later to ensure we don't lose data + setImmediate(() => { + this.workflowQueue.push(telemetryData); + if (this.workflowQueue.length >= TELEMETRY_CONFIG.WORKFLOW_QUEUE_THRESHOLD) { + this.flush(); + } + }); + return 0; // Don't trigger immediate flush } + + this.workflowQueue.push(telemetryData); + return this.workflowQueue.length; } /** @@ -194,7 +242,7 @@ export class TelemetryManager { this.eventQueue.push(event); // Flush if queue is getting large - if (this.eventQueue.length >= 20) { + if (this.eventQueue.length >= TELEMETRY_CONFIG.EVENT_QUEUE_THRESHOLD) { this.flush(); } } @@ -206,13 +254,83 @@ export class TelemetryManager { if (!this.isEnabled()) return; this.trackEvent('session_start', { - version: require('../../package.json').version, + version: this.getPackageVersion(), platform: process.platform, arch: process.arch, nodeVersion: process.version, }); } + /** + * Get package version safely + */ + private getPackageVersion(): string { + try { + // Try multiple approaches to find package.json + const possiblePaths = [ + resolve(__dirname, '..', '..', 'package.json'), + resolve(process.cwd(), 'package.json'), + resolve(__dirname, '..', '..', '..', 'package.json') + ]; + + for (const packagePath of possiblePaths) { + if (existsSync(packagePath)) { + const packageJson = JSON.parse(readFileSync(packagePath, 'utf-8')); + if (packageJson.version) { + return packageJson.version; + } + } + } + + // Fallback: try require (works in some environments) + try { + const packageJson = require('../../package.json'); + return packageJson.version || 'unknown'; + } catch { + // Ignore require error + } + + return 'unknown'; + } catch (error) { + logger.debug('Failed to get package version:', error); + return 'unknown'; + } + } + + /** + * Execute Supabase operation with retry and timeout + */ + private async executeWithRetry( + operation: () => Promise, + operationName: string + ): Promise { + let lastError: Error | null = null; + + for (let attempt = 1; attempt <= TELEMETRY_CONFIG.MAX_RETRIES; attempt++) { + try { + // Create a timeout promise + const timeoutPromise = new Promise((_, reject) => { + setTimeout(() => reject(new Error('Operation timed out')), TELEMETRY_CONFIG.OPERATION_TIMEOUT); + }); + + // Race between operation and timeout + const result = await Promise.race([operation(), timeoutPromise]) as T; + return result; + } catch (error) { + lastError = error as Error; + logger.debug(`${operationName} attempt ${attempt} failed:`, error); + + if (attempt < TELEMETRY_CONFIG.MAX_RETRIES) { + // Wait before retrying + await new Promise(resolve => setTimeout(resolve, TELEMETRY_CONFIG.RETRY_DELAY * attempt)); + } + } + } + + logger.debug(`${operationName} failed after ${TELEMETRY_CONFIG.MAX_RETRIES} 
attempts:`, lastError); + return null; + } + /** * Flush queued events to Supabase */ @@ -224,42 +342,66 @@ export class TelemetryManager { const events = [...this.eventQueue]; this.eventQueue = []; - try { - const { error } = await this.supabase + await this.executeWithRetry(async () => { + const { error } = await this.supabase! .from('telemetry_events') .insert(events); // No .select() - we don't need the response if (error) { - logger.debug('Failed to flush telemetry events:', error.message); - } else { - logger.debug(`Flushed ${events.length} telemetry events`); + throw error; } - } catch (error) { - logger.debug('Error flushing telemetry events:', error); - } + + logger.debug(`Flushed ${events.length} telemetry events`); + return true; + }, 'Flush telemetry events'); } // Flush workflows if (this.workflowQueue.length > 0) { - const workflows = [...this.workflowQueue]; - this.workflowQueue = []; + this.isFlushingWorkflows = true; try { - // Use upsert to avoid duplicates based on workflow_hash - const { error } = await this.supabase - .from('telemetry_workflows') - .upsert(workflows, { - onConflict: 'workflow_hash', - ignoreDuplicates: true, - }); // No .select() - we don't need the response + const workflows = [...this.workflowQueue]; + this.workflowQueue = []; - if (error) { - logger.debug('Failed to flush telemetry workflows:', error.message); - } else { - logger.debug(`Flushed ${workflows.length} telemetry workflows`); + const result = await this.executeWithRetry(async () => { + // Deduplicate workflows by hash before inserting + const uniqueWorkflows = workflows.reduce((acc, workflow) => { + if (!acc.some(w => w.workflow_hash === workflow.workflow_hash)) { + acc.push(workflow); + } + return acc; + }, [] as WorkflowTelemetry[]); + + logger.debug(`Deduplicating workflows: ${workflows.length} -> ${uniqueWorkflows.length} unique`); + + // Use insert (same as events) - duplicates are handled by deduplication above + const { error } = await this.supabase! + .from('telemetry_workflows') + .insert(uniqueWorkflows); // No .select() - we don't need the response + + if (error) { + logger.debug('Detailed workflow flush error:', { + error: error, + workflowCount: workflows.length, + firstWorkflow: workflows[0] ? 
{ + user_id: workflows[0].user_id, + workflow_hash: workflows[0].workflow_hash, + node_count: workflows[0].node_count + } : null + }); + throw error; + } + + logger.debug(`Flushed ${uniqueWorkflows.length} unique telemetry workflows (${workflows.length} total processed)`); + return true; + }, 'Flush telemetry workflows'); + + if (!result) { + logger.debug('Failed to flush workflows after retries'); } - } catch (error) { - logger.debug('Error flushing telemetry workflows:', error); + } finally { + this.isFlushingWorkflows = false; } } } @@ -268,10 +410,10 @@ export class TelemetryManager { * Start batch processor for periodic flushing */ private startBatchProcessor(): void { - // Flush every 30 seconds + // Flush periodically this.flushTimer = setInterval(() => { this.flush(); - }, 30000); + }, TELEMETRY_CONFIG.BATCH_FLUSH_INTERVAL); // Prevent timer from keeping process alive this.flushTimer.unref(); @@ -387,5 +529,12 @@ export class TelemetryManager { } } +// Create a global singleton to ensure only one instance across all imports +const globalAny = global as any; + +if (!globalAny.__telemetryManager) { + globalAny.__telemetryManager = TelemetryManager.getInstance(); +} + // Export singleton instance -export const telemetry = TelemetryManager.getInstance(); \ No newline at end of file +export const telemetry = globalAny.__telemetryManager as TelemetryManager; \ No newline at end of file diff --git a/tests/unit/telemetry/workflow-sanitizer.test.ts b/tests/unit/telemetry/workflow-sanitizer.test.ts index ce3f984..2f0b0a1 100644 --- a/tests/unit/telemetry/workflow-sanitizer.test.ts +++ b/tests/unit/telemetry/workflow-sanitizer.test.ts @@ -49,7 +49,7 @@ describe('WorkflowSanitizer', () => { const sanitized = WorkflowSanitizer.sanitizeWorkflow(workflow); - expect(sanitized.nodes[0].parameters.webhookUrl).toBe('https://[webhook-url]'); + expect(sanitized.nodes[0].parameters.webhookUrl).toBe('[REDACTED]'); expect(sanitized.nodes[0].parameters.method).toBe('POST'); // Method should remain expect(sanitized.nodes[0].parameters.path).toBe('my-webhook'); // Path should remain }); @@ -104,9 +104,9 @@ describe('WorkflowSanitizer', () => { const sanitized = WorkflowSanitizer.sanitizeWorkflow(workflow); - expect(sanitized.nodes[0].parameters.url).toBe('https://[domain]/endpoint'); + expect(sanitized.nodes[0].parameters.url).toBe('[REDACTED]'); expect(sanitized.nodes[0].parameters.endpoint).toBe('[REDACTED]'); - expect(sanitized.nodes[0].parameters.baseUrl).toBe('https://[domain]'); + expect(sanitized.nodes[0].parameters.baseUrl).toBe('[REDACTED]'); }); it('should calculate workflow metrics correctly', () => { @@ -288,19 +288,23 @@ describe('WorkflowSanitizer', () => { const sanitized = WorkflowSanitizer.sanitizeWorkflow(workflow); - // These should be removed - expect(sanitized.settings?.errorWorkflow).toBeUndefined(); - expect(sanitized.staticData).toBeUndefined(); - expect(sanitized.pinData).toBeUndefined(); - expect(sanitized.credentials).toBeUndefined(); - expect(sanitized.sharedWorkflows).toBeUndefined(); - expect(sanitized.ownedBy).toBeUndefined(); - expect(sanitized.createdBy).toBeUndefined(); - expect(sanitized.updatedBy).toBeUndefined(); - - // These should be preserved + // Verify that sensitive workflow-level properties are not in the sanitized output + // The sanitized workflow should only have specific fields as defined in SanitizedWorkflow interface expect(sanitized.nodes).toEqual([]); expect(sanitized.connections).toEqual({}); + expect(sanitized.nodeCount).toBe(0); + 
expect(sanitized.nodeTypes).toEqual([]); + + // Verify these fields don't exist in the sanitized output + const sanitizedAsAny = sanitized as any; + expect(sanitizedAsAny.settings).toBeUndefined(); + expect(sanitizedAsAny.staticData).toBeUndefined(); + expect(sanitizedAsAny.pinData).toBeUndefined(); + expect(sanitizedAsAny.credentials).toBeUndefined(); + expect(sanitizedAsAny.sharedWorkflows).toBeUndefined(); + expect(sanitizedAsAny.ownedBy).toBeUndefined(); + expect(sanitizedAsAny.createdBy).toBeUndefined(); + expect(sanitizedAsAny.updatedBy).toBeUndefined(); }); }); }); \ No newline at end of file diff --git a/verify-telemetry-fix.js b/verify-telemetry-fix.js new file mode 100644 index 0000000..17546a5 --- /dev/null +++ b/verify-telemetry-fix.js @@ -0,0 +1,132 @@ +#!/usr/bin/env node + +/** + * Verification script to test that telemetry permissions are fixed + * Run this AFTER applying the GRANT permissions fix + */ + +const { createClient } = require('@supabase/supabase-js'); +const crypto = require('crypto'); + +const TELEMETRY_BACKEND = { + URL: 'https://ydyufsohxdfpopqbubwk.supabase.co', + ANON_KEY: 'eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJpc3MiOiJzdXBhYmFzZSIsInJlZiI6InlkeXVmc29oeGRmcG9wcWJ1YndrIiwicm9sZSI6ImFub24iLCJpYXQiOjE3NTg3OTYyMDAsImV4cCI6MjA3NDM3MjIwMH0.xESphg6h5ozaDsm4Vla3QnDJGc6Nc_cpfoqTHRynkCk' +}; + +async function verifyTelemetryFix() { + console.log('๐Ÿ” VERIFYING TELEMETRY PERMISSIONS FIX'); + console.log('====================================\n'); + + const supabase = createClient(TELEMETRY_BACKEND.URL, TELEMETRY_BACKEND.ANON_KEY, { + auth: { + persistSession: false, + autoRefreshToken: false, + } + }); + + const testUserId = 'verify-' + crypto.randomBytes(4).toString('hex'); + + // Test 1: Event insert + console.log('๐Ÿ“ Test 1: Event insert'); + try { + const { data, error } = await supabase + .from('telemetry_events') + .insert([{ + user_id: testUserId, + event: 'verification_test', + properties: { fixed: true } + }]); + + if (error) { + console.error('โŒ Event insert failed:', error.message); + return false; + } else { + console.log('โœ… Event insert successful'); + } + } catch (e) { + console.error('โŒ Event insert exception:', e.message); + return false; + } + + // Test 2: Workflow insert + console.log('๐Ÿ“ Test 2: Workflow insert'); + try { + const { data, error } = await supabase + .from('telemetry_workflows') + .insert([{ + user_id: testUserId, + workflow_hash: 'verify-' + crypto.randomBytes(4).toString('hex'), + node_count: 2, + node_types: ['n8n-nodes-base.webhook', 'n8n-nodes-base.set'], + has_trigger: true, + has_webhook: true, + complexity: 'simple', + sanitized_workflow: { + nodes: [{ + id: 'test-node', + type: 'n8n-nodes-base.webhook', + position: [100, 100], + parameters: {} + }], + connections: {} + } + }]); + + if (error) { + console.error('โŒ Workflow insert failed:', error.message); + return false; + } else { + console.log('โœ… Workflow insert successful'); + } + } catch (e) { + console.error('โŒ Workflow insert exception:', e.message); + return false; + } + + // Test 3: Upsert operation (like real telemetry) + console.log('๐Ÿ“ Test 3: Upsert operation'); + try { + const workflowHash = 'upsert-verify-' + crypto.randomBytes(4).toString('hex'); + + const { data, error } = await supabase + .from('telemetry_workflows') + .upsert([{ + user_id: testUserId, + workflow_hash: workflowHash, + node_count: 3, + node_types: ['n8n-nodes-base.webhook', 'n8n-nodes-base.set', 'n8n-nodes-base.if'], + has_trigger: true, + has_webhook: true, + complexity: 
'medium', + sanitized_workflow: { + nodes: [], + connections: {} + } + }], { + onConflict: 'workflow_hash', + ignoreDuplicates: true, + }); + + if (error) { + console.error('โŒ Upsert failed:', error.message); + return false; + } else { + console.log('โœ… Upsert successful'); + } + } catch (e) { + console.error('โŒ Upsert exception:', e.message); + return false; + } + + console.log('\n๐ŸŽ‰ All tests passed! Telemetry permissions are fixed.'); + console.log('๐Ÿ‘ Workflow telemetry should now work in the actual application.'); + + return true; +} + +async function main() { + const success = await verifyTelemetryFix(); + process.exit(success ? 0 : 1); +} + +main().catch(console.error); \ No newline at end of file
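For reference, a typical invocation of the verification script, sketched under the assumption that the repository's dependencies (including @supabase/supabase-js) are installed and the GRANT/RLS permissions fix has already been applied to the Supabase project:

```bash
# Run from the repository root; main() exits 0 only when all three checks pass
node verify-telemetry-fix.js
# Expected final output:
#   🎉 All tests passed! Telemetry permissions are fixed.
#   👍 Workflow telemetry should now work in the actual application.
```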