Mirror of https://github.com/czlonkowski/n8n-mcp.git (synced 2026-01-29 22:12:05 +00:00)
fix: update telemetry to work with Supabase RLS and permissions

- Remove .select() from insert operations to avoid permission issues
- Add debug logging for successful flushes
- Add comprehensive test scripts for telemetry verification
- Telemetry now successfully sends anonymous usage data to Supabase
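Why removing .select() helps (a minimal sketch, not code from this commit; it assumes an anon-key client and insert-only RLS policies on the telemetry tables, which Test 3 in the debug script below suggests is the setup): chaining .select() onto an insert asks Supabase to return the inserted rows, and that read is blocked for the anon role, so the whole call fails, while a bare insert is write-only and goes through.

// Sketch of the insert-without-select pattern this commit adopts (not part of the diff).
// Assumes SUPABASE_URL / SUPABASE_ANON_KEY in the environment and an insert-only
// RLS policy on telemetry_events for the anon role.
import { createClient } from '@supabase/supabase-js';

const supabase = createClient(process.env.SUPABASE_URL!, process.env.SUPABASE_ANON_KEY!, {
  auth: { persistSession: false, autoRefreshToken: false },
});

export async function sendTelemetryEvent(event: Record<string, unknown>): Promise<void> {
  // .insert([event]).select() would request the inserted rows back, which the anon
  // role cannot read under RLS; inserting without .select() avoids that failure.
  const { error } = await supabase.from('telemetry_events').insert([event]);
  if (error) {
    // Telemetry is best-effort: log at debug level and move on rather than throwing.
    console.debug('telemetry insert failed:', error.message);
  }
}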
scripts/test-telemetry-debug.ts (new file, 118 lines)
@@ -0,0 +1,118 @@
#!/usr/bin/env npx tsx
/**
 * Debug script for telemetry integration
 * Tests direct Supabase connection
 */

import { createClient } from '@supabase/supabase-js';
import dotenv from 'dotenv';

// Load environment variables
dotenv.config();

async function debugTelemetry() {
  console.log('🔍 Debugging Telemetry Integration\n');

  const supabaseUrl = process.env.SUPABASE_URL;
  const supabaseAnonKey = process.env.SUPABASE_ANON_KEY;

  if (!supabaseUrl || !supabaseAnonKey) {
    console.error('❌ Missing SUPABASE_URL or SUPABASE_ANON_KEY');
    process.exit(1);
  }

  console.log('Environment:');
  console.log(' URL:', supabaseUrl);
  console.log(' Key:', supabaseAnonKey.substring(0, 30) + '...');

  // Create Supabase client
  const supabase = createClient(supabaseUrl, supabaseAnonKey, {
    auth: {
      persistSession: false,
      autoRefreshToken: false,
    }
  });

  // Test 1: Direct insert to telemetry_events
  console.log('\n📝 Test 1: Direct insert to telemetry_events...');
  const testEvent = {
    user_id: 'test-user-123',
    event: 'test_event',
    properties: {
      test: true,
      timestamp: new Date().toISOString()
    }
  };

  const { data: eventData, error: eventError } = await supabase
    .from('telemetry_events')
    .insert([testEvent])
    .select();

  if (eventError) {
    console.error('❌ Event insert failed:', eventError);
  } else {
    console.log('✅ Event inserted successfully:', eventData);
  }

  // Test 2: Direct insert to telemetry_workflows
  console.log('\n📝 Test 2: Direct insert to telemetry_workflows...');
  const testWorkflow = {
    user_id: 'test-user-123',
    workflow_hash: 'test-hash-' + Date.now(),
    node_count: 3,
    node_types: ['webhook', 'http', 'slack'],
    has_trigger: true,
    has_webhook: true,
    complexity: 'simple',
    sanitized_workflow: {
      nodes: [],
      connections: {}
    }
  };

  const { data: workflowData, error: workflowError } = await supabase
    .from('telemetry_workflows')
    .insert([testWorkflow])
    .select();

  if (workflowError) {
    console.error('❌ Workflow insert failed:', workflowError);
  } else {
    console.log('✅ Workflow inserted successfully:', workflowData);
  }

  // Test 3: Try to read data (should fail with anon key due to RLS)
  console.log('\n📖 Test 3: Attempting to read data (should fail due to RLS)...');
  const { data: readData, error: readError } = await supabase
    .from('telemetry_events')
    .select('*')
    .limit(1);

  if (readError) {
    console.log('✅ Read correctly blocked by RLS:', readError.message);
  } else {
    console.log('⚠️ Unexpected: Read succeeded (RLS may not be working):', readData);
  }

  // Test 4: Check table existence
  console.log('\n🔍 Test 4: Verifying tables exist...');
  const { data: tables, error: tablesError } = await supabase
    .rpc('get_tables', { schema_name: 'public' })
    .select('*');

  if (tablesError) {
    // This is expected - the RPC function might not exist
    console.log('ℹ️ Cannot list tables (RPC function not available)');
  } else {
    console.log('Tables found:', tables);
  }

  console.log('\n✨ Debug completed! Check your Supabase dashboard for the test data.');
  console.log('Dashboard: https://supabase.com/dashboard/project/ydyufsohxdfpopqbubwk/editor');
}

debugTelemetry().catch(error => {
  console.error('❌ Debug failed:', error);
  process.exit(1);
});
scripts/test-telemetry-integration.ts (new file, 182 lines)
@@ -0,0 +1,182 @@
#!/usr/bin/env npx tsx
/**
 * Test script for telemetry integration
 * Verifies that telemetry data can be sent to Supabase
 */

import { telemetry } from '../src/telemetry';
import { WorkflowSanitizer } from '../src/telemetry/workflow-sanitizer';
import dotenv from 'dotenv';

// Load environment variables
dotenv.config();

async function testTelemetryIntegration() {
  console.log('🧪 Testing Telemetry Integration with Supabase\n');

  // Check environment variables
  const supabaseUrl = process.env.SUPABASE_URL;
  const supabaseKey = process.env.SUPABASE_ANON_KEY;

  if (!supabaseUrl || !supabaseKey) {
    console.error('❌ Missing SUPABASE_URL or SUPABASE_ANON_KEY in .env file');
    process.exit(1);
  }

  console.log('✅ Environment variables configured');
  console.log(` Supabase URL: ${supabaseUrl}`);
  console.log(` Anon Key: ${supabaseKey.substring(0, 20)}...`);

  // Test 1: Track tool usage
  console.log('\n📊 Test 1: Tracking tool usage...');
  telemetry.trackToolUsage('search_nodes', true, 1250);
  telemetry.trackToolUsage('get_node_info', true, 850);
  telemetry.trackToolUsage('validate_workflow', false, 2000);
  console.log(' ✓ Tool usage events queued');

  // Test 2: Track errors
  console.log('\n🐛 Test 2: Tracking errors...');
  telemetry.trackError('ValidationError', 'workflow_validation', 'validate_workflow');
  telemetry.trackError('NetworkError', 'api_call', 'n8n_create_workflow');
  console.log(' ✓ Error events queued');

  // Test 3: Track workflow creation
  console.log('\n🔧 Test 3: Tracking workflow creation...');
  const testWorkflow = {
    name: 'Test Workflow',
    nodes: [
      {
        id: '1',
        name: 'Webhook',
        type: 'n8n-nodes-base.webhook',
        position: [100, 100],
        parameters: {
          path: 'test-webhook',
          webhookUrl: 'https://n8n.example.com/webhook/abc-123-def',
          method: 'POST',
          authentication: 'none'
        },
        credentials: {
          webhookAuth: {
            id: 'cred-123',
            name: 'My Webhook Auth'
          }
        }
      },
      {
        id: '2',
        name: 'HTTP Request',
        type: 'n8n-nodes-base.httpRequest',
        position: [300, 100],
        parameters: {
          url: 'https://api.example.com/endpoint',
          method: 'POST',
          authentication: 'genericCredentialType',
          genericAuthType: 'httpHeaderAuth',
          httpHeaders: {
            parameters: [
              {
                name: 'Authorization',
                value: 'Bearer sk-1234567890abcdef1234567890abcdef'
              }
            ]
          },
          options: {
            timeout: 10000
          }
        }
      },
      {
        id: '3',
        name: 'Slack',
        type: 'n8n-nodes-base.slack',
        position: [500, 100],
        parameters: {
          channel: 'general',
          text: 'Message sent!',
          authentication: 'accessToken'
        },
        credentials: {
          slackApi: {
            id: 'cred-456',
            name: 'My Slack'
          }
        }
      }
    ],
    connections: {
      '1': {
        main: [[{ node: '2', type: 'main', index: 0 }]]
      },
      '2': {
        main: [[{ node: '3', type: 'main', index: 0 }]]
      }
    },
    settings: {
      errorWorkflow: 'error-workflow-id',
      saveDataErrorExecution: 'all',
      saveDataSuccessExecution: 'none',
      saveExecutionProgress: true,
      saveManualExecutions: true,
      timezone: 'America/New_York'
    },
    staticData: { some: 'data' },
    pinData: { node1: 'pinned' },
    ownedBy: 'user-123',
    createdBy: 'user-123',
    updatedBy: 'user-456'
  };

  // Track successful workflow
  await telemetry.trackWorkflowCreation(testWorkflow, true);
  console.log(' ✓ Workflow creation tracked');

  // Test workflow sanitization
  console.log('\n🔒 Test 4: Verifying workflow sanitization...');
  const sanitized = WorkflowSanitizer.sanitizeWorkflow(testWorkflow);

  // Verify sensitive data was removed
  const sanitizedStr = JSON.stringify(sanitized);
  const hasSensitiveData =
    sanitizedStr.includes('sk-1234567890abcdef') ||
    sanitizedStr.includes('cred-123') ||
    sanitizedStr.includes('cred-456') ||
    sanitizedStr.includes('user-123');

  if (hasSensitiveData) {
    console.error(' ❌ Sensitive data found in sanitized workflow!');
  } else {
    console.log(' ✓ All sensitive data removed');
  }

  console.log(' ✓ Workflow hash:', sanitized.workflowHash);
  console.log(' ✓ Node count:', sanitized.nodeCount);
  console.log(' ✓ Node types:', sanitized.nodeTypes);
  console.log(' ✓ Complexity:', sanitized.complexity);

  // Test 5: Track session start
  console.log('\n🚀 Test 5: Tracking session start...');
  telemetry.trackSessionStart();
  console.log(' ✓ Session start tracked');

  // Flush all events
  console.log('\n💾 Flushing telemetry data to Supabase...');
  await telemetry.flush();
  console.log(' ✓ Data flushed to Supabase');

  // Test 6: Verify data in Supabase
  console.log('\n🔍 Test 6: Verifying data in Supabase...');
  console.log(' Please check your Supabase dashboard to verify:');
  console.log(' - telemetry_events table has new records');
  console.log(' - telemetry_workflows table has the test workflow');
  console.log(' - Views show aggregated data');
  console.log('\n Dashboard URL: https://supabase.com/dashboard/project/ydyufsohxdfpopqbubwk/editor');

  console.log('\n✨ Telemetry integration test completed!');
}

// Run the test
testTelemetryIntegration().catch(error => {
  console.error('❌ Test failed:', error);
  process.exit(1);
});
scripts/test-telemetry-no-select.ts (new file, 68 lines)
@@ -0,0 +1,68 @@
#!/usr/bin/env npx tsx
/**
 * Test telemetry without requesting data back
 */

import { createClient } from '@supabase/supabase-js';
import dotenv from 'dotenv';

dotenv.config();

async function testNoSelect() {
  const supabaseUrl = process.env.SUPABASE_URL!;
  const supabaseAnonKey = process.env.SUPABASE_ANON_KEY!;

  console.log('🧪 Telemetry Test (No Select)\n');

  const supabase = createClient(supabaseUrl, supabaseAnonKey, {
    auth: {
      persistSession: false,
      autoRefreshToken: false,
    }
  });

  // Insert WITHOUT .select() - just fire and forget
  const testData = {
    user_id: 'test-' + Date.now(),
    event: 'test_event',
    properties: { test: true }
  };

  console.log('Inserting:', testData);

  const { error } = await supabase
    .from('telemetry_events')
    .insert([testData]); // No .select() here!

  if (error) {
    console.error('❌ Failed:', error);
  } else {
    console.log('✅ Success! Data inserted (no response data)');
  }

  // Test workflow insert too
  const testWorkflow = {
    user_id: 'test-' + Date.now(),
    workflow_hash: 'hash-' + Date.now(),
    node_count: 3,
    node_types: ['webhook', 'http', 'slack'],
    has_trigger: true,
    has_webhook: true,
    complexity: 'simple',
    sanitized_workflow: { nodes: [], connections: {} }
  };

  console.log('\nInserting workflow:', testWorkflow);

  const { error: workflowError } = await supabase
    .from('telemetry_workflows')
    .insert([testWorkflow]); // No .select() here!

  if (workflowError) {
    console.error('❌ Workflow failed:', workflowError);
  } else {
    console.log('✅ Workflow inserted successfully!');
  }
}

testNoSelect().catch(console.error);
scripts/test-telemetry-simple.ts (new file, 45 lines)
@@ -0,0 +1,45 @@
#!/usr/bin/env npx tsx
/**
 * Simple test to verify telemetry works
 */

import { createClient } from '@supabase/supabase-js';
import dotenv from 'dotenv';

dotenv.config();

async function testSimple() {
  const supabaseUrl = process.env.SUPABASE_URL!;
  const supabaseAnonKey = process.env.SUPABASE_ANON_KEY!;

  console.log('🧪 Simple Telemetry Test\n');

  const supabase = createClient(supabaseUrl, supabaseAnonKey, {
    auth: {
      persistSession: false,
      autoRefreshToken: false,
    }
  });

  // Simple insert
  const testData = {
    user_id: 'simple-test-' + Date.now(),
    event: 'test_event',
    properties: { test: true }
  };

  console.log('Inserting:', testData);

  const { data, error } = await supabase
    .from('telemetry_events')
    .insert([testData])
    .select();

  if (error) {
    console.error('❌ Failed:', error);
  } else {
    console.log('✅ Success! Inserted:', data);
  }
}

testSimple().catch(console.error);
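Each of the four scripts above is a standalone tsx entry point (per the #!/usr/bin/env npx tsx shebang); presumably they are run directly, e.g. npx tsx scripts/test-telemetry-debug.ts, with SUPABASE_URL and SUPABASE_ANON_KEY supplied via a .env file that dotenv.config() loads.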
@@ -227,10 +227,12 @@ export class TelemetryManager {
     try {
       const { error } = await this.supabase
         .from('telemetry_events')
-        .insert(events);
+        .insert(events); // No .select() - we don't need the response

       if (error) {
         logger.debug('Failed to flush telemetry events:', error.message);
+      } else {
+        logger.debug(`Flushed ${events.length} telemetry events`);
       }
     } catch (error) {
       logger.debug('Error flushing telemetry events:', error);
@@ -249,10 +251,12 @@ export class TelemetryManager {
         .upsert(workflows, {
           onConflict: 'workflow_hash',
           ignoreDuplicates: true,
-        });
+        }); // No .select() - we don't need the response

       if (error) {
         logger.debug('Failed to flush telemetry workflows:', error.message);
+      } else {
+        logger.debug(`Flushed ${workflows.length} telemetry workflows`);
       }
     } catch (error) {
       logger.debug('Error flushing telemetry workflows:', error);