Initial commit: StreamFlow IPTV platform
This commit is contained in:
commit
73a8ae9ffd
1240 changed files with 278451 additions and 0 deletions
704
backend/utils/alertSystem.js
Normal file
704
backend/utils/alertSystem.js
Normal file
|
|
@ -0,0 +1,704 @@
|
|||
/**
|
||||
* Real-time Alert System
|
||||
* Automated notification system for security events
|
||||
* Supports multiple notification channels and alert rules
|
||||
*/
|
||||
|
||||
const logger = require('./logger');
|
||||
const logAggregator = require('./logAggregator');
|
||||
const { db } = require('../database/db');
|
||||
const EventEmitter = require('events');
|
||||
const responseProtocolManager = require('./responseProtocolManager');
|
||||
|
||||
class AlertSystem extends EventEmitter {
|
||||
constructor() {
|
||||
super();
|
||||
this.activeAlerts = new Map();
|
||||
this.alertRules = new Map();
|
||||
this.initialize();
|
||||
}
|
||||
|
||||
/**
|
||||
* Initialize alert system
|
||||
*/
|
||||
async initialize() {
|
||||
await this.createAlertsTable();
|
||||
await this.createAlertRulesTable();
|
||||
await this.loadAlertRules();
|
||||
|
||||
logger.info('[AlertSystem] Initialized - Real-time monitoring enabled');
|
||||
}
|
||||
|
||||
/**
|
||||
* Create alerts table
|
||||
*/
|
||||
async createAlertsTable() {
|
||||
return new Promise((resolve, reject) => {
|
||||
db.run(`
|
||||
CREATE TABLE IF NOT EXISTS security_alerts (
|
||||
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
||||
alert_id TEXT UNIQUE NOT NULL,
|
||||
rule_id TEXT,
|
||||
severity TEXT NOT NULL,
|
||||
title TEXT NOT NULL,
|
||||
description TEXT NOT NULL,
|
||||
affected_entity TEXT,
|
||||
source_data TEXT,
|
||||
status TEXT DEFAULT 'active',
|
||||
acknowledged_at DATETIME,
|
||||
acknowledged_by INTEGER,
|
||||
resolved_at DATETIME,
|
||||
resolved_by INTEGER,
|
||||
resolution_notes TEXT,
|
||||
notification_sent INTEGER DEFAULT 0,
|
||||
created_at DATETIME DEFAULT CURRENT_TIMESTAMP
|
||||
)
|
||||
`, (err) => {
|
||||
if (err) reject(err);
|
||||
else {
|
||||
db.run(`CREATE INDEX IF NOT EXISTS idx_alerts_severity ON security_alerts(severity, created_at DESC)`);
|
||||
db.run(`CREATE INDEX IF NOT EXISTS idx_alerts_status ON security_alerts(status, created_at DESC)`);
|
||||
db.run(`CREATE INDEX IF NOT EXISTS idx_alerts_rule ON security_alerts(rule_id)`);
|
||||
resolve();
|
||||
}
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Create alert rules table
|
||||
*/
|
||||
async createAlertRulesTable() {
|
||||
return new Promise((resolve, reject) => {
|
||||
db.run(`
|
||||
CREATE TABLE IF NOT EXISTS alert_rules (
|
||||
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
||||
rule_id TEXT UNIQUE NOT NULL,
|
||||
name TEXT NOT NULL,
|
||||
description TEXT,
|
||||
rule_type TEXT NOT NULL,
|
||||
condition TEXT NOT NULL,
|
||||
severity TEXT NOT NULL,
|
||||
enabled INTEGER DEFAULT 1,
|
||||
notification_channels TEXT,
|
||||
cooldown_minutes INTEGER DEFAULT 10,
|
||||
created_at DATETIME DEFAULT CURRENT_TIMESTAMP,
|
||||
updated_at DATETIME DEFAULT CURRENT_TIMESTAMP
|
||||
)
|
||||
`, async (err) => {
|
||||
if (err) reject(err);
|
||||
else {
|
||||
await this.createDefaultRules();
|
||||
resolve();
|
||||
}
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Create default alert rules
|
||||
*/
|
||||
async createDefaultRules() {
|
||||
const defaultRules = [
|
||||
{
|
||||
rule_id: 'RULE-BRUTE-FORCE',
|
||||
name: 'Brute Force Detection',
|
||||
description: 'Alert on brute force attack patterns',
|
||||
rule_type: 'anomaly',
|
||||
condition: JSON.stringify({ anomaly_type: 'brute_force_attack' }),
|
||||
severity: 'critical',
|
||||
notification_channels: JSON.stringify(['in_app', 'email']),
|
||||
cooldown_minutes: 10
|
||||
},
|
||||
{
|
||||
rule_id: 'RULE-PRIVILEGE-ESC',
|
||||
name: 'Privilege Escalation',
|
||||
description: 'Alert on privilege escalation attempts',
|
||||
rule_type: 'anomaly',
|
||||
condition: JSON.stringify({ anomaly_type: 'privilege_escalation' }),
|
||||
severity: 'critical',
|
||||
notification_channels: JSON.stringify(['in_app', 'email']),
|
||||
cooldown_minutes: 5
|
||||
},
|
||||
{
|
||||
rule_id: 'RULE-DATA-EXFIL',
|
||||
name: 'Data Exfiltration',
|
||||
description: 'Alert on potential data exfiltration',
|
||||
rule_type: 'anomaly',
|
||||
condition: JSON.stringify({ anomaly_type: 'data_exfiltration' }),
|
||||
severity: 'high',
|
||||
notification_channels: JSON.stringify(['in_app', 'email']),
|
||||
cooldown_minutes: 15
|
||||
},
|
||||
{
|
||||
rule_id: 'RULE-THREAT-CRITICAL',
|
||||
name: 'Critical Threat Level',
|
||||
description: 'Alert when threat score exceeds 80',
|
||||
rule_type: 'threshold',
|
||||
condition: JSON.stringify({ metric: 'threat_score', operator: '>=', value: 80 }),
|
||||
severity: 'critical',
|
||||
notification_channels: JSON.stringify(['in_app', 'email']),
|
||||
cooldown_minutes: 30
|
||||
},
|
||||
{
|
||||
rule_id: 'RULE-SUSPICIOUS-IP',
|
||||
name: 'Suspicious IP Activity',
|
||||
description: 'Alert on suspicious IP behavior',
|
||||
rule_type: 'anomaly',
|
||||
condition: JSON.stringify({ anomaly_type: 'suspicious_ip' }),
|
||||
severity: 'high',
|
||||
notification_channels: JSON.stringify(['in_app']),
|
||||
cooldown_minutes: 20
|
||||
},
|
||||
{
|
||||
rule_id: 'RULE-SESSION-ANOMALY',
|
||||
name: 'Session Anomaly',
|
||||
description: 'Alert on unusual session patterns',
|
||||
rule_type: 'anomaly',
|
||||
condition: JSON.stringify({ anomaly_type: 'session_anomaly' }),
|
||||
severity: 'medium',
|
||||
notification_channels: JSON.stringify(['in_app']),
|
||||
cooldown_minutes: 30
|
||||
}
|
||||
];
|
||||
|
||||
for (const rule of defaultRules) {
|
||||
await new Promise((resolve, reject) => {
|
||||
db.run(
|
||||
`INSERT OR IGNORE INTO alert_rules
|
||||
(rule_id, name, description, rule_type, condition, severity, notification_channels, cooldown_minutes)
|
||||
VALUES (?, ?, ?, ?, ?, ?, ?, ?)`,
|
||||
[
|
||||
rule.rule_id,
|
||||
rule.name,
|
||||
rule.description,
|
||||
rule.rule_type,
|
||||
rule.condition,
|
||||
rule.severity,
|
||||
rule.notification_channels,
|
||||
rule.cooldown_minutes
|
||||
],
|
||||
(err) => {
|
||||
if (err) reject(err);
|
||||
else resolve();
|
||||
}
|
||||
);
|
||||
});
|
||||
}
|
||||
|
||||
logger.info('[AlertSystem] Default alert rules created');
|
||||
}
|
||||
|
||||
/**
|
||||
* Load alert rules into memory
|
||||
*/
|
||||
async loadAlertRules() {
|
||||
return new Promise((resolve, reject) => {
|
||||
db.all(
|
||||
'SELECT * FROM alert_rules WHERE enabled = 1',
|
||||
[],
|
||||
(err, rows) => {
|
||||
if (err) {
|
||||
reject(err);
|
||||
return;
|
||||
}
|
||||
|
||||
this.alertRules.clear();
|
||||
for (const row of rows) {
|
||||
this.alertRules.set(row.rule_id, {
|
||||
...row,
|
||||
condition: JSON.parse(row.condition),
|
||||
notification_channels: JSON.parse(row.notification_channels)
|
||||
});
|
||||
}
|
||||
|
||||
logger.info(`[AlertSystem] Loaded ${rows.length} alert rules`);
|
||||
resolve(rows.length);
|
||||
}
|
||||
);
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Trigger alert based on anomaly
|
||||
* Enhanced with automated response protocols
|
||||
*/
|
||||
async triggerAnomalyAlert(anomaly) {
|
||||
// Find matching rules
|
||||
const matchingRules = [];
|
||||
for (const [ruleId, rule] of this.alertRules.entries()) {
|
||||
if (rule.rule_type === 'anomaly' && rule.condition.anomaly_type === anomaly.type) {
|
||||
matchingRules.push(rule);
|
||||
}
|
||||
}
|
||||
|
||||
if (matchingRules.length === 0) {
|
||||
return;
|
||||
}
|
||||
|
||||
// Create alerts for matching rules
|
||||
for (const rule of matchingRules) {
|
||||
// Check cooldown
|
||||
if (await this.isInCooldown(rule.rule_id, anomaly.affected_ip || anomaly.affected_user_id)) {
|
||||
logger.debug(`[AlertSystem] Alert ${rule.rule_id} in cooldown period`);
|
||||
continue;
|
||||
}
|
||||
|
||||
const alertId = await this.createAlert({
|
||||
rule_id: rule.rule_id,
|
||||
severity: rule.severity,
|
||||
title: rule.name,
|
||||
description: anomaly.description,
|
||||
affected_entity: anomaly.affected_user_id || anomaly.affected_ip,
|
||||
source_data: JSON.stringify({
|
||||
anomalyId: anomaly.anomaly_id,
|
||||
type: anomaly.type,
|
||||
confidence: anomaly.confidence,
|
||||
patternData: JSON.parse(anomaly.pattern_data || '{}')
|
||||
})
|
||||
});
|
||||
|
||||
// Send notifications
|
||||
await this.sendNotifications(alertId, rule.notification_channels);
|
||||
|
||||
// Execute automated response protocols (CWE-778)
|
||||
await this.executeResponseProtocols('anomaly', {
|
||||
anomaly_type: anomaly.type,
|
||||
severity: rule.severity
|
||||
}, {
|
||||
alertId,
|
||||
ip_address: anomaly.affected_ip,
|
||||
user_id: anomaly.affected_user_id,
|
||||
confidence: anomaly.confidence
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Execute automated response protocols
|
||||
* CWE-778: Logs all protocol executions
|
||||
*/
|
||||
async executeResponseProtocols(triggerType, triggerEvent, context = {}) {
|
||||
try {
|
||||
const result = await responseProtocolManager.executeProtocols(triggerType, triggerEvent, context);
|
||||
|
||||
if (result.executed) {
|
||||
logger.warn(`[AlertSystem] Executed ${result.protocols.length} response protocol(s)`);
|
||||
|
||||
// Log protocol execution (CWE-778)
|
||||
logAggregator.aggregate('alert_system', 'warn', 'security', 'Response protocols executed', {
|
||||
triggerType,
|
||||
protocolsExecuted: result.protocols.length,
|
||||
protocols: result.protocols.map(p => ({
|
||||
protocolId: p.protocolId,
|
||||
protocolName: p.protocolName,
|
||||
status: p.status,
|
||||
actionsExecuted: p.actionsExecuted
|
||||
})),
|
||||
context
|
||||
});
|
||||
}
|
||||
|
||||
return result;
|
||||
} catch (error) {
|
||||
logger.error('[AlertSystem] Response protocol execution failed:', error);
|
||||
|
||||
// Log execution failure (CWE-778)
|
||||
logAggregator.aggregate('alert_system', 'error', 'security', 'Response protocol execution failed', {
|
||||
triggerType,
|
||||
error: error.message,
|
||||
context
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Trigger alert based on threshold
|
||||
*/
|
||||
async triggerThresholdAlert(metric, value) {
|
||||
for (const [ruleId, rule] of this.alertRules.entries()) {
|
||||
if (rule.rule_type !== 'threshold' || rule.condition.metric !== metric) {
|
||||
continue;
|
||||
}
|
||||
|
||||
// Evaluate condition
|
||||
const passed = this.evaluateCondition(rule.condition, value);
|
||||
if (!passed) continue;
|
||||
|
||||
// Check cooldown
|
||||
if (await this.isInCooldown(rule.rule_id, metric)) {
|
||||
continue;
|
||||
}
|
||||
|
||||
const alertId = await this.createAlert({
|
||||
rule_id: rule.rule_id,
|
||||
severity: rule.severity,
|
||||
title: rule.name,
|
||||
description: `${rule.description}: ${metric} = ${value}`,
|
||||
affected_entity: metric,
|
||||
source_data: JSON.stringify({ metric, value, threshold: rule.condition.value })
|
||||
});
|
||||
|
||||
await this.sendNotifications(alertId, rule.notification_channels);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Evaluate threshold condition
|
||||
*/
|
||||
evaluateCondition(condition, value) {
|
||||
const { operator, value: threshold } = condition;
|
||||
|
||||
switch (operator) {
|
||||
case '>=': return value >= threshold;
|
||||
case '>': return value > threshold;
|
||||
case '<=': return value <= threshold;
|
||||
case '<': return value < threshold;
|
||||
case '==': return value == threshold;
|
||||
case '!=': return value != threshold;
|
||||
default: return false;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Check if alert is in cooldown period
|
||||
*/
|
||||
async isInCooldown(ruleId, affectedEntity) {
|
||||
const rule = this.alertRules.get(ruleId);
|
||||
if (!rule) return false;
|
||||
|
||||
const cooldownMinutes = rule.cooldown_minutes || 10;
|
||||
const cooldownTime = new Date(Date.now() - cooldownMinutes * 60 * 1000).toISOString();
|
||||
|
||||
return new Promise((resolve, reject) => {
|
||||
db.get(
|
||||
`SELECT COUNT(*) as count FROM security_alerts
|
||||
WHERE rule_id = ?
|
||||
AND affected_entity = ?
|
||||
AND created_at >= ?`,
|
||||
[ruleId, affectedEntity, cooldownTime],
|
||||
(err, row) => {
|
||||
if (err) reject(err);
|
||||
else resolve(row.count > 0);
|
||||
}
|
||||
);
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Create alert
|
||||
*/
|
||||
async createAlert(details) {
|
||||
const alertId = `ALERT-${Date.now()}-${Math.random().toString(36).substr(2, 9)}`;
|
||||
|
||||
return new Promise((resolve, reject) => {
|
||||
db.run(
|
||||
`INSERT INTO security_alerts
|
||||
(alert_id, rule_id, severity, title, description, affected_entity, source_data)
|
||||
VALUES (?, ?, ?, ?, ?, ?, ?)`,
|
||||
[
|
||||
alertId,
|
||||
details.rule_id,
|
||||
details.severity,
|
||||
details.title,
|
||||
details.description,
|
||||
details.affected_entity,
|
||||
details.source_data
|
||||
],
|
||||
(err) => {
|
||||
if (err) {
|
||||
reject(err);
|
||||
} else {
|
||||
logger.warn(`[AlertSystem] Alert triggered: ${alertId} - ${details.title} (${details.severity})`);
|
||||
|
||||
// Log to aggregated logs
|
||||
logAggregator.aggregate('alert_system', 'warn', 'security', details.title, {
|
||||
alertId,
|
||||
ruleId: details.rule_id,
|
||||
severity: details.severity,
|
||||
affectedEntity: details.affected_entity
|
||||
});
|
||||
|
||||
// Emit event for real-time notifications
|
||||
this.emit('alert', {
|
||||
alertId,
|
||||
...details,
|
||||
timestamp: new Date().toISOString()
|
||||
});
|
||||
|
||||
this.activeAlerts.set(alertId, details);
|
||||
resolve(alertId);
|
||||
}
|
||||
}
|
||||
);
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Send notifications through configured channels
|
||||
*/
|
||||
async sendNotifications(alertId, channels) {
|
||||
for (const channel of channels) {
|
||||
try {
|
||||
switch (channel) {
|
||||
case 'in_app':
|
||||
await this.sendInAppNotification(alertId);
|
||||
break;
|
||||
case 'email':
|
||||
await this.sendEmailNotification(alertId);
|
||||
break;
|
||||
case 'webhook':
|
||||
await this.sendWebhookNotification(alertId);
|
||||
break;
|
||||
default:
|
||||
logger.debug(`[AlertSystem] Unknown notification channel: ${channel}`);
|
||||
}
|
||||
} catch (error) {
|
||||
logger.error(`[AlertSystem] Failed to send ${channel} notification:`, error);
|
||||
}
|
||||
}
|
||||
|
||||
// Mark notification as sent
|
||||
await new Promise((resolve) => {
|
||||
db.run(
|
||||
'UPDATE security_alerts SET notification_sent = 1 WHERE alert_id = ?',
|
||||
[alertId],
|
||||
() => resolve()
|
||||
);
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Send in-app notification (emit event)
|
||||
*/
|
||||
async sendInAppNotification(alertId) {
|
||||
const alert = await this.getAlert(alertId);
|
||||
if (!alert) return;
|
||||
|
||||
this.emit('notification', {
|
||||
type: 'security_alert',
|
||||
alertId: alert.alert_id,
|
||||
severity: alert.severity,
|
||||
title: alert.title,
|
||||
description: alert.description,
|
||||
timestamp: alert.created_at
|
||||
});
|
||||
|
||||
logger.debug(`[AlertSystem] In-app notification sent: ${alertId}`);
|
||||
}
|
||||
|
||||
/**
|
||||
* Send email notification (placeholder)
|
||||
*/
|
||||
async sendEmailNotification(alertId) {
|
||||
const alert = await this.getAlert(alertId);
|
||||
if (!alert) return;
|
||||
|
||||
// TODO: Implement email sending (nodemailer)
|
||||
logger.info(`[AlertSystem] Email notification (stub): ${alertId}`);
|
||||
|
||||
// For now, just log it
|
||||
logger.info(`[AlertSystem] Email would be sent for alert ${alertId}: ${alert.title}`);
|
||||
}
|
||||
|
||||
/**
|
||||
* Send webhook notification (placeholder)
|
||||
*/
|
||||
async sendWebhookNotification(alertId) {
|
||||
const alert = await this.getAlert(alertId);
|
||||
if (!alert) return;
|
||||
|
||||
// TODO: Implement webhook HTTP POST
|
||||
logger.info(`[AlertSystem] Webhook notification (stub): ${alertId}`);
|
||||
}
|
||||
|
||||
/**
|
||||
* Get alert by ID
|
||||
*/
|
||||
async getAlert(alertId) {
|
||||
return new Promise((resolve, reject) => {
|
||||
db.get(
|
||||
'SELECT * FROM security_alerts WHERE alert_id = ?',
|
||||
[alertId],
|
||||
(err, row) => {
|
||||
if (err) reject(err);
|
||||
else resolve(row);
|
||||
}
|
||||
);
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Get active alerts
|
||||
*/
|
||||
async getAlerts(filters = {}) {
|
||||
const {
|
||||
status = 'active',
|
||||
severity,
|
||||
limit = 100,
|
||||
offset = 0
|
||||
} = filters;
|
||||
|
||||
let whereClause = ['status = ?'];
|
||||
let params = [status];
|
||||
|
||||
if (severity) {
|
||||
whereClause.push('severity = ?');
|
||||
params.push(severity);
|
||||
}
|
||||
|
||||
params.push(limit, offset);
|
||||
|
||||
return new Promise((resolve, reject) => {
|
||||
db.all(
|
||||
`SELECT * FROM security_alerts
|
||||
WHERE ${whereClause.join(' AND ')}
|
||||
ORDER BY created_at DESC
|
||||
LIMIT ? OFFSET ?`,
|
||||
params,
|
||||
(err, rows) => {
|
||||
if (err) reject(err);
|
||||
else resolve(rows);
|
||||
}
|
||||
);
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Acknowledge alert
|
||||
*/
|
||||
async acknowledgeAlert(alertId, userId, notes = '') {
|
||||
return new Promise((resolve, reject) => {
|
||||
db.run(
|
||||
`UPDATE security_alerts
|
||||
SET status = 'acknowledged',
|
||||
acknowledged_at = CURRENT_TIMESTAMP,
|
||||
acknowledged_by = ?
|
||||
WHERE alert_id = ?`,
|
||||
[userId, alertId],
|
||||
(err) => {
|
||||
if (err) {
|
||||
reject(err);
|
||||
} else {
|
||||
logger.info(`[AlertSystem] Alert acknowledged: ${alertId} by user ${userId}`);
|
||||
|
||||
logAggregator.aggregate('alert_system', 'info', 'security', 'Alert acknowledged', {
|
||||
alertId,
|
||||
userId,
|
||||
notes
|
||||
});
|
||||
|
||||
this.emit('alert_acknowledged', { alertId, userId });
|
||||
resolve();
|
||||
}
|
||||
}
|
||||
);
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Resolve alert
|
||||
*/
|
||||
async resolveAlert(alertId, userId, notes) {
|
||||
return new Promise((resolve, reject) => {
|
||||
db.run(
|
||||
`UPDATE security_alerts
|
||||
SET status = 'resolved',
|
||||
resolved_at = CURRENT_TIMESTAMP,
|
||||
resolved_by = ?,
|
||||
resolution_notes = ?
|
||||
WHERE alert_id = ?`,
|
||||
[userId, notes, alertId],
|
||||
(err) => {
|
||||
if (err) {
|
||||
reject(err);
|
||||
} else {
|
||||
logger.info(`[AlertSystem] Alert resolved: ${alertId} by user ${userId}`);
|
||||
|
||||
logAggregator.aggregate('alert_system', 'info', 'security', 'Alert resolved', {
|
||||
alertId,
|
||||
userId,
|
||||
notes
|
||||
});
|
||||
|
||||
this.activeAlerts.delete(alertId);
|
||||
this.emit('alert_resolved', { alertId, userId });
|
||||
resolve();
|
||||
}
|
||||
}
|
||||
);
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Get alert statistics
|
||||
*/
|
||||
async getStatistics() {
|
||||
return new Promise((resolve, reject) => {
|
||||
db.all(
|
||||
`SELECT
|
||||
status,
|
||||
severity,
|
||||
COUNT(*) as count
|
||||
FROM security_alerts
|
||||
WHERE created_at >= datetime('now', '-24 hours')
|
||||
GROUP BY status, severity`,
|
||||
[],
|
||||
(err, rows) => {
|
||||
if (err) {
|
||||
reject(err);
|
||||
return;
|
||||
}
|
||||
|
||||
const stats = {
|
||||
total: 0,
|
||||
byStatus: {},
|
||||
bySeverity: {}
|
||||
};
|
||||
|
||||
for (const row of rows) {
|
||||
stats.total += row.count;
|
||||
|
||||
if (!stats.byStatus[row.status]) {
|
||||
stats.byStatus[row.status] = 0;
|
||||
}
|
||||
stats.byStatus[row.status] += row.count;
|
||||
|
||||
if (!stats.bySeverity[row.severity]) {
|
||||
stats.bySeverity[row.severity] = 0;
|
||||
}
|
||||
stats.bySeverity[row.severity] += row.count;
|
||||
}
|
||||
|
||||
resolve(stats);
|
||||
}
|
||||
);
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
// Create singleton instance
|
||||
const alertSystem = new AlertSystem();
|
||||
|
||||
// Connect security intelligence to alert system
|
||||
const securityIntelligence = require('./securityIntelligence');
|
||||
|
||||
// Listen for anomalies and trigger alerts
|
||||
setInterval(async () => {
|
||||
try {
|
||||
const anomalies = await securityIntelligence.getAnomalies({ status: 'open', limit: 50 });
|
||||
for (const anomaly of anomalies) {
|
||||
await alertSystem.triggerAnomalyAlert(anomaly);
|
||||
}
|
||||
|
||||
// Check threat score
|
||||
const threatScore = securityIntelligence.threatScore;
|
||||
if (threatScore >= 80) {
|
||||
await alertSystem.triggerThresholdAlert('threat_score', threatScore);
|
||||
}
|
||||
} catch (error) {
|
||||
logger.error('[AlertSystem] Error checking for alerts:', error);
|
||||
}
|
||||
}, 60000); // Check every minute
|
||||
|
||||
module.exports = alertSystem;
|
||||
156
backend/utils/dataSanitizer.js
Normal file
156
backend/utils/dataSanitizer.js
Normal file
|
|
@ -0,0 +1,156 @@
|
|||
/**
|
||||
* Data Sanitization Utility
|
||||
* CWE-532: Prevents logging of sensitive data
|
||||
* Ensures compliance with HIPAA, PCI, SOX regulations
|
||||
*/
|
||||
|
||||
const SENSITIVE_FIELDS = [
|
||||
'password',
|
||||
'newPassword',
|
||||
'oldPassword',
|
||||
'currentPassword',
|
||||
'confirmPassword',
|
||||
'token',
|
||||
'accessToken',
|
||||
'refreshToken',
|
||||
'jwt',
|
||||
'secret',
|
||||
'apiKey',
|
||||
'api_key',
|
||||
'privateKey',
|
||||
'private_key',
|
||||
'two_factor_secret',
|
||||
'twoFactorSecret',
|
||||
'backup_codes',
|
||||
'backupCodes',
|
||||
'creditCard',
|
||||
'credit_card',
|
||||
'cvv',
|
||||
'ssn',
|
||||
'social_security',
|
||||
'pin',
|
||||
'authCode',
|
||||
'auth_code'
|
||||
];
|
||||
|
||||
/**
|
||||
* Sanitize object by removing or masking sensitive fields
|
||||
* @param {Object} data - Object to sanitize
|
||||
* @param {Array} additionalFields - Additional fields to sanitize
|
||||
* @returns {Object} Sanitized object
|
||||
*/
|
||||
function sanitizeForLogging(data, additionalFields = []) {
|
||||
if (!data || typeof data !== 'object') {
|
||||
return data;
|
||||
}
|
||||
|
||||
const sensitiveFields = [...SENSITIVE_FIELDS, ...additionalFields];
|
||||
const sanitized = Array.isArray(data) ? [] : {};
|
||||
|
||||
for (const [key, value] of Object.entries(data)) {
|
||||
const lowerKey = key.toLowerCase();
|
||||
const isSensitive = sensitiveFields.some(field =>
|
||||
lowerKey.includes(field.toLowerCase())
|
||||
);
|
||||
|
||||
if (isSensitive) {
|
||||
sanitized[key] = '[REDACTED]';
|
||||
} else if (value && typeof value === 'object') {
|
||||
sanitized[key] = sanitizeForLogging(value, additionalFields);
|
||||
} else {
|
||||
sanitized[key] = value;
|
||||
}
|
||||
}
|
||||
|
||||
return sanitized;
|
||||
}
|
||||
|
||||
/**
|
||||
* Sanitize user object for export (remove password hash)
|
||||
* @param {Object} user - User object from database
|
||||
* @returns {Object} Sanitized user object
|
||||
*/
|
||||
function sanitizeUserForExport(user) {
|
||||
if (!user) return user;
|
||||
|
||||
const sanitized = { ...user };
|
||||
delete sanitized.password;
|
||||
delete sanitized.two_factor_secret;
|
||||
delete sanitized.backup_codes;
|
||||
|
||||
return sanitized;
|
||||
}
|
||||
|
||||
/**
|
||||
* Sanitize user array for export
|
||||
* @param {Array} users - Array of user objects
|
||||
* @returns {Array} Sanitized user array
|
||||
*/
|
||||
function sanitizeUsersForExport(users) {
|
||||
if (!Array.isArray(users)) return users;
|
||||
return users.map(user => sanitizeUserForExport(user));
|
||||
}
|
||||
|
||||
/**
|
||||
* Mask token for logging (show only last 8 characters)
|
||||
* @param {String} token - Token to mask
|
||||
* @returns {String} Masked token
|
||||
*/
|
||||
function maskToken(token) {
|
||||
if (!token || typeof token !== 'string') return '[INVALID_TOKEN]';
|
||||
if (token.length <= 8) return '***';
|
||||
return '...' + token.slice(-8);
|
||||
}
|
||||
|
||||
/**
|
||||
* Mask email for logging (show only domain)
|
||||
* @param {String} email - Email to mask
|
||||
* @returns {String} Masked email
|
||||
*/
|
||||
function maskEmail(email) {
|
||||
if (!email || typeof email !== 'string') return '[INVALID_EMAIL]';
|
||||
const parts = email.split('@');
|
||||
if (parts.length !== 2) return '[INVALID_EMAIL]';
|
||||
return `***@${parts[1]}`;
|
||||
}
|
||||
|
||||
/**
|
||||
* Sanitize request body for logging
|
||||
* @param {Object} body - Request body
|
||||
* @returns {Object} Sanitized body
|
||||
*/
|
||||
function sanitizeRequestBody(body) {
|
||||
return sanitizeForLogging(body);
|
||||
}
|
||||
|
||||
/**
|
||||
* Create safe metadata object for audit logging
|
||||
* Ensures no sensitive data is included in audit logs
|
||||
* @param {Object} data - Data to include in audit metadata
|
||||
* @returns {Object} Safe metadata object
|
||||
*/
|
||||
function createSafeAuditMetadata(data) {
|
||||
const safe = sanitizeForLogging(data);
|
||||
|
||||
// Specifically handle common patterns
|
||||
if (safe.user && typeof safe.user === 'object') {
|
||||
safe.user = sanitizeUserForExport(safe.user);
|
||||
}
|
||||
|
||||
if (safe.changes && typeof safe.changes === 'object') {
|
||||
safe.changes = sanitizeForLogging(safe.changes);
|
||||
}
|
||||
|
||||
return safe;
|
||||
}
|
||||
|
||||
module.exports = {
|
||||
sanitizeForLogging,
|
||||
sanitizeUserForExport,
|
||||
sanitizeUsersForExport,
|
||||
maskToken,
|
||||
maskEmail,
|
||||
sanitizeRequestBody,
|
||||
createSafeAuditMetadata,
|
||||
SENSITIVE_FIELDS
|
||||
};
|
||||
293
backend/utils/encryption.js
Normal file
293
backend/utils/encryption.js
Normal file
|
|
@ -0,0 +1,293 @@
|
|||
/**
|
||||
* Centralized Encryption Utility for CWE-311 Compliance
|
||||
* Provides AES-256-GCM encryption for sensitive data at rest
|
||||
*
|
||||
* Security Features:
|
||||
* - AES-256-GCM authenticated encryption
|
||||
* - Unique IV per encryption operation
|
||||
* - HMAC authentication tags
|
||||
* - Key rotation support
|
||||
* - Secure key derivation from master secret
|
||||
*/
|
||||
|
||||
const crypto = require('crypto');
|
||||
const logger = require('./logger');
|
||||
|
||||
// Encryption configuration
|
||||
const ALGORITHM = 'aes-256-gcm';
|
||||
const KEY_LENGTH = 32; // 256 bits
|
||||
const IV_LENGTH = 16; // 128 bits for GCM
|
||||
const AUTH_TAG_LENGTH = 16;
|
||||
const SALT_LENGTH = 32;
|
||||
|
||||
/**
|
||||
* Get master encryption key from environment or generate default
|
||||
* SECURITY WARNING: Always set ENCRYPTION_MASTER_KEY in production!
|
||||
*/
|
||||
function getMasterKey() {
|
||||
const envKey = process.env.ENCRYPTION_MASTER_KEY;
|
||||
|
||||
if (!envKey) {
|
||||
logger.warn('⚠️ ENCRYPTION_MASTER_KEY not set - using default (insecure for production)');
|
||||
// Use JWT_SECRET as fallback, but warn about it
|
||||
const fallbackKey = process.env.JWT_SECRET || 'default-insecure-key-change-in-production';
|
||||
return crypto.createHash('sha256').update(fallbackKey + '-encryption-v1').digest();
|
||||
}
|
||||
|
||||
// Derive proper key from master secret
|
||||
return crypto.createHash('sha256').update(envKey).digest();
|
||||
}
|
||||
|
||||
/**
|
||||
* Derive encryption key for specific purpose using HKDF-like approach
|
||||
* @param {String} purpose - Purpose identifier (e.g., 'settings', 'vpn', 'api-tokens')
|
||||
* @returns {Buffer} Derived encryption key
|
||||
*/
|
||||
function deriveKey(purpose) {
|
||||
const masterKey = getMasterKey();
|
||||
const info = Buffer.from(purpose + '-v1', 'utf8');
|
||||
|
||||
return crypto.createHmac('sha256', masterKey)
|
||||
.update(info)
|
||||
.digest();
|
||||
}
|
||||
|
||||
/**
|
||||
* Encrypt sensitive data with AES-256-GCM
|
||||
* @param {String} plaintext - Data to encrypt
|
||||
* @param {String} purpose - Purpose identifier for key derivation
|
||||
* @returns {String} Encrypted data in format: salt:iv:authTag:ciphertext (all hex encoded)
|
||||
*/
|
||||
function encrypt(plaintext, purpose = 'default') {
|
||||
try {
|
||||
if (!plaintext) {
|
||||
return null;
|
||||
}
|
||||
|
||||
// Generate random salt and IV for this encryption operation
|
||||
const salt = crypto.randomBytes(SALT_LENGTH);
|
||||
const iv = crypto.randomBytes(IV_LENGTH);
|
||||
|
||||
// Derive encryption key with salt for additional security
|
||||
const masterKey = getMasterKey();
|
||||
const derivedKey = crypto.pbkdf2Sync(
|
||||
masterKey,
|
||||
salt,
|
||||
100000, // iterations
|
||||
KEY_LENGTH,
|
||||
'sha256'
|
||||
);
|
||||
|
||||
// Create cipher
|
||||
const cipher = crypto.createCipheriv(ALGORITHM, derivedKey, iv);
|
||||
|
||||
// Encrypt
|
||||
let encrypted = cipher.update(plaintext, 'utf8', 'hex');
|
||||
encrypted += cipher.final('hex');
|
||||
|
||||
// Get authentication tag (GCM provides authenticated encryption)
|
||||
const authTag = cipher.getAuthTag();
|
||||
|
||||
// Return format: salt:iv:authTag:ciphertext
|
||||
return [
|
||||
salt.toString('hex'),
|
||||
iv.toString('hex'),
|
||||
authTag.toString('hex'),
|
||||
encrypted
|
||||
].join(':');
|
||||
|
||||
} catch (error) {
|
||||
logger.error('Encryption error:', { purpose, error: error.message });
|
||||
throw new Error('Failed to encrypt data');
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Decrypt data encrypted with encrypt()
|
||||
* @param {String} encryptedData - Encrypted data in format: salt:iv:authTag:ciphertext
|
||||
* @param {String} purpose - Purpose identifier (must match encryption purpose)
|
||||
* @returns {String} Decrypted plaintext
|
||||
*/
|
||||
function decrypt(encryptedData, purpose = 'default') {
|
||||
try {
|
||||
if (!encryptedData) {
|
||||
return null;
|
||||
}
|
||||
|
||||
// Parse encrypted data
|
||||
const parts = encryptedData.split(':');
|
||||
if (parts.length !== 4) {
|
||||
throw new Error('Invalid encrypted data format');
|
||||
}
|
||||
|
||||
const salt = Buffer.from(parts[0], 'hex');
|
||||
const iv = Buffer.from(parts[1], 'hex');
|
||||
const authTag = Buffer.from(parts[2], 'hex');
|
||||
const encrypted = parts[3];
|
||||
|
||||
// Validate lengths
|
||||
if (salt.length !== SALT_LENGTH || iv.length !== IV_LENGTH || authTag.length !== AUTH_TAG_LENGTH) {
|
||||
throw new Error('Invalid encrypted data structure');
|
||||
}
|
||||
|
||||
// Derive the same key used for encryption
|
||||
const masterKey = getMasterKey();
|
||||
const derivedKey = crypto.pbkdf2Sync(
|
||||
masterKey,
|
||||
salt,
|
||||
100000,
|
||||
KEY_LENGTH,
|
||||
'sha256'
|
||||
);
|
||||
|
||||
// Create decipher
|
||||
const decipher = crypto.createDecipheriv(ALGORITHM, derivedKey, iv);
|
||||
decipher.setAuthTag(authTag);
|
||||
|
||||
// Decrypt
|
||||
let decrypted = decipher.update(encrypted, 'hex', 'utf8');
|
||||
decrypted += decipher.final('utf8');
|
||||
|
||||
return decrypted;
|
||||
|
||||
} catch (error) {
|
||||
logger.error('Decryption error:', { purpose, error: error.message });
|
||||
throw new Error('Failed to decrypt data');
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Encrypt sensitive settings value
|
||||
* Automatically detects if already encrypted
|
||||
*/
|
||||
function encryptSetting(value, key) {
|
||||
if (!value) return null;
|
||||
|
||||
// Don't encrypt if already encrypted (starts with salt:iv:authTag:ciphertext format)
|
||||
if (typeof value === 'string' && value.split(':').length === 4) {
|
||||
const parts = value.split(':');
|
||||
if (parts[0].length === SALT_LENGTH * 2 && parts[1].length === IV_LENGTH * 2) {
|
||||
return value; // Already encrypted
|
||||
}
|
||||
}
|
||||
|
||||
const plaintext = typeof value === 'string' ? value : JSON.stringify(value);
|
||||
return encrypt(plaintext, `setting:${key}`);
|
||||
}
|
||||
|
||||
/**
|
||||
* Decrypt sensitive setting value
|
||||
*/
|
||||
function decryptSetting(encryptedValue, key) {
|
||||
if (!encryptedValue) return null;
|
||||
|
||||
try {
|
||||
const decrypted = decrypt(encryptedValue, `setting:${key}`);
|
||||
|
||||
// Try to parse as JSON if it looks like JSON
|
||||
if (decrypted && (decrypted.startsWith('{') || decrypted.startsWith('['))) {
|
||||
try {
|
||||
return JSON.parse(decrypted);
|
||||
} catch {
|
||||
return decrypted;
|
||||
}
|
||||
}
|
||||
|
||||
return decrypted;
|
||||
} catch (error) {
|
||||
// If decryption fails, value might not be encrypted (migration scenario)
|
||||
logger.warn(`Failed to decrypt setting ${key}, returning as-is`);
|
||||
return encryptedValue;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Check if encryption key is properly configured
|
||||
*/
|
||||
function isEncryptionConfigured() {
|
||||
return !!process.env.ENCRYPTION_MASTER_KEY;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get encryption health status
|
||||
*/
|
||||
function getEncryptionStatus() {
|
||||
const configured = isEncryptionConfigured();
|
||||
|
||||
return {
|
||||
configured,
|
||||
algorithm: ALGORITHM,
|
||||
keySize: KEY_LENGTH * 8, // bits
|
||||
status: configured ? 'secure' : 'default-key',
|
||||
warning: configured ? null : 'Using default encryption key - set ENCRYPTION_MASTER_KEY in production',
|
||||
recommendations: configured ? [] : [
|
||||
'Set ENCRYPTION_MASTER_KEY environment variable',
|
||||
'Use a strong random key (at least 32 characters)',
|
||||
'Store the key securely (e.g., Docker secrets, AWS Secrets Manager)'
|
||||
]
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Re-encrypt data with new master key (for key rotation)
|
||||
* @param {String} oldEncryptedData - Data encrypted with old key
|
||||
* @param {String} purpose - Purpose identifier
|
||||
* @param {String} oldMasterKey - Old master key (optional, uses current env if not provided)
|
||||
* @returns {String} Data re-encrypted with current master key
|
||||
*/
|
||||
function reEncrypt(oldEncryptedData, purpose = 'default', oldMasterKey = null) {
|
||||
try {
|
||||
// Temporarily swap master key if provided
|
||||
const originalKey = process.env.ENCRYPTION_MASTER_KEY;
|
||||
if (oldMasterKey) {
|
||||
process.env.ENCRYPTION_MASTER_KEY = oldMasterKey;
|
||||
}
|
||||
|
||||
// Decrypt with old key
|
||||
const plaintext = decrypt(oldEncryptedData, purpose);
|
||||
|
||||
// Restore original key
|
||||
if (oldMasterKey) {
|
||||
process.env.ENCRYPTION_MASTER_KEY = originalKey;
|
||||
}
|
||||
|
||||
// Encrypt with current key
|
||||
return encrypt(plaintext, purpose);
|
||||
|
||||
} catch (error) {
|
||||
logger.error('Re-encryption error:', { purpose, error: error.message });
|
||||
throw new Error('Failed to re-encrypt data');
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Hash sensitive data for comparison (one-way, cannot be decrypted)
|
||||
* Use for data that needs to be compared but not retrieved (e.g., backup codes)
|
||||
*/
|
||||
function hashSensitiveData(data) {
|
||||
return crypto.createHash('sha256').update(data).digest('hex');
|
||||
}
|
||||
|
||||
/**
|
||||
* Generate cryptographically secure random token
|
||||
*/
|
||||
function generateSecureToken(length = 32) {
|
||||
return crypto.randomBytes(length).toString('hex');
|
||||
}
|
||||
|
||||
module.exports = {
|
||||
encrypt,
|
||||
decrypt,
|
||||
encryptSetting,
|
||||
decryptSetting,
|
||||
deriveKey,
|
||||
isEncryptionConfigured,
|
||||
getEncryptionStatus,
|
||||
reEncrypt,
|
||||
hashSensitiveData,
|
||||
generateSecureToken,
|
||||
|
||||
// Export for legacy VPN compatibility
|
||||
encryptVPN: (data) => encrypt(JSON.stringify(data), 'vpn'),
|
||||
decryptVPN: (data) => JSON.parse(decrypt(data, 'vpn'))
|
||||
};
|
||||
340
backend/utils/errorHandler.js
Normal file
340
backend/utils/errorHandler.js
Normal file
|
|
@ -0,0 +1,340 @@
|
|||
/**
|
||||
* Secure Error Handler Utility
|
||||
* Prevents CWE-209: Information Exposure Through Error Messages
|
||||
*
|
||||
* This utility sanitizes error messages before sending them to clients,
|
||||
* ensuring that internal system details, file paths, stack traces, and
|
||||
* other sensitive information are never exposed to end users.
|
||||
*/
|
||||
|
||||
const logger = require('./logger');
|
||||
|
||||
/**
|
||||
* Error types with user-friendly messages
|
||||
*/
|
||||
const ERROR_TYPES = {
|
||||
// Authentication & Authorization
|
||||
AUTH_FAILED: 'Authentication failed',
|
||||
AUTH_REQUIRED: 'Authentication required',
|
||||
AUTH_INVALID_TOKEN: 'Invalid or expired authentication token',
|
||||
AUTH_INSUFFICIENT_PERMISSIONS: 'Insufficient permissions',
|
||||
|
||||
// User Management
|
||||
USER_NOT_FOUND: 'User not found',
|
||||
USER_ALREADY_EXISTS: 'User already exists',
|
||||
USER_CREATION_FAILED: 'Failed to create user',
|
||||
USER_UPDATE_FAILED: 'Failed to update user',
|
||||
USER_DELETE_FAILED: 'Failed to delete user',
|
||||
|
||||
// Data Validation
|
||||
VALIDATION_FAILED: 'Validation failed',
|
||||
INVALID_INPUT: 'Invalid input provided',
|
||||
INVALID_FILE_TYPE: 'Invalid file type',
|
||||
FILE_TOO_LARGE: 'File size exceeds limit',
|
||||
MISSING_REQUIRED_FIELD: 'Required field is missing',
|
||||
|
||||
// Database Operations
|
||||
DATABASE_ERROR: 'Database operation failed',
|
||||
RECORD_NOT_FOUND: 'Record not found',
|
||||
DUPLICATE_ENTRY: 'Duplicate entry exists',
|
||||
|
||||
// File Operations
|
||||
FILE_NOT_FOUND: 'File not found',
|
||||
FILE_UPLOAD_FAILED: 'File upload failed',
|
||||
FILE_DELETE_FAILED: 'Failed to delete file',
|
||||
FILE_READ_FAILED: 'Failed to read file',
|
||||
FILE_WRITE_FAILED: 'Failed to write file',
|
||||
|
||||
// Network & External Services
|
||||
NETWORK_ERROR: 'Network request failed',
|
||||
EXTERNAL_SERVICE_ERROR: 'External service unavailable',
|
||||
TIMEOUT_ERROR: 'Request timeout',
|
||||
|
||||
// Rate Limiting
|
||||
RATE_LIMIT_EXCEEDED: 'Too many requests. Please try again later',
|
||||
|
||||
// Generic
|
||||
INTERNAL_ERROR: 'An internal error occurred',
|
||||
NOT_FOUND: 'Resource not found',
|
||||
FORBIDDEN: 'Access forbidden',
|
||||
BAD_REQUEST: 'Bad request',
|
||||
CONFLICT: 'Resource conflict',
|
||||
UNPROCESSABLE_ENTITY: 'Unable to process request',
|
||||
SERVICE_UNAVAILABLE: 'Service temporarily unavailable'
|
||||
};
|
||||
|
||||
/**
|
||||
* Sanitize error for client response
|
||||
* Removes sensitive information like stack traces, file paths, and internal details
|
||||
*
|
||||
* @param {Error|string} error - The error to sanitize
|
||||
* @param {string} defaultMessage - Default message if error cannot be parsed
|
||||
* @returns {Object} Sanitized error object with safe message
|
||||
*/
|
||||
function sanitizeError(error, defaultMessage = ERROR_TYPES.INTERNAL_ERROR) {
|
||||
// If error is a string, return it as is (assuming it's already safe)
|
||||
if (typeof error === 'string') {
|
||||
return {
|
||||
message: error,
|
||||
code: 'CUSTOM_ERROR'
|
||||
};
|
||||
}
|
||||
|
||||
// Extract error message
|
||||
const errorMessage = error?.message || defaultMessage;
|
||||
|
||||
// Check for known error patterns and map to safe messages
|
||||
|
||||
// Database errors
|
||||
if (errorMessage.includes('UNIQUE constraint') || errorMessage.includes('UNIQUE')) {
|
||||
return {
|
||||
message: ERROR_TYPES.DUPLICATE_ENTRY,
|
||||
code: 'DUPLICATE_ENTRY'
|
||||
};
|
||||
}
|
||||
|
||||
if (errorMessage.includes('FOREIGN KEY constraint')) {
|
||||
return {
|
||||
message: ERROR_TYPES.CONFLICT,
|
||||
code: 'FOREIGN_KEY_CONSTRAINT'
|
||||
};
|
||||
}
|
||||
|
||||
if (errorMessage.includes('NOT NULL constraint')) {
|
||||
return {
|
||||
message: ERROR_TYPES.MISSING_REQUIRED_FIELD,
|
||||
code: 'MISSING_FIELD'
|
||||
};
|
||||
}
|
||||
|
||||
// File system errors
|
||||
if (errorMessage.includes('ENOENT') || errorMessage.includes('no such file')) {
|
||||
return {
|
||||
message: ERROR_TYPES.FILE_NOT_FOUND,
|
||||
code: 'FILE_NOT_FOUND'
|
||||
};
|
||||
}
|
||||
|
||||
if (errorMessage.includes('EACCES') || errorMessage.includes('permission denied')) {
|
||||
return {
|
||||
message: ERROR_TYPES.FORBIDDEN,
|
||||
code: 'PERMISSION_DENIED'
|
||||
};
|
||||
}
|
||||
|
||||
if (errorMessage.includes('ENOSPC') || errorMessage.includes('no space')) {
|
||||
return {
|
||||
message: ERROR_TYPES.SERVICE_UNAVAILABLE,
|
||||
code: 'DISK_FULL'
|
||||
};
|
||||
}
|
||||
|
||||
// Network errors
|
||||
if (errorMessage.includes('ECONNREFUSED') || errorMessage.includes('connection refused')) {
|
||||
return {
|
||||
message: ERROR_TYPES.EXTERNAL_SERVICE_ERROR,
|
||||
code: 'CONNECTION_REFUSED'
|
||||
};
|
||||
}
|
||||
|
||||
if (errorMessage.includes('ETIMEDOUT') || errorMessage.includes('timeout')) {
|
||||
return {
|
||||
message: ERROR_TYPES.TIMEOUT_ERROR,
|
||||
code: 'TIMEOUT'
|
||||
};
|
||||
}
|
||||
|
||||
if (errorMessage.includes('ENOTFOUND') || errorMessage.includes('getaddrinfo')) {
|
||||
return {
|
||||
message: ERROR_TYPES.NETWORK_ERROR,
|
||||
code: 'DNS_ERROR'
|
||||
};
|
||||
}
|
||||
|
||||
// Authentication errors
|
||||
if (errorMessage.toLowerCase().includes('unauthorized') ||
|
||||
errorMessage.toLowerCase().includes('authentication')) {
|
||||
return {
|
||||
message: ERROR_TYPES.AUTH_FAILED,
|
||||
code: 'AUTH_ERROR'
|
||||
};
|
||||
}
|
||||
|
||||
if (errorMessage.toLowerCase().includes('forbidden') ||
|
||||
errorMessage.toLowerCase().includes('permission')) {
|
||||
return {
|
||||
message: ERROR_TYPES.AUTH_INSUFFICIENT_PERMISSIONS,
|
||||
code: 'PERMISSION_ERROR'
|
||||
};
|
||||
}
|
||||
|
||||
// Validation errors (pass through if they seem safe)
|
||||
if (errorMessage.toLowerCase().includes('validation') ||
|
||||
errorMessage.toLowerCase().includes('invalid')) {
|
||||
// Check if message is reasonably safe (no paths, no system info)
|
||||
if (!containsSensitiveInfo(errorMessage)) {
|
||||
return {
|
||||
message: errorMessage,
|
||||
code: 'VALIDATION_ERROR'
|
||||
};
|
||||
}
|
||||
return {
|
||||
message: ERROR_TYPES.VALIDATION_FAILED,
|
||||
code: 'VALIDATION_ERROR'
|
||||
};
|
||||
}
|
||||
|
||||
// Default to generic error message
|
||||
return {
|
||||
message: defaultMessage,
|
||||
code: 'INTERNAL_ERROR'
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Check if error message contains sensitive information
|
||||
*
|
||||
* @param {string} message - Error message to check
|
||||
* @returns {boolean} True if message contains sensitive info
|
||||
*/
|
||||
function containsSensitiveInfo(message) {
|
||||
const sensitivePatterns = [
|
||||
/\/[a-z0-9_\-\/]+\.(js|json|db|log|conf|env)/i, // File paths
|
||||
/at\s+[a-zA-Z0-9_]+\s+\(/i, // Stack trace patterns
|
||||
/line\s+\d+/i, // Line numbers
|
||||
/column\s+\d+/i, // Column numbers
|
||||
/Error:\s+SQLITE_/i, // SQLite internal errors
|
||||
/node_modules/i, // Node modules paths
|
||||
/\/home\//i, // Unix home directory
|
||||
/\/usr\//i, // Unix system paths
|
||||
/\/var\//i, // Unix var paths
|
||||
/\/tmp\//i, // Temp directory
|
||||
/C:\\/i, // Windows paths
|
||||
/\\Users\\/i, // Windows user paths
|
||||
/password/i, // Password references
|
||||
/secret/i, // Secret references
|
||||
/token/i, // Token references
|
||||
/key/i // Key references (be careful with "keyboard" etc.)
|
||||
];
|
||||
|
||||
return sensitivePatterns.some(pattern => pattern.test(message));
|
||||
}
|
||||
|
||||
/**
|
||||
* Log error securely (internal logs can contain full details)
|
||||
*
|
||||
* @param {Error|string} error - Error to log
|
||||
* @param {Object} context - Additional context
|
||||
*/
|
||||
function logError(error, context = {}) {
|
||||
const errorInfo = {
|
||||
message: error?.message || error,
|
||||
stack: error?.stack,
|
||||
code: error?.code,
|
||||
...context
|
||||
};
|
||||
|
||||
logger.error('Application error:', errorInfo);
|
||||
}
|
||||
|
||||
/**
|
||||
* Express error handler middleware
|
||||
* Catches all errors and returns sanitized responses
|
||||
*
|
||||
* @param {Error} err - Error object
|
||||
* @param {Object} req - Express request
|
||||
* @param {Object} res - Express response
|
||||
* @param {Function} next - Express next function
|
||||
*/
|
||||
function errorMiddleware(err, req, res, next) {
|
||||
// Log full error details internally
|
||||
logError(err, {
|
||||
method: req.method,
|
||||
path: req.path,
|
||||
userId: req.user?.id,
|
||||
ip: req.ip
|
||||
});
|
||||
|
||||
// Determine status code
|
||||
const statusCode = err.statusCode || err.status || 500;
|
||||
|
||||
// Sanitize error for client
|
||||
const sanitized = sanitizeError(err, ERROR_TYPES.INTERNAL_ERROR);
|
||||
|
||||
// Send sanitized response
|
||||
res.status(statusCode).json({
|
||||
error: sanitized.message,
|
||||
code: sanitized.code,
|
||||
timestamp: new Date().toISOString()
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Async handler wrapper to catch async errors
|
||||
*
|
||||
* @param {Function} fn - Async route handler
|
||||
* @returns {Function} Wrapped function
|
||||
*/
|
||||
function asyncHandler(fn) {
|
||||
return (req, res, next) => {
|
||||
Promise.resolve(fn(req, res, next)).catch(next);
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Create a safe error response object
|
||||
*
|
||||
* @param {string} message - Error message (should be user-safe)
|
||||
* @param {number} statusCode - HTTP status code
|
||||
* @param {string} code - Error code
|
||||
* @returns {Object} Error response object
|
||||
*/
|
||||
function createError(message, statusCode = 500, code = 'ERROR') {
|
||||
const error = new Error(message);
|
||||
error.statusCode = statusCode;
|
||||
error.code = code;
|
||||
return error;
|
||||
}
|
||||
|
||||
/**
|
||||
* Standard error responses
|
||||
*/
|
||||
const ErrorResponses = {
|
||||
badRequest: (message = ERROR_TYPES.BAD_REQUEST) =>
|
||||
createError(message, 400, 'BAD_REQUEST'),
|
||||
|
||||
unauthorized: (message = ERROR_TYPES.AUTH_REQUIRED) =>
|
||||
createError(message, 401, 'UNAUTHORIZED'),
|
||||
|
||||
forbidden: (message = ERROR_TYPES.FORBIDDEN) =>
|
||||
createError(message, 403, 'FORBIDDEN'),
|
||||
|
||||
notFound: (message = ERROR_TYPES.NOT_FOUND) =>
|
||||
createError(message, 404, 'NOT_FOUND'),
|
||||
|
||||
conflict: (message = ERROR_TYPES.CONFLICT) =>
|
||||
createError(message, 409, 'CONFLICT'),
|
||||
|
||||
unprocessable: (message = ERROR_TYPES.UNPROCESSABLE_ENTITY) =>
|
||||
createError(message, 422, 'UNPROCESSABLE_ENTITY'),
|
||||
|
||||
tooManyRequests: (message = ERROR_TYPES.RATE_LIMIT_EXCEEDED) =>
|
||||
createError(message, 429, 'TOO_MANY_REQUESTS'),
|
||||
|
||||
internal: (message = ERROR_TYPES.INTERNAL_ERROR) =>
|
||||
createError(message, 500, 'INTERNAL_ERROR'),
|
||||
|
||||
serviceUnavailable: (message = ERROR_TYPES.SERVICE_UNAVAILABLE) =>
|
||||
createError(message, 503, 'SERVICE_UNAVAILABLE')
|
||||
};
|
||||
|
||||
module.exports = {
|
||||
ERROR_TYPES,
|
||||
sanitizeError,
|
||||
containsSensitiveInfo,
|
||||
logError,
|
||||
errorMiddleware,
|
||||
asyncHandler,
|
||||
createError,
|
||||
ErrorResponses
|
||||
};
|
||||
486
backend/utils/inputValidator.js
Normal file
486
backend/utils/inputValidator.js
Normal file
|
|
@ -0,0 +1,486 @@
|
|||
/**
|
||||
* Comprehensive Input Validation Utility
|
||||
* Implements whitelist-based validation for all user inputs
|
||||
*/
|
||||
|
||||
const validator = require('validator');
|
||||
|
||||
/**
|
||||
* Validation Rules Configuration
|
||||
*/
|
||||
const VALIDATION_RULES = {
|
||||
// User-related
|
||||
username: {
|
||||
minLength: 3,
|
||||
maxLength: 50,
|
||||
pattern: /^[a-zA-Z0-9_-]+$/,
|
||||
sanitize: true
|
||||
},
|
||||
email: {
|
||||
maxLength: 255,
|
||||
sanitize: true
|
||||
},
|
||||
password: {
|
||||
minLength: 8,
|
||||
maxLength: 128
|
||||
},
|
||||
// Content-related
|
||||
playlistName: {
|
||||
minLength: 1,
|
||||
maxLength: 200,
|
||||
pattern: /^[a-zA-Z0-9\s\-_.,()!]+$/,
|
||||
sanitize: true
|
||||
},
|
||||
channelName: {
|
||||
minLength: 1,
|
||||
maxLength: 200,
|
||||
sanitize: true
|
||||
},
|
||||
url: {
|
||||
maxLength: 2048,
|
||||
protocols: ['http', 'https', 'rtmp', 'rtsp', 'udp', 'rtp']
|
||||
},
|
||||
// Generic text fields
|
||||
description: {
|
||||
maxLength: 1000,
|
||||
sanitize: true
|
||||
},
|
||||
// File names
|
||||
filename: {
|
||||
maxLength: 255,
|
||||
pattern: /^[a-zA-Z0-9\s\-_.,()]+$/,
|
||||
sanitize: true
|
||||
},
|
||||
// Settings keys
|
||||
settingKey: {
|
||||
maxLength: 100,
|
||||
pattern: /^[a-zA-Z0-9_.-]+$/
|
||||
}
|
||||
};
|
||||
|
||||
/**
|
||||
* Sanitize string input to prevent XSS
|
||||
*/
|
||||
function sanitizeString(str) {
|
||||
if (typeof str !== 'string') return str;
|
||||
|
||||
// Remove HTML tags
|
||||
str = str.replace(/<[^>]*>/g, '');
|
||||
|
||||
// Remove script-related content
|
||||
str = str.replace(/javascript:/gi, '');
|
||||
str = str.replace(/on\w+\s*=/gi, '');
|
||||
|
||||
// Escape special characters
|
||||
return validator.escape(str);
|
||||
}
|
||||
|
||||
// Export sanitizeString
|
||||
module.exports.sanitizeString = sanitizeString;
|
||||
|
||||
/**
|
||||
* Validate username
|
||||
*/
|
||||
function validateUsername(username) {
|
||||
const errors = [];
|
||||
const rules = VALIDATION_RULES.username;
|
||||
|
||||
if (!username || typeof username !== 'string') {
|
||||
errors.push('Username is required');
|
||||
return { valid: false, errors, sanitized: null };
|
||||
}
|
||||
|
||||
const trimmed = username.trim();
|
||||
|
||||
if (trimmed.length < rules.minLength) {
|
||||
errors.push(`Username must be at least ${rules.minLength} characters`);
|
||||
}
|
||||
|
||||
if (trimmed.length > rules.maxLength) {
|
||||
errors.push(`Username must not exceed ${rules.maxLength} characters`);
|
||||
}
|
||||
|
||||
if (!rules.pattern.test(trimmed)) {
|
||||
errors.push('Username can only contain letters, numbers, hyphens, and underscores');
|
||||
}
|
||||
|
||||
const sanitized = rules.sanitize ? sanitizeString(trimmed) : trimmed;
|
||||
|
||||
return {
|
||||
valid: errors.length === 0,
|
||||
errors,
|
||||
sanitized
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Validate email
|
||||
*/
|
||||
function validateEmail(email) {
|
||||
const errors = [];
|
||||
const rules = VALIDATION_RULES.email;
|
||||
|
||||
if (!email || typeof email !== 'string') {
|
||||
errors.push('Email is required');
|
||||
return { valid: false, errors, sanitized: null };
|
||||
}
|
||||
|
||||
const trimmed = email.trim().toLowerCase();
|
||||
|
||||
if (!validator.isEmail(trimmed)) {
|
||||
errors.push('Invalid email format');
|
||||
}
|
||||
|
||||
if (trimmed.length > rules.maxLength) {
|
||||
errors.push(`Email must not exceed ${rules.maxLength} characters`);
|
||||
}
|
||||
|
||||
const sanitized = rules.sanitize ? sanitizeString(trimmed) : trimmed;
|
||||
|
||||
return {
|
||||
valid: errors.length === 0,
|
||||
errors,
|
||||
sanitized
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Validate URL
|
||||
*/
|
||||
function validateUrl(url, allowLocalhost = false) {
|
||||
const errors = [];
|
||||
const rules = VALIDATION_RULES.url;
|
||||
|
||||
if (!url || typeof url !== 'string') {
|
||||
errors.push('URL is required');
|
||||
return { valid: false, errors, sanitized: null };
|
||||
}
|
||||
|
||||
const trimmed = url.trim();
|
||||
|
||||
if (trimmed.length > rules.maxLength) {
|
||||
errors.push(`URL must not exceed ${rules.maxLength} characters`);
|
||||
}
|
||||
|
||||
// Check if URL is valid and uses allowed protocols
|
||||
const options = {
|
||||
protocols: rules.protocols,
|
||||
require_protocol: true,
|
||||
allow_underscores: true
|
||||
};
|
||||
|
||||
if (!allowLocalhost) {
|
||||
options.disallow_auth = false;
|
||||
}
|
||||
|
||||
if (!validator.isURL(trimmed, options)) {
|
||||
errors.push('Invalid URL format');
|
||||
}
|
||||
|
||||
// Additional security checks
|
||||
if (trimmed.includes('javascript:')) {
|
||||
errors.push('URL contains invalid content');
|
||||
}
|
||||
|
||||
return {
|
||||
valid: errors.length === 0,
|
||||
errors,
|
||||
sanitized: trimmed
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Validate playlist name
|
||||
*/
|
||||
function validatePlaylistName(name) {
|
||||
const errors = [];
|
||||
const rules = VALIDATION_RULES.playlistName;
|
||||
|
||||
if (!name || typeof name !== 'string') {
|
||||
errors.push('Playlist name is required');
|
||||
return { valid: false, errors, sanitized: null };
|
||||
}
|
||||
|
||||
const trimmed = name.trim();
|
||||
|
||||
if (trimmed.length < rules.minLength) {
|
||||
errors.push(`Playlist name must be at least ${rules.minLength} character`);
|
||||
}
|
||||
|
||||
if (trimmed.length > rules.maxLength) {
|
||||
errors.push(`Playlist name must not exceed ${rules.maxLength} characters`);
|
||||
}
|
||||
|
||||
if (!rules.pattern.test(trimmed)) {
|
||||
errors.push('Playlist name contains invalid characters');
|
||||
}
|
||||
|
||||
const sanitized = rules.sanitize ? sanitizeString(trimmed) : trimmed;
|
||||
|
||||
return {
|
||||
valid: errors.length === 0,
|
||||
errors,
|
||||
sanitized
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Validate channel name
|
||||
*/
|
||||
function validateChannelName(name) {
|
||||
const errors = [];
|
||||
const rules = VALIDATION_RULES.channelName;
|
||||
|
||||
if (!name || typeof name !== 'string') {
|
||||
errors.push('Channel name is required');
|
||||
return { valid: false, errors, sanitized: null };
|
||||
}
|
||||
|
||||
const trimmed = name.trim();
|
||||
|
||||
if (trimmed.length < rules.minLength) {
|
||||
errors.push(`Channel name must be at least ${rules.minLength} character`);
|
||||
}
|
||||
|
||||
if (trimmed.length > rules.maxLength) {
|
||||
errors.push(`Channel name must not exceed ${rules.maxLength} characters`);
|
||||
}
|
||||
|
||||
const sanitized = rules.sanitize ? sanitizeString(trimmed) : trimmed;
|
||||
|
||||
return {
|
||||
valid: errors.length === 0,
|
||||
errors,
|
||||
sanitized
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Validate description/text field
|
||||
*/
|
||||
function validateDescription(description) {
|
||||
const errors = [];
|
||||
const rules = VALIDATION_RULES.description;
|
||||
|
||||
if (!description) {
|
||||
return { valid: true, errors: [], sanitized: '' };
|
||||
}
|
||||
|
||||
if (typeof description !== 'string') {
|
||||
errors.push('Description must be a string');
|
||||
return { valid: false, errors, sanitized: null };
|
||||
}
|
||||
|
||||
const trimmed = description.trim();
|
||||
|
||||
if (trimmed.length > rules.maxLength) {
|
||||
errors.push(`Description must not exceed ${rules.maxLength} characters`);
|
||||
}
|
||||
|
||||
const sanitized = rules.sanitize ? sanitizeString(trimmed) : trimmed;
|
||||
|
||||
return {
|
||||
valid: errors.length === 0,
|
||||
errors,
|
||||
sanitized
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Validate filename
|
||||
*/
|
||||
function validateFilename(filename) {
|
||||
const errors = [];
|
||||
const rules = VALIDATION_RULES.filename;
|
||||
|
||||
if (!filename || typeof filename !== 'string') {
|
||||
errors.push('Filename is required');
|
||||
return { valid: false, errors, sanitized: null };
|
||||
}
|
||||
|
||||
const trimmed = filename.trim();
|
||||
|
||||
if (trimmed.length > rules.maxLength) {
|
||||
errors.push(`Filename must not exceed ${rules.maxLength} characters`);
|
||||
}
|
||||
|
||||
if (!rules.pattern.test(trimmed)) {
|
||||
errors.push('Filename contains invalid characters');
|
||||
}
|
||||
|
||||
// Check for path traversal attempts
|
||||
if (trimmed.includes('..') || trimmed.includes('/') || trimmed.includes('\\')) {
|
||||
errors.push('Filename contains invalid path characters');
|
||||
}
|
||||
|
||||
const sanitized = rules.sanitize ? sanitizeString(trimmed) : trimmed;
|
||||
|
||||
return {
|
||||
valid: errors.length === 0,
|
||||
errors,
|
||||
sanitized
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Validate setting key
|
||||
*/
|
||||
function validateSettingKey(key) {
|
||||
const errors = [];
|
||||
const rules = VALIDATION_RULES.settingKey;
|
||||
|
||||
if (!key || typeof key !== 'string') {
|
||||
errors.push('Setting key is required');
|
||||
return { valid: false, errors, sanitized: null };
|
||||
}
|
||||
|
||||
const trimmed = key.trim();
|
||||
|
||||
if (trimmed.length > rules.maxLength) {
|
||||
errors.push(`Setting key must not exceed ${rules.maxLength} characters`);
|
||||
}
|
||||
|
||||
if (!rules.pattern.test(trimmed)) {
|
||||
errors.push('Setting key contains invalid characters');
|
||||
}
|
||||
|
||||
return {
|
||||
valid: errors.length === 0,
|
||||
errors,
|
||||
sanitized: trimmed
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Validate integer
|
||||
*/
|
||||
function validateInteger(value, min = Number.MIN_SAFE_INTEGER, max = Number.MAX_SAFE_INTEGER) {
|
||||
const errors = [];
|
||||
|
||||
const num = parseInt(value, 10);
|
||||
|
||||
if (isNaN(num)) {
|
||||
errors.push('Must be a valid integer');
|
||||
return { valid: false, errors, sanitized: null };
|
||||
}
|
||||
|
||||
if (num < min) {
|
||||
errors.push(`Must be at least ${min}`);
|
||||
}
|
||||
|
||||
if (num > max) {
|
||||
errors.push(`Must not exceed ${max}`);
|
||||
}
|
||||
|
||||
return {
|
||||
valid: errors.length === 0,
|
||||
errors,
|
||||
sanitized: num
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Validate boolean
|
||||
*/
|
||||
function validateBoolean(value) {
|
||||
if (typeof value === 'boolean') {
|
||||
return { valid: true, errors: [], sanitized: value };
|
||||
}
|
||||
|
||||
if (value === 'true' || value === '1' || value === 1) {
|
||||
return { valid: true, errors: [], sanitized: true };
|
||||
}
|
||||
|
||||
if (value === 'false' || value === '0' || value === 0) {
|
||||
return { valid: true, errors: [], sanitized: false };
|
||||
}
|
||||
|
||||
return {
|
||||
valid: false,
|
||||
errors: ['Must be a valid boolean'],
|
||||
sanitized: null
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Validate JSON
|
||||
*/
|
||||
function validateJSON(value, maxSize = 10000) {
|
||||
const errors = [];
|
||||
|
||||
if (typeof value === 'object') {
|
||||
const jsonString = JSON.stringify(value);
|
||||
if (jsonString.length > maxSize) {
|
||||
errors.push(`JSON data exceeds maximum size of ${maxSize} characters`);
|
||||
}
|
||||
return {
|
||||
valid: errors.length === 0,
|
||||
errors,
|
||||
sanitized: value
|
||||
};
|
||||
}
|
||||
|
||||
if (typeof value !== 'string') {
|
||||
errors.push('Must be valid JSON');
|
||||
return { valid: false, errors, sanitized: null };
|
||||
}
|
||||
|
||||
try {
|
||||
const parsed = JSON.parse(value);
|
||||
if (value.length > maxSize) {
|
||||
errors.push(`JSON data exceeds maximum size of ${maxSize} characters`);
|
||||
}
|
||||
return {
|
||||
valid: errors.length === 0,
|
||||
errors,
|
||||
sanitized: parsed
|
||||
};
|
||||
} catch (e) {
|
||||
errors.push('Invalid JSON format');
|
||||
return { valid: false, errors, sanitized: null };
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Sanitize object with multiple fields
|
||||
*/
|
||||
function sanitizeObject(obj, schema) {
|
||||
const sanitized = {};
|
||||
const errors = {};
|
||||
let hasErrors = false;
|
||||
|
||||
for (const [key, validator] of Object.entries(schema)) {
|
||||
const value = obj[key];
|
||||
const result = validator(value);
|
||||
|
||||
if (!result.valid) {
|
||||
errors[key] = result.errors;
|
||||
hasErrors = true;
|
||||
} else {
|
||||
sanitized[key] = result.sanitized;
|
||||
}
|
||||
}
|
||||
|
||||
return {
|
||||
valid: !hasErrors,
|
||||
errors,
|
||||
sanitized
|
||||
};
|
||||
}
|
||||
|
||||
module.exports = {
|
||||
validateUsername,
|
||||
validateEmail,
|
||||
validateUrl,
|
||||
validatePlaylistName,
|
||||
validateChannelName,
|
||||
validateDescription,
|
||||
validateFilename,
|
||||
validateSettingKey,
|
||||
validateInteger,
|
||||
validateBoolean,
|
||||
validateJSON,
|
||||
sanitizeString,
|
||||
sanitizeObject,
|
||||
VALIDATION_RULES
|
||||
};
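
Usage sketch (illustrative, not part of the commit): composing the validators above with sanitizeObject in a request handler. The require path and the channel-update handler are assumptions; the validator signatures match the functions defined in this file.

const { validateChannelName, validateDescription, validateInteger, sanitizeObject } = require('./validation');

function validateChannelUpdate(body) {
  // Each schema entry maps a field to a validator returning { valid, errors, sanitized }
  return sanitizeObject(body, {
    name: validateChannelName,
    description: validateDescription,
    sortOrder: (v) => validateInteger(v, 0, 10000)
  });
}

// const { valid, errors, sanitized } = validateChannelUpdate(req.body);
// if (!valid) return res.status(400).json({ errors });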
444
backend/utils/logAggregator.js
Normal file
@ -0,0 +1,444 @@
/**
|
||||
* Centralized Log Aggregation System (SIEM)
|
||||
* Consolidates logs from multiple sources into a protected repository
|
||||
* Provides holistic visibility across the infrastructure
|
||||
*/
|
||||
|
||||
const logger = require('./logger');
|
||||
const { db } = require('../database/db');
|
||||
const crypto = require('crypto');
|
||||
const fs = require('fs').promises;
|
||||
const path = require('path');
|
||||
|
||||
class LogAggregator {
|
||||
constructor() {
|
||||
this.logSources = new Map();
|
||||
this.aggregationBuffer = [];
|
||||
this.bufferSize = 100; // Batch size for bulk insert
|
||||
this.flushInterval = 5000; // 5 seconds
|
||||
this.initializeAggregation();
|
||||
}
|
||||
|
||||
/**
|
||||
* Initialize aggregation system
|
||||
*/
|
||||
async initializeAggregation() {
|
||||
// Create aggregated_logs table if not exists
|
||||
await this.createAggregatedLogsTable();
|
||||
|
||||
// Start periodic flush
|
||||
setInterval(() => this.flushBuffer(), this.flushInterval);
|
||||
|
||||
logger.info('[LogAggregator] Initialized - SIEM mode active');
|
||||
}
|
||||
|
||||
/**
|
||||
* Create database table for aggregated logs
|
||||
*/
|
||||
async createAggregatedLogsTable() {
|
||||
return new Promise((resolve, reject) => {
|
||||
db.run(`
|
||||
CREATE TABLE IF NOT EXISTS aggregated_logs (
|
||||
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
||||
log_id TEXT UNIQUE NOT NULL,
|
||||
source TEXT NOT NULL,
|
||||
level TEXT NOT NULL,
|
||||
category TEXT NOT NULL,
|
||||
message TEXT NOT NULL,
|
||||
metadata TEXT,
|
||||
user_id INTEGER,
|
||||
ip_address TEXT,
|
||||
user_agent TEXT,
|
||||
signature TEXT NOT NULL,
|
||||
timestamp DATETIME DEFAULT CURRENT_TIMESTAMP,
|
||||
created_at DATETIME DEFAULT CURRENT_TIMESTAMP
|
||||
)
|
||||
`, (err) => {
|
||||
if (err) {
|
||||
logger.error('[LogAggregator] Failed to create aggregated_logs table:', err);
|
||||
reject(err);
|
||||
} else {
|
||||
// Create indexes for fast querying
|
||||
db.run(`CREATE INDEX IF NOT EXISTS idx_aggregated_logs_source ON aggregated_logs(source, timestamp DESC)`);
|
||||
db.run(`CREATE INDEX IF NOT EXISTS idx_aggregated_logs_level ON aggregated_logs(level, timestamp DESC)`);
|
||||
db.run(`CREATE INDEX IF NOT EXISTS idx_aggregated_logs_category ON aggregated_logs(category, timestamp DESC)`);
|
||||
db.run(`CREATE INDEX IF NOT EXISTS idx_aggregated_logs_user ON aggregated_logs(user_id, timestamp DESC)`);
|
||||
db.run(`CREATE INDEX IF NOT EXISTS idx_aggregated_logs_timestamp ON aggregated_logs(timestamp DESC)`);
|
||||
resolve();
|
||||
}
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Register a log source
|
||||
* @param {String} sourceName - Name of the log source
|
||||
* @param {Object} config - Source configuration
|
||||
*/
|
||||
registerSource(sourceName, config = {}) {
|
||||
this.logSources.set(sourceName, {
|
||||
name: sourceName,
|
||||
enabled: config.enabled !== false,
|
||||
priority: config.priority || 'medium',
|
||||
retention: config.retention || 90, // days
|
||||
...config
|
||||
});
|
||||
|
||||
logger.info(`[LogAggregator] Registered source: ${sourceName}`);
|
||||
}
|
||||
|
||||
/**
|
||||
* Aggregate log entry with cryptographic signature
|
||||
* @param {String} source - Log source identifier
|
||||
* @param {String} level - Log level (info, warn, error, critical)
|
||||
* @param {String} category - Log category (auth, access, security, system, application)
|
||||
* @param {String} message - Log message
|
||||
* @param {Object} details - Additional details
|
||||
*/
|
||||
async aggregate(source, level, category, message, details = {}) {
|
||||
const logId = this.generateLogId();
|
||||
const timestamp = new Date().toISOString();
|
||||
|
||||
const logEntry = {
|
||||
log_id: logId,
|
||||
source,
|
||||
level,
|
||||
category,
|
||||
message,
|
||||
metadata: JSON.stringify({
|
||||
...details,
|
||||
aggregatedAt: timestamp
|
||||
}),
|
||||
user_id: details.userId || null,
|
||||
ip_address: details.ip || null,
|
||||
user_agent: details.userAgent || null,
|
||||
timestamp
|
||||
};
|
||||
|
||||
// Generate cryptographic signature for log integrity
|
||||
logEntry.signature = this.generateSignature(logEntry);
|
||||
|
||||
// Add to buffer
|
||||
this.aggregationBuffer.push(logEntry);
|
||||
|
||||
// Flush if buffer is full
|
||||
if (this.aggregationBuffer.length >= this.bufferSize) {
|
||||
await this.flushBuffer();
|
||||
}
|
||||
|
||||
return logId;
|
||||
}
|
||||
|
||||
/**
|
||||
* Generate unique log ID
|
||||
*/
|
||||
generateLogId() {
|
||||
const timestamp = Date.now();
|
||||
const random = crypto.randomBytes(8).toString('hex');
|
||||
return `LOG-${timestamp}-${random}`;
|
||||
}
|
||||
|
||||
/**
|
||||
* Generate cryptographic signature for log entry
|
||||
* SHA-256 HMAC with secret key for integrity verification
|
||||
*/
|
||||
generateSignature(logEntry) {
|
||||
// Fallback is for development only; set LOG_SIGNATURE_SECRET in production so signatures are meaningful
const secret = process.env.LOG_SIGNATURE_SECRET || 'default-secret-change-in-production';
|
||||
const data = `${logEntry.log_id}|${logEntry.source}|${logEntry.level}|${logEntry.category}|${logEntry.message}|${logEntry.timestamp}`;
|
||||
|
||||
return crypto
|
||||
.createHmac('sha256', secret)
|
||||
.update(data)
|
||||
.digest('hex');
|
||||
}
|
||||
|
||||
/**
|
||||
* Verify log entry signature
|
||||
*/
|
||||
verifySignature(logEntry) {
|
||||
const expectedSignature = this.generateSignature(logEntry);
|
||||
return logEntry.signature === expectedSignature;
|
||||
}
|
||||
|
||||
/**
|
||||
* Flush aggregation buffer to database
|
||||
*/
|
||||
async flushBuffer() {
|
||||
if (this.aggregationBuffer.length === 0) return;
|
||||
|
||||
const batch = [...this.aggregationBuffer];
|
||||
this.aggregationBuffer = [];
|
||||
|
||||
try {
|
||||
await this.bulkInsert(batch);
|
||||
logger.debug(`[LogAggregator] Flushed ${batch.length} log entries`);
|
||||
} catch (error) {
|
||||
logger.error('[LogAggregator] Failed to flush buffer:', error);
|
||||
// Re-add failed entries to buffer
|
||||
this.aggregationBuffer.unshift(...batch);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Bulk insert log entries
|
||||
*/
|
||||
async bulkInsert(entries) {
|
||||
if (entries.length === 0) return;
|
||||
|
||||
const placeholders = entries.map(() => '(?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)').join(','); // 11 bind parameters per row; keep the batch size within SQLite's bound-variable limit
|
||||
const values = entries.flatMap(entry => [
|
||||
entry.log_id,
|
||||
entry.source,
|
||||
entry.level,
|
||||
entry.category,
|
||||
entry.message,
|
||||
entry.metadata,
|
||||
entry.user_id,
|
||||
entry.ip_address,
|
||||
entry.user_agent,
|
||||
entry.signature,
|
||||
entry.timestamp
|
||||
]);
|
||||
|
||||
return new Promise((resolve, reject) => {
|
||||
db.run(
|
||||
`INSERT INTO aggregated_logs
|
||||
(log_id, source, level, category, message, metadata, user_id, ip_address, user_agent, signature, timestamp)
|
||||
VALUES ${placeholders}`,
|
||||
values,
|
||||
(err) => {
|
||||
if (err) reject(err);
|
||||
else resolve();
|
||||
}
|
||||
);
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Query aggregated logs
|
||||
*/
|
||||
async query(filters = {}) {
|
||||
const {
|
||||
source,
|
||||
level,
|
||||
category,
|
||||
userId,
|
||||
startDate,
|
||||
endDate,
|
||||
limit = 1000,
|
||||
offset = 0,
|
||||
orderBy = 'timestamp',
|
||||
order = 'DESC'
|
||||
} = filters;
|
||||
|
||||
let whereClause = [];
|
||||
let params = [];
|
||||
|
||||
if (source) {
|
||||
whereClause.push('source = ?');
|
||||
params.push(source);
|
||||
}
|
||||
|
||||
if (level) {
|
||||
if (Array.isArray(level)) {
|
||||
whereClause.push(`level IN (${level.map(() => '?').join(',')})`);
|
||||
params.push(...level);
|
||||
} else {
|
||||
whereClause.push('level = ?');
|
||||
params.push(level);
|
||||
}
|
||||
}
|
||||
|
||||
if (category) {
|
||||
whereClause.push('category = ?');
|
||||
params.push(category);
|
||||
}
|
||||
|
||||
if (userId) {
|
||||
whereClause.push('user_id = ?');
|
||||
params.push(userId);
|
||||
}
|
||||
|
||||
if (startDate) {
|
||||
whereClause.push('timestamp >= ?');
|
||||
params.push(startDate);
|
||||
}
|
||||
|
||||
if (endDate) {
|
||||
whereClause.push('timestamp <= ?');
|
||||
params.push(endDate);
|
||||
}
|
||||
|
||||
const where = whereClause.length > 0 ? `WHERE ${whereClause.join(' AND ')}` : '';

// orderBy/order are interpolated into the statement, so restrict them to known-safe values
const safeOrderBy = ['timestamp', 'source', 'level', 'category', 'user_id', 'log_id'].includes(orderBy) ? orderBy : 'timestamp';
const safeOrder = String(order).toUpperCase() === 'ASC' ? 'ASC' : 'DESC';

params.push(limit, offset);

return new Promise((resolve, reject) => {
db.all(
`SELECT * FROM aggregated_logs ${where}
ORDER BY ${safeOrderBy} ${safeOrder}
LIMIT ? OFFSET ?`,
|
||||
params,
|
||||
(err, rows) => {
|
||||
if (err) reject(err);
|
||||
else resolve(rows);
|
||||
}
|
||||
);
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Get log statistics
|
||||
*/
|
||||
async getStatistics(timeRange = 24) {
|
||||
const startTime = new Date(Date.now() - timeRange * 60 * 60 * 1000).toISOString();
|
||||
|
||||
return new Promise((resolve, reject) => {
|
||||
db.all(
|
||||
`SELECT
|
||||
source,
|
||||
level,
|
||||
category,
|
||||
COUNT(*) as count,
|
||||
MIN(timestamp) as first_seen,
|
||||
MAX(timestamp) as last_seen
|
||||
FROM aggregated_logs
|
||||
WHERE timestamp >= ?
|
||||
GROUP BY source, level, category
|
||||
ORDER BY count DESC`,
|
||||
[startTime],
|
||||
(err, rows) => {
|
||||
if (err) reject(err);
|
||||
else {
|
||||
const stats = {
|
||||
timeRange: `${timeRange} hours`,
|
||||
totalLogs: rows.reduce((sum, row) => sum + row.count, 0),
|
||||
bySource: {},
|
||||
byLevel: {},
|
||||
byCategory: {},
|
||||
breakdown: rows
|
||||
};
|
||||
|
||||
rows.forEach(row => {
|
||||
// By source
|
||||
if (!stats.bySource[row.source]) stats.bySource[row.source] = 0;
|
||||
stats.bySource[row.source] += row.count;
|
||||
|
||||
// By level
|
||||
if (!stats.byLevel[row.level]) stats.byLevel[row.level] = 0;
|
||||
stats.byLevel[row.level] += row.count;
|
||||
|
||||
// By category
|
||||
if (!stats.byCategory[row.category]) stats.byCategory[row.category] = 0;
|
||||
stats.byCategory[row.category] += row.count;
|
||||
});
|
||||
|
||||
resolve(stats);
|
||||
}
|
||||
}
|
||||
);
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Verify log integrity
|
||||
* Checks if log entries have been tampered with
|
||||
*/
|
||||
async verifyIntegrity(logIds = null) {
|
||||
const query = logIds
|
||||
? `SELECT * FROM aggregated_logs WHERE log_id IN (${logIds.map(() => '?').join(',')})`
|
||||
: `SELECT * FROM aggregated_logs ORDER BY timestamp DESC LIMIT 1000`;
|
||||
|
||||
const params = logIds || [];
|
||||
|
||||
return new Promise((resolve, reject) => {
|
||||
db.all(query, params, (err, rows) => {
|
||||
if (err) {
|
||||
reject(err);
|
||||
return;
|
||||
}
|
||||
|
||||
const results = {
|
||||
total: rows.length,
|
||||
verified: 0,
|
||||
tampered: 0,
|
||||
tamperedLogs: []
|
||||
};
|
||||
|
||||
rows.forEach(row => {
|
||||
if (this.verifySignature(row)) {
|
||||
results.verified++;
|
||||
} else {
|
||||
results.tampered++;
|
||||
results.tamperedLogs.push({
|
||||
log_id: row.log_id,
|
||||
timestamp: row.timestamp,
|
||||
source: row.source
|
||||
});
|
||||
}
|
||||
});
|
||||
|
||||
resolve(results);
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Cleanup old logs based on retention policy
|
||||
*/
|
||||
async cleanup(retentionDays = 90) {
|
||||
const cutoffDate = new Date(Date.now() - retentionDays * 24 * 60 * 60 * 1000).toISOString();
|
||||
|
||||
return new Promise((resolve, reject) => {
|
||||
db.run(
|
||||
'DELETE FROM aggregated_logs WHERE timestamp < ?',
|
||||
[cutoffDate],
|
||||
function(err) {
|
||||
if (err) reject(err);
|
||||
else {
|
||||
logger.info(`[LogAggregator] Cleaned up ${this.changes} old log entries (retention: ${retentionDays} days)`);
|
||||
resolve(this.changes);
|
||||
}
|
||||
}
|
||||
);
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Export logs to external SIEM system
|
||||
*/
|
||||
async export(filters = {}, format = 'json') {
|
||||
const logs = await this.query({ ...filters, limit: 10000 });
|
||||
|
||||
if (format === 'json') {
|
||||
return JSON.stringify(logs, null, 2);
|
||||
} else if (format === 'csv') {
|
||||
const headers = ['log_id', 'source', 'level', 'category', 'message', 'timestamp', 'ip_address', 'user_id'];
|
||||
const csv = [headers.join(',')];
|
||||
|
||||
logs.forEach(log => {
|
||||
const row = headers.map(header => {
|
||||
const value = log[header] || '';
|
||||
return `"${String(value).replace(/"/g, '""')}"`;
|
||||
});
|
||||
csv.push(row.join(','));
|
||||
});
|
||||
|
||||
return csv.join('\n');
|
||||
}
|
||||
|
||||
throw new Error(`Unsupported export format: ${format}`);
|
||||
}
|
||||
}
|
||||
|
||||
// Create singleton instance
|
||||
const logAggregator = new LogAggregator();
|
||||
|
||||
// Register default sources
|
||||
logAggregator.registerSource('authentication', { priority: 'critical', retention: 365 });
|
||||
logAggregator.registerSource('authorization', { priority: 'high', retention: 365 });
|
||||
logAggregator.registerSource('security_audit', { priority: 'critical', retention: 365 });
|
||||
logAggregator.registerSource('application', { priority: 'medium', retention: 90 });
|
||||
logAggregator.registerSource('system', { priority: 'high', retention: 180 });
|
||||
logAggregator.registerSource('access', { priority: 'low', retention: 30 });
|
||||
|
||||
module.exports = logAggregator;
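
Usage sketch (illustrative): feeding and querying the aggregator from another backend module. The route/middleware context is an assumption; the calls match the aggregate() and query() API defined above.

const logAggregator = require('./logAggregator');

async function recordFailedLogin(req, username) {
  // Buffered write; flushed in batches every 5 seconds or once 100 entries accumulate
  await logAggregator.aggregate('authentication', 'warn', 'auth', 'Failed login attempt', {
    userId: null,
    ip: req.ip,
    userAgent: req.get('user-agent'),
    username
  });
}

async function recentAuthErrors() {
  // Last 24 hours of warn/error authentication events, newest first
  return logAggregator.query({
    source: 'authentication',
    level: ['warn', 'error'],
    startDate: new Date(Date.now() - 24 * 60 * 60 * 1000).toISOString(),
    limit: 100
  });
}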
124
backend/utils/logger.js
Normal file
@ -0,0 +1,124 @@
const winston = require('winston');
|
||||
const path = require('path');
|
||||
const fs = require('fs');
|
||||
|
||||
// Ensure logs directory exists
|
||||
const logsDir = path.join(__dirname, '../../logs');
|
||||
if (!fs.existsSync(logsDir)) {
|
||||
fs.mkdirSync(logsDir, { recursive: true, mode: 0o755 });
|
||||
}
|
||||
|
||||
/**
|
||||
* Custom format to sanitize sensitive data from logs
|
||||
* Removes passwords, tokens, secrets, and other sensitive information
|
||||
*/
|
||||
const sanitizeFormat = winston.format((info) => {
|
||||
// Convert info to string for pattern matching
|
||||
const infoStr = JSON.stringify(info);
|
||||
|
||||
// Patterns to redact
|
||||
const sensitivePatterns = [
|
||||
{ pattern: /"password"\s*:\s*"[^"]*"/gi, replacement: '"password":"[REDACTED]"' },
|
||||
{ pattern: /"token"\s*:\s*"[^"]*"/gi, replacement: '"token":"[REDACTED]"' },
|
||||
{ pattern: /"secret"\s*:\s*"[^"]*"/gi, replacement: '"secret":"[REDACTED]"' },
|
||||
{ pattern: /"apiKey"\s*:\s*"[^"]*"/gi, replacement: '"apiKey":"[REDACTED]"' },
|
||||
{ pattern: /"api_key"\s*:\s*"[^"]*"/gi, replacement: '"api_key":"[REDACTED]"' },
|
||||
{ pattern: /"authorization"\s*:\s*"Bearer\s+[^"]*"/gi, replacement: '"authorization":"Bearer [REDACTED]"' },
|
||||
{ pattern: /"privateKey"\s*:\s*"[^"]*"/gi, replacement: '"privateKey":"[REDACTED]"' },
|
||||
{ pattern: /"private_key"\s*:\s*"[^"]*"/gi, replacement: '"private_key":"[REDACTED]"' }
|
||||
];
|
||||
|
||||
let sanitized = infoStr;
|
||||
sensitivePatterns.forEach(({ pattern, replacement }) => {
|
||||
sanitized = sanitized.replace(pattern, replacement);
|
||||
});
|
||||
|
||||
try {
|
||||
return JSON.parse(sanitized);
|
||||
} catch (e) {
|
||||
return info; // Return original if parsing fails
|
||||
}
|
||||
});
|
||||
|
||||
/**
|
||||
* Production format: Structured JSON logs without sensitive data
|
||||
*/
|
||||
const productionFormat = winston.format.combine(
|
||||
winston.format.timestamp({ format: 'YYYY-MM-DD HH:mm:ss' }),
|
||||
winston.format.errors({ stack: true }),
|
||||
sanitizeFormat(),
|
||||
winston.format.json()
|
||||
);
|
||||
|
||||
/**
|
||||
* Development format: Human-readable with colors
|
||||
*/
|
||||
const developmentFormat = winston.format.combine(
|
||||
winston.format.timestamp({ format: 'YYYY-MM-DD HH:mm:ss' }),
|
||||
winston.format.errors({ stack: true }),
|
||||
sanitizeFormat(),
|
||||
winston.format.colorize(),
|
||||
winston.format.printf(({ timestamp, level, message, ...meta }) => {
|
||||
let msg = `${timestamp} [${level}]: ${message}`;
|
||||
if (Object.keys(meta).length > 0) {
|
||||
msg += ` ${JSON.stringify(meta, null, 2)}`;
|
||||
}
|
||||
return msg;
|
||||
})
|
||||
);
|
||||
|
||||
const isProduction = process.env.NODE_ENV === 'production';
|
||||
|
||||
const logger = winston.createLogger({
|
||||
level: process.env.LOG_LEVEL || (isProduction ? 'info' : 'debug'),
|
||||
format: productionFormat,
|
||||
defaultMeta: { service: 'streamflow-iptv' },
|
||||
transports: [
|
||||
// Error logs - separate file for errors only
|
||||
new winston.transports.File({
|
||||
filename: path.join(logsDir, 'error.log'),
|
||||
level: 'error',
|
||||
maxsize: 5242880, // 5MB
|
||||
maxFiles: 5,
|
||||
tailable: true
|
||||
}),
|
||||
// Combined logs - all levels
|
||||
new winston.transports.File({
|
||||
filename: path.join(logsDir, 'combined.log'),
|
||||
maxsize: 5242880, // 5MB
|
||||
maxFiles: 5,
|
||||
tailable: true
|
||||
})
|
||||
],
|
||||
// Don't exit on uncaught exceptions
|
||||
exitOnError: false
|
||||
});
|
||||
|
||||
// Console transport for development
|
||||
if (!isProduction) {
|
||||
logger.add(new winston.transports.Console({
|
||||
format: developmentFormat,
|
||||
handleExceptions: true
|
||||
}));
|
||||
}
|
||||
|
||||
// Security audit log helper
|
||||
logger.security = (action, details) => {
|
||||
logger.info('SECURITY_EVENT', {
|
||||
action,
|
||||
timestamp: new Date().toISOString(),
|
||||
...details
|
||||
});
|
||||
};
|
||||
|
||||
// Performance monitoring helper
|
||||
logger.performance = (operation, duration, details = {}) => {
|
||||
logger.info('PERFORMANCE', {
|
||||
operation,
|
||||
duration_ms: duration,
|
||||
timestamp: new Date().toISOString(),
|
||||
...details
|
||||
});
|
||||
};
|
||||
|
||||
module.exports = logger;
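
Usage sketch (illustrative): the shared logger plus the security/performance helpers defined above. Values shown are placeholders.

const logger = require('./logger');

logger.info('Playlist import started', { playlistId: 42 });

// Sensitive fields are redacted by sanitizeFormat before they reach the log files
logger.warn('Upstream request failed', { url: 'https://example.com/epg', token: 'abc123' });

logger.security('LOGIN_FAILED', { userId: null, ip: '203.0.113.5', reason: 'bad_password' });

const start = Date.now();
// ... do some work ...
logger.performance('m3u_parse', Date.now() - start, { channels: 1200 });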
94
backend/utils/m3uParser.js
Normal file
@ -0,0 +1,94 @@
const axios = require('axios');
|
||||
const fs = require('fs');
|
||||
const path = require('path');
|
||||
const { db } = require('../database/db');
|
||||
const logger = require('./logger');
|
||||
|
||||
const parseM3U = async (source, playlistId, isFile = false) => {
|
||||
try {
|
||||
let content;
|
||||
|
||||
if (isFile) {
|
||||
content = fs.readFileSync(source, 'utf8');
|
||||
} else {
|
||||
const response = await axios.get(source, { timeout: 30000 });
|
||||
content = response.data;
|
||||
}
|
||||
|
||||
const lines = content.split('\n').map(line => line.trim());
|
||||
const channels = [];
|
||||
|
||||
for (let i = 0; i < lines.length; i++) {
|
||||
if (lines[i].startsWith('#EXTINF:')) {
|
||||
const info = lines[i];
|
||||
const url = lines[i + 1];
|
||||
|
||||
if (!url || url.startsWith('#')) continue;
|
||||
|
||||
const nameMatch = info.match(/,(.+)$/);
|
||||
const name = nameMatch ? nameMatch[1].trim() : 'Unknown';
|
||||
|
||||
const tvgIdMatch = info.match(/tvg-id="([^"]*)"/);
|
||||
const tvgNameMatch = info.match(/tvg-name="([^"]*)"/);
|
||||
const tvgLogoMatch = info.match(/tvg-logo="([^"]*)"/);
|
||||
const groupTitleMatch = info.match(/group-title="([^"]*)"/);
|
||||
const languageMatch = info.match(/tvg-language="([^"]*)"/);
|
||||
const countryMatch = info.match(/tvg-country="([^"]*)"/);
|
||||
|
||||
const isRadio = info.toLowerCase().includes('radio') ||
|
||||
groupTitleMatch?.[1]?.toLowerCase().includes('radio') ||
|
||||
url.toLowerCase().includes('radio');
|
||||
|
||||
channels.push({
|
||||
playlistId,
|
||||
name,
|
||||
url,
|
||||
logo: tvgLogoMatch ? tvgLogoMatch[1] : null,
|
||||
groupName: groupTitleMatch ? groupTitleMatch[1] : 'Uncategorized',
|
||||
tvgId: tvgIdMatch ? tvgIdMatch[1] : null,
|
||||
tvgName: tvgNameMatch ? tvgNameMatch[1] : null,
|
||||
language: languageMatch ? languageMatch[1] : null,
|
||||
country: countryMatch ? countryMatch[1] : null,
|
||||
isRadio: isRadio ? 1 : 0
|
||||
});
|
||||
|
||||
i++; // Skip the URL line
|
||||
}
|
||||
}
|
||||
|
||||
// Insert channels in batches
|
||||
const batchSize = 100;
|
||||
for (let i = 0; i < channels.length; i += batchSize) {
|
||||
const batch = channels.slice(i, i + batchSize);
|
||||
const stmt = db.prepare(`
|
||||
INSERT INTO channels (playlist_id, name, url, logo, group_name, tvg_id, tvg_name, language, country, is_radio)
|
||||
VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
|
||||
`);
|
||||
|
||||
for (const channel of batch) {
|
||||
stmt.run(
|
||||
channel.playlistId,
|
||||
channel.name,
|
||||
channel.url,
|
||||
channel.logo,
|
||||
channel.groupName,
|
||||
channel.tvgId,
|
||||
channel.tvgName,
|
||||
channel.language,
|
||||
channel.country,
|
||||
channel.isRadio
|
||||
);
|
||||
}
|
||||
|
||||
stmt.finalize();
|
||||
}
|
||||
|
||||
logger.info(`Parsed ${channels.length} channels for playlist ${playlistId}`);
|
||||
return channels.length;
|
||||
} catch (error) {
|
||||
logger.error('M3U parsing error:', error);
|
||||
throw error;
|
||||
}
|
||||
};
|
||||
|
||||
module.exports = { parseM3U };
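
Usage sketch (illustrative): invoking the parser from a playlist import job. The playlist object shape (url, filePath, id) is an assumption about the surrounding schema; the parseM3U call matches the export above.

const { parseM3U } = require('./m3uParser');
const logger = require('./logger');

async function importPlaylist(playlist) {
  try {
    const isFile = Boolean(playlist.filePath);
    const source = isFile ? playlist.filePath : playlist.url;
    const count = await parseM3U(source, playlist.id, isFile);
    logger.info(`Playlist ${playlist.id} imported with ${count} channels`);
    return count;
  } catch (error) {
    logger.error(`Playlist ${playlist.id} import failed: ${error.message}`);
    throw error;
  }
}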
165
backend/utils/passwordPolicy.js
Normal file
@ -0,0 +1,165 @@
/**
|
||||
* Password Policy Configuration
|
||||
* Enforces strong password requirements
|
||||
*/
|
||||
|
||||
const PASSWORD_POLICY = {
|
||||
minLength: 12,
|
||||
maxLength: 128,
|
||||
requireUppercase: true,
|
||||
requireLowercase: true,
|
||||
requireNumbers: true,
|
||||
requireSpecialChars: true,
|
||||
specialChars: '!@#$%^&*()_+-=[]{}|;:,.<>?',
|
||||
preventCommonPasswords: true,
|
||||
preventUserInfo: true, // Don't allow username/email in password
|
||||
maxRepeatingChars: 3,
|
||||
historyCount: 5 // Remember last 5 passwords
|
||||
};
|
||||
|
||||
const ACCOUNT_LOCKOUT = {
|
||||
maxFailedAttempts: 5,
|
||||
lockoutDuration: 30 * 60 * 1000, // 30 minutes
|
||||
resetAfterSuccess: true,
|
||||
notifyOnLockout: true
|
||||
};
|
||||
|
||||
const PASSWORD_EXPIRY = {
|
||||
enabled: true,
|
||||
expiryDays: 90,
|
||||
warningDays: 14,
|
||||
gracePeriodDays: 7
|
||||
};
|
||||
|
||||
const SESSION_POLICY = {
|
||||
maxConcurrentSessions: 3,
|
||||
absoluteTimeout: 24 * 60 * 60 * 1000, // 24 hours
|
||||
idleTimeout: 2 * 60 * 60 * 1000, // 2 hours
|
||||
refreshTokenRotation: true
|
||||
};
|
||||
|
||||
// Common passwords to block (top 100 most common)
|
||||
const COMMON_PASSWORDS = [
|
||||
'123456', 'password', '12345678', 'qwerty', '123456789', '12345', '1234', '111111',
|
||||
'1234567', 'dragon', '123123', 'baseball', 'iloveyou', 'trustno1', '1234567890',
|
||||
'sunshine', 'master', '123321', '666666', 'photoshop', '1111111', 'princess', 'azerty',
|
||||
'000000', 'access', '696969', 'batman', '121212', 'letmein', 'qwertyuiop', 'admin',
|
||||
'welcome', 'monkey', 'login', 'abc123', 'starwars', 'shadow', 'ashley', 'football',
|
||||
'superman', 'michael', 'ninja', 'mustang', 'password1', 'passw0rd', 'password123'
|
||||
];
|
||||
|
||||
/**
|
||||
* Validates password against policy
|
||||
* @param {string} password - Password to validate
|
||||
* @param {object} userData - User data (username, email) to prevent personal info
|
||||
* @returns {object} - {valid: boolean, errors: string[]}
|
||||
*/
|
||||
function validatePassword(password, userData = {}) {
|
||||
const errors = [];
|
||||
|
||||
// Length check
|
||||
if (password.length < PASSWORD_POLICY.minLength) {
|
||||
errors.push(`Password must be at least ${PASSWORD_POLICY.minLength} characters long`);
|
||||
}
|
||||
if (password.length > PASSWORD_POLICY.maxLength) {
|
||||
errors.push(`Password must not exceed ${PASSWORD_POLICY.maxLength} characters`);
|
||||
}
|
||||
|
||||
// Character requirements
|
||||
if (PASSWORD_POLICY.requireUppercase && !/[A-Z]/.test(password)) {
|
||||
errors.push('Password must contain at least one uppercase letter');
|
||||
}
|
||||
if (PASSWORD_POLICY.requireLowercase && !/[a-z]/.test(password)) {
|
||||
errors.push('Password must contain at least one lowercase letter');
|
||||
}
|
||||
if (PASSWORD_POLICY.requireNumbers && !/\d/.test(password)) {
|
||||
errors.push('Password must contain at least one number');
|
||||
}
|
||||
if (PASSWORD_POLICY.requireSpecialChars) {
|
||||
// Escape regex metacharacters (including '-') so the character class contains no unintended ranges
const specialCharsRegex = new RegExp(`[${PASSWORD_POLICY.specialChars.replace(/[-.*+?^${}()|[\]\\]/g, '\\$&')}]`);
|
||||
if (!specialCharsRegex.test(password)) {
|
||||
errors.push('Password must contain at least one special character (!@#$%^&*...)');
|
||||
}
|
||||
}
|
||||
|
||||
// Repeating characters
|
||||
const repeatingRegex = new RegExp(`(.)\\1{${PASSWORD_POLICY.maxRepeatingChars},}`);
|
||||
if (repeatingRegex.test(password)) {
|
||||
errors.push(`Password cannot contain more than ${PASSWORD_POLICY.maxRepeatingChars} repeating characters`);
|
||||
}
|
||||
|
||||
// Common passwords
|
||||
if (PASSWORD_POLICY.preventCommonPasswords) {
|
||||
const lowerPassword = password.toLowerCase();
|
||||
if (COMMON_PASSWORDS.some(common => lowerPassword.includes(common))) {
|
||||
errors.push('Password is too common or easily guessable');
|
||||
}
|
||||
}
|
||||
|
||||
// User info in password
|
||||
if (PASSWORD_POLICY.preventUserInfo && userData) {
|
||||
const lowerPassword = password.toLowerCase();
|
||||
if (userData.username && lowerPassword.includes(userData.username.toLowerCase())) {
|
||||
errors.push('Password cannot contain your username');
|
||||
}
|
||||
if (userData.email) {
|
||||
const emailParts = userData.email.split('@')[0].toLowerCase();
|
||||
if (lowerPassword.includes(emailParts)) {
|
||||
errors.push('Password cannot contain your email address');
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return {
|
||||
valid: errors.length === 0,
|
||||
errors,
|
||||
strength: calculatePasswordStrength(password)
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Calculate password strength score (0-100)
|
||||
*/
|
||||
function calculatePasswordStrength(password) {
|
||||
let score = 0;
|
||||
|
||||
// Length score (0-30 points)
|
||||
score += Math.min(30, password.length * 2);
|
||||
|
||||
// Character variety (0-40 points)
|
||||
if (/[a-z]/.test(password)) score += 10;
|
||||
if (/[A-Z]/.test(password)) score += 10;
|
||||
if (/\d/.test(password)) score += 10;
|
||||
if (/[^a-zA-Z0-9]/.test(password)) score += 10;
|
||||
|
||||
// Patterns (0-30 points)
|
||||
const hasNoRepeats = !/(.)\1{2,}/.test(password); // true when no character repeats 3+ times in a row
|
||||
const hasNoSequence = !/(?:abc|bcd|cde|123|234|345)/i.test(password);
|
||||
const hasMixedCase = /[a-z]/.test(password) && /[A-Z]/.test(password);
|
||||
|
||||
if (hasNoRepeats) score += 10;
|
||||
if (hasNoSequence) score += 10;
|
||||
if (hasMixedCase) score += 10;
|
||||
|
||||
return Math.min(100, score);
|
||||
}
|
||||
|
||||
/**
|
||||
* Get password strength label
|
||||
*/
|
||||
function getStrengthLabel(score) {
|
||||
if (score >= 80) return { label: 'Strong', color: 'success' };
|
||||
if (score >= 60) return { label: 'Good', color: 'info' };
|
||||
if (score >= 40) return { label: 'Fair', color: 'warning' };
|
||||
return { label: 'Weak', color: 'error' };
|
||||
}
|
||||
|
||||
module.exports = {
|
||||
PASSWORD_POLICY,
|
||||
ACCOUNT_LOCKOUT,
|
||||
PASSWORD_EXPIRY,
|
||||
SESSION_POLICY,
|
||||
validatePassword,
|
||||
calculatePasswordStrength,
|
||||
getStrengthLabel
|
||||
};
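
Usage sketch (illustrative): enforcing the policy in a registration or password-change handler (the handler shape is assumed; the calls match the exports above).

const { validatePassword, getStrengthLabel } = require('./passwordPolicy');

function checkNewPassword(password, username, email) {
  const result = validatePassword(password, { username, email });
  const strength = getStrengthLabel(result.strength);

  if (!result.valid) {
    // e.g. ['Password must be at least 12 characters long', ...]
    return { ok: false, errors: result.errors, strength };
  }
  return { ok: true, strength }; // { label: 'Strong', color: 'success' }, etc.
}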
324
backend/utils/radioMetadata.js
Normal file
@ -0,0 +1,324 @@
const axios = require('axios');
|
||||
const logger = require('./logger');
|
||||
|
||||
/**
|
||||
* Radio station metadata providers
|
||||
* Maps station names/URLs to their API endpoints or scraping methods
|
||||
*/
|
||||
|
||||
// Europa FM API
|
||||
async function getEuropaFMMetadata() {
|
||||
try {
|
||||
const response = await axios.get('https://www.europafm.ro/now-playing/', {
|
||||
timeout: 5000,
|
||||
headers: {
|
||||
'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36'
|
||||
}
|
||||
});
|
||||
|
||||
// Parse the HTML or JSON response
|
||||
const html = response.data;
|
||||
|
||||
// Look for common patterns in Europa FM's page
|
||||
const titleMatch = html.match(/<div class="now-playing-title">([^<]+)<\/div>/i) ||
|
||||
html.match(/<span class="track-title">([^<]+)<\/span>/i) ||
|
||||
html.match(/"title":"([^"]+)"/);
|
||||
const artistMatch = html.match(/<div class="now-playing-artist">([^<]+)<\/div>/i) ||
|
||||
html.match(/<span class="track-artist">([^<]+)<\/span>/i) ||
|
||||
html.match(/"artist":"([^"]+)"/);
|
||||
|
||||
if (titleMatch || artistMatch) {
|
||||
return {
|
||||
title: titleMatch ? titleMatch[1].trim() : null,
|
||||
artist: artistMatch ? artistMatch[1].trim() : null,
|
||||
source: 'Europa FM Website'
|
||||
};
|
||||
}
|
||||
|
||||
// Try API endpoint
|
||||
try {
|
||||
const apiResponse = await axios.get('https://www.europafm.ro/api/now-playing', {
|
||||
timeout: 3000,
|
||||
headers: { 'User-Agent': 'Mozilla/5.0' }
|
||||
});
|
||||
|
||||
if (apiResponse.data && apiResponse.data.title) {
|
||||
return {
|
||||
title: apiResponse.data.title,
|
||||
artist: apiResponse.data.artist || null,
|
||||
source: 'Europa FM API'
|
||||
};
|
||||
}
|
||||
} catch (apiError) {
|
||||
// API might not exist, continue
|
||||
}
|
||||
|
||||
return null;
|
||||
} catch (error) {
|
||||
logger.error('Europa FM metadata fetch error:', error.message);
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
// Radio Romania (various stations)
|
||||
async function getRadioRomaniaMetadata(stationId = 'actualitati') {
|
||||
try {
|
||||
const response = await axios.get(`https://www.radioromania.ro/live/${stationId}/`, {
|
||||
timeout: 5000,
|
||||
headers: {
|
||||
'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36'
|
||||
}
|
||||
});
|
||||
|
||||
const html = response.data;
|
||||
const titleMatch = html.match(/"currentSong":"([^"]+)"/);
|
||||
const artistMatch = html.match(/"currentArtist":"([^"]+)"/);
|
||||
|
||||
if (titleMatch || artistMatch) {
|
||||
return {
|
||||
title: titleMatch ? titleMatch[1].trim() : null,
|
||||
artist: artistMatch ? artistMatch[1].trim() : null,
|
||||
source: 'Radio Romania'
|
||||
};
|
||||
}
|
||||
|
||||
return null;
|
||||
} catch (error) {
|
||||
logger.error('Radio Romania metadata fetch error:', error.message);
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
// Magic FM
|
||||
async function getMagicFMMetadata() {
|
||||
try {
|
||||
const response = await axios.get('https://www.magicfm.ro/now-playing/', {
|
||||
timeout: 5000,
|
||||
headers: {
|
||||
'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36'
|
||||
}
|
||||
});
|
||||
|
||||
const html = response.data;
|
||||
const titleMatch = html.match(/"title":"([^"]+)"/);
|
||||
const artistMatch = html.match(/"artist":"([^"]+)"/);
|
||||
|
||||
if (titleMatch || artistMatch) {
|
||||
return {
|
||||
title: titleMatch ? titleMatch[1].trim() : null,
|
||||
artist: artistMatch ? artistMatch[1].trim() : null,
|
||||
source: 'Magic FM'
|
||||
};
|
||||
}
|
||||
|
||||
return null;
|
||||
} catch (error) {
|
||||
logger.error('Magic FM metadata fetch error:', error.message);
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
// Kiss FM
|
||||
async function getKissFMMetadata() {
|
||||
try {
|
||||
const response = await axios.get('https://www.kissfm.ro/now-playing/', {
|
||||
timeout: 5000,
|
||||
headers: {
|
||||
'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36'
|
||||
}
|
||||
});
|
||||
|
||||
const html = response.data;
|
||||
const titleMatch = html.match(/"title":"([^"]+)"/);
|
||||
const artistMatch = html.match(/"artist":"([^"]+)"/);
|
||||
|
||||
if (titleMatch || artistMatch) {
|
||||
return {
|
||||
title: titleMatch ? titleMatch[1].trim() : null,
|
||||
artist: artistMatch ? artistMatch[1].trim() : null,
|
||||
source: 'Kiss FM'
|
||||
};
|
||||
}
|
||||
|
||||
return null;
|
||||
} catch (error) {
|
||||
logger.error('Kiss FM metadata fetch error:', error.message);
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
// Pro FM
|
||||
async function getProFMMetadata() {
|
||||
try {
|
||||
const response = await axios.get('https://www.profm.ro/now-playing/', {
|
||||
timeout: 5000,
|
||||
headers: {
|
||||
'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36'
|
||||
}
|
||||
});
|
||||
|
||||
const html = response.data;
|
||||
const titleMatch = html.match(/"title":"([^"]+)"/);
|
||||
const artistMatch = html.match(/"artist":"([^"]+)"/);
|
||||
|
||||
if (titleMatch || artistMatch) {
|
||||
return {
|
||||
title: titleMatch ? titleMatch[1].trim() : null,
|
||||
artist: artistMatch ? artistMatch[1].trim() : null,
|
||||
source: 'Pro FM'
|
||||
};
|
||||
}
|
||||
|
||||
return null;
|
||||
} catch (error) {
|
||||
logger.error('Pro FM metadata fetch error:', error.message);
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
// Generic RadioBrowser API fallback
|
||||
async function getRadioBrowserMetadata(stationName) {
|
||||
try {
|
||||
// Search for station
|
||||
const searchResponse = await axios.get('https://de1.api.radio-browser.info/json/stations/search', {
|
||||
params: {
|
||||
name: stationName,
|
||||
limit: 1
|
||||
},
|
||||
timeout: 5000,
|
||||
headers: {
|
||||
'User-Agent': 'StreamFlow/1.0'
|
||||
}
|
||||
});
|
||||
|
||||
if (searchResponse.data && searchResponse.data.length > 0) {
|
||||
const station = searchResponse.data[0];
|
||||
|
||||
// Radio Browser doesn't provide real-time metadata, but we can try the station's homepage
|
||||
if (station.homepage) {
|
||||
try {
|
||||
const homepageResponse = await axios.get(station.homepage, {
|
||||
timeout: 3000,
|
||||
headers: { 'User-Agent': 'Mozilla/5.0' }
|
||||
});
|
||||
|
||||
const html = homepageResponse.data;
|
||||
// Try common patterns
|
||||
const patterns = [
|
||||
/"nowPlaying":"([^"]+)"/,
|
||||
/"current_track":"([^"]+)"/,
|
||||
/<div[^>]*class="[^"]*now-playing[^"]*"[^>]*>([^<]+)</i,
|
||||
/"title":"([^"]+)"/
|
||||
];
|
||||
|
||||
for (const pattern of patterns) {
|
||||
const match = html.match(pattern);
|
||||
if (match && match[1]) {
|
||||
const parts = match[1].split(' - ');
|
||||
if (parts.length >= 2) {
|
||||
return {
|
||||
artist: parts[0].trim(),
|
||||
title: parts.slice(1).join(' - ').trim(),
|
||||
source: 'Radio Browser + Website'
|
||||
};
|
||||
}
|
||||
return {
|
||||
title: match[1].trim(),
|
||||
artist: null,
|
||||
source: 'Radio Browser + Website'
|
||||
};
|
||||
}
|
||||
}
|
||||
} catch (homepageError) {
|
||||
// Homepage fetch failed, ignore
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return null;
|
||||
} catch (error) {
|
||||
logger.error('Radio Browser metadata fetch error:', error.message);
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Main function to get metadata for a radio station
|
||||
* Tries to identify the station and use the appropriate provider
|
||||
*/
|
||||
async function getRadioStationMetadata(channelName, channelUrl) {
|
||||
const nameLower = channelName.toLowerCase();
|
||||
const urlLower = channelUrl ? channelUrl.toLowerCase() : '';
|
||||
|
||||
logger.info(`[RadioMetadata] Fetching metadata for: ${channelName}`);
|
||||
|
||||
// Try specific providers based on station name or URL
|
||||
try {
|
||||
// Europa FM
|
||||
if (nameLower.includes('europa') && nameLower.includes('fm')) {
|
||||
const metadata = await getEuropaFMMetadata();
|
||||
if (metadata) {
|
||||
logger.info(`[RadioMetadata] Found metadata from ${metadata.source}`);
|
||||
return metadata;
|
||||
}
|
||||
}
|
||||
|
||||
// Radio Romania
|
||||
if (nameLower.includes('radio') && nameLower.includes('romania')) {
|
||||
let stationId = 'actualitati';
|
||||
if (nameLower.includes('muzical')) stationId = 'muzical';
|
||||
if (nameLower.includes('cultural')) stationId = 'cultural';
|
||||
if (nameLower.includes('cluj')) stationId = 'cluj';
|
||||
|
||||
const metadata = await getRadioRomaniaMetadata(stationId);
|
||||
if (metadata) {
|
||||
logger.info(`[RadioMetadata] Found metadata from ${metadata.source}`);
|
||||
return metadata;
|
||||
}
|
||||
}
|
||||
|
||||
// Magic FM
|
||||
if (nameLower.includes('magic') && nameLower.includes('fm')) {
|
||||
const metadata = await getMagicFMMetadata();
|
||||
if (metadata) {
|
||||
logger.info(`[RadioMetadata] Found metadata from ${metadata.source}`);
|
||||
return metadata;
|
||||
}
|
||||
}
|
||||
|
||||
// Kiss FM
|
||||
if (nameLower.includes('kiss') && nameLower.includes('fm')) {
|
||||
const metadata = await getKissFMMetadata();
|
||||
if (metadata) {
|
||||
logger.info(`[RadioMetadata] Found metadata from ${metadata.source}`);
|
||||
return metadata;
|
||||
}
|
||||
}
|
||||
|
||||
// Pro FM
|
||||
if (nameLower.includes('pro') && nameLower.includes('fm')) {
|
||||
const metadata = await getProFMMetadata();
|
||||
if (metadata) {
|
||||
logger.info(`[RadioMetadata] Found metadata from ${metadata.source}`);
|
||||
return metadata;
|
||||
}
|
||||
}
|
||||
|
||||
// Fallback to RadioBrowser
|
||||
const metadata = await getRadioBrowserMetadata(channelName);
|
||||
if (metadata) {
|
||||
logger.info(`[RadioMetadata] Found metadata from ${metadata.source}`);
|
||||
return metadata;
|
||||
}
|
||||
|
||||
} catch (error) {
|
||||
logger.error(`[RadioMetadata] Error fetching metadata: ${error.message}`);
|
||||
}
|
||||
|
||||
logger.info(`[RadioMetadata] No external metadata found for ${channelName}`);
|
||||
return null;
|
||||
}
|
||||
|
||||
module.exports = {
|
||||
getRadioStationMetadata
|
||||
};
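
Usage sketch (illustrative): a now-playing lookup that surfaces metadata for radio channels. The channel object and any surrounding Express route are assumptions; the metadata call matches the export above.

const { getRadioStationMetadata } = require('./radioMetadata');

// e.g. router.get('/channels/:id/now-playing', ...) could delegate to this helper
async function getNowPlaying(channel) {
  const metadata = await getRadioStationMetadata(channel.name, channel.url);
  if (!metadata) {
    return { title: null, artist: null, source: null };
  }
  return metadata; // { title, artist, source }
}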
858
backend/utils/responseProtocolManager.js
Normal file
@ -0,0 +1,858 @@
/**
|
||||
* Response Protocol Manager
|
||||
* Automated response protocols for security incident handling
|
||||
* CWE-778 Compliance: Logs all automated responses and protocol executions
|
||||
*/
|
||||
|
||||
const logger = require('./logger');
|
||||
const logAggregator = require('./logAggregator');
|
||||
const { db } = require('../database/db');
|
||||
const EventEmitter = require('events');
|
||||
|
||||
class ResponseProtocolManager extends EventEmitter {
|
||||
constructor() {
|
||||
super();
|
||||
this.protocols = new Map();
|
||||
this.executionHistory = new Map();
|
||||
this.initialize();
|
||||
}
|
||||
|
||||
/**
|
||||
* Initialize response protocol manager
|
||||
*/
|
||||
async initialize() {
|
||||
await this.createProtocolsTable();
|
||||
await this.createExecutionHistoryTable();
|
||||
await this.loadProtocols();
|
||||
|
||||
logger.info('[ResponseProtocolManager] Initialized with automated response protocols');
|
||||
|
||||
// Log initialization (CWE-778)
|
||||
logAggregator.aggregate('response_protocol_manager', 'info', 'security', 'Response protocol manager initialized', {
|
||||
totalProtocols: this.protocols.size
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Create protocols table
|
||||
*/
|
||||
async createProtocolsTable() {
|
||||
return new Promise((resolve, reject) => {
|
||||
db.run(`
|
||||
CREATE TABLE IF NOT EXISTS response_protocols (
|
||||
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
||||
protocol_id TEXT UNIQUE NOT NULL,
|
||||
name TEXT NOT NULL,
|
||||
description TEXT,
|
||||
trigger_type TEXT NOT NULL,
|
||||
trigger_condition TEXT NOT NULL,
|
||||
actions TEXT NOT NULL,
|
||||
severity TEXT NOT NULL,
|
||||
enabled INTEGER DEFAULT 1,
|
||||
auto_execute INTEGER DEFAULT 0,
|
||||
cooldown_minutes INTEGER DEFAULT 60,
|
||||
created_at DATETIME DEFAULT CURRENT_TIMESTAMP,
|
||||
updated_at DATETIME DEFAULT CURRENT_TIMESTAMP
|
||||
)
|
||||
`, async (err) => {
|
||||
if (err) reject(err);
|
||||
else {
|
||||
db.run(`CREATE INDEX IF NOT EXISTS idx_protocols_trigger ON response_protocols(trigger_type, enabled)`);
|
||||
db.run(`CREATE INDEX IF NOT EXISTS idx_protocols_severity ON response_protocols(severity, enabled)`);
|
||||
await this.createDefaultProtocols();
|
||||
resolve();
|
||||
}
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Create execution history table
|
||||
*/
|
||||
async createExecutionHistoryTable() {
|
||||
return new Promise((resolve, reject) => {
|
||||
db.run(`
|
||||
CREATE TABLE IF NOT EXISTS protocol_executions (
|
||||
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
||||
execution_id TEXT UNIQUE NOT NULL,
|
||||
protocol_id TEXT NOT NULL,
|
||||
trigger_event TEXT NOT NULL,
|
||||
actions_executed TEXT NOT NULL,
|
||||
execution_status TEXT NOT NULL,
|
||||
execution_result TEXT,
|
||||
executed_by TEXT DEFAULT 'system',
|
||||
executed_at DATETIME DEFAULT CURRENT_TIMESTAMP
|
||||
)
|
||||
`, (err) => {
|
||||
if (err) reject(err);
|
||||
else {
|
||||
db.run(`CREATE INDEX IF NOT EXISTS idx_executions_protocol ON protocol_executions(protocol_id)`);
|
||||
db.run(`CREATE INDEX IF NOT EXISTS idx_executions_status ON protocol_executions(execution_status)`);
|
||||
resolve();
|
||||
}
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Create default response protocols
|
||||
*/
|
||||
async createDefaultProtocols() {
|
||||
const defaultProtocols = [
|
||||
{
|
||||
protocol_id: 'PROTOCOL-BRUTE-FORCE-RESPONSE',
|
||||
name: 'Brute Force Attack Response',
|
||||
description: 'Automated response to brute force attacks',
|
||||
trigger_type: 'anomaly',
|
||||
trigger_condition: JSON.stringify({ anomaly_type: 'brute_force_attack', severity: 'critical' }),
|
||||
actions: JSON.stringify([
|
||||
{ action: 'block_ip', duration_minutes: 60, reason: 'brute_force_attack' },
|
||||
{ action: 'notify_admin', channel: 'email', priority: 'high' },
|
||||
{ action: 'log_incident', category: 'security_breach' }
|
||||
]),
|
||||
severity: 'critical',
|
||||
auto_execute: 1,
|
||||
cooldown_minutes: 30
|
||||
},
|
||||
{
|
||||
protocol_id: 'PROTOCOL-CREDENTIAL-STUFFING-RESPONSE',
|
||||
name: 'Credential Stuffing Response',
|
||||
description: 'Automated response to credential stuffing attacks',
|
||||
trigger_type: 'anomaly',
|
||||
trigger_condition: JSON.stringify({ anomaly_type: 'credential_stuffing', severity: 'critical' }),
|
||||
actions: JSON.stringify([
|
||||
{ action: 'block_ip', duration_minutes: 120, reason: 'credential_stuffing' },
|
||||
{ action: 'require_2fa', target: 'affected_accounts' },
|
||||
{ action: 'notify_admin', channel: 'email', priority: 'high' },
|
||||
{ action: 'log_incident', category: 'account_compromise' }
|
||||
]),
|
||||
severity: 'critical',
|
||||
auto_execute: 1,
|
||||
cooldown_minutes: 60
|
||||
},
|
||||
{
|
||||
protocol_id: 'PROTOCOL-PRIVILEGE-ESC-RESPONSE',
|
||||
name: 'Privilege Escalation Response',
|
||||
description: 'Automated response to privilege escalation attempts',
|
||||
trigger_type: 'anomaly',
|
||||
trigger_condition: JSON.stringify({ anomaly_type: 'privilege_escalation', severity: 'critical' }),
|
||||
actions: JSON.stringify([
|
||||
{ action: 'lock_account', target: 'attacker', duration_minutes: 240 },
|
||||
{ action: 'revoke_sessions', target: 'attacker' },
|
||||
{ action: 'notify_admin', channel: 'email', priority: 'critical' },
|
||||
{ action: 'escalate_incident', level: 'security_team' },
|
||||
{ action: 'log_incident', category: 'privilege_violation' }
|
||||
]),
|
||||
severity: 'critical',
|
||||
auto_execute: 1,
|
||||
cooldown_minutes: 15
|
||||
},
|
||||
{
|
||||
protocol_id: 'PROTOCOL-SUSPICIOUS-IP-RESPONSE',
|
||||
name: 'Suspicious IP Response',
|
||||
description: 'Automated response to suspicious IP activity',
|
||||
trigger_type: 'anomaly',
|
||||
trigger_condition: JSON.stringify({ anomaly_type: 'suspicious_ip', severity: 'high' }),
|
||||
actions: JSON.stringify([
|
||||
{ action: 'rate_limit_ip', limit: 10, window_minutes: 10 },
|
||||
{ action: 'notify_admin', channel: 'in_app', priority: 'medium' },
|
||||
{ action: 'log_incident', category: 'suspicious_activity' }
|
||||
]),
|
||||
severity: 'high',
|
||||
auto_execute: 1,
|
||||
cooldown_minutes: 60
|
||||
},
|
||||
{
|
||||
protocol_id: 'PROTOCOL-DATA-EXFIL-RESPONSE',
|
||||
name: 'Data Exfiltration Response',
|
||||
description: 'Automated response to data exfiltration attempts',
|
||||
trigger_type: 'anomaly',
|
||||
trigger_condition: JSON.stringify({ anomaly_type: 'data_exfiltration', severity: 'high' }),
|
||||
actions: JSON.stringify([
|
||||
{ action: 'block_ip', duration_minutes: 180, reason: 'data_exfiltration' },
|
||||
{ action: 'lock_account', target: 'attacker', duration_minutes: 360 },
|
||||
{ action: 'notify_admin', channel: 'email', priority: 'critical' },
|
||||
{ action: 'escalate_incident', level: 'data_protection_team' },
|
||||
{ action: 'log_incident', category: 'data_breach' }
|
||||
]),
|
||||
severity: 'high',
|
||||
auto_execute: 0,
|
||||
cooldown_minutes: 120
|
||||
},
|
||||
{
|
||||
protocol_id: 'PROTOCOL-MALICIOUS-SIGNATURE-RESPONSE',
|
||||
name: 'Malicious Signature Response',
|
||||
description: 'Automated response to malicious signature matches',
|
||||
trigger_type: 'signature',
|
||||
trigger_condition: JSON.stringify({ signature_type: 'attack_pattern', threat_level: 'critical', auto_block: true }),
|
||||
actions: JSON.stringify([
|
||||
{ action: 'block_ip', duration_minutes: 240, reason: 'malicious_signature' },
|
||||
{ action: 'notify_admin', channel: 'email', priority: 'high' },
|
||||
{ action: 'log_incident', category: 'attack_detected' }
|
||||
]),
|
||||
severity: 'critical',
|
||||
auto_execute: 1,
|
||||
cooldown_minutes: 30
|
||||
},
|
||||
{
|
||||
protocol_id: 'PROTOCOL-THREAT-SCORE-CRITICAL',
|
||||
name: 'Critical Threat Score Response',
|
||||
description: 'Automated response when threat score reaches critical level',
|
||||
trigger_type: 'threshold',
|
||||
trigger_condition: JSON.stringify({ metric: 'threat_score', operator: '>=', value: 80 }),
|
||||
actions: JSON.stringify([
|
||||
{ action: 'notify_admin', channel: 'email', priority: 'critical' },
|
||||
{ action: 'escalate_incident', level: 'security_team' },
|
||||
{ action: 'enable_enhanced_monitoring', duration_minutes: 120 },
|
||||
{ action: 'log_incident', category: 'threat_escalation' }
|
||||
]),
|
||||
severity: 'critical',
|
||||
auto_execute: 1,
|
||||
cooldown_minutes: 60
|
||||
}
|
||||
];
|
||||
|
||||
for (const protocol of defaultProtocols) {
|
||||
await new Promise((resolve, reject) => {
|
||||
db.run(
|
||||
`INSERT OR IGNORE INTO response_protocols
|
||||
(protocol_id, name, description, trigger_type, trigger_condition, actions, severity, auto_execute, cooldown_minutes)
|
||||
VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?)`,
|
||||
[
|
||||
protocol.protocol_id,
|
||||
protocol.name,
|
||||
protocol.description,
|
||||
protocol.trigger_type,
|
||||
protocol.trigger_condition,
|
||||
protocol.actions,
|
||||
protocol.severity,
|
||||
protocol.auto_execute,
|
||||
protocol.cooldown_minutes
|
||||
],
|
||||
(err) => {
|
||||
if (err) reject(err);
|
||||
else resolve();
|
||||
}
|
||||
);
|
||||
});
|
||||
}
|
||||
|
||||
logger.info(`[ResponseProtocolManager] Created ${defaultProtocols.length} default protocols`);
|
||||
}
|
||||
|
||||
/**
|
||||
* Load protocols from database into memory
|
||||
*/
|
||||
async loadProtocols() {
|
||||
return new Promise((resolve, reject) => {
|
||||
db.all(
|
||||
`SELECT * FROM response_protocols WHERE enabled = 1`,
|
||||
[],
|
||||
(err, rows) => {
|
||||
if (err) {
|
||||
reject(err);
|
||||
} else {
|
||||
this.protocols.clear();
|
||||
rows.forEach(row => {
|
||||
row.trigger_condition = JSON.parse(row.trigger_condition);
|
||||
row.actions = JSON.parse(row.actions);
|
||||
this.protocols.set(row.protocol_id, row);
|
||||
});
|
||||
logger.info(`[ResponseProtocolManager] Loaded ${rows.length} active protocols`);
|
||||
resolve();
|
||||
}
|
||||
}
|
||||
);
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Execute protocols based on trigger event
|
||||
* CWE-778: Logs all protocol executions
|
||||
*/
|
||||
async executeProtocols(triggerType, triggerEvent, context = {}) {
|
||||
const matchingProtocols = Array.from(this.protocols.values()).filter(
|
||||
p => p.trigger_type === triggerType && this.matchesTriggerCondition(p.trigger_condition, triggerEvent)
|
||||
);
|
||||
|
||||
if (matchingProtocols.length === 0) {
|
||||
return { executed: false, protocols: [] };
|
||||
}
|
||||
|
||||
const executedProtocols = [];
|
||||
|
||||
for (const protocol of matchingProtocols) {
|
||||
// Check cooldown
|
||||
if (this.isInCooldown(protocol.protocol_id)) {
|
||||
logger.info(`[ResponseProtocolManager] Protocol ${protocol.protocol_id} in cooldown, skipping`);
|
||||
continue;
|
||||
}
|
||||
|
||||
// Check if auto-execute is enabled
|
||||
if (!protocol.auto_execute) {
|
||||
logger.info(`[ResponseProtocolManager] Protocol ${protocol.protocol_id} requires manual execution, skipping`);
|
||||
continue;
|
||||
}
|
||||
|
||||
// Execute protocol
|
||||
const executionResult = await this.executeProtocolActions(protocol, triggerEvent, context);
|
||||
executedProtocols.push(executionResult);
|
||||
|
||||
// Set cooldown
|
||||
this.setCooldown(protocol.protocol_id, protocol.cooldown_minutes);
|
||||
|
||||
// Log protocol execution (CWE-778)
|
||||
logAggregator.aggregate('response_protocol_manager', 'warn', 'security', 'Response protocol executed', {
|
||||
protocolId: protocol.protocol_id,
|
||||
protocolName: protocol.name,
|
||||
triggerType,
|
||||
triggerEvent: JSON.stringify(triggerEvent).substring(0, 200),
|
||||
actionsExecuted: executionResult.actionsExecuted.length,
|
||||
executionStatus: executionResult.status,
|
||||
context
|
||||
});
|
||||
|
||||
logger.warn(`[ResponseProtocolManager] Protocol executed: ${protocol.name} (${executionResult.status})`);
|
||||
}
|
||||
|
||||
return {
|
||||
executed: executedProtocols.length > 0,
|
||||
protocols: executedProtocols
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Execute protocol actions
|
||||
*/
|
||||
async executeProtocolActions(protocol, triggerEvent, context) {
|
||||
const executionId = `EXEC-${Date.now()}-${Math.random().toString(36).substr(2, 9).toUpperCase()}`;
|
||||
const actionsExecuted = [];
|
||||
const actionResults = [];
|
||||
|
||||
for (const action of protocol.actions) {
|
||||
try {
|
||||
const result = await this.executeAction(action, triggerEvent, context);
|
||||
actionsExecuted.push(action.action);
|
||||
actionResults.push({ action: action.action, status: 'success', result });
|
||||
|
||||
// Emit event for action execution
|
||||
this.emit('action_executed', {
|
||||
protocolId: protocol.protocol_id,
|
||||
action: action.action,
|
||||
result
|
||||
});
|
||||
} catch (error) {
|
||||
logger.error(`[ResponseProtocolManager] Action execution failed: ${action.action}`, error);
|
||||
actionResults.push({ action: action.action, status: 'failed', error: error.message });
|
||||
}
|
||||
}
|
||||
|
||||
const executionStatus = actionResults.every(r => r.status === 'success') ? 'success' : 'partial';
|
||||
|
||||
// Save execution history
|
||||
await this.saveExecutionHistory({
|
||||
executionId,
|
||||
protocolId: protocol.protocol_id,
|
||||
triggerEvent: JSON.stringify(triggerEvent),
|
||||
actionsExecuted: JSON.stringify(actionsExecuted),
|
||||
executionStatus,
|
||||
executionResult: JSON.stringify(actionResults)
|
||||
});
|
||||
|
||||
return {
|
||||
executionId,
|
||||
protocolId: protocol.protocol_id,
|
||||
protocolName: protocol.name,
|
||||
actionsExecuted,
|
||||
actionResults,
|
||||
status: executionStatus
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Execute individual action
|
||||
*/
|
||||
async executeAction(action, triggerEvent, context) {
|
||||
switch (action.action) {
|
||||
case 'block_ip':
|
||||
return await this.actionBlockIP(action, triggerEvent, context);
|
||||
case 'lock_account':
|
||||
return await this.actionLockAccount(action, triggerEvent, context);
|
||||
case 'revoke_sessions':
|
||||
return await this.actionRevokeSessions(action, triggerEvent, context);
|
||||
case 'require_2fa':
|
||||
return await this.actionRequire2FA(action, triggerEvent, context);
|
||||
case 'rate_limit_ip':
|
||||
return await this.actionRateLimitIP(action, triggerEvent, context);
|
||||
case 'notify_admin':
|
||||
return await this.actionNotifyAdmin(action, triggerEvent, context);
|
||||
case 'escalate_incident':
|
||||
return await this.actionEscalateIncident(action, triggerEvent, context);
|
||||
case 'log_incident':
|
||||
return await this.actionLogIncident(action, triggerEvent, context);
|
||||
case 'enable_enhanced_monitoring':
|
||||
return await this.actionEnableEnhancedMonitoring(action, triggerEvent, context);
|
||||
default:
|
||||
throw new Error(`Unknown action: ${action.action}`);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Action: Block IP address
|
||||
*/
|
||||
async actionBlockIP(action, triggerEvent, context) {
|
||||
const ipAddress = context.ip_address || triggerEvent.ip_address;
|
||||
const duration = action.duration_minutes || 60;
|
||||
const reason = action.reason || 'security_violation';
|
||||
|
||||
if (!ipAddress) {
|
||||
throw new Error('No IP address provided for blocking');
|
||||
}
|
||||
|
||||
// TODO: Implement actual IP blocking (firewall rules, rate limiter, etc.)
|
||||
logger.warn(`[ResponseProtocolManager] Action: Block IP ${ipAddress} for ${duration} minutes (reason: ${reason})`);
|
||||
|
||||
return {
|
||||
action: 'block_ip',
|
||||
ipAddress,
|
||||
duration,
|
||||
reason,
|
||||
expiresAt: new Date(Date.now() + duration * 60 * 1000).toISOString()
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Action: Lock user account
|
||||
*/
|
||||
async actionLockAccount(action, triggerEvent, context) {
|
||||
const userId = context.user_id || triggerEvent.user_id;
|
||||
const duration = action.duration_minutes || 120;
|
||||
|
||||
if (!userId) {
|
||||
throw new Error('No user ID provided for account locking');
|
||||
}
|
||||
|
||||
// TODO: Implement actual account locking
|
||||
logger.warn(`[ResponseProtocolManager] Action: Lock account ${userId} for ${duration} minutes`);
|
||||
|
||||
return {
|
||||
action: 'lock_account',
|
||||
userId,
|
||||
duration,
|
||||
lockedUntil: new Date(Date.now() + duration * 60 * 1000).toISOString()
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Action: Revoke all sessions
|
||||
*/
|
||||
async actionRevokeSessions(action, triggerEvent, context) {
|
||||
const userId = context.user_id || triggerEvent.user_id;
|
||||
|
||||
if (!userId) {
|
||||
throw new Error('No user ID provided for session revocation');
|
||||
}
|
||||
|
||||
// TODO: Implement actual session revocation
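// Sketch of one option (commented out): delete the user's rows from a sessions table.
// The `sessions` table and `user_id` column are assumptions about the schema.
//
//   await new Promise((resolve, reject) => {
//     db.run(`DELETE FROM sessions WHERE user_id = ?`, [userId],
//       (err) => err ? reject(err) : resolve());
//   });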
|
||||
logger.warn(`[ResponseProtocolManager] Action: Revoke all sessions for user ${userId}`);
|
||||
|
||||
return { action: 'revoke_sessions', userId, revokedAt: new Date().toISOString() };
|
||||
}
|
||||
|
||||
/**
|
||||
* Action: Require 2FA for affected accounts
|
||||
*/
|
||||
async actionRequire2FA(action, triggerEvent, context) {
|
||||
const target = action.target || 'affected_accounts';
|
||||
|
||||
// TODO: Implement 2FA requirement
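// Sketch of one option (commented out): flag the affected account so the login flow can
// force 2FA enrolment. The `two_factor_required` column is an assumed schema field.
//
//   await new Promise((resolve, reject) => {
//     db.run(`UPDATE users SET two_factor_required = 1 WHERE id = ?`, [context.user_id],
//       (err) => err ? reject(err) : resolve());
//   });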
|
||||
logger.warn(`[ResponseProtocolManager] Action: Require 2FA for ${target}`);
|
||||
|
||||
return { action: 'require_2fa', target, enabledAt: new Date().toISOString() };
|
||||
}
|
||||
|
||||
/**
|
||||
* Action: Rate limit IP
|
||||
*/
|
||||
async actionRateLimitIP(action, triggerEvent, context) {
|
||||
const ipAddress = context.ip_address || triggerEvent.ip_address;
|
||||
const limit = action.limit || 10;
|
||||
const windowMinutes = action.window_minutes || 10;
|
||||
|
||||
if (!ipAddress) {
|
||||
throw new Error('No IP address provided for rate limiting');
|
||||
}
|
||||
|
||||
// TODO: Implement actual rate limiting
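// Sketch of one option (commented out): record a per-IP window in memory for request
// middleware to enforce. The `rateLimits` map is hypothetical, and a shared store would
// be needed if the app runs in more than one process.
//
//   this.rateLimits = this.rateLimits || new Map();
//   this.rateLimits.set(ipAddress, { limit, count: 0, resetAt: Date.now() + windowMinutes * 60 * 1000 });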
|
||||
logger.warn(`[ResponseProtocolManager] Action: Rate limit IP ${ipAddress} to ${limit} requests per ${windowMinutes} minutes`);
|
||||
|
||||
return { action: 'rate_limit_ip', ipAddress, limit, windowMinutes };
|
||||
}
|
||||
|
||||
/**
|
||||
* Action: Notify administrator
|
||||
*/
|
||||
async actionNotifyAdmin(action, triggerEvent, context) {
|
||||
const channel = action.channel || 'in_app';
|
||||
const priority = action.priority || 'medium';
|
||||
|
||||
// TODO: Implement actual admin notification (email, SMS, webhook)
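// Sketch of one option (commented out): for the 'webhook' channel a plain HTTPS POST is
// enough (global fetch, Node 18+). The ADMIN_WEBHOOK_URL variable is an assumption; the
// 'in_app' channel could feed the existing alert pipeline instead, required lazily to
// avoid a circular dependency.
//
//   if (channel === 'webhook' && process.env.ADMIN_WEBHOOK_URL) {
//     await fetch(process.env.ADMIN_WEBHOOK_URL, {
//       method: 'POST',
//       headers: { 'Content-Type': 'application/json' },
//       body: JSON.stringify({ priority, triggerEvent, context })
//     });
//   }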
|
||||
logger.warn(`[ResponseProtocolManager] Action: Notify admin via ${channel} (priority: ${priority})`);
|
||||
|
||||
return { action: 'notify_admin', channel, priority, notifiedAt: new Date().toISOString() };
|
||||
}
|
||||
|
||||
/**
|
||||
* Action: Escalate incident
|
||||
*/
|
||||
async actionEscalateIncident(action, triggerEvent, context) {
|
||||
const level = action.level || 'security_team';
|
||||
|
||||
// TODO: Implement actual incident escalation
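// Sketch of one option (commented out): record the escalation through the same log
// aggregation path the other actions in this file use, so the alerting layer can route it.
//
//   logAggregator.aggregate('response_protocol_manager', 'error', 'security',
//     `Incident escalated to ${level}`, { triggerEvent, context });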
|
||||
logger.warn(`[ResponseProtocolManager] Action: Escalate incident to ${level}`);
|
||||
|
||||
return { action: 'escalate_incident', level, escalatedAt: new Date().toISOString() };
|
||||
}
|
||||
|
||||
/**
|
||||
* Action: Log incident
|
||||
*/
|
||||
async actionLogIncident(action, triggerEvent, context) {
|
||||
const category = action.category || 'security_incident';
|
||||
|
||||
logAggregator.aggregate('response_protocol_manager', 'error', category, 'Security incident logged', {
|
||||
triggerEvent,
|
||||
context
|
||||
});
|
||||
|
||||
return { action: 'log_incident', category, loggedAt: new Date().toISOString() };
|
||||
}
|
||||
|
||||
/**
|
||||
* Action: Enable enhanced monitoring
|
||||
*/
|
||||
async actionEnableEnhancedMonitoring(action, triggerEvent, context) {
|
||||
const duration = action.duration_minutes || 120;
|
||||
|
||||
// TODO: Implement enhanced monitoring mode
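// Sketch of one option (commented out): flip a flag other components can poll and clear it
// when the window ends. The `monitoringMode` property is hypothetical.
//
//   this.monitoringMode = { enhanced: true, expiresAt: Date.now() + duration * 60 * 1000 };
//   setTimeout(() => { this.monitoringMode = { enhanced: false }; }, duration * 60 * 1000);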
|
||||
logger.warn(`[ResponseProtocolManager] Action: Enable enhanced monitoring for ${duration} minutes`);
|
||||
|
||||
return {
|
||||
action: 'enable_enhanced_monitoring',
|
||||
duration,
|
||||
expiresAt: new Date(Date.now() + duration * 60 * 1000).toISOString()
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Check if trigger condition matches event
|
||||
*/
|
||||
matchesTriggerCondition(condition, event) {
|
||||
for (const [key, value] of Object.entries(condition)) {
|
||||
if (event[key] !== value) {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
return true;
|
||||
}
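// Example: matchesTriggerCondition({ anomaly_type: 'brute_force_attack' },
//   { anomaly_type: 'brute_force_attack', ip_address: '203.0.113.5' }) === true
// Every key in the condition must be strictly equal on the event; extra event fields are ignored.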
|
||||
|
||||
/**
|
||||
* Check if protocol is in cooldown
|
||||
*/
|
||||
isInCooldown(protocolId) {
|
||||
const lastExecution = this.executionHistory.get(protocolId);
|
||||
if (!lastExecution) return false;
|
||||
|
||||
const cooldownEnd = new Date(lastExecution.cooldownUntil);
|
||||
return Date.now() < cooldownEnd.getTime();
|
||||
}
|
||||
|
||||
/**
|
||||
* Set cooldown for protocol
|
||||
*/
|
||||
setCooldown(protocolId, cooldownMinutes) {
|
||||
this.executionHistory.set(protocolId, {
|
||||
lastExecuted: new Date().toISOString(),
|
||||
cooldownUntil: new Date(Date.now() + cooldownMinutes * 60 * 1000).toISOString()
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Save execution history to database
|
||||
*/
|
||||
async saveExecutionHistory(data) {
|
||||
return new Promise((resolve, reject) => {
|
||||
db.run(
|
||||
`INSERT INTO protocol_executions
|
||||
(execution_id, protocol_id, trigger_event, actions_executed, execution_status, execution_result)
|
||||
VALUES (?, ?, ?, ?, ?, ?)`,
|
||||
[
|
||||
data.executionId,
|
||||
data.protocolId,
|
||||
data.triggerEvent,
|
||||
data.actionsExecuted,
|
||||
data.executionStatus,
|
||||
data.executionResult
|
||||
],
|
||||
(err) => {
|
||||
if (err) reject(err);
|
||||
else resolve();
|
||||
}
|
||||
);
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Get all protocols
|
||||
*/
|
||||
async getProtocols(filters = {}) {
|
||||
const { triggerType, severity, enabled, limit = 100 } = filters;
|
||||
|
||||
let whereClause = [];
|
||||
let params = [];
|
||||
|
||||
if (triggerType) {
|
||||
whereClause.push('trigger_type = ?');
|
||||
params.push(triggerType);
|
||||
}
|
||||
|
||||
if (severity) {
|
||||
whereClause.push('severity = ?');
|
||||
params.push(severity);
|
||||
}
|
||||
|
||||
if (enabled !== undefined) {
|
||||
whereClause.push('enabled = ?');
|
||||
params.push(enabled ? 1 : 0);
|
||||
}
|
||||
|
||||
const where = whereClause.length > 0 ? `WHERE ${whereClause.join(' AND ')}` : '';
|
||||
params.push(limit);
|
||||
|
||||
return new Promise((resolve, reject) => {
|
||||
db.all(
|
||||
`SELECT * FROM response_protocols ${where}
|
||||
ORDER BY severity DESC, trigger_type
|
||||
LIMIT ?`,
|
||||
params,
|
||||
(err, rows) => {
|
||||
if (err) reject(err);
|
||||
else resolve(rows);
|
||||
}
|
||||
);
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Get protocol by ID
|
||||
*/
|
||||
async getProtocolById(protocolId) {
|
||||
return new Promise((resolve, reject) => {
|
||||
db.get(
|
||||
`SELECT * FROM response_protocols WHERE protocol_id = ?`,
|
||||
[protocolId],
|
||||
(err, row) => {
|
||||
if (err) reject(err);
|
||||
else resolve(row);
|
||||
}
|
||||
);
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Get execution history
|
||||
*/
|
||||
async getExecutionHistory(filters = {}) {
|
||||
const { protocolId, status, limit = 100 } = filters;
|
||||
|
||||
let whereClause = [];
|
||||
let params = [];
|
||||
|
||||
if (protocolId) {
|
||||
whereClause.push('protocol_id = ?');
|
||||
params.push(protocolId);
|
||||
}
|
||||
|
||||
if (status) {
|
||||
whereClause.push('execution_status = ?');
|
||||
params.push(status);
|
||||
}
|
||||
|
||||
const where = whereClause.length > 0 ? `WHERE ${whereClause.join(' AND ')}` : '';
|
||||
params.push(limit);
|
||||
|
||||
return new Promise((resolve, reject) => {
|
||||
db.all(
|
||||
`SELECT * FROM protocol_executions ${where}
|
||||
ORDER BY executed_at DESC
|
||||
LIMIT ?`,
|
||||
params,
|
||||
(err, rows) => {
|
||||
if (err) reject(err);
|
||||
else resolve(rows);
|
||||
}
|
||||
);
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Create new protocol
|
||||
* CWE-778: Logs protocol creation
|
||||
*/
|
||||
async createProtocol(data, userId) {
|
||||
const protocolId = `PROTOCOL-${Date.now()}-${Math.random().toString(36).substr(2, 9).toUpperCase()}`;
|
||||
|
||||
return new Promise((resolve, reject) => {
|
||||
db.run(
|
||||
`INSERT INTO response_protocols
|
||||
(protocol_id, name, description, trigger_type, trigger_condition, actions, severity, enabled, auto_execute, cooldown_minutes)
|
||||
VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?)`,
|
||||
[
|
||||
protocolId,
|
||||
data.name,
|
||||
data.description || '',
|
||||
data.trigger_type,
|
||||
JSON.stringify(data.trigger_condition),
|
||||
JSON.stringify(data.actions),
|
||||
data.severity,
|
||||
data.enabled !== undefined ? (data.enabled ? 1 : 0) : 1,
|
||||
data.auto_execute !== undefined ? (data.auto_execute ? 1 : 0) : 0,
|
||||
data.cooldown_minutes || 60
|
||||
],
|
||||
async (err) => {
|
||||
if (err) {
|
||||
reject(err);
|
||||
} else {
|
||||
await this.loadProtocols();
|
||||
|
||||
// Log protocol creation (CWE-778)
|
||||
logAggregator.aggregate('response_protocol_manager', 'info', 'security', 'Response protocol created', {
|
||||
protocolId,
|
||||
userId,
|
||||
name: data.name,
|
||||
triggerType: data.trigger_type,
|
||||
severity: data.severity,
|
||||
autoExecute: data.auto_execute === 1
|
||||
});
|
||||
|
||||
logger.info(`[ResponseProtocolManager] Protocol created: ${protocolId} by user ${userId}`);
|
||||
resolve({ protocolId });
|
||||
}
|
||||
}
|
||||
);
|
||||
});
|
||||
}
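// Illustrative call showing the expected shape of `data` (values are examples, not a fixed vocabulary):
//   await responseProtocolManager.createProtocol({
//     name: 'Block brute force source',
//     trigger_type: 'anomaly',
//     trigger_condition: { anomaly_type: 'brute_force_attack' },
//     actions: [
//       { action: 'block_ip', duration_minutes: 60 },
//       { action: 'notify_admin', priority: 'high' }
//     ],
//     severity: 'high',
//     auto_execute: true,
//     cooldown_minutes: 30
//   }, adminUserId);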
|
||||
|
||||
/**
|
||||
* Update protocol
|
||||
* CWE-778: Logs protocol modifications
|
||||
*/
|
||||
async updateProtocol(protocolId, updates, userId) {
|
||||
const allowedFields = ['name', 'description', 'trigger_condition', 'actions', 'severity', 'enabled', 'auto_execute', 'cooldown_minutes'];
|
||||
const setClause = [];
|
||||
const params = [];
|
||||
|
||||
for (const [key, value] of Object.entries(updates)) {
|
||||
if (allowedFields.includes(key)) {
|
||||
setClause.push(`${key} = ?`);
|
||||
if (key === 'trigger_condition' || key === 'actions') {
|
||||
params.push(JSON.stringify(value));
|
||||
} else if (key === 'enabled' || key === 'auto_execute') {
|
||||
params.push(value ? 1 : 0);
|
||||
} else {
|
||||
params.push(value);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (setClause.length === 0) {
|
||||
throw new Error('No valid fields to update');
|
||||
}
|
||||
|
||||
setClause.push('updated_at = CURRENT_TIMESTAMP');
|
||||
params.push(protocolId);
|
||||
|
||||
return new Promise((resolve, reject) => {
|
||||
db.run(
|
||||
`UPDATE response_protocols
|
||||
SET ${setClause.join(', ')}
|
||||
WHERE protocol_id = ?`,
|
||||
params,
|
||||
async (err) => {
|
||||
if (err) {
|
||||
reject(err);
|
||||
} else {
|
||||
await this.loadProtocols();
|
||||
|
||||
// Log protocol update (CWE-778)
|
||||
logAggregator.aggregate('response_protocol_manager', 'info', 'security', 'Response protocol updated', {
|
||||
protocolId,
|
||||
userId,
|
||||
updates
|
||||
});
|
||||
|
||||
logger.info(`[ResponseProtocolManager] Protocol updated: ${protocolId} by user ${userId}`);
|
||||
resolve({ success: true });
|
||||
}
|
||||
}
|
||||
);
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Delete protocol
|
||||
* CWE-778: Logs protocol deletion
|
||||
*/
|
||||
async deleteProtocol(protocolId, userId) {
|
||||
return new Promise((resolve, reject) => {
|
||||
db.run(
|
||||
`DELETE FROM response_protocols WHERE protocol_id = ?`,
|
||||
[protocolId],
|
||||
async (err) => {
|
||||
if (err) {
|
||||
reject(err);
|
||||
} else {
|
||||
await this.loadProtocols();
|
||||
|
||||
// Log protocol deletion (CWE-778)
|
||||
logAggregator.aggregate('response_protocol_manager', 'warn', 'security', 'Response protocol deleted', {
|
||||
protocolId,
|
||||
userId
|
||||
});
|
||||
|
||||
logger.info(`[ResponseProtocolManager] Protocol deleted: ${protocolId} by user ${userId}`);
|
||||
resolve({ success: true });
|
||||
}
|
||||
}
|
||||
);
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Get protocol statistics
|
||||
*/
|
||||
async getStatistics() {
|
||||
return new Promise((resolve, reject) => {
|
||||
db.get(
|
||||
`SELECT
|
||||
COUNT(*) as total,
|
||||
SUM(CASE WHEN enabled = 1 THEN 1 ELSE 0 END) as enabled,
|
||||
SUM(CASE WHEN enabled = 0 THEN 1 ELSE 0 END) as disabled,
|
||||
SUM(CASE WHEN auto_execute = 1 THEN 1 ELSE 0 END) as auto_execute_enabled,
|
||||
COUNT(DISTINCT trigger_type) as unique_triggers,
|
||||
COUNT(DISTINCT severity) as unique_severities
|
||||
FROM response_protocols`,
|
||||
[],
|
||||
(err, row) => {
|
||||
if (err) reject(err);
|
||||
else resolve(row);
|
||||
}
|
||||
);
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
// Create singleton instance
|
||||
const responseProtocolManager = new ResponseProtocolManager();
|
||||
|
||||
module.exports = responseProtocolManager;
|
||||
565
backend/utils/riskSignatureManager.js
Normal file
@@ -0,0 +1,565 @@
/**
|
||||
* Risk Signature Manager
|
||||
* Predefined risk signatures for threat detection
|
||||
* CWE-778 Compliance: Logs all signature matches and management operations
|
||||
*/
|
||||
|
||||
const logger = require('./logger');
|
||||
const logAggregator = require('./logAggregator');
|
||||
const { db } = require('../database/db');
|
||||
|
||||
class RiskSignatureManager {
|
||||
constructor() {
|
||||
this.signatures = new Map();
|
||||
this.initialize();
|
||||
}
|
||||
|
||||
/**
|
||||
* Initialize risk signature manager
|
||||
*/
|
||||
async initialize() {
|
||||
await this.createSignaturesTable();
|
||||
await this.loadSignatures();
|
||||
|
||||
logger.info('[RiskSignatureManager] Initialized with predefined risk signatures');
|
||||
|
||||
// Log initialization (CWE-778)
|
||||
logAggregator.aggregate('risk_signature_manager', 'info', 'security', 'Risk signature manager initialized', {
|
||||
totalSignatures: this.signatures.size
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Create risk signatures table
|
||||
*/
|
||||
async createSignaturesTable() {
|
||||
return new Promise((resolve, reject) => {
|
||||
db.run(`
|
||||
CREATE TABLE IF NOT EXISTS risk_signatures (
|
||||
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
||||
signature_id TEXT UNIQUE NOT NULL,
|
||||
name TEXT NOT NULL,
|
||||
description TEXT,
|
||||
signature_type TEXT NOT NULL,
|
||||
pattern TEXT NOT NULL,
|
||||
match_type TEXT NOT NULL,
|
||||
threat_level TEXT NOT NULL,
|
||||
confidence REAL DEFAULT 0.8,
|
||||
enabled INTEGER DEFAULT 1,
|
||||
auto_block INTEGER DEFAULT 0,
|
||||
created_at DATETIME DEFAULT CURRENT_TIMESTAMP,
|
||||
updated_at DATETIME DEFAULT CURRENT_TIMESTAMP
|
||||
)
|
||||
`, async (err) => {
|
||||
if (err) reject(err);
|
||||
else {
|
||||
db.run(`CREATE INDEX IF NOT EXISTS idx_signatures_type ON risk_signatures(signature_type, enabled)`);
|
||||
db.run(`CREATE INDEX IF NOT EXISTS idx_signatures_threat ON risk_signatures(threat_level, enabled)`);
|
||||
await this.createDefaultSignatures();
|
||||
resolve();
|
||||
}
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Create default risk signatures
|
||||
*/
|
||||
async createDefaultSignatures() {
|
||||
const defaultSignatures = [
|
||||
// IP-based signatures
|
||||
{
|
||||
signature_id: 'SIG-IP-TOR',
|
||||
name: 'TOR Exit Node',
|
||||
description: 'Placeholder pattern (currently matches RFC1918 private ranges); replace with a real TOR exit-node feed',
|
||||
signature_type: 'ip_address',
|
||||
pattern: '(^10\\.\\d+\\.\\d+\\.\\d+|^172\\.(1[6-9]|2[0-9]|3[01])\\.\\d+\\.\\d+|^192\\.168\\.\\d+\\.\\d+)',
|
||||
match_type: 'regex',
|
||||
threat_level: 'high',
|
||||
confidence: 0.9,
|
||||
auto_block: 0
|
||||
},
|
||||
{
|
||||
signature_id: 'SIG-IP-SUSPICIOUS',
|
||||
name: 'Suspicious IP Range',
|
||||
description: 'IP from suspicious geographic region',
|
||||
signature_type: 'ip_address',
|
||||
pattern: '',
|
||||
match_type: 'custom',
|
||||
threat_level: 'medium',
|
||||
confidence: 0.7,
|
||||
auto_block: 0
|
||||
},
|
||||
// User-agent signatures
|
||||
{
|
||||
signature_id: 'SIG-UA-BOT-MALICIOUS',
|
||||
name: 'Malicious Bot User-Agent',
|
||||
description: 'Known malicious bot signatures',
|
||||
signature_type: 'user_agent',
|
||||
pattern: '(scrapy|python-requests|curl|wget|nikto|sqlmap|havij|acunetix|nessus|openvas)',
|
||||
match_type: 'regex_case_insensitive',
|
||||
threat_level: 'high',
|
||||
confidence: 0.95,
|
||||
auto_block: 1
|
||||
},
|
||||
{
|
||||
signature_id: 'SIG-UA-VULNERABILITY-SCANNER',
|
||||
name: 'Vulnerability Scanner',
|
||||
description: 'Automated vulnerability scanning tools',
|
||||
signature_type: 'user_agent',
|
||||
pattern: '(nmap|masscan|zap|burp|metasploit|w3af|arachni)',
|
||||
match_type: 'regex_case_insensitive',
|
||||
threat_level: 'critical',
|
||||
confidence: 0.99,
|
||||
auto_block: 1
|
||||
},
|
||||
// Attack pattern signatures
|
||||
{
|
||||
signature_id: 'SIG-ATTACK-SQL-INJECTION',
|
||||
name: 'SQL Injection Pattern',
|
||||
description: 'Common SQL injection attack patterns',
|
||||
signature_type: 'attack_pattern',
|
||||
pattern: '(union.*select|select.*from|insert.*into|delete.*from|drop.*table|exec.*xp_|script.*alert)',
|
||||
match_type: 'regex_case_insensitive',
|
||||
threat_level: 'critical',
|
||||
confidence: 0.85,
|
||||
auto_block: 1
|
||||
},
|
||||
{
|
||||
signature_id: 'SIG-ATTACK-XSS',
|
||||
name: 'Cross-Site Scripting Pattern',
|
||||
description: 'XSS attack patterns',
|
||||
signature_type: 'attack_pattern',
|
||||
pattern: '(<script|javascript:|onerror=|onload=|<iframe|eval\\(|alert\\()',
|
||||
match_type: 'regex_case_insensitive',
|
||||
threat_level: 'high',
|
||||
confidence: 0.8,
|
||||
auto_block: 1
|
||||
},
|
||||
{
|
||||
signature_id: 'SIG-ATTACK-PATH-TRAVERSAL',
|
||||
name: 'Path Traversal Pattern',
|
||||
description: 'Directory traversal attack patterns',
|
||||
signature_type: 'attack_pattern',
|
||||
pattern: '(\\.\\./|\\.\\.\\\\|%2e%2e/|%252e%252e/)', // ../ , ..\\ and URL-encoded variants
|
||||
match_type: 'regex_case_insensitive',
|
||||
threat_level: 'high',
|
||||
confidence: 0.9,
|
||||
auto_block: 1
|
||||
},
|
||||
{
|
||||
signature_id: 'SIG-ATTACK-COMMAND-INJECTION',
|
||||
name: 'Command Injection Pattern',
|
||||
description: 'OS command injection patterns',
|
||||
signature_type: 'attack_pattern',
|
||||
pattern: '(;\\s*(rm|cat|ls|wget|curl|bash|sh|cmd|powershell)|\\|\\s*(nc|netcat))',
|
||||
match_type: 'regex_case_insensitive',
|
||||
threat_level: 'critical',
|
||||
confidence: 0.95,
|
||||
auto_block: 1
|
||||
},
|
||||
// Behavioral signatures
|
||||
{
|
||||
signature_id: 'SIG-BEHAVIOR-BRUTE-FORCE',
|
||||
name: 'Brute Force Behavior',
|
||||
description: 'Rapid repeated authentication attempts',
|
||||
signature_type: 'behavior',
|
||||
pattern: 'failed_login_rate',
|
||||
match_type: 'custom',
|
||||
threat_level: 'critical',
|
||||
confidence: 0.9,
|
||||
auto_block: 1
|
||||
},
|
||||
{
|
||||
signature_id: 'SIG-BEHAVIOR-CREDENTIAL-STUFFING',
|
||||
name: 'Credential Stuffing Behavior',
|
||||
description: 'Multiple username attempts from single source',
|
||||
signature_type: 'behavior',
|
||||
pattern: 'unique_username_rate',
|
||||
match_type: 'custom',
|
||||
threat_level: 'high',
|
||||
confidence: 0.85,
|
||||
auto_block: 1
|
||||
},
|
||||
{
|
||||
signature_id: 'SIG-BEHAVIOR-PRIVILEGE-ESC',
|
||||
name: 'Privilege Escalation Behavior',
|
||||
description: 'Repeated unauthorized access attempts',
|
||||
signature_type: 'behavior',
|
||||
pattern: 'authorization_failure_rate',
|
||||
match_type: 'custom',
|
||||
threat_level: 'critical',
|
||||
confidence: 0.95,
|
||||
auto_block: 1
|
||||
},
|
||||
{
|
||||
signature_id: 'SIG-BEHAVIOR-DATA-EXFIL',
|
||||
name: 'Data Exfiltration Behavior',
|
||||
description: 'Unusual data download patterns',
|
||||
signature_type: 'behavior',
|
||||
pattern: 'download_volume_rate',
|
||||
match_type: 'custom',
|
||||
threat_level: 'high',
|
||||
confidence: 0.8,
|
||||
auto_block: 0
|
||||
}
|
||||
];
|
||||
|
||||
for (const signature of defaultSignatures) {
|
||||
await new Promise((resolve, reject) => {
|
||||
db.run(
|
||||
`INSERT OR IGNORE INTO risk_signatures
|
||||
(signature_id, name, description, signature_type, pattern, match_type, threat_level, confidence, auto_block)
|
||||
VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?)`,
|
||||
[
|
||||
signature.signature_id,
|
||||
signature.name,
|
||||
signature.description,
|
||||
signature.signature_type,
|
||||
signature.pattern,
|
||||
signature.match_type,
|
||||
signature.threat_level,
|
||||
signature.confidence,
|
||||
signature.auto_block
|
||||
],
|
||||
(err) => {
|
||||
if (err) reject(err);
|
||||
else resolve();
|
||||
}
|
||||
);
|
||||
});
|
||||
}
|
||||
|
||||
logger.info(`[RiskSignatureManager] Created ${defaultSignatures.length} default signatures`);
|
||||
}
|
||||
|
||||
/**
|
||||
* Load signatures from database into memory
|
||||
*/
|
||||
async loadSignatures() {
|
||||
return new Promise((resolve, reject) => {
|
||||
db.all(
|
||||
`SELECT * FROM risk_signatures WHERE enabled = 1`,
|
||||
[],
|
||||
(err, rows) => {
|
||||
if (err) {
|
||||
reject(err);
|
||||
} else {
|
||||
this.signatures.clear();
|
||||
rows.forEach(row => {
|
||||
this.signatures.set(row.signature_id, row);
|
||||
});
|
||||
logger.info(`[RiskSignatureManager] Loaded ${rows.length} active signatures`);
|
||||
resolve();
|
||||
}
|
||||
}
|
||||
);
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Match input against risk signatures
|
||||
* CWE-778: Logs all signature matches
|
||||
*/
|
||||
async matchSignatures(input, signatureType, context = {}) {
|
||||
const matchingSignatures = Array.from(this.signatures.values()).filter(
|
||||
s => s.signature_type === signatureType
|
||||
);
|
||||
|
||||
if (matchingSignatures.length === 0) {
|
||||
return { matched: false, signatures: [] };
|
||||
}
|
||||
|
||||
const matches = [];
|
||||
|
||||
for (const signature of matchingSignatures) {
|
||||
const matched = this.testPattern(input, signature.pattern, signature.match_type);
|
||||
|
||||
if (matched) {
|
||||
matches.push({
|
||||
...signature,
|
||||
matchedInput: input,
|
||||
context
|
||||
});
|
||||
|
||||
// Log signature match (CWE-778)
|
||||
logAggregator.aggregate('risk_signature_manager', 'warn', 'security', 'Risk signature matched', {
|
||||
signatureId: signature.signature_id,
|
||||
signatureName: signature.name,
|
||||
signatureType,
|
||||
threatLevel: signature.threat_level,
|
||||
confidence: signature.confidence,
|
||||
autoBlock: signature.auto_block === 1,
|
||||
matchedInput: input.substring(0, 100), // Truncate for logging
|
||||
context
|
||||
});
|
||||
|
||||
logger.warn(`[RiskSignatureManager] Signature matched: ${signature.name} (${signature.threat_level})`);
|
||||
}
|
||||
}
|
||||
|
||||
return {
|
||||
matched: matches.length > 0,
|
||||
signatures: matches,
|
||||
highestThreat: matches.length > 0 ? this.getHighestThreatLevel(matches) : null,
|
||||
shouldAutoBlock: matches.some(m => m.auto_block === 1)
|
||||
};
|
||||
}
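// Illustrative usage from request-handling code (req fields follow Express conventions):
//   const result = await riskSignatureManager.matchSignatures(
//     req.headers['user-agent'] || '', 'user_agent', { ip: req.ip, path: req.path });
//   if (result.shouldAutoBlock) { /* deny the request and raise an incident */ }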
|
||||
|
||||
/**
|
||||
* Test pattern against input
|
||||
*/
|
||||
testPattern(input, pattern, matchType) {
|
||||
try {
|
||||
switch (matchType) {
|
||||
case 'regex':
|
||||
return new RegExp(pattern).test(input);
|
||||
case 'regex_case_insensitive':
|
||||
return new RegExp(pattern, 'i').test(input);
|
||||
case 'exact':
|
||||
return input === pattern;
|
||||
case 'contains':
|
||||
return input.includes(pattern);
|
||||
case 'custom':
|
||||
// Custom patterns handled by specific detection methods
|
||||
return false;
|
||||
default:
|
||||
return false;
|
||||
}
|
||||
} catch (error) {
|
||||
logger.error(`[RiskSignatureManager] Pattern test error: ${error.message}`);
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Get highest threat level from matches
|
||||
*/
|
||||
getHighestThreatLevel(matches) {
|
||||
const threatLevels = { critical: 4, high: 3, medium: 2, low: 1 };
|
||||
let highest = 'low';
|
||||
let highestScore = 0;
|
||||
|
||||
for (const match of matches) {
|
||||
const score = threatLevels[match.threat_level] || 0;
|
||||
if (score > highestScore) {
|
||||
highestScore = score;
|
||||
highest = match.threat_level;
|
||||
}
|
||||
}
|
||||
|
||||
return highest;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get all signatures
|
||||
*/
|
||||
async getSignatures(filters = {}) {
|
||||
const { signatureType, threatLevel, enabled, limit = 100 } = filters;
|
||||
|
||||
let whereClause = [];
|
||||
let params = [];
|
||||
|
||||
if (signatureType) {
|
||||
whereClause.push('signature_type = ?');
|
||||
params.push(signatureType);
|
||||
}
|
||||
|
||||
if (threatLevel) {
|
||||
whereClause.push('threat_level = ?');
|
||||
params.push(threatLevel);
|
||||
}
|
||||
|
||||
if (enabled !== undefined) {
|
||||
whereClause.push('enabled = ?');
|
||||
params.push(enabled ? 1 : 0);
|
||||
}
|
||||
|
||||
const where = whereClause.length > 0 ? `WHERE ${whereClause.join(' AND ')}` : '';
|
||||
params.push(limit);
|
||||
|
||||
return new Promise((resolve, reject) => {
|
||||
db.all(
|
||||
`SELECT * FROM risk_signatures ${where}
|
||||
ORDER BY threat_level DESC, confidence DESC
|
||||
LIMIT ?`,
|
||||
params,
|
||||
(err, rows) => {
|
||||
if (err) reject(err);
|
||||
else resolve(rows);
|
||||
}
|
||||
);
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Get signature by ID
|
||||
*/
|
||||
async getSignatureById(signatureId) {
|
||||
return new Promise((resolve, reject) => {
|
||||
db.get(
|
||||
`SELECT * FROM risk_signatures WHERE signature_id = ?`,
|
||||
[signatureId],
|
||||
(err, row) => {
|
||||
if (err) reject(err);
|
||||
else resolve(row);
|
||||
}
|
||||
);
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Create new signature
|
||||
* CWE-778: Logs signature creation
|
||||
*/
|
||||
async createSignature(data, userId) {
|
||||
const signatureId = `SIG-${Date.now()}-${Math.random().toString(36).substr(2, 9).toUpperCase()}`;
|
||||
|
||||
return new Promise((resolve, reject) => {
|
||||
db.run(
|
||||
`INSERT INTO risk_signatures
|
||||
(signature_id, name, description, signature_type, pattern, match_type, threat_level, confidence, enabled, auto_block)
|
||||
VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?)`,
|
||||
[
|
||||
signatureId,
|
||||
data.name,
|
||||
data.description || '',
|
||||
data.signature_type,
|
||||
data.pattern,
|
||||
data.match_type,
|
||||
data.threat_level,
|
||||
data.confidence || 0.8,
|
||||
data.enabled !== undefined ? (data.enabled ? 1 : 0) : 1,
|
||||
data.auto_block !== undefined ? (data.auto_block ? 1 : 0) : 0
|
||||
],
|
||||
async (err) => {
|
||||
if (err) {
|
||||
reject(err);
|
||||
} else {
|
||||
await this.loadSignatures();
|
||||
|
||||
// Log signature creation (CWE-778)
|
||||
logAggregator.aggregate('risk_signature_manager', 'info', 'security', 'Risk signature created', {
|
||||
signatureId,
|
||||
userId,
|
||||
name: data.name,
|
||||
signatureType: data.signature_type,
|
||||
threatLevel: data.threat_level,
|
||||
autoBlock: data.auto_block === 1
|
||||
});
|
||||
|
||||
logger.info(`[RiskSignatureManager] Signature created: ${signatureId} by user ${userId}`);
|
||||
resolve({ signatureId });
|
||||
}
|
||||
}
|
||||
);
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Update signature
|
||||
* CWE-778: Logs signature modifications
|
||||
*/
|
||||
async updateSignature(signatureId, updates, userId) {
|
||||
const allowedFields = ['name', 'description', 'pattern', 'match_type', 'threat_level', 'confidence', 'enabled', 'auto_block'];
|
||||
const setClause = [];
|
||||
const params = [];
|
||||
|
||||
for (const [key, value] of Object.entries(updates)) {
|
||||
if (allowedFields.includes(key)) {
|
||||
setClause.push(`${key} = ?`);
|
||||
params.push((key === 'enabled' || key === 'auto_block') ? (value ? 1 : 0) : value);
|
||||
}
|
||||
}
|
||||
|
||||
if (setClause.length === 0) {
|
||||
throw new Error('No valid fields to update');
|
||||
}
|
||||
|
||||
setClause.push('updated_at = CURRENT_TIMESTAMP');
|
||||
params.push(signatureId);
|
||||
|
||||
return new Promise((resolve, reject) => {
|
||||
db.run(
|
||||
`UPDATE risk_signatures
|
||||
SET ${setClause.join(', ')}
|
||||
WHERE signature_id = ?`,
|
||||
params,
|
||||
async (err) => {
|
||||
if (err) {
|
||||
reject(err);
|
||||
} else {
|
||||
await this.loadSignatures();
|
||||
|
||||
// Log signature update (CWE-778)
|
||||
logAggregator.aggregate('risk_signature_manager', 'info', 'security', 'Risk signature updated', {
|
||||
signatureId,
|
||||
userId,
|
||||
updates
|
||||
});
|
||||
|
||||
logger.info(`[RiskSignatureManager] Signature updated: ${signatureId} by user ${userId}`);
|
||||
resolve({ success: true });
|
||||
}
|
||||
}
|
||||
);
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Delete signature
|
||||
* CWE-778: Logs signature deletion
|
||||
*/
|
||||
async deleteSignature(signatureId, userId) {
|
||||
return new Promise((resolve, reject) => {
|
||||
db.run(
|
||||
`DELETE FROM risk_signatures WHERE signature_id = ?`,
|
||||
[signatureId],
|
||||
async (err) => {
|
||||
if (err) {
|
||||
reject(err);
|
||||
} else {
|
||||
await this.loadSignatures();
|
||||
|
||||
// Log signature deletion (CWE-778)
|
||||
logAggregator.aggregate('risk_signature_manager', 'warn', 'security', 'Risk signature deleted', {
|
||||
signatureId,
|
||||
userId
|
||||
});
|
||||
|
||||
logger.info(`[RiskSignatureManager] Signature deleted: ${signatureId} by user ${userId}`);
|
||||
resolve({ success: true });
|
||||
}
|
||||
}
|
||||
);
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Get signature statistics
|
||||
*/
|
||||
async getStatistics() {
|
||||
return new Promise((resolve, reject) => {
|
||||
db.get(
|
||||
`SELECT
|
||||
COUNT(*) as total,
|
||||
SUM(CASE WHEN enabled = 1 THEN 1 ELSE 0 END) as enabled,
|
||||
SUM(CASE WHEN enabled = 0 THEN 1 ELSE 0 END) as disabled,
|
||||
SUM(CASE WHEN auto_block = 1 THEN 1 ELSE 0 END) as auto_block_enabled,
|
||||
COUNT(DISTINCT signature_type) as unique_types,
|
||||
COUNT(DISTINCT threat_level) as unique_threat_levels
|
||||
FROM risk_signatures`,
|
||||
[],
|
||||
(err, row) => {
|
||||
if (err) reject(err);
|
||||
else resolve(row);
|
||||
}
|
||||
);
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
// Create singleton instance
|
||||
const riskSignatureManager = new RiskSignatureManager();
|
||||
|
||||
module.exports = riskSignatureManager;
|
||||
227
backend/utils/routeProtection.js
Normal file
@@ -0,0 +1,227 @@
/**
|
||||
* Route Protection Utility
|
||||
* Ensures all routes have proper error handling (CWE-391 compliance)
|
||||
* Wraps database callbacks and async operations with error handlers
|
||||
*/
|
||||
|
||||
const logger = require('./logger');
|
||||
const { sanitizeError } = require('./errorHandler');
|
||||
|
||||
/**
|
||||
* Wrap database callback to ensure errors are caught
|
||||
*
|
||||
* @param {Function} callback - Database callback function
|
||||
* @param {Object} res - Express response object
|
||||
* @param {string} context - Context for error logging
|
||||
* @returns {Function} Wrapped callback
|
||||
*/
|
||||
function wrapDbCallback(callback, res, context = 'Database operation') {
|
||||
return function(err, ...args) {
|
||||
if (err) {
|
||||
logger.error(`${context} error:`, {
|
||||
error: err.message,
|
||||
stack: err.stack,
|
||||
context
|
||||
});
|
||||
|
||||
const sanitized = sanitizeError(err);
|
||||
return res.status(500).json({
|
||||
error: sanitized.message,
|
||||
code: sanitized.code,
|
||||
timestamp: new Date().toISOString()
|
||||
});
|
||||
}
|
||||
|
||||
try {
|
||||
return callback(err, ...args);
|
||||
} catch (callbackError) {
|
||||
logger.error(`${context} callback error:`, {
|
||||
error: callbackError.message,
|
||||
stack: callbackError.stack,
|
||||
context
|
||||
});
|
||||
|
||||
const sanitized = sanitizeError(callbackError);
|
||||
return res.status(500).json({
|
||||
error: sanitized.message,
|
||||
code: sanitized.code,
|
||||
timestamp: new Date().toISOString()
|
||||
});
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Wrap sync route handler to catch any errors
|
||||
*
|
||||
* @param {Function} handler - Route handler function
|
||||
* @returns {Function} Wrapped handler
|
||||
*/
|
||||
function wrapSyncHandler(handler) {
|
||||
return function(req, res, next) {
|
||||
try {
|
||||
return handler(req, res, next);
|
||||
} catch (error) {
|
||||
logger.error('Sync route handler error:', {
|
||||
error: error.message,
|
||||
stack: error.stack,
|
||||
method: req.method,
|
||||
path: req.path
|
||||
});
|
||||
|
||||
next(error);
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Wrap async route handler to catch promise rejections
|
||||
*
|
||||
* @param {Function} handler - Async route handler function
|
||||
* @returns {Function} Wrapped handler
|
||||
*/
|
||||
function wrapAsyncHandler(handler) {
|
||||
return function(req, res, next) {
|
||||
Promise.resolve(handler(req, res, next))
|
||||
.catch((error) => {
|
||||
logger.error('Async route handler error:', {
|
||||
error: error.message,
|
||||
stack: error.stack,
|
||||
method: req.method,
|
||||
path: req.path
|
||||
});
|
||||
|
||||
next(error);
|
||||
});
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Smart route wrapper - automatically detects and wraps handlers
|
||||
*
|
||||
* @param {Function} handler - Route handler (sync or async)
|
||||
* @returns {Function} Wrapped handler
|
||||
*/
|
||||
function protectRoute(handler) {
|
||||
// Check if handler is async
|
||||
if (handler.constructor.name === 'AsyncFunction') {
|
||||
return wrapAsyncHandler(handler);
|
||||
}
|
||||
|
||||
return wrapSyncHandler(handler);
|
||||
}
|
||||
|
||||
/**
|
||||
* Protect all routes in a router
|
||||
*
|
||||
* @param {Object} router - Express router
|
||||
* @param {Array} routes - Array of route definitions
|
||||
*/
|
||||
function protectAllRoutes(router, routes) {
|
||||
routes.forEach(({ method, path, middleware = [], handler }) => {
|
||||
const wrappedHandler = protectRoute(handler);
|
||||
router[method](path, ...middleware, wrappedHandler);
|
||||
});
|
||||
}
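// Illustrative usage (requireAuth and listUsers are placeholders, not exports of this module):
//   const express = require('express');
//   const router = express.Router();
//   router.get('/health', protectRoute((req, res) => res.json({ ok: true })));
//   protectAllRoutes(router, [
//     { method: 'get', path: '/users', middleware: [requireAuth], handler: listUsers }
//   ]);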
|
||||
|
||||
/**
|
||||
* Create a safe database wrapper with automatic error handling
|
||||
*
|
||||
* @param {Object} db - Database connection
|
||||
* @returns {Object} Wrapped database object
|
||||
*/
|
||||
function createSafeDb(db) {
|
||||
return {
|
||||
// Wrap db.run
|
||||
run: (sql, params, callback) => {
try {
// Use a regular function so sqlite3's `this` context (lastID, changes) stays available to callers
return db.run(sql, params, function(err, ...args) {
if (err) {
logger.error('Database run error:', {
error: err.message,
sql: sql.substring(0, 100) // Log first 100 chars only
});
}
if (callback) {
try {
return callback.call(this, err, ...args);
} catch (callbackError) {
logger.error('Database run callback error:', callbackError);
}
}
});
} catch (error) {
logger.error('Database run exception:', error);
if (callback) {
callback(error);
}
}
},
|
||||
|
||||
// Wrap db.get
|
||||
get: (sql, params, callback) => {
|
||||
try {
|
||||
return db.get(sql, params, (err, ...args) => {
|
||||
if (err) {
|
||||
logger.error('Database get error:', {
|
||||
error: err.message,
|
||||
sql: sql.substring(0, 100)
|
||||
});
|
||||
}
|
||||
if (callback) {
|
||||
try {
|
||||
return callback(err, ...args);
|
||||
} catch (callbackError) {
|
||||
logger.error('Database get callback error:', callbackError);
|
||||
}
|
||||
}
|
||||
});
|
||||
} catch (error) {
|
||||
logger.error('Database get exception:', error);
|
||||
if (callback) {
|
||||
callback(error);
|
||||
}
|
||||
}
|
||||
},
|
||||
|
||||
// Wrap db.all
|
||||
all: (sql, params, callback) => {
|
||||
try {
|
||||
return db.all(sql, params, (err, ...args) => {
|
||||
if (err) {
|
||||
logger.error('Database all error:', {
|
||||
error: err.message,
|
||||
sql: sql.substring(0, 100)
|
||||
});
|
||||
}
|
||||
if (callback) {
|
||||
try {
|
||||
return callback(err, ...args);
|
||||
} catch (callbackError) {
|
||||
logger.error('Database all callback error:', callbackError);
|
||||
}
|
||||
}
|
||||
});
|
||||
} catch (error) {
|
||||
logger.error('Database all exception:', error);
|
||||
if (callback) {
|
||||
callback(error);
|
||||
}
|
||||
}
|
||||
},
|
||||
|
||||
// Pass through other methods
|
||||
close: (...args) => db.close(...args),
|
||||
serialize: (...args) => db.serialize(...args),
|
||||
parallelize: (...args) => db.parallelize(...args)
|
||||
};
|
||||
}
|
||||
|
||||
module.exports = {
|
||||
wrapDbCallback,
|
||||
wrapSyncHandler,
|
||||
wrapAsyncHandler,
|
||||
protectRoute,
|
||||
protectAllRoutes,
|
||||
createSafeDb
|
||||
};
|
||||
531
backend/utils/securityAudit.js
Normal file
@@ -0,0 +1,531 @@
/**
|
||||
* Security Audit Logger
|
||||
* Tracks security-related events for compliance and forensics
|
||||
* Integrated with SIEM for centralized log aggregation
|
||||
*/
|
||||
|
||||
const logger = require('./logger');
|
||||
const { db } = require('../database/db');
|
||||
const logAggregator = require('./logAggregator');
|
||||
|
||||
class SecurityAuditLogger {
|
||||
/**
|
||||
* Log authentication events
|
||||
*/
|
||||
static async logAuthEvent(eventType, userId, details = {}) {
|
||||
const event = {
|
||||
event_type: eventType,
|
||||
user_id: userId,
|
||||
ip_address: details.ip || 'unknown',
|
||||
user_agent: details.userAgent || 'unknown',
|
||||
success: details.success !== false,
|
||||
failure_reason: details.failureReason || null,
|
||||
metadata: JSON.stringify(details.metadata || {}),
|
||||
timestamp: new Date().toISOString()
|
||||
};
|
||||
|
||||
try {
|
||||
await new Promise((resolve, reject) => {
|
||||
db.run(
|
||||
`INSERT INTO security_audit_log (event_type, user_id, ip_address, user_agent, success, failure_reason, metadata, timestamp)
|
||||
VALUES (?, ?, ?, ?, ?, ?, ?, ?)`,
|
||||
[event.event_type, event.user_id, event.ip_address, event.user_agent, event.success ? 1 : 0, event.failure_reason, event.metadata, event.timestamp],
|
||||
(err) => err ? reject(err) : resolve()
|
||||
);
|
||||
});
|
||||
|
||||
logger.info(`[SECURITY] ${eventType}: user=${userId}, ip=${event.ip_address}, success=${event.success}`);
|
||||
|
||||
// Aggregate to SIEM
|
||||
const level = event.success ? 'info' : 'warn';
|
||||
const source = eventType.startsWith('LOGIN') || eventType.includes('PASSWORD') ? 'authentication' : 'security_audit';
|
||||
logAggregator.aggregate(source, level, 'authentication', `${eventType}: ${event.success ? 'success' : 'failure'}`, {
|
||||
userId: event.user_id,
|
||||
ip: event.ip_address,
|
||||
userAgent: event.user_agent,
|
||||
failureReason: event.failure_reason,
|
||||
metadata: details.metadata || {}
|
||||
});
|
||||
} catch (error) {
|
||||
logger.error('Failed to log security event:', error);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Log login attempt
|
||||
*/
|
||||
static async logLoginAttempt(username, success, details = {}) {
|
||||
return this.logAuthEvent('LOGIN_ATTEMPT', null, {
|
||||
success,
|
||||
failureReason: success ? null : (details.reason || 'Invalid credentials'),
|
||||
ip: details.ip,
|
||||
userAgent: details.userAgent,
|
||||
metadata: { username }
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Log successful login
|
||||
*/
|
||||
static async logLoginSuccess(userId, details = {}) {
|
||||
return this.logAuthEvent('LOGIN_SUCCESS', userId, {
|
||||
success: true,
|
||||
ip: details.ip,
|
||||
userAgent: details.userAgent,
|
||||
metadata: { twoFactorUsed: details.twoFactorUsed || false }
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Log failed login
|
||||
*/
|
||||
static async logLoginFailure(username, reason, details = {}) {
|
||||
return this.logAuthEvent('LOGIN_FAILURE', null, {
|
||||
success: false,
|
||||
failureReason: reason,
|
||||
ip: details.ip,
|
||||
userAgent: details.userAgent,
|
||||
metadata: { username }
|
||||
});
|
||||
}
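// Illustrative call from a login route (the route itself lives outside this file):
//   await SecurityAuditLogger.logLoginFailure(username, 'Invalid credentials', {
//     ip: req.ip,
//     userAgent: req.headers['user-agent']
//   });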
|
||||
|
||||
/**
|
||||
* Log account lockout
|
||||
*/
|
||||
static async logAccountLockout(userId, details = {}) {
|
||||
return this.logAuthEvent('ACCOUNT_LOCKOUT', userId, {
|
||||
success: false,
|
||||
failureReason: 'Too many failed login attempts',
|
||||
ip: details.ip,
|
||||
userAgent: details.userAgent,
|
||||
metadata: { failedAttempts: details.failedAttempts }
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Log password change
|
||||
*/
|
||||
static async logPasswordChange(userId, details = {}) {
|
||||
return this.logAuthEvent('PASSWORD_CHANGE', userId, {
|
||||
success: true,
|
||||
ip: details.ip,
|
||||
userAgent: details.userAgent,
|
||||
metadata: { forced: details.forced || false, expired: details.expired || false }
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Log 2FA events
|
||||
*/
|
||||
static async log2FAEvent(eventType, userId, success, details = {}) {
|
||||
return this.logAuthEvent(eventType, userId, {
|
||||
success,
|
||||
failureReason: success ? null : (details.reason || 'Invalid code'),
|
||||
ip: details.ip,
|
||||
userAgent: details.userAgent,
|
||||
metadata: details.metadata || {}
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Log session events
|
||||
*/
|
||||
static async logSessionEvent(eventType, userId, details = {}) {
|
||||
return this.logAuthEvent(eventType, userId, {
|
||||
success: true,
|
||||
ip: details.ip,
|
||||
userAgent: details.userAgent,
|
||||
metadata: { sessionId: details.sessionId }
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Log privilege escalation
|
||||
*/
|
||||
static async logPrivilegeEscalation(userId, details = {}) {
|
||||
return this.logAuthEvent('PRIVILEGE_ESCALATION', userId, {
|
||||
success: true,
|
||||
ip: details.ip,
|
||||
userAgent: details.userAgent,
|
||||
metadata: {
|
||||
fromRole: details.fromRole,
|
||||
toRole: details.toRole,
|
||||
grantedBy: details.grantedBy
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Get recent security events for a user
|
||||
*/
|
||||
static async getUserSecurityEvents(userId, limit = 50) {
|
||||
return new Promise((resolve, reject) => {
|
||||
db.all(
|
||||
`SELECT * FROM security_audit_log
|
||||
WHERE user_id = ?
|
||||
ORDER BY timestamp DESC
|
||||
LIMIT ?`,
|
||||
[userId, limit],
|
||||
(err, rows) => err ? reject(err) : resolve(rows)
|
||||
);
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Get failed login attempts for an IP
|
||||
*/
|
||||
static async getFailedAttemptsForIP(ipAddress, timeWindowMinutes = 30) {
|
||||
const cutoffTime = new Date(Date.now() - timeWindowMinutes * 60 * 1000).toISOString();
|
||||
|
||||
return new Promise((resolve, reject) => {
|
||||
db.get(
|
||||
`SELECT COUNT(*) as count
|
||||
FROM security_audit_log
|
||||
WHERE event_type IN ('LOGIN_FAILURE', 'LOGIN_ATTEMPT')
|
||||
AND success = 0
|
||||
AND ip_address = ?
|
||||
AND timestamp > ?`,
|
||||
[ipAddress, cutoffTime],
|
||||
(err, row) => err ? reject(err) : resolve(row.count || 0)
|
||||
);
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Get failed login attempts for a user
|
||||
*/
|
||||
static async getFailedAttemptsForUser(username, timeWindowMinutes = 30) {
|
||||
const cutoffTime = new Date(Date.now() - timeWindowMinutes * 60 * 1000).toISOString();
|
||||
|
||||
return new Promise((resolve, reject) => {
|
||||
db.get(
|
||||
`SELECT COUNT(*) as count
|
||||
FROM security_audit_log
|
||||
WHERE event_type IN ('LOGIN_FAILURE', 'LOGIN_ATTEMPT')
|
||||
AND success = 0
|
||||
AND metadata LIKE ?
|
||||
AND timestamp > ?`,
|
||||
[`%"username":"${username}"%`, cutoffTime],
|
||||
(err, row) => err ? reject(err) : resolve(row.count || 0)
|
||||
);
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Clear old audit logs (data retention)
|
||||
*/
|
||||
static async cleanupOldLogs(retentionDays = 90) {
|
||||
const cutoffDate = new Date(Date.now() - retentionDays * 24 * 60 * 60 * 1000).toISOString();
|
||||
|
||||
return new Promise((resolve, reject) => {
|
||||
db.run(
|
||||
'DELETE FROM security_audit_log WHERE timestamp < ?',
|
||||
[cutoffDate],
|
||||
function(err) {
|
||||
if (err) reject(err);
|
||||
else {
|
||||
logger.info(`Cleaned up ${this.changes} old security audit logs`);
|
||||
resolve(this.changes);
|
||||
}
|
||||
}
|
||||
);
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* CWE-778: Log token issuance (JWT, OAuth, etc.)
|
||||
* Includes relevant metadata such as client ID, IP address, device info
|
||||
*/
|
||||
static async logTokenIssuance(userId, tokenType, details = {}) {
|
||||
return this.logAuthEvent('TOKEN_ISSUED', userId, {
|
||||
success: true,
|
||||
ip: details.ip,
|
||||
userAgent: details.userAgent,
|
||||
metadata: {
|
||||
tokenType: tokenType, // 'JWT', 'REFRESH', 'TEMP_2FA', 'OAUTH'
|
||||
clientId: details.clientId,
|
||||
deviceInfo: details.deviceInfo || this.extractDeviceInfo(details.userAgent),
|
||||
expiresIn: details.expiresIn,
|
||||
purpose: details.purpose, // 'login', '2fa', 'registration', 'password_reset'
|
||||
scope: details.scope,
|
||||
grantType: details.grantType
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* CWE-778: Log token refresh
|
||||
*/
|
||||
static async logTokenRefresh(userId, details = {}) {
|
||||
return this.logAuthEvent('TOKEN_REFRESHED', userId, {
|
||||
success: true,
|
||||
ip: details.ip,
|
||||
userAgent: details.userAgent,
|
||||
metadata: {
|
||||
tokenType: details.tokenType || 'JWT',
|
||||
clientId: details.clientId,
|
||||
deviceInfo: details.deviceInfo || this.extractDeviceInfo(details.userAgent),
|
||||
oldTokenExpiry: details.oldTokenExpiry,
|
||||
newTokenExpiry: details.newTokenExpiry
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* CWE-778: Log token revocation
|
||||
*/
|
||||
static async logTokenRevocation(userId, reason, details = {}) {
|
||||
return this.logAuthEvent('TOKEN_REVOKED', userId, {
|
||||
success: true,
|
||||
ip: details.ip,
|
||||
userAgent: details.userAgent,
|
||||
metadata: {
|
||||
reason: reason, // 'logout', 'password_change', 'security_breach', 'admin_action'
|
||||
tokenType: details.tokenType || 'JWT',
|
||||
sessionId: details.sessionId,
|
||||
revokedBy: details.revokedBy // user_id of admin who revoked it
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* CWE-778: Log privilege changes
|
||||
* Any activities where user's privilege level changes
|
||||
*/
|
||||
static async logPrivilegeChange(userId, action, details = {}) {
|
||||
return this.logAuthEvent('PRIVILEGE_CHANGE', userId, {
|
||||
success: true,
|
||||
ip: details.ip,
|
||||
userAgent: details.userAgent,
|
||||
metadata: {
|
||||
action: action, // 'role_assigned', 'role_removed', 'permission_granted', 'permission_revoked'
|
||||
previousRole: details.previousRole,
|
||||
newRole: details.newRole,
|
||||
previousPermissions: details.previousPermissions,
|
||||
newPermissions: details.newPermissions,
|
||||
changedBy: details.changedBy, // user_id of admin who made the change
|
||||
changedByUsername: details.changedByUsername,
|
||||
reason: details.reason,
|
||||
affectedUser: details.affectedUser // username of the user being changed
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* CWE-778: Log permission grant
|
||||
*/
|
||||
static async logPermissionGrant(userId, permission, details = {}) {
|
||||
return this.logAuthEvent('PERMISSION_GRANTED', userId, {
|
||||
success: true,
|
||||
ip: details.ip,
|
||||
userAgent: details.userAgent,
|
||||
metadata: {
|
||||
permission: permission,
|
||||
grantedBy: details.grantedBy,
|
||||
grantedByUsername: details.grantedByUsername,
|
||||
scope: details.scope,
|
||||
expiresAt: details.expiresAt
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* CWE-778: Log permission revocation
|
||||
*/
|
||||
static async logPermissionRevocation(userId, permission, details = {}) {
|
||||
return this.logAuthEvent('PERMISSION_REVOKED', userId, {
|
||||
success: true,
|
||||
ip: details.ip,
|
||||
userAgent: details.userAgent,
|
||||
metadata: {
|
||||
permission: permission,
|
||||
revokedBy: details.revokedBy,
|
||||
revokedByUsername: details.revokedByUsername,
|
||||
reason: details.reason
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* CWE-778: Log user activation/deactivation
|
||||
*/
|
||||
static async logAccountStatusChange(userId, newStatus, details = {}) {
|
||||
return this.logAuthEvent('ACCOUNT_STATUS_CHANGED', userId, {
|
||||
success: true,
|
||||
ip: details.ip,
|
||||
userAgent: details.userAgent,
|
||||
metadata: {
|
||||
previousStatus: details.previousStatus,
|
||||
newStatus: newStatus, // 'active', 'inactive', 'suspended', 'locked'
|
||||
changedBy: details.changedBy,
|
||||
changedByUsername: details.changedByUsername,
|
||||
reason: details.reason
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* CWE-778: Log administrative activities
|
||||
* For admin actions like user creation, deletion, unlock, force logout
|
||||
*/
|
||||
static async logAdminActivity(adminId, action, details = {}) {
|
||||
return this.logAuthEvent('ADMIN_ACTIVITY', adminId, {
|
||||
success: true,
|
||||
ip: details.ip,
|
||||
userAgent: details.userAgent,
|
||||
metadata: {
|
||||
action: action, // 'user_created', 'user_deleted', 'account_unlocked', 'password_reset', 'force_logout'
|
||||
targetUserId: details.targetUserId,
|
||||
targetUsername: details.targetUsername,
|
||||
adminUsername: details.adminUsername,
|
||||
changes: details.changes,
|
||||
reason: details.reason,
|
||||
deviceInfo: details.deviceInfo || this.extractDeviceInfo(details.userAgent)
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* CWE-778: Log sensitive data access
|
||||
* For accessing user lists, settings, VPN configs, backups, etc.
|
||||
*/
|
||||
static async logSensitiveDataAccess(userId, dataType, details = {}) {
|
||||
return this.logAuthEvent('SENSITIVE_DATA_ACCESS', userId, {
|
||||
success: true,
|
||||
ip: details.ip,
|
||||
userAgent: details.userAgent,
|
||||
metadata: {
|
||||
dataType: dataType, // 'user_list', 'user_details', 'settings', 'vpn_configs', 'backups', 'audit_logs'
|
||||
accessMethod: details.accessMethod || 'view', // 'view', 'export', 'download'
|
||||
recordCount: details.recordCount,
|
||||
filters: details.filters,
|
||||
scope: details.scope, // 'own', 'all', 'specific'
|
||||
deviceInfo: details.deviceInfo || this.extractDeviceInfo(details.userAgent)
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Extract device info from user agent
|
||||
*/
|
||||
static extractDeviceInfo(userAgent = '') {
|
||||
if (!userAgent) return { deviceType: 'unknown', os: 'unknown', browser: 'unknown' };
|
||||
|
||||
const ua = userAgent.toLowerCase();
|
||||
|
||||
// Device type
|
||||
let deviceType = 'desktop';
|
||||
if (/bot|crawler|spider/.test(ua)) deviceType = 'bot';
|
||||
else if (/mobile|android|iphone|ipod/.test(ua)) deviceType = 'mobile';
|
||||
else if (/tablet|ipad/.test(ua)) deviceType = 'tablet';
|
||||
|
||||
// Operating System (check mobile OSes first: Android UAs also contain "Linux"
// and iOS UAs contain "like Mac OS X", so the generic checks must come later)
let os = 'unknown';
if (/android/.test(ua)) os = 'Android';
else if (/iphone|ipad|ipod|ios/.test(ua)) os = 'iOS';
else if (/windows/.test(ua)) os = 'Windows';
else if (/mac os|macos/.test(ua)) os = 'macOS';
else if (/linux/.test(ua)) os = 'Linux';
|
||||
|
||||
// Browser
|
||||
let browser = 'unknown';
|
||||
if (/firefox/.test(ua)) browser = 'Firefox';
|
||||
else if (/chrome/.test(ua) && !/edge|edg|opr/.test(ua)) browser = 'Chrome';
|
||||
else if (/safari/.test(ua) && !/chrome/.test(ua)) browser = 'Safari';
|
||||
else if (/edge|edg/.test(ua)) browser = 'Edge';
|
||||
else if (/opera|opr/.test(ua)) browser = 'Opera';
|
||||
|
||||
return { deviceType, os, browser };
|
||||
}
|
||||
|
||||
/**
|
||||
* Get comprehensive audit statistics
|
||||
*/
|
||||
static async getAuditStatistics(timeRangeDays = 30) {
|
||||
const cutoffDate = new Date(Date.now() - timeRangeDays * 24 * 60 * 60 * 1000).toISOString();
|
||||
|
||||
return new Promise((resolve, reject) => {
|
||||
db.all(
|
||||
`SELECT
|
||||
event_type,
|
||||
success,
|
||||
COUNT(*) as count,
|
||||
COUNT(DISTINCT user_id) as unique_users,
|
||||
COUNT(DISTINCT ip_address) as unique_ips
|
||||
FROM security_audit_log
|
||||
WHERE timestamp > ?
|
||||
GROUP BY event_type, success
|
||||
ORDER BY count DESC`,
|
||||
[cutoffDate],
|
||||
(err, rows) => err ? reject(err) : resolve(rows)
|
||||
);
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Log system events (startup, shutdown, cleanup, etc.)
|
||||
*/
|
||||
static async logSystemEvent(eventType, success, details = {}) {
|
||||
return this.logAuthEvent(`SYSTEM_${eventType.toUpperCase()}`, null, {
|
||||
success,
|
||||
failureReason: success ? null : (details.error || 'System event failed'),
|
||||
metadata: details
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Log security incidents (tampering, breaches, etc.)
|
||||
*/
|
||||
static async logSecurityIncident(incidentType, details = {}) {
|
||||
const event = {
|
||||
event_type: `SECURITY_INCIDENT_${incidentType.toUpperCase()}`,
|
||||
user_id: details.userId || null,
|
||||
ip_address: details.ip || 'system',
|
||||
user_agent: details.userAgent || 'system',
|
||||
success: false,
|
||||
failure_reason: `Security incident: ${incidentType}`,
|
||||
metadata: JSON.stringify(details),
|
||||
timestamp: new Date().toISOString()
|
||||
};
|
||||
|
||||
try {
|
||||
await new Promise((resolve, reject) => {
|
||||
db.run(
|
||||
`INSERT INTO security_audit_log (event_type, user_id, ip_address, user_agent, success, failure_reason, metadata, timestamp)
|
||||
VALUES (?, ?, ?, ?, ?, ?, ?, ?)`,
|
||||
[event.event_type, event.user_id, event.ip_address, event.user_agent, 0, event.failure_reason, event.metadata, event.timestamp],
|
||||
(err) => err ? reject(err) : resolve()
|
||||
);
|
||||
});
|
||||
|
||||
logger.error(`[SECURITY INCIDENT] ${incidentType}: ${JSON.stringify(details)}`);
|
||||
|
||||
// Aggregate to SIEM with CRITICAL level
|
||||
logAggregator.aggregate('security_audit', 'critical', 'security_incident', `Security incident: ${incidentType}`, {
|
||||
incidentType,
|
||||
...details
|
||||
});
|
||||
} catch (error) {
|
||||
logger.error('Failed to log security incident:', error);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Log admin activities (user management, config changes, etc.)
* Note: this second definition overrides the earlier logAdminActivity above; only this version takes effect at runtime
|
||||
*/
|
||||
static async logAdminActivity(adminId, action, details = {}) {
|
||||
return this.logAuthEvent(`ADMIN_${action.toUpperCase()}`, adminId, {
|
||||
success: true,
|
||||
ip: details.ip,
|
||||
userAgent: details.userAgent,
|
||||
metadata: {
|
||||
action,
|
||||
target: details.target || details.targetUserId,
|
||||
changes: details.changes,
|
||||
...details
|
||||
}
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = SecurityAuditLogger;
|
||||
853
backend/utils/securityIntelligence.js
Normal file
@@ -0,0 +1,853 @@
/**
|
||||
* Security Intelligence & Pattern Analysis
|
||||
* Algorithm-driven surveillance system for automatic pattern detection
|
||||
* Includes anomaly detection, threat intelligence, and predictive analysis
|
||||
* Enhanced with configurable thresholds and risk signatures (CWE-778)
|
||||
*/
|
||||
|
||||
const logger = require('./logger');
|
||||
const logAggregator = require('./logAggregator');
|
||||
const { db } = require('../database/db');
|
||||
const thresholdManager = require('./thresholdManager');
|
||||
const riskSignatureManager = require('./riskSignatureManager');
|
||||
|
||||
class SecurityIntelligence {
|
||||
constructor() {
|
||||
this.patterns = new Map();
|
||||
this.anomalies = [];
|
||||
this.threatScore = 0;
|
||||
this.analysisInterval = 60000; // 1 minute
|
||||
this.initialize();
|
||||
}
|
||||
|
||||
/**
|
||||
* Initialize security intelligence system
|
||||
*/
|
||||
async initialize() {
|
||||
await this.createAnomaliesTable();
|
||||
await this.createThreatIntelligenceTable();
|
||||
|
||||
// Start continuous monitoring
|
||||
setInterval(() => this.analyze(), this.analysisInterval);
|
||||
|
||||
logger.info('[SecurityIntelligence] Initialized - Active monitoring enabled');
|
||||
}
|
||||
|
||||
/**
|
||||
* Create anomalies table
|
||||
*/
|
||||
async createAnomaliesTable() {
|
||||
return new Promise((resolve, reject) => {
|
||||
db.run(`
|
||||
CREATE TABLE IF NOT EXISTS security_anomalies (
|
||||
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
||||
anomaly_id TEXT UNIQUE NOT NULL,
|
||||
type TEXT NOT NULL,
|
||||
severity TEXT NOT NULL,
|
||||
description TEXT NOT NULL,
|
||||
confidence REAL NOT NULL,
|
||||
affected_user_id INTEGER,
|
||||
affected_ip TEXT,
|
||||
pattern_data TEXT,
|
||||
related_logs TEXT,
|
||||
status TEXT DEFAULT 'open',
|
||||
resolved_at DATETIME,
|
||||
resolved_by INTEGER,
|
||||
resolution_notes TEXT,
|
||||
created_at DATETIME DEFAULT CURRENT_TIMESTAMP
|
||||
)
|
||||
`, (err) => {
|
||||
if (err) reject(err);
|
||||
else {
|
||||
db.run(`CREATE INDEX IF NOT EXISTS idx_anomalies_type ON security_anomalies(type, created_at DESC)`);
|
||||
db.run(`CREATE INDEX IF NOT EXISTS idx_anomalies_severity ON security_anomalies(severity, created_at DESC)`);
|
||||
db.run(`CREATE INDEX IF NOT EXISTS idx_anomalies_status ON security_anomalies(status, created_at DESC)`);
|
||||
resolve();
|
||||
}
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Create threat intelligence table
|
||||
*/
|
||||
async createThreatIntelligenceTable() {
|
||||
return new Promise((resolve, reject) => {
|
||||
db.run(`
|
||||
CREATE TABLE IF NOT EXISTS threat_intelligence (
|
||||
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
||||
indicator TEXT NOT NULL,
|
||||
indicator_type TEXT NOT NULL,
|
||||
threat_level TEXT NOT NULL,
|
||||
description TEXT,
|
||||
source TEXT,
|
||||
confidence REAL NOT NULL,
|
||||
first_seen DATETIME DEFAULT CURRENT_TIMESTAMP,
|
||||
last_seen DATETIME DEFAULT CURRENT_TIMESTAMP,
|
||||
occurrence_count INTEGER DEFAULT 1,
|
||||
metadata TEXT
|
||||
)
|
||||
`, (err) => {
|
||||
if (err) reject(err);
|
||||
else {
|
||||
db.run(`CREATE UNIQUE INDEX IF NOT EXISTS idx_threat_indicator ON threat_intelligence(indicator, indicator_type)`);
|
||||
db.run(`CREATE INDEX IF NOT EXISTS idx_threat_level ON threat_intelligence(threat_level, last_seen DESC)`);
|
||||
resolve();
|
||||
}
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Main analysis loop - runs continuously
|
||||
*/
|
||||
async analyze() {
|
||||
try {
|
||||
logger.debug('[SecurityIntelligence] Running analysis cycle');
|
||||
|
||||
// Run all detection algorithms in parallel
|
||||
await Promise.all([
|
||||
this.detectBruteForceAttacks(),
|
||||
this.detectAccountEnumeration(),
|
||||
this.detectPrivilegeEscalation(),
|
||||
this.detectAnomalousAccess(),
|
||||
this.detectSuspiciousIPs(),
|
||||
this.detectDataExfiltration(),
|
||||
this.detectSessionAnomalies(),
|
||||
this.detectRateLimitAbuse()
|
||||
]);
|
||||
|
||||
// Calculate overall threat score
|
||||
await this.calculateThreatScore();
|
||||
|
||||
} catch (error) {
|
||||
logger.error('[SecurityIntelligence] Analysis cycle failed:', error);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Detect brute force authentication attacks
|
||||
* Enhanced with configurable thresholds
|
||||
*/
|
||||
async detectBruteForceAttacks() {
|
||||
// Get configured threshold or use default
|
||||
const thresholdConfig = await thresholdManager.getThresholds({ patternType: 'brute_force_attack' });
|
||||
const timeWindow = thresholdConfig[0]?.time_window_minutes || 10;
|
||||
const threshold = thresholdConfig[0]?.threshold_value || 10;
|
||||
const startTime = new Date(Date.now() - timeWindow * 60 * 1000).toISOString();
|
||||
|
||||
return new Promise((resolve, reject) => {
|
||||
db.all(
|
||||
`SELECT ip_address, COUNT(*) as attempt_count,
|
||||
MAX(timestamp) as last_attempt,
|
||||
GROUP_CONCAT(log_id) as log_ids
|
||||
FROM aggregated_logs
|
||||
WHERE category = 'authentication'
|
||||
AND level IN ('warn', 'error')
|
||||
AND message LIKE '%failed%'
|
||||
AND timestamp >= ?
|
||||
AND ip_address IS NOT NULL
|
||||
GROUP BY ip_address
|
||||
HAVING attempt_count >= ?`,
|
||||
[startTime, threshold],
|
||||
async (err, rows) => {
|
||||
if (err) {
|
||||
reject(err);
|
||||
return;
|
||||
}
|
||||
|
||||
for (const row of rows) {
|
||||
// Evaluate threshold
|
||||
const thresholdResult = await thresholdManager.evaluateThreshold(
|
||||
'brute_force_attack',
|
||||
'failed_login_count',
|
||||
row.attempt_count,
|
||||
{ ip_address: row.ip_address, timeWindow }
|
||||
);
|
||||
|
||||
if (!thresholdResult.exceeded) continue;
|
||||
|
||||
const configuredThreshold = thresholdResult.thresholds[0];
|
||||
const severity = configuredThreshold?.severity || (row.attempt_count > 20 ? 'critical' : 'high');
|
||||
|
||||
await this.createAnomaly({
|
||||
type: 'brute_force_attack',
|
||||
severity: severity,
|
||||
description: `Brute force attack detected from IP ${row.ip_address}: ${row.attempt_count} failed login attempts in ${timeWindow} minutes (threshold: ${threshold})`,
|
||||
confidence: Math.min(row.attempt_count / threshold, 1.0),
|
||||
affected_ip: row.ip_address,
|
||||
pattern_data: JSON.stringify({
|
||||
attemptCount: row.attempt_count,
|
||||
timeWindow: `${timeWindow} minutes`,
|
||||
threshold: threshold,
|
||||
thresholdExceeded: thresholdResult.exceeded,
|
||||
lastAttempt: row.last_attempt
|
||||
}),
|
||||
related_logs: row.log_ids
|
||||
});
|
||||
|
||||
// Add to threat intelligence
|
||||
await this.addThreatIndicator(row.ip_address, 'ip', severity === 'critical' ? 'critical' : 'high', 'Brute force attack source');
|
||||
}
|
||||
|
||||
resolve(rows.length);
|
||||
}
|
||||
);
|
||||
});
|
||||
}
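// Example (fallback values, i.e. no configured threshold loaded): 25 failed
// logins from one IP within 10 minutes gives confidence min(25/10, 1.0) = 1.0
// and severity 'critical' because the count exceeds 20.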
|
||||
|
||||
/**
|
||||
* Detect account enumeration attempts
|
||||
* Enhanced with configurable thresholds
|
||||
*/
|
||||
async detectAccountEnumeration() {
|
||||
// Get configured threshold or use default
|
||||
const thresholdConfig = await thresholdManager.getThresholds({ patternType: 'credential_stuffing' });
|
||||
const timeWindow = thresholdConfig[0]?.time_window_minutes || 5;
|
||||
const threshold = thresholdConfig[0]?.threshold_value || 5;
|
||||
const startTime = new Date(Date.now() - timeWindow * 60 * 1000).toISOString();
|
||||
|
||||
return new Promise((resolve, reject) => {
|
||||
db.all(
|
||||
`SELECT ip_address,
|
||||
COUNT(DISTINCT json_extract(metadata, '$.username')) as unique_usernames,
|
||||
COUNT(*) as total_attempts,
|
||||
GROUP_CONCAT(log_id) as log_ids
|
||||
FROM aggregated_logs
|
||||
WHERE category = 'authentication'
|
||||
AND level = 'warn'
|
||||
AND timestamp >= ?
|
||||
AND ip_address IS NOT NULL
|
||||
AND metadata LIKE '%username%'
|
||||
GROUP BY ip_address
|
||||
HAVING unique_usernames >= ?`,
|
||||
[startTime, threshold],
|
||||
async (err, rows) => {
|
||||
if (err) {
|
||||
reject(err);
|
||||
return;
|
||||
}
|
||||
|
||||
for (const row of rows) {
|
||||
await this.createAnomaly({
|
||||
type: 'account_enumeration',
|
||||
severity: 'medium',
|
||||
description: `Account enumeration detected from IP ${row.ip_address}: ${row.unique_usernames} different usernames tried in ${timeWindow} minutes`,
|
||||
confidence: Math.min(row.unique_usernames / (threshold * 2), 1.0),
|
||||
affected_ip: row.ip_address,
|
||||
pattern_data: JSON.stringify({
|
||||
uniqueUsernames: row.unique_usernames,
|
||||
totalAttempts: row.total_attempts,
|
||||
timeWindow: `${timeWindow} minutes`
|
||||
}),
|
||||
related_logs: row.log_ids
|
||||
});
|
||||
}
|
||||
|
||||
resolve(rows.length);
|
||||
}
|
||||
);
|
||||
});
|
||||
}
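// Example (fallback threshold of 5): 8 distinct usernames tried from one IP in
// 5 minutes gives confidence min(8/10, 1.0) = 0.8 with severity 'medium'.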
|
||||
|
||||
/**
|
||||
* Detect privilege escalation attempts
|
||||
*/
|
||||
async detectPrivilegeEscalation() {
|
||||
const timeWindow = 30; // minutes
|
||||
const startTime = new Date(Date.now() - timeWindow * 60 * 1000).toISOString();
|
||||
|
||||
return new Promise((resolve, reject) => {
|
||||
db.all(
|
||||
`SELECT user_id, ip_address,
|
||||
COUNT(*) as escalation_attempts,
|
||||
GROUP_CONCAT(log_id) as log_ids
|
||||
FROM aggregated_logs
|
||||
WHERE category = 'authorization'
|
||||
AND (message LIKE '%denied%' OR message LIKE '%unauthorized%')
|
||||
AND timestamp >= ?
|
||||
AND user_id IS NOT NULL
|
||||
GROUP BY user_id, ip_address
|
||||
HAVING escalation_attempts >= 3`,
|
||||
[startTime],
|
||||
async (err, rows) => {
|
||||
if (err) {
|
||||
reject(err);
|
||||
return;
|
||||
}
|
||||
|
||||
for (const row of rows) {
|
||||
await this.createAnomaly({
|
||||
type: 'privilege_escalation',
|
||||
severity: 'critical',
|
||||
description: `Privilege escalation attempt detected: User ${row.user_id} attempted ${row.escalation_attempts} unauthorized actions`,
|
||||
confidence: 0.85,
|
||||
affected_user_id: row.user_id,
|
||||
affected_ip: row.ip_address,
|
||||
pattern_data: JSON.stringify({
|
||||
escalationAttempts: row.escalation_attempts,
|
||||
timeWindow: `${timeWindow} minutes`
|
||||
}),
|
||||
related_logs: row.log_ids
|
||||
});
|
||||
}
|
||||
|
||||
resolve(rows.length);
|
||||
}
|
||||
);
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Detect anomalous access patterns
|
||||
*/
|
||||
async detectAnomalousAccess() {
|
||||
const timeWindow = 60; // minutes
|
||||
const startTime = new Date(Date.now() - timeWindow * 60 * 1000).toISOString();
|
||||
|
||||
// Detect access from unusual hours (2 AM - 5 AM)
|
||||
return new Promise((resolve, reject) => {
|
||||
db.all(
|
||||
`SELECT user_id, ip_address,
|
||||
COUNT(*) as access_count,
|
||||
GROUP_CONCAT(log_id) as log_ids
|
||||
FROM aggregated_logs
|
||||
WHERE category IN ('access', 'security_audit')
|
||||
AND timestamp >= ?
|
||||
AND CAST(strftime('%H', timestamp) AS INTEGER) BETWEEN 2 AND 5
|
||||
AND user_id IS NOT NULL
|
||||
GROUP BY user_id, ip_address
|
||||
HAVING access_count >= 3`,
|
||||
[startTime],
|
||||
async (err, rows) => {
|
||||
if (err) {
|
||||
reject(err);
|
||||
return;
|
||||
}
|
||||
|
||||
for (const row of rows) {
|
||||
await this.createAnomaly({
|
||||
type: 'anomalous_access',
|
||||
severity: 'medium',
|
||||
description: `Unusual access pattern: User ${row.user_id} accessed system during off-hours (${row.access_count} times)`,
|
||||
confidence: 0.7,
|
||||
affected_user_id: row.user_id,
|
||||
affected_ip: row.ip_address,
|
||||
pattern_data: JSON.stringify({
|
||||
accessCount: row.access_count,
|
||||
timeRange: '2 AM - 5 AM'
|
||||
}),
|
||||
related_logs: row.log_ids
|
||||
});
|
||||
}
|
||||
|
||||
resolve(rows.length);
|
||||
}
|
||||
);
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Detect suspicious IP addresses
|
||||
*/
|
||||
async detectSuspiciousIPs() {
|
||||
const timeWindow = 60; // minutes
|
||||
const threshold = 100; // requests
|
||||
const startTime = new Date(Date.now() - timeWindow * 60 * 1000).toISOString();
|
||||
|
||||
return new Promise((resolve, reject) => {
|
||||
db.all(
|
||||
`SELECT ip_address,
|
||||
COUNT(*) as request_count,
|
||||
COUNT(DISTINCT user_id) as unique_users,
|
||||
COUNT(CASE WHEN level = 'error' THEN 1 END) as error_count,
|
||||
GROUP_CONCAT(DISTINCT source) as sources
|
||||
FROM aggregated_logs
|
||||
WHERE timestamp >= ?
|
||||
AND ip_address IS NOT NULL
|
||||
GROUP BY ip_address
|
||||
HAVING request_count >= ? OR unique_users >= 10`,
|
||||
[startTime, threshold],
|
||||
async (err, rows) => {
|
||||
if (err) {
|
||||
reject(err);
|
||||
return;
|
||||
}
|
||||
|
||||
for (const row of rows) {
|
||||
let severity = 'low';
|
||||
let reason = [];
|
||||
|
||||
if (row.request_count >= threshold * 2) {
|
||||
severity = 'high';
|
||||
reason.push(`excessive requests (${row.request_count})`);
|
||||
} else if (row.request_count >= threshold) {
|
||||
severity = 'medium';
|
||||
reason.push(`high request volume (${row.request_count})`);
|
||||
}
|
||||
|
||||
if (row.unique_users >= 10) {
|
||||
severity = 'high';
|
||||
reason.push(`multiple user accounts (${row.unique_users})`);
|
||||
}
|
||||
|
||||
if (row.error_count > row.request_count * 0.3) {
|
||||
severity = 'high';
|
||||
reason.push(`high error rate (${row.error_count})`);
|
||||
}
|
||||
|
||||
await this.createAnomaly({
|
||||
type: 'suspicious_ip',
|
||||
severity,
|
||||
description: `Suspicious IP activity from ${row.ip_address}: ${reason.join(', ')}`,
|
||||
confidence: 0.75,
|
||||
affected_ip: row.ip_address,
|
||||
pattern_data: JSON.stringify({
|
||||
requestCount: row.request_count,
|
||||
uniqueUsers: row.unique_users,
|
||||
errorCount: row.error_count,
|
||||
sources: row.sources
|
||||
}),
|
||||
related_logs: null
|
||||
});
|
||||
|
||||
if (severity === 'high') {
|
||||
await this.addThreatIndicator(row.ip_address, 'ip', severity, reason.join(', '));
|
||||
}
|
||||
}
|
||||
|
||||
resolve(rows.length);
|
||||
}
|
||||
);
|
||||
});
|
||||
}
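// Example: 250 requests from a single IP in the window (>= 2x the threshold of
// 100) is classified 'high' for "excessive requests", and the IP is also
// recorded as a threat indicator.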
|
||||
|
||||
/**
|
||||
* Detect potential data exfiltration
|
||||
*/
|
||||
async detectDataExfiltration() {
|
||||
const timeWindow = 30; // minutes
|
||||
const downloadThreshold = 5; // flag after this many download/export operations in the window
|
||||
const startTime = new Date(Date.now() - timeWindow * 60 * 1000).toISOString();
|
||||
|
||||
return new Promise((resolve, reject) => {
|
||||
db.all(
|
||||
`SELECT user_id, ip_address,
|
||||
COUNT(*) as download_count,
|
||||
GROUP_CONCAT(log_id) as log_ids
|
||||
FROM aggregated_logs
|
||||
WHERE category = 'access'
|
||||
AND (message LIKE '%download%' OR message LIKE '%export%' OR message LIKE '%backup%')
|
||||
AND timestamp >= ?
|
||||
AND user_id IS NOT NULL
|
||||
GROUP BY user_id, ip_address
|
||||
HAVING download_count >= ?`,
|
||||
[startTime, downloadThreshold],
|
||||
async (err, rows) => {
|
||||
if (err) {
|
||||
reject(err);
|
||||
return;
|
||||
}
|
||||
|
||||
for (const row of rows) {
|
||||
await this.createAnomaly({
|
||||
type: 'data_exfiltration',
|
||||
severity: 'high',
|
||||
description: `Potential data exfiltration: User ${row.user_id} performed ${row.download_count} download/export operations in ${timeWindow} minutes`,
|
||||
confidence: 0.8,
|
||||
affected_user_id: row.user_id,
|
||||
affected_ip: row.ip_address,
|
||||
pattern_data: JSON.stringify({
|
||||
downloadCount: row.download_count,
|
||||
timeWindow: `${timeWindow} minutes`
|
||||
}),
|
||||
related_logs: row.log_ids
|
||||
});
|
||||
}
|
||||
|
||||
resolve(rows.length);
|
||||
}
|
||||
);
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Detect session anomalies
|
||||
*/
|
||||
async detectSessionAnomalies() {
|
||||
const timeWindow = 24; // hours
|
||||
const startTime = new Date(Date.now() - timeWindow * 60 * 60 * 1000).toISOString();
|
||||
|
||||
// Approximate impossible-travel detection: flag users who log in from many distinct IP addresses within the window
|
||||
return new Promise((resolve, reject) => {
|
||||
db.all(
|
||||
`SELECT user_id,
|
||||
COUNT(DISTINCT ip_address) as unique_ips,
|
||||
GROUP_CONCAT(DISTINCT ip_address) as ips,
|
||||
COUNT(*) as session_count
|
||||
FROM aggregated_logs
|
||||
WHERE category = 'authentication'
|
||||
AND message LIKE '%login%success%'
|
||||
AND timestamp >= ?
|
||||
AND user_id IS NOT NULL
|
||||
GROUP BY user_id
|
||||
HAVING unique_ips >= 5`,
|
||||
[startTime],
|
||||
async (err, rows) => {
|
||||
if (err) {
|
||||
reject(err);
|
||||
return;
|
||||
}
|
||||
|
||||
for (const row of rows) {
|
||||
await this.createAnomaly({
|
||||
type: 'session_anomaly',
|
||||
severity: 'medium',
|
||||
description: `Session anomaly: User ${row.user_id} logged in from ${row.unique_ips} different IP addresses in ${timeWindow} hours`,
|
||||
confidence: 0.7,
|
||||
affected_user_id: row.user_id,
|
||||
pattern_data: JSON.stringify({
|
||||
uniqueIPs: row.unique_ips,
|
||||
ipAddresses: row.ips.split(','),
|
||||
sessionCount: row.session_count,
|
||||
timeWindow: `${timeWindow} hours`
|
||||
}),
|
||||
related_logs: null
|
||||
});
|
||||
}
|
||||
|
||||
resolve(rows.length);
|
||||
}
|
||||
);
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Detect rate limit abuse
|
||||
*/
|
||||
async detectRateLimitAbuse() {
|
||||
const timeWindow = 15; // minutes
|
||||
const startTime = new Date(Date.now() - timeWindow * 60 * 1000).toISOString();
|
||||
|
||||
return new Promise((resolve, reject) => {
|
||||
db.all(
|
||||
`SELECT ip_address,
|
||||
COUNT(*) as blocked_count,
|
||||
GROUP_CONCAT(log_id) as log_ids
|
||||
FROM aggregated_logs
|
||||
WHERE category = 'system'
|
||||
AND message LIKE '%rate limit%'
|
||||
AND timestamp >= ?
|
||||
AND ip_address IS NOT NULL
|
||||
GROUP BY ip_address
|
||||
HAVING blocked_count >= 5`,
|
||||
[startTime],
|
||||
async (err, rows) => {
|
||||
if (err) {
|
||||
reject(err);
|
||||
return;
|
||||
}
|
||||
|
||||
for (const row of rows) {
|
||||
await this.createAnomaly({
|
||||
type: 'rate_limit_abuse',
|
||||
severity: 'medium',
|
||||
description: `Rate limit abuse: IP ${row.ip_address} was rate-limited ${row.blocked_count} times in ${timeWindow} minutes`,
|
||||
confidence: 0.9,
|
||||
affected_ip: row.ip_address,
|
||||
pattern_data: JSON.stringify({
|
||||
blockedCount: row.blocked_count,
|
||||
timeWindow: `${timeWindow} minutes`
|
||||
}),
|
||||
related_logs: row.log_ids
|
||||
});
|
||||
|
||||
await this.addThreatIndicator(row.ip_address, 'ip', 'medium', 'Rate limit abuse');
|
||||
}
|
||||
|
||||
resolve(rows.length);
|
||||
}
|
||||
);
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Create anomaly record
|
||||
*/
|
||||
async createAnomaly(details) {
|
||||
const anomalyId = `ANOM-${Date.now()}-${Math.random().toString(36).substr(2, 9)}`;
|
||||
|
||||
// Check if similar anomaly exists (deduplication)
|
||||
const existing = await this.findSimilarAnomaly(details);
|
||||
if (existing) {
|
||||
logger.debug(`[SecurityIntelligence] Similar anomaly exists: ${existing.anomaly_id}`);
|
||||
return existing.anomaly_id;
|
||||
}
|
||||
|
||||
return new Promise((resolve, reject) => {
|
||||
db.run(
|
||||
`INSERT INTO security_anomalies
|
||||
(anomaly_id, type, severity, description, confidence, affected_user_id, affected_ip, pattern_data, related_logs)
|
||||
VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?)`,
|
||||
[
|
||||
anomalyId,
|
||||
details.type,
|
||||
details.severity,
|
||||
details.description,
|
||||
details.confidence,
|
||||
details.affected_user_id || null,
|
||||
details.affected_ip || null,
|
||||
details.pattern_data,
|
||||
details.related_logs
|
||||
],
|
||||
(err) => {
|
||||
if (err) {
|
||||
reject(err);
|
||||
} else {
|
||||
logger.warn(`[SecurityIntelligence] Anomaly detected: ${details.type} - ${details.severity} - ${details.description}`);
|
||||
|
||||
// Log to aggregated logs as well
|
||||
logAggregator.aggregate('security_intelligence', 'warn', 'security', details.description, {
|
||||
anomalyId,
|
||||
type: details.type,
|
||||
severity: details.severity,
|
||||
confidence: details.confidence
|
||||
});
|
||||
|
||||
resolve(anomalyId);
|
||||
}
|
||||
}
|
||||
);
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Find similar anomaly (deduplication)
|
||||
*/
|
||||
async findSimilarAnomaly(details) {
|
||||
const recentTime = new Date(Date.now() - 10 * 60 * 1000).toISOString(); // Last 10 minutes
|
||||
|
||||
return new Promise((resolve, reject) => {
|
||||
db.get(
|
||||
`SELECT * FROM security_anomalies
|
||||
WHERE type = ?
|
||||
AND severity = ?
|
||||
AND (affected_user_id = ? OR affected_ip = ?)
|
||||
AND status = 'open'
|
||||
AND created_at >= ?
|
||||
ORDER BY created_at DESC
|
||||
LIMIT 1`,
|
||||
[details.type, details.severity, details.affected_user_id, details.affected_ip, recentTime],
|
||||
(err, row) => {
|
||||
if (err) reject(err);
|
||||
else resolve(row);
|
||||
}
|
||||
);
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Add threat indicator to intelligence database
|
||||
*/
|
||||
async addThreatIndicator(indicator, type, level, description) {
|
||||
return new Promise((resolve, reject) => {
|
||||
db.run(
|
||||
`INSERT INTO threat_intelligence (indicator, indicator_type, threat_level, description, confidence, source)
|
||||
VALUES (?, ?, ?, ?, 0.8, 'internal_detection')
|
||||
ON CONFLICT(indicator, indicator_type) DO UPDATE SET
|
||||
last_seen = CURRENT_TIMESTAMP,
|
||||
occurrence_count = occurrence_count + 1,
|
||||
threat_level = ?,
|
||||
description = ?`,
|
||||
[indicator, type, level, description, level, description],
|
||||
(err) => {
|
||||
if (err) reject(err);
|
||||
else {
|
||||
logger.info(`[SecurityIntelligence] Threat indicator added: ${type}=${indicator} (${level})`);
|
||||
resolve();
|
||||
}
|
||||
}
|
||||
);
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Calculate overall threat score (0-100)
|
||||
*/
|
||||
async calculateThreatScore() {
|
||||
return new Promise((resolve, reject) => {
|
||||
db.get(
|
||||
`SELECT
|
||||
COUNT(CASE WHEN severity = 'critical' AND status = 'open' THEN 1 END) as critical_count,
|
||||
COUNT(CASE WHEN severity = 'high' AND status = 'open' THEN 1 END) as high_count,
|
||||
COUNT(CASE WHEN severity = 'medium' AND status = 'open' THEN 1 END) as medium_count,
|
||||
COUNT(CASE WHEN severity = 'low' AND status = 'open' THEN 1 END) as low_count
|
||||
FROM security_anomalies
|
||||
WHERE created_at >= datetime('now', '-24 hours')`,
|
||||
[],
|
||||
(err, row) => {
|
||||
if (err) {
|
||||
reject(err);
|
||||
return;
|
||||
}
|
||||
|
||||
// Weight severity levels
|
||||
const score = Math.min(
|
||||
(row.critical_count * 40) +
|
||||
(row.high_count * 20) +
|
||||
(row.medium_count * 10) +
|
||||
(row.low_count * 5),
|
||||
100
|
||||
);
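// Worked example: 1 open critical + 2 high + 3 medium anomalies in the last
// 24 hours gives 40 + 40 + 30 = 110, capped at 100 (critical threat level).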
|
||||
|
||||
this.threatScore = score;
|
||||
|
||||
if (score >= 80) {
|
||||
logger.error(`[SecurityIntelligence] CRITICAL THREAT LEVEL: ${score}/100`);
|
||||
} else if (score >= 50) {
|
||||
logger.warn(`[SecurityIntelligence] HIGH THREAT LEVEL: ${score}/100`);
|
||||
} else if (score >= 20) {
|
||||
logger.info(`[SecurityIntelligence] MEDIUM THREAT LEVEL: ${score}/100`);
|
||||
} else {
|
||||
logger.debug(`[SecurityIntelligence] LOW THREAT LEVEL: ${score}/100`);
|
||||
}
|
||||
|
||||
resolve(score);
|
||||
}
|
||||
);
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Get active anomalies
|
||||
*/
|
||||
async getAnomalies(filters = {}) {
|
||||
const {
|
||||
status = 'open',
|
||||
severity,
|
||||
type,
|
||||
limit = 100,
|
||||
offset = 0
|
||||
} = filters;
|
||||
|
||||
let whereClause = ['status = ?'];
|
||||
let params = [status];
|
||||
|
||||
if (severity) {
|
||||
whereClause.push('severity = ?');
|
||||
params.push(severity);
|
||||
}
|
||||
|
||||
if (type) {
|
||||
whereClause.push('type = ?');
|
||||
params.push(type);
|
||||
}
|
||||
|
||||
params.push(limit, offset);
|
||||
|
||||
return new Promise((resolve, reject) => {
|
||||
db.all(
|
||||
`SELECT * FROM security_anomalies
|
||||
WHERE ${whereClause.join(' AND ')}
|
||||
ORDER BY created_at DESC
|
||||
LIMIT ? OFFSET ?`,
|
||||
params,
|
||||
(err, rows) => {
|
||||
if (err) reject(err);
|
||||
else resolve(rows);
|
||||
}
|
||||
);
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Resolve anomaly
|
||||
*/
|
||||
async resolveAnomaly(anomalyId, resolvedBy, notes) {
|
||||
return new Promise((resolve, reject) => {
|
||||
db.run(
|
||||
`UPDATE security_anomalies
|
||||
SET status = 'resolved',
|
||||
resolved_at = CURRENT_TIMESTAMP,
|
||||
resolved_by = ?,
|
||||
resolution_notes = ?
|
||||
WHERE anomaly_id = ?`,
|
||||
[resolvedBy, notes, anomalyId],
|
||||
(err) => {
|
||||
if (err) reject(err);
|
||||
else {
|
||||
logger.info(`[SecurityIntelligence] Anomaly resolved: ${anomalyId} by user ${resolvedBy}`);
|
||||
resolve();
|
||||
}
|
||||
}
|
||||
);
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Get threat intelligence
|
||||
*/
|
||||
async getThreatIntelligence(filters = {}) {
|
||||
const { level, type, limit = 100 } = filters;
|
||||
|
||||
let whereClause = [];
|
||||
let params = [];
|
||||
|
||||
if (level) {
|
||||
whereClause.push('threat_level = ?');
|
||||
params.push(level);
|
||||
}
|
||||
|
||||
if (type) {
|
||||
whereClause.push('indicator_type = ?');
|
||||
params.push(type);
|
||||
}
|
||||
|
||||
const where = whereClause.length > 0 ? `WHERE ${whereClause.join(' AND ')}` : '';
|
||||
params.push(limit);
|
||||
|
||||
return new Promise((resolve, reject) => {
|
||||
db.all(
|
||||
`SELECT * FROM threat_intelligence ${where}
|
||||
ORDER BY last_seen DESC, occurrence_count DESC
|
||||
LIMIT ?`,
|
||||
params,
|
||||
(err, rows) => {
|
||||
if (err) reject(err);
|
||||
else resolve(rows);
|
||||
}
|
||||
);
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Get security intelligence dashboard data
|
||||
*/
|
||||
async getDashboardData() {
|
||||
const [anomalies, threats, score] = await Promise.all([
|
||||
this.getAnomalies({ status: 'open', limit: 50 }),
|
||||
this.getThreatIntelligence({ limit: 20 }),
|
||||
this.calculateThreatScore()
|
||||
]);
|
||||
|
||||
const anomalyStats = {
|
||||
critical: anomalies.filter(a => a.severity === 'critical').length,
|
||||
high: anomalies.filter(a => a.severity === 'high').length,
|
||||
medium: anomalies.filter(a => a.severity === 'medium').length,
|
||||
low: anomalies.filter(a => a.severity === 'low').length
|
||||
};
|
||||
|
||||
return {
|
||||
threatScore: score,
|
||||
anomalies: anomalies.slice(0, 10),
|
||||
anomalyStats,
|
||||
threats: threats.slice(0, 10),
|
||||
timestamp: new Date().toISOString()
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
// Create singleton instance
|
||||
const securityIntelligence = new SecurityIntelligence();
|
||||
|
||||
module.exports = securityIntelligence;
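// Usage sketch (illustrative; the route path is an assumption, not part of this
// commit):
//
//   router.get('/api/admin/security/intelligence', async (req, res) => {
//     const data = await securityIntelligence.getDashboardData();
//     // data: { threatScore, anomalies, anomalyStats, threats, timestamp }
//     res.json(data);
//   });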
|
||||
481
backend/utils/thresholdManager.js
Normal file
|
|
@@ -0,0 +1,481 @@
|
|||
/**
|
||||
* Threshold Manager
|
||||
* Configurable notification thresholds for security threat detection
|
||||
* CWE-778 Compliance: Logs all threshold configurations and evaluations
|
||||
*/
|
||||
|
||||
const logger = require('./logger');
|
||||
const logAggregator = require('./logAggregator');
|
||||
const { db } = require('../database/db');
|
||||
|
||||
class ThresholdManager {
|
||||
constructor() {
|
||||
this.thresholds = new Map();
|
||||
this.initialize();
|
||||
}
|
||||
|
||||
/**
|
||||
* Initialize threshold manager
|
||||
*/
|
||||
async initialize() {
|
||||
await this.createThresholdsTable();
|
||||
await this.loadThresholds();
|
||||
|
||||
logger.info('[ThresholdManager] Initialized with configurable thresholds');
|
||||
|
||||
// Log initialization (CWE-778)
|
||||
logAggregator.aggregate('threshold_manager', 'info', 'security', 'Threshold manager initialized', {
|
||||
totalThresholds: this.thresholds.size
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Create thresholds table
|
||||
*/
|
||||
async createThresholdsTable() {
|
||||
return new Promise((resolve, reject) => {
|
||||
db.run(`
|
||||
CREATE TABLE IF NOT EXISTS security_thresholds (
|
||||
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
||||
threshold_id TEXT UNIQUE NOT NULL,
|
||||
name TEXT NOT NULL,
|
||||
description TEXT,
|
||||
pattern_type TEXT NOT NULL,
|
||||
metric_name TEXT NOT NULL,
|
||||
operator TEXT NOT NULL,
|
||||
threshold_value INTEGER NOT NULL,
|
||||
time_window_minutes INTEGER DEFAULT 30,
|
||||
severity TEXT NOT NULL,
|
||||
enabled INTEGER DEFAULT 1,
|
||||
created_at DATETIME DEFAULT CURRENT_TIMESTAMP,
|
||||
updated_at DATETIME DEFAULT CURRENT_TIMESTAMP
|
||||
)
|
||||
`, async (err) => {
|
||||
if (err) reject(err);
|
||||
else {
|
||||
db.run(`CREATE INDEX IF NOT EXISTS idx_thresholds_pattern ON security_thresholds(pattern_type, enabled)`);
|
||||
db.run(`CREATE INDEX IF NOT EXISTS idx_thresholds_enabled ON security_thresholds(enabled)`);
|
||||
await this.createDefaultThresholds();
|
||||
resolve();
|
||||
}
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Create default thresholds for common security patterns
|
||||
*/
|
||||
async createDefaultThresholds() {
|
||||
const defaultThresholds = [
|
||||
{
|
||||
threshold_id: 'THRESHOLD-BRUTE-FORCE',
|
||||
name: 'Brute Force Attack Threshold',
|
||||
description: 'Alert when failed login attempts exceed threshold',
|
||||
pattern_type: 'brute_force_attack',
|
||||
metric_name: 'failed_login_count',
|
||||
operator: '>=',
|
||||
threshold_value: 5,
|
||||
time_window_minutes: 10,
|
||||
severity: 'critical'
|
||||
},
|
||||
{
|
||||
threshold_id: 'THRESHOLD-CREDENTIAL-STUFFING',
|
||||
name: 'Credential Stuffing Threshold',
|
||||
description: 'Alert on multiple username attempts from same IP',
|
||||
pattern_type: 'credential_stuffing',
|
||||
metric_name: 'unique_username_count',
|
||||
operator: '>=',
|
||||
threshold_value: 5,
|
||||
time_window_minutes: 5,
|
||||
severity: 'critical'
|
||||
},
|
||||
{
|
||||
threshold_id: 'THRESHOLD-PRIVILEGE-ESC',
|
||||
name: 'Privilege Escalation Threshold',
|
||||
description: 'Alert on repeated unauthorized access attempts',
|
||||
pattern_type: 'privilege_escalation',
|
||||
metric_name: 'escalation_attempt_count',
|
||||
operator: '>=',
|
||||
threshold_value: 3,
|
||||
time_window_minutes: 30,
|
||||
severity: 'critical'
|
||||
},
|
||||
{
|
||||
threshold_id: 'THRESHOLD-SUSPICIOUS-IP',
|
||||
name: 'Suspicious IP Activity Threshold',
|
||||
description: 'Alert on excessive requests from single IP',
|
||||
pattern_type: 'suspicious_ip',
|
||||
metric_name: 'request_count',
|
||||
operator: '>=',
|
||||
threshold_value: 100,
|
||||
time_window_minutes: 15,
|
||||
severity: 'high'
|
||||
},
|
||||
{
|
||||
threshold_id: 'THRESHOLD-DATA-EXFIL',
|
||||
name: 'Data Exfiltration Threshold',
|
||||
description: 'Alert on excessive data downloads',
|
||||
pattern_type: 'data_exfiltration',
|
||||
metric_name: 'download_count',
|
||||
operator: '>=',
|
||||
threshold_value: 10,
|
||||
time_window_minutes: 60,
|
||||
severity: 'high'
|
||||
},
|
||||
{
|
||||
threshold_id: 'THRESHOLD-SESSION-ANOMALY',
|
||||
name: 'Session Anomaly Threshold',
|
||||
description: 'Alert on unusual session patterns',
|
||||
pattern_type: 'session_anomaly',
|
||||
metric_name: 'anomaly_score',
|
||||
operator: '>=',
|
||||
threshold_value: 70,
|
||||
time_window_minutes: 30,
|
||||
severity: 'medium'
|
||||
},
|
||||
{
|
||||
threshold_id: 'THRESHOLD-IMPOSSIBLE-TRAVEL',
|
||||
name: 'Impossible Travel Threshold',
|
||||
description: 'Alert on geographically impossible travel speed',
|
||||
pattern_type: 'impossible_travel',
|
||||
metric_name: 'travel_speed_kmh',
|
||||
operator: '>=',
|
||||
threshold_value: 800,
|
||||
time_window_minutes: 60,
|
||||
severity: 'high'
|
||||
},
|
||||
{
|
||||
threshold_id: 'THRESHOLD-THREAT-SCORE',
|
||||
name: 'Critical Threat Score Threshold',
|
||||
description: 'Alert when overall threat score is critical',
|
||||
pattern_type: 'threat_score',
|
||||
metric_name: 'threat_score',
|
||||
operator: '>=',
|
||||
threshold_value: 80,
|
||||
time_window_minutes: 60,
|
||||
severity: 'critical'
|
||||
}
|
||||
];
|
||||
|
||||
for (const threshold of defaultThresholds) {
|
||||
await new Promise((resolve, reject) => {
|
||||
db.run(
|
||||
`INSERT OR IGNORE INTO security_thresholds
|
||||
(threshold_id, name, description, pattern_type, metric_name, operator, threshold_value, time_window_minutes, severity)
|
||||
VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?)`,
|
||||
[
|
||||
threshold.threshold_id,
|
||||
threshold.name,
|
||||
threshold.description,
|
||||
threshold.pattern_type,
|
||||
threshold.metric_name,
|
||||
threshold.operator,
|
||||
threshold.threshold_value,
|
||||
threshold.time_window_minutes,
|
||||
threshold.severity
|
||||
],
|
||||
(err) => {
|
||||
if (err) reject(err);
|
||||
else resolve();
|
||||
}
|
||||
);
|
||||
});
|
||||
}
|
||||
|
||||
logger.info(`[ThresholdManager] Created ${defaultThresholds.length} default thresholds`);
|
||||
}
|
||||
|
||||
/**
|
||||
* Load thresholds from database into memory
|
||||
*/
|
||||
async loadThresholds() {
|
||||
return new Promise((resolve, reject) => {
|
||||
db.all(
|
||||
`SELECT * FROM security_thresholds WHERE enabled = 1`,
|
||||
[],
|
||||
(err, rows) => {
|
||||
if (err) {
|
||||
reject(err);
|
||||
} else {
|
||||
this.thresholds.clear();
|
||||
rows.forEach(row => {
|
||||
this.thresholds.set(row.threshold_id, row);
|
||||
});
|
||||
logger.info(`[ThresholdManager] Loaded ${rows.length} active thresholds`);
|
||||
resolve();
|
||||
}
|
||||
}
|
||||
);
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Evaluate if a metric value exceeds threshold
|
||||
* CWE-778: Logs all threshold evaluations
|
||||
*/
|
||||
async evaluateThreshold(patternType, metricName, value, context = {}) {
|
||||
const matchingThresholds = Array.from(this.thresholds.values()).filter(
|
||||
t => t.pattern_type === patternType && t.metric_name === metricName
|
||||
);
|
||||
|
||||
if (matchingThresholds.length === 0) {
|
||||
return { exceeded: false, thresholds: [] };
|
||||
}
|
||||
|
||||
const exceededThresholds = [];
|
||||
|
||||
for (const threshold of matchingThresholds) {
|
||||
const exceeded = this.compareValue(value, threshold.operator, threshold.threshold_value);
|
||||
|
||||
// Log threshold evaluation (CWE-778)
|
||||
logAggregator.aggregate('threshold_manager', 'info', 'security', 'Threshold evaluated', {
|
||||
thresholdId: threshold.threshold_id,
|
||||
patternType,
|
||||
metricName,
|
||||
value,
|
||||
operator: threshold.operator,
|
||||
thresholdValue: threshold.threshold_value,
|
||||
exceeded,
|
||||
severity: threshold.severity,
|
||||
context
|
||||
});
|
||||
|
||||
if (exceeded) {
|
||||
exceededThresholds.push({
|
||||
...threshold,
|
||||
actualValue: value,
|
||||
context
|
||||
});
|
||||
|
||||
logger.warn(`[ThresholdManager] Threshold exceeded: ${threshold.name} (${value} ${threshold.operator} ${threshold.threshold_value})`);
|
||||
}
|
||||
}
|
||||
|
||||
return {
|
||||
exceeded: exceededThresholds.length > 0,
|
||||
thresholds: exceededThresholds
|
||||
};
|
||||
}
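// Example: with the default THRESHOLD-BRUTE-FORCE rule loaded (>= 5 failures),
// evaluateThreshold('brute_force_attack', 'failed_login_count', 7,
// { ip_address: '203.0.113.5' }) resolves to { exceeded: true, thresholds: [...] }.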
|
||||
|
||||
/**
|
||||
* Compare value against threshold using operator
|
||||
*/
|
||||
compareValue(value, operator, threshold) {
|
||||
switch (operator) {
|
||||
case '>=': return value >= threshold;
|
||||
case '>': return value > threshold;
|
||||
case '<=': return value <= threshold;
|
||||
case '<': return value < threshold;
|
||||
case '==': return value == threshold;
|
||||
case '!=': return value != threshold;
|
||||
default: return false;
|
||||
}
|
||||
}
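// Examples: compareValue(12, '>=', 10) -> true; compareValue(5, '<', 5) -> false;
// an unknown operator always returns false.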
|
||||
|
||||
/**
|
||||
* Get all thresholds
|
||||
*/
|
||||
async getThresholds(filters = {}) {
|
||||
const { patternType, enabled, limit = 100 } = filters;
|
||||
|
||||
let whereClause = [];
|
||||
let params = [];
|
||||
|
||||
if (patternType) {
|
||||
whereClause.push('pattern_type = ?');
|
||||
params.push(patternType);
|
||||
}
|
||||
|
||||
if (enabled !== undefined) {
|
||||
whereClause.push('enabled = ?');
|
||||
params.push(enabled ? 1 : 0);
|
||||
}
|
||||
|
||||
const where = whereClause.length > 0 ? `WHERE ${whereClause.join(' AND ')}` : '';
|
||||
params.push(limit);
|
||||
|
||||
return new Promise((resolve, reject) => {
|
||||
db.all(
|
||||
`SELECT * FROM security_thresholds ${where}
|
||||
ORDER BY pattern_type, threshold_value DESC
|
||||
LIMIT ?`,
|
||||
params,
|
||||
(err, rows) => {
|
||||
if (err) reject(err);
|
||||
else resolve(rows);
|
||||
}
|
||||
);
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Get threshold by ID
|
||||
*/
|
||||
async getThresholdById(thresholdId) {
|
||||
return new Promise((resolve, reject) => {
|
||||
db.get(
|
||||
`SELECT * FROM security_thresholds WHERE threshold_id = ?`,
|
||||
[thresholdId],
|
||||
(err, row) => {
|
||||
if (err) reject(err);
|
||||
else resolve(row);
|
||||
}
|
||||
);
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Create new threshold
|
||||
* CWE-778: Logs threshold creation
|
||||
*/
|
||||
async createThreshold(data, userId) {
|
||||
const thresholdId = `THRESHOLD-${Date.now()}-${Math.random().toString(36).substr(2, 9).toUpperCase()}`;
|
||||
|
||||
return new Promise((resolve, reject) => {
|
||||
db.run(
|
||||
`INSERT INTO security_thresholds
|
||||
(threshold_id, name, description, pattern_type, metric_name, operator, threshold_value, time_window_minutes, severity, enabled)
|
||||
VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?)`,
|
||||
[
|
||||
thresholdId,
|
||||
data.name,
|
||||
data.description || '',
|
||||
data.pattern_type,
|
||||
data.metric_name,
|
||||
data.operator,
|
||||
data.threshold_value,
|
||||
data.time_window_minutes || 30,
|
||||
data.severity,
|
||||
data.enabled !== undefined ? (data.enabled ? 1 : 0) : 1
|
||||
],
|
||||
async (err) => {
|
||||
if (err) {
|
||||
reject(err);
|
||||
} else {
|
||||
await this.loadThresholds();
|
||||
|
||||
// Log threshold creation (CWE-778)
|
||||
logAggregator.aggregate('threshold_manager', 'info', 'security', 'Threshold created', {
|
||||
thresholdId,
|
||||
userId,
|
||||
name: data.name,
|
||||
patternType: data.pattern_type,
|
||||
metricName: data.metric_name,
|
||||
thresholdValue: data.threshold_value,
|
||||
severity: data.severity
|
||||
});
|
||||
|
||||
logger.info(`[ThresholdManager] Threshold created: ${thresholdId} by user ${userId}`);
|
||||
resolve({ thresholdId });
|
||||
}
|
||||
}
|
||||
);
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Update threshold
|
||||
* CWE-778: Logs threshold modifications
|
||||
*/
|
||||
async updateThreshold(thresholdId, updates, userId) {
|
||||
const allowedFields = ['name', 'description', 'operator', 'threshold_value', 'time_window_minutes', 'severity', 'enabled'];
|
||||
const setClause = [];
|
||||
const params = [];
|
||||
|
||||
for (const [key, value] of Object.entries(updates)) {
|
||||
if (allowedFields.includes(key)) {
|
||||
setClause.push(`${key} = ?`);
|
||||
params.push(key === 'enabled' ? (value ? 1 : 0) : value);
|
||||
}
|
||||
}
|
||||
|
||||
if (setClause.length === 0) {
|
||||
throw new Error('No valid fields to update');
|
||||
}
|
||||
|
||||
setClause.push('updated_at = CURRENT_TIMESTAMP');
|
||||
params.push(thresholdId);
|
||||
|
||||
return new Promise((resolve, reject) => {
|
||||
db.run(
|
||||
`UPDATE security_thresholds
|
||||
SET ${setClause.join(', ')}
|
||||
WHERE threshold_id = ?`,
|
||||
params,
|
||||
async (err) => {
|
||||
if (err) {
|
||||
reject(err);
|
||||
} else {
|
||||
await this.loadThresholds();
|
||||
|
||||
// Log threshold update (CWE-778)
|
||||
logAggregator.aggregate('threshold_manager', 'info', 'security', 'Threshold updated', {
|
||||
thresholdId,
|
||||
userId,
|
||||
updates
|
||||
});
|
||||
|
||||
logger.info(`[ThresholdManager] Threshold updated: ${thresholdId} by user ${userId}`);
|
||||
resolve({ success: true });
|
||||
}
|
||||
}
|
||||
);
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Delete threshold
|
||||
* CWE-778: Logs threshold deletion
|
||||
*/
|
||||
async deleteThreshold(thresholdId, userId) {
|
||||
return new Promise((resolve, reject) => {
|
||||
db.run(
|
||||
`DELETE FROM security_thresholds WHERE threshold_id = ?`,
|
||||
[thresholdId],
|
||||
async (err) => {
|
||||
if (err) {
|
||||
reject(err);
|
||||
} else {
|
||||
await this.loadThresholds();
|
||||
|
||||
// Log threshold deletion (CWE-778)
|
||||
logAggregator.aggregate('threshold_manager', 'warn', 'security', 'Threshold deleted', {
|
||||
thresholdId,
|
||||
userId
|
||||
});
|
||||
|
||||
logger.info(`[ThresholdManager] Threshold deleted: ${thresholdId} by user ${userId}`);
|
||||
resolve({ success: true });
|
||||
}
|
||||
}
|
||||
);
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Get threshold statistics
|
||||
*/
|
||||
async getStatistics() {
|
||||
return new Promise((resolve, reject) => {
|
||||
db.get(
|
||||
`SELECT
|
||||
COUNT(*) as total,
|
||||
SUM(CASE WHEN enabled = 1 THEN 1 ELSE 0 END) as enabled,
|
||||
SUM(CASE WHEN enabled = 0 THEN 1 ELSE 0 END) as disabled,
|
||||
COUNT(DISTINCT pattern_type) as unique_patterns,
|
||||
COUNT(DISTINCT severity) as unique_severities
|
||||
FROM security_thresholds`,
|
||||
[],
|
||||
(err, row) => {
|
||||
if (err) reject(err);
|
||||
else resolve(row);
|
||||
}
|
||||
);
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
// Create singleton instance
|
||||
const thresholdManager = new ThresholdManager();
|
||||
|
||||
module.exports = thresholdManager;
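// Usage sketch (illustrative; the field values are assumptions):
//
//   await thresholdManager.createThreshold({
//     name: 'Rate limit abuse threshold',
//     pattern_type: 'rate_limit_abuse',
//     metric_name: 'blocked_count',
//     operator: '>=',
//     threshold_value: 5,
//     time_window_minutes: 15,
//     severity: 'medium'
//   }, adminUserId);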
|
||||
245
backend/utils/vpnDiagnostics.js
Normal file
|
|
@@ -0,0 +1,245 @@
|
|||
const axios = require('axios');
|
||||
const { exec } = require('child_process');
|
||||
const { promisify } = require('util');
|
||||
const fs = require('fs').promises;
|
||||
|
||||
const execAsync = promisify(exec);
|
||||
|
||||
/**
|
||||
* VPN Diagnostics - Check for IP and DNS leaks
|
||||
*/
|
||||
class VPNDiagnostics {
|
||||
/**
|
||||
* Get current public IP address
|
||||
*/
|
||||
static async getPublicIP() {
|
||||
try {
|
||||
const response = await axios.get('https://api.ipify.org?format=json', {
|
||||
timeout: 10000,
|
||||
validateStatus: () => true
|
||||
});
|
||||
return response.data.ip;
|
||||
} catch (error) {
|
||||
throw new Error(`Failed to get public IP: ${error.message}`);
|
||||
}
|
||||
}
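// Note: because validateStatus always accepts, getPublicIP() does not throw on
// HTTP error responses; response.data.ip may be undefined if the lookup
// service returns an error body.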
|
||||
|
||||
/**
|
||||
* Get detailed IP information (location, ISP, etc.)
|
||||
*/
|
||||
static async getIPInfo(ip) {
|
||||
try {
|
||||
const response = await axios.get(`https://ipinfo.io/${ip || ''}/json`, {
|
||||
timeout: 10000,
|
||||
validateStatus: () => true
|
||||
});
|
||||
return response.data;
|
||||
} catch (error) {
|
||||
console.warn('Could not get IP info:', error.message);
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Get current DNS servers from resolv.conf
|
||||
*/
|
||||
static async getDNSServers() {
|
||||
try {
|
||||
const resolvConf = await fs.readFile('/etc/resolv.conf', 'utf8');
|
||||
const nameservers = resolvConf
|
||||
.split('\n')
|
||||
.filter(line => line.trim().startsWith('nameserver'))
|
||||
.map(line => line.split(/\s+/)[1]);
|
||||
|
||||
return nameservers;
|
||||
} catch (error) {
|
||||
throw new Error(`Failed to read DNS servers: ${error.message}`);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Check if VPN interface (tun) exists
|
||||
*/
|
||||
static async checkVPNInterface() {
|
||||
try {
|
||||
const { stdout } = await execAsync('ip addr show tun0 2>/dev/null');
|
||||
return {
|
||||
exists: true,
|
||||
details: stdout.trim()
|
||||
};
|
||||
} catch (error) {
|
||||
return {
|
||||
exists: false,
|
||||
details: null
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Get routing table
|
||||
*/
|
||||
static async getRoutingTable() {
|
||||
try {
|
||||
const { stdout } = await execAsync('ip route');
|
||||
return stdout.trim().split('\n');
|
||||
} catch (error) {
|
||||
throw new Error(`Failed to get routing table: ${error.message}`);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Test DNS resolution through current DNS servers
|
||||
*/
|
||||
static async testDNSResolution(domain = 'google.com') {
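// Note: `domain` is interpolated directly into a shell command below, so
// callers should only pass trusted or validated hostnames.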
|
||||
try {
|
||||
const { stdout } = await execAsync(`nslookup ${domain} | grep -A1 "Name:" | tail -1`);
|
||||
return {
|
||||
success: true,
|
||||
result: stdout.trim()
|
||||
};
|
||||
} catch (error) {
|
||||
return {
|
||||
success: false,
|
||||
error: error.message
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Check for DNS leaks by testing against multiple DNS leak test services
|
||||
*/
|
||||
static async checkDNSLeaks() {
|
||||
const dnsServers = await this.getDNSServers();
|
||||
|
||||
// Check if DNS servers are common VPN DNS servers
|
||||
const commonVPNDNS = ['10.2.0.1', '10.2.0.2', '10.8.0.1', '10.0.0.1'];
|
||||
const isUsingVPNDNS = dnsServers.some(dns => commonVPNDNS.includes(dns) || dns.startsWith('10.'));
|
||||
|
||||
// Check if DNS servers match common public DNS (potential leak)
|
||||
const commonPublicDNS = [
|
||||
'8.8.8.8', '8.8.4.4', // Google
|
||||
'1.1.1.1', '1.0.0.1', // Cloudflare
|
||||
'9.9.9.9', // Quad9
|
||||
];
|
||||
const isUsingPublicDNS = dnsServers.some(dns => commonPublicDNS.includes(dns));
|
||||
|
||||
return {
|
||||
dnsServers,
|
||||
isUsingVPNDNS,
|
||||
isUsingPublicDNS,
|
||||
potentialLeak: !isUsingVPNDNS && isUsingPublicDNS
|
||||
};
|
||||
}
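// Example: a lone nameserver of 10.2.0.1 yields isUsingVPNDNS = true and
// potentialLeak = false; only 8.8.8.8 yields isUsingVPNDNS = false,
// isUsingPublicDNS = true and potentialLeak = true.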
|
||||
|
||||
/**
|
||||
* Run comprehensive VPN diagnostics
|
||||
*/
|
||||
static async runFullDiagnostics() {
|
||||
console.log('🔍 Running VPN diagnostics...');
|
||||
|
||||
const results = {
|
||||
timestamp: new Date().toISOString(),
|
||||
publicIP: null,
|
||||
ipInfo: null,
|
||||
dnsServers: [],
|
||||
vpnInterface: null,
|
||||
routingTable: [],
|
||||
dnsLeakCheck: null,
|
||||
summary: {
|
||||
vpnActive: false,
|
||||
ipLeak: false,
|
||||
dnsLeak: false,
|
||||
issues: []
|
||||
}
|
||||
};
|
||||
|
||||
try {
|
||||
// Get public IP
|
||||
results.publicIP = await this.getPublicIP();
|
||||
console.log(`✓ Public IP: ${results.publicIP}`);
|
||||
|
||||
// Get IP info
|
||||
results.ipInfo = await this.getIPInfo(results.publicIP);
|
||||
if (results.ipInfo) {
|
||||
console.log(`✓ Location: ${results.ipInfo.city}, ${results.ipInfo.country}`);
|
||||
console.log(`✓ ISP: ${results.ipInfo.org}`);
|
||||
}
|
||||
|
||||
// Get DNS servers
|
||||
results.dnsServers = await this.getDNSServers();
|
||||
console.log(`✓ DNS Servers: ${results.dnsServers.join(', ')}`);
|
||||
|
||||
// Check VPN interface
|
||||
results.vpnInterface = await this.checkVPNInterface();
|
||||
if (results.vpnInterface.exists) {
|
||||
console.log('✓ VPN interface (tun0) is UP');
|
||||
results.summary.vpnActive = true;
|
||||
} else {
|
||||
console.log('✗ VPN interface (tun0) NOT found');
|
||||
results.summary.issues.push('VPN interface not found');
|
||||
}
|
||||
|
||||
// Get routing table
|
||||
results.routingTable = await this.getRoutingTable();
|
||||
const defaultRoute = results.routingTable.find(route => route.startsWith('default'));
|
||||
if (defaultRoute) {
|
||||
console.log(`✓ Default route: ${defaultRoute}`);
|
||||
if (defaultRoute.includes('tun')) {
|
||||
console.log('✓ Traffic is routed through VPN');
|
||||
} else {
|
||||
console.log('✗ Traffic NOT routed through VPN');
|
||||
results.summary.ipLeak = true;
|
||||
results.summary.issues.push('Traffic not routed through VPN interface');
|
||||
}
|
||||
}
|
||||
|
||||
// DNS leak check
|
||||
results.dnsLeakCheck = await this.checkDNSLeaks();
|
||||
if (results.dnsLeakCheck.isUsingVPNDNS) {
|
||||
console.log('✓ Using VPN DNS servers');
|
||||
} else if (results.dnsLeakCheck.potentialLeak) {
|
||||
console.log('✗ Potential DNS leak detected');
|
||||
results.summary.dnsLeak = true;
|
||||
results.summary.issues.push('Using non-VPN DNS servers');
|
||||
}
|
||||
|
||||
// Overall status
|
||||
if (results.summary.vpnActive && !results.summary.ipLeak && !results.summary.dnsLeak) {
|
||||
console.log('\n✓ VPN is working correctly - No leaks detected');
|
||||
} else {
|
||||
console.log('\n✗ VPN issues detected:');
|
||||
results.summary.issues.forEach(issue => console.log(` - ${issue}`));
|
||||
}
|
||||
|
||||
} catch (error) {
|
||||
console.error('Error during diagnostics:', error);
|
||||
results.summary.issues.push(error.message);
|
||||
}
|
||||
|
||||
return results;
|
||||
}
|
||||
|
||||
/**
|
||||
* Quick check if VPN is working (used by status endpoint)
|
||||
*/
|
||||
static async quickCheck() {
|
||||
try {
|
||||
const [publicIP, dnsServers, vpnInterface] = await Promise.all([
|
||||
this.getPublicIP(),
|
||||
this.getDNSServers(),
|
||||
this.checkVPNInterface()
|
||||
]);
|
||||
|
||||
return {
|
||||
publicIP,
|
||||
dnsServers,
|
||||
vpnInterfaceActive: vpnInterface.exists,
|
||||
timestamp: new Date().toISOString()
|
||||
};
|
||||
} catch (error) {
|
||||
throw new Error(`Quick check failed: ${error.message}`);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = VPNDiagnostics;
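// Usage sketch (illustrative; the script path is an assumption):
//
//   // node backend/scripts/vpn-check.js
//   VPNDiagnostics.runFullDiagnostics()
//     .then(results => process.exit(results.summary.issues.length ? 1 : 0))
//     .catch(() => process.exit(1));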
|
||||