Initial commit: StreamFlow IPTV platform

This commit is contained in:
aiulian25 2025-12-17 00:42:43 +00:00
commit 73a8ae9ffd
1240 changed files with 278451 additions and 0 deletions

36
backend/.eslintrc.js Normal file
View file

@ -0,0 +1,36 @@
// ESLint configuration for the backend (Node, ES2021 globals).
// Pairs eslint:recommended with eslint-plugin-security to surface common
// injection / unsafe-API patterns at lint time.
module.exports = {
  env: {
    node: true,
    es2021: true,
  },
  // Order matters: later configs override earlier ones.
  extends: [
    'eslint:recommended',
    'plugin:security/recommended',
  ],
  plugins: ['security'],
  parserOptions: {
    ecmaVersion: 'latest',
    sourceType: 'module',
  },
  rules: {
    // Security rules
    // 'warn' entries are heuristic detectors with known false positives;
    // 'error' entries are patterns with no legitimate use here.
    'security/detect-object-injection': 'warn',
    'security/detect-non-literal-regexp': 'warn',
    'security/detect-unsafe-regex': 'error',
    'security/detect-buffer-noassert': 'error',
    'security/detect-child-process': 'warn',
    'security/detect-disable-mustache-escape': 'error',
    'security/detect-eval-with-expression': 'error',
    'security/detect-no-csrf-before-method-override': 'error',
    'security/detect-non-literal-fs-filename': 'warn',
    'security/detect-non-literal-require': 'warn',
    'security/detect-possible-timing-attacks': 'warn',
    'security/detect-pseudoRandomBytes': 'error',
    // Best practices
    'no-console': 'warn',
    'no-eval': 'error',
    'no-implied-eval': 'error',
    'no-new-func': 'error',
  },
};

390
backend/database/db.js Normal file
View file

@ -0,0 +1,390 @@
const sqlite3 = require('sqlite3').verbose();
const path = require('path');
const fs = require('fs');
const logger = require('../utils/logger');
// SQLite file location: DB_PATH env var wins, otherwise default to
// <repo>/data/streamflow.db relative to this file.
const DB_PATH = process.env.DB_PATH || path.join(__dirname, '../../data/streamflow.db');
// Ensure data directory exists
const dataDir = path.dirname(DB_PATH);
if (!fs.existsSync(dataDir)) {
  fs.mkdirSync(dataDir, { recursive: true, mode: 0o755 });
}
// Open (creating if missing) the shared connection. A connection error is
// logged but does not abort startup.
const db = new sqlite3.Database(DB_PATH, (err) => {
  if (err) {
    logger.error('Database connection error:', err);
  } else {
    logger.info('Connected to SQLite database');
  }
});
/**
 * Create or migrate every application table, seed RBAC roles, backfill
 * missing columns on existing installs, and create a default admin account
 * on first run.
 *
 * Safe to call on every startup: all DDL uses IF NOT EXISTS and the column
 * migrations consult PRAGMA table_info before issuing ALTER TABLE.
 * Fix vs. original: the default-admin and channels-migration paths used
 * console.log/console.error while the rest of this file (and the repo's
 * no-console lint rule) uses the shared logger — switched to logger.*.
 */
const initialize = () => {
  // Initialize RBAC roles first (must be before user creation)
  const { initializeRoles } = require('../middleware/rbac');
  initializeRoles();
  db.serialize(() => {
    // Users table
    db.run(`CREATE TABLE IF NOT EXISTS users (
      id INTEGER PRIMARY KEY AUTOINCREMENT,
      username TEXT UNIQUE NOT NULL,
      email TEXT UNIQUE NOT NULL,
      password TEXT NOT NULL,
      role TEXT DEFAULT 'user',
      must_change_password BOOLEAN DEFAULT 0,
      is_active BOOLEAN DEFAULT 1,
      two_factor_enabled BOOLEAN DEFAULT 0,
      two_factor_secret TEXT,
      created_at DATETIME DEFAULT CURRENT_TIMESTAMP,
      updated_at DATETIME DEFAULT CURRENT_TIMESTAMP,
      created_by INTEGER,
      FOREIGN KEY (created_by) REFERENCES users(id) ON DELETE SET NULL
    )`);
    // 2FA backup codes table
    db.run(`CREATE TABLE IF NOT EXISTS two_factor_backup_codes (
      id INTEGER PRIMARY KEY AUTOINCREMENT,
      user_id INTEGER NOT NULL,
      code TEXT NOT NULL,
      used BOOLEAN DEFAULT 0,
      used_at DATETIME,
      created_at DATETIME DEFAULT CURRENT_TIMESTAMP,
      FOREIGN KEY (user_id) REFERENCES users(id) ON DELETE CASCADE
    )`);
    // Password history table (prevent password reuse)
    db.run(`CREATE TABLE IF NOT EXISTS password_history (
      id INTEGER PRIMARY KEY AUTOINCREMENT,
      user_id INTEGER NOT NULL,
      password_hash TEXT NOT NULL,
      created_at DATETIME DEFAULT CURRENT_TIMESTAMP,
      FOREIGN KEY (user_id) REFERENCES users(id) ON DELETE CASCADE
    )`);
    // Login attempts tracking table
    db.run(`CREATE TABLE IF NOT EXISTS login_attempts (
      id INTEGER PRIMARY KEY AUTOINCREMENT,
      username TEXT NOT NULL,
      ip_address TEXT NOT NULL,
      user_agent TEXT,
      success BOOLEAN NOT NULL,
      failure_reason TEXT,
      timestamp DATETIME DEFAULT CURRENT_TIMESTAMP
    )`);
    // Create indexes for login_attempts
    db.run('CREATE INDEX IF NOT EXISTS idx_username_timestamp ON login_attempts(username, timestamp)');
    db.run('CREATE INDEX IF NOT EXISTS idx_ip_timestamp ON login_attempts(ip_address, timestamp)');
    // Account lockouts table
    db.run(`CREATE TABLE IF NOT EXISTS account_lockouts (
      id INTEGER PRIMARY KEY AUTOINCREMENT,
      user_id INTEGER NOT NULL,
      locked_until DATETIME NOT NULL,
      locked_by TEXT DEFAULT 'system',
      reason TEXT,
      created_at DATETIME DEFAULT CURRENT_TIMESTAMP,
      FOREIGN KEY (user_id) REFERENCES users(id) ON DELETE CASCADE
    )`);
    // Active sessions table
    db.run(`CREATE TABLE IF NOT EXISTS active_sessions (
      id INTEGER PRIMARY KEY AUTOINCREMENT,
      user_id INTEGER NOT NULL,
      session_token TEXT UNIQUE NOT NULL,
      ip_address TEXT,
      user_agent TEXT,
      created_at DATETIME DEFAULT CURRENT_TIMESTAMP,
      last_activity DATETIME DEFAULT CURRENT_TIMESTAMP,
      expires_at DATETIME NOT NULL,
      FOREIGN KEY (user_id) REFERENCES users(id) ON DELETE CASCADE
    )`);
    // Security audit log table
    db.run(`CREATE TABLE IF NOT EXISTS security_audit_log (
      id INTEGER PRIMARY KEY AUTOINCREMENT,
      event_type TEXT NOT NULL,
      user_id INTEGER,
      ip_address TEXT,
      user_agent TEXT,
      success BOOLEAN NOT NULL,
      failure_reason TEXT,
      metadata TEXT,
      timestamp DATETIME DEFAULT CURRENT_TIMESTAMP,
      FOREIGN KEY (user_id) REFERENCES users(id) ON DELETE SET NULL
    )`);
    // Create indexes for security_audit_log
    db.run('CREATE INDEX IF NOT EXISTS idx_event_timestamp ON security_audit_log(event_type, timestamp)');
    db.run('CREATE INDEX IF NOT EXISTS idx_user_timestamp ON security_audit_log(user_id, timestamp)');
    // Add new columns if they don't exist (migration for pre-existing DBs
    // created before these columns were part of the users DDL above).
    db.all("PRAGMA table_info(users)", [], (err, columns) => {
      if (!err) {
        if (!columns.some(col => col.name === 'must_change_password')) {
          db.run("ALTER TABLE users ADD COLUMN must_change_password BOOLEAN DEFAULT 0");
        }
        if (!columns.some(col => col.name === 'is_active')) {
          db.run("ALTER TABLE users ADD COLUMN is_active BOOLEAN DEFAULT 1");
        }
        if (!columns.some(col => col.name === 'created_by')) {
          db.run("ALTER TABLE users ADD COLUMN created_by INTEGER REFERENCES users(id) ON DELETE SET NULL");
        }
        if (!columns.some(col => col.name === 'two_factor_enabled')) {
          db.run("ALTER TABLE users ADD COLUMN two_factor_enabled BOOLEAN DEFAULT 0");
        }
        if (!columns.some(col => col.name === 'two_factor_secret')) {
          db.run("ALTER TABLE users ADD COLUMN two_factor_secret TEXT");
        }
        if (!columns.some(col => col.name === 'password_changed_at')) {
          db.run("ALTER TABLE users ADD COLUMN password_changed_at DATETIME DEFAULT CURRENT_TIMESTAMP");
        }
        if (!columns.some(col => col.name === 'password_expires_at')) {
          db.run("ALTER TABLE users ADD COLUMN password_expires_at DATETIME");
        }
        if (!columns.some(col => col.name === 'failed_login_attempts')) {
          db.run("ALTER TABLE users ADD COLUMN failed_login_attempts INTEGER DEFAULT 0");
        }
        if (!columns.some(col => col.name === 'last_failed_login')) {
          db.run("ALTER TABLE users ADD COLUMN last_failed_login DATETIME");
        }
        if (!columns.some(col => col.name === 'locked_until')) {
          db.run("ALTER TABLE users ADD COLUMN locked_until DATETIME");
        }
        if (!columns.some(col => col.name === 'last_login_at')) {
          db.run("ALTER TABLE users ADD COLUMN last_login_at DATETIME");
        }
        if (!columns.some(col => col.name === 'last_login_ip')) {
          db.run("ALTER TABLE users ADD COLUMN last_login_ip TEXT");
        }
      }
    });
    // Create default admin user if no users exist. must_change_password=1
    // forces rotation of the well-known bootstrap credential on first login.
    db.get("SELECT COUNT(*) as count FROM users", [], (err, result) => {
      if (!err && result.count === 0) {
        const bcrypt = require('bcrypt');
        const defaultPassword = bcrypt.hashSync('admin', 10);
        db.run(
          `INSERT INTO users (username, email, password, role, must_change_password)
          VALUES (?, ?, ?, ?, ?)`,
          ['admin', 'admin@streamflow.local', defaultPassword, 'admin', 1],
          (err) => {
            if (err) {
              logger.error('Failed to create default admin:', err);
            } else {
              // CWE-532: Never log passwords - even default ones
              logger.info('✓ Default admin user created (username: admin)');
              logger.info('⚠ SECURITY: Change the default admin password immediately!');
            }
          }
        );
      }
    });
    // Profiles table (multi-user support)
    db.run(`CREATE TABLE IF NOT EXISTS profiles (
      id INTEGER PRIMARY KEY AUTOINCREMENT,
      user_id INTEGER NOT NULL,
      name TEXT NOT NULL,
      avatar TEXT,
      is_child BOOLEAN DEFAULT 0,
      age_restriction INTEGER DEFAULT 18,
      created_at DATETIME DEFAULT CURRENT_TIMESTAMP,
      FOREIGN KEY (user_id) REFERENCES users(id) ON DELETE CASCADE
    )`);
    // Playlists table
    db.run(`CREATE TABLE IF NOT EXISTS playlists (
      id INTEGER PRIMARY KEY AUTOINCREMENT,
      user_id INTEGER NOT NULL,
      name TEXT NOT NULL,
      url TEXT,
      type TEXT DEFAULT 'live',
      filename TEXT,
      category TEXT,
      is_active BOOLEAN DEFAULT 1,
      created_at DATETIME DEFAULT CURRENT_TIMESTAMP,
      updated_at DATETIME DEFAULT CURRENT_TIMESTAMP,
      FOREIGN KEY (user_id) REFERENCES users(id) ON DELETE CASCADE
    )`);
    // Channels table
    db.run(`CREATE TABLE IF NOT EXISTS channels (
      id INTEGER PRIMARY KEY AUTOINCREMENT,
      playlist_id INTEGER NOT NULL,
      name TEXT NOT NULL,
      url TEXT NOT NULL,
      logo TEXT,
      custom_logo TEXT,
      group_name TEXT,
      tvg_id TEXT,
      tvg_name TEXT,
      language TEXT,
      country TEXT,
      is_radio BOOLEAN DEFAULT 0,
      is_active BOOLEAN DEFAULT 1,
      health_status TEXT DEFAULT 'unknown',
      last_checked DATETIME,
      created_at DATETIME DEFAULT CURRENT_TIMESTAMP,
      FOREIGN KEY (playlist_id) REFERENCES playlists(id) ON DELETE CASCADE
    )`);
    // Add custom_logo column if it doesn't exist (migration)
    db.all("PRAGMA table_info(channels)", [], (err, columns) => {
      if (!err && !columns.some(col => col.name === 'custom_logo')) {
        db.run("ALTER TABLE channels ADD COLUMN custom_logo TEXT", (err) => {
          if (err) logger.error('Migration error:', err);
        });
      }
    });
    // Custom groups table
    db.run(`CREATE TABLE IF NOT EXISTS custom_groups (
      id INTEGER PRIMARY KEY AUTOINCREMENT,
      user_id INTEGER NOT NULL,
      name TEXT NOT NULL,
      icon TEXT,
      order_index INTEGER DEFAULT 0,
      created_at DATETIME DEFAULT CURRENT_TIMESTAMP,
      FOREIGN KEY (user_id) REFERENCES users(id) ON DELETE CASCADE
    )`);
    // Group channels (many-to-many)
    db.run(`CREATE TABLE IF NOT EXISTS group_channels (
      id INTEGER PRIMARY KEY AUTOINCREMENT,
      group_id INTEGER NOT NULL,
      channel_id INTEGER NOT NULL,
      order_index INTEGER DEFAULT 0,
      FOREIGN KEY (group_id) REFERENCES custom_groups(id) ON DELETE CASCADE,
      FOREIGN KEY (channel_id) REFERENCES channels(id) ON DELETE CASCADE,
      UNIQUE(group_id, channel_id)
    )`);
    // Recordings table
    db.run(`CREATE TABLE IF NOT EXISTS recordings (
      id INTEGER PRIMARY KEY AUTOINCREMENT,
      user_id INTEGER NOT NULL,
      profile_id INTEGER,
      channel_id INTEGER NOT NULL,
      title TEXT NOT NULL,
      description TEXT,
      start_time DATETIME NOT NULL,
      end_time DATETIME NOT NULL,
      duration INTEGER,
      file_path TEXT,
      file_size INTEGER,
      status TEXT DEFAULT 'scheduled',
      is_series BOOLEAN DEFAULT 0,
      created_at DATETIME DEFAULT CURRENT_TIMESTAMP,
      FOREIGN KEY (user_id) REFERENCES users(id) ON DELETE CASCADE,
      FOREIGN KEY (profile_id) REFERENCES profiles(id) ON DELETE SET NULL,
      FOREIGN KEY (channel_id) REFERENCES channels(id) ON DELETE CASCADE
    )`);
    // Watch history table
    db.run(`CREATE TABLE IF NOT EXISTS watch_history (
      id INTEGER PRIMARY KEY AUTOINCREMENT,
      user_id INTEGER NOT NULL,
      profile_id INTEGER,
      channel_id INTEGER NOT NULL,
      watched_at DATETIME DEFAULT CURRENT_TIMESTAMP,
      duration INTEGER,
      FOREIGN KEY (user_id) REFERENCES users(id) ON DELETE CASCADE,
      FOREIGN KEY (profile_id) REFERENCES profiles(id) ON DELETE SET NULL,
      FOREIGN KEY (channel_id) REFERENCES channels(id) ON DELETE CASCADE
    )`);
    // Favorites table
    db.run(`CREATE TABLE IF NOT EXISTS favorites (
      id INTEGER PRIMARY KEY AUTOINCREMENT,
      user_id INTEGER NOT NULL,
      profile_id INTEGER,
      channel_id INTEGER NOT NULL,
      created_at DATETIME DEFAULT CURRENT_TIMESTAMP,
      FOREIGN KEY (user_id) REFERENCES users(id) ON DELETE CASCADE,
      FOREIGN KEY (profile_id) REFERENCES profiles(id) ON DELETE SET NULL,
      FOREIGN KEY (channel_id) REFERENCES channels(id) ON DELETE CASCADE,
      UNIQUE(user_id, profile_id, channel_id)
    )`);
    // Settings table
    db.run(`CREATE TABLE IF NOT EXISTS settings (
      id INTEGER PRIMARY KEY AUTOINCREMENT,
      user_id INTEGER NOT NULL,
      key TEXT NOT NULL,
      value TEXT,
      created_at DATETIME DEFAULT CURRENT_TIMESTAMP,
      updated_at DATETIME DEFAULT CURRENT_TIMESTAMP,
      FOREIGN KEY (user_id) REFERENCES users(id) ON DELETE CASCADE,
      UNIQUE(user_id, key)
    )`);
    // API tokens table
    db.run(`CREATE TABLE IF NOT EXISTS api_tokens (
      id INTEGER PRIMARY KEY AUTOINCREMENT,
      user_id INTEGER NOT NULL,
      token TEXT UNIQUE NOT NULL,
      name TEXT,
      expires_at DATETIME,
      usage_count INTEGER DEFAULT 0,
      usage_limit INTEGER,
      created_at DATETIME DEFAULT CURRENT_TIMESTAMP,
      FOREIGN KEY (user_id) REFERENCES users(id) ON DELETE CASCADE
    )`);
    // Channel logos cache table
    db.run(`CREATE TABLE IF NOT EXISTS logo_cache (
      id INTEGER PRIMARY KEY AUTOINCREMENT,
      channel_name TEXT UNIQUE NOT NULL,
      logo_url TEXT,
      local_path TEXT,
      last_updated DATETIME DEFAULT CURRENT_TIMESTAMP
    )`);
    // M3U files library table
    db.run(`CREATE TABLE IF NOT EXISTS m3u_files (
      id INTEGER PRIMARY KEY AUTOINCREMENT,
      user_id INTEGER NOT NULL,
      name TEXT NOT NULL,
      original_filename TEXT NOT NULL,
      file_path TEXT NOT NULL,
      size INTEGER NOT NULL,
      created_at DATETIME DEFAULT CURRENT_TIMESTAMP,
      updated_at DATETIME DEFAULT CURRENT_TIMESTAMP,
      FOREIGN KEY (user_id) REFERENCES users(id) ON DELETE CASCADE
    )`);
    // Create VPN configs table for multiple configuration files
    db.run(`CREATE TABLE IF NOT EXISTS vpn_configs (
      id INTEGER PRIMARY KEY AUTOINCREMENT,
      user_id INTEGER NOT NULL,
      name TEXT NOT NULL,
      config_type TEXT NOT NULL CHECK(config_type IN ('openvpn', 'wireguard')),
      config_data TEXT NOT NULL,
      country TEXT,
      server_name TEXT,
      endpoint TEXT,
      is_active BOOLEAN DEFAULT 0,
      created_at DATETIME DEFAULT CURRENT_TIMESTAMP,
      updated_at DATETIME DEFAULT CURRENT_TIMESTAMP,
      FOREIGN KEY (user_id) REFERENCES users(id) ON DELETE CASCADE
    )`);
    // Create indexes for performance
    db.run('CREATE INDEX IF NOT EXISTS idx_channels_playlist ON channels(playlist_id)');
    db.run('CREATE INDEX IF NOT EXISTS idx_channels_tvg_id ON channels(tvg_id)');
    db.run('CREATE INDEX IF NOT EXISTS idx_watch_history_user ON watch_history(user_id, profile_id)');
    db.run('CREATE INDEX IF NOT EXISTS idx_favorites_user ON favorites(user_id, profile_id)');
    db.run('CREATE INDEX IF NOT EXISTS idx_recordings_user ON recordings(user_id, profile_id)');
    db.run('CREATE INDEX IF NOT EXISTS idx_m3u_files_user ON m3u_files(user_id)');
    logger.info('Database tables initialized');
  });
};
// Shared connection plus the idempotent schema initializer.
module.exports = {
  db,
  initialize
};

28
backend/healthcheck.js Normal file
View file

@ -0,0 +1,28 @@
// Container health probe: GET /api/health on the local server.
// Exit code 0 = healthy (HTTP 200), 1 = unhealthy (any other status,
// request error, or a request exceeding the 5 s timeout).
const http = require('http');

const probe = http.request(
  {
    hostname: 'localhost',
    port: process.env.PORT || 12345,
    path: '/api/health',
    method: 'GET',
    timeout: 5000
  },
  (res) => {
    process.exit(res.statusCode === 200 ? 0 : 1);
  }
);

probe.on('error', () => {
  process.exit(1);
});

probe.on('timeout', () => {
  probe.destroy();
  process.exit(1);
});

probe.end();

View file

@ -0,0 +1,128 @@
const cron = require('node-cron');
const axios = require('axios');
const { db } = require('../database/db');
const logger = require('../utils/logger');
const CHECK_TIMEOUT = 10000; // 10 seconds timeout per channel request
const BATCH_SIZE = 10; // Check 10 channels at a time (caps concurrent connections)
/**
 * Check whether a channel URL is accessible and persist the result.
 *
 * Opens the stream (GET, following up to 5 redirects), immediately destroys
 * the response body, and writes health_status / last_checked on the channel
 * row. Fix vs. original: the identical db.run UPDATE was duplicated in the
 * try and catch paths — extracted into one local helper.
 *
 * @param {number} channelId - channels.id to update.
 * @param {string} url - Stream URL to probe.
 * @returns {Promise<{channelId:number, status:string, healthy:boolean, error?:string}>}
 */
async function checkChannelHealth(channelId, url) {
  // Persist the computed status; a DB failure is logged but never thrown.
  const persistStatus = (status) => {
    db.run(
      'UPDATE channels SET health_status = ?, last_checked = CURRENT_TIMESTAMP WHERE id = ?',
      [status, channelId],
      (err) => {
        if (err) {
          logger.error(`Failed to update health status for channel ${channelId}:`, err);
        }
      }
    );
  };
  try {
    const response = await axios.get(url, {
      timeout: CHECK_TIMEOUT,
      maxRedirects: 5,
      responseType: 'stream',
      headers: {
        'User-Agent': 'StreamFlow/1.0'
      }
    });
    // Close the stream immediately — we only care about reachability.
    if (response.data && typeof response.data.destroy === 'function') {
      response.data.destroy();
    }
    // Consider 2xx and 3xx as healthy
    const isHealthy = response.status >= 200 && response.status < 400;
    const status = isHealthy ? 'healthy' : 'degraded';
    persistStatus(status);
    return { channelId, status, healthy: isHealthy };
  } catch (error) {
    // Mark as dead if the request fails (timeout, DNS, refused, non-2xx throw).
    const status = 'dead';
    persistStatus(status);
    logger.debug(`Channel ${channelId} health check failed: ${error.message}`);
    return { channelId, status, healthy: false, error: error.message };
  }
}
/**
 * Probe every active channel, BATCH_SIZE at a time, with a one-second pause
 * after each successful batch. Resolves with a tally of results; rejects
 * only if the channel list itself cannot be read.
 */
async function checkAllChannels() {
  return new Promise((resolve, reject) => {
    db.all(
      'SELECT id, url FROM channels WHERE is_active = 1',
      [],
      async (err, channels) => {
        if (err) {
          logger.error('Failed to fetch channels for health check:', err);
          reject(err);
          return;
        }
        logger.info(`Starting health check for ${channels.length} channels`);
        const tally = {
          total: channels.length,
          healthy: 0,
          degraded: 0,
          dead: 0
        };
        // Sequential batches: each slice is probed in parallel, slices run one after another.
        for (let offset = 0; offset < channels.length; offset += BATCH_SIZE) {
          const slice = channels.slice(offset, offset + BATCH_SIZE);
          try {
            const outcomes = await Promise.all(
              slice.map((channel) => checkChannelHealth(channel.id, channel.url))
            );
            for (const outcome of outcomes) {
              if (outcome.status === 'healthy') tally.healthy++;
              else if (outcome.status === 'degraded') tally.degraded++;
              else if (outcome.status === 'dead') tally.dead++;
            }
            // Small delay between batches to avoid overwhelming the server
            await new Promise((wake) => setTimeout(wake, 1000));
          } catch (error) {
            logger.error('Error in batch health check:', error);
          }
        }
        logger.info('Health check completed:', tally);
        resolve(tally);
      }
    );
  });
}
// Check channel health every 6 hours (minute 0 of hours 0,6,12,18 — node-cron syntax).
// Registered as a module side effect on first require.
cron.schedule('0 */6 * * *', async () => {
  logger.info('Running scheduled channel health check');
  try {
    await checkAllChannels();
  } catch (error) {
    logger.error('Channel health check failed:', error);
  }
});
// Export for manual triggering
module.exports = {
  checkAllChannels,
  checkChannelHealth
};

View file

@ -0,0 +1,438 @@
/**
* Log Management & Retention System (CWE-53 Compliance)
* Automated cleanup, archival, integrity verification, and monitoring
*/
const logger = require('../utils/logger');
const logAggregator = require('../utils/logAggregator');
const SecurityAuditLogger = require('../utils/securityAudit');
const fs = require('fs').promises;
const path = require('path');
const zlib = require('zlib');
const { promisify } = require('util');
const gzip = promisify(zlib.gzip);
/**
 * Orchestrates log retention: daily cleanup, weekly compressed archives,
 * hourly tamper verification, plus admin-facing manual triggers and
 * statistics. All schedules are self-rearming setTimeout / setInterval
 * timers — no external cron dependency.
 */
class LogManagement {
  constructor() {
    // Compressed .json.gz archives live here (created with mode 0o700).
    this.archiveDir = path.join(__dirname, '../../data/log-archives');
    this.initialized = false;
  }
  /**
   * Initialize log management system.
   * Idempotent: subsequent calls after a successful start are no-ops.
   * Initialization failures are logged, not thrown.
   */
  async initialize() {
    if (this.initialized) return;
    try {
      // Ensure archive directory exists
      await fs.mkdir(this.archiveDir, { recursive: true, mode: 0o700 });
      // Schedule daily log cleanup (runs at 2 AM)
      this.scheduleDailyCleanup();
      // Schedule hourly integrity verification
      this.scheduleIntegrityChecks();
      // Schedule weekly archival
      this.scheduleWeeklyArchival();
      logger.info('[LogManagement] Initialized - Automated cleanup, archival, and integrity checks active');
      this.initialized = true;
    } catch (error) {
      logger.error('[LogManagement] Failed to initialize:', error);
    }
  }
  /**
   * Schedule daily log cleanup at 2 AM (local server time).
   * Re-arms itself after each run via a fresh setTimeout.
   */
  scheduleDailyCleanup() {
    const scheduleNextCleanup = () => {
      const now = new Date();
      const next2AM = new Date();
      next2AM.setHours(2, 0, 0, 0);
      // If it's past 2 AM today, schedule for tomorrow
      if (now > next2AM) {
        next2AM.setDate(next2AM.getDate() + 1);
      }
      const msUntil2AM = next2AM - now;
      setTimeout(async () => {
        await this.performDailyCleanup();
        scheduleNextCleanup(); // Schedule next day
      }, msUntil2AM);
      logger.info(`[LogManagement] Daily cleanup scheduled for ${next2AM.toISOString()}`);
    };
    scheduleNextCleanup();
  }
  /**
   * Schedule hourly integrity verification.
   * Fire-and-forget calls are safe: verifyLogIntegrity catches internally
   * and never rejects.
   */
  scheduleIntegrityChecks() {
    // Run immediately on startup
    this.verifyLogIntegrity();
    // Then run every hour
    setInterval(() => {
      this.verifyLogIntegrity();
    }, 60 * 60 * 1000); // 1 hour
    logger.info('[LogManagement] Hourly integrity checks scheduled');
  }
  /**
   * Schedule weekly archival (every Sunday at 3 AM, local server time).
   * NOTE(review): when started on a Sunday before 3 AM, daysUntilSunday
   * evaluates to 7 (`(7 - 0) % 7 || 7`), so the first archival lands a
   * full week out instead of later the same day — a one-off startup delay;
   * confirm whether that is intended.
   */
  scheduleWeeklyArchival() {
    const scheduleNextArchival = () => {
      const now = new Date();
      const nextSunday3AM = new Date();
      nextSunday3AM.setHours(3, 0, 0, 0);
      // Calculate days until next Sunday (0 = Sunday)
      const daysUntilSunday = (7 - now.getDay()) % 7 || 7;
      nextSunday3AM.setDate(nextSunday3AM.getDate() + daysUntilSunday);
      // If we're past 3 AM on Sunday, wait until next Sunday
      if (now.getDay() === 0 && now > nextSunday3AM) {
        nextSunday3AM.setDate(nextSunday3AM.getDate() + 7);
      }
      const msUntilArchival = nextSunday3AM - now;
      setTimeout(async () => {
        await this.performWeeklyArchival();
        scheduleNextArchival(); // Schedule next week
      }, msUntilArchival);
      logger.info(`[LogManagement] Weekly archival scheduled for ${nextSunday3AM.toISOString()}`);
    };
    scheduleNextArchival();
  }
  /**
   * Perform daily log cleanup with archival.
   * Archives expiring logs first, then deletes audit / aggregated / rotated
   * file logs past retention, and records the outcome in the audit log.
   */
  async performDailyCleanup() {
    try {
      logger.info('[LogManagement] Starting daily log cleanup...');
      // Get retention settings from environment or defaults (90 days).
      // NOTE(review): parseInt without a radix — fine for plain decimal env
      // values, but an explicit 10 would be safer.
      const auditRetention = parseInt(process.env.AUDIT_LOG_RETENTION) || 90;
      const aggregatedRetention = parseInt(process.env.AGGREGATED_LOG_RETENTION) || 90;
      // Archive logs before deletion
      await this.archiveOldLogs(auditRetention);
      // Cleanup audit logs
      const auditDeleted = await SecurityAuditLogger.cleanupOldLogs(auditRetention);
      logger.info(`[LogManagement] Cleaned up ${auditDeleted} old security audit logs (>${auditRetention} days)`);
      // Cleanup aggregated logs
      const aggregatedDeleted = await logAggregator.cleanup(aggregatedRetention);
      logger.info(`[LogManagement] Cleaned up ${aggregatedDeleted} old aggregated logs (>${aggregatedRetention} days)`);
      // Cleanup old file logs (keep last 30 days of rotated files)
      await this.cleanupFileLogRotations();
      // Log the cleanup event
      await SecurityAuditLogger.logSystemEvent('log_cleanup', true, {
        auditDeleted,
        aggregatedDeleted,
        retentionDays: { audit: auditRetention, aggregated: aggregatedRetention }
      });
      logger.info('[LogManagement] Daily log cleanup completed successfully');
    } catch (error) {
      logger.error('[LogManagement] Error during daily cleanup:', error);
      await SecurityAuditLogger.logSystemEvent('log_cleanup', false, {
        error: error.message
      });
    }
  }
  /**
   * Archive old logs to compressed files before deletion.
   *
   * @param {number} retentionDays - Logs older than this many days are archived.
   * @returns {Promise<string|undefined>} archive filename, or undefined when nothing to archive.
   * @throws rethrows any archival failure to the caller.
   */
  async archiveOldLogs(retentionDays) {
    try {
      const cutoffDate = new Date();
      cutoffDate.setDate(cutoffDate.getDate() - retentionDays);
      // Query logs that will be deleted (capped at 10000 per run).
      const logsToArchive = await logAggregator.query({
        endDate: cutoffDate.toISOString(),
        limit: 10000
      });
      if (logsToArchive.length === 0) {
        logger.info('[LogManagement] No logs to archive');
        return;
      }
      // Create archive filename (date + timestamp keeps names unique).
      const archiveFilename = `logs-archive-${cutoffDate.toISOString().split('T')[0]}-${Date.now()}.json.gz`;
      const archivePath = path.join(this.archiveDir, archiveFilename);
      // Compress and save
      const logsJson = JSON.stringify(logsToArchive, null, 2);
      const compressed = await gzip(logsJson);
      await fs.writeFile(archivePath, compressed, { mode: 0o600 });
      // Set restrictive permissions (owner read/write only)
      await fs.chmod(archivePath, 0o600);
      logger.info(`[LogManagement] Archived ${logsToArchive.length} logs to ${archiveFilename} (${(compressed.length / 1024).toFixed(2)} KB)`);
      return archiveFilename;
    } catch (error) {
      logger.error('[LogManagement] Error archiving logs:', error);
      throw error;
    }
  }
  /**
   * Perform weekly full archival: snapshot the 7-day window ending one week
   * ago (up to 50000 entries), then prune archives older than one year.
   * Failures are logged and audited, never thrown.
   */
  async performWeeklyArchival() {
    try {
      logger.info('[LogManagement] Starting weekly full log archival...');
      // Archive all logs from last week
      const lastWeek = new Date();
      lastWeek.setDate(lastWeek.getDate() - 7);
      const allLogs = await logAggregator.query({
        startDate: new Date(lastWeek.getTime() - 7 * 24 * 60 * 60 * 1000).toISOString(),
        endDate: lastWeek.toISOString(),
        limit: 50000
      });
      if (allLogs.length > 0) {
        const archiveFilename = `logs-weekly-${lastWeek.toISOString().split('T')[0]}.json.gz`;
        const archivePath = path.join(this.archiveDir, archiveFilename);
        const logsJson = JSON.stringify(allLogs, null, 2);
        const compressed = await gzip(logsJson);
        await fs.writeFile(archivePath, compressed, { mode: 0o600 });
        logger.info(`[LogManagement] Weekly archive complete: ${archiveFilename} (${allLogs.length} logs, ${(compressed.length / 1024 / 1024).toFixed(2)} MB)`);
      }
      // Cleanup old archives (keep 1 year)
      await this.cleanupOldArchives(365);
      await SecurityAuditLogger.logSystemEvent('log_weekly_archive', true, {
        logsArchived: allLogs.length
      });
    } catch (error) {
      logger.error('[LogManagement] Error during weekly archival:', error);
      await SecurityAuditLogger.logSystemEvent('log_weekly_archive', false, {
        error: error.message
      });
    }
  }
  /**
   * Verify log integrity and alert on tampering.
   * @returns {Promise<object|null>} the aggregator's integrity result, or null on error.
   */
  async verifyLogIntegrity() {
    try {
      logger.debug('[LogManagement] Starting log integrity verification...');
      const result = await logAggregator.verifyIntegrity();
      if (result.tampered > 0) {
        // CRITICAL: Log tampering detected!
        logger.error(`[LogManagement] ⚠️ LOG TAMPERING DETECTED! ${result.tampered} tampered logs found`);
        // Log to security audit
        await SecurityAuditLogger.logSecurityIncident('log_tampering', {
          tamperedCount: result.tampered,
          verifiedCount: result.verified,
          totalCount: result.total,
          tamperedLogs: result.tamperedLogs.slice(0, 10) // First 10 for details
        });
        // In production, this should trigger alerts (email, Slack, PagerDuty, etc.)
        logger.error('[LogManagement] Security team should be notified immediately');
      } else {
        logger.debug(`[LogManagement] Integrity check passed: ${result.verified} logs verified`);
      }
      return result;
    } catch (error) {
      logger.error('[LogManagement] Error during integrity verification:', error);
      return null;
    }
  }
  /**
   * Cleanup rotated file logs older than 30 days.
   * Only files matching *.log.<n> in ../../logs are candidates.
   */
  async cleanupFileLogRotations() {
    try {
      const logsDir = path.join(__dirname, '../../logs');
      const files = await fs.readdir(logsDir);
      const now = Date.now();
      const maxAge = 30 * 24 * 60 * 60 * 1000; // 30 days
      let deletedCount = 0;
      for (const file of files) {
        // Only process rotated files (*.log.1, *.log.2, etc.)
        if (file.match(/\.log\.\d+$/)) {
          const filePath = path.join(logsDir, file);
          const stats = await fs.stat(filePath);
          const fileAge = now - stats.mtime.getTime();
          if (fileAge > maxAge) {
            await fs.unlink(filePath);
            deletedCount++;
            logger.debug(`[LogManagement] Deleted old rotated log: ${file}`);
          }
        }
      }
      if (deletedCount > 0) {
        logger.info(`[LogManagement] Cleaned up ${deletedCount} old rotated log files`);
      }
    } catch (error) {
      logger.error('[LogManagement] Error cleaning up rotated logs:', error);
    }
  }
  /**
   * Cleanup old archives (keep for specified days).
   * @param {number} retentionDays - .json.gz archives older than this (by mtime) are deleted.
   */
  async cleanupOldArchives(retentionDays) {
    try {
      const files = await fs.readdir(this.archiveDir);
      const now = Date.now();
      const maxAge = retentionDays * 24 * 60 * 60 * 1000;
      let deletedCount = 0;
      for (const file of files) {
        if (file.endsWith('.json.gz')) {
          const filePath = path.join(this.archiveDir, file);
          const stats = await fs.stat(filePath);
          const fileAge = now - stats.mtime.getTime();
          if (fileAge > maxAge) {
            await fs.unlink(filePath);
            deletedCount++;
            logger.debug(`[LogManagement] Deleted old archive: ${file}`);
          }
        }
      }
      if (deletedCount > 0) {
        logger.info(`[LogManagement] Cleaned up ${deletedCount} old log archives`);
      }
    } catch (error) {
      logger.error('[LogManagement] Error cleaning up old archives:', error);
    }
  }
  /**
   * Get log management statistics: aggregator stats for the last 30 days
   * plus archive count / total size. Returns null on error.
   */
  async getStatistics() {
    try {
      const stats = await logAggregator.getStatistics(30);
      // Get archive info
      const archives = await fs.readdir(this.archiveDir);
      const archiveFiles = archives.filter(f => f.endsWith('.json.gz'));
      let totalArchiveSize = 0;
      for (const file of archiveFiles) {
        const filePath = path.join(this.archiveDir, file);
        const stat = await fs.stat(filePath);
        totalArchiveSize += stat.size;
      }
      return {
        ...stats,
        archives: {
          count: archiveFiles.length,
          totalSize: totalArchiveSize,
          totalSizeMB: (totalArchiveSize / 1024 / 1024).toFixed(2)
        }
      };
    } catch (error) {
      logger.error('[LogManagement] Error getting statistics:', error);
      return null;
    }
  }
  /**
   * List available log archives, newest first.
   * @returns {Promise<Array<{filename:string, size:number, sizeMB:string, created:string}>>}
   */
  async listArchives() {
    try {
      const files = await fs.readdir(this.archiveDir);
      const archives = [];
      for (const file of files) {
        if (file.endsWith('.json.gz')) {
          const filePath = path.join(this.archiveDir, file);
          const stats = await fs.stat(filePath);
          archives.push({
            filename: file,
            size: stats.size,
            sizeMB: (stats.size / 1024 / 1024).toFixed(2),
            created: stats.mtime.toISOString()
          });
        }
      }
      // Sort by creation date descending
      archives.sort((a, b) => new Date(b.created) - new Date(a.created));
      return archives;
    } catch (error) {
      logger.error('[LogManagement] Error listing archives:', error);
      return [];
    }
  }
  /**
   * Manual trigger for log cleanup (admin function).
   * Unlike performDailyCleanup, errors here propagate to the caller.
   * @param {number} retentionDays
   * @returns {Promise<{auditDeleted:number, aggregatedDeleted:number}>}
   */
  async manualCleanup(retentionDays) {
    logger.info(`[LogManagement] Manual cleanup triggered (retention: ${retentionDays} days)`);
    const auditDeleted = await SecurityAuditLogger.cleanupOldLogs(retentionDays);
    const aggregatedDeleted = await logAggregator.cleanup(retentionDays);
    await SecurityAuditLogger.logAdminActivity(null, 'manual_log_cleanup', {
      auditDeleted,
      aggregatedDeleted,
      retentionDays
    });
    return { auditDeleted, aggregatedDeleted };
  }
  /**
   * Manual trigger for integrity verification (admin function).
   */
  async manualIntegrityCheck() {
    logger.info('[LogManagement] Manual integrity check triggered');
    return await this.verifyLogIntegrity();
  }
}
// Create singleton instance — one retention scheduler per process.
const logManagement = new LogManagement();
module.exports = logManagement;

213
backend/jobs/logoCacher.js Normal file
View file

@ -0,0 +1,213 @@
const { db } = require('../database/db');
const axios = require('axios');
const fs = require('fs').promises;
const path = require('path');
const crypto = require('crypto');
const logger = require('../utils/logger');
// On-disk cache directory for downloaded channel logos.
const LOGO_CACHE_DIR = path.join(__dirname, '../../data/logo-cache');
const CACHE_REFRESH_INTERVAL = 7 * 24 * 60 * 60 * 1000; // 7 days before a cached logo is re-fetched
const BATCH_SIZE = 5; // Process 5 logos at a time
const BATCH_DELAY = 2000; // 2 seconds between batches
// Create the logo cache directory on disk (no-op when it already exists);
// a failure is logged and swallowed so callers never see it.
async function ensureCacheDir() {
  await fs.mkdir(LOGO_CACHE_DIR, { recursive: true }).catch((err) => {
    logger.error('Failed to create logo cache directory:', err);
  });
}
/**
 * Download one channel logo and record it in the on-disk cache and the
 * logo_cache table. Fixes vs. original: debug `console.log`/`console.error`
 * leftovers replaced with the shared logger (messages preserved; the repo
 * lints with no-console), and the unused `stats` binding from the
 * existence check removed.
 *
 * @param {string} channelName - Channel the logo belongs to (logo_cache key).
 * @param {string} logoUrl - Remote logo URL; must start with "http".
 * @returns {Promise<string|null>} the cached path recorded in the DB, or
 *   null when the URL is unusable or the download/DB write fails.
 */
async function cacheLogo(channelName, logoUrl) {
  if (!logoUrl || !logoUrl.startsWith('http')) {
    return null;
  }
  try {
    // Generate filename from URL hash (md5 is a cache key here, not a security control)
    const hash = crypto.createHash('md5').update(logoUrl).digest('hex');
    const ext = path.extname(new URL(logoUrl).pathname) || '.png';
    const filename = `${hash}${ext}`;
    const localPath = path.join(LOGO_CACHE_DIR, filename);
    // Path stored in the DB — container-absolute; assumes data/ is mounted at /app/data. TODO confirm.
    const relativeLocalPath = path.join('/app/data/logo-cache', filename);
    logger.debug(`[LogoCacher] Path for ${channelName}: ${relativeLocalPath}`);
    // Check if already cached in database and file exists
    const existingEntry = await new Promise((resolve) => {
      db.get(
        'SELECT local_path, last_updated FROM logo_cache WHERE logo_url = ? LIMIT 1',
        [logoUrl],
        (err, row) => resolve(row)
      );
    });
    if (existingEntry) {
      try {
        // Throws if the cached file vanished from disk — then fall through to re-download.
        await fs.stat(localPath);
        const age = Date.now() - new Date(existingEntry.last_updated).getTime();
        if (age < CACHE_REFRESH_INTERVAL) {
          logger.debug(`Logo already cached for ${channelName}`);
          return relativeLocalPath;
        }
      } catch (err) {
        // File doesn't exist anymore, re-download
      }
    }
    // Download logo
    const response = await axios.get(logoUrl, {
      responseType: 'arraybuffer',
      timeout: 45000, // Increased for VPN connection
      headers: {
        'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36',
        'Accept': 'image/*',
      },
      maxRedirects: 5
    });
    if (!response.data || response.data.length === 0) {
      logger.warn(`[LogoCacher] Empty response for ${channelName}`);
      return null;
    }
    // Save to disk
    await fs.writeFile(localPath, response.data);
    logger.info(`Cached logo for ${channelName}: ${logoUrl} (${response.data.length} bytes)`);
    // Update database (channel_name is UNIQUE, so OR REPLACE upserts)
    return new Promise((resolve, reject) => {
      db.run(
        `INSERT OR REPLACE INTO logo_cache (channel_name, logo_url, local_path, last_updated)
        VALUES (?, ?, ?, CURRENT_TIMESTAMP)`,
        [channelName, logoUrl, relativeLocalPath],
        function(err) {
          if (err) {
            logger.error(`Failed to update logo cache DB for ${channelName}:`, err);
            reject(err);
          } else {
            logger.debug(`[LogoCacher] DB INSERT SUCCESS for ${channelName}, rowID: ${this.lastID}`);
            resolve(relativeLocalPath);
          }
        }
      );
    });
  } catch (error) {
    // Covers download failures AND the rejected DB-insert promise above.
    logger.error(`Failed to cache logo for ${channelName}:`, error.message);
    return null;
  }
}
/**
 * Fetch distinct channels whose effective logo (custom first, then default)
 * is a remote HTTP(S) URL and may therefore need local caching.
 */
function getChannelsNeedingCache() {
  const sql = `SELECT DISTINCT c.name, COALESCE(c.custom_logo, c.logo) as logo
     FROM channels c
     WHERE (c.logo IS NOT NULL AND c.logo LIKE 'http%')
        OR (c.custom_logo IS NOT NULL AND c.custom_logo LIKE 'http%')
     ORDER BY c.name`;
  return new Promise((resolve, reject) => {
    db.all(sql, [], (err, rows) => {
      if (err) {
        logger.error('Error fetching channels for logo caching:', err);
        reject(err);
        return;
      }
      resolve(rows || []);
    });
  });
}
/**
 * Cache logos for every channel referencing an HTTP(S) logo URL.
 * Channels are processed in small batches with a delay between batches so
 * upstream logo servers are not hammered.
 */
async function cacheAllLogos() {
  try {
    logger.info('Starting logo caching...');
    await ensureCacheDir();

    const channels = await getChannelsNeedingCache();
    logger.info(`Found ${channels.length} channels with logos`);
    if (channels.length === 0) {
      logger.info('All channel logos are already cached');
      return;
    }

    logger.info(`Caching logos for ${channels.length} channels...`);
    let cached = 0;
    const totalBatches = Math.ceil(channels.length / BATCH_SIZE);
    for (let i = 0; i < channels.length; i += BATCH_SIZE) {
      const batch = channels.slice(i, i + BATCH_SIZE);
      logger.debug(`Processing logo batch ${Math.floor(i / BATCH_SIZE) + 1}/${totalBatches}`);
      // allSettled: one rejected download must not abort the whole batch
      // (Promise.all would reject as soon as any cacheLogo call rejects).
      const results = await Promise.allSettled(
        batch.map((channel) => cacheLogo(channel.name, channel.logo))
      );
      cached += results.filter((r) => r.status === 'fulfilled' && r.value !== null).length;
      // Wait between batches to avoid overwhelming servers
      if (i + BATCH_SIZE < channels.length) {
        await new Promise((resolve) => setTimeout(resolve, BATCH_DELAY));
      }
    }
    logger.info(`Logo caching complete. Cached ${cached}/${channels.length} logos.`);
  } catch (error) {
    logger.error('Error in logo caching job:', error);
  }
}
/**
 * Purge the logo cache: wipe every logo_cache DB row, then delete cached
 * files older than 30 days from disk.
 *
 * NOTE(review): the DB is cleared unconditionally while files newer than
 * 30 days survive on disk, so surviving files temporarily have no DB entry
 * until the next caching run re-inserts them — confirm this is intentional.
 */
async function cleanupOldLogos() {
  try {
    // Wipe all database entries first.
    await new Promise((resolve, reject) => {
      db.run('DELETE FROM logo_cache', (err) => (err ? reject(err) : resolve()));
    });
    console.log('[LogoCacher] Cleared all logo cache database entries');

    // Then remove stale files from disk.
    const THIRTY_DAYS_MS = 30 * 24 * 60 * 60 * 1000;
    const cutoffTime = Date.now() - THIRTY_DAYS_MS;
    const files = await fs.readdir(LOGO_CACHE_DIR);
    let deleted = 0;
    for (const file of files) {
      const filePath = path.join(LOGO_CACHE_DIR, file);
      try {
        const stats = await fs.stat(filePath);
        if (stats.mtime.getTime() < cutoffTime) {
          await fs.unlink(filePath);
          deleted += 1;
        }
      } catch (err) {
        // Best-effort: skip files that cannot be stat'ed or unlinked.
      }
    }
    if (deleted > 0) {
      logger.info(`Cleaned up ${deleted} old cached logos`);
    }
  } catch (error) {
    logger.error('Error cleaning up old logos:', error);
  }
}
// Initialize cache directory on startup but don't auto-cache
// (VPN must be connected first for external logo downloads)
// Note: ensureCacheDir never rejects (it logs internally), so no .catch here.
ensureCacheDir().then(() => {
  logger.info('Logo caching job initialized (manual trigger required)');
});
module.exports = { cacheAllLogos, cacheLogo, cleanupOldLogos };

View file

@ -0,0 +1,10 @@
const cron = require('node-cron');
const logger = require('../utils/logger');
// Check scheduled recordings every minute
// Placeholder job: fires every minute ('* * * * *') but currently only logs;
// the actual recording-scheduler logic has not been implemented yet.
cron.schedule('* * * * *', () => {
  logger.info('Checking scheduled recordings');
  // Recording scheduler logic will be implemented here
});
module.exports = cron;

129
backend/middleware/auth.js Normal file
View file

@ -0,0 +1,129 @@
const jwt = require('jsonwebtoken');
const logger = require('../utils/logger');
const db = require('../database/db').db;
const { SESSION_POLICY } = require('../utils/passwordPolicy');
// SECURITY(review): falls back to a hard-coded secret when JWT_SECRET is unset,
// meaning any deployment missing the env var signs tokens with a public value.
// Consider failing fast at startup instead — confirm deployments always set it.
const JWT_SECRET = process.env.JWT_SECRET || 'change_this_in_production';
/**
 * Authentication middleware.
 *
 * Accepts a JWT from the Authorization header ("Bearer <token>") or, as a
 * fallback, from the `token` query parameter. After JWT verification the
 * token must also match a row in active_sessions, which enforces both an
 * absolute expiry (expires_at) and an idle timeout
 * (SESSION_POLICY.idleTimeout, in hours).
 * On success attaches `req.user` (decoded JWT payload) and `req.sessionId`.
 * All failures respond 401; `sessionExpired: true` signals the frontend to
 * log out automatically.
 */
const authenticate = (req, res, next) => {
  // Check Authorization header first, then query parameter
  let token = req.headers.authorization?.split(' ')[1];
  if (!token && req.query.token) {
    token = req.query.token;
  }
  if (!token) {
    logger.info('[AUTH] No token provided');
    return res.status(401).json({ error: 'Authentication required' });
  }
  // CWE-532: Do not log tokens or token details - they are credentials
  logger.info('[AUTH] Verifying authentication token');
  try {
    const decoded = jwt.verify(token, JWT_SECRET);
    logger.info(`[AUTH] Token verified for user ${decoded.userId}`);
    // Check session activity and idle timeout
    db.get(
      'SELECT * FROM active_sessions WHERE session_token = ? AND user_id = ?',
      [token, decoded.userId],
      (err, session) => {
        if (err) {
          logger.error('Session check error:', err);
          return res.status(500).json({ error: 'Session validation failed' });
        }
        if (!session) {
          // Valid JWT but revoked / never registered as a session.
          logger.info('[AUTH] Session not found for token in database');
          return res.status(401).json({ error: 'Session not found or expired', sessionExpired: true });
        }
        logger.info('[AUTH] Session found, checking expiry');
        // Check if session has expired (absolute timeout)
        const now = new Date();
        const expiresAt = new Date(session.expires_at);
        if (now >= expiresAt) {
          // Delete expired session
          db.run('DELETE FROM active_sessions WHERE id = ?', [session.id]);
          return res.status(401).json({ error: 'Session expired', sessionExpired: true });
        }
        // Check idle timeout (2 hours by default)
        const lastActivity = new Date(session.last_activity);
        const idleTimeMs = now - lastActivity;
        const idleTimeoutMs = SESSION_POLICY.idleTimeout * 60 * 60 * 1000; // Convert hours to ms
        if (idleTimeMs > idleTimeoutMs) {
          // Session idle for too long - terminate it
          db.run('DELETE FROM active_sessions WHERE id = ?', [session.id]);
          logger.info(`Session ${session.id} terminated due to idle timeout (${idleTimeMs}ms idle)`);
          return res.status(401).json({ error: 'Session expired due to inactivity', sessionExpired: true });
        }
        // Update last activity (fire-and-forget; failures are only logged)
        db.run(
          'UPDATE active_sessions SET last_activity = ? WHERE id = ?',
          [now.toISOString(), session.id],
          (updateErr) => {
            if (updateErr) {
              logger.error('Failed to update session activity:', updateErr);
            }
          }
        );
        req.user = decoded;
        req.sessionId = session.id;
        next();
      }
    );
  } catch (error) {
    logger.error('Authentication error:', error);
    logger.error(`[AUTH] JWT Verification Failed: ${error.name} - ${error.message}`);
    // Provide more specific error messages
    let errorMessage = 'Invalid or expired token';
    if (error.name === 'TokenExpiredError') {
      errorMessage = 'Token has expired';
    } else if (error.name === 'JsonWebTokenError') {
      errorMessage = 'Invalid token';
    }
    res.status(401).json({
      error: errorMessage,
      sessionExpired: true // This triggers automatic logout on frontend
    });
  }
};
/**
 * Role-gate middleware factory.
 * Usage: authorize('admin', 'moderator') — allows only the listed roles.
 * Responds 401 when unauthenticated, 403 when the role is not allowed.
 */
const authorize = (...roles) => (req, res, next) => {
  if (!req.user) {
    return res.status(401).json({ error: 'Authentication required' });
  }
  if (roles.includes(req.user.role)) {
    next();
    return;
  }
  res.status(403).json({ error: 'Insufficient permissions' });
};
// Convenience middleware for admin-only routes.
// 401 when no authenticated user is attached, 403 for non-admin roles.
const requireAdmin = (req, res, next) => {
  const user = req.user;
  if (!user) {
    res.status(401).json({ error: 'Authentication required' });
    return;
  }
  if (user.role === 'admin') {
    next();
    return;
  }
  res.status(403).json({ error: 'Admin access required' });
};
module.exports = { authenticate, authorize, requireAdmin };

View file

@ -0,0 +1,360 @@
/**
* Validation Middleware
* Provides reusable validation middleware for common request patterns
*/
const {
validateUsername,
validateEmail,
validateUrl,
validatePlaylistName,
validateChannelName,
validateDescription,
validateFilename,
validateSettingKey,
validateInteger,
validateBoolean,
validateJSON,
sanitizeObject
} = require('../utils/inputValidator');
const logger = require('../utils/logger');
/**
 * Generic validation middleware factory.
 *
 * `validators` maps request sections to per-field validator functions:
 *   { body: { field: fn }, query: { field: fn }, params: { field: fn } }
 * Each validator returns { valid, errors, sanitized }.
 *
 * On failure: responds 400 with { error, details } (keys are `field`,
 * `query.field`, `params.field`). On success: sanitized body values are
 * merged back into req.body; sanitized query/params values are exposed on
 * req.sanitizedQuery / req.sanitizedParams.
 */
function createValidationMiddleware(validators) {
  return (req, res, next) => {
    const errors = {};
    const sanitizedBody = {};
    let hasErrors = false;

    // Run one section's validators, recording errors under `keyPrefix` and
    // handing sanitized values to `store`.
    const runSection = (section, source, keyPrefix, store) => {
      if (!section) return;
      for (const [field, validator] of Object.entries(section)) {
        const result = validator(source[field]);
        if (!result.valid) {
          errors[keyPrefix ? `${keyPrefix}.${field}` : field] = result.errors;
          hasErrors = true;
        } else if (result.sanitized !== undefined) {
          store(field, result.sanitized);
        }
      }
    };

    runSection(validators.body, req.body, '', (field, value) => {
      sanitizedBody[field] = value;
    });
    runSection(validators.query, req.query, 'query', (field, value) => {
      if (!req.sanitizedQuery) req.sanitizedQuery = {};
      req.sanitizedQuery[field] = value;
    });
    runSection(validators.params, req.params, 'params', (field, value) => {
      if (!req.sanitizedParams) req.sanitizedParams = {};
      req.sanitizedParams[field] = value;
    });

    if (hasErrors) {
      logger.warn('Validation failed:', { errors, path: req.path, ip: req.ip });
      return res.status(400).json({
        error: 'Validation failed',
        details: errors
      });
    }

    // Replace request body fields with their sanitized versions.
    if (Object.keys(sanitizedBody).length > 0) {
      req.body = { ...req.body, ...sanitizedBody };
    }
    next();
  };
}
/**
 * Validate playlist creation/update payloads.
 * - name: required playlist name
 * - url: optional URL (validateUrl with required=false)
 * - category: optional free text (description rules)
 * - type: one of live|vod|series|radio, defaulting to 'live'
 */
const validatePlaylist = createValidationMiddleware({
  body: {
    name: validatePlaylistName,
    url: (value) => validateUrl(value, false),
    category: (value) =>
      value ? validateDescription(value) : { valid: true, errors: [], sanitized: null },
    type: (value) => {
      if (!value) return { valid: true, errors: [], sanitized: 'live' };
      const allowedTypes = ['live', 'vod', 'series', 'radio'];
      return allowedTypes.includes(value)
        ? { valid: true, errors: [], sanitized: value }
        : { valid: false, errors: ['Invalid playlist type'], sanitized: null };
    }
  }
});
/**
 * Validate channel update payloads.
 * Both fields are optional (sanitized: undefined means "leave unchanged");
 * when present, name uses channel-name rules and group_name description rules.
 */
const validateChannelUpdate = createValidationMiddleware({
  body: {
    name: (value) => {
      if (!value) return { valid: true, errors: [], sanitized: undefined };
      return validateChannelName(value);
    },
    group_name: (value) => {
      if (!value) return { valid: true, errors: [], sanitized: undefined };
      return validateDescription(value);
    }
  }
});
/**
 * Validate settings writes: the `:key` route param plus a required `value`
 * body field. Object values are validated as JSON (max 100 KB); scalar
 * values pass through untouched.
 */
const validateSettings = createValidationMiddleware({
  params: {
    key: validateSettingKey
  },
  body: {
    value: (value) => {
      if (value === undefined || value === null) {
        return { valid: false, errors: ['Value is required'], sanitized: null };
      }
      return typeof value === 'object'
        ? validateJSON(value, 100000)
        : { valid: true, errors: [], sanitized: value };
    }
  }
});
/**
 * Validate ID parameter
 * (`:id` must parse as an integer in [1, Number.MAX_SAFE_INTEGER]).
 */
const validateIdParam = createValidationMiddleware({
  params: {
    id: (value) => validateInteger(value, 1, Number.MAX_SAFE_INTEGER)
  }
});
/**
 * Validate channelId parameter
 * (same integer rules as validateIdParam, for `:channelId` routes).
 */
const validateChannelIdParam = createValidationMiddleware({
  params: {
    channelId: (value) => validateInteger(value, 1, Number.MAX_SAFE_INTEGER)
  }
});
/**
 * Validate pagination query parameters.
 * limit: 1..1000 (default 100); offset: >= 0 (default 0).
 * Sanitized values land on req.sanitizedQuery.
 */
const validatePagination = createValidationMiddleware({
  query: {
    limit: (value) =>
      value ? validateInteger(value, 1, 1000) : { valid: true, errors: [], sanitized: 100 },
    offset: (value) =>
      value ? validateInteger(value, 0, Number.MAX_SAFE_INTEGER) : { valid: true, errors: [], sanitized: 0 }
  }
});
/**
 * Validate search query parameters.
 * Both `search` and `q` are optional; when present they are sanitized with
 * the shared description rules (sanitized values land on req.sanitizedQuery).
 */
const validateSearch = createValidationMiddleware({
  query: {
    search: (value) => {
      if (!value) return { valid: true, errors: [], sanitized: undefined };
      return validateDescription(value);
    },
    q: (value) => {
      if (!value) return { valid: true, errors: [], sanitized: undefined };
      return validateDescription(value);
    }
  }
});
/**
 * Validate user-creation payloads.
 * username/email use the shared validators; password must be a non-empty
 * string of 8–128 characters; role is required and must be 'user' or 'admin'.
 */
const validateUserCreation = createValidationMiddleware({
  body: {
    username: validateUsername,
    email: validateEmail,
    password: (value) => {
      if (typeof value !== 'string' || value.length === 0) {
        return { valid: false, errors: ['Password is required'], sanitized: null };
      }
      if (value.length < 8) {
        return { valid: false, errors: ['Password must be at least 8 characters'], sanitized: null };
      }
      if (value.length > 128) {
        return { valid: false, errors: ['Password must not exceed 128 characters'], sanitized: null };
      }
      return { valid: true, errors: [], sanitized: value };
    },
    role: (value) =>
      ['user', 'admin'].includes(value)
        ? { valid: true, errors: [], sanitized: value }
        : { valid: false, errors: ['Invalid role'], sanitized: null }
  }
});
/**
 * Validate user-update payloads.
 * The `:id` param must be a positive integer; every body field is optional
 * (sanitized: undefined means "leave unchanged"). Unlike creation, role may
 * be omitted, but when present must be 'user' or 'admin'.
 */
const validateUserUpdate = createValidationMiddleware({
  params: {
    id: (value) => validateInteger(value, 1, Number.MAX_SAFE_INTEGER)
  },
  body: {
    username: (value) => {
      if (!value) return { valid: true, errors: [], sanitized: undefined };
      return validateUsername(value);
    },
    email: (value) => {
      if (!value) return { valid: true, errors: [], sanitized: undefined };
      return validateEmail(value);
    },
    role: (value) => {
      if (!value) return { valid: true, errors: [], sanitized: undefined };
      const allowed = ['user', 'admin'];
      if (!allowed.includes(value)) {
        return { valid: false, errors: ['Invalid role'], sanitized: null };
      }
      return { valid: true, errors: [], sanitized: value };
    },
    is_active: (value) => {
      if (value === undefined) return { valid: true, errors: [], sanitized: undefined };
      return validateBoolean(value);
    }
  }
});
/**
 * Validate filename for file operations
 * (`:filename` is checked by the shared validateFilename rules — presumably
 * rejecting path traversal; confirm in utils/inputValidator).
 */
const validateFileOperation = createValidationMiddleware({
  params: {
    filename: validateFilename
  }
});
/**
 * Validate bulk-delete payloads: `ids` must be a non-empty array of at most
 * 1000 positive integers. The sanitized integer list replaces req.body.ids.
 */
const validateBulkDelete = createValidationMiddleware({
  body: {
    ids: (value) => {
      if (!Array.isArray(value)) {
        return { valid: false, errors: ['IDs must be an array'], sanitized: null };
      }
      if (value.length === 0) {
        return { valid: false, errors: ['At least one ID required'], sanitized: null };
      }
      if (value.length > 1000) {
        return { valid: false, errors: ['Cannot delete more than 1000 items at once'], sanitized: null };
      }
      const invalid = [];
      const cleaned = [];
      value.forEach((id, index) => {
        const check = validateInteger(id, 1, Number.MAX_SAFE_INTEGER);
        if (check.valid) {
          cleaned.push(check.sanitized);
        } else {
          invalid.push(`Invalid ID at index ${index}`);
        }
      });
      if (invalid.length > 0) {
        return { valid: false, errors: invalid, sanitized: null };
      }
      return { valid: true, errors: [], sanitized: cleaned };
    }
  }
});
/**
 * Build a generic text-field validator.
 * Empty/missing input is rejected when `required`, otherwise sanitized to ''.
 * Non-strings are rejected; trimmed input longer than `maxLength` is
 * rejected; surviving text is passed through validateDescription.
 * NOTE(review): validateDescription may impose its own length cap — confirm
 * it does not undercut a larger custom `maxLength`.
 */
function validateTextField(maxLength = 1000, required = true) {
  return (value) => {
    if (!value || value === '') {
      return required
        ? { valid: false, errors: ['This field is required'], sanitized: null }
        : { valid: true, errors: [], sanitized: '' };
    }
    if (typeof value !== 'string') {
      return { valid: false, errors: ['Must be a string'], sanitized: null };
    }
    const trimmed = value.trim();
    if (required && trimmed.length === 0) {
      return { valid: false, errors: ['This field is required'], sanitized: null };
    }
    if (trimmed.length > maxLength) {
      return { valid: false, errors: [`Must not exceed ${maxLength} characters`], sanitized: null };
    }
    return validateDescription(trimmed);
  };
}
module.exports = {
createValidationMiddleware,
validatePlaylist,
validateChannelUpdate,
validateSettings,
validateIdParam,
validateChannelIdParam,
validatePagination,
validateSearch,
validateUserCreation,
validateUserUpdate,
validateFileOperation,
validateBulkDelete,
validateTextField
};

View file

@ -0,0 +1,84 @@
const rateLimit = require('express-rate-limit');
/**
* Strict rate limiter for authentication endpoints
* 5 requests per 15 minutes
*/
const authLimiter = rateLimit({
windowMs: 15 * 60 * 1000, // 15 minutes
max: 5,
message: { error: 'Too many authentication attempts, please try again later' },
standardHeaders: true,
legacyHeaders: false,
});
/**
* Moderate rate limiter for data modification endpoints
* (Create, Update, Delete operations)
* 30 requests per 15 minutes
*/
const modifyLimiter = rateLimit({
windowMs: 15 * 60 * 1000, // 15 minutes
max: 30,
message: { error: 'Too many modification requests, please slow down' },
standardHeaders: true,
legacyHeaders: false,
});
/**
* Lenient rate limiter for read operations
* 100 requests per 15 minutes
*/
const readLimiter = rateLimit({
windowMs: 15 * 60 * 1000, // 15 minutes
max: 100,
message: { error: 'Too many requests, please slow down' },
standardHeaders: true,
legacyHeaders: false,
});
/**
* Moderate rate limiter for resource-intensive operations
* (Streaming, backup, file uploads)
* Increased to 1000/min to support HLS streaming which makes many segment requests
*/
const heavyLimiter = rateLimit({
windowMs: 60 * 1000, // 1 minute
max: 1000,
message: { error: 'Too many resource-intensive requests, please wait' },
standardHeaders: true,
legacyHeaders: false,
});
/**
* Very strict limiter for backup/restore operations
* 3 requests per hour
*/
const backupLimiter = rateLimit({
windowMs: 60 * 60 * 1000, // 1 hour
max: 3,
message: { error: 'Too many backup operations, please wait before trying again' },
standardHeaders: true,
legacyHeaders: false,
});
/**
* General API rate limiter
* 200 requests per 15 minutes
*/
const apiLimiter = rateLimit({
windowMs: 15 * 60 * 1000, // 15 minutes
max: 200,
message: { error: 'Too many API requests, please try again later' },
standardHeaders: true,
legacyHeaders: false,
});
module.exports = {
authLimiter,
modifyLimiter,
readLimiter,
heavyLimiter,
backupLimiter,
apiLimiter
};

525
backend/middleware/rbac.js Normal file
View file

@ -0,0 +1,525 @@
const logger = require('../utils/logger');
const { db } = require('../database/db');
/**
* RBAC (Role-Based Access Control) Middleware
*
* Implements granular permission checking following the principle of least privilege.
* Each user has roles, and each role has specific permissions.
* This prevents over-privileged accounts and limits attack surface.
*/
// Define all available permissions in the system.
// Keys are '<domain>.<action>' strings checked by requirePermission();
// values are human-readable descriptions (for admin UIs / docs).
const PERMISSIONS = {
  // User Management
  'users.view': 'View user list and details',
  'users.create': 'Create new users',
  'users.edit': 'Edit existing users',
  'users.delete': 'Delete users',
  'users.manage_roles': 'Assign and modify user roles',
  'users.unlock': 'Unlock locked user accounts',
  'users.reset_password': 'Reset user passwords',
  // Session Management
  'sessions.view_own': 'View own active sessions',
  'sessions.view_all': 'View all user sessions',
  'sessions.terminate_own': 'Terminate own sessions',
  'sessions.terminate_any': 'Terminate any user session',
  'sessions.view_stats': 'View session statistics',
  // Content Management
  'playlists.view': 'View playlists',
  'playlists.create': 'Create playlists',
  'playlists.edit': 'Edit playlists',
  'playlists.delete': 'Delete playlists',
  'playlists.import': 'Import M3U files',
  'channels.view': 'View channels',
  'channels.edit': 'Edit channel details',
  'channels.upload_logo': 'Upload custom channel logos',
  'channels.delete_logo': 'Delete custom channel logos',
  'favorites.view': 'View favorites',
  'favorites.manage': 'Add/remove favorites',
  'history.view_own': 'View own watch history',
  'history.view_all': 'View all user watch history',
  'history.delete_own': 'Delete own watch history',
  'history.delete_any': 'Delete any user watch history',
  // System & Settings
  'settings.view': 'View application settings',
  'settings.edit': 'Modify application settings',
  'stats.view': 'View analytics and statistics',
  'stats.view_detailed': 'View detailed analytics (user activity, etc.)',
  'backup.view': 'View available backups',
  'backup.create': 'Create backups',
  'backup.restore': 'Restore from backups',
  'backup.delete': 'Delete backups',
  'backup.download': 'Download backups',
  // Security Management
  'security.view_sessions': 'View security session dashboard',
  'security.view_csp': 'View CSP violation dashboard',
  'security.manage_2fa': 'Manage two-factor authentication',
  'security.view_audit': 'View audit logs',
  // Search & Discovery
  'search.use': 'Use search functionality',
  'search.admin': 'Search users and system settings',
  // VPN & Network
  'vpn.view': 'View VPN settings',
  'vpn.configure': 'Configure VPN settings',
  'vpn.connect': 'Connect/disconnect VPN'
};
// Default role definitions with their permissions.
// Seeded into the `roles` table by initializeRoles() (INSERT OR IGNORE) and
// used as an in-code fallback when a user's role row has no permissions.
const DEFAULT_ROLES = {
  'admin': {
    name: 'Administrator',
    description: 'Full system access',
    permissions: Object.keys(PERMISSIONS), // Admins have all permissions
    is_system_role: true
  },
  'moderator': {
    name: 'Moderator',
    description: 'Content management and user support',
    permissions: [
      // User viewing (but not management)
      'users.view',
      // Content management
      'playlists.view',
      'playlists.create',
      'playlists.edit',
      'playlists.delete',
      'playlists.import',
      'channels.view',
      'channels.edit',
      'channels.upload_logo',
      'channels.delete_logo',
      // History management
      'history.view_all',
      'history.delete_any',
      // Settings (view only)
      'settings.view',
      // Statistics
      'stats.view',
      'stats.view_detailed',
      // Own sessions
      'sessions.view_own',
      'sessions.terminate_own',
      // Own favorites
      'favorites.view',
      'favorites.manage',
      // Search
      'search.use',
      // Own security
      'security.manage_2fa'
    ],
    is_system_role: true
  },
  'user': {
    name: 'Regular User',
    description: 'Standard user with content access',
    permissions: [
      // Own playlists
      'playlists.view',
      'playlists.create',
      'playlists.edit',
      'playlists.delete',
      'playlists.import',
      // Channels (view and customize)
      'channels.view',
      'channels.upload_logo',
      'channels.delete_logo',
      // Own favorites
      'favorites.view',
      'favorites.manage',
      // Own history
      'history.view_own',
      'history.delete_own',
      // Own settings
      'settings.view',
      'settings.edit',
      // Own sessions
      'sessions.view_own',
      'sessions.terminate_own',
      // Search (basic)
      'search.use',
      // Own security
      'security.manage_2fa',
      // VPN (if enabled)
      'vpn.view',
      'vpn.configure',
      'vpn.connect'
    ],
    is_system_role: true
  },
  'viewer': {
    name: 'Viewer',
    description: 'Read-only access for content viewing',
    permissions: [
      // View only
      'playlists.view',
      'channels.view',
      'favorites.view',
      'favorites.manage',
      'history.view_own',
      'history.delete_own',
      'settings.view',
      'sessions.view_own',
      'sessions.terminate_own',
      'search.use',
      'security.manage_2fa'
    ],
    is_system_role: true
  }
};
/**
 * In-memory cache of per-user permission lists to cut database queries.
 * Shape: userId -> { permissions: string[], expires: epoch-ms }
 */
const permissionCache = new Map();
const CACHE_TTL = 5 * 60 * 1000; // 5 minutes

/** Drop the cached permissions for one user (call after their role changes). */
const clearUserPermissionCache = (userId) => {
  permissionCache.delete(userId);
  logger.info(`Permission cache cleared for user ${userId}`);
};

/** Drop every cached permission list (call after role/permission changes). */
const clearAllPermissionCache = () => {
  permissionCache.clear();
  logger.info('All permission cache cleared');
};
/**
 * Resolve a user's permission list, serving from the in-memory cache when
 * fresh and falling back to the users/roles tables otherwise.
 * Rejects when the user is missing or inactive; a malformed permissions
 * JSON blob degrades to an empty list (fail closed).
 */
const getUserPermissions = (userId) => {
  return new Promise((resolve, reject) => {
    // Serve from cache while the entry is still fresh.
    const cached = permissionCache.get(userId);
    if (cached && cached.expires > Date.now()) {
      resolve(cached.permissions);
      return;
    }

    const sql = `SELECT u.role, r.permissions
       FROM users u
       LEFT JOIN roles r ON u.role = r.role_key
       WHERE u.id = ? AND u.is_active = 1`;
    db.get(sql, [userId], (err, result) => {
      if (err) {
        logger.error('Error fetching user permissions:', err);
        reject(err);
        return;
      }
      if (!result) {
        reject(new Error('User not found or inactive'));
        return;
      }

      // Permissions are stored as a JSON array; fall back to the built-in
      // role definition when the DB row carries none.
      let permissions = [];
      try {
        if (result.permissions) {
          permissions = JSON.parse(result.permissions);
        } else {
          const defaultRole = DEFAULT_ROLES[result.role];
          permissions = defaultRole ? defaultRole.permissions : [];
        }
      } catch (parseErr) {
        logger.error('Error parsing permissions:', parseErr);
        permissions = [];
      }

      permissionCache.set(userId, {
        permissions,
        expires: Date.now() + CACHE_TTL
      });
      resolve(permissions);
    });
  });
};
/**
 * Shared permission lookup: resolves the user's permissions and applies
 * `check`; any lookup failure is logged and treated as "no access"
 * (fail closed). Deduplicates the identical try/catch previously repeated
 * in all three helpers below.
 */
const checkUserPermissions = async (userId, check) => {
  try {
    return check(await getUserPermissions(userId));
  } catch (error) {
    logger.error('Permission check failed:', error);
    return false;
  }
};

/** True when the user holds the single given permission. */
const hasPermission = (userId, permission) =>
  checkUserPermissions(userId, (perms) => perms.includes(permission));

/** True when the user holds ANY of the listed permissions. */
const hasAnyPermission = (userId, permissionList) =>
  checkUserPermissions(userId, (perms) => permissionList.some((p) => perms.includes(p)));

/** True when the user holds ALL of the listed permissions. */
const hasAllPermissions = (userId, permissionList) =>
  checkUserPermissions(userId, (perms) => permissionList.every((p) => perms.includes(p)));
/**
 * Shared implementation for the two permission middlewares below.
 * `requireAll` selects ANY-of (false) vs ALL-of (true) semantics; the public
 * factories differ only in matching mode and log/detail wording, which were
 * previously duplicated across two near-identical bodies.
 */
const permissionMiddleware = (permissionList, requireAll) => {
  return async (req, res, next) => {
    if (!req.user || !req.user.userId) {
      return res.status(401).json({
        error: 'Authentication required',
        code: 'AUTH_REQUIRED'
      });
    }
    try {
      // Bypass permission check for user ID 1 (first admin) or role 'admin'.
      if (req.user.userId === 1 || req.user.role === 'admin') {
        req.userPermissions = Object.keys(PERMISSIONS); // Grant all permissions
        return next();
      }
      const userPermissions = await getUserPermissions(req.user.userId);
      const hasAccess = requireAll
        ? permissionList.every((p) => userPermissions.includes(p))
        : permissionList.some((p) => userPermissions.includes(p));
      if (!hasAccess) {
        logger.warn(requireAll
          ? `Access denied: User ${req.user.userId} lacks all permissions: ${permissionList.join(', ')}`
          : `Access denied: User ${req.user.userId} lacks permission(s): ${permissionList.join(', ')}`);
        return res.status(403).json({
          error: 'Insufficient permissions',
          code: 'INSUFFICIENT_PERMISSIONS',
          required: permissionList,
          details: requireAll
            ? 'You do not have all the required permissions to perform this action'
            : 'You do not have the required permissions to perform this action'
        });
      }
      // Attach user permissions to request for further checks
      req.userPermissions = userPermissions;
      next();
    } catch (error) {
      logger.error('Permission check error:', error);
      res.status(500).json({
        error: 'Permission validation failed',
        code: 'PERMISSION_CHECK_FAILED'
      });
    }
  };
};

/**
 * Middleware: Require specific permission(s) — ANY-of semantics.
 * Usage: requirePermission('users.view')
 * Usage: requirePermission(['users.view', 'users.edit'])
 */
const requirePermission = (requiredPermissions) => {
  // Normalize to array
  const permissionList = Array.isArray(requiredPermissions)
    ? requiredPermissions
    : [requiredPermissions];
  return permissionMiddleware(permissionList, false);
};

/**
 * Middleware: Require ALL specified permissions.
 * Usage: requireAllPermissions(['users.view', 'users.edit'])
 */
const requireAllPermissions = (permissionList) =>
  permissionMiddleware(permissionList, true);
/**
 * Initialize roles table and seed default roles.
 * Creates the `roles` and `permission_audit_log` tables if missing, then
 * inserts the DEFAULT_ROLES definitions with INSERT OR IGNORE so existing
 * (possibly customized) rows are never overwritten.
 * Runs asynchronously via db.serialize(); errors are logged, not thrown.
 */
const initializeRoles = () => {
  db.serialize(() => {
    // Create roles table
    db.run(`CREATE TABLE IF NOT EXISTS roles (
      id INTEGER PRIMARY KEY AUTOINCREMENT,
      role_key TEXT UNIQUE NOT NULL,
      name TEXT NOT NULL,
      description TEXT,
      permissions TEXT NOT NULL,
      is_system_role BOOLEAN DEFAULT 0,
      created_at DATETIME DEFAULT CURRENT_TIMESTAMP,
      updated_at DATETIME DEFAULT CURRENT_TIMESTAMP
    )`, (err) => {
      if (err) {
        logger.error('Failed to create roles table:', err);
        return;
      }
      // Seed default roles (only runs after the table exists)
      Object.entries(DEFAULT_ROLES).forEach(([roleKey, roleData]) => {
        db.run(
          `INSERT OR IGNORE INTO roles (role_key, name, description, permissions, is_system_role)
           VALUES (?, ?, ?, ?, ?)`,
          [
            roleKey,
            roleData.name,
            roleData.description,
            JSON.stringify(roleData.permissions),
            roleData.is_system_role ? 1 : 0
          ],
          (err) => {
            if (err) {
              logger.error(`Failed to seed role ${roleKey}:`, err);
            } else {
              logger.info(`✓ Role seeded: ${roleKey}`);
            }
          }
        );
      });
      // Create permission audit log table
      db.run(`CREATE TABLE IF NOT EXISTS permission_audit_log (
        id INTEGER PRIMARY KEY AUTOINCREMENT,
        user_id INTEGER NOT NULL,
        action TEXT NOT NULL,
        target_type TEXT NOT NULL,
        target_id INTEGER,
        old_value TEXT,
        new_value TEXT,
        ip_address TEXT,
        user_agent TEXT,
        created_at DATETIME DEFAULT CURRENT_TIMESTAMP,
        FOREIGN KEY (user_id) REFERENCES users(id) ON DELETE CASCADE
      )`, (err) => {
        if (err) {
          logger.error('Failed to create permission_audit_log table:', err);
        } else {
          logger.info('✓ Permission audit log table created');
        }
      });
    });
  });
};
/**
 * Log permission-related actions for the audit trail.
 * Best-effort: database failures are logged, never thrown.
 *
 * @param {number} userId acting user
 * @param {string} action e.g. 'role.assign'
 * @param {string} targetType entity type acted on
 * @param {number|null} targetId id of the target entity, if any
 * @param {*} oldValue previous state (JSON-serialized when present)
 * @param {*} newValue new state (JSON-serialized when present)
 * @param {object} [req] Express request, used for IP / user agent
 */
const logPermissionAction = (userId, action, targetType, targetId, oldValue, newValue, req) => {
  const ipAddress = req?.ip || req?.connection?.remoteAddress || 'unknown';
  // Full optional chain: `req?.headers['user-agent']` threw a TypeError when
  // a caller passed an object that exists but has no `headers` property.
  const userAgent = req?.headers?.['user-agent'] || 'unknown';
  db.run(
    `INSERT INTO permission_audit_log
     (user_id, action, target_type, target_id, old_value, new_value, ip_address, user_agent)
     VALUES (?, ?, ?, ?, ?, ?, ?, ?)`,
    [
      userId,
      action,
      targetType,
      targetId || null,
      oldValue ? JSON.stringify(oldValue) : null,
      newValue ? JSON.stringify(newValue) : null,
      ipAddress,
      userAgent
    ],
    (err) => {
      if (err) {
        logger.error('Failed to log permission action:', err);
      }
    }
  );
};
module.exports = {
PERMISSIONS,
DEFAULT_ROLES,
requirePermission,
requireAllPermissions,
hasPermission,
hasAnyPermission,
hasAllPermissions,
getUserPermissions,
clearUserPermissionCache,
clearAllPermissionCache,
initializeRoles,
logPermissionAction
};

View file

@ -0,0 +1,349 @@
/**
* Enhanced Security Middleware
* Implements account lockout, password expiry, and session management
*/
const { db } = require('../database/db');
const { ACCOUNT_LOCKOUT, PASSWORD_EXPIRY, SESSION_POLICY } = require('../utils/passwordPolicy');
const SecurityAuditLogger = require('../utils/securityAudit');
const logger = require('../utils/logger');
/**
 * Determine whether an account is currently locked out.
 * Resolves { locked: false } for unknown users and for expired lockouts
 * (an expired lockout is cleared as a side effect); otherwise resolves
 * { locked: true, remainingMinutes, message }.
 */
async function checkAccountLockout(userId) {
  return new Promise((resolve, reject) => {
    db.get('SELECT locked_until FROM users WHERE id = ?', [userId], (err, user) => {
      if (err) return reject(err);
      if (!user || !user.locked_until) return resolve({ locked: false });

      const lockoutEnd = new Date(user.locked_until);
      const now = new Date();
      if (now >= lockoutEnd) {
        // Lockout window has passed — reset the counters (fire-and-forget).
        db.run('UPDATE users SET locked_until = NULL, failed_login_attempts = 0 WHERE id = ?', [userId]);
        return resolve({ locked: false });
      }

      const remainingMinutes = Math.ceil((lockoutEnd - now) / 60000);
      resolve({
        locked: true,
        remainingMinutes,
        message: `Account locked. Try again in ${remainingMinutes} minutes.`
      });
    });
  });
}
/**
 * Record failed login attempt.
 * Writes an audit entry, bumps the per-user failure counter, and locks the
 * account once ACCOUNT_LOCKOUT.maxFailedAttempts is reached. Errors are
 * swallowed after logging so a DB hiccup never breaks the login flow.
 */
async function recordFailedLogin(username, ip, userAgent) {
  try {
    // Always audit the failure, even if the username matches no account.
    await SecurityAuditLogger.logLoginFailure(username, 'Invalid credentials', { ip, userAgent });
    const user = await new Promise((resolve, reject) => {
      db.get(
        'SELECT id, failed_login_attempts FROM users WHERE username = ? OR email = ?',
        [username, username],
        (err, row) => (err ? reject(err) : resolve(row))
      );
    });
    if (!user) {
      return;
    }
    const failedAttempts = (user.failed_login_attempts || 0) + 1;
    await new Promise((resolve, reject) => {
      db.run(
        'UPDATE users SET failed_login_attempts = ?, last_failed_login = ? WHERE id = ?',
        [failedAttempts, new Date().toISOString(), user.id],
        (err) => (err ? reject(err) : resolve())
      );
    });
    if (failedAttempts < ACCOUNT_LOCKOUT.maxFailedAttempts) {
      return;
    }
    // Threshold hit: lock the account for the configured duration.
    const lockUntil = new Date(Date.now() + ACCOUNT_LOCKOUT.lockoutDuration).toISOString();
    await new Promise((resolve, reject) => {
      db.run(
        'UPDATE users SET locked_until = ? WHERE id = ?',
        [lockUntil, user.id],
        (err) => (err ? reject(err) : resolve())
      );
    });
    await SecurityAuditLogger.logAccountLockout(user.id, {
      ip,
      userAgent,
      failedAttempts
    });
    logger.warn(`Account locked for user ${username} after ${failedAttempts} failed attempts`);
  } catch (error) {
    logger.error('Error recording failed login:', error);
  }
}
/**
 * Clear failed login attempts on successful login.
 * Resets both the counter and the last-failure timestamp for the user.
 */
async function clearFailedAttempts(userId) {
  return new Promise((resolve, reject) => {
    db.run(
      'UPDATE users SET failed_login_attempts = 0, last_failed_login = NULL WHERE id = ?',
      [userId],
      (err) => {
        if (err) {
          reject(err);
        } else {
          resolve();
        }
      }
    );
  });
}
/**
 * Check if password has expired.
 * Resolves one of:
 *   { expired: true, warning: false, message, gracePeriodDays }
 *   { expired: false, warning: true, daysRemaining, message }
 *   { expired: false, warning: false }
 * The stored password_expires_at wins; otherwise expiry is derived from
 * password_changed_at (or, lacking that, from "now") plus the policy lifetime.
 */
async function checkPasswordExpiry(userId) {
  if (!PASSWORD_EXPIRY.enabled) {
    return { expired: false, warning: false };
  }
  return new Promise((resolve, reject) => {
    db.get(
      'SELECT password_changed_at, password_expires_at FROM users WHERE id = ?',
      [userId],
      (err, user) => {
        if (err) {
          reject(err);
          return;
        }
        if (!user) {
          resolve({ expired: false, warning: false });
          return;
        }
        let expiryDate;
        if (user.password_expires_at) {
          expiryDate = new Date(user.password_expires_at);
        } else {
          // No explicit expiry on record — derive it from the last change
          // date, or start the clock now when no change date exists either.
          expiryDate = user.password_changed_at ? new Date(user.password_changed_at) : new Date();
          expiryDate.setDate(expiryDate.getDate() + PASSWORD_EXPIRY.expiryDays);
        }
        const daysUntilExpiry = Math.ceil((expiryDate - new Date()) / 86400000);
        if (daysUntilExpiry <= 0) {
          resolve({
            expired: true,
            warning: false,
            message: 'Your password has expired. Please change it to continue.',
            gracePeriodDays: PASSWORD_EXPIRY.gracePeriodDays
          });
        } else if (daysUntilExpiry <= PASSWORD_EXPIRY.warningDays) {
          resolve({
            expired: false,
            warning: true,
            daysRemaining: daysUntilExpiry,
            message: `Your password will expire in ${daysUntilExpiry} days. Please change it soon.`
          });
        } else {
          resolve({ expired: false, warning: false });
        }
      }
    );
  });
}
/**
 * Update password expiry date.
 * Stamps password_changed_at with "now" and password_expires_at with
 * now + PASSWORD_EXPIRY.expiryDays. No-op when expiry is disabled.
 */
async function updatePasswordExpiry(userId) {
  if (!PASSWORD_EXPIRY.enabled) {
    return;
  }
  const changedAt = new Date();
  const expiresAt = new Date(changedAt);
  expiresAt.setDate(expiresAt.getDate() + PASSWORD_EXPIRY.expiryDays);
  return new Promise((resolve, reject) => {
    db.run(
      'UPDATE users SET password_changed_at = ?, password_expires_at = ? WHERE id = ?',
      [changedAt.toISOString(), expiresAt.toISOString(), userId],
      (err) => {
        if (err) {
          reject(err);
        } else {
          resolve();
        }
      }
    );
  });
}
/**
 * Middleware: Check account lockout before authentication.
 * Responds 423 (Locked) when the named account is currently locked out;
 * otherwise passes through. Unknown usernames and internal errors both fall
 * through to the real auth handler so no extra information is leaked.
 */
const enforceAccountLockout = async (req, res, next) => {
  const { username } = req.body;
  if (!username) {
    return next();
  }
  try {
    const account = await new Promise((resolve, reject) => {
      db.get(
        'SELECT id FROM users WHERE username = ? OR email = ?',
        [username, username],
        (err, row) => (err ? reject(err) : resolve(row))
      );
    });
    if (!account) {
      // No such user — let the authentication handler produce the usual 401.
      return next();
    }
    const lockoutStatus = await checkAccountLockout(account.id);
    if (!lockoutStatus.locked) {
      return next();
    }
    res.status(423).json({
      error: lockoutStatus.message,
      remainingMinutes: lockoutStatus.remainingMinutes,
      locked: true
    });
  } catch (error) {
    logger.error('Account lockout check error:', error);
    next();
  }
};
/**
 * Middleware: Check password expiry after authentication.
 * Blocks with 403 when the password has expired; when it is merely close to
 * expiring, attaches warning headers and lets the request proceed. Errors
 * fail open (logged, then next()) so expiry checks can't take the API down.
 */
const enforcePasswordExpiry = async (req, res, next) => {
  if (!req.user?.userId) {
    return next();
  }
  try {
    const expiryStatus = await checkPasswordExpiry(req.user.userId);
    if (expiryStatus.expired) {
      return res.status(403).json({
        error: expiryStatus.message,
        passwordExpired: true,
        gracePeriodDays: expiryStatus.gracePeriodDays,
        requirePasswordChange: true
      });
    }
    if (expiryStatus.warning) {
      // Soft warning: surface it via headers without blocking the request.
      res.setHeader('X-Password-Expiry-Warning', expiryStatus.message);
      res.setHeader('X-Password-Days-Remaining', String(expiryStatus.daysRemaining));
    }
    next();
  } catch (error) {
    logger.error('Password expiry check error:', error);
    next();
  }
};
/**
 * Manage active sessions: create a tracked session row for a new token.
 *
 * Enforces SESSION_POLICY.maxConcurrentSessions by evicting the
 * least-recently-active sessions before inserting, then stores the new
 * session with an absolute expiry of SESSION_POLICY.absoluteTimeout.
 *
 * @param {number} userId - users.id owning the session
 * @param {string} token - session token to store for lookup/revocation
 * @param {object} req - Express request (source of IP and User-Agent)
 * @returns {Promise<void>}
 */
async function createSession(userId, token, req) {
  // NOTE(review): x-forwarded-for can be a comma-separated proxy chain —
  // confirm the proxy setup before trusting this value as a single IP.
  const ip = req.ip || req.headers['x-forwarded-for'] || req.connection.remoteAddress;
  const userAgent = req.headers['user-agent'];
  const expiresAt = new Date(Date.now() + SESSION_POLICY.absoluteTimeout);
  // Count sessions that have not yet hit their absolute expiry.
  const activeSessions = await new Promise((resolve, reject) => {
    db.all(
      'SELECT COUNT(*) as count FROM active_sessions WHERE user_id = ? AND expires_at > ?',
      [userId, new Date().toISOString()],
      (err, rows) => err ? reject(err) : resolve(rows[0].count)
    );
  });
  if (activeSessions >= SESSION_POLICY.maxConcurrentSessions) {
    // BUG FIX: previously exactly one oldest session was deleted, which never
    // brought the count back under the cap if it was already above it (e.g.
    // after maxConcurrentSessions was lowered). Evict exactly enough of the
    // least-recently-active sessions to leave room for the new one.
    const evictCount = activeSessions - SESSION_POLICY.maxConcurrentSessions + 1;
    await new Promise((resolve, reject) => {
      db.run(
        'DELETE FROM active_sessions WHERE id IN (SELECT id FROM active_sessions WHERE user_id = ? ORDER BY last_activity ASC LIMIT ?)',
        [userId, evictCount],
        (err) => err ? reject(err) : resolve()
      );
    });
  }
  // Create new session
  return new Promise((resolve, reject) => {
    db.run(
      'INSERT INTO active_sessions (user_id, session_token, ip_address, user_agent, expires_at) VALUES (?, ?, ?, ?, ?)',
      [userId, token, ip, userAgent, expiresAt.toISOString()],
      (err) => err ? reject(err) : resolve()
    );
  });
}
/**
 * Update session activity.
 * Touches last_activity for the session matching the given token.
 */
async function updateSessionActivity(token) {
  const stampedAt = new Date().toISOString();
  return new Promise((resolve, reject) => {
    db.run(
      'UPDATE active_sessions SET last_activity = ? WHERE session_token = ?',
      [stampedAt, token],
      (err) => {
        if (err) {
          reject(err);
        } else {
          resolve();
        }
      }
    );
  });
}
/**
 * Cleanup expired sessions.
 * Deletes all rows past their absolute expiry and resolves with the number
 * of rows removed (logged when non-zero).
 */
async function cleanupExpiredSessions() {
  return new Promise((resolve, reject) => {
    db.run(
      'DELETE FROM active_sessions WHERE expires_at < ?',
      [new Date().toISOString()],
      // Plain function, not an arrow: sqlite3 exposes the affected-row
      // count as `this.changes` on the callback context.
      function (err) {
        if (err) {
          reject(err);
          return;
        }
        const removed = this.changes;
        if (removed > 0) {
          logger.info(`Cleaned up ${removed} expired sessions`);
        }
        resolve(removed);
      }
    );
  });
}
// Run session cleanup every hour.
// BUG FIX: cleanupExpiredSessions returns a promise; passing it directly to
// setInterval left any rejection unhandled (a crash on recent Node versions).
// The rejection is now caught and logged, and unref() keeps this maintenance
// timer from holding the process open on shutdown.
const sessionCleanupTimer = setInterval(() => {
  cleanupExpiredSessions().catch((err) => {
    logger.error('Scheduled session cleanup failed:', err);
  });
}, 60 * 60 * 1000);
if (typeof sessionCleanupTimer.unref === 'function') {
  sessionCleanupTimer.unref();
}
// Exported security surface: lockout + password-expiry checks, the two
// Express middleware wrappers, and active-session lifecycle helpers.
module.exports = {
  checkAccountLockout,
  recordFailedLogin,
  clearFailedAttempts,
  checkPasswordExpiry,
  updatePasswordExpiry,
  enforceAccountLockout,
  enforcePasswordExpiry,
  createSession,
  updateSessionActivity,
  cleanupExpiredSessions
};

6253
backend/package-lock.json generated Normal file

File diff suppressed because it is too large Load diff

44
backend/package.json Normal file
View file

@ -0,0 +1,44 @@
{
"name": "streamflow-backend",
"version": "1.0.0",
"description": "StreamFlow IPTV Backend API",
"main": "server.js",
"scripts": {
"start": "node server.js",
"dev": "nodemon server.js",
"security:audit": "npm audit --audit-level=moderate",
"security:lint": "eslint . --ext .js",
"security:check": "npm run security:audit && npm run security:lint"
},
"dependencies": {
"express": "^4.18.2",
"sqlite3": "^5.1.6",
"bcryptjs": "^2.4.3",
"jsonwebtoken": "^9.0.2",
"express-rate-limit": "^7.1.5",
"helmet": "^7.1.0",
"cors": "^2.8.5",
"multer": "^1.4.5-lts.1",
"node-cron": "^3.0.3",
"axios": "^1.6.5",
"xml2js": "^0.6.2",
"fluent-ffmpeg": "^2.1.2",
"socks-proxy-agent": "^8.0.2",
"express-validator": "^7.0.1",
"dotenv": "^16.3.1",
"compression": "^1.7.4",
"winston": "^3.11.0",
"iptv-playlist-parser": "^0.12.2",
"bcrypt": "^5.1.1",
"speakeasy": "^2.0.0",
"qrcode": "^1.5.3",
"archiver": "^7.0.1",
"unzipper": "^0.12.3",
"express-fileupload": "^1.5.1"
},
"devDependencies": {
"nodemon": "^3.0.2",
"eslint": "^8.57.0",
"eslint-plugin-security": "^3.0.1"
}
}

776
backend/routes/auth.js Normal file
View file

@ -0,0 +1,776 @@
const express = require('express');
const router = express.Router();
const bcrypt = require('bcryptjs');
const jwt = require('jsonwebtoken');
const { body, validationResult } = require('express-validator');
const { authLimiter } = require('../middleware/rateLimiter');
const { authenticate } = require('../middleware/auth');
const { db } = require('../database/db');
const logger = require('../utils/logger');
const { validatePassword, calculatePasswordStrength } = require('../utils/passwordPolicy');
const SecurityAuditLogger = require('../utils/securityAudit');
const {
enforceAccountLockout,
recordFailedLogin,
clearFailedAttempts,
updatePasswordExpiry,
createSession,
checkPasswordExpiry
} = require('../middleware/securityEnhancements');
// SECURITY NOTE: the fallback value is a development convenience only —
// JWT_SECRET must be set in production or every deployment signs tokens
// with the same guessable key.
const JWT_SECRET = process.env.JWT_SECRET || 'change_this_in_production';
// Token lifetime; matches the 7-day auth_token cookie maxAge set below.
const JWT_EXPIRES_IN = '7d';
// Register - Controlled by DISABLE_SIGNUPS environment variable.
// Flow: signup-enabled gate -> input validation -> password policy check ->
// insert user -> stamp expiry -> audit -> issue JWT -> session + cookie.
router.post('/register',
  [
    body('username').trim().isLength({ min: 3, max: 50 }).isAlphanumeric(),
    body('email').isEmail().normalizeEmail(),
    body('password').notEmpty()
  ],
  async (req, res) => {
    const ip = req.ip || req.headers['x-forwarded-for'] || req.connection.remoteAddress;
    const userAgent = req.headers['user-agent'];
    // Check if signups are disabled (default: true)
    const disableSignups = process.env.DISABLE_SIGNUPS !== 'false';
    if (disableSignups) {
      await SecurityAuditLogger.logAuthEvent(null, 'registration_attempt', 'blocked', {
        ip,
        userAgent,
        reason: 'Registration disabled'
      });
      return res.status(403).json({
        error: 'Registration is disabled. Contact an administrator to create your account.'
      });
    }
    // If signups are enabled, proceed with registration (fallback for future flexibility)
    const errors = validationResult(req);
    if (!errors.isEmpty()) {
      return res.status(400).json({ errors: errors.array() });
    }
    const { username, email, password } = req.body;
    try {
      // Validate password against policy
      const passwordValidation = validatePassword(password, username, email);
      if (!passwordValidation.valid) {
        await SecurityAuditLogger.logAuthEvent(null, 'registration_attempt', 'failed', {
          ip,
          userAgent,
          username,
          reason: passwordValidation.errors.join(', ')
        });
        return res.status(400).json({
          error: 'Password does not meet requirements',
          details: passwordValidation.errors,
          strength: calculatePasswordStrength(password)
        });
      }
      const hashedPassword = await bcrypt.hash(password, 10);
      const now = new Date().toISOString();
      db.run(
        'INSERT INTO users (username, email, password, role, must_change_password, password_changed_at, password_expires_at) VALUES (?, ?, ?, ?, ?, ?, ?)',
        [username, email, hashedPassword, 'user', 0, now, null],
        // Plain function: sqlite3 exposes the new row id as this.lastID.
        async function(err) {
          if (err) {
            // UNIQUE constraint -> duplicate username/email; anything else is a 500.
            if (err.message.includes('UNIQUE')) {
              await SecurityAuditLogger.logAuthEvent(null, 'registration_attempt', 'failed', {
                ip,
                userAgent,
                username,
                reason: 'Duplicate username or email'
              });
              return res.status(400).json({ error: 'Username or email already exists' });
            }
            logger.error('Registration error:', err);
            return res.status(500).json({ error: 'Registration failed' });
          }
          const userId = this.lastID;
          // Update password expiry
          await updatePasswordExpiry(userId);
          // Log successful registration
          await SecurityAuditLogger.logAuthEvent(userId, 'registration', 'success', {
            ip,
            userAgent,
            username
          });
          const token = jwt.sign(
            { userId, role: 'user' },
            JWT_SECRET,
            { expiresIn: JWT_EXPIRES_IN }
          );
          // CWE-778: Log token issuance
          await SecurityAuditLogger.logTokenIssuance(userId, 'JWT', {
            ip,
            userAgent,
            expiresIn: JWT_EXPIRES_IN,
            purpose: 'registration'
          });
          // Create session
          await createSession(userId, token, req);
          // Set secure HTTP-only cookie
          res.cookie('auth_token', token, {
            httpOnly: true,
            secure: process.env.NODE_ENV === 'production',
            sameSite: 'strict',
            maxAge: 7 * 24 * 60 * 60 * 1000 // 7 days
          });
          res.status(201).json({
            message: 'Registration successful',
            token,
            user: {
              id: userId,
              username,
              email,
              role: 'user'
            }
          });
        }
      );
    } catch (error) {
      logger.error('Registration error:', error);
      await SecurityAuditLogger.logAuthEvent(null, 'registration_attempt', 'error', {
        ip,
        userAgent,
        error: error.message
      });
      res.status(500).json({ error: 'Registration failed' });
    }
  }
);
// Login with strict rate limiting and account lockout.
// Flow: validate -> fetch user -> active check -> bcrypt compare -> clear
// failure counter -> expiry check -> (2FA branch with temp token | full JWT
// + session + cookie). Failed attempts feed the lockout counter.
router.post('/login', authLimiter, enforceAccountLockout,
  [
    body('username').trim().notEmpty(),
    body('password').notEmpty()
  ],
  async (req, res) => {
    const ip = req.ip || req.headers['x-forwarded-for'] || req.connection.remoteAddress;
    const userAgent = req.headers['user-agent'];
    const errors = validationResult(req);
    if (!errors.isEmpty()) {
      return res.status(400).json({ errors: errors.array() });
    }
    const { username, password } = req.body;
    try {
      db.get(
        'SELECT * FROM users WHERE username = ? OR email = ?',
        [username, username],
        async (err, user) => {
          if (err) {
            logger.error('Login error:', err);
            return res.status(500).json({ error: 'Login failed' });
          }
          if (!user) {
            await recordFailedLogin(username, ip, userAgent);
            return res.status(401).json({ error: 'Invalid credentials' });
          }
          // Check if user is active
          // NOTE(review): this 403 confirms the account exists before the
          // password is checked — acceptable enumeration trade-off? confirm.
          if (!user.is_active) {
            await SecurityAuditLogger.logLoginFailure(username, 'Account disabled', { ip, userAgent });
            return res.status(403).json({ error: 'Account is disabled. Contact an administrator.' });
          }
          const isValidPassword = await bcrypt.compare(password, user.password);
          if (!isValidPassword) {
            await recordFailedLogin(username, ip, userAgent);
            return res.status(401).json({ error: 'Invalid credentials' });
          }
          // Clear failed attempts on successful password check
          await clearFailedAttempts(user.id);
          // Check password expiry
          const expiryStatus = await checkPasswordExpiry(user.id);
          if (expiryStatus.expired) {
            await SecurityAuditLogger.logLoginFailure(username, 'Password expired', { ip, userAgent });
            return res.status(403).json({
              error: expiryStatus.message,
              passwordExpired: true,
              requirePasswordChange: true
            });
          }
          // Check if 2FA is enabled
          if (user.two_factor_enabled) {
            // Create temporary token for 2FA verification (short-lived,
            // only valid for the /verify-2fa step).
            const tempToken = jwt.sign(
              { userId: user.id, temp: true, purpose: '2fa' },
              JWT_SECRET,
              { expiresIn: '10m' }
            );
            // CWE-778: Log temp token issuance for 2FA
            await SecurityAuditLogger.logTokenIssuance(user.id, 'TEMP_2FA', {
              ip,
              userAgent,
              expiresIn: '10m',
              purpose: '2fa'
            });
            await SecurityAuditLogger.logAuthEvent(user.id, '2fa_required', 'pending', { ip, userAgent });
            return res.json({
              require2FA: true,
              tempToken,
              userId: user.id,
              passwordWarning: expiryStatus.warning ? expiryStatus.message : null
            });
          }
          const token = jwt.sign(
            { userId: user.id, role: user.role },
            JWT_SECRET,
            { expiresIn: JWT_EXPIRES_IN }
          );
          // CWE-778: Log token issuance
          await SecurityAuditLogger.logTokenIssuance(user.id, 'JWT', {
            ip,
            userAgent,
            expiresIn: JWT_EXPIRES_IN,
            purpose: 'login'
          });
          // Update last login (fire-and-forget; errors are not awaited here)
          db.run(
            'UPDATE users SET last_login_at = ?, last_login_ip = ? WHERE id = ?',
            [new Date().toISOString(), ip, user.id]
          );
          // Create session
          await createSession(user.id, token, req);
          // Log successful login
          await SecurityAuditLogger.logLoginSuccess(user.id, { ip, userAgent });
          // Set secure HTTP-only cookie
          res.cookie('auth_token', token, {
            httpOnly: true,
            secure: process.env.NODE_ENV === 'production',
            sameSite: 'strict',
            maxAge: 7 * 24 * 60 * 60 * 1000 // 7 days
          });
          res.json({
            message: 'Login successful',
            token,
            user: {
              id: user.id,
              username: user.username,
              email: user.email,
              role: user.role,
              must_change_password: user.must_change_password === 1
            },
            passwordWarning: expiryStatus.warning ? expiryStatus.message : null
          });
        }
      );
    } catch (error) {
      logger.error('Login error:', error);
      await SecurityAuditLogger.logAuthEvent(null, 'login_attempt', 'error', {
        ip,
        userAgent,
        error: error.message
      });
      res.status(500).json({ error: 'Login failed' });
    }
  }
);
// Verify 2FA and complete login.
// Accepts the temp token from /login plus either a one-time backup code or a
// TOTP code; on success issues the full JWT, session, and cookie.
router.post('/verify-2fa', authLimiter,
  [
    body('tempToken').notEmpty(),
    body('code').notEmpty()
  ],
  async (req, res) => {
    const ip = req.ip || req.headers['x-forwarded-for'] || req.connection.remoteAddress;
    const userAgent = req.headers['user-agent'];
    const errors = validationResult(req);
    if (!errors.isEmpty()) {
      return res.status(400).json({ errors: errors.array() });
    }
    const { tempToken, code } = req.body;
    try {
      // Verify temp token — must be the short-lived 2FA token from /login.
      const decoded = jwt.verify(tempToken, JWT_SECRET);
      if (!decoded.temp || decoded.purpose !== '2fa') {
        return res.status(401).json({ error: 'Invalid token' });
      }
      const speakeasy = require('speakeasy');
      db.get(
        'SELECT * FROM users WHERE id = ?',
        [decoded.userId],
        async (err, user) => {
          if (err || !user) {
            logger.error('2FA verify - user not found:', err);
            return res.status(401).json({ error: 'Invalid token' });
          }
          if (!user.two_factor_enabled) {
            return res.status(400).json({ error: '2FA not enabled for this user' });
          }
          // Check if it's a backup code.
          // NOTE(review): codes appear to be stored and compared as plain
          // text in SQL (not hashed, not timing-safe) — confirm whether the
          // backup codes table stores hashes. Also note a lookup error here
          // is silently treated as "no backup code" and falls through to TOTP.
          db.get(
            'SELECT id FROM two_factor_backup_codes WHERE user_id = ? AND code = ? AND used = 0',
            [user.id, code.toUpperCase()],
            async (err, backupCode) => {
              if (backupCode) {
                // Mark backup code as used (single-use)
                db.run(
                  'UPDATE two_factor_backup_codes SET used = 1, used_at = CURRENT_TIMESTAMP WHERE id = ?',
                  [backupCode.id]
                );
                logger.info(`Backup code used for user ${user.id}`);
                // Log 2FA success with backup code
                await SecurityAuditLogger.log2FAEvent(user.id, 'backup_code_used', 'success', { ip, userAgent });
                // Generate full token
                const token = jwt.sign(
                  { userId: user.id, role: user.role },
                  JWT_SECRET,
                  { expiresIn: JWT_EXPIRES_IN }
                );
                // CWE-778: Log token issuance after 2FA backup code
                await SecurityAuditLogger.logTokenIssuance(user.id, 'JWT', {
                  ip,
                  userAgent,
                  expiresIn: JWT_EXPIRES_IN,
                  purpose: '2fa_backup_verification'
                });
                // Update last login
                db.run(
                  'UPDATE users SET last_login_at = ?, last_login_ip = ? WHERE id = ?',
                  [new Date().toISOString(), ip, user.id]
                );
                // Create session
                await createSession(user.id, token, req);
                // Log successful login
                await SecurityAuditLogger.logLoginSuccess(user.id, { ip, userAgent, method: '2fa_backup' });
                // Set secure HTTP-only cookie
                res.cookie('auth_token', token, {
                  httpOnly: true,
                  secure: process.env.NODE_ENV === 'production',
                  sameSite: 'strict',
                  maxAge: 7 * 24 * 60 * 60 * 1000 // 7 days
                });
                return res.json({
                  message: 'Login successful with backup code',
                  token,
                  user: {
                    id: user.id,
                    username: user.username,
                    email: user.email,
                    role: user.role,
                    must_change_password: user.must_change_password === 1
                  }
                });
              }
              // Verify TOTP code (window: 2 allows +/- 2 time steps of drift)
              const verified = speakeasy.totp.verify({
                secret: user.two_factor_secret,
                encoding: 'base32',
                token: code,
                window: 2
              });
              if (!verified) {
                await SecurityAuditLogger.log2FAEvent(user.id, 'totp_verification', 'failed', {
                  ip,
                  userAgent,
                  reason: 'Invalid code'
                });
                return res.status(400).json({ error: 'Invalid 2FA code' });
              }
              // Log 2FA success
              await SecurityAuditLogger.log2FAEvent(user.id, 'totp_verification', 'success', { ip, userAgent });
              // Generate full token
              const token = jwt.sign(
                { userId: user.id, role: user.role },
                JWT_SECRET,
                { expiresIn: JWT_EXPIRES_IN }
              );
              // CWE-778: Log token issuance after TOTP 2FA
              await SecurityAuditLogger.logTokenIssuance(user.id, 'JWT', {
                ip,
                userAgent,
                expiresIn: JWT_EXPIRES_IN,
                purpose: '2fa_totp_verification'
              });
              // Update last login
              db.run(
                'UPDATE users SET last_login_at = ?, last_login_ip = ? WHERE id = ?',
                [new Date().toISOString(), ip, user.id]
              );
              // Create session
              await createSession(user.id, token, req);
              // Log successful login
              await SecurityAuditLogger.logLoginSuccess(user.id, { ip, userAgent, method: '2fa_totp' });
              // Set secure HTTP-only cookie
              res.cookie('auth_token', token, {
                httpOnly: true,
                secure: process.env.NODE_ENV === 'production',
                sameSite: 'strict',
                maxAge: 7 * 24 * 60 * 60 * 1000 // 7 days
              });
              res.json({
                message: 'Login successful',
                token,
                user: {
                  id: user.id,
                  username: user.username,
                  email: user.email,
                  role: user.role,
                  must_change_password: user.must_change_password === 1
                }
              });
            }
          );
        }
      );
    } catch (error) {
      logger.error('2FA verify error:', error);
      if (error.name === 'TokenExpiredError') {
        return res.status(401).json({ error: '2FA session expired. Please login again' });
      }
      res.status(500).json({ error: 'Failed to verify 2FA' });
    }
  }
);
// Change password with enhanced security.
// Flow: verify bearer JWT -> confirm current password -> policy-validate new
// password -> reject reuse of last 5 hashes -> archive old hash -> update.
router.post('/change-password',
  [
    body('currentPassword').notEmpty(),
    body('newPassword').notEmpty()
  ],
  async (req, res) => {
    const token = req.headers.authorization?.split(' ')[1];
    const ip = req.ip || req.headers['x-forwarded-for'] || req.connection.remoteAddress;
    const userAgent = req.headers['user-agent'];
    if (!token) {
      return res.status(401).json({ error: 'No token provided' });
    }
    try {
      const decoded = jwt.verify(token, JWT_SECRET);
      const { currentPassword, newPassword } = req.body;
      db.get(
        'SELECT * FROM users WHERE id = ?',
        [decoded.userId],
        async (err, user) => {
          if (err || !user) {
            return res.status(404).json({ error: 'User not found' });
          }
          const isValidPassword = await bcrypt.compare(currentPassword, user.password);
          if (!isValidPassword) {
            await SecurityAuditLogger.logPasswordChange(user.id, 'failed', {
              ip,
              userAgent,
              reason: 'Incorrect current password'
            });
            return res.status(401).json({ error: 'Current password is incorrect' });
          }
          // Validate new password
          const passwordValidation = validatePassword(newPassword, user.username, user.email);
          if (!passwordValidation.valid) {
            await SecurityAuditLogger.logPasswordChange(user.id, 'failed', {
              ip,
              userAgent,
              reason: passwordValidation.errors.join(', ')
            });
            return res.status(400).json({
              error: 'New password does not meet requirements',
              details: passwordValidation.errors,
              strength: calculatePasswordStrength(newPassword)
            });
          }
          // Check password history (prevent reuse of last 5 passwords)
          db.all(
            'SELECT password_hash FROM password_history WHERE user_id = ? ORDER BY changed_at DESC LIMIT 5',
            [user.id],
            async (err, history) => {
              if (err) {
                logger.error('Password history check error:', err);
                return res.status(500).json({ error: 'Failed to change password' });
              }
              // Check if new password matches any recent password
              for (const record of history || []) {
                const matches = await bcrypt.compare(newPassword, record.password_hash);
                if (matches) {
                  await SecurityAuditLogger.logPasswordChange(user.id, 'failed', {
                    ip,
                    userAgent,
                    reason: 'Password reused from history'
                  });
                  return res.status(400).json({
                    error: 'Cannot reuse any of your last 5 passwords'
                  });
                }
              }
              const hashedPassword = await bcrypt.hash(newPassword, 10);
              // Save old password to history
              db.run(
                'INSERT INTO password_history (user_id, password_hash) VALUES (?, ?)',
                [user.id, user.password]
              );
              // Update password
              db.run(
                'UPDATE users SET password = ?, must_change_password = 0, updated_at = CURRENT_TIMESTAMP WHERE id = ?',
                [hashedPassword, user.id],
                async (err) => {
                  if (err) {
                    logger.error('Password change error:', err);
                    return res.status(500).json({ error: 'Failed to change password' });
                  }
                  // Update password expiry
                  await updatePasswordExpiry(user.id);
                  // CWE-778: Revoke all tokens on password change (security best practice)
                  // NOTE(review): only an audit entry is written here — no row in
                  // active_sessions is deleted and JWTs stay valid until expiry.
                  // Confirm whether actual session revocation was intended.
                  await SecurityAuditLogger.logTokenRevocation(user.id, 'password_change', {
                    ip,
                    userAgent
                  });
                  // Log successful password change
                  await SecurityAuditLogger.logPasswordChange(user.id, 'success', { ip, userAgent });
                  res.json({ message: 'Password changed successfully' });
                }
              );
            }
          );
        }
      );
    } catch (error) {
      logger.error('Password change error:', error);
      res.status(401).json({ error: 'Invalid token' });
    }
  }
);
// Verify token: confirms the bearer JWT maps to an existing, active user and
// returns the user's public profile plus any password-expiry warning.
router.get('/verify', async (req, res) => {
  const token = req.headers.authorization?.split(' ')[1];
  if (!token) {
    return res.status(401).json({ error: 'No token provided' });
  }
  try {
    const decoded = jwt.verify(token, JWT_SECRET);
    db.get(
      'SELECT id, username, email, role, must_change_password, is_active FROM users WHERE id = ?',
      [decoded.userId],
      async (err, user) => {
        if (err || !user) {
          return res.status(401).json({ error: 'Invalid token' });
        }
        if (!user.is_active) {
          return res.status(403).json({ error: 'Account is disabled' });
        }
        // Check password expiry for warning (non-blocking here)
        const expiryStatus = await checkPasswordExpiry(user.id);
        res.json({
          valid: true,
          user: {
            ...user,
            // Normalize SQLite's 0/1 integer flag to a boolean
            must_change_password: user.must_change_password === 1
          },
          passwordWarning: expiryStatus.warning ? expiryStatus.message : null,
          daysUntilExpiry: expiryStatus.daysRemaining || null
        });
      }
    );
  } catch (error) {
    res.status(401).json({ error: 'Invalid token' });
  }
});
// Check password strength.
// Returns the policy validation result and a 0-based strength score for the
// submitted password (optionally personalized against username/email).
router.post('/check-password-strength',
  [body('password').notEmpty()],
  (req, res) => {
    // BUG FIX: the notEmpty() validator above was declared but its result was
    // never inspected (every other route in this file checks validationResult),
    // so an empty/missing password flowed into the policy helpers unchecked.
    const errors = validationResult(req);
    if (!errors.isEmpty()) {
      return res.status(400).json({ errors: errors.array() });
    }
    const { password, username, email } = req.body;
    const validation = validatePassword(password, username, email);
    const strength = calculatePasswordStrength(password);
    res.json({
      valid: validation.valid,
      errors: validation.errors,
      strength: {
        score: strength.score,
        level: strength.level,
        feedback: strength.feedback
      }
    });
  }
);
// Get security status for current user: 2FA flag, password age/expiry,
// last login, active-session count, failure counter, and recent audit events.
router.get('/security-status', async (req, res) => {
  const token = req.headers.authorization?.split(' ')[1];
  if (!token) {
    return res.status(401).json({ error: 'No token provided' });
  }
  try {
    const decoded = jwt.verify(token, JWT_SECRET);
    db.get(
      'SELECT id, username, two_factor_enabled, password_changed_at, password_expires_at, last_login_at, last_login_ip, failed_login_attempts FROM users WHERE id = ?',
      [decoded.userId],
      async (err, user) => {
        if (err || !user) {
          return res.status(401).json({ error: 'Invalid token' });
        }
        // Get active sessions count (and most recent activity timestamp)
        const sessions = await new Promise((resolve, reject) => {
          db.all(
            'SELECT COUNT(*) as count, MAX(last_activity) as last_activity FROM active_sessions WHERE user_id = ? AND expires_at > ?',
            [user.id, new Date().toISOString()],
            (err, rows) => err ? reject(err) : resolve(rows[0])
          );
        });
        // Get recent security events — best-effort; failures degrade to []
        let recentEvents = [];
        try {
          recentEvents = await SecurityAuditLogger.getUserSecurityEvents(user.id, 10);
        } catch (eventErr) {
          logger.error('Error fetching security events:', eventErr);
        }
        // Check password expiry
        const expiryStatus = await checkPasswordExpiry(user.id);
        res.json({
          twoFactorEnabled: user.two_factor_enabled === 1,
          // Password age in whole days since last change (null if never recorded)
          passwordAge: user.password_changed_at ?
            Math.floor((Date.now() - new Date(user.password_changed_at)) / (24 * 60 * 60 * 1000)) : null,
          passwordExpiry: expiryStatus,
          lastLogin: {
            timestamp: user.last_login_at,
            ip: user.last_login_ip
          },
          activeSessions: sessions.count || 0,
          lastActivity: sessions.last_activity,
          failedLoginAttempts: user.failed_login_attempts || 0,
          recentEvents: Array.isArray(recentEvents) ? recentEvents.map(e => ({
            type: e.event_type,
            status: e.status,
            timestamp: e.created_at
          })) : []
        });
      }
    );
  } catch (error) {
    logger.error('Security status error:', error);
    res.status(401).json({ error: 'Invalid token' });
  }
});
// Logout endpoint - invalidate session and clear cookie.
// Deletes the matching active_sessions row, audits the revocation/logout,
// and clears the auth_token cookie with attributes matching how it was set.
router.post('/logout', authenticate, async (req, res) => {
  const token = req.headers.authorization?.replace('Bearer ', '');
  const ip = req.ip || req.headers['x-forwarded-for'] || req.connection.remoteAddress;
  const userAgent = req.headers['user-agent'];
  try {
    // Delete the session from database
    await new Promise((resolve, reject) => {
      db.run(
        'DELETE FROM active_sessions WHERE session_token = ?',
        [token],
        (err) => err ? reject(err) : resolve()
      );
    });
    // CWE-778: Log token revocation on logout
    await SecurityAuditLogger.logTokenRevocation(req.user.userId, 'user_logout', {
      ip,
      userAgent
    });
    // Log logout event
    await SecurityAuditLogger.logAuthEvent(req.user.userId, 'logout', 'success', {
      ip,
      userAgent
    });
    // Clear the HTTP-only cookie (options must match those used at login)
    res.clearCookie('auth_token', {
      httpOnly: true,
      secure: process.env.NODE_ENV === 'production',
      sameSite: 'strict'
    });
    res.json({ message: 'Logout successful' });
  } catch (error) {
    logger.error('Logout error:', error);
    res.status(500).json({ error: 'Logout failed' });
  }
});
module.exports = router;

471
backend/routes/backup.js Normal file
View file

@ -0,0 +1,471 @@
const express = require('express');
const router = express.Router();
const { db } = require('../database/db');
const { authenticate } = require('../middleware/auth');
const { backupLimiter, heavyLimiter, readLimiter } = require('../middleware/rateLimiter');
const fs = require('fs').promises;
const path = require('path');
const archiver = require('archiver');
const unzipper = require('unzipper');
const { promisify } = require('util');
// Promisified sqlite3 helpers for async/await use in this router.
const dbAll = promisify(db.all.bind(db));
// NOTE(review): promisify(db.run) discards the statement context
// (this.lastID / this.changes). Fine for the calls visible here, but
// confirm before using dbRun where the insert id or change count matters.
const dbRun = promisify(db.run.bind(db));
const BACKUP_DIR = path.join(__dirname, '../../data/backups');
// Ensure backup directory exists (best-effort: a failure is logged, not thrown,
// so callers proceed and surface the error on the actual file operation).
const ensureBackupDir = async function () {
  try {
    await fs.mkdir(BACKUP_DIR, { recursive: true });
  } catch (error) {
    console.error('Failed to create backup directory:', error);
  }
};
/**
 * GET /api/backup/list
 * List all backups for the current user
 * Matches both the current StreamFlow_Backup_{username}_{ts}.zip naming and
 * the legacy backup_user{id}_{ts}.zip naming; newest first.
 */
router.get('/list', authenticate, readLimiter, async (req, res) => {
  try {
    await ensureBackupDir();
    // NOTE(review): auth.js reads req.user.userId while this file reads
    // req.user.id — confirm which property the authenticate middleware sets.
    const userId = req.user.id;
    const username = req.user.username;
    const files = await fs.readdir(BACKUP_DIR);
    const userBackups = files.filter(f =>
      (f.startsWith(`StreamFlow_Backup_${username}_`) || f.startsWith(`backup_user${userId}_`)) && f.endsWith('.zip')
    );
    const backupList = await Promise.all(
      userBackups.map(async (filename) => {
        const filePath = path.join(BACKUP_DIR, filename);
        const stats = await fs.stat(filePath);
        // Parse filename: StreamFlow_Backup_{username}_{timestamp}.zip or old format backup_user{id}_{timestamp}.zip
        let match = filename.match(/StreamFlow_Backup_[^_]+_(\d+)\.zip/);
        if (!match) {
          match = filename.match(/backup_user\d+_(\d+)\.zip/);
        }
        const filenameTimestamp = match ? parseInt(match[1]) : 0;
        // Use file's actual mtime (modification time) which is more reliable
        const actualTimestamp = stats.mtimeMs;
        return {
          filename,
          size: stats.size,
          created: new Date(actualTimestamp).toISOString(),
          timestamp: actualTimestamp
        };
      })
    );
    // Sort by timestamp descending (newest first)
    backupList.sort((a, b) => b.timestamp - a.timestamp);
    res.json(backupList);
  } catch (error) {
    console.error('Failed to list backups:', error);
    res.status(500).json({ error: 'Failed to list backups' });
  }
});
/**
 * POST /api/backup/create
 * Create a zip backup of the authenticated user's data: profile (without
 * secrets), playlists, channels, favorites, custom logos, M3U files and a
 * backup_info.json manifest. The HTTP response is sent from the stream's
 * 'close' handler once the zip is fully flushed to disk.
 */
router.post('/create', authenticate, backupLimiter, async (req, res) => {
  try {
    await ensureBackupDir();
    const userId = req.user.id;
    const username = req.user.username;
    const timestamp = Date.now();
    // Strip path separators and traversal sequences from the username so the
    // generated filename can never escape BACKUP_DIR (path traversal guard).
    const safeUsername = String(username).replace(/[/\\]|\.\./g, '_');
    const filename = `StreamFlow_Backup_${safeUsername}_${timestamp}.zip`;
    const backupPath = path.join(BACKUP_DIR, filename);
    // Create write stream for zip
    const output = require('fs').createWriteStream(backupPath);
    const archive = archiver('zip', { zlib: { level: 9 } });
    // Respond only when the archive has been fully written.
    output.on('close', () => {
      res.json({
        success: true,
        filename,
        size: archive.pointer(),
        created: new Date(timestamp).toISOString()
      });
    });
    // Do NOT throw from an event listener: the surrounding try/catch cannot
    // catch it and the uncaught exception would crash the process.
    archive.on('error', (err) => {
      console.error('Archive error while creating backup:', err);
      if (!res.headersSent) {
        res.status(500).json({ error: 'Failed to create backup' });
      }
    });
    archive.pipe(output);
    // Export user data - CWE-532: Exclude password and sensitive fields
    const userData = await dbAll(
      `SELECT id, username, email, role, two_factor_enabled, is_active,
              created_at, updated_at, last_login_at, last_login_ip,
              password_changed_at, password_expires_at
       FROM users WHERE id = ?`,
      [userId]
    );
    archive.append(JSON.stringify(userData, null, 2), { name: 'user.json' });
    // Export playlists
    const playlists = await dbAll('SELECT * FROM playlists WHERE user_id = ?', [userId]);
    archive.append(JSON.stringify(playlists, null, 2), { name: 'playlists.json' });
    // Export channels (DISTINCT because a channel may appear via joins)
    const channels = await dbAll(
      `SELECT DISTINCT c.* FROM channels c
       JOIN playlists p ON c.playlist_id = p.id
       WHERE p.user_id = ?`,
      [userId]
    );
    archive.append(JSON.stringify(channels, null, 2), { name: 'channels.json' });
    // Export favorites
    const favorites = await dbAll('SELECT * FROM favorites WHERE user_id = ?', [userId]);
    archive.append(JSON.stringify(favorites, null, 2), { name: 'favorites.json' });
    // Export custom channel logos
    const customLogos = await dbAll(
      `SELECT c.id, c.name, c.custom_logo FROM channels c
       JOIN playlists p ON c.playlist_id = p.id
       WHERE p.user_id = ? AND c.custom_logo IS NOT NULL`,
      [userId]
    );
    // Add custom logo files (best effort; missing files are skipped)
    for (const channel of customLogos) {
      if (channel.custom_logo) {
        const logoPath = path.join(__dirname, '../../data/logos', channel.custom_logo);
        try {
          const logoExists = await fs.access(logoPath).then(() => true).catch(() => false);
          if (logoExists) {
            archive.file(logoPath, { name: `logos/${channel.custom_logo}` });
          }
        } catch (err) {
          console.error(`Failed to add logo ${channel.custom_logo}:`, err);
        }
      }
    }
    // Export m3u files
    const m3uFiles = await dbAll('SELECT * FROM m3u_files WHERE user_id = ?', [userId]);
    archive.append(JSON.stringify(m3uFiles, null, 2), { name: 'm3u_files.json' });
    // Add actual m3u files (best effort; missing files are skipped)
    for (const m3uFile of m3uFiles) {
      if (m3uFile.file_path) {
        const m3uPath = path.join(__dirname, '../../data/playlists', m3uFile.file_path);
        try {
          const m3uExists = await fs.access(m3uPath).then(() => true).catch(() => false);
          if (m3uExists) {
            archive.file(m3uPath, { name: `m3u_files/${m3uFile.file_path}` });
          }
        } catch (err) {
          console.error(`Failed to add m3u file ${m3uFile.file_path}:`, err);
        }
      }
    }
    // Backup manifest (version + owner info) consumed during restore
    const settings = {
      created: new Date(timestamp).toISOString(),
      version: '1.0',
      userId: userId,
      username: req.user.username
    };
    archive.append(JSON.stringify(settings, null, 2), { name: 'backup_info.json' });
    // Finalize the archive (eventually triggers the 'close' handler above)
    await archive.finalize();
  } catch (error) {
    console.error('Failed to create backup:', error);
    // The 'close' handler may already have answered; avoid a double response.
    if (!res.headersSent) {
      res.status(500).json({ error: 'Failed to create backup' });
    }
  }
});
/**
 * GET /api/backup/download/:filename
 * Download one of the requesting user's backup files.
 */
router.get('/download/:filename', authenticate, heavyLimiter, async (req, res) => {
  try {
    const userId = req.user.id;
    const username = req.user.username;
    const { filename } = req.params;
    // Path traversal guard: a route param can still contain (URL-encoded)
    // separators or '..' sequences, so only plain basenames are accepted.
    if (filename.includes('/') || filename.includes('\\') || filename.includes('..')) {
      return res.status(403).json({ error: 'Unauthorized' });
    }
    // Validate filename belongs to user (support both old and new format)
    const isOldFormat = filename.startsWith(`backup_user${userId}_`);
    const isNewFormat = filename.startsWith(`StreamFlow_Backup_${username}_`);
    if ((!isOldFormat && !isNewFormat) || !filename.endsWith('.zip')) {
      return res.status(403).json({ error: 'Unauthorized' });
    }
    const filePath = path.join(BACKUP_DIR, filename);
    // Defense in depth: the resolved path must stay inside BACKUP_DIR.
    if (!path.resolve(filePath).startsWith(path.resolve(BACKUP_DIR) + path.sep)) {
      return res.status(403).json({ error: 'Unauthorized' });
    }
    // Check if file exists
    try {
      await fs.access(filePath);
    } catch (err) {
      return res.status(404).json({ error: 'Backup not found' });
    }
    res.download(filePath, filename);
  } catch (error) {
    console.error('Failed to download backup:', error);
    res.status(500).json({ error: 'Failed to download backup' });
  }
});
/**
 * DELETE /api/backup/:filename
 * Delete one of the requesting user's backup files.
 */
router.delete('/:filename', authenticate, readLimiter, async (req, res) => {
  try {
    const userId = req.user.id;
    const username = req.user.username;
    const { filename } = req.params;
    // Path traversal guard: only plain basenames are accepted.
    if (filename.includes('/') || filename.includes('\\') || filename.includes('..')) {
      return res.status(403).json({ error: 'Unauthorized' });
    }
    // Validate filename belongs to user (support both old and new format)
    const isOldFormat = filename.startsWith(`backup_user${userId}_`);
    const isNewFormat = filename.startsWith(`StreamFlow_Backup_${username}_`);
    if ((!isOldFormat && !isNewFormat) || !filename.endsWith('.zip')) {
      return res.status(403).json({ error: 'Unauthorized' });
    }
    const filePath = path.join(BACKUP_DIR, filename);
    // Defense in depth: the resolved path must stay inside BACKUP_DIR.
    if (!path.resolve(filePath).startsWith(path.resolve(BACKUP_DIR) + path.sep)) {
      return res.status(403).json({ error: 'Unauthorized' });
    }
    try {
      await fs.unlink(filePath);
    } catch (err) {
      // A missing file is a 404, not an internal error.
      if (err.code === 'ENOENT') {
        return res.status(404).json({ error: 'Backup not found' });
      }
      throw err;
    }
    res.json({ success: true, message: 'Backup deleted' });
  } catch (error) {
    console.error('Failed to delete backup:', error);
    res.status(500).json({ error: 'Failed to delete backup' });
  }
});
/**
 * POST /api/backup/upload
 * Upload a backup zip for later restoration. The file is stored under a
 * server-generated name, never under the client-supplied one.
 */
router.post('/upload', authenticate, heavyLimiter, async (req, res) => {
  try {
    await ensureBackupDir();
    if (!req.files || !req.files.backup) {
      return res.status(400).json({ error: 'No backup file provided' });
    }
    const username = req.user.username;
    const backupFile = req.files.backup;
    // Only accept zip archives; anything else cannot be restored later.
    if (!backupFile.name || !backupFile.name.toLowerCase().endsWith('.zip')) {
      return res.status(400).json({ error: 'Backup must be a .zip file' });
    }
    const timestamp = Date.now();
    // Strip path separators and traversal sequences from the username so the
    // generated filename can never escape BACKUP_DIR (path traversal guard).
    const safeUsername = String(username).replace(/[/\\]|\.\./g, '_');
    const filename = `StreamFlow_Backup_${safeUsername}_${timestamp}.zip`;
    const uploadPath = path.join(BACKUP_DIR, filename);
    await backupFile.mv(uploadPath);
    const stats = await fs.stat(uploadPath);
    res.json({
      success: true,
      filename,
      size: stats.size,
      created: new Date(timestamp).toISOString()
    });
  } catch (error) {
    console.error('Failed to upload backup:', error);
    res.status(500).json({ error: 'Failed to upload backup' });
  }
});
/**
 * POST /api/backup/restore/:filename
 * Restore playlists, channels, favorites, custom logos and M3U files from a
 * backup zip. Old IDs are re-mapped by matching playlist names and channel
 * name+url pairs; rows are inserted with INSERT OR IGNORE so re-running a
 * restore does not duplicate data.
 */
router.post('/restore/:filename', authenticate, backupLimiter, async (req, res) => {
  try {
    const userId = req.user.id;
    const username = req.user.username;
    const { filename } = req.params;
    // Path traversal guard: only plain basenames are accepted.
    if (filename.includes('/') || filename.includes('\\') || filename.includes('..')) {
      return res.status(403).json({ error: 'Unauthorized' });
    }
    // Validate filename belongs to user (support both old and new format)
    const isOldFormat = filename.startsWith(`backup_user${userId}_`);
    const isNewFormat = filename.startsWith(`StreamFlow_Backup_${username}_`);
    if ((!isOldFormat && !isNewFormat) || !filename.endsWith('.zip')) {
      return res.status(403).json({ error: 'Unauthorized' });
    }
    const backupPath = path.join(BACKUP_DIR, filename);
    // Defense in depth: the resolved path must stay inside BACKUP_DIR.
    if (!path.resolve(backupPath).startsWith(path.resolve(BACKUP_DIR) + path.sep)) {
      return res.status(403).json({ error: 'Unauthorized' });
    }
    const extractPath = path.join(BACKUP_DIR, `extract_${userId}_${Date.now()}`);
    // Check if backup exists
    try {
      await fs.access(backupPath);
    } catch (err) {
      return res.status(404).json({ error: 'Backup not found' });
    }
    try {
      // Extract backup
      // NOTE(review): confirm the unzipper version in use guards against
      // "zip slip" (archive entries containing ../) for untrusted archives.
      await fs.mkdir(extractPath, { recursive: true });
      await require('fs')
        .createReadStream(backupPath)
        .pipe(unzipper.Extract({ path: extractPath }))
        .promise();
      // Read backup data
      const playlistsData = JSON.parse(await fs.readFile(path.join(extractPath, 'playlists.json'), 'utf8'));
      const channelsData = JSON.parse(await fs.readFile(path.join(extractPath, 'channels.json'), 'utf8'));
      const favoritesData = JSON.parse(await fs.readFile(path.join(extractPath, 'favorites.json'), 'utf8'));
      let restoredPlaylists = 0;
      let restoredChannels = 0;
      let restoredFavorites = 0;
      // Restore playlists (per-row failures are logged and skipped)
      for (const playlist of playlistsData) {
        try {
          await dbRun(
            `INSERT OR IGNORE INTO playlists (name, url, username, password, user_id, created_at)
             VALUES (?, ?, ?, ?, ?, ?)`,
            [playlist.name, playlist.url, playlist.username, playlist.password, userId, playlist.created_at]
          );
          restoredPlaylists++;
        } catch (err) {
          console.error('Failed to restore playlist:', err);
        }
      }
      // Map old playlist IDs to the IDs now in the DB (matched by name)
      const newPlaylists = await dbAll('SELECT id, name FROM playlists WHERE user_id = ?', [userId]);
      const playlistMap = {};
      playlistsData.forEach((oldP) => {
        const newP = newPlaylists.find(p => p.name === oldP.name);
        if (newP) playlistMap[oldP.id] = newP.id;
      });
      // Restore channels under their re-mapped playlists
      for (const channel of channelsData) {
        const newPlaylistId = playlistMap[channel.playlist_id];
        if (!newPlaylistId) continue;
        try {
          await dbRun(
            `INSERT OR IGNORE INTO channels
             (name, url, logo, group_name, playlist_id, custom_logo, is_radio, tvg_id, tvg_name)
             VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?)`,
            [
              channel.name, channel.url, channel.logo, channel.group_name,
              newPlaylistId, channel.custom_logo, channel.is_radio,
              channel.tvg_id, channel.tvg_name
            ]
          );
          restoredChannels++;
        } catch (err) {
          console.error('Failed to restore channel:', err);
        }
      }
      // Restore custom logos
      const logosDir = path.join(extractPath, 'logos');
      try {
        const logoFiles = await fs.readdir(logosDir);
        const targetLogosDir = path.join(__dirname, '../../data/logos');
        await fs.mkdir(targetLogosDir, { recursive: true });
        for (const logoFile of logoFiles) {
          const src = path.join(logosDir, logoFile);
          const dest = path.join(targetLogosDir, logoFile);
          await fs.copyFile(src, dest);
        }
      } catch (err) {
        // Logos directory might not exist in older backups
        console.log('No custom logos to restore');
      }
      // Restore favorites (map to new channel IDs via name+url match)
      const newChannels = await dbAll(
        `SELECT c.id, c.name, c.url FROM channels c
         JOIN playlists p ON c.playlist_id = p.id
         WHERE p.user_id = ?`,
        [userId]
      );
      for (const fav of favoritesData) {
        const oldChannel = channelsData.find(c => c.id === fav.channel_id);
        if (!oldChannel) continue;
        const newChannel = newChannels.find(c => c.name === oldChannel.name && c.url === oldChannel.url);
        if (!newChannel) continue;
        try {
          await dbRun(
            `INSERT OR IGNORE INTO favorites (user_id, channel_id, custom_logo, is_radio)
             VALUES (?, ?, ?, ?)`,
            [userId, newChannel.id, fav.custom_logo, fav.is_radio]
          );
          restoredFavorites++;
        } catch (err) {
          console.error('Failed to restore favorite:', err);
        }
      }
      // Restore M3U files (records + physical files; optional in old backups)
      try {
        const m3uFilesData = JSON.parse(await fs.readFile(path.join(extractPath, 'm3u_files.json'), 'utf8'));
        const m3uFilesDir = path.join(extractPath, 'm3u_files');
        const targetM3uDir = path.join(__dirname, '../../data/playlists');
        await fs.mkdir(targetM3uDir, { recursive: true });
        for (const m3uFile of m3uFilesData) {
          try {
            // Copy physical file
            if (m3uFile.file_path) {
              const src = path.join(m3uFilesDir, m3uFile.file_path);
              const dest = path.join(targetM3uDir, m3uFile.file_path);
              await fs.copyFile(src, dest);
            }
            // Insert database record
            await dbRun(
              `INSERT OR IGNORE INTO m3u_files (user_id, name, file_path, uploaded_at)
               VALUES (?, ?, ?, ?)`,
              [userId, m3uFile.name, m3uFile.file_path, m3uFile.uploaded_at]
            );
          } catch (err) {
            console.error('Failed to restore m3u file:', err);
          }
        }
      } catch (err) {
        console.log('No M3U files to restore');
      }
      res.json({
        success: true,
        message: 'Backup restored successfully',
        stats: {
          playlists: restoredPlaylists,
          channels: restoredChannels,
          favorites: restoredFavorites
        }
      });
    } finally {
      // Always clean up the extraction directory, even when restore fails
      // (the old code leaked it on any error after extraction).
      await fs.rm(extractPath, { recursive: true, force: true });
    }
  } catch (error) {
    console.error('Failed to restore backup:', error);
    if (!res.headersSent) {
      res.status(500).json({ error: 'Failed to restore backup' });
    }
  }
});
module.exports = router;

312
backend/routes/channels.js Normal file
View file

@ -0,0 +1,312 @@
const express = require('express');
const router = express.Router();
const multer = require('multer');
const path = require('path');
const fs = require('fs');
const { authenticate } = require('../middleware/auth');
const { modifyLimiter, readLimiter } = require('../middleware/rateLimiter');
const { db } = require('../database/db');
const logger = require('../utils/logger');
const {
validateIdParam,
validateChannelUpdate,
validatePagination,
validateSearch
} = require('../middleware/inputValidation');
// Configure multer disk storage for channel-logo uploads.
const storage = multer.diskStorage({
  destination: (req, file, cb) => {
    // Uploads live under backend/uploads/logos; created lazily on first use.
    const uploadDir = path.join(__dirname, '../uploads/logos');
    if (!fs.existsSync(uploadDir)) {
      fs.mkdirSync(uploadDir, { recursive: true });
    }
    cb(null, uploadDir);
  },
  filename: (req, file, cb) => {
    // Unique name: timestamp + random suffix + the original file extension
    // (the extension itself is validated by the upload fileFilter below).
    const uniqueSuffix = Date.now() + '-' + Math.round(Math.random() * 1E9);
    cb(null, 'channel-' + uniqueSuffix + path.extname(file.originalname));
  }
});
// Multer instance for logo uploads: 5MB cap, images only.
const upload = multer({
  storage: storage,
  limits: { fileSize: 5 * 1024 * 1024 }, // 5MB limit
  fileFilter: (req, file, cb) => {
    // Both the file extension and the client-reported mimetype must match.
    // NOTE(review): the regex is unanchored (substring match) and SVG is
    // allowed — SVG can embed scripts (stored-XSS risk if served inline);
    // confirm logos are served with a safe Content-Type/CSP.
    const allowedTypes = /jpeg|jpg|png|gif|svg|webp/;
    const extname = allowedTypes.test(path.extname(file.originalname).toLowerCase());
    const mimetype = allowedTypes.test(file.mimetype);
    if (mimetype && extname) {
      return cb(null, true);
    }
    cb(new Error('Only image files are allowed'));
  }
});
// List the user's active channels, optionally filtered by playlist, radio
// flag, group name and a free-text search over name/group. Pagination comes
// from the sanitized query (defaults: limit 100, offset 0).
router.get('/', authenticate, readLimiter, validatePagination, validateSearch, (req, res) => {
  const { playlistId, isRadio, groupName, search } = req.query;
  const limit = req.sanitizedQuery?.limit || 100;
  const offset = req.sanitizedQuery?.offset || 0;
  const bindings = [req.user.userId];
  let sql = `
    SELECT c.*, p.name as playlist_name,
           COALESCE(c.custom_logo, c.logo) as logo
    FROM channels c
    JOIN playlists p ON c.playlist_id = p.id
    WHERE p.user_id = ? AND c.is_active = 1
  `;
  // Append one parameterized clause per provided filter.
  if (playlistId) {
    sql += ' AND c.playlist_id = ?';
    bindings.push(playlistId);
  }
  if (isRadio !== undefined) {
    sql += ' AND c.is_radio = ?';
    bindings.push(isRadio === 'true' ? 1 : 0);
  }
  if (groupName) {
    sql += ' AND c.group_name = ?';
    bindings.push(groupName);
  }
  if (search) {
    const pattern = `%${search}%`;
    sql += ' AND (c.name LIKE ? OR c.group_name LIKE ?)';
    bindings.push(pattern, pattern);
  }
  sql += ' ORDER BY c.name LIMIT ? OFFSET ?';
  bindings.push(parseInt(limit), parseInt(offset));
  db.all(sql, bindings, (dbErr, rows) => {
    if (dbErr) {
      return res.status(500).json({ error: 'Failed to fetch channels' });
    }
    res.json(rows);
  });
});
// List the distinct channel groups (with per-group counts) for the user,
// optionally scoped to one playlist and/or the radio flag.
router.get('/groups', authenticate, readLimiter, (req, res) => {
  const { playlistId, isRadio } = req.query;
  // Validate playlist ID if provided (must be a positive integer).
  if (playlistId && (isNaN(parseInt(playlistId)) || parseInt(playlistId) < 1)) {
    return res.status(400).json({ error: 'Invalid playlist ID' });
  }
  const bindings = [req.user.userId];
  let sql = `
    SELECT DISTINCT c.group_name, COUNT(*) as count
    FROM channels c
    JOIN playlists p ON c.playlist_id = p.id
    WHERE p.user_id = ? AND c.is_active = 1
  `;
  if (playlistId) {
    sql += ' AND c.playlist_id = ?';
    bindings.push(playlistId);
  }
  if (isRadio !== undefined) {
    sql += ' AND c.is_radio = ?';
    bindings.push(isRadio === 'true' ? 1 : 0);
  }
  sql += ' GROUP BY c.group_name ORDER BY c.group_name';
  db.all(sql, bindings, (dbErr, rows) => {
    if (dbErr) {
      return res.status(500).json({ error: 'Failed to fetch groups' });
    }
    res.json(rows);
  });
});
// Upload a custom logo for a channel. The DB row is updated first and the
// old file is only removed afterwards, so a failed UPDATE can never leave
// the database pointing at a deleted file; a failed UPDATE also removes the
// freshly uploaded file instead of orphaning it on disk.
router.post('/:id/logo', authenticate, modifyLimiter, validateIdParam, upload.single('logo'), (req, res) => {
  const channelId = req.params.id;
  const userId = req.user.userId;
  if (!req.file) {
    return res.status(400).json({ error: 'No file uploaded' });
  }
  // Best-effort removal of a file on disk (failures are logged only).
  const removeFile = (p) => {
    try {
      if (fs.existsSync(p)) fs.unlinkSync(p);
    } catch (fsErr) {
      logger.error('Failed to remove logo file:', fsErr);
    }
  };
  // Verify channel belongs to user
  db.get(
    `SELECT c.*, c.custom_logo as old_logo
     FROM channels c
     JOIN playlists p ON c.playlist_id = p.id
     WHERE c.id = ? AND p.user_id = ?`,
    [channelId, userId],
    (err, channel) => {
      if (err) {
        removeFile(req.file.path); // don't keep the upload on failure
        return res.status(500).json({ error: 'Database error' });
      }
      if (!channel) {
        // Delete uploaded file if channel not found
        removeFile(req.file.path);
        return res.status(404).json({ error: 'Channel not found' });
      }
      // Save new logo path (URL path served by the static handler)
      const logoPath = `/uploads/logos/${req.file.filename}`;
      db.run(
        'UPDATE channels SET custom_logo = ? WHERE id = ?',
        [logoPath, channelId],
        (updateErr) => {
          if (updateErr) {
            removeFile(req.file.path); // roll back the orphaned upload
            return res.status(500).json({ error: 'Failed to update logo' });
          }
          // Only now is it safe to discard the previous custom logo.
          if (channel.old_logo) {
            removeFile(path.join(__dirname, '..', channel.old_logo));
          }
          res.json({
            message: 'Logo uploaded successfully',
            logoUrl: logoPath
          });
        }
      );
    }
  );
});
// Delete a channel's custom logo (reverts to the playlist-provided logo).
// Adds validateIdParam for consistency with the other /:id routes, and
// clears the DB reference BEFORE deleting the file so a failed UPDATE can
// never leave the row pointing at a missing file.
router.delete('/:id/logo', authenticate, modifyLimiter, validateIdParam, (req, res) => {
  const channelId = req.params.id;
  const userId = req.user.userId;
  // Verify channel belongs to user and get current logo
  db.get(
    `SELECT c.*, c.custom_logo
     FROM channels c
     JOIN playlists p ON c.playlist_id = p.id
     WHERE c.id = ? AND p.user_id = ?`,
    [channelId, userId],
    (err, channel) => {
      if (err) {
        return res.status(500).json({ error: 'Database error' });
      }
      if (!channel) {
        return res.status(404).json({ error: 'Channel not found' });
      }
      if (!channel.custom_logo) {
        return res.status(400).json({ error: 'No custom logo to delete' });
      }
      // Remove logo from database first
      db.run(
        'UPDATE channels SET custom_logo = NULL WHERE id = ?',
        [channelId],
        (updateErr) => {
          if (updateErr) {
            return res.status(500).json({ error: 'Failed to delete logo' });
          }
          // Best-effort file cleanup; a failure here leaves an orphaned
          // file but the API state is already consistent.
          const logoPath = path.join(__dirname, '..', channel.custom_logo);
          try {
            if (fs.existsSync(logoPath)) {
              fs.unlinkSync(logoPath);
            }
          } catch (fsErr) {
            logger.error('Failed to remove logo file:', fsErr);
          }
          res.json({ message: 'Logo deleted successfully' });
        }
      );
    }
  );
});
// Get a single channel by ID, scoped to the requesting user's playlists.
// validateIdParam added for consistency with the sibling /:id routes.
router.get('/:id', authenticate, readLimiter, validateIdParam, (req, res) => {
  const channelId = req.params.id;
  const userId = req.user.userId;
  db.get(
    `SELECT c.*, p.name as playlist_name,
            COALESCE(c.custom_logo, c.logo) as logo
     FROM channels c
     JOIN playlists p ON c.playlist_id = p.id
     WHERE c.id = ? AND p.user_id = ?`,
    [channelId, userId],
    (err, channel) => {
      if (err) {
        return res.status(500).json({ error: 'Database error' });
      }
      if (!channel) {
        return res.status(404).json({ error: 'Channel not found' });
      }
      res.json(channel);
    }
  );
});
// Delete a channel from a playlist. Also removes its custom logo file (best
// effort) and refreshes the playlist's cached channel count afterwards.
router.delete('/:id', authenticate, modifyLimiter, validateIdParam, (req, res) => {
  const channelId = req.params.id;
  const userId = req.user.userId;
  // Verify channel belongs to user
  db.get(
    `SELECT c.id, c.playlist_id, c.custom_logo
     FROM channels c
     JOIN playlists p ON c.playlist_id = p.id
     WHERE c.id = ? AND p.user_id = ?`,
    [channelId, userId],
    (lookupErr, channel) => {
      if (lookupErr) {
        logger.error('Error fetching channel for deletion:', lookupErr);
        return res.status(500).json({ error: 'Database error' });
      }
      if (!channel) {
        return res.status(404).json({ error: 'Channel not found' });
      }
      // Best-effort removal of the custom logo file from disk.
      if (channel.custom_logo) {
        const logoFile = path.join(__dirname, '..', channel.custom_logo);
        if (fs.existsSync(logoFile)) {
          try {
            fs.unlinkSync(logoFile);
          } catch (unlinkErr) {
            logger.error('Error deleting custom logo file:', unlinkErr);
          }
        }
      }
      // Remove the row, then refresh the playlist's channel_count cache.
      db.run('DELETE FROM channels WHERE id = ?', [channelId], function (deleteErr) {
        if (deleteErr) {
          logger.error('Error deleting channel:', deleteErr);
          return res.status(500).json({ error: 'Failed to delete channel' });
        }
        db.run(
          'UPDATE playlists SET channel_count = (SELECT COUNT(*) FROM channels WHERE playlist_id = ?) WHERE id = ?',
          [channel.playlist_id, channel.playlist_id],
          (updateErr) => {
            if (updateErr) {
              logger.error('Error updating playlist count:', updateErr);
            }
          }
        );
        logger.info(`Channel ${channelId} deleted by user ${userId}`);
        res.json({
          message: 'Channel deleted successfully',
          deletedId: channelId
        });
      });
    }
  );
});
module.exports = router;

218
backend/routes/csp.js Normal file
View file

@ -0,0 +1,218 @@
const express = require('express');
const router = express.Router();
const { authenticate, requireAdmin } = require('../middleware/auth');
const logger = require('../utils/logger');
const { db } = require('../database/db');
// Store CSP violations in database.
// Table is created lazily at module load (IF NOT EXISTS makes this a no-op
// after the first run); one row per reported violation.
db.run(`
CREATE TABLE IF NOT EXISTS csp_violations (
id INTEGER PRIMARY KEY AUTOINCREMENT,
document_uri TEXT,
violated_directive TEXT,
blocked_uri TEXT,
source_file TEXT,
line_number INTEGER,
column_number INTEGER,
user_agent TEXT,
ip_address TEXT,
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
)
`);
// CSP violation reporting endpoint (no auth required — browsers cannot
// attach credentials to CSP reports).
// NOTE(review): publicly reachable; consider adding a rate limiter so a
// client cannot flood the csp_violations table.
router.post('/report', express.json({ type: 'application/csp-report' }), (req, res) => {
  const report = req.body['csp-report'];
  const ip = req.ip || req.headers['x-forwarded-for'] || req.connection.remoteAddress;
  const userAgent = req.headers['user-agent'];
  if (!report) {
    return res.status(400).json({ error: 'Invalid CSP report' });
  }
  // Defensive truncation: report fields are attacker-controlled strings, so
  // cap their length before logging/storing them.
  const clip = (value, max = 2048) =>
    (typeof value === 'string' ? value.slice(0, max) : value);
  logger.warn('CSP Violation:', {
    documentUri: clip(report['document-uri']),
    violatedDirective: clip(report['violated-directive'], 256),
    blockedUri: clip(report['blocked-uri']),
    sourceFile: clip(report['source-file']),
    lineNumber: report['line-number'],
    columnNumber: report['column-number'],
    ip,
    userAgent: clip(userAgent, 512)
  });
  // Store in database (fire-and-forget; a failure is only logged)
  db.run(
    `INSERT INTO csp_violations
     (document_uri, violated_directive, blocked_uri, source_file, line_number, column_number, user_agent, ip_address)
     VALUES (?, ?, ?, ?, ?, ?, ?, ?)`,
    [
      clip(report['document-uri']),
      clip(report['violated-directive'], 256),
      clip(report['blocked-uri']),
      clip(report['source-file']),
      report['line-number'],
      report['column-number'],
      clip(userAgent, 512),
      clip(ip, 64)
    ],
    (err) => {
      if (err) {
        logger.error('Failed to store CSP violation:', err);
      }
    }
  );
  res.status(204).end();
});
// Get CSP violations (admin only), newest first, with pagination metadata.
router.get('/violations', authenticate, requireAdmin, (req, res) => {
  // NaN-safe pagination: parseInt of garbage input previously produced NaN
  // SQL parameters; clamp to sane bounds instead.
  const limit = Math.min(Math.max(parseInt(req.query.limit, 10) || 100, 1), 1000);
  const offset = Math.max(parseInt(req.query.offset, 10) || 0, 0);
  db.all(
    `SELECT * FROM csp_violations
     ORDER BY created_at DESC
     LIMIT ? OFFSET ?`,
    [limit, offset],
    (err, violations) => {
      if (err) {
        logger.error('Failed to fetch CSP violations:', err);
        return res.status(500).json({ error: 'Failed to fetch violations' });
      }
      // Get total count for the pagination metadata
      db.get('SELECT COUNT(*) as total FROM csp_violations', (countErr, countResult) => {
        if (countErr) {
          logger.error('Failed to count CSP violations:', countErr);
          return res.status(500).json({ error: 'Failed to count violations' });
        }
        res.json({
          violations,
          total: countResult.total,
          limit,
          offset
        });
      });
    }
  );
});
// Get CSP violation statistics (admin only) over a trailing window of days.
router.get('/stats', authenticate, requireAdmin, (req, res) => {
  // NaN-safe window: default 7 days, clamped to [1, 365] (parseInt of
  // garbage previously produced an Invalid Date cutoff).
  const days = Math.min(Math.max(parseInt(req.query.days, 10) || 7, 1), 365);
  const cutoffDate = new Date();
  cutoffDate.setDate(cutoffDate.getDate() - days);
  Promise.all([
    // Total violations in the window
    new Promise((resolve, reject) => {
      db.get(
        'SELECT COUNT(*) as total FROM csp_violations WHERE created_at >= ?',
        [cutoffDate.toISOString()],
        (err, row) => err ? reject(err) : resolve(row.total)
      );
    }),
    // Top 10 violated directives
    new Promise((resolve, reject) => {
      db.all(
        `SELECT violated_directive, COUNT(*) as count
         FROM csp_violations
         WHERE created_at >= ?
         GROUP BY violated_directive
         ORDER BY count DESC
         LIMIT 10`,
        [cutoffDate.toISOString()],
        (err, rows) => err ? reject(err) : resolve(rows)
      );
    }),
    // Top 10 blocked URIs
    new Promise((resolve, reject) => {
      db.all(
        `SELECT blocked_uri, COUNT(*) as count
         FROM csp_violations
         WHERE created_at >= ?
         GROUP BY blocked_uri
         ORDER BY count DESC
         LIMIT 10`,
        [cutoffDate.toISOString()],
        (err, rows) => err ? reject(err) : resolve(rows)
      );
    }),
    // 20 most recent violations
    new Promise((resolve, reject) => {
      db.all(
        `SELECT * FROM csp_violations
         WHERE created_at >= ?
         ORDER BY created_at DESC
         LIMIT 20`,
        [cutoffDate.toISOString()],
        (err, rows) => err ? reject(err) : resolve(rows)
      );
    })
  ])
    .then(([total, byDirective, byUri, recent]) => {
      res.json({
        total,
        byDirective,
        byUri,
        recent,
        days
      });
    })
    .catch((err) => {
      logger.error('Failed to fetch CSP stats:', err);
      res.status(500).json({ error: 'Failed to fetch statistics' });
    });
});
// Clear CSP violations older than the given number of days (admin only).
router.delete('/violations', authenticate, requireAdmin, (req, res) => {
  // NaN-safe retention window: default 30 days, clamped to [1, 3650]
  // (parseInt of garbage previously produced an Invalid Date cutoff,
  // which would silently delete nothing).
  const days = Math.min(Math.max(parseInt(req.query.days, 10) || 30, 1), 3650);
  const cutoffDate = new Date();
  cutoffDate.setDate(cutoffDate.getDate() - days);
  db.run(
    'DELETE FROM csp_violations WHERE created_at < ?',
    [cutoffDate.toISOString()],
    function(err) {
      if (err) {
        logger.error('Failed to delete old CSP violations:', err);
        return res.status(500).json({ error: 'Failed to delete violations' });
      }
      res.json({
        message: 'Old violations cleared',
        deleted: this.changes
      });
    }
  );
});
// Return the current CSP policy (authenticated users). Enforced only in
// production; elsewhere the policy runs in report-only mode so violations
// are logged without being blocked.
router.get('/policy', authenticate, (req, res) => {
  const prod = process.env.NODE_ENV === 'production';
  const policy = {
    defaultSrc: ["'self'"],
    scriptSrc: ["'self'", "'unsafe-inline'", "'unsafe-eval'", "https://www.gstatic.com"],
    styleSrc: ["'self'", "'unsafe-inline'", "https://fonts.googleapis.com"],
    fontSrc: ["'self'", "data:", "https://fonts.gstatic.com"],
    imgSrc: ["'self'", "data:", "blob:", "https:", "http:"],
    mediaSrc: ["'self'", "blob:", "data:", "mediastream:", "https:", "http:", "*"],
    connectSrc: ["'self'", "https:", "http:", "ws:", "wss:", "blob:", "*"],
    frameSrc: ["'self'", "https://www.youtube.com", "https://player.vimeo.com"],
    objectSrc: ["'none'"],
    baseUri: ["'self'"],
    formAction: ["'self'"],
    frameAncestors: ["'self'"],
    upgradeInsecureRequests: prod
  };
  res.json({
    mode: prod ? 'enforce' : 'report-only',
    policy,
    reportUri: '/api/csp/report'
  });
});
module.exports = router;

View file

@ -0,0 +1,438 @@
/**
* Encryption Management API
* CWE-311: Encrypt Sensitive Data
*
* Provides endpoints for:
* - Checking encryption status
* - Migrating plaintext data to encrypted format
* - Key rotation
* - Encryption health monitoring
*/
const express = require('express');
const router = express.Router();
const { authenticate, requireAdmin } = require('../middleware/auth');
const { readLimiter, modifyLimiter } = require('../middleware/rateLimiter');
const { db } = require('../database/db');
const encryption = require('../utils/encryption');
const logger = require('../utils/logger');
const SecurityAuditLogger = require('../utils/securityAudit');
/**
 * GET /api/encryption/status
 * Return the encryption module's configuration/health info together with
 * table-level encryption statistics. Admin only.
 */
router.get('/status', authenticate, requireAdmin, readLimiter, async (req, res) => {
  try {
    const status = encryption.getEncryptionStatus();
    // Count encrypted vs unencrypted sensitive data
    const statistics = await getEncryptionStats();
    res.json({
      success: true,
      data: { ...status, statistics }
    });
  } catch (error) {
    logger.error('Error getting encryption status:', error);
    res.status(500).json({ error: 'Failed to get encryption status' });
  }
});
/**
 * GET /api/encryption/scan
 * Scan the database for sensitive data that is still stored in plaintext
 * and return the findings plus a human-readable recommendation. Admin only.
 */
router.get('/scan', authenticate, requireAdmin, readLimiter, async (req, res) => {
  try {
    const findings = await scanForUnencryptedData();
    const totalIssues = findings.reduce((sum, finding) => sum + finding.count, 0);
    const recommendation = findings.length > 0
      ? 'Run migration to encrypt sensitive data'
      : 'All sensitive data is encrypted';
    res.json({
      success: true,
      data: { findings, totalIssues, recommendation }
    });
  } catch (error) {
    logger.error('Error scanning for unencrypted data:', error);
    res.status(500).json({ error: 'Failed to scan database' });
  }
});
/**
 * POST /api/encryption/migrate
 * Migrate unencrypted sensitive data (settings, VPN configs, API tokens)
 * to encrypted format. Each step runs independently so one failure does
 * not prevent the others; per-step errors are collected in the response.
 * Admin only; the action is recorded in the security audit log.
 */
router.post('/migrate', authenticate, requireAdmin, modifyLimiter, async (req, res) => {
  const ip = req.ip || req.headers['x-forwarded-for'] || req.connection.remoteAddress;
  const userAgent = req.headers['user-agent'];
  try {
    logger.info('Starting encryption migration...');
    const results = {
      settings: 0,
      vpnConfigs: 0,
      apiTokens: 0,
      twoFactorSecrets: 0,
      errors: []
    };
    // Settings keys whose values are considered sensitive.
    const sensitiveKeys = ['api_key', 'api_secret', 'vpn_password', 'smtp_password'];
    const steps = [
      { field: 'settings', label: 'Settings migration error', run: () => migrateSettings(sensitiveKeys) },
      { field: 'vpnConfigs', label: 'VPN migration error', run: () => migrateVPNConfigs() },
      { field: 'apiTokens', label: 'API tokens migration error', run: () => migrateAPITokens() }
    ];
    for (const step of steps) {
      try {
        const outcome = await step.run();
        results[step.field] = outcome.migrated;
        if (outcome.error) results.errors.push(outcome.error);
      } catch (error) {
        results.errors.push(`${step.label}: ${error.message}`);
      }
    }
    // Note: 2FA secrets are NOT migrated as they're already handled securely
    // Passwords are bcrypt hashed (not encrypted), which is correct
    const totalMigrated = results.settings + results.vpnConfigs + results.apiTokens;
    // Log admin activity
    await SecurityAuditLogger.logAdminActivity(req.user.userId, 'encryption_migration', {
      ip,
      userAgent,
      results: {
        totalMigrated,
        ...results
      }
    });
    logger.info('Encryption migration completed', results);
    res.json({
      success: true,
      message: 'Encryption migration completed',
      data: {
        totalMigrated,
        ...results
      }
    });
  } catch (error) {
    logger.error('Error during encryption migration:', error);
    res.status(500).json({ error: 'Encryption migration failed' });
  }
});
/**
 * POST /api/encryption/verify
 * Verify that a sample of encrypted rows can still be decrypted
 * (round-trip integrity check). Admin only.
 */
router.post('/verify', authenticate, requireAdmin, readLimiter, async (req, res) => {
  try {
    const data = await verifyEncryptedData();
    res.json({ success: true, data });
  } catch (error) {
    logger.error('Error verifying encrypted data:', error);
    res.status(500).json({ error: 'Verification failed' });
  }
});
/**
 * Helper: Get encryption statistics
 *
 * Counts total vs. encrypted rows for VPN configs, sensitive settings and
 * 2FA secrets. "Encrypted" is detected heuristically: values written by the
 * encryption module have four colon-separated parts (hence LIKE '%:%:%:%').
 * Promisified so every count has completed before the stats object is
 * returned — the previous version resolved via a setTimeout(100) race and
 * could return partially-filled statistics.
 */
async function getEncryptionStats() {
  // Run a single-row query, swallowing errors (tolerant, like the original
  // callback version which ignored per-query failures).
  const safeGet = (sql, params = []) =>
    new Promise((resolve) => {
      db.get(sql, params, (err, row) => resolve(err ? null : row));
    });
  const stats = {
    vpnConfigs: { total: 0, encrypted: 0 },
    settings: { total: 0, encrypted: 0 },
    twoFactorSecrets: { total: 0, encrypted: 0 }
  };
  // VPN configs
  const vpnTotal = await safeGet('SELECT COUNT(*) as total FROM vpn_configs');
  if (vpnTotal) {
    stats.vpnConfigs.total = vpnTotal.total;
    const vpnEncrypted = await safeGet(
      `SELECT COUNT(*) as encrypted FROM vpn_configs
       WHERE config_data LIKE '%:%:%:%'`
    );
    if (vpnEncrypted) stats.vpnConfigs.encrypted = vpnEncrypted.encrypted;
  }
  // Settings holding sensitive values
  const settingsTotal = await safeGet(
    `SELECT COUNT(*) as total FROM settings
     WHERE key IN ('api_key', 'api_secret', 'vpn_password', 'smtp_password')`
  );
  if (settingsTotal) {
    stats.settings.total = settingsTotal.total;
    const settingsEncrypted = await safeGet(
      `SELECT COUNT(*) as encrypted FROM settings
       WHERE key IN ('api_key', 'api_secret', 'vpn_password', 'smtp_password')
       AND value LIKE '%:%:%:%'`
    );
    if (settingsEncrypted) stats.settings.encrypted = settingsEncrypted.encrypted;
  }
  // 2FA secrets are stored protected; all existing ones count as covered.
  const tfa = await safeGet(
    'SELECT COUNT(*) as total FROM users WHERE two_factor_secret IS NOT NULL'
  );
  if (tfa) {
    stats.twoFactorSecrets.total = tfa.total;
    stats.twoFactorSecrets.encrypted = tfa.total;
  }
  return stats;
}
/**
 * Helper: Scan for unencrypted sensitive data
 *
 * Returns an array of findings describing tables that still hold plaintext
 * sensitive values. Encrypted values are recognised by the four
 * colon-separated parts the encryption module produces.
 * Promisified so both queries are guaranteed to finish before the function
 * resolves — the previous version relied on a setTimeout(100) race and
 * could return an incomplete findings list.
 */
async function scanForUnencryptedData() {
  // Run a COUNT query, swallowing errors (mirrors the old tolerant behavior).
  const safeGet = (sql) =>
    new Promise((resolve) => {
      db.get(sql, [], (err, row) => resolve(err ? null : row));
    });
  const findings = [];
  // VPN configs still in plaintext
  const vpnRow = await safeGet(
    `SELECT COUNT(*) as count FROM vpn_configs
     WHERE config_data NOT LIKE '%:%:%:%' OR config_data IS NULL`
  );
  if (vpnRow && vpnRow.count > 0) {
    findings.push({
      table: 'vpn_configs',
      field: 'config_data',
      count: vpnRow.count,
      severity: 'high',
      description: 'VPN configuration files contain credentials and private keys'
    });
  }
  // Sensitive settings still in plaintext
  const settingsRow = await safeGet(
    `SELECT COUNT(*) as count FROM settings
     WHERE key IN ('api_key', 'api_secret', 'vpn_password', 'smtp_password')
     AND (value NOT LIKE '%:%:%:%' OR value IS NULL)`
  );
  if (settingsRow && settingsRow.count > 0) {
    findings.push({
      table: 'settings',
      field: 'value',
      count: settingsRow.count,
      severity: 'medium',
      description: 'Settings may contain API keys, passwords, and secrets'
    });
  }
  return findings;
}
/**
 * Helper: Migrate settings to encrypted format
 * Finds plaintext values for the given sensitive keys and rewrites them
 * through the encryption module. Per-row failures are logged and skipped;
 * resolves with the number of rows migrated.
 */
async function migrateSettings(sensitiveKeys) {
  // Promisified UPDATE for a single settings row.
  const persistValue = (id, value) =>
    new Promise((resolveRun, rejectRun) => {
      db.run(
        'UPDATE settings SET value = ? WHERE id = ?',
        [value, id],
        (runErr) => (runErr ? rejectRun(runErr) : resolveRun())
      );
    });
  return new Promise((resolve) => {
    db.all(
      `SELECT id, user_id, key, value FROM settings
       WHERE key IN (${sensitiveKeys.map(() => '?').join(',')})
       AND value IS NOT NULL
       AND value NOT LIKE '%:%:%:%'`,
      sensitiveKeys,
      async (err, rows) => {
        if (err || !rows || rows.length === 0) {
          return resolve({ migrated: 0, error: err ? err.message : null });
        }
        let migrated = 0;
        for (const row of rows) {
          try {
            await persistValue(row.id, encryption.encryptSetting(row.value, row.key));
            migrated++;
          } catch (error) {
            logger.error(`Failed to encrypt setting ${row.key}:`, error);
          }
        }
        resolve({ migrated, error: null });
      }
    );
  });
}
/**
 * Helper: Migrate VPN configs to new encryption format
 * Re-encrypts plaintext (JSON) config_data rows via the encryption module.
 * NOTE(review): "already encrypted" is detected by counting colon-separated
 * parts; a plaintext config that happens to contain exactly three colons
 * would be skipped — confirm the stored format before relying on this.
 */
async function migrateVPNConfigs() {
  // Promisified UPDATE for a single vpn_configs row.
  const persistConfig = (id, data) =>
    new Promise((resolveRun, rejectRun) => {
      db.run(
        'UPDATE vpn_configs SET config_data = ? WHERE id = ?',
        [data, id],
        (runErr) => (runErr ? rejectRun(runErr) : resolveRun())
      );
    });
  return new Promise((resolve) => {
    db.all(
      `SELECT id, user_id, config_data FROM vpn_configs
       WHERE config_data IS NOT NULL`,
      [],
      async (err, rows) => {
        if (err || !rows || rows.length === 0) {
          return resolve({ migrated: 0, error: err ? err.message : null });
        }
        let migrated = 0;
        for (const row of rows) {
          // Skip entries already stored in the new 4-part encrypted format.
          if (row.config_data.split(':').length === 4) {
            continue;
          }
          try {
            // Re-encrypt using the new encryption module.
            await persistConfig(row.id, encryption.encryptVPN(JSON.parse(row.config_data)));
            migrated++;
          } catch (error) {
            logger.error(`Failed to encrypt VPN config ${row.id}:`, error);
          }
        }
        resolve({ migrated, error: null });
      }
    );
  });
}
/**
 * Helper: Migrate API tokens (if storing plaintext tokens - usually they're hashed).
 *
 * API tokens are already stored hashed/encrypted, so there is nothing to
 * migrate; this exists so the migration driver can treat all data classes
 * uniformly.
 *
 * @returns {Promise<{migrated: number, error: null}>}
 */
async function migrateAPITokens() {
  return { migrated: 0, error: null };
}
/**
 * Helper: Verify encrypted data can be decrypted.
 *
 * Samples up to 10 rows each from vpn_configs and settings whose values look
 * encrypted (four colon-separated segments, matched with LIKE '%:%:%:%') and
 * attempts to decrypt them, tallying tested/valid/invalid counts per table.
 *
 * @returns {Promise<{vpnConfigs: {tested: number, valid: number, invalid: number},
 *                    settings:   {tested: number, valid: number, invalid: number}}>}
 */
async function verifyEncryptedData() {
  const results = {
    vpnConfigs: { tested: 0, valid: 0, invalid: 0 },
    settings: { tested: 0, valid: 0, invalid: 0 }
  };
  return new Promise((resolve) => {
    db.serialize(() => {
      // Verify VPN configs: a throw from decryptVPN marks the row invalid.
      db.all(
        `SELECT id, config_data FROM vpn_configs
         WHERE config_data LIKE '%:%:%:%' LIMIT 10`,
        [],
        (err, configs) => {
          if (!err && configs) {
            configs.forEach(config => {
              results.vpnConfigs.tested++;
              try {
                encryption.decryptVPN(config.config_data);
                results.vpnConfigs.valid++;
              } catch {
                results.vpnConfigs.invalid++;
              }
            });
          }
        }
      );
      // Verify settings the same way, keyed decryption per setting key.
      db.all(
        `SELECT id, key, value FROM settings
         WHERE value LIKE '%:%:%:%' LIMIT 10`,
        [],
        (err, settings) => {
          if (!err && settings) {
            settings.forEach(setting => {
              results.settings.tested++;
              try {
                encryption.decryptSetting(setting.value, setting.key);
                results.settings.valid++;
              } catch {
                results.settings.invalid++;
              }
            });
          }
          // NOTE(review): db.serialize() runs the two queries in order, so the
          // VPN callback has already fired by the time we get here; the 100ms
          // delay looks like belt-and-braces - confirm before removing it.
          setTimeout(() => resolve(results), 100);
        }
      );
    });
  });
}
module.exports = router;

136
backend/routes/favorites.js Normal file
View file

@ -0,0 +1,136 @@
const express = require('express');
const router = express.Router();
const { db } = require('../database/db');
const { authenticate } = require('../middleware/auth');
const { modifyLimiter, readLimiter } = require('../middleware/rateLimiter');
const logger = require('../utils/logger');
const { validateChannelIdParam } = require('../middleware/inputValidation');
/**
 * Get user's favorite channels.
 *
 * Query params:
 *   isRadio - optional; 'true' returns radio favorites, any other present
 *             value returns TV favorites. Absent = both.
 *
 * Responds with an array of active favorited channels, newest first.
 */
router.get('/', authenticate, readLimiter, (req, res) => {
  const { isRadio } = req.query;
  const userId = req.user.userId;
  let query = `
    SELECT
      c.id, c.name, c.url, COALESCE(c.custom_logo, c.logo) as logo,
      c.group_name, c.is_radio, f.created_at as favorited_at
    FROM favorites f
    JOIN channels c ON f.channel_id = c.id
    WHERE f.user_id = ? AND c.is_active = 1
  `;
  const params = [userId];
  // Filter by radio or TV
  if (isRadio !== undefined) {
    query += ' AND c.is_radio = ?';
    params.push(isRadio === 'true' ? 1 : 0);
  }
  query += ' ORDER BY f.created_at DESC';
  db.all(query, params, (err, rows) => {
    if (err) {
      // Use the shared logger (imported at the top of this file) rather than
      // console.error, matching the project's no-console ESLint rule.
      logger.error('Error fetching favorites:', err);
      return res.status(500).json({ error: 'Failed to fetch favorites' });
    }
    res.json(rows || []);
  });
});
/**
 * Add channel to favorites.
 *
 * 404 when the channel does not exist or is inactive; 200 with
 * alreadyExists=true when the favorite was already present (INSERT OR
 * IGNORE makes the operation idempotent); 201 on a fresh insert.
 */
router.post('/:channelId', authenticate, modifyLimiter, validateChannelIdParam, (req, res) => {
  const { channelId } = req.params;
  const userId = req.user.userId;
  // Check if channel exists and is active
  db.get(
    'SELECT id, is_radio FROM channels WHERE id = ? AND is_active = 1',
    [channelId],
    (err, channel) => {
      if (err) {
        // Shared logger instead of console.error (project no-console rule).
        logger.error('Error checking channel:', err);
        return res.status(500).json({ error: 'Database error' });
      }
      if (!channel) {
        return res.status(404).json({ error: 'Channel not found' });
      }
      // Add to favorites
      db.run(
        `INSERT OR IGNORE INTO favorites (user_id, channel_id) VALUES (?, ?)`,
        [userId, channelId],
        function(err) {
          if (err) {
            logger.error('Error adding favorite:', err);
            return res.status(500).json({ error: 'Failed to add favorite' });
          }
          // this.changes === 0 means the INSERT was ignored (duplicate).
          if (this.changes === 0) {
            return res.status(200).json({ message: 'Already in favorites', alreadyExists: true });
          }
          res.status(201).json({
            message: 'Added to favorites',
            channelId: parseInt(channelId, 10),
            isRadio: channel.is_radio === 1
          });
        }
      );
    }
  );
});
/**
 * Remove channel from favorites.
 *
 * 404 when no matching favorite row exists for this user/channel pair.
 */
router.delete('/:channelId', authenticate, modifyLimiter, validateChannelIdParam, (req, res) => {
  const { channelId } = req.params;
  const userId = req.user.userId;
  db.run(
    'DELETE FROM favorites WHERE user_id = ? AND channel_id = ?',
    [userId, channelId],
    function(err) {
      if (err) {
        // Shared logger instead of console.error (project no-console rule).
        logger.error('Error removing favorite:', err);
        return res.status(500).json({ error: 'Failed to remove favorite' });
      }
      if (this.changes === 0) {
        return res.status(404).json({ error: 'Favorite not found' });
      }
      res.json({ message: 'Removed from favorites' });
    }
  );
});
/**
 * Check if channel is favorited.
 *
 * Adds readLimiter and validateChannelIdParam for consistency with the other
 * routes in this file (this was the only favorites route missing them), and
 * uses the shared logger instead of console.error.
 */
router.get('/check/:channelId', authenticate, readLimiter, validateChannelIdParam, (req, res) => {
  const { channelId } = req.params;
  const userId = req.user.userId;
  db.get(
    'SELECT id FROM favorites WHERE user_id = ? AND channel_id = ?',
    [userId, channelId],
    (err, row) => {
      if (err) {
        logger.error('Error checking favorite:', err);
        return res.status(500).json({ error: 'Database error' });
      }
      res.json({ isFavorite: !!row });
    }
  );
});
module.exports = router;

9
backend/routes/groups.js Normal file
View file

@ -0,0 +1,9 @@
const express = require('express');
const router = express.Router();
const { authenticate } = require('../middleware/auth');
// Placeholder groups endpoint: authenticated, returns a static payload.
router.get('/', authenticate, (_req, res) => {
  res.json({ message: 'Groups endpoint' });
});
module.exports = router;

216
backend/routes/history.js Normal file
View file

@ -0,0 +1,216 @@
const express = require('express');
const router = express.Router();
const { authenticate } = require('../middleware/auth');
const { readLimiter, modifyLimiter } = require('../middleware/rateLimiter');
const { db } = require('../database/db');
const logger = require('../utils/logger');
/**
 * Record watch history.
 *
 * Body: { channel_id (required), duration (seconds, default 0), profile_id (optional) }.
 * Responds with the new watch_history row id.
 */
router.post('/', modifyLimiter, authenticate, (req, res) => {
  const { channel_id, duration, profile_id } = req.body;
  const user_id = req.user.userId;
  if (!channel_id) {
    return res.status(400).json({ error: 'channel_id is required' });
  }
  const insertSql = `INSERT INTO watch_history (user_id, profile_id, channel_id, duration)
    VALUES (?, ?, ?, ?)`;
  const values = [user_id, profile_id || null, channel_id, duration || 0];
  db.run(insertSql, values, function(err) {
    if (err) {
      logger.error('Error recording watch history:', err);
      return res.status(500).json({ error: 'Failed to record watch history' });
    }
    res.json({ message: 'Watch history recorded', id: this.lastID });
  });
});
/**
 * Get user watch history.
 *
 * Query params:
 *   limit      - max rows (default 50)
 *   offset     - paging offset (default 0)
 *   profile_id - optional per-profile filter
 *
 * Returns rows newest first, joined with channel metadata.
 */
router.get('/', readLimiter, authenticate, (req, res) => {
  const user_id = req.user.userId;
  const { limit = 50, offset = 0, profile_id } = req.query;
  // Parse paging params with an explicit radix and fall back to the defaults
  // on non-numeric input so NaN is never bound into LIMIT/OFFSET.
  const parsedLimit = Number.parseInt(limit, 10);
  const parsedOffset = Number.parseInt(offset, 10);
  let query = `
    SELECT
      wh.id,
      wh.watched_at,
      wh.duration,
      c.id as channel_id,
      c.name as channel_name,
      c.logo,
      c.custom_logo,
      c.group_name,
      c.is_radio
    FROM watch_history wh
    INNER JOIN channels c ON wh.channel_id = c.id
    WHERE wh.user_id = ?
  `;
  const params = [user_id];
  if (profile_id) {
    query += ' AND wh.profile_id = ?';
    params.push(profile_id);
  }
  query += ' ORDER BY wh.watched_at DESC LIMIT ? OFFSET ?';
  params.push(
    Number.isNaN(parsedLimit) ? 50 : parsedLimit,
    Number.isNaN(parsedOffset) ? 0 : parsedOffset
  );
  db.all(query, params, (err, rows) => {
    if (err) {
      logger.error('Error fetching watch history:', err);
      return res.status(500).json({ error: 'Failed to fetch watch history' });
    }
    res.json(rows);
  });
});
/**
 * Get most watched channels (recommendations).
 *
 * Query params:
 *   limit      - max channels (default 10)
 *   days       - lookback window in days (default 30)
 *   profile_id - optional per-profile filter
 *
 * Ranks by watch count, then total duration, within the window.
 */
router.get('/top-channels', readLimiter, authenticate, (req, res) => {
  const user_id = req.user.userId;
  const { limit = 10, profile_id, days = 30 } = req.query;
  // Explicit radix + NaN fallback so malformed query params cannot end up
  // bound into the SQL (days feeds the datetime() modifier, limit the LIMIT).
  const parsedDays = Number.parseInt(days, 10);
  const parsedLimit = Number.parseInt(limit, 10);
  let query = `
    SELECT
      c.id,
      c.name,
      c.logo,
      c.custom_logo,
      c.group_name,
      c.is_radio,
      c.url,
      COUNT(wh.id) as watch_count,
      SUM(wh.duration) as total_duration,
      MAX(wh.watched_at) as last_watched
    FROM watch_history wh
    INNER JOIN channels c ON wh.channel_id = c.id
    WHERE wh.user_id = ?
      AND wh.watched_at >= datetime('now', '-' || ? || ' days')
  `;
  const params = [user_id, Number.isNaN(parsedDays) ? 30 : parsedDays];
  if (profile_id) {
    query += ' AND wh.profile_id = ?';
    params.push(profile_id);
  }
  query += `
    GROUP BY c.id
    ORDER BY watch_count DESC, total_duration DESC
    LIMIT ?
  `;
  params.push(Number.isNaN(parsedLimit) ? 10 : parsedLimit);
  db.all(query, params, (err, rows) => {
    if (err) {
      logger.error('Error fetching top channels:', err);
      return res.status(500).json({ error: 'Failed to fetch top channels' });
    }
    res.json(rows);
  });
});
/**
 * Get recommended channels based on viewing patterns.
 *
 * Strategy: find the user's top 5 groups by watch count over the last 30 days
 * (the CTE below), then suggest random healthy, active channels from those
 * groups that the user has NOT watched in the last 7 days.
 *
 * Query params:
 *   limit      - max suggestions (default 10)
 *   profile_id - optional per-profile filter (applied inside the CTE)
 */
router.get('/recommendations', readLimiter, authenticate, (req, res) => {
  const user_id = req.user.userId;
  const { limit = 10, profile_id } = req.query;
  // Get channels from same groups as user's most watched channels.
  // NOTE: the query is assembled in pieces - the optional profile filter is
  // concatenated INSIDE the CTE body, before its GROUP BY, so param order
  // must stay: [user_id, (profile_id), user_id, limit].
  let query = `
    WITH user_favorite_groups AS (
      SELECT DISTINCT c.group_name, COUNT(wh.id) as watch_count
      FROM watch_history wh
      INNER JOIN channels c ON wh.channel_id = c.id
      WHERE wh.user_id = ?
        AND wh.watched_at >= datetime('now', '-30 days')
        AND c.group_name IS NOT NULL
  `;
  const params = [user_id];
  if (profile_id) {
    query += ' AND wh.profile_id = ?';
    params.push(profile_id);
  }
  query += `
      GROUP BY c.group_name
      ORDER BY watch_count DESC
      LIMIT 5
    )
    SELECT DISTINCT
      c.id,
      c.name,
      c.logo,
      c.custom_logo,
      c.group_name,
      c.is_radio,
      c.url,
      c.health_status
    FROM channels c
    INNER JOIN user_favorite_groups ufg ON c.group_name = ufg.group_name
    WHERE c.id NOT IN (
      SELECT channel_id
      FROM watch_history
      WHERE user_id = ?
        AND watched_at >= datetime('now', '-7 days')
    )
    AND c.is_active = 1
    AND c.health_status != 'dead'
    ORDER BY RANDOM()
    LIMIT ?
  `;
  params.push(user_id, parseInt(limit));
  db.all(query, params, (err, rows) => {
    if (err) {
      logger.error('Error fetching recommendations:', err);
      return res.status(500).json({ error: 'Failed to fetch recommendations' });
    }
    res.json(rows);
  });
});
/**
 * Clear watch history.
 *
 * Query params:
 *   profile_id - only clear one profile's history
 *   days       - only clear entries older than N days
 *
 * Responds with the number of rows deleted.
 */
router.delete('/', modifyLimiter, authenticate, (req, res) => {
  const user_id = req.user.userId;
  const { profile_id, days } = req.query;
  let query = 'DELETE FROM watch_history WHERE user_id = ?';
  const params = [user_id];
  if (profile_id) {
    query += ' AND profile_id = ?';
    params.push(profile_id);
  }
  if (days) {
    // Use single quotes for SQL string literals; double quotes are SQLite's
    // deprecated identifier-fallback and only worked by accident here.
    query += " AND watched_at < datetime('now', '-' || ? || ' days')";
    params.push(days);
  }
  db.run(query, params, function(err) {
    if (err) {
      logger.error('Error clearing watch history:', err);
      return res.status(500).json({ error: 'Failed to clear watch history' });
    }
    res.json({ message: 'Watch history cleared', deleted: this.changes });
  });
});
module.exports = router;

View file

@ -0,0 +1,298 @@
/**
* Log Management API Routes (CWE-53 Compliance)
* Admin-only endpoints for log retention, archival, and integrity
*/
const express = require('express');
const router = express.Router();
const { authenticate } = require('../middleware/auth');
const { requirePermission } = require('../middleware/rbac');
const { readLimiter, modifyLimiter } = require('../middleware/rateLimiter');
const logManagement = require('../jobs/logManagement');
const SecurityAuditLogger = require('../utils/securityAudit');
const logger = require('../utils/logger');
const path = require('path');
const fs = require('fs').promises;
/**
 * GET /api/log-management/statistics
 * Get log management statistics.
 * Requires the security.view_audit permission; every access is recorded in
 * the security audit trail.
 */
router.get('/statistics',
  authenticate,
  requirePermission('security.view_audit'),
  readLimiter,
  async (req, res) => {
    try {
      const statistics = await logManagement.getStatistics();
      await SecurityAuditLogger.logSensitiveDataAccess(req.user.userId, 'log_statistics', {
        ip: req.ip,
        userAgent: req.headers['user-agent']
      });
      return res.json({ success: true, data: statistics });
    } catch (error) {
      logger.error('[LogManagement API] Error getting statistics:', error);
      return res.status(500).json({
        success: false,
        message: 'Failed to get log statistics'
      });
    }
  }
);
/**
 * GET /api/log-management/archives
 * List available log archives.
 * Audit-logged, including how many archive entries were returned.
 */
router.get('/archives',
  authenticate,
  requirePermission('security.view_audit'),
  readLimiter,
  async (req, res) => {
    try {
      const archiveList = await logManagement.listArchives();
      await SecurityAuditLogger.logSensitiveDataAccess(req.user.userId, 'log_archives_list', {
        ip: req.ip,
        userAgent: req.headers['user-agent'],
        recordCount: archiveList.length
      });
      return res.json({ success: true, data: archiveList });
    } catch (error) {
      logger.error('[LogManagement API] Error listing archives:', error);
      return res.status(500).json({
        success: false,
        message: 'Failed to list archives'
      });
    }
  }
);
/**
 * POST /api/log-management/cleanup
 * Manual trigger for log cleanup.
 * Admin only (security.manage). Body: { retentionDays } - defaults to 90,
 * must be at least 7.
 */
router.post('/cleanup',
  authenticate,
  requirePermission('security.manage'),
  modifyLimiter,
  async (req, res) => {
    try {
      const { retentionDays } = req.body;
      // Explicit radix; NaN (missing/non-numeric body value) falls back to 90.
      const days = Number.parseInt(retentionDays, 10) || 90;
      if (days < 7) {
        return res.status(400).json({
          success: false,
          message: 'Retention days must be at least 7'
        });
      }
      const result = await logManagement.manualCleanup(days);
      await SecurityAuditLogger.logAdminActivity(req.user.userId, 'log_cleanup_manual', {
        ip: req.ip,
        userAgent: req.headers['user-agent'],
        retentionDays: days,
        ...result
      });
      res.json({
        success: true,
        message: `Deleted ${result.auditDeleted + result.aggregatedDeleted} old log entries`,
        data: result
      });
    } catch (error) {
      logger.error('[LogManagement API] Error during manual cleanup:', error);
      res.status(500).json({
        success: false,
        message: 'Failed to perform log cleanup'
      });
    }
  }
);
/**
 * POST /api/log-management/verify-integrity
 * Manual trigger for integrity verification.
 * Admin only. Responds with verified/tampered counts; `alert` is true when
 * any tampered logs were found.
 */
router.post('/verify-integrity',
  authenticate,
  requirePermission('security.view_audit'),
  modifyLimiter,
  async (req, res) => {
    try {
      const outcome = await logManagement.manualIntegrityCheck();
      if (!outcome) {
        return res.status(500).json({
          success: false,
          message: 'Integrity verification failed'
        });
      }
      await SecurityAuditLogger.logAdminActivity(req.user.userId, 'log_integrity_check', {
        ip: req.ip,
        userAgent: req.headers['user-agent'],
        verified: outcome.verified,
        tampered: outcome.tampered
      });
      const hasTampering = outcome.tampered > 0;
      return res.json({
        success: true,
        message: hasTampering
          ? `⚠️ WARNING: ${outcome.tampered} tampered logs detected!`
          : `All ${outcome.verified} logs verified successfully`,
        data: outcome,
        alert: hasTampering
      });
    } catch (error) {
      logger.error('[LogManagement API] Error during integrity check:', error);
      return res.status(500).json({
        success: false,
        message: 'Failed to verify log integrity'
      });
    }
  }
);
/**
 * GET /api/log-management/archives/download/:filename
 * Download a log archive.
 * Admin only. Filename is validated (no path separators, .json.gz only)
 * before being joined onto the archive directory; every download is
 * audit-logged.
 */
router.get('/archives/download/:filename',
  authenticate,
  requirePermission('security.view_audit'),
  readLimiter,
  async (req, res) => {
    try {
      const { filename } = req.params;
      // Security: prevent path traversal - reject '..' and both separator styles.
      if (filename.includes('..') || filename.includes('/') || filename.includes('\\')) {
        return res.status(400).json({
          success: false,
          message: 'Invalid filename'
        });
      }
      // Security: only allow .json.gz files (the format produced by archival).
      if (!filename.endsWith('.json.gz')) {
        return res.status(400).json({
          success: false,
          message: 'Invalid file type'
        });
      }
      const archiveDir = path.join(__dirname, '../../data/log-archives');
      const filePath = path.join(archiveDir, filename);
      // Check if file exists before logging access / attempting the download.
      try {
        await fs.access(filePath);
      } catch (error) {
        return res.status(404).json({
          success: false,
          message: 'Archive not found'
        });
      }
      // Log the access in the security audit trail.
      await SecurityAuditLogger.logSensitiveDataAccess(req.user.userId, 'log_archive_download', {
        ip: req.ip,
        userAgent: req.headers['user-agent'],
        filename,
        accessMethod: 'download'
      });
      // Send file as an attachment.
      res.download(filePath, filename);
    } catch (error) {
      logger.error('[LogManagement API] Error downloading archive:', error);
      res.status(500).json({
        success: false,
        message: 'Failed to download archive'
      });
    }
  }
);
/**
 * DELETE /api/log-management/archives/:filename
 * Delete a log archive.
 * Admin only (security.manage). Same filename validation as the download
 * route; deletions are audit-logged.
 */
router.delete('/archives/:filename',
  authenticate,
  requirePermission('security.manage'),
  modifyLimiter,
  async (req, res) => {
    try {
      const { filename } = req.params;
      // Security: prevent path traversal - reject '..' and both separator styles.
      if (filename.includes('..') || filename.includes('/') || filename.includes('\\')) {
        return res.status(400).json({
          success: false,
          message: 'Invalid filename'
        });
      }
      // Security: only allow .json.gz files.
      if (!filename.endsWith('.json.gz')) {
        return res.status(400).json({
          success: false,
          message: 'Invalid file type'
        });
      }
      const archiveDir = path.join(__dirname, '../../data/log-archives');
      const filePath = path.join(archiveDir, filename);
      // Check if file exists so we can return a clean 404.
      try {
        await fs.access(filePath);
      } catch (error) {
        return res.status(404).json({
          success: false,
          message: 'Archive not found'
        });
      }
      // Delete file from disk.
      await fs.unlink(filePath);
      // Log the deletion in the security audit trail.
      await SecurityAuditLogger.logAdminActivity(req.user.userId, 'log_archive_deleted', {
        ip: req.ip,
        userAgent: req.headers['user-agent'],
        filename
      });
      res.json({
        success: true,
        message: 'Archive deleted successfully'
      });
    } catch (error) {
      logger.error('[LogManagement API] Error deleting archive:', error);
      res.status(500).json({
        success: false,
        message: 'Failed to delete archive'
      });
    }
  }
);
module.exports = router;

View file

@ -0,0 +1,58 @@
const express = require('express');
const router = express.Router();
const { authenticate } = require('../middleware/auth');
const { modifyLimiter } = require('../middleware/rateLimiter');
const { cacheAllLogos, cleanupOldLogos } = require('../jobs/logoCacher');
const { db } = require('../database/db');
// Trigger logo caching (admin only).
// Responds immediately; the caching job continues in the background and
// its failures are only logged, never surfaced to the client.
router.post('/cache', authenticate, modifyLimiter, async (req, res) => {
  try {
    cacheAllLogos().catch((backgroundErr) => {
      console.error('[LogoCache API] Background caching error:', backgroundErr);
    });
    res.json({ message: 'Logo caching started in background' });
  } catch (error) {
    console.error('[LogoCache API] Error starting cache:', error);
    res.status(500).json({ error: 'Failed to start logo caching' });
  }
});
// Get cache status: how many logos are cached vs. how many channels still
// reference remote (http) logo URLs.
router.get('/status', authenticate, async (req, res) => {
  const statusSql = `SELECT
      COUNT(*) as cached_count,
      (SELECT COUNT(*) FROM channels WHERE logo LIKE 'http%' OR custom_logo LIKE 'http%') as total_count
    FROM logo_cache`;
  try {
    const row = await new Promise((resolve, reject) => {
      db.get(statusSql, [], (err, result) => (err ? reject(err) : resolve(result)));
    });
    res.json(row);
  } catch (error) {
    console.error('[LogoCache API] Error getting status:', error);
    res.status(500).json({ error: 'Failed to get cache status' });
  }
});
// Cleanup old cached logos. Synchronous from the client's point of view:
// waits for the cleanup job before responding.
router.post('/cleanup', authenticate, modifyLimiter, async (req, res) => {
  try {
    await cleanupOldLogos();
    res.json({ message: 'Old logos cleaned up successfully' });
  } catch (cleanupErr) {
    console.error('[LogoCache API] Cleanup error:', cleanupErr);
    res.status(500).json({ error: 'Failed to cleanup logos' });
  }
});
module.exports = router;

View file

@ -0,0 +1,122 @@
const express = require('express');
const axios = require('axios');
const logger = require('../utils/logger');
const { db } = require('../database/db');
const path = require('path');
const fs = require('fs').promises;
const router = express.Router();
// Middleware to fix CORS for public image serving.
// The global CORS middleware sets Access-Control-Allow-Credentials, which is
// incompatible with a wildcard origin, so we strip it and install permissive
// headers suitable for publicly cacheable images.
const fixImageCORS = (req, res, next) => {
  res.removeHeader('Access-Control-Allow-Credentials');
  const publicImageHeaders = {
    'Access-Control-Allow-Origin': '*',
    'Access-Control-Allow-Methods': 'GET, OPTIONS',
    'Access-Control-Allow-Headers': 'Content-Type',
    'Cross-Origin-Resource-Policy': 'cross-origin'
  };
  res.set(publicImageHeaders);
  next();
};
// Handle OPTIONS preflight requests: CORS headers only, empty 204 body.
router.options('/', fixImageCORS, (_req, res) => {
  res.status(204).end();
});
// Proxy external logos to handle CORS issues.
// Serves from the on-disk cache (indexed by the logo_cache table) when
// available; otherwise fetches the remote image and relays it.
// Changes from the original: debug console.log statements (which dumped SQL
// and user-supplied URLs) replaced with the shared logger, and the URL is
// validated up front and restricted to http/https - this endpoint fetches an
// arbitrary user-supplied URL, so rejecting file:, data:, etc. narrows the
// SSRF surface.
router.get('/', fixImageCORS, async (req, res) => {
  const { url } = req.query;
  if (!url) {
    return res.status(400).json({ error: 'URL parameter is required' });
  }
  // Validate URL before doing any work.
  let logoUrl;
  try {
    logoUrl = new URL(url);
  } catch {
    return res.status(400).json({ error: 'Invalid URL' });
  }
  if (logoUrl.protocol !== 'http:' && logoUrl.protocol !== 'https:') {
    return res.status(400).json({ error: 'Only http(s) URLs are supported' });
  }
  try {
    // Check if logo is cached
    const cached = await new Promise((resolve, reject) => {
      db.get(
        'SELECT logo_url, local_path FROM logo_cache WHERE logo_url = ? LIMIT 1',
        [url],
        (err, row) => (err ? reject(err) : resolve(row))
      );
    });
    // If cached, serve from disk; a missing file falls through to a fresh fetch.
    if (cached && cached.local_path) {
      try {
        const fileData = await fs.readFile(cached.local_path);
        const ext = path.extname(cached.local_path).toLowerCase();
        const cachedContentType = {
          '.png': 'image/png',
          '.jpg': 'image/jpeg',
          '.jpeg': 'image/jpeg',
          '.gif': 'image/gif',
          '.webp': 'image/webp',
          '.svg': 'image/svg+xml'
        }[ext] || 'image/png';
        res.set({
          'Content-Type': cachedContentType,
          'Cache-Control': 'public, max-age=2592000' // Cache for 30 days
        });
        return res.send(fileData);
      } catch (err) {
        logger.warn('Cached logo file not found, fetching fresh:', err.message);
      }
    }
    // Fetch the image from the remote host.
    const response = await axios.get(url, {
      responseType: 'arraybuffer',
      timeout: 10000,
      headers: {
        'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/91.0.4472.124 Safari/537.36',
        'Accept': 'image/webp,image/apng,image/*,*/*;q=0.8',
        'Accept-Language': 'en-US,en;q=0.9',
        'Cache-Control': 'no-cache',
        'Pragma': 'no-cache'
      },
      maxRedirects: 5
    });
    // Set appropriate headers and relay the image bytes.
    const contentType = response.headers['content-type'] || 'image/png';
    res.set({
      'Content-Type': contentType,
      'Cache-Control': 'public, max-age=86400' // Cache for 24 hours
    });
    res.send(response.data);
  } catch (error) {
    logger.error('Logo proxy error:', {
      url,
      error: error.message,
      status: error.response?.status
    });
    res.status(error.response?.status || 500).json({
      error: 'Failed to fetch logo',
      message: error.message
    });
  }
});
module.exports = router;

841
backend/routes/m3u-files.js Normal file
View file

@ -0,0 +1,841 @@
const express = require('express');
const router = express.Router();
const path = require('path');
const fs = require('fs').promises;
const fsSync = require('fs');
const { authenticate } = require('../middleware/auth');
const { db } = require('../database/db');
const m3uParser = require('iptv-playlist-parser');
const axios = require('axios');
const crypto = require('crypto');
// Cache for IPTV-org logos database (in memory)
let logoDbCache = null;
let logoDbCacheTime = null;
const CACHE_DURATION = 24 * 60 * 60 * 1000; // 24 hours
// Local logo cache directory
const LOGO_CACHE_DIR = path.join('/app', 'data', 'logo-cache');
// Ensure logo cache directory exists
async function ensureLogoCacheDir() {
try {
await fs.mkdir(LOGO_CACHE_DIR, { recursive: true });
} catch (error) {
console.error('Failed to create logo cache directory:', error);
}
}
// Initialize cache directory
ensureLogoCacheDir();
// Download and cache a logo locally.
// Returns the public path ("/logos/<hash><ext>") on success, or null on any
// failure so callers never overwrite an existing logo with a broken URL.
async function downloadAndCacheLogo(logoUrl) {
  try {
    // Generate a hash-based filename (md5 is fine here: cache key, not security).
    const hash = crypto.createHash('md5').update(logoUrl).digest('hex');
    const ext = path.extname(new URL(logoUrl).pathname) || '.png';
    const filename = `${hash}${ext}`;
    const localPath = path.join(LOGO_CACHE_DIR, filename);
    // BUG FIX: interpolate the cached filename into the public path; the
    // previous `$(...)` form is not template syntax and was emitted literally,
    // producing broken logo URLs for every cached file.
    const publicPath = `/logos/${filename}`;
    // Check if already cached
    try {
      await fs.access(localPath);
      return publicPath; // Already cached
    } catch {
      // Not cached, download it
    }
    console.log(`Downloading logo: ${logoUrl}`);
    const response = await axios({
      method: 'GET',
      url: logoUrl,
      responseType: 'arraybuffer',
      timeout: 10000,
      headers: {
        'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36'
      }
    });
    // Save to cache
    await fs.writeFile(localPath, response.data);
    console.log(`Cached logo: ${publicPath}`);
    return publicPath;
  } catch (error) {
    console.error(`Failed to download logo ${logoUrl}:`, error.message);
    // Return null instead of the failed URL to avoid overwriting existing logos
    // with geo-blocked/inaccessible URLs
    return null;
  }
}
// Fetch and cache IPTV-org logos database.
// Primary source: tv-logo/tv-logos GitHub repo (Romania folder), with
// iptv-org.github.io as a merge-in/fallback source. Results are memoized in
// module-level logoDbCache for CACHE_DURATION (24h). Returns an array of
// { name, url, guides } entries; on total failure returns whatever stale
// cache exists, or [].
async function getLogoDatabase() {
  const now = Date.now();
  // Return cached data if still valid
  if (logoDbCache && logoDbCacheTime && (now - logoDbCacheTime) < CACHE_DURATION) {
    return logoDbCache;
  }
  try {
    console.log('Fetching logos from tv-logo/tv-logos (Romania)...');
    // Fetch Romanian logos from tv-logos repository
    const response = await axios.get('https://api.github.com/repos/tv-logo/tv-logos/contents/countries/romania', {
      timeout: 10000,
      headers: {
        'Accept': 'application/vnd.github.v3+json',
        'User-Agent': 'StreamFlow-IPTV'
      }
    });
    // Transform GitHub API response to match our logo database format
    // (lowercased basename without extension becomes the lookup name).
    const logos = response.data
      .filter(item => item.type === 'file' && (item.name.endsWith('.png') || item.name.endsWith('.svg')))
      .map(item => ({
        name: item.name.replace(/\.(png|svg)$/i, '').toLowerCase(),
        url: item.download_url,
        guides: []
      }));
    logoDbCache = logos;
    logoDbCacheTime = now;
    console.log(`Loaded ${logoDbCache.length} logos from tv-logos (Romania)`);
    // Also try to fetch IPTV-org as fallback
    try {
      const iptvOrgResponse = await axios.get('https://iptv-org.github.io/api/logos.json', {
        timeout: 5000
      });
      // Merge both sources, tv-logos takes priority (first-writer wins by name).
      const combinedLogos = [...logoDbCache];
      const existingNames = new Set(logoDbCache.map(l => l.name));
      iptvOrgResponse.data.forEach(logo => {
        if (!existingNames.has(logo.name.toLowerCase())) {
          combinedLogos.push(logo);
        }
      });
      logoDbCache = combinedLogos;
      console.log(`Total logos after merging with IPTV-org: ${logoDbCache.length}`);
    } catch (iptvError) {
      console.log('IPTV-org fallback not available, using only tv-logos');
    }
    return logoDbCache;
  } catch (error) {
    console.error('Failed to fetch logos from tv-logos:', error.message);
    // Try IPTV-org as fallback
    try {
      console.log('Trying IPTV-org as fallback...');
      const response = await axios.get('https://iptv-org.github.io/api/logos.json', {
        timeout: 10000
      });
      logoDbCache = response.data;
      logoDbCacheTime = now;
      console.log(`Loaded ${logoDbCache.length} logos from IPTV-org (fallback)`);
      return logoDbCache;
    } catch (fallbackError) {
      console.error('Fallback to IPTV-org also failed:', fallbackError.message);
      // Serve stale cache if we have one; otherwise an empty list.
      return logoDbCache || [];
    }
  }
}
// Find logo for a channel.
// Matching strategy, in order: exact normalized-name match, partial
// (substring either direction) match, then a match against the stream URL's
// first hostname label. On a hit, the logo is downloaded into the local
// cache and its public path returned; otherwise null.
async function findChannelLogo(channelName, channelUrl, logoDb) {
  if (!logoDb || logoDb.length === 0) return null;
  // Normalize: strip (...) and [...] qualifiers, collapse whitespace.
  const normalized = channelName.toLowerCase()
    .replace(/\s*\([^)]*\)/g, '')
    .replace(/\s*\[[^\]]*\]/g, '')
    .replace(/\s+/g, ' ')
    .trim();
  const exact = logoDb.find((logo) => logo.name.toLowerCase() === normalized);
  if (exact) return await downloadAndCacheLogo(exact.url);
  const partial = logoDb.find((logo) => {
    const candidate = logo.name.toLowerCase();
    return normalized.includes(candidate) || candidate.includes(normalized);
  });
  if (partial) return await downloadAndCacheLogo(partial.url);
  if (channelUrl) {
    try {
      const host = new URL(channelUrl).hostname.replace('www.', '');
      const hostKey = host.split('.')[0];
      const byDomain = logoDb.find((logo) => logo.name.toLowerCase().includes(hostKey));
      if (byDomain) return await downloadAndCacheLogo(byDomain.url);
    } catch (e) {
      // Invalid URL - fall through to the null return below.
    }
  }
  return null;
}
// Helper function to detect if a channel is likely radio or TV.
// Returns 1 for radio, 0 for TV. Precedence: strong TV markers in the URL or
// name win outright; then strong radio URL markers; then radio-looking name
// or group patterns; finally the user's own selection breaks the tie.
function detectChannelType(item, userSelectedType) {
  const url = (item.url || '').toLowerCase();
  const name = (item.name || '').toLowerCase();
  const group = (item.group?.title || '').toLowerCase();

  // Substrings that almost always mean a video stream.
  const tvMarkers = [
    '.m3u8', '/playlist.m3u8', 'video', 'tv',
    '1080p', '720p', '480p', '360p', '4k', 'hd', 'fhd', 'uhd',
    'tvsat', 'livestream'
  ];
  // URL substrings typical of audio/radio streaming.
  const radioUrlMarkers = [
    ':8000/', ':8001/', ':8080/', ':8443/',
    '/radio', 'radiostream',
    '.mp3', '.aac', '.ogg',
    'icecast', 'shoutcast'
  ];
  // Name/group shapes like "Radio X", "X FM", "101.5 FM", " AM ".
  const radioNameRegexes = [
    /^radio\s/i,
    /\sradio$/i,
    /\sradio\s/i,
    /\sfm$/i,
    /^fm\s/i,
    /\d+\.?\d*\s?fm/i,
    /\sam\s/i
  ];

  if (tvMarkers.some((marker) => url.includes(marker) || name.includes(marker))) {
    return 0; // Definitely TV
  }
  if (radioUrlMarkers.some((marker) => url.includes(marker))) {
    return 1; // Definitely radio
  }
  if (radioNameRegexes.some((re) => re.test(name) || re.test(group))) {
    return 1; // Likely radio based on name
  }
  if (group.includes('radio') || group.includes('fm') || group.includes('am')) {
    return 1;
  }
  // No indicators - fall back to what the user selected at import time.
  return userSelectedType === 'radio' ? 1 : 0;
}
// Ensure M3U upload directory exists.
// Runs synchronously at module load so routes can assume the directory is
// present. NOTE(review): '/app' assumes the containerized deployment layout -
// confirm against the Docker image before reusing this module elsewhere.
const M3U_UPLOAD_DIR = path.join('/app', 'data', 'm3u-files');
try {
  if (!fsSync.existsSync(M3U_UPLOAD_DIR)) {
    fsSync.mkdirSync(M3U_UPLOAD_DIR, { recursive: true });
  }
} catch (error) {
  // Non-fatal: uploads will fail later with a clearer per-request error.
  console.error('Failed to create M3U upload directory:', error);
}
// Get all M3U files for the current user, newest first.
// Only returns metadata columns - never file_path, which stays server-side.
router.get('/', authenticate, (req, res) => {
  const listSql =
    `SELECT id, user_id, name, original_filename, size, created_at, updated_at
     FROM m3u_files
     WHERE user_id = ?
     ORDER BY created_at DESC`;
  db.all(listSql, [req.user.userId], (err, files) => {
    if (err) {
      console.error('Error fetching M3U files:', err);
      return res.status(500).json({ error: 'Failed to fetch M3U files' });
    }
    res.json(files);
  });
});
// Download M3U file.
// Ownership is enforced in the query (id + user_id); the stored file is
// streamed back with a sanitized attachment filename.
router.get('/:id/download', authenticate, async (req, res) => {
  const { id } = req.params;
  db.get(
    'SELECT * FROM m3u_files WHERE id = ? AND user_id = ?',
    [id, req.user.userId],
    async (err, file) => {
      if (err) {
        console.error('Error fetching M3U file:', err);
        return res.status(500).json({ error: 'Failed to fetch M3U file' });
      }
      if (!file) {
        return res.status(404).json({ error: 'M3U file not found' });
      }
      try {
        const filePath = file.file_path;
        // Sanitize filename to prevent path traversal attacks (and header
        // injection via the Content-Disposition value).
        const fileName = (file.original_filename || `${file.name}.m3u`).replace(/[^a-zA-Z0-9._-]/g, '_');
        // Check if file exists
        await fs.access(filePath);
        // Set headers for download
        res.setHeader('Content-Type', 'audio/x-mpegurl');
        res.setHeader('Content-Disposition', `attachment; filename="${fileName}"`);
        // Stream the file rather than buffering it in memory.
        const fileStream = fsSync.createReadStream(filePath);
        fileStream.pipe(res);
      } catch (error) {
        console.error('Error downloading M3U file:', error);
        res.status(500).json({ error: 'Failed to download M3U file' });
      }
    }
  );
});
// Upload M3U file.
// Expects multipart form data with the file under "m3u" and a display name
// in the body. Only .m3u/.m3u8 extensions are accepted; the stored filename
// is generated server-side (timestamp + random) so user input never reaches
// the filesystem path. On DB failure the moved file is cleaned up again.
router.post('/upload', authenticate, async (req, res) => {
  try {
    if (!req.files || !req.files.m3u) {
      return res.status(400).json({ error: 'No file uploaded' });
    }
    const uploadedFile = req.files.m3u;
    const { name } = req.body;
    if (!name || !name.trim()) {
      return res.status(400).json({ error: 'File name is required' });
    }
    // Validate file extension
    const ext = path.extname(uploadedFile.name).toLowerCase();
    if (ext !== '.m3u' && ext !== '.m3u8') {
      return res.status(400).json({ error: 'Only .m3u and .m3u8 files are allowed' });
    }
    // Generate unique filename (timestamp + random suffix; collision odds negligible)
    const uniqueSuffix = Date.now() + '-' + Math.round(Math.random() * 1E9);
    const filename = uniqueSuffix + ext;
    const filePath = path.join(M3U_UPLOAD_DIR, filename);
    // Move file from temp location to M3U directory
    await uploadedFile.mv(filePath);
    db.run(
      `INSERT INTO m3u_files (user_id, name, original_filename, file_path, size)
       VALUES (?, ?, ?, ?, ?)`,
      [req.user.userId, name.trim(), uploadedFile.name, filePath, uploadedFile.size],
      function(err) {
        if (err) {
          console.error('Error uploading M3U file:', err);
          // Best-effort cleanup of the already-moved file.
          fs.unlink(filePath).catch(console.error);
          return res.status(500).json({ error: 'Failed to upload M3U file' });
        }
        // Return the freshly inserted row (metadata only, no file_path).
        db.get(
          `SELECT id, user_id, name, original_filename, size, created_at, updated_at
           FROM m3u_files WHERE id = ?`,
          [this.lastID],
          (err, file) => {
            if (err) {
              return res.status(500).json({ error: 'Failed to fetch uploaded file' });
            }
            res.status(201).json(file);
          }
        );
      }
    );
  } catch (error) {
    console.error('Error uploading M3U file:', error);
    res.status(500).json({ error: 'Failed to upload M3U file' });
  }
});
// Rename M3U file
// Updates the display name of an m3u_files row the requester owns and
// returns the refreshed row.
router.patch('/:id', authenticate, (req, res) => {
  const { id } = req.params;
  const { name } = req.body;

  if (!name || !name.trim()) {
    return res.status(400).json({ error: 'File name is required' });
  }

  // Only the owner may rename the file.
  db.get(
    'SELECT * FROM m3u_files WHERE id = ? AND user_id = ?',
    [id, req.user.userId],
    (lookupErr, file) => {
      if (lookupErr) {
        console.error('Error fetching M3U file:', lookupErr);
        return res.status(500).json({ error: 'Failed to fetch M3U file' });
      }
      if (!file) {
        return res.status(404).json({ error: 'M3U file not found' });
      }

      db.run(
        'UPDATE m3u_files SET name = ?, updated_at = CURRENT_TIMESTAMP WHERE id = ?',
        [name.trim(), id],
        (updateErr) => {
          if (updateErr) {
            console.error('Error renaming M3U file:', updateErr);
            return res.status(500).json({ error: 'Failed to rename M3U file' });
          }
          // Return the row as stored after the update.
          db.get(
            `SELECT id, user_id, name, original_filename, size, created_at, updated_at
             FROM m3u_files WHERE id = ?`,
            [id],
            (fetchErr, updatedFile) => {
              if (fetchErr) {
                return res.status(500).json({ error: 'Failed to fetch updated file' });
              }
              res.json(updatedFile);
            }
          );
        }
      );
    }
  );
});
// Delete M3U file
// Removes the database row first, then deletes the on-disk file
// best-effort (a missing physical file does not fail the request).
router.delete('/:id', authenticate, async (req, res) => {
  const { id } = req.params;

  // Look the row up first so we know the file path and can verify ownership.
  db.get(
    'SELECT * FROM m3u_files WHERE id = ? AND user_id = ?',
    [id, req.user.userId],
    async (lookupErr, file) => {
      if (lookupErr) {
        console.error('Error fetching M3U file:', lookupErr);
        return res.status(500).json({ error: 'Failed to fetch M3U file' });
      }
      if (!file) {
        return res.status(404).json({ error: 'M3U file not found' });
      }

      db.run('DELETE FROM m3u_files WHERE id = ?', [id], async (deleteErr) => {
        if (deleteErr) {
          console.error('Error deleting M3U file:', deleteErr);
          return res.status(500).json({ error: 'Failed to delete M3U file' });
        }
        // DB row is gone; physical removal is best-effort only.
        try {
          await fs.unlink(file.file_path);
        } catch (fsError) {
          console.error('Error deleting physical file:', fsError);
        }
        res.json({ message: 'M3U file deleted successfully' });
      });
    }
  );
});
// Import M3U file to playlist
//
// Parses a previously uploaded M3U file owned by the requester, creates a
// playlist named after the file (or reuses an existing one with that name),
// and inserts every parsed channel. Channel type (radio vs TV) is decided per
// item by detectChannelType(); logos are looked up in the IPTV-org database
// when the M3U entry has none, otherwise the existing logo URL is cached.
// Responds with counts of added channels plus radio/TV totals.
router.post('/:id/import', authenticate, async (req, res) => {
  try {
    const { id } = req.params;
    const { type } = req.body; // 'tv' or 'radio'
    if (!type || !['tv', 'radio'].includes(type)) {
      return res.status(400).json({ error: 'Invalid type. Must be "tv" or "radio"' });
    }
    // Fetch logo database in parallel with file processing
    const logoDbPromise = getLogoDatabase();
    // Get file info and verify ownership
    db.get(
      'SELECT * FROM m3u_files WHERE id = ? AND user_id = ?',
      [id, req.user.userId],
      async (err, file) => {
        if (err) {
          console.error('Error fetching M3U file:', err);
          return res.status(500).json({ error: 'Failed to fetch M3U file' });
        }
        if (!file) {
          return res.status(404).json({ error: 'M3U file not found' });
        }
        try {
          // Read and parse M3U file
          console.log('Reading M3U file:', file.file_path);
          const m3uContent = await fs.readFile(file.file_path, 'utf-8');
          console.log('M3U content length:', m3uContent.length);
          const parsed = m3uParser.parse(m3uContent);
          console.log('Parsed items count:', parsed.items.length);
          // Wait for logo database
          const logoDb = await logoDbPromise;
          console.log(`Logo database ready with ${logoDb.length} entries`);
          // Create or get playlist
          db.get(
            `SELECT * FROM playlists WHERE user_id = ? AND name = ?`,
            [req.user.userId, file.name],
            (err, playlist) => {
              if (err) {
                console.error('Error checking playlist:', err);
                return res.status(500).json({ error: 'Failed to check playlist' });
              }
              // Shared continuation: inserts all parsed channels into the
              // given playlist (new or pre-existing), refreshes the
              // playlist's channel_count, and sends the final response.
              const createOrUsePlaylist = async (playlistId) => {
                // Insert channels
                let channelsAdded = 0;
                let channelsProcessed = 0;
                const totalChannels = parsed.items.length;
                if (totalChannels === 0) {
                  return res.json({
                    message: 'No channels found in M3U file',
                    playlist_id: playlistId,
                    channels_added: 0,
                    type,
                  });
                }
                // Process channels sequentially to handle async logo downloads
                for (const item of parsed.items) {
                  try {
                    // Detect actual channel type using heuristics
                    const isRadio = detectChannelType(item, type);
                    // Try to find logo from IPTV-org if not provided, or cache existing logo
                    let logo = item.tvg?.logo;
                    if (!logo || logo.trim() === '') {
                      // No logo in M3U, try to find one from IPTV-org
                      const foundLogo = await findChannelLogo(item.name, item.url, logoDb);
                      if (foundLogo) {
                        logo = foundLogo;
                      }
                    } else {
                      // Logo exists in M3U, try to cache it
                      const cachedLogo = await downloadAndCacheLogo(logo);
                      // Only use cached logo if download succeeded
                      if (cachedLogo) {
                        logo = cachedLogo;
                      }
                      // Otherwise keep original logo URL from M3U
                    }
                    // INSERT OR IGNORE: rows violating the table's uniqueness
                    // rules are silently skipped; this.changes tells a real
                    // insert apart from an ignored one.
                    await new Promise((resolve, reject) => {
                      db.run(
                        `INSERT OR IGNORE INTO channels (
                          playlist_id, name, url, logo, group_name,
                          tvg_id, tvg_name, language, country, is_radio
                        ) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?)`,
                        [
                          playlistId,
                          item.name || 'Unknown Channel',
                          item.url,
                          logo,
                          item.group?.title || null,
                          item.tvg?.id || null,
                          item.tvg?.name || null,
                          item.tvg?.language || null,
                          item.tvg?.country || null,
                          isRadio,
                        ],
                        function(insertErr) {
                          if (!insertErr && this.changes > 0) {
                            channelsAdded++;
                          }
                          if (insertErr) {
                            console.error('Error inserting channel:', insertErr);
                          }
                          // Always resolve (never reject): one bad row must
                          // not abort the whole import.
                          resolve();
                        }
                      );
                    });
                    channelsProcessed++;
                  } catch (error) {
                    console.error('Error processing channel:', error);
                    channelsProcessed++;
                  }
                }
                // Update complete
                console.log(`Import complete: ${channelsAdded} channels added out of ${totalChannels}`);
                // Update channel count - count all channels in this playlist regardless of type
                db.run(
                  'UPDATE playlists SET channel_count = (SELECT COUNT(*) FROM channels WHERE playlist_id = ?) WHERE id = ?',
                  [playlistId, playlistId],
                  (updateErr) => {
                    if (updateErr) {
                      console.error('Error updating playlist count:', updateErr);
                    }
                    // Get actual counts of radio vs TV
                    db.get(
                      'SELECT SUM(is_radio = 1) as radio_count, SUM(is_radio = 0) as tv_count FROM channels WHERE playlist_id = ?',
                      [playlistId],
                      (err, counts) => {
                        // SUM() yields NULL on an empty playlist; coalesce to 0.
                        const radioCount = counts?.radio_count || 0;
                        const tvCount = counts?.tv_count || 0;
                        res.json({
                          message: 'M3U file imported successfully',
                          playlist_id: playlistId,
                          channels_added: channelsAdded,
                          radio_channels: radioCount,
                          tv_channels: tvCount,
                          type,
                        });
                      }
                    );
                  }
                );
              };
              if (!playlist) {
                // Create new playlist
                db.run(
                  `INSERT INTO playlists (user_id, name, type, url)
                   VALUES (?, ?, ?, ?)`,
                  [req.user.userId, file.name, type, file.file_path],
                  function(insertErr) {
                    if (insertErr) {
                      console.error('Error creating playlist:', insertErr);
                      return res.status(500).json({ error: 'Failed to create playlist' });
                    }
                    createOrUsePlaylist(this.lastID);
                  }
                );
              } else {
                createOrUsePlaylist(playlist.id);
              }
            }
          );
        } catch (parseError) {
          console.error('Error parsing/importing M3U file:', parseError);
          console.error('Error stack:', parseError.stack);
          res.status(500).json({ error: 'Failed to parse M3U file: ' + parseError.message });
        }
      }
    );
  } catch (error) {
    console.error('Error importing M3U file:', error);
    res.status(500).json({ error: 'Failed to import M3U file' });
  }
});
// Fix channel types for existing channels (utility endpoint)
//
// Two-step maintenance job:
//   1. delete duplicate channel rows (same URL within the same playlist,
//      keeping the lowest id), then
//   2. re-run the radio/TV heuristic over the requester's channels and flip
//      is_radio wherever the detection disagrees with the stored value.
// NOTE(review): step 1 is NOT scoped to the requesting user — it deduplicates
// every playlist in the table. Confirm whether that is intended.
router.post('/fix-channel-types', authenticate, async (req, res) => {
  try {
    console.log('Starting channel type fix and deduplication...');
    // First, remove duplicates (same URL in same playlist)
    db.run(
      `DELETE FROM channels WHERE id NOT IN (
        SELECT MIN(id) FROM channels
        GROUP BY playlist_id, url
      )`,
      (delErr, delResult) => {
        // Deduplication failure is logged but does not abort the type fix.
        if (delErr) {
          console.error('Error removing duplicates:', delErr);
        } else {
          console.log('Duplicates removed');
        }
        // Now fix channel types
        db.all(
          `SELECT c.id, c.name, c.url, c.group_name, c.is_radio
           FROM channels c
           JOIN playlists p ON c.playlist_id = p.id
           WHERE p.user_id = ?`,
          [req.user.userId],
          (err, channels) => {
            if (err) {
              console.error('Error fetching channels:', err);
              return res.status(500).json({ error: 'Failed to fetch channels' });
            }
            let updated = 0;
            let processed = 0;
            const total = channels.length;
            console.log(`Found ${total} channels to analyze`);
            if (total === 0) {
              return res.json({
                message: 'No channels found',
                total_channels: 0,
                updated_channels: 0
              });
            }
            // UPDATEs are issued concurrently; `processed` counts completions
            // on both paths so exactly one callback — the last one to finish —
            // sends the response.
            channels.forEach(channel => {
              // Detect correct type
              const item = {
                name: channel.name,
                url: channel.url,
                group: { title: channel.group_name }
              };
              // Determine if it should be radio or TV based on current type
              const currentType = channel.is_radio === 1 ? 'radio' : 'tv';
              const detectedIsRadio = detectChannelType(item, currentType);
              // Only update if detection differs from current value
              if (detectedIsRadio !== channel.is_radio) {
                db.run(
                  'UPDATE channels SET is_radio = ? WHERE id = ?',
                  [detectedIsRadio, channel.id],
                  function(updateErr) {
                    processed++;
                    if (!updateErr && this.changes > 0) {
                      updated++;
                      console.log(`Updated channel ${channel.id} (${channel.name}): ${channel.is_radio} -> ${detectedIsRadio}`);
                    }
                    if (processed === total) {
                      console.log(`Fix complete: ${updated} channels updated out of ${total}`);
                      res.json({
                        message: 'Channel types fixed and duplicates removed',
                        total_channels: total,
                        updated_channels: updated
                      });
                    }
                  }
                );
              } else {
                processed++;
                if (processed === total) {
                  console.log(`Fix complete: ${updated} channels updated out of ${total}`);
                  res.json({
                    message: 'Channel types fixed and duplicates removed',
                    total_channels: total,
                    updated_channels: updated
                  });
                }
              }
            });
          }
        );
      }
    );
  } catch (error) {
    console.error('Error fixing channel types:', error);
    res.status(500).json({ error: 'Failed to fix channel types' });
  }
});
// Update missing logos from IPTV-org database
// For every channel of the requesting user that has no logo, tries to
// resolve one via findChannelLogo() and persists any hit. Channels are
// processed one at a time because logo lookup may download files.
router.post('/update-logos', authenticate, async (req, res) => {
  try {
    console.log('Fetching logo database...');
    const logoDb = await getLogoDatabase();
    if (!logoDb || logoDb.length === 0) {
      return res.status(503).json({ error: 'Logo database unavailable' });
    }

    console.log('Fetching channels with missing logos...');
    // Get channels without logos
    db.all(
      `SELECT c.id, c.name, c.url, c.logo
       FROM channels c
       JOIN playlists p ON c.playlist_id = p.id
       WHERE p.user_id = ? AND (c.logo IS NULL OR c.logo = '')`,
      [req.user.userId],
      async (queryErr, channels) => {
        if (queryErr) {
          console.error('Error fetching channels:', queryErr);
          return res.status(500).json({ error: 'Failed to fetch channels' });
        }

        const total = channels.length;
        console.log(`Found ${total} channels without logos`);
        if (total === 0) {
          return res.json({
            message: 'All channels already have logos',
            total_channels: 0,
            updated_channels: 0
          });
        }

        // Persist one logo; resolves true only when a row actually changed.
        const persistLogo = (channelId, logoUrl) =>
          new Promise((resolve) => {
            db.run(
              'UPDATE channels SET logo = ? WHERE id = ?',
              [logoUrl, channelId],
              function(updateErr) {
                resolve(!updateErr && this.changes > 0);
              }
            );
          });

        let updated = 0;
        // Process sequentially to handle async logo downloads
        for (const channel of channels) {
          try {
            const foundLogo = await findChannelLogo(channel.name, channel.url, logoDb);
            if (foundLogo) {
              const saved = await persistLogo(channel.id, foundLogo);
              if (saved) {
                updated++;
                console.log(`Updated logo for ${channel.name}: ${foundLogo}`);
              }
            }
          } catch (error) {
            console.error(`Error updating logo for ${channel.name}:`, error);
          }
        }

        console.log(`Logo update complete: ${updated} logos added`);
        res.json({
          message: 'Channel logos updated',
          total_channels: total,
          updated_channels: updated
        });
      }
    );
  } catch (error) {
    console.error('Error updating logos:', error);
    res.status(500).json({ error: 'Failed to update logos' });
  }
});

module.exports = router;

218
backend/routes/metadata.js Normal file
View file

@ -0,0 +1,218 @@
const express = require('express');
const router = express.Router();
const axios = require('axios');
const { authenticate } = require('../middleware/auth');
const { readLimiter } = require('../middleware/rateLimiter');
const logger = require('../utils/logger');
const { getRadioStationMetadata } = require('../utils/radioMetadata');
/**
 * Fetch metadata from radio stream (ICY/Shoutcast metadata)
 * This attempts to extract "now playing" information from radio streams
 *
 * GET /radio/:channelId — order of attempts:
 *   1. read ICY response headers (station name, genre, bitrate, homepage);
 *   2. if the stream advertises icy-metaint, buffer enough of the stream to
 *      decode one in-band metadata block and parse StreamTitle;
 *   3. fall back to an external lookup via getRadioStationMetadata().
 * Always responds 200 with whatever fields could be filled (nulls
 * otherwise), except 400 for a bad id and 404 for an unknown or
 * non-radio channel.
 */
router.get('/radio/:channelId', readLimiter, authenticate, async (req, res) => {
  const { channelId } = req.params;
  const { db } = require('../database/db');
  try {
    // Validate channel ID
    const id = parseInt(channelId, 10);
    if (isNaN(id) || id < 1) {
      return res.status(400).json({ error: 'Invalid channel ID' });
    }
    // Get channel URL from database
    db.get(
      'SELECT url, name FROM channels WHERE id = ? AND is_radio = 1',
      [id],
      async (err, channel) => {
        // DB errors and missing rows are both reported as 404.
        if (err || !channel) {
          return res.status(404).json({ error: 'Channel not found' });
        }
        try {
          // Request stream with ICY metadata headers
          const response = await axios.get(channel.url, {
            headers: {
              'Icy-MetaData': '1',
              'User-Agent': 'StreamFlow/1.0'
            },
            responseType: 'stream',
            timeout: 5000,
            maxRedirects: 5
          });
          // Response skeleton; fields stay null when nothing is found.
          let metadata = {
            channelId: parseInt(channelId),
            channelName: channel.name,
            title: null,
            artist: null,
            song: null,
            streamTitle: null,
            bitrate: null,
            genre: null,
            url: null
          };
          // Extract ICY headers
          // (icyDescription is read but currently unused.)
          const icyName = response.headers['icy-name'];
          const icyGenre = response.headers['icy-genre'];
          const icyBr = response.headers['icy-br'];
          const icyUrl = response.headers['icy-url'];
          const icyDescription = response.headers['icy-description'];
          if (icyName) {
            // Filter out stream quality info (e.g., "europafm_aacp_48k")
            const cleanName = icyName.replace(/_aacp?_\d+k?/gi, '').replace(/_mp3_\d+k?/gi, '').replace(/_\d+k/gi, '');
            if (cleanName && !cleanName.match(/^\w+_\w+$/)) {
              metadata.streamTitle = cleanName;
            } else {
              metadata.streamTitle = icyName;
            }
          }
          if (icyGenre) metadata.genre = icyGenre;
          if (icyBr) metadata.bitrate = icyBr + ' kbps';
          if (icyUrl) metadata.url = icyUrl;
          // Try to get current track from ICY-MetaInt
          // (icy-metaint = number of audio bytes between in-band metadata blocks)
          const metaInt = parseInt(response.headers['icy-metaint']);
          if (metaInt && metaInt > 0) {
            // Read metadata from stream
            const chunks = [];
            let bytesRead = 0;
            let metadataFound = false;
            response.data.on('data', (chunk) => {
              if (metadataFound) return;
              chunks.push(chunk);
              bytesRead += chunk.length;
              // Once we have enough data, parse metadata
              // NOTE(review): waits for metaInt + 255 bytes, but the in-band
              // block can be up to 16*255 bytes (length byte * 16), so very
              // long titles may be truncated — confirm.
              if (bytesRead >= metaInt + 255) {
                const buffer = Buffer.concat(chunks);
                // Byte at offset metaInt is the metadata length in 16-byte units.
                const metadataLength = buffer[metaInt] * 16;
                if (metadataLength > 0) {
                  const metadataBuffer = buffer.slice(metaInt + 1, metaInt + 1 + metadataLength);
                  // Strip NUL padding from the fixed-size block.
                  const metadataString = metadataBuffer.toString('utf8').replace(/\0/g, '');
                  logger.info(`[Metadata] Raw metadata string for ${channel.name}: ${metadataString}`);
                  // Parse StreamTitle='Artist - Song'
                  const titleMatch = metadataString.match(/StreamTitle='([^']*)'/);
                  if (titleMatch && titleMatch[1]) {
                    const rawTitle = titleMatch[1];
                    logger.info(`[Metadata] Raw title: ${rawTitle}`);
                    // Skip if it's just stream quality info
                    if (!rawTitle.match(/^\w+_aacp?_\d+k?$/i) && !rawTitle.match(/^\w+_mp3_\d+k?$/i)) {
                      metadata.song = rawTitle;
                      // Try to split into artist and title (various separators)
                      let parts = rawTitle.split(' - ');
                      if (parts.length === 1) {
                        // NOTE(review): comment originally said "em dash" but
                        // this splits on a plain space — likely a mangled
                        // ' – ' separator; confirm against upstream history.
                        parts = rawTitle.split(' ');
                      }
                      if (parts.length === 1) {
                        parts = rawTitle.split(' | ');
                      }
                      if (parts.length >= 2) {
                        metadata.artist = parts[0].trim();
                        metadata.title = parts.slice(1).join(' - ').trim();
                      } else if (parts.length === 1 && rawTitle.length > 0) {
                        // If no separator, use the whole thing as title
                        metadata.title = rawTitle.trim();
                      }
                    }
                  }
                }
                // Stop buffering and close the HTTP stream.
                metadataFound = true;
                response.data.destroy();
              }
            });
            // Wait a bit for metadata
            await new Promise(resolve => setTimeout(resolve, 2000));
          }
          // Close the stream
          if (response.data && !response.data.destroyed) {
            response.data.destroy();
          }
          // If no metadata found from ICY, try external APIs
          if (!metadata.title && !metadata.artist && !metadata.song) {
            logger.info(`[Metadata] No ICY metadata found, trying external sources for ${channel.name}`);
            try {
              const externalMetadata = await getRadioStationMetadata(channel.name, channel.url);
              if (externalMetadata) {
                metadata.title = externalMetadata.title;
                metadata.artist = externalMetadata.artist;
                if (externalMetadata.title && externalMetadata.artist) {
                  metadata.song = `${externalMetadata.artist} - ${externalMetadata.title}`;
                } else if (externalMetadata.title) {
                  metadata.song = externalMetadata.title;
                }
                logger.info(`[Metadata] External metadata found: ${metadata.song}`);
              }
            } catch (externalError) {
              logger.error(`[Metadata] External metadata fetch failed: ${externalError.message}`);
            }
          }
          res.json(metadata);
        } catch (streamError) {
          logger.error('Error fetching stream metadata:', streamError.message);
          // Try external metadata as fallback
          try {
            const externalMetadata = await getRadioStationMetadata(channel.name, channel.url);
            if (externalMetadata) {
              return res.json({
                channelId: parseInt(channelId),
                channelName: channel.name,
                title: externalMetadata.title,
                artist: externalMetadata.artist,
                song: externalMetadata.artist && externalMetadata.title
                  ? `${externalMetadata.artist} - ${externalMetadata.title}`
                  : externalMetadata.title,
                streamTitle: channel.name,
                bitrate: null,
                genre: null,
                url: null,
                source: externalMetadata.source
              });
            }
          } catch (externalError) {
            logger.error(`[Metadata] External metadata fallback failed: ${externalError.message}`);
          }
          // Return basic info if everything fails
          res.json({
            channelId: parseInt(channelId),
            channelName: channel.name,
            title: null,
            artist: null,
            song: null,
            streamTitle: channel.name,
            bitrate: null,
            genre: null,
            url: null,
            error: 'Unable to fetch stream metadata'
          });
        }
      }
    );
  } catch (error) {
    logger.error('Metadata fetch error:', error);
    res.status(500).json({ error: 'Failed to fetch metadata' });
  }
});

module.exports = router;

233
backend/routes/playlists.js Normal file
View file

@ -0,0 +1,233 @@
const express = require('express');
const router = express.Router();
const multer = require('multer');
const path = require('path');
const fs = require('fs');
const { authenticate } = require('../middleware/auth');
const { modifyLimiter, heavyLimiter, readLimiter } = require('../middleware/rateLimiter');
const { db } = require('../database/db');
const logger = require('../utils/logger');
const { parseM3U } = require('../utils/m3uParser');
const {
validatePlaylist,
validateIdParam,
validateBulkDelete,
createValidationMiddleware
} = require('../middleware/inputValidation');
const { validatePlaylistName } = require('../utils/inputValidator');
// Disk storage for uploaded playlist files under data/uploads.
const storage = multer.diskStorage({
  destination: (req, file, cb) => {
    const uploadDir = path.join(__dirname, '../../data/uploads');
    // Create the directory lazily on first upload.
    if (!fs.existsSync(uploadDir)) {
      fs.mkdirSync(uploadDir, { recursive: true, mode: 0o755 });
    }
    cb(null, uploadDir);
  },
  filename: (req, file, cb) => {
    // playlist-<timestamp>-<random><original extension>
    const suffix = `${Date.now()}-${Math.round(Math.random() * 1E9)}`;
    cb(null, `playlist-${suffix}${path.extname(file.originalname)}`);
  }
});
// Multer instance for playlist uploads: accepts M3U content by MIME type or
// file extension, capped at 50MB per file.
// Fix: the extension check is now case-insensitive ("FILE.M3U" was rejected
// before), matching the lowercased-extension handling used elsewhere in the
// backend's M3U routes.
const upload = multer({
  storage,
  limits: { fileSize: 50 * 1024 * 1024 }, // 50MB
  fileFilter: (req, file, cb) => {
    const lowerName = file.originalname.toLowerCase();
    if (file.mimetype === 'audio/x-mpegurl' || lowerName.endsWith('.m3u') || lowerName.endsWith('.m3u8')) {
      cb(null, true);
    } else {
      cb(new Error('Only M3U files are allowed'));
    }
  }
});
// Get all playlists for user
// Newest first.
router.get('/', authenticate, readLimiter, (req, res) => {
  const query = 'SELECT * FROM playlists WHERE user_id = ? ORDER BY created_at DESC';
  db.all(query, [req.user.userId], (queryErr, playlists) => {
    if (queryErr) {
      logger.error('Error fetching playlists:', queryErr);
      return res.status(500).json({ error: 'Failed to fetch playlists' });
    }
    res.json(playlists);
  });
});
// Add playlist from URL
//
// Stores the playlist row, then parses the remote M3U into channels.
// Fix: credentials are now set as real query parameters via the URL API.
// The previous string-replace approach
// (`url.replace('username=', `username=${username}`)`) corrupted URLs that
// already carried values — "username=old" became "username=newold" — and
// silently did nothing when the parameter was absent.
router.post('/url', authenticate, modifyLimiter, validatePlaylist, async (req, res) => {
  const { name, url, username, password, category, type } = req.body;
  let playlistUrl = url;
  if (username && password) {
    try {
      const parsedUrl = new URL(url);
      parsedUrl.searchParams.set('username', username);
      parsedUrl.searchParams.set('password', password);
      playlistUrl = parsedUrl.toString();
    } catch (urlError) {
      logger.error('Invalid playlist URL:', urlError);
      return res.status(400).json({ error: 'Invalid playlist URL' });
    }
  }
  db.run(
    'INSERT INTO playlists (user_id, name, url, type, category) VALUES (?, ?, ?, ?, ?)',
    [req.user.userId, name, playlistUrl, type || 'live', category],
    async function(err) {
      if (err) {
        logger.error('Error adding playlist:', err);
        return res.status(500).json({ error: 'Failed to add playlist' });
      }
      const playlistId = this.lastID;
      try {
        await parseM3U(playlistUrl, playlistId);
        res.status(201).json({
          message: 'Playlist added successfully',
          id: playlistId
        });
      } catch (error) {
        logger.error('Error parsing M3U:', error);
        res.status(500).json({ error: 'Failed to parse playlist' });
      }
    }
  );
});
// Upload M3U file
// Persists the uploaded playlist file's metadata, then parses its channels.
router.post('/upload', authenticate, heavyLimiter, upload.single('file'), async (req, res) => {
  if (!req.file) {
    return res.status(400).json({ error: 'No file uploaded' });
  }

  const { name, category, type } = req.body;

  // Validate playlist name if provided
  if (name) {
    const validation = validatePlaylistName(name);
    if (!validation.valid) {
      // Clean up uploaded file before rejecting.
      if (fs.existsSync(req.file.path)) {
        fs.unlinkSync(req.file.path);
      }
      return res.status(400).json({ error: validation.errors.join(', ') });
    }
  }

  const { filename, path: filePath } = req.file;

  db.run(
    'INSERT INTO playlists (user_id, name, filename, type, category) VALUES (?, ?, ?, ?, ?)',
    [req.user.userId, name || req.file.originalname, filename, type || 'live', category],
    async function(insertErr) {
      if (insertErr) {
        logger.error('Error saving playlist:', insertErr);
        return res.status(500).json({ error: 'Failed to save playlist' });
      }
      const playlistId = this.lastID;
      try {
        // Third argument presumably flags a local-file source — confirm in m3uParser.
        await parseM3U(filePath, playlistId, true);
        res.status(201).json({
          message: 'Playlist uploaded successfully',
          id: playlistId,
          filename
        });
      } catch (parseError) {
        logger.error('Error parsing uploaded M3U:', parseError);
        res.status(500).json({ error: 'Failed to parse playlist' });
      }
    }
  );
});
// Delete playlist
// Verifies ownership, removes any uploaded file from disk, then deletes
// the database row.
router.delete('/:id', authenticate, modifyLimiter, validateIdParam, (req, res) => {
  const playlistId = req.params.id;

  db.get(
    'SELECT * FROM playlists WHERE id = ? AND user_id = ?',
    [playlistId, req.user.userId],
    (lookupErr, playlist) => {
      if (lookupErr) {
        logger.error('Error fetching playlist:', lookupErr);
        return res.status(500).json({ error: 'Failed to delete playlist' });
      }
      if (!playlist) {
        return res.status(404).json({ error: 'Playlist not found' });
      }

      // Delete file if exists
      if (playlist.filename) {
        const filePath = path.join(__dirname, '../../data/uploads', playlist.filename);
        if (fs.existsSync(filePath)) {
          fs.unlinkSync(filePath);
        }
      }

      db.run('DELETE FROM playlists WHERE id = ?', [playlistId], (deleteErr) => {
        if (deleteErr) {
          logger.error('Error deleting playlist:', deleteErr);
          return res.status(500).json({ error: 'Failed to delete playlist' });
        }
        res.json({ message: 'Playlist deleted successfully' });
      });
    }
  );
});
// Bulk delete playlists
// Deletes the given ids, scoped to the requesting user; reports how many
// rows were actually removed.
router.post('/bulk-delete', authenticate, modifyLimiter, validateBulkDelete, (req, res) => {
  const { ids } = req.body;
  // One positional placeholder per id.
  const placeholders = Array.from(ids, () => '?').join(',');
  db.run(
    `DELETE FROM playlists WHERE id IN (${placeholders}) AND user_id = ?`,
    [...ids, req.user.userId],
    function(deleteErr) {
      if (deleteErr) {
        logger.error('Error bulk deleting playlists:', deleteErr);
        return res.status(500).json({ error: 'Failed to delete playlists' });
      }
      res.json({ message: 'Playlists deleted successfully', deleted: this.changes });
    }
  );
});
// Rename playlist
// Validation for PATCH /:id — positive-integer id param, validated name body.
const validatePlaylistRename = createValidationMiddleware({
  params: {
    id: (value) => {
      const parsed = parseInt(value, 10);
      return isNaN(parsed) || parsed < 1
        ? { valid: false, errors: ['Invalid playlist ID'], sanitized: null }
        : { valid: true, errors: [], sanitized: parsed };
    }
  },
  body: {
    name: validatePlaylistName
  }
});
// Renames a playlist owned by the requesting user.
router.patch('/:id', authenticate, modifyLimiter, validatePlaylistRename, (req, res) => {
  const { name } = req.body;
  const playlistId = req.params.id;

  db.run(
    'UPDATE playlists SET name = ?, updated_at = CURRENT_TIMESTAMP WHERE id = ? AND user_id = ?',
    [name, playlistId, req.user.userId],
    function(updateErr) {
      if (updateErr) {
        logger.error('Error renaming playlist:', updateErr);
        return res.status(500).json({ error: 'Failed to rename playlist' });
      }
      // Zero affected rows: playlist missing or not owned by this user.
      if (this.changes === 0) {
        return res.status(404).json({ error: 'Playlist not found' });
      }
      res.json({ message: 'Playlist renamed successfully' });
    }
  );
});

module.exports = router;

View file

@ -0,0 +1,9 @@
const express = require('express');
const router = express.Router();
const { authenticate } = require('../middleware/auth');

// Placeholder profiles endpoint; requires an authenticated user.
router.get('/', authenticate, (_req, res) => res.json({ message: 'Profiles endpoint' }));

module.exports = router;

9
backend/routes/radio.js Normal file
View file

@ -0,0 +1,9 @@
const express = require('express');
const router = express.Router();
const { authenticate } = require('../middleware/auth');

// Placeholder radio endpoint; requires an authenticated user.
router.get('/', authenticate, (_req, res) => res.json({ message: 'Radio endpoint' }));

module.exports = router;

620
backend/routes/rbac.js Normal file
View file

@ -0,0 +1,620 @@
const express = require('express');
const router = express.Router();
const { body, validationResult } = require('express-validator');
const { authenticate, requireAdmin } = require('../middleware/auth');
const { requirePermission, requireAllPermissions, PERMISSIONS, DEFAULT_ROLES, clearAllPermissionCache, clearUserPermissionCache, logPermissionAction, getUserPermissions } = require('../middleware/rbac');
const { modifyLimiter, readLimiter } = require('../middleware/rateLimiter');
const { db } = require('../database/db');
const logger = require('../utils/logger');
const SecurityAuditLogger = require('../utils/securityAudit');
/**
 * Get all available permissions
 * Returns the complete permission catalog plus the keys grouped into
 * the UI's category buckets (by permission-key prefix).
 */
router.get('/permissions', authenticate, requirePermission('users.manage_roles'), readLimiter, (req, res) => {
  // All permission keys matching any of the given prefixes.
  const byPrefix = (...prefixes) =>
    Object.keys(PERMISSIONS).filter((key) => prefixes.some((prefix) => key.startsWith(prefix)));

  res.json({
    permissions: Object.entries(PERMISSIONS).map(([key, description]) => ({
      key,
      description
    })),
    categories: {
      'User Management': byPrefix('users.'),
      'Session Management': byPrefix('sessions.'),
      'Content Management': byPrefix('playlists.', 'channels.', 'favorites.', 'history.'),
      'System & Settings': byPrefix('settings.', 'stats.', 'backup.'),
      'Security Management': byPrefix('security.'),
      'Search & Discovery': byPrefix('search.'),
      'VPN & Network': byPrefix('vpn.')
    }
  });
});
/**
 * Get all roles
 *
 * Returns every role (system roles first) with its permissions array parsed
 * from the stored JSON.
 * Fix: a corrupt `permissions` value no longer throws inside the sqlite
 * callback (an uncaught exception there failed the whole request); it now
 * degrades to an empty array and is logged.
 */
router.get('/roles', authenticate, requirePermission('users.view'), readLimiter, (req, res) => {
  db.all(
    `SELECT id, role_key, name, description, permissions, is_system_role, created_at, updated_at
     FROM roles
     ORDER BY is_system_role DESC, name ASC`,
    [],
    (err, roles) => {
      if (err) {
        logger.error('Error fetching roles:', err);
        return res.status(500).json({ error: 'Failed to fetch roles' });
      }
      // Parse permissions JSON, tolerating bad rows.
      const rolesWithParsedPermissions = roles.map(role => {
        let permissions = [];
        try {
          permissions = JSON.parse(role.permissions || '[]');
        } catch (parseErr) {
          logger.error(`Invalid permissions JSON for role ${role.role_key}:`, parseErr);
        }
        return {
          ...role,
          permissions,
          is_system_role: Boolean(role.is_system_role)
        };
      });
      res.json(rolesWithParsedPermissions);
    }
  );
});
/**
 * Get single role by key
 *
 * Returns the role row with its permissions array parsed from stored JSON.
 * Fix: corrupt permissions JSON no longer throws inside the sqlite callback
 * (which failed the request); it now degrades to an empty array and is logged.
 */
router.get('/roles/:roleKey', authenticate, requirePermission('users.view'), readLimiter, (req, res) => {
  const { roleKey } = req.params;
  db.get(
    `SELECT id, role_key, name, description, permissions, is_system_role, created_at, updated_at
     FROM roles WHERE role_key = ?`,
    [roleKey],
    (err, role) => {
      if (err) {
        logger.error('Error fetching role:', err);
        return res.status(500).json({ error: 'Failed to fetch role' });
      }
      if (!role) {
        return res.status(404).json({ error: 'Role not found' });
      }
      let permissions = [];
      try {
        permissions = JSON.parse(role.permissions || '[]');
      } catch (parseErr) {
        logger.error(`Invalid permissions JSON for role ${roleKey}:`, parseErr);
      }
      res.json({
        ...role,
        permissions,
        is_system_role: Boolean(role.is_system_role)
      });
    }
  );
});
/**
 * Create custom role
 * Only admins with users.manage_roles permission
 *
 * Requires BOTH users.manage_roles and users.create. The role key must be
 * unique and lowercase-with-underscores; every requested permission must
 * exist in the PERMISSIONS catalog. New roles are always non-system
 * (is_system_role = 0). The creation is written to the permission audit log.
 */
router.post('/roles',
  authenticate,
  requireAllPermissions(['users.manage_roles', 'users.create']),
  modifyLimiter,
  [
    // express-validator rules; failures return 400 with per-field details.
    body('role_key').trim().isLength({ min: 2, max: 50 }).matches(/^[a-z_]+$/).withMessage('Role key must be lowercase with underscores only'),
    body('name').trim().isLength({ min: 2, max: 100 }),
    body('description').optional().trim().isLength({ max: 500 }),
    body('permissions').isArray().withMessage('Permissions must be an array'),
    body('permissions.*').isString().isIn(Object.keys(PERMISSIONS)).withMessage('Invalid permission')
  ],
  async (req, res) => {
    const errors = validationResult(req);
    if (!errors.isEmpty()) {
      return res.status(400).json({ errors: errors.array() });
    }
    const { role_key, name, description, permissions } = req.body;
    try {
      // Check if role key already exists
      db.get('SELECT id FROM roles WHERE role_key = ?', [role_key], (err, existing) => {
        if (err) {
          logger.error('Error checking role existence:', err);
          return res.status(500).json({ error: 'Failed to create role' });
        }
        if (existing) {
          return res.status(409).json({ error: 'Role key already exists' });
        }
        // Create new role
        db.run(
          `INSERT INTO roles (role_key, name, description, permissions, is_system_role)
           VALUES (?, ?, ?, ?, 0)`,
          [role_key, name, description || '', JSON.stringify(permissions)],
          function(err) {
            if (err) {
              logger.error('Error creating role:', err);
              return res.status(500).json({ error: 'Failed to create role' });
            }
            // Log action (audit trail: actor, action, entity, new values)
            logPermissionAction(
              req.user.userId,
              'role_created',
              'role',
              this.lastID,
              null,
              { role_key, name, permissions },
              req
            );
            logger.info(`Role created: ${role_key} by user ${req.user.userId}`);
            // Fetch and return the created role
            db.get(
              'SELECT id, role_key, name, description, permissions, is_system_role, created_at FROM roles WHERE id = ?',
              [this.lastID],
              (err, role) => {
                if (err) {
                  // The insert succeeded; only the echo-back failed.
                  return res.status(500).json({ error: 'Role created but failed to fetch details' });
                }
                res.status(201).json({
                  ...role,
                  permissions: JSON.parse(role.permissions),
                  is_system_role: Boolean(role.is_system_role)
                });
              }
            );
          }
        );
      });
    } catch (error) {
      logger.error('Role creation error:', error);
      res.status(500).json({ error: 'Failed to create role' });
    }
  }
);
/**
 * Update role permissions
 * Cannot modify system roles
 *
 * Partially updates name/description/permissions of a custom role, records
 * the change in the permission audit log with before/after values, and
 * clears the whole permission cache (role permissions affect every user
 * holding the role).
 */
router.patch('/roles/:roleKey',
  authenticate,
  requirePermission('users.manage_roles'),
  modifyLimiter,
  [
    // All fields optional — this is a partial update.
    body('name').optional().trim().isLength({ min: 2, max: 100 }),
    body('description').optional().trim().isLength({ max: 500 }),
    body('permissions').optional().isArray(),
    body('permissions.*').optional().isString().isIn(Object.keys(PERMISSIONS))
  ],
  async (req, res) => {
    const errors = validationResult(req);
    if (!errors.isEmpty()) {
      return res.status(400).json({ errors: errors.array() });
    }
    const { roleKey } = req.params;
    const { name, description, permissions } = req.body;
    try {
      // Check if role exists and is not a system role
      db.get('SELECT * FROM roles WHERE role_key = ?', [roleKey], (err, role) => {
        if (err) {
          logger.error('Error fetching role:', err);
          return res.status(500).json({ error: 'Failed to update role' });
        }
        if (!role) {
          return res.status(404).json({ error: 'Role not found' });
        }
        if (role.is_system_role) {
          return res.status(403).json({ error: 'Cannot modify system roles' });
        }
        // Build update query from only the fields that were supplied.
        const updates = [];
        const params = [];
        if (name !== undefined) {
          updates.push('name = ?');
          params.push(name);
        }
        if (description !== undefined) {
          updates.push('description = ?');
          params.push(description);
        }
        if (permissions !== undefined) {
          updates.push('permissions = ?');
          params.push(JSON.stringify(permissions));
        }
        if (updates.length === 0) {
          return res.status(400).json({ error: 'No fields to update' });
        }
        updates.push('updated_at = CURRENT_TIMESTAMP');
        params.push(roleKey);
        // Update role (column list is built from fixed strings above, so the
        // interpolation is not user-controlled)
        db.run(
          `UPDATE roles SET ${updates.join(', ')} WHERE role_key = ?`,
          params,
          function(err) {
            if (err) {
              logger.error('Error updating role:', err);
              return res.status(500).json({ error: 'Failed to update role' });
            }
            if (this.changes === 0) {
              return res.status(404).json({ error: 'Role not found' });
            }
            // Log action with before/after snapshots.
            // NOTE(review): JSON.parse(role.permissions) here is unguarded —
            // a corrupt stored value would throw after the UPDATE already
            // committed; confirm and consider a safe parse.
            logPermissionAction(
              req.user.userId,
              'role_updated',
              'role',
              role.id,
              { name: role.name, description: role.description, permissions: JSON.parse(role.permissions) },
              { name, description, permissions },
              req
            );
            // Clear permission cache as role permissions changed
            clearAllPermissionCache();
            logger.info(`Role updated: ${roleKey} by user ${req.user.userId}`);
            // Fetch and return updated role
            db.get(
              'SELECT id, role_key, name, description, permissions, is_system_role, updated_at FROM roles WHERE role_key = ?',
              [roleKey],
              (err, updatedRole) => {
                if (err) {
                  // The update succeeded; only the echo-back failed.
                  return res.status(500).json({ error: 'Role updated but failed to fetch details' });
                }
                res.json({
                  ...updatedRole,
                  permissions: JSON.parse(updatedRole.permissions),
                  is_system_role: Boolean(updatedRole.is_system_role)
                });
              }
            );
          }
        );
      });
    } catch (error) {
      logger.error('Role update error:', error);
      res.status(500).json({ error: 'Failed to update role' });
    }
  }
);
/**
 * DELETE /roles/:roleKey — delete a custom role.
 * Requires the users.manage_roles permission. System roles and roles that
 * are still assigned to at least one user are refused. Successful deletions
 * are written to the permission audit log.
 * NOTE(review): the outer try/catch cannot intercept errors raised inside
 * the sqlite callbacks — each callback handles its own error explicitly.
 */
router.delete('/roles/:roleKey',
  authenticate,
  requirePermission('users.manage_roles'),
  modifyLimiter,
  async (req, res) => {
    const { roleKey } = req.params;
    try {
      // Check if role exists
      db.get('SELECT * FROM roles WHERE role_key = ?', [roleKey], (err, role) => {
        if (err) {
          logger.error('Error fetching role:', err);
          return res.status(500).json({ error: 'Failed to delete role' });
        }
        if (!role) {
          return res.status(404).json({ error: 'Role not found' });
        }
        // Built-in roles are protected from deletion.
        if (role.is_system_role) {
          return res.status(403).json({ error: 'Cannot delete system roles' });
        }
        // Check if role is assigned to any users; deleting an in-use role
        // would leave those accounts with a dangling role key.
        db.get('SELECT COUNT(*) as count FROM users WHERE role = ?', [roleKey], (err, result) => {
          if (err) {
            logger.error('Error checking role usage:', err);
            return res.status(500).json({ error: 'Failed to delete role' });
          }
          if (result.count > 0) {
            // 409 Conflict: caller must reassign those users first.
            return res.status(409).json({
              error: 'Cannot delete role that is assigned to users',
              users_count: result.count
            });
          }
          // Delete role
          db.run('DELETE FROM roles WHERE role_key = ?', [roleKey], function(err) {
            if (err) {
              logger.error('Error deleting role:', err);
              return res.status(500).json({ error: 'Failed to delete role' });
            }
            // Log action (old value captured before deletion, no new value).
            logPermissionAction(
              req.user.userId,
              'role_deleted',
              'role',
              role.id,
              { role_key: roleKey, name: role.name },
              null,
              req
            );
            logger.info(`Role deleted: ${roleKey} by user ${req.user.userId}`);
            res.json({ message: 'Role deleted successfully' });
          });
        });
      });
    } catch (error) {
      logger.error('Role deletion error:', error);
      res.status(500).json({ error: 'Failed to delete role' });
    }
  }
);
/**
 * GET /my-permissions — return the calling user's effective permissions
 * together with their role metadata (role key, display name, description).
 * Each permission is also expanded into a { key, description } pair.
 */
router.get('/my-permissions', authenticate, readLimiter, async (req, res) => {
  try {
    const grantedPermissions = await getUserPermissions(req.user.userId);
    const roleQuery =
      'SELECT u.role, r.name as role_name, r.description as role_description FROM users u LEFT JOIN roles r ON u.role = r.role_key WHERE u.id = ?';
    db.get(roleQuery, [req.user.userId], (dbErr, row) => {
      if (dbErr) {
        logger.error('Error fetching role info:', dbErr);
        return res.status(500).json({ error: 'Failed to fetch permissions' });
      }
      // Expand each permission key with its human-readable description.
      const permissionDetails = grantedPermissions.map((key) => ({
        key,
        description: PERMISSIONS[key] || 'Unknown permission'
      }));
      res.json({
        role: row?.role || 'unknown',
        role_name: row?.role_name || 'Unknown',
        role_description: row?.role_description || '',
        permissions: grantedPermissions,
        permission_details: permissionDetails
      });
    });
  } catch (error) {
    logger.error('Error fetching user permissions:', error);
    res.status(500).json({ error: 'Failed to fetch permissions' });
  }
});
/**
 * POST /users/:userId/role — assign an existing role to a user.
 * Requires users.manage_roles. Self-modification is blocked so an admin
 * cannot escalate or demote their own account. On success the target
 * user's permission cache is invalidated and the change is recorded in
 * both the security audit log and the permission audit log.
 */
router.post('/users/:userId/role',
  authenticate,
  requirePermission('users.manage_roles'),
  modifyLimiter,
  [
    body('role').trim().notEmpty().withMessage('Role is required')
  ],
  async (req, res) => {
    const errors = validationResult(req);
    if (!errors.isEmpty()) {
      return res.status(400).json({ errors: errors.array() });
    }
    const { userId } = req.params;
    const { role } = req.body;
    try {
      // Check if role exists
      db.get('SELECT role_key FROM roles WHERE role_key = ?', [role], (err, roleExists) => {
        if (err) {
          logger.error('Error checking role:', err);
          return res.status(500).json({ error: 'Failed to assign role' });
        }
        if (!roleExists) {
          return res.status(404).json({ error: 'Role not found' });
        }
        // Check if user exists
        db.get('SELECT id, username, role FROM users WHERE id = ?', [userId], (err, user) => {
          if (err) {
            logger.error('Error fetching user:', err);
            return res.status(500).json({ error: 'Failed to assign role' });
          }
          if (!user) {
            return res.status(404).json({ error: 'User not found' });
          }
          // Prevent modifying own role
          if (parseInt(userId) === req.user.userId) {
            return res.status(403).json({ error: 'Cannot modify your own role' });
          }
          // Remember the previous role for the audit trail and the response.
          const oldRole = user.role;
          // Update user role
          db.run(
            'UPDATE users SET role = ?, updated_at = CURRENT_TIMESTAMP WHERE id = ?',
            [role, userId],
            async function(err) {
              if (err) {
                logger.error('Error updating user role:', err);
                return res.status(500).json({ error: 'Failed to assign role' });
              }
              // Invalidate cached permissions so the new role takes effect
              // on the target's next permission check.
              clearUserPermissionCache(parseInt(userId));
              const ip = req.ip || req.headers['x-forwarded-for'] || req.connection.remoteAddress;
              const userAgent = req.headers['user-agent'];
              // CWE-778: Log comprehensive privilege change
              await SecurityAuditLogger.logPrivilegeChange(parseInt(userId), 'role_change', {
                ip,
                userAgent,
                previousRole: oldRole,
                newRole: role,
                changedBy: req.user.userId,
                changedByUsername: req.user.username || 'system',
                targetUsername: user.username
              });
              // Log action
              logPermissionAction(
                req.user.userId,
                'role_assigned',
                'user',
                parseInt(userId),
                { role: oldRole },
                { role },
                req
              );
              logger.info(`Role assigned: ${role} to user ${userId} by ${req.user.userId}`);
              res.json({
                message: 'Role assigned successfully',
                user_id: userId,
                old_role: oldRole,
                new_role: role
              });
            }
          );
        });
      });
    } catch (error) {
      logger.error('Role assignment error:', error);
      res.status(500).json({ error: 'Failed to assign role' });
    }
  }
);
/**
 * GET /audit-log — fetch the permission audit log.
 * Requires security.view_audit. Optional equality filters: userId, action,
 * targetType. Pagination via limit (1–1000, default 100) and offset (>= 0,
 * default 0); both are sanitized here so a malformed value can neither
 * reach the SQL layer as NaN nor dump an unbounded result set.
 */
router.get('/audit-log',
  authenticate,
  requirePermission('security.view_audit'),
  readLimiter,
  async (req, res) => {
    const { limit = 100, offset = 0, userId, action, targetType } = req.query;
    // Explicit radix and NaN fallbacks; clamp to the same 1..1000 bound the
    // rest of the backend uses for pagination.
    const safeLimit = Math.min(Math.max(Number.parseInt(limit, 10) || 100, 1), 1000);
    const safeOffset = Math.max(Number.parseInt(offset, 10) || 0, 0);
    // Rows are written by logPermissionAction with JSON.stringify, but guard
    // the parse anyway so one corrupt row cannot fail the whole request.
    const safeParse = (value) => {
      if (!value) return null;
      try {
        return JSON.parse(value);
      } catch (parseErr) {
        logger.error('Malformed JSON in permission_audit_log row:', parseErr);
        return null;
      }
    };
    try {
      let query = `
      SELECT pal.*, u.username
      FROM permission_audit_log pal
      JOIN users u ON pal.user_id = u.id
      WHERE 1=1
    `;
      const params = [];
      if (userId) {
        query += ' AND pal.user_id = ?';
        params.push(userId);
      }
      if (action) {
        query += ' AND pal.action = ?';
        params.push(action);
      }
      if (targetType) {
        query += ' AND pal.target_type = ?';
        params.push(targetType);
      }
      query += ' ORDER BY pal.created_at DESC LIMIT ? OFFSET ?';
      params.push(safeLimit, safeOffset);
      db.all(query, params, (err, logs) => {
        if (err) {
          logger.error('Error fetching audit log:', err);
          return res.status(500).json({ error: 'Failed to fetch audit log' });
        }
        // Parse JSON fields
        const parsedLogs = logs.map(log => ({
          ...log,
          old_value: safeParse(log.old_value),
          new_value: safeParse(log.new_value)
        }));
        res.json({ logs: parsedLogs, limit: safeLimit, offset: safeOffset });
      });
    } catch (error) {
      logger.error('Audit log fetch error:', error);
      res.status(500).json({ error: 'Failed to fetch audit log' });
    }
  }
);
/**
 * GET /stats — permission usage statistics.
 * Returns the per-role user distribution, counts of audit-log actions in
 * the last 30 days, and overall permission/role totals.
 * Requires security.view_audit.
 */
router.get('/stats',
  authenticate,
  requirePermission('security.view_audit'),
  readLimiter,
  async (req, res) => {
    try {
      const roleDistributionSql = `SELECT r.name, r.role_key, COUNT(u.id) as user_count
       FROM roles r
       LEFT JOIN users u ON r.role_key = u.role
       GROUP BY r.role_key
       ORDER BY user_count DESC`;
      db.all(roleDistributionSql, [], (roleErr, roleRows) => {
        if (roleErr) {
          logger.error('Error fetching role stats:', roleErr);
          return res.status(500).json({ error: 'Failed to fetch statistics' });
        }
        const recentActionsSql = `SELECT action, COUNT(*) as count
       FROM permission_audit_log
       WHERE created_at >= datetime('now', '-30 days')
       GROUP BY action
       ORDER BY count DESC`;
        db.all(recentActionsSql, [], (actionErr, actionRows) => {
          if (actionErr) {
            logger.error('Error fetching action stats:', actionErr);
            return res.status(500).json({ error: 'Failed to fetch statistics' });
          }
          res.json({
            role_distribution: roleRows,
            recent_actions: actionRows,
            total_permissions: Object.keys(PERMISSIONS).length,
            total_roles: roleRows.length
          });
        });
      });
    } catch (error) {
      logger.error('Stats fetch error:', error);
      res.status(500).json({ error: 'Failed to fetch statistics' });
    }
  }
);

module.exports = router;

View file

@ -0,0 +1,9 @@
const express = require('express');
const { authenticate } = require('../middleware/auth');

const router = express.Router();

// Placeholder route: replies with a static payload for authenticated users.
router.get('/', authenticate, (_req, res) => {
  res.json({ message: 'Recordings endpoint' });
});

module.exports = router;

139
backend/routes/search.js Normal file
View file

@ -0,0 +1,139 @@
const express = require('express');
const router = express.Router();
const { db } = require('../database/db');
const { authenticate, requireAdmin } = require('../middleware/auth');
const { readLimiter } = require('../middleware/rateLimiter');
const { sanitizeString } = require('../utils/inputValidator');
const logger = require('../utils/logger');
/**
 * Global search endpoint.
 * Searches the caller's own playlists (TV channels, radio stations, groups),
 * the static settings/pages list, and — for admins only — user accounts.
 * Returns { channels, radio, users, settings, groups }, each list capped.
 *
 * Fixes vs. original: uses the shared logger instead of console.error
 * (backend convention / ESLint no-console), filters settings against the
 * sanitized query like every other branch, and runs the three independent
 * playlist-scoped queries in parallel.
 */
router.get('/', authenticate, readLimiter, async (req, res) => {
  try {
    const { q } = req.query;
    const isAdmin = req.user.role === 'admin';
    const emptyResults = {
      channels: [],
      radio: [],
      users: [],
      settings: [],
      groups: []
    };
    // Queries shorter than 2 characters return an empty (not error) result.
    if (!q || q.trim().length < 2) {
      return res.json(emptyResults);
    }
    // Validate and sanitize search query before building the LIKE pattern.
    const sanitized = sanitizeString(q.trim());
    if (sanitized.length > 100) {
      return res.status(400).json({ error: 'Search query too long' });
    }
    const searchTerm = `%${sanitized}%`;
    // Promisified wrapper around the callback-style sqlite3 API.
    const queryAll = (sql, params) =>
      new Promise((resolve, reject) => {
        db.all(sql, params, (err, rows) => (err ? reject(err) : resolve(rows || [])));
      });
    const results = { ...emptyResults };
    // The three playlist-scoped searches are independent — run in parallel.
    [results.channels, results.radio, results.groups] = await Promise.all([
      // TV channels (only from the caller's playlists)
      queryAll(
        `SELECT DISTINCT c.id, c.name, c.url, COALESCE(c.custom_logo, c.logo) as logo, c.group_name, c.is_radio
       FROM channels c
       JOIN playlists p ON c.playlist_id = p.id
       WHERE p.user_id = ? AND c.is_radio = 0 AND c.is_active = 1
       AND (c.name LIKE ? OR c.group_name LIKE ?)
       ORDER BY c.name
       LIMIT 20`,
        [req.user.userId, searchTerm, searchTerm]
      ),
      // Radio channels (only from the caller's playlists)
      queryAll(
        `SELECT DISTINCT c.id, c.name, c.url, COALESCE(c.custom_logo, c.logo) as logo, c.group_name, c.is_radio
       FROM channels c
       JOIN playlists p ON c.playlist_id = p.id
       WHERE p.user_id = ? AND c.is_radio = 1 AND c.is_active = 1
       AND (c.name LIKE ? OR c.group_name LIKE ?)
       ORDER BY c.name
       LIMIT 20`,
        [req.user.userId, searchTerm, searchTerm]
      ),
      // Group names (only from the caller's playlists)
      queryAll(
        `SELECT DISTINCT c.group_name as name, c.is_radio
       FROM channels c
       JOIN playlists p ON c.playlist_id = p.id
       WHERE p.user_id = ? AND c.is_active = 1
       AND c.group_name LIKE ?
       ORDER BY c.group_name
       LIMIT 10`,
        [req.user.userId, searchTerm]
      )
    ]);
    // User accounts are only searchable by admins.
    if (isAdmin) {
      results.users = await queryAll(
        `SELECT id, username, email, role, created_at
       FROM users
       WHERE username LIKE ? OR email LIKE ?
       ORDER BY username
       LIMIT 10`,
        [searchTerm, searchTerm]
      );
    }
    // Static settings/pages results.
    const settingsOptions = [
      { id: 'settings', name: 'Settings', path: '/settings', icon: 'settings' },
      { id: 'user-management', name: 'User Management', path: '/settings?tab=users', icon: 'people' },
      { id: 'vpn-settings', name: 'VPN Settings', path: '/settings?tab=vpn', icon: 'vpn_lock' },
      { id: '2fa', name: 'Two-Factor Authentication', path: '/settings?tab=2fa', icon: 'security' },
      { id: 'live-tv', name: 'Live TV', path: '/live', icon: 'tv' },
      { id: 'radio', name: 'Radio', path: '/radio', icon: 'radio' },
      { id: 'movies', name: 'Movies', path: '/movies', icon: 'movie' },
      { id: 'series', name: 'Series', path: '/series', icon: 'subscriptions' },
      { id: 'favorites', name: 'Favorites', path: '/favorites', icon: 'favorite' },
    ];
    // Match against the sanitized query, consistent with the SQL branches.
    const needle = sanitized.toLowerCase();
    results.settings = settingsOptions.filter((option) =>
      option.name.toLowerCase().includes(needle)
    );
    res.json(results);
  } catch (error) {
    logger.error('Search error:', error);
    res.status(500).json({ error: 'Search failed' });
  }
});

module.exports = router;

View file

@ -0,0 +1,733 @@
/**
* Security Configuration API Routes
* Manage thresholds, risk signatures, and response protocols
* Admin-only endpoints for security configuration
*/
const express = require('express');
const router = express.Router();
const { authenticate } = require('../middleware/auth');
const { requirePermission } = require('../middleware/rbac');
const logger = require('../utils/logger');
const thresholdManager = require('../utils/thresholdManager');
const riskSignatureManager = require('../utils/riskSignatureManager');
const responseProtocolManager = require('../utils/responseProtocolManager');
// Validation middleware

/**
 * Clamp `req.query.limit` to the inclusive range [1, 1000].
 * Missing or non-numeric values fall back to 100, then always call next().
 */
const validatePagination = (req, res, next) => {
  // Explicit radix 10: without it, values like '0x10' would parse as hex.
  const limit = Number.parseInt(req.query.limit, 10) || 100;
  req.query.limit = Math.min(Math.max(limit, 1), 1000);
  next();
};
/**
 * Reject requests whose `:id` route parameter is missing, empty, or not a
 * string: respond 400 with a JSON error. Otherwise defer to next().
 */
const validateIdParam = (req, res, next) => {
  const { id } = req.params;
  const isValid = typeof id === 'string' && id.length > 0;
  if (!isValid) {
    return res.status(400).json({
      success: false,
      message: 'Invalid ID parameter'
    });
  }
  next();
};
// ===========================
// THRESHOLD MANAGEMENT ROUTES
// ===========================

/**
 * GET /api/security-config/thresholds
 * List thresholds (optionally filtered by pattern_type / enabled) along
 * with aggregate threshold statistics. Requires security.manage.
 */
router.get('/thresholds',
  authenticate,
  requirePermission('security.manage'),
  validatePagination,
  async (req, res) => {
    try {
      const { pattern_type: patternType, enabled, limit } = req.query;
      const filters = {
        patternType,
        // Tri-state: undefined means "no filter"; otherwise compare string.
        enabled: enabled !== undefined ? enabled === 'true' : undefined,
        limit
      };
      const thresholds = await thresholdManager.getThresholds(filters);
      const stats = await thresholdManager.getStatistics();
      res.json({
        success: true,
        data: thresholds,
        statistics: stats,
        count: thresholds.length
      });
    } catch (error) {
      logger.error('[SecurityConfig API] Error getting thresholds:', error);
      res.status(500).json({
        success: false,
        message: 'Failed to get thresholds',
        error: error.message
      });
    }
  }
);
/**
 * GET /api/security-config/thresholds/:id
 * Fetch a single threshold; 404 when it does not exist.
 */
router.get('/thresholds/:id',
  authenticate,
  requirePermission('security.manage'),
  validateIdParam,
  async (req, res) => {
    try {
      const found = await thresholdManager.getThresholdById(req.params.id);
      if (!found) {
        res.status(404).json({
          success: false,
          message: 'Threshold not found'
        });
        return;
      }
      res.json({ success: true, data: found });
    } catch (error) {
      logger.error('[SecurityConfig API] Error getting threshold:', error);
      res.status(500).json({
        success: false,
        message: 'Failed to get threshold',
        error: error.message
      });
    }
  }
);
/**
 * POST /api/security-config/thresholds
 * Create a new threshold. Requires security.manage.
 * Validates enum fields (operator, severity) and — fix vs. original —
 * rejects non-numeric threshold_value / time_window_minutes instead of
 * silently storing NaN.
 */
router.post('/thresholds',
  authenticate,
  requirePermission('security.manage'),
  async (req, res) => {
    try {
      const { name, description, pattern_type, metric_name, operator, threshold_value, time_window_minutes, severity, enabled } = req.body;
      // Validation
      if (!name || !pattern_type || !metric_name || !operator || threshold_value === undefined || !severity) {
        return res.status(400).json({
          success: false,
          message: 'Missing required fields: name, pattern_type, metric_name, operator, threshold_value, severity'
        });
      }
      const validOperators = ['>=', '>', '<=', '<', '==', '!='];
      if (!validOperators.includes(operator)) {
        return res.status(400).json({
          success: false,
          message: 'Invalid operator. Must be one of: ' + validOperators.join(', ')
        });
      }
      const validSeverities = ['low', 'medium', 'high', 'critical'];
      if (!validSeverities.includes(severity)) {
        return res.status(400).json({
          success: false,
          message: 'Invalid severity. Must be one of: ' + validSeverities.join(', ')
        });
      }
      // Explicit radix; reject NaN rather than persisting it.
      const numericThreshold = Number.parseInt(threshold_value, 10);
      if (Number.isNaN(numericThreshold)) {
        return res.status(400).json({
          success: false,
          message: 'threshold_value must be a number'
        });
      }
      // Truthy check preserved: 0/absent falls back to the 30-minute default.
      const windowMinutes = time_window_minutes ? Number.parseInt(time_window_minutes, 10) : 30;
      if (Number.isNaN(windowMinutes)) {
        return res.status(400).json({
          success: false,
          message: 'time_window_minutes must be a number'
        });
      }
      const result = await thresholdManager.createThreshold({
        name,
        description,
        pattern_type,
        metric_name,
        operator,
        threshold_value: numericThreshold,
        time_window_minutes: windowMinutes,
        severity,
        enabled
      }, req.user.id);
      res.status(201).json({
        success: true,
        message: 'Threshold created successfully',
        data: result
      });
    } catch (error) {
      logger.error('[SecurityConfig API] Error creating threshold:', error);
      res.status(500).json({
        success: false,
        message: 'Failed to create threshold',
        error: error.message
      });
    }
  }
);
/**
 * PUT /api/security-config/thresholds/:id
 * Partially update a threshold. Only whitelisted fields are applied;
 * unknown keys in the body are silently ignored. Requires security.manage.
 */
router.put('/thresholds/:id',
  authenticate,
  requirePermission('security.manage'),
  validateIdParam,
  async (req, res) => {
    try {
      const updates = {};
      // Field whitelist guards against mass-assignment of unintended columns.
      const allowedFields = ['name', 'description', 'operator', 'threshold_value', 'time_window_minutes', 'severity', 'enabled'];
      for (const field of allowedFields) {
        if (req.body[field] !== undefined) {
          updates[field] = req.body[field];
        }
      }
      if (Object.keys(updates).length === 0) {
        return res.status(400).json({
          success: false,
          message: 'No valid fields to update'
        });
      }
      // Acting user id is passed through for the manager's audit trail.
      await thresholdManager.updateThreshold(req.params.id, updates, req.user.id);
      res.json({
        success: true,
        message: 'Threshold updated successfully'
      });
    } catch (error) {
      logger.error('[SecurityConfig API] Error updating threshold:', error);
      res.status(500).json({
        success: false,
        message: 'Failed to update threshold',
        error: error.message
      });
    }
  }
);
/**
 * DELETE /api/security-config/thresholds/:id
 * Remove a threshold; the acting user id is recorded for auditing.
 */
router.delete('/thresholds/:id',
  authenticate,
  requirePermission('security.manage'),
  validateIdParam,
  async (req, res) => {
    const { id } = req.params;
    try {
      await thresholdManager.deleteThreshold(id, req.user.id);
      res.json({ success: true, message: 'Threshold deleted successfully' });
    } catch (error) {
      logger.error('[SecurityConfig API] Error deleting threshold:', error);
      res.status(500).json({
        success: false,
        message: 'Failed to delete threshold',
        error: error.message
      });
    }
  }
);
// ===========================
// RISK SIGNATURE ROUTES
// ===========================

/**
 * GET /api/security-config/signatures
 * List risk signatures with aggregate statistics. Optional filters:
 * signature_type, threat_level, enabled ('true'/'false'); limit is
 * pre-clamped by validatePagination. Requires security.manage.
 */
router.get('/signatures',
  authenticate,
  requirePermission('security.manage'),
  validatePagination,
  async (req, res) => {
    try {
      const filters = {
        signatureType: req.query.signature_type,
        threatLevel: req.query.threat_level,
        // Tri-state: undefined means "no filter", otherwise compare string.
        enabled: req.query.enabled !== undefined ? req.query.enabled === 'true' : undefined,
        limit: req.query.limit
      };
      const signatures = await riskSignatureManager.getSignatures(filters);
      const stats = await riskSignatureManager.getStatistics();
      res.json({
        success: true,
        data: signatures,
        statistics: stats,
        count: signatures.length
      });
    } catch (error) {
      logger.error('[SecurityConfig API] Error getting signatures:', error);
      // NOTE(review): error.message is echoed to the client across this
      // file; confirm it never carries sensitive internals.
      res.status(500).json({
        success: false,
        message: 'Failed to get signatures',
        error: error.message
      });
    }
  }
);
/**
 * GET /api/security-config/signatures/:id
 * Fetch a single risk signature; 404 when it does not exist.
 * Requires security.manage.
 */
router.get('/signatures/:id',
  authenticate,
  requirePermission('security.manage'),
  validateIdParam,
  async (req, res) => {
    try {
      const signature = await riskSignatureManager.getSignatureById(req.params.id);
      if (!signature) {
        return res.status(404).json({
          success: false,
          message: 'Signature not found'
        });
      }
      res.json({
        success: true,
        data: signature
      });
    } catch (error) {
      logger.error('[SecurityConfig API] Error getting signature:', error);
      res.status(500).json({
        success: false,
        message: 'Failed to get signature',
        error: error.message
      });
    }
  }
);
/**
 * POST /api/security-config/signatures
 * Create a new risk signature. Requires security.manage.
 * Validates enum fields (match_type, threat_level) and — fix vs. original —
 * rejects a non-numeric or out-of-range confidence instead of storing NaN.
 *
 * SECURITY NOTE: for regex match types, `pattern` is an admin-supplied
 * regular expression that will be evaluated later; a pathological pattern
 * can cause catastrophic backtracking (ReDoS). Consider validating pattern
 * complexity in riskSignatureManager before use.
 */
router.post('/signatures',
  authenticate,
  requirePermission('security.manage'),
  async (req, res) => {
    try {
      const { name, description, signature_type, pattern, match_type, threat_level, confidence, enabled, auto_block } = req.body;
      // Validation
      if (!name || !signature_type || !pattern || !match_type || !threat_level) {
        return res.status(400).json({
          success: false,
          message: 'Missing required fields: name, signature_type, pattern, match_type, threat_level'
        });
      }
      const validMatchTypes = ['regex', 'regex_case_insensitive', 'exact', 'contains', 'custom'];
      if (!validMatchTypes.includes(match_type)) {
        return res.status(400).json({
          success: false,
          message: 'Invalid match_type. Must be one of: ' + validMatchTypes.join(', ')
        });
      }
      const validThreatLevels = ['low', 'medium', 'high', 'critical'];
      if (!validThreatLevels.includes(threat_level)) {
        return res.status(400).json({
          success: false,
          message: 'Invalid threat_level. Must be one of: ' + validThreatLevels.join(', ')
        });
      }
      // Confidence defaults to 0.8; when supplied it must be a finite
      // number in [0, 1] — previously NaN or out-of-range values were stored.
      let parsedConfidence = 0.8;
      if (confidence !== undefined) {
        parsedConfidence = Number.parseFloat(confidence);
        if (!Number.isFinite(parsedConfidence) || parsedConfidence < 0 || parsedConfidence > 1) {
          return res.status(400).json({
            success: false,
            message: 'confidence must be a number between 0 and 1'
          });
        }
      }
      const result = await riskSignatureManager.createSignature({
        name,
        description,
        signature_type,
        pattern,
        match_type,
        threat_level,
        confidence: parsedConfidence,
        enabled,
        auto_block
      }, req.user.id);
      res.status(201).json({
        success: true,
        message: 'Signature created successfully',
        data: result
      });
    } catch (error) {
      logger.error('[SecurityConfig API] Error creating signature:', error);
      res.status(500).json({
        success: false,
        message: 'Failed to create signature',
        error: error.message
      });
    }
  }
);
/**
 * PUT /api/security-config/signatures/:id
 * Partially update a risk signature. Only whitelisted fields are applied;
 * unknown keys are silently ignored. Requires security.manage.
 */
router.put('/signatures/:id',
  authenticate,
  requirePermission('security.manage'),
  validateIdParam,
  async (req, res) => {
    try {
      const updates = {};
      // Field whitelist guards against mass-assignment of unintended columns.
      const allowedFields = ['name', 'description', 'pattern', 'match_type', 'threat_level', 'confidence', 'enabled', 'auto_block'];
      for (const field of allowedFields) {
        if (req.body[field] !== undefined) {
          updates[field] = req.body[field];
        }
      }
      if (Object.keys(updates).length === 0) {
        return res.status(400).json({
          success: false,
          message: 'No valid fields to update'
        });
      }
      // Acting user id is passed through for the manager's audit trail.
      await riskSignatureManager.updateSignature(req.params.id, updates, req.user.id);
      res.json({
        success: true,
        message: 'Signature updated successfully'
      });
    } catch (error) {
      logger.error('[SecurityConfig API] Error updating signature:', error);
      res.status(500).json({
        success: false,
        message: 'Failed to update signature',
        error: error.message
      });
    }
  }
);
/**
 * DELETE /api/security-config/signatures/:id
 * Remove a risk signature; the acting user id is recorded for auditing.
 */
router.delete('/signatures/:id',
  authenticate,
  requirePermission('security.manage'),
  validateIdParam,
  async (req, res) => {
    const { id } = req.params;
    try {
      await riskSignatureManager.deleteSignature(id, req.user.id);
      res.json({ success: true, message: 'Signature deleted successfully' });
    } catch (error) {
      logger.error('[SecurityConfig API] Error deleting signature:', error);
      res.status(500).json({
        success: false,
        message: 'Failed to delete signature',
        error: error.message
      });
    }
  }
);
// ===========================
// RESPONSE PROTOCOL ROUTES
// ===========================

/**
 * GET /api/security-config/protocols
 * List response protocols with aggregate statistics. Optional filters:
 * trigger_type, severity, enabled ('true'/'false'); limit is pre-clamped
 * by validatePagination. Requires security.manage.
 */
router.get('/protocols',
  authenticate,
  requirePermission('security.manage'),
  validatePagination,
  async (req, res) => {
    try {
      const filters = {
        triggerType: req.query.trigger_type,
        severity: req.query.severity,
        // Tri-state: undefined means "no filter", otherwise compare string.
        enabled: req.query.enabled !== undefined ? req.query.enabled === 'true' : undefined,
        limit: req.query.limit
      };
      const protocols = await responseProtocolManager.getProtocols(filters);
      const stats = await responseProtocolManager.getStatistics();
      res.json({
        success: true,
        data: protocols,
        statistics: stats,
        count: protocols.length
      });
    } catch (error) {
      logger.error('[SecurityConfig API] Error getting protocols:', error);
      res.status(500).json({
        success: false,
        message: 'Failed to get protocols',
        error: error.message
      });
    }
  }
);
/**
 * GET /api/security-config/protocols/:id
 * Fetch a single response protocol; 404 when it does not exist.
 */
router.get('/protocols/:id',
  authenticate,
  requirePermission('security.manage'),
  validateIdParam,
  async (req, res) => {
    try {
      const found = await responseProtocolManager.getProtocolById(req.params.id);
      if (!found) {
        res.status(404).json({
          success: false,
          message: 'Protocol not found'
        });
        return;
      }
      res.json({ success: true, data: found });
    } catch (error) {
      logger.error('[SecurityConfig API] Error getting protocol:', error);
      res.status(500).json({
        success: false,
        message: 'Failed to get protocol',
        error: error.message
      });
    }
  }
);
/**
 * POST /api/security-config/protocols
 * Create a new automated response protocol. Requires security.manage.
 * Validates enum fields (trigger_type, severity) and that actions is a
 * non-empty array; — fix vs. original — rejects a non-numeric
 * cooldown_minutes instead of silently storing NaN.
 */
router.post('/protocols',
  authenticate,
  requirePermission('security.manage'),
  async (req, res) => {
    try {
      const { name, description, trigger_type, trigger_condition, actions, severity, enabled, auto_execute, cooldown_minutes } = req.body;
      // Validation
      if (!name || !trigger_type || !trigger_condition || !actions || !severity) {
        return res.status(400).json({
          success: false,
          message: 'Missing required fields: name, trigger_type, trigger_condition, actions, severity'
        });
      }
      const validTriggerTypes = ['anomaly', 'threshold', 'signature'];
      if (!validTriggerTypes.includes(trigger_type)) {
        return res.status(400).json({
          success: false,
          message: 'Invalid trigger_type. Must be one of: ' + validTriggerTypes.join(', ')
        });
      }
      const validSeverities = ['low', 'medium', 'high', 'critical'];
      if (!validSeverities.includes(severity)) {
        return res.status(400).json({
          success: false,
          message: 'Invalid severity. Must be one of: ' + validSeverities.join(', ')
        });
      }
      if (!Array.isArray(actions) || actions.length === 0) {
        return res.status(400).json({
          success: false,
          message: 'actions must be a non-empty array'
        });
      }
      // Truthy check preserved: 0/absent falls back to the 60-minute default.
      const cooldown = cooldown_minutes ? Number.parseInt(cooldown_minutes, 10) : 60;
      if (Number.isNaN(cooldown)) {
        return res.status(400).json({
          success: false,
          message: 'cooldown_minutes must be a number'
        });
      }
      const result = await responseProtocolManager.createProtocol({
        name,
        description,
        trigger_type,
        trigger_condition,
        actions,
        severity,
        enabled,
        auto_execute,
        cooldown_minutes: cooldown
      }, req.user.id);
      res.status(201).json({
        success: true,
        message: 'Protocol created successfully',
        data: result
      });
    } catch (error) {
      logger.error('[SecurityConfig API] Error creating protocol:', error);
      res.status(500).json({
        success: false,
        message: 'Failed to create protocol',
        error: error.message
      });
    }
  }
);
/**
 * PUT /api/security-config/protocols/:id
 * Partially update a response protocol. Only whitelisted fields are
 * applied; unknown keys are silently ignored. Requires security.manage.
 */
router.put('/protocols/:id',
  authenticate,
  requirePermission('security.manage'),
  validateIdParam,
  async (req, res) => {
    try {
      const updates = {};
      // Field whitelist guards against mass-assignment of unintended columns.
      const allowedFields = ['name', 'description', 'trigger_condition', 'actions', 'severity', 'enabled', 'auto_execute', 'cooldown_minutes'];
      for (const field of allowedFields) {
        if (req.body[field] !== undefined) {
          updates[field] = req.body[field];
        }
      }
      if (Object.keys(updates).length === 0) {
        return res.status(400).json({
          success: false,
          message: 'No valid fields to update'
        });
      }
      // Acting user id is passed through for the manager's audit trail.
      await responseProtocolManager.updateProtocol(req.params.id, updates, req.user.id);
      res.json({
        success: true,
        message: 'Protocol updated successfully'
      });
    } catch (error) {
      logger.error('[SecurityConfig API] Error updating protocol:', error);
      res.status(500).json({
        success: false,
        message: 'Failed to update protocol',
        error: error.message
      });
    }
  }
);
/**
 * DELETE /api/security-config/protocols/:id
 * Remove a response protocol; the acting user id is recorded for auditing.
 * Requires security.manage.
 */
router.delete('/protocols/:id',
  authenticate,
  requirePermission('security.manage'),
  validateIdParam,
  async (req, res) => {
    try {
      await responseProtocolManager.deleteProtocol(req.params.id, req.user.id);
      res.json({
        success: true,
        message: 'Protocol deleted successfully'
      });
    } catch (error) {
      logger.error('[SecurityConfig API] Error deleting protocol:', error);
      res.status(500).json({
        success: false,
        message: 'Failed to delete protocol',
        error: error.message
      });
    }
  }
);
/**
 * GET /api/security-config/protocols/:id/history
 * Fetch the execution history for one protocol, capped by the pre-clamped
 * `limit` query parameter.
 */
router.get('/protocols/:id/history',
  authenticate,
  requirePermission('security.manage'),
  validateIdParam,
  validatePagination,
  async (req, res) => {
    try {
      const entries = await responseProtocolManager.getExecutionHistory({
        protocolId: req.params.id,
        limit: req.query.limit
      });
      res.json({ success: true, data: entries, count: entries.length });
    } catch (error) {
      logger.error('[SecurityConfig API] Error getting protocol history:', error);
      res.status(500).json({
        success: false,
        message: 'Failed to get protocol history',
        error: error.message
      });
    }
  }
);
// ===========================
// DASHBOARD/OVERVIEW ROUTES
// ===========================

/**
 * GET /api/security-config/dashboard
 * Aggregate overview: statistics from the threshold, signature, and
 * protocol managers, fetched concurrently.
 */
router.get('/dashboard',
  authenticate,
  requirePermission('security.manage'),
  async (req, res) => {
    try {
      const statTasks = [
        thresholdManager.getStatistics(),
        riskSignatureManager.getStatistics(),
        responseProtocolManager.getStatistics()
      ];
      const [thresholds, signatures, protocols] = await Promise.all(statTasks);
      res.json({
        success: true,
        data: { thresholds, signatures, protocols }
      });
    } catch (error) {
      logger.error('[SecurityConfig API] Error getting dashboard:', error);
      res.status(500).json({
        success: false,
        message: 'Failed to get dashboard data',
        error: error.message
      });
    }
  }
);

module.exports = router;

View file

@ -0,0 +1,679 @@
const express = require('express');
const router = express.Router();
const { authenticate } = require('../middleware/auth');
const { requirePermission } = require('../middleware/rbac');
const { modifyLimiter, readLimiter } = require('../middleware/rateLimiter');
const { db } = require('../database/db');
const logger = require('../utils/logger');
const fs = require('fs').promises;
const path = require('path');
/**
* Security Headers Configuration Management
* Allows admins to view and configure HTTP security headers
*/
// Create security_headers_config table.
// Stores named security-header profiles: one column per CSP directive,
// HSTS knobs, and the classic X-* headers. At most one row is expected to
// have is_active = 1 (the applied profile); enforcement of that invariant
// is not visible here — TODO confirm in the activation handler.
db.run(`
  CREATE TABLE IF NOT EXISTS security_headers_config (
    id INTEGER PRIMARY KEY AUTOINCREMENT,
    config_name TEXT NOT NULL UNIQUE,
    csp_default_src TEXT,
    csp_script_src TEXT,
    csp_style_src TEXT,
    csp_img_src TEXT,
    csp_media_src TEXT,
    csp_connect_src TEXT,
    csp_font_src TEXT,
    csp_frame_src TEXT,
    csp_object_src TEXT,
    csp_base_uri TEXT,
    csp_form_action TEXT,
    csp_frame_ancestors TEXT,
    hsts_enabled INTEGER DEFAULT 1,
    hsts_max_age INTEGER DEFAULT 31536000,
    hsts_include_subdomains INTEGER DEFAULT 1,
    hsts_preload INTEGER DEFAULT 1,
    referrer_policy TEXT DEFAULT 'strict-origin-when-cross-origin',
    x_content_type_options INTEGER DEFAULT 1,
    x_frame_options TEXT DEFAULT 'SAMEORIGIN',
    x_xss_protection INTEGER DEFAULT 1,
    is_active INTEGER DEFAULT 0,
    created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
    updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
    created_by INTEGER,
    FOREIGN KEY (created_by) REFERENCES users(id)
  )
`);
// Create security_headers_history table for audit trail.
// Each row records one change to a header config: the action taken, a
// before/after snapshot (previous_config / new_config), and who made it.
db.run(`
  CREATE TABLE IF NOT EXISTS security_headers_history (
    id INTEGER PRIMARY KEY AUTOINCREMENT,
    config_id INTEGER,
    action TEXT NOT NULL,
    previous_config TEXT,
    new_config TEXT,
    changed_by INTEGER,
    timestamp TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
    FOREIGN KEY (config_id) REFERENCES security_headers_config(id),
    FOREIGN KEY (changed_by) REFERENCES users(id)
  )
`);
// Security presets.
// Three ready-made header profiles, keyed by preset id. Each `config`
// mirrors the columns of security_headers_config; CSP directive values are
// stored as strings shaped like "['self', ...]" — presumably parsed into
// arrays downstream before being applied (TODO confirm with the consumer).
const SECURITY_PRESETS = {
  // Locks CSP down to same-origin for nearly everything.
  strict: {
    name: 'Strict Security',
    description: 'Maximum security - blocks most external resources',
    config: {
      csp_default_src: "['self']",
      csp_script_src: "['self']",
      csp_style_src: "['self']",
      csp_img_src: "['self', 'data:', 'https:']",
      csp_media_src: "['self']",
      csp_connect_src: "['self']",
      csp_font_src: "['self', 'data:']",
      csp_frame_src: "['self']",
      csp_object_src: "['none']",
      csp_base_uri: "['self']",
      csp_form_action: "['self']",
      csp_frame_ancestors: "['self']",
      hsts_enabled: 1,
      hsts_max_age: 31536000,
      hsts_include_subdomains: 1,
      hsts_preload: 1,
      referrer_policy: 'no-referrer',
      x_content_type_options: 1,
      x_frame_options: 'DENY',
      x_xss_protection: 1
    }
  },
  // Same-origin baseline plus Google fonts/CDN hosts and inline styles.
  balanced: {
    name: 'Balanced',
    description: 'Good security with common CDN support',
    config: {
      csp_default_src: "['self']",
      csp_script_src: "['self', 'https://www.gstatic.com', 'https://cdn.jsdelivr.net']",
      csp_style_src: "['self', 'https://fonts.googleapis.com', \"'unsafe-inline'\"]",
      csp_img_src: "['self', 'data:', 'blob:', 'https:', 'http:']",
      csp_media_src: "['self', 'blob:', 'data:', 'https:', 'http:']",
      csp_connect_src: "['self', 'https:', 'http:', 'ws:', 'wss:']",
      csp_font_src: "['self', 'data:', 'https://fonts.gstatic.com']",
      csp_frame_src: "['self', 'https://www.youtube.com', 'https://player.vimeo.com']",
      csp_object_src: "['none']",
      csp_base_uri: "['self']",
      csp_form_action: "['self']",
      csp_frame_ancestors: "['self']",
      hsts_enabled: 1,
      hsts_max_age: 31536000,
      hsts_include_subdomains: 1,
      hsts_preload: 0,
      referrer_policy: 'strict-origin-when-cross-origin',
      x_content_type_options: 1,
      x_frame_options: 'SAMEORIGIN',
      x_xss_protection: 1
    }
  },
  // Widest allowances (unsafe-inline/eval, wildcard media/connect) so
  // arbitrary external IPTV streams and APIs keep working.
  permissive: {
    name: 'Permissive (IPTV Streaming)',
    description: 'Allows external streams and APIs - current default',
    config: {
      csp_default_src: "['self']",
      csp_script_src: "['self', \"'unsafe-inline'\", \"'unsafe-eval'\", 'https://www.gstatic.com', 'https://cdn.jsdelivr.net', 'blob:']",
      csp_style_src: "['self', \"'unsafe-inline'\", 'https://fonts.googleapis.com']",
      csp_img_src: "['self', 'data:', 'blob:', 'https:', 'http:']",
      csp_media_src: "['self', 'blob:', 'data:', 'mediastream:', 'https:', 'http:', '*']",
      csp_connect_src: "['self', 'https:', 'http:', 'ws:', 'wss:', 'blob:', '*']",
      csp_font_src: "['self', 'data:', 'https://fonts.gstatic.com']",
      csp_frame_src: "['self', 'https://www.youtube.com', 'https://player.vimeo.com']",
      csp_object_src: "['none']",
      csp_base_uri: "['self']",
      csp_form_action: "['self']",
      csp_frame_ancestors: "['self']",
      hsts_enabled: 1,
      hsts_max_age: 31536000,
      hsts_include_subdomains: 1,
      hsts_preload: 1,
      referrer_policy: 'strict-origin-when-cross-origin',
      x_content_type_options: 1,
      x_frame_options: 'SAMEORIGIN',
      x_xss_protection: 1
    }
  }
};
// Get current active security headers configuration.
// Combines (1) the live config derived from server.js / environment,
// (2) all saved configs from the DB, and (3) the built-in presets.
router.get('/current', authenticate, requirePermission('security.view_audit'), readLimiter, async (req, res) => {
  try {
    // Read current configuration from server.js
    const serverPath = path.join(__dirname, '../server.js');
    const serverContent = await fs.readFile(serverPath, 'utf8');
    // Parse current CSP configuration.
    // NOTE(review): extractCSPFromCode currently ignores its argument and
    // returns a hard-coded snapshot — confirm it stays in sync with server.js.
    const currentConfig = {
      environment: process.env.NODE_ENV || 'development',
      csp: {
        // CSP is only enforced in production; report-only elsewhere.
        mode: process.env.NODE_ENV === 'production' ? 'enforcing' : 'report-only',
        directives: extractCSPFromCode(serverContent)
      },
      hsts: {
        enabled: process.env.NODE_ENV === 'production',
        maxAge: 31536000, // one year, in seconds
        includeSubDomains: true,
        preload: true
      },
      referrerPolicy: 'strict-origin-when-cross-origin',
      xContentTypeOptions: true,
      xFrameOptions: 'SAMEORIGIN',
      xssProtection: true
    };
    // Get saved configurations: active config first, then most recently updated.
    db.all(
      'SELECT * FROM security_headers_config ORDER BY is_active DESC, updated_at DESC',
      [],
      (err, configs) => {
        if (err) {
          logger.error('Error fetching security headers configs:', err);
          return res.status(500).json({ error: 'Failed to fetch configurations' });
        }
        res.json({
          current: currentConfig,
          savedConfigs: configs || [],
          presets: SECURITY_PRESETS
        });
      }
    );
  } catch (error) {
    logger.error('Error reading current security headers:', error);
    res.status(500).json({ error: 'Failed to read current configuration' });
  }
});
// Get security header recommendations.
// Thin wrapper: delegates report generation to generateSecurityRecommendations().
router.get(
  '/recommendations',
  authenticate,
  requirePermission('security.view_audit'),
  readLimiter,
  async (req, res) => {
    try {
      res.json(await generateSecurityRecommendations());
    } catch (error) {
      logger.error('Error generating recommendations:', error);
      res.status(500).json({ error: 'Failed to generate recommendations' });
    }
  }
);
// Test security headers.
// Dry-runs a proposed header configuration and reports pass/warn/fail results
// without persisting or applying anything.
router.post('/test', authenticate, requirePermission('security.manage'), modifyLimiter, async (req, res) => {
  try {
    const { config } = req.body;
    // Guard clause: a configuration payload is mandatory.
    if (!config) {
      res.status(400).json({ error: 'Configuration required' });
      return;
    }
    res.json(await testSecurityConfiguration(config));
  } catch (error) {
    logger.error('Error testing security headers:', error);
    res.status(500).json({ error: 'Failed to test configuration' });
  }
});
// Save security headers configuration.
// Body: { configName: string, config: object, setActive?: boolean }.
// When setActive is truthy, every other saved configuration is deactivated
// first so at most one row has is_active = 1.
router.post('/save', authenticate, requirePermission('security.manage'), modifyLimiter, async (req, res) => {
  try {
    const { configName, config, setActive } = req.body;
    if (!configName || !config) {
      return res.status(400).json({ error: 'Configuration name and config required' });
    }
    // If setting as active, deactivate all others first
    if (setActive) {
      await new Promise((resolve, reject) => {
        db.run('UPDATE security_headers_config SET is_active = 0', [], (err) => {
          if (err) reject(err);
          else resolve();
        });
      });
    }
    // Insert new configuration
    const stmt = db.prepare(`
      INSERT INTO security_headers_config (
        config_name, csp_default_src, csp_script_src, csp_style_src,
        csp_img_src, csp_media_src, csp_connect_src, csp_font_src,
        csp_frame_src, csp_object_src, csp_base_uri, csp_form_action,
        csp_frame_ancestors, hsts_enabled, hsts_max_age,
        hsts_include_subdomains, hsts_preload, referrer_policy,
        x_content_type_options, x_frame_options, x_xss_protection,
        is_active, created_by
      ) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
    `);
    // Must stay a classic function: `this.lastID` is set by node-sqlite3.
    stmt.run(
      configName,
      config.csp_default_src,
      config.csp_script_src,
      config.csp_style_src,
      config.csp_img_src,
      config.csp_media_src,
      config.csp_connect_src,
      config.csp_font_src,
      config.csp_frame_src,
      config.csp_object_src,
      config.csp_base_uri,
      config.csp_form_action,
      config.csp_frame_ancestors,
      config.hsts_enabled ? 1 : 0,
      config.hsts_max_age,
      config.hsts_include_subdomains ? 1 : 0,
      config.hsts_preload ? 1 : 0,
      config.referrer_policy,
      config.x_content_type_options ? 1 : 0,
      config.x_frame_options,
      config.x_xss_protection ? 1 : 0,
      setActive ? 1 : 0,
      req.user.id,
      function(err) {
        if (err) {
          logger.error('Error saving security headers config:', err);
          return res.status(500).json({ error: 'Failed to save configuration' });
        }
        // Log to history (best effort; response is not blocked on it).
        db.run(
          `INSERT INTO security_headers_history (config_id, action, new_config, changed_by)
           VALUES (?, ?, ?, ?)`,
          [this.lastID, 'created', JSON.stringify(config), req.user.id]
        );
        logger.info(`Security headers configuration '${configName}' saved by user ${req.user.id}`);
        res.json({
          success: true,
          configId: this.lastID,
          message: 'Configuration saved successfully'
        });
      }
    );
    // Fix: release the prepared statement. node-sqlite3 queues finalize()
    // after the pending run(), so the callback above still fires; previously
    // the statement was never finalized, leaking a handle on every save.
    stmt.finalize();
  } catch (error) {
    logger.error('Error saving security headers:', error);
    res.status(500).json({ error: 'Failed to save configuration' });
  }
});
// Apply security headers configuration (updates server.js).
// Marks the chosen saved configuration as the single active one and records
// the change in history. The running process keeps its current headers until
// restart — the response flags requiresRestart for the client.
router.post('/apply/:configId', authenticate, requirePermission('security.manage'), modifyLimiter, async (req, res) => {
  try {
    const { configId } = req.params;
    // Get configuration
    db.get(
      'SELECT * FROM security_headers_config WHERE id = ?',
      [configId],
      async (err, config) => {
        // Both a query error and a missing row are reported as 404 here.
        if (err || !config) {
          return res.status(404).json({ error: 'Configuration not found' });
        }
        try {
          // Backup current server.js before anything else, so a manual
          // rollback is possible after restart.
          const serverPath = path.join(__dirname, '../server.js');
          const backupPath = path.join(__dirname, '../server.js.backup');
          await fs.copyFile(serverPath, backupPath);
          // Note: Applying configuration requires server restart.
          // This endpoint saves the config as active but warns user to restart.
          // Deactivate all configs, then activate the requested one.
          await new Promise((resolve, reject) => {
            db.run('UPDATE security_headers_config SET is_active = 0', [], (err) => {
              if (err) reject(err);
              else resolve();
            });
          });
          await new Promise((resolve, reject) => {
            db.run(
              'UPDATE security_headers_config SET is_active = 1, updated_at = CURRENT_TIMESTAMP WHERE id = ?',
              [configId],
              (err) => {
                if (err) reject(err);
                else resolve();
              }
            );
          });
          // Log to history (best effort; not awaited).
          db.run(
            `INSERT INTO security_headers_history (config_id, action, new_config, changed_by)
             VALUES (?, ?, ?, ?)`,
            [configId, 'applied', JSON.stringify(config), req.user.id]
          );
          logger.info(`Security headers configuration ${configId} marked as active by user ${req.user.id}`);
          res.json({
            success: true,
            warning: 'Configuration saved. Server restart required to apply changes.',
            requiresRestart: true
          });
        } catch (error) {
          logger.error('Error applying security headers:', error);
          res.status(500).json({ error: 'Failed to apply configuration' });
        }
      }
    );
  } catch (error) {
    logger.error('Error in apply endpoint:', error);
    res.status(500).json({ error: 'Failed to apply configuration' });
  }
});
// Get configuration history.
// Returns the 50 most recent change records, joined with the acting
// user's name and the affected configuration's name.
router.get('/history', authenticate, requirePermission('security.view_audit'), readLimiter, async (req, res) => {
  const historySql = `SELECT h.*, u.username, c.config_name
       FROM security_headers_history h
       LEFT JOIN users u ON h.changed_by = u.id
       LEFT JOIN security_headers_config c ON h.config_id = c.id
       ORDER BY h.timestamp DESC
       LIMIT 50`;
  try {
    db.all(historySql, [], (err, history) => {
      if (err) {
        logger.error('Error fetching security headers history:', err);
        return res.status(500).json({ error: 'Failed to fetch history' });
      }
      res.json(history || []);
    });
  } catch (error) {
    logger.error('Error fetching history:', error);
    res.status(500).json({ error: 'Failed to fetch history' });
  }
});
// Delete saved configuration.
// The currently active configuration is protected and cannot be removed.
router.delete('/:configId', authenticate, requirePermission('security.manage'), modifyLimiter, async (req, res) => {
  try {
    const { configId } = req.params;
    db.get('SELECT is_active FROM security_headers_config WHERE id = ?', [configId], (err, config) => {
      // Guard clauses: query failure, unknown id, or protected active row.
      if (err) {
        return res.status(500).json({ error: 'Failed to check configuration' });
      }
      if (!config) {
        return res.status(404).json({ error: 'Configuration not found' });
      }
      if (config.is_active) {
        return res.status(400).json({ error: 'Cannot delete active configuration' });
      }
      db.run('DELETE FROM security_headers_config WHERE id = ?', [configId], (deleteErr) => {
        if (deleteErr) {
          logger.error('Error deleting security headers config:', deleteErr);
          return res.status(500).json({ error: 'Failed to delete configuration' });
        }
        logger.info(`Security headers configuration ${configId} deleted by user ${req.user.id}`);
        res.json({ success: true, message: 'Configuration deleted' });
      });
    });
  } catch (error) {
    logger.error('Error deleting configuration:', error);
    res.status(500).json({ error: 'Failed to delete configuration' });
  }
});
// Helper functions
/**
 * Return the CSP directives currently configured for the app.
 * This is a simplified extraction: it yields a static snapshot rather than
 * actually parsing `serverCode` (the argument is currently unused).
 * @param {string} serverCode - contents of server.js (unused placeholder)
 * @returns {object} map of camelCase directive names to source lists
 */
function extractCSPFromCode(serverCode) {
  const directiveEntries = [
    ['defaultSrc', ["'self'"]],
    ['scriptSrc', ["'self'", "'unsafe-inline'", "'unsafe-eval'"]],
    ['styleSrc', ["'self'", "'unsafe-inline'"]],
    ['imgSrc', ["'self'", "data:", "blob:", "https:", "http:"]],
    ['mediaSrc', ["'self'", "blob:", "data:", "mediastream:", "https:", "http:", "*"]],
    ['connectSrc', ["'self'", "https:", "http:", "ws:", "wss:", "blob:", "*"]],
    ['fontSrc', ["'self'", "data:"]],
    ['frameSrc', ["'self'"]],
    ['objectSrc', ["'none'"]],
    ['baseUri', ["'self'"]],
    ['formAction', ["'self'"]],
    ['frameAncestors', ["'self'"]],
  ];
  return Object.fromEntries(directiveEntries);
}
/**
 * Build a prioritized list of security recommendations plus a 0-100 score.
 * Draws on the environment (production vs development) and the count of CSP
 * violation reports recorded in the last 7 days.
 * @returns {Promise<{score:number, grade:string, recommendations:Array, summary:object}>}
 */
async function generateSecurityRecommendations() {
  const recommendations = [];
  let score = 100;
  // Check current environment
  const isProduction = process.env.NODE_ENV === 'production';
  if (!isProduction) {
    recommendations.push({
      severity: 'info',
      category: 'Environment',
      title: 'Development Mode Active',
      description: 'CSP is in report-only mode. Some security headers are disabled.',
      action: 'Deploy to production to enable full security',
      impact: 'low'
    });
  }
  // Count CSP violations reported in the last 7 days (best effort: 0 on error).
  const violationCount = await new Promise((resolve) => {
    db.get(
      'SELECT COUNT(*) as count FROM csp_violations WHERE created_at > datetime("now", "-7 days")',
      [],
      (err, row) => resolve(row?.count || 0)
    );
  });
  if (violationCount > 10) {
    // -10 once for a noisy CSP; a further -15 applies past 50 violations below.
    score -= 10;
    recommendations.push({
      severity: 'warning',
      category: 'CSP',
      title: `${violationCount} CSP Violations in Last 7 Days`,
      description: `Your Content Security Policy is being violated ${violationCount} times, indicating resources are being blocked or attempted to load from unauthorized sources. Common causes: (1) External scripts/styles not whitelisted in CSP, (2) Inline event handlers (onclick, onload, etc.), (3) Third-party widgets or ads, (4) Browser extensions injecting content, (5) Misconfigured CDN URLs. This could indicate attempted attacks or legitimate resources being blocked.`,
      action: 'Visit /security/csp dashboard to analyze violations by: (1) Violated Directive - identify which CSP rule is being broken, (2) Blocked URI - see what resources are blocked, (3) Source File - find where the violation originates. Then either: (a) Add legitimate sources to your CSP whitelist, (b) Remove inline scripts/handlers, (c) Block malicious sources, (d) Update third-party library configurations.',
      impact: 'medium',
      details: {
        threshold: 'More than 10 violations may indicate policy misconfiguration',
        monitoring: 'Check CSP Dashboard for patterns and trends',
        action: 'Use Statistics tab to group violations and identify root causes'
      }
    });
  }
  // Check for unsafe CSP directives.
  // NOTE(review): this recommendation is pushed unconditionally (no actual
  // inspection of the directives) — confirm that is intended; it also means
  // the "Excellent Security Configuration" branch below can never fire.
  recommendations.push({
    severity: 'warning',
    category: 'CSP',
    title: 'Unsafe CSP Directives Detected',
    description: "Your CSP includes 'unsafe-inline' and 'unsafe-eval' in script-src, which weakens XSS (Cross-Site Scripting) protection. These directives allow inline JavaScript and dynamic code evaluation, making it easier for attackers to inject malicious scripts if they find a vulnerability. However, for IPTV streaming apps using React/Vite, these are often necessary for: (1) React's inline scripts and hot module replacement, (2) MUI's dynamic styling, (3) Third-party streaming libraries, (4) External IPTV APIs. Your server already generates cryptographic nonces for better security.",
    action: 'Current configuration is acceptable for an IPTV app. To improve: (1) Monitor CSP violations regularly in the CSP Dashboard, (2) Keep input validation strict (already implemented), (3) Update dependencies frequently, (4) For future major refactoring, explore migrating to nonce-only scripts by configuring Vite to inject nonces and removing unsafe-inline. Note: Your nonce generation is already in place at server.js - you have the foundation for future improvement.',
    impact: 'medium',
    details: {
      currentSetup: 'Multiple defense layers active: input validation, parameterized queries, authentication, rate limiting',
      tradeoff: 'Security vs Functionality: Current score 85-90 is excellent for feature-rich apps',
      futureWork: 'Nonce-based CSP requires: Vite config changes, React hydration updates, third-party library compatibility'
    }
  });
  // Check HSTS
  if (!isProduction) {
    recommendations.push({
      severity: 'info',
      category: 'HSTS',
      title: 'HSTS Disabled in Development',
      description: 'HTTP Strict Transport Security (HSTS) forces browsers to only connect via HTTPS, preventing man-in-the-middle attacks and SSL stripping. Currently disabled in development mode to allow HTTP testing. In production, HSTS will be enabled with: max-age=31536000 (1 year), includeSubDomains, and preload flags.',
      action: 'For production deployment: (1) Ensure valid SSL/TLS certificate is installed, (2) Configure reverse proxy (nginx/Apache) for HTTPS, (3) Set NODE_ENV=production to enable HSTS, (4) Test HTTPS functionality before enabling, (5) Consider HSTS preload list submission at hstspreload.org for maximum security (permanent, cannot be undone easily).',
      impact: 'low',
      details: {
        currentMode: 'Development - HSTS off to allow HTTP testing',
        productionMode: 'HSTS enabled automatically with secure settings',
        preloadWarning: 'HSTS preload is permanent - only enable after thorough HTTPS testing'
      }
    });
  }
  // Add positive recommendation if security is good
  if (recommendations.length === 0 || (recommendations.length === 1 && recommendations[0].severity === 'info')) {
    recommendations.push({
      severity: 'info',
      category: 'Security',
      title: 'Excellent Security Configuration',
      description: 'Your security headers are well-configured with minimal issues detected. All critical protections are active: CSP for XSS prevention, HSTS for HTTPS enforcement (in production), X-Content-Type-Options for MIME sniffing protection, X-Frame-Options for clickjacking prevention, and proper referrer policy.',
      action: 'Maintain current configuration and continue monitoring: (1) Review CSP violations weekly, (2) Keep dependencies updated with npm audit, (3) Monitor security audit logs for suspicious activity, (4) Backup security configurations before changes.',
      impact: 'low',
      details: {
        score: 'Grade A security for IPTV streaming application',
        maintenance: 'Regular monitoring and updates recommended',
        compliance: 'Meets OWASP security standards'
      }
    });
  }
  // Security score escalation for very noisy CSPs.
  // Fix: the -10 for >10 violations was previously deducted a second time
  // here (in addition to the deduction above), double-counting the penalty.
  // Only the >50 escalation remains in this section.
  if (violationCount > 50) score -= 15;
  return {
    score: Math.max(0, score),
    grade: score >= 90 ? 'A' : score >= 80 ? 'B' : score >= 70 ? 'C' : score >= 60 ? 'D' : 'F',
    recommendations: recommendations,
    summary: {
      total: recommendations.length,
      critical: recommendations.filter(r => r.severity === 'error').length,
      warnings: recommendations.filter(r => r.severity === 'warning').length,
      info: recommendations.filter(r => r.severity === 'info').length
    }
  };
}
/**
 * Evaluate a proposed security-header configuration and score it 0-100.
 * Each failed check subtracts a severity-based penalty from the score.
 * @param {object} config - flat snake_case header configuration record
 * @returns {Promise<{score:number, grade:string, passed:Array, warnings:Array, errors:Array, summary:string}>}
 */
async function testSecurityConfiguration(config) {
  const passed = [];
  const warnings = [];
  const errors = [];
  let score = 100;

  // Small recorders so each check reads as a one-liner.
  const pass = (test, message) => {
    passed.push({ test, message });
  };
  const warn = (test, message, penalty) => {
    warnings.push({ test, message });
    score -= penalty;
  };
  const fail = (test, message, penalty) => {
    errors.push({ test, message });
    score -= penalty;
  };

  // CSP: dynamic code execution via eval.
  if (config.csp_script_src && config.csp_script_src.includes("'unsafe-eval'")) {
    warn('CSP Script Evaluation', "'unsafe-eval' allows dynamic code execution, reducing XSS protection", 5);
  } else {
    pass('CSP Script Evaluation', 'No unsafe-eval in script-src');
  }

  // CSP: inline script execution.
  if (config.csp_script_src && config.csp_script_src.includes("'unsafe-inline'")) {
    warn('CSP Inline Scripts', "'unsafe-inline' allows inline scripts, reducing XSS protection", 5);
  } else {
    pass('CSP Inline Scripts', 'No unsafe-inline in script-src');
  }

  // Plugins (object/embed) should be blocked outright.
  if (config.csp_object_src && config.csp_object_src.includes("'none'")) {
    pass('Plugin Blocking', 'object-src is none - plugins blocked');
  } else {
    warn('Plugin Blocking', 'Consider setting object-src to none to block plugins', 5);
  }

  // HSTS: must be on, for at least a year, ideally covering subdomains.
  if (config.hsts_enabled) {
    if (config.hsts_max_age >= 31536000) {
      pass('HSTS Duration', 'HSTS max-age is 1 year or more');
    } else {
      warn('HSTS Duration', 'HSTS max-age should be at least 1 year (31536000 seconds)', 5);
    }
    if (config.hsts_include_subdomains) {
      pass('HSTS Subdomains', 'HSTS includeSubDomains is enabled');
    }
  } else {
    fail('HSTS Enabled', 'HSTS is disabled - HTTPS enforcement is not active', 15);
  }

  // Clickjacking defense via X-Frame-Options.
  if (config.x_frame_options === 'DENY' || config.x_frame_options === 'SAMEORIGIN') {
    pass('Clickjacking Protection', `X-Frame-Options is set to ${config.x_frame_options}`);
  } else {
    warn('Clickjacking Protection', 'X-Frame-Options should be DENY or SAMEORIGIN', 10);
  }

  // MIME sniffing defense via X-Content-Type-Options.
  if (config.x_content_type_options) {
    pass('MIME Sniffing Protection', 'X-Content-Type-Options: nosniff is enabled');
  } else {
    fail('MIME Sniffing Protection', 'X-Content-Type-Options should be enabled', 10);
  }

  // Grade is derived from the raw score; the reported score is floored at 0.
  return {
    score: Math.max(0, score),
    grade: score >= 90 ? 'A' : score >= 80 ? 'B' : score >= 70 ? 'C' : score >= 60 ? 'D' : 'F',
    passed: passed,
    warnings: warnings,
    errors: errors,
    summary: `${passed.length} passed, ${warnings.length} warnings, ${errors.length} errors`
  };
}
module.exports = router;

View file

@ -0,0 +1,479 @@
const express = require('express');
const router = express.Router();
const { authenticate } = require('../middleware/auth');
const { requirePermission } = require('../middleware/rbac');
const { readLimiter } = require('../middleware/rateLimiter');
const { db } = require('../database/db');
const logger = require('../utils/logger');
const fs = require('fs').promises;
const path = require('path');
const { exec } = require('child_process');
const { promisify } = require('util');
const execPromise = promisify(exec);
/**
* Security Monitoring & Dependency Management
* Provides comprehensive security status and vulnerability tracking
*/
// Get comprehensive security status.
// Gathers each section sequentially (dependency counts, npm audit results,
// header configuration, audit summary, system health) into one report.
router.get('/status', authenticate, requirePermission('security.view_audit'), readLimiter, async (req, res) => {
  try {
    const timestamp = new Date().toISOString();
    const dependencies = await checkDependencies();
    const vulnerabilities = await checkVulnerabilities();
    const securityHeaders = await checkSecurityHeaders();
    const auditSummary = await getAuditSummary();
    const systemHealth = await getSystemHealth();
    res.json({ timestamp, dependencies, vulnerabilities, securityHeaders, auditSummary, systemHealth });
  } catch (error) {
    logger.error('Error fetching security status:', error);
    res.status(500).json({ error: 'Failed to fetch security status' });
  }
});
// Check dependencies for updates.
// Reads both package.json files and reports how many (dev)dependencies
// each project declares. Returns { error } instead of throwing on failure.
async function checkDependencies() {
  const summarize = (pkg) => ({
    dependencies: Object.keys(pkg.dependencies || {}).length,
    devDependencies: Object.keys(pkg.devDependencies || {}).length,
    lastChecked: new Date().toISOString()
  });
  try {
    const readPackage = async (file) => JSON.parse(await fs.readFile(file, 'utf8'));
    const backendPackage = await readPackage(path.join(__dirname, '../package.json'));
    const frontendPackage = await readPackage(path.join(__dirname, '../../frontend/package.json'));
    return {
      backend: summarize(backendPackage),
      frontend: summarize(frontendPackage)
    };
  } catch (error) {
    logger.error('Error checking dependencies:', error);
    return { error: 'Unable to check dependencies' };
  }
}
// Check for known vulnerabilities.
// Runs `npm audit` for the backend then the frontend and returns both
// severity breakdowns; returns { error } rather than throwing.
async function checkVulnerabilities() {
  try {
    const backend = await runNpmAudit('backend');
    const frontend = await runNpmAudit('frontend');
    return {
      backend,
      frontend,
      lastScanned: new Date().toISOString()
    };
  } catch (error) {
    logger.error('Error checking vulnerabilities:', error);
    return { error: 'Unable to scan for vulnerabilities' };
  }
}
/**
 * Parse `npm audit --json` output into a flat severity-count record.
 * @param {string} stdout - raw JSON emitted by npm audit
 * @returns {object} counts per severity (0 when absent)
 */
function parseNpmAuditOutput(stdout) {
  const auditData = JSON.parse(stdout);
  const vulns = auditData.metadata?.vulnerabilities || {};
  return {
    total: vulns.total || 0,
    critical: vulns.critical || 0,
    high: vulns.high || 0,
    moderate: vulns.moderate || 0,
    low: vulns.low || 0,
    info: vulns.info || 0
  };
}

/**
 * Run `npm audit --json` for one of the two projects.
 * @param {'backend'|'frontend'} project
 * @returns {Promise<object>} severity counts, or { error } on failure
 */
async function runNpmAudit(project) {
  const projectPath = project === 'backend'
    ? path.join(__dirname, '..')
    : path.join(__dirname, '../../frontend');
  try {
    // Fix: use exec's `cwd` option instead of interpolating the path into a
    // shell `cd ... &&` string — the old form broke on paths with spaces or
    // shell metacharacters and needlessly fed a path through the shell.
    const { stdout } = await execPromise('npm audit --json', {
      cwd: projectPath,
      timeout: 30000
    });
    return parseNpmAuditOutput(stdout);
  } catch (error) {
    // npm audit returns non-zero exit code when vulnerabilities are found,
    // so the real results usually arrive on error.stdout.
    if (error.stdout) {
      try {
        return parseNpmAuditOutput(error.stdout);
      } catch {
        return { error: 'Unable to parse audit results' };
      }
    }
    return { error: error.message };
  }
}
// Check security headers configuration.
// Returns a static description of the header stack; only the CSP mode
// varies, based on NODE_ENV.
async function checkSecurityHeaders() {
  const cspMode = process.env.NODE_ENV === 'production' ? 'enforcing' : 'report-only';
  const helmetFeatures = [
    'Content-Security-Policy',
    'X-Content-Type-Options',
    'X-Frame-Options',
    'X-XSS-Protection',
    'Strict-Transport-Security',
    'Referrer-Policy'
  ];
  return {
    helmet: { enabled: true, features: helmetFeatures },
    csp: { enabled: true, mode: cspMode },
    cors: { enabled: true }
  };
}
// Get security audit summary.
// Aggregates audit-log events from the past 7 days grouped by action/result,
// most frequent first (top 20). Resolves { error } instead of rejecting.
async function getAuditSummary() {
  const summarySql = `SELECT
      action,
      result,
      COUNT(*) as count,
      MAX(timestamp) as last_occurrence
    FROM security_audit_log
    WHERE timestamp > datetime('now', '-7 days')
    GROUP BY action, result
    ORDER BY count DESC
    LIMIT 20`;
  return new Promise((resolve) => {
    db.all(summarySql, [], (err, rows) => {
      if (err) {
        logger.error('Error fetching audit summary:', err);
        resolve({ error: 'Unable to fetch audit summary' });
        return;
      }
      resolve(rows || []);
    });
  });
}
// Get system health metrics.
// Runs four COUNT queries in parallel; each is best effort and yields 0
// on query error, so this never rejects.
async function getSystemHealth() {
  const countOf = (sql, params) =>
    new Promise((resolve) => {
      db.get(sql, params, (err, row) => resolve(row?.count || 0));
    });
  const [activeSessions, failedLogins, lockedAccounts, totalUsers] = await Promise.all([
    // Active sessions count
    countOf('SELECT COUNT(*) as count FROM sessions WHERE expires_at > ?', [new Date().toISOString()]),
    // Failed login attempts in last hour
    countOf(
      `SELECT COUNT(*) as count FROM security_audit_log
       WHERE action = 'login' AND result = 'failed'
       AND timestamp > datetime('now', '-1 hour')`,
      []
    ),
    // Locked accounts
    countOf('SELECT COUNT(*) as count FROM users WHERE locked_until > ?', [new Date().toISOString()]),
    // Total users
    countOf('SELECT COUNT(*) as count FROM users', [])
  ]);
  return {
    activeSessions,
    failedLogins,
    lockedAccounts,
    totalUsers,
    timestamp: new Date().toISOString()
  };
}
// Get detailed vulnerability report.
// Runs `npm audit --json` in both projects and returns the full
// vulnerability objects (not just counts). npm audit exits non-zero when
// vulnerabilities exist, so the .catch() recovers its stdout.
router.get('/vulnerabilities/detailed', authenticate, requirePermission('security.view_audit'), readLimiter, async (req, res) => {
  try {
    const backendPath = path.join(__dirname, '..');
    const frontendPath = path.join(__dirname, '../../frontend');
    // Fix: pass the project directory via exec's `cwd` option instead of
    // interpolating it into a shell `cd ... &&` string, which broke on
    // paths containing spaces or shell metacharacters.
    const backendAudit = await execPromise('npm audit --json', {
      cwd: backendPath,
      timeout: 30000
    }).catch(e => ({ stdout: e.stdout }));
    const frontendAudit = await execPromise('npm audit --json', {
      cwd: frontendPath,
      timeout: 30000
    }).catch(e => ({ stdout: e.stdout }));
    const backendData = JSON.parse(backendAudit.stdout || '{}');
    const frontendData = JSON.parse(frontendAudit.stdout || '{}');
    res.json({
      backend: {
        vulnerabilities: backendData.vulnerabilities || {},
        metadata: backendData.metadata || {}
      },
      frontend: {
        vulnerabilities: frontendData.vulnerabilities || {},
        metadata: frontendData.metadata || {}
      },
      timestamp: new Date().toISOString()
    });
  } catch (error) {
    logger.error('Error fetching detailed vulnerabilities:', error);
    res.status(500).json({ error: 'Failed to fetch vulnerability details' });
  }
});
// Get security audit log with filtering.
// Query params: action, result, userId, startDate, endDate, limit (default
// 100), offset (default 0). Returns { logs, total, limit, offset }.
router.get('/audit-log', authenticate, requirePermission('security.view_audit'), readLimiter, async (req, res) => {
  try {
    const { action, result, userId, startDate, endDate, limit = 100, offset = 0 } = req.query;
    // Build the WHERE clause once so the page query and the COUNT query
    // always share identical filters and parameters.
    // Fix: the count query previously had no filter clauses but was handed
    // the filter parameters (params minus limit/offset), so any filtered
    // request either errored (total reported as 0) or the total ignored
    // the filters entirely.
    let where = ' WHERE 1=1';
    const filterParams = [];
    if (action) {
      where += ' AND action = ?';
      filterParams.push(action);
    }
    if (result) {
      where += ' AND result = ?';
      filterParams.push(result);
    }
    if (userId) {
      where += ' AND user_id = ?';
      filterParams.push(userId);
    }
    if (startDate) {
      where += ' AND timestamp >= ?';
      filterParams.push(startDate);
    }
    if (endDate) {
      where += ' AND timestamp <= ?';
      filterParams.push(endDate);
    }
    const query = 'SELECT * FROM security_audit_log' + where + ' ORDER BY timestamp DESC LIMIT ? OFFSET ?';
    const params = [...filterParams, parseInt(limit, 10), parseInt(offset, 10)];
    db.all(query, params, (err, rows) => {
      if (err) {
        logger.error('Error fetching audit log:', err);
        return res.status(500).json({ error: 'Failed to fetch audit log' });
      }
      // Get total count for pagination, using the same filters.
      const countQuery = 'SELECT COUNT(*) as total FROM security_audit_log' + where;
      db.get(countQuery, filterParams, (countErr, countRow) => {
        if (countErr) {
          logger.error('Error counting audit log:', countErr);
          return res.json({ logs: rows || [], total: 0 });
        }
        res.json({
          logs: rows || [],
          total: countRow?.total || 0,
          limit: parseInt(limit, 10),
          offset: parseInt(offset, 10)
        });
      });
    });
  } catch (error) {
    logger.error('Error fetching audit log:', error);
    res.status(500).json({ error: 'Failed to fetch audit log' });
  }
});
// Export audit log.
// Query params: format ('json' default, or 'csv'), startDate, endDate.
// Streams the filtered log back as a file attachment.
router.get('/audit-log/export', authenticate, requirePermission('security.view_audit'), readLimiter, async (req, res) => {
  try {
    const { format = 'json', startDate, endDate } = req.query;
    let query = 'SELECT * FROM security_audit_log WHERE 1=1';
    const params = [];
    if (startDate) {
      query += ' AND timestamp >= ?';
      params.push(startDate);
    }
    if (endDate) {
      query += ' AND timestamp <= ?';
      params.push(endDate);
    }
    query += ' ORDER BY timestamp DESC';
    db.all(query, params, (err, rows) => {
      if (err) {
        logger.error('Error exporting audit log:', err);
        return res.status(500).json({ error: 'Failed to export audit log' });
      }
      const isCsv = format === 'csv';
      res.setHeader('Content-Type', isCsv ? 'text/csv' : 'application/json');
      res.setHeader(
        'Content-Disposition',
        isCsv
          ? `attachment; filename=security-audit-${Date.now()}.csv`
          : `attachment; filename=security-audit-${Date.now()}.json`
      );
      if (isCsv) {
        res.send(convertToCSV(rows));
      } else {
        res.json(rows);
      }
    });
  } catch (error) {
    logger.error('Error exporting audit log:', error);
    res.status(500).json({ error: 'Failed to export audit log' });
  }
});
/**
 * Convert an array of flat row objects to an RFC 4180-style CSV string.
 * Column order follows the keys of the first row.
 *
 * Fixes over the previous version:
 *  - fields containing double quotes or newlines are now quoted (before,
 *    only commas triggered quoting and embedded quotes were not doubled,
 *    producing corrupt CSV);
 *  - null/undefined values are emitted as empty fields instead of relying
 *    on Array.prototype.join's implicit stringification;
 *  - header names themselves are escaped with the same rules.
 *
 * @param {Array<object>} data - rows to export; may be empty or null
 * @returns {string} CSV text ('' when there is nothing to export)
 */
function convertToCSV(data) {
  if (!data || data.length === 0) return '';
  const escapeField = (value) => {
    if (value === null || value === undefined) return '';
    const str = String(value);
    // Quote when the field contains a delimiter, quote, or line break;
    // embedded quotes are doubled per RFC 4180.
    return /[",\n\r]/.test(str) ? `"${str.replace(/"/g, '""')}"` : str;
  };
  const headers = Object.keys(data[0]);
  const csvRows = [headers.map(escapeField).join(',')];
  for (const row of data) {
    csvRows.push(headers.map((header) => escapeField(row[header])).join(','));
  }
  return csvRows.join('\n');
}
// Get security recommendations.
// Runs four independent health probes (locked accounts, stale passwords,
// recent failed logins, users without 2FA) and returns a recommendation
// entry for each finding. Each probe is best effort: db errors resolve to
// an empty result rather than failing the request.
router.get('/recommendations', authenticate, requirePermission('security.view_audit'), readLimiter, async (req, res) => {
  try {
    const recommendations = [];
    // Check for locked accounts (locked_until still in the future).
    const lockedAccounts = await new Promise((resolve) => {
      db.get(
        'SELECT COUNT(*) as count FROM users WHERE locked_until > ?',
        [new Date().toISOString()],
        (err, row) => resolve(row?.count || 0)
      );
    });
    if (lockedAccounts > 0) {
      recommendations.push({
        severity: 'warning',
        category: 'account_security',
        title: 'Locked Accounts',
        description: `${lockedAccounts} account(s) are currently locked due to failed login attempts`,
        action: 'Review locked accounts and consider unlocking legitimate users'
      });
    }
    // Check for users with old passwords (unchanged for over 90 days).
    const oldPasswords = await new Promise((resolve) => {
      db.all(
        `SELECT username, password_changed_at FROM users
         WHERE password_changed_at < datetime('now', '-90 days')`,
        [],
        (err, rows) => resolve(rows || [])
      );
    });
    if (oldPasswords.length > 0) {
      recommendations.push({
        severity: 'info',
        category: 'password_policy',
        title: 'Old Passwords',
        description: `${oldPasswords.length} user(s) haven't changed their password in over 90 days`,
        action: 'Encourage users to update their passwords regularly'
      });
    }
    // Check for recent failed logins (possible brute-force indicator).
    const recentFailures = await new Promise((resolve) => {
      db.get(
        `SELECT COUNT(*) as count FROM security_audit_log
         WHERE action = 'login' AND result = 'failed'
         AND timestamp > datetime('now', '-1 hour')`,
        [],
        (err, row) => resolve(row?.count || 0)
      );
    });
    // Threshold of 10 failures/hour before flagging as a high-severity issue.
    if (recentFailures > 10) {
      recommendations.push({
        severity: 'high',
        category: 'threat_detection',
        title: 'High Failed Login Rate',
        description: `${recentFailures} failed login attempts in the last hour`,
        action: 'Investigate potential brute-force attack'
      });
    }
    // Check for users without 2FA (no TOTP secret stored).
    const no2FA = await new Promise((resolve) => {
      db.get(
        'SELECT COUNT(*) as count FROM users WHERE two_factor_secret IS NULL',
        [],
        (err, row) => resolve(row?.count || 0)
      );
    });
    if (no2FA > 0) {
      recommendations.push({
        severity: 'warning',
        category: 'authentication',
        title: 'Two-Factor Authentication',
        description: `${no2FA} user(s) don't have 2FA enabled`,
        action: 'Encourage users to enable two-factor authentication'
      });
    }
    res.json({
      recommendations,
      timestamp: new Date().toISOString()
    });
  } catch (error) {
    logger.error('Error generating recommendations:', error);
    res.status(500).json({ error: 'Failed to generate recommendations' });
  }
});
module.exports = router;

File diff suppressed because it is too large Load diff

326
backend/routes/sessions.js Normal file
View file

@ -0,0 +1,326 @@
/**
* Session Management Routes
* Handles active session viewing, management, and termination
*/
const express = require('express');
const router = express.Router();
const { authenticate, requireAdmin } = require('../middleware/auth');
const { readLimiter, modifyLimiter } = require('../middleware/rateLimiter');
const { db } = require('../database/db');
const logger = require('../utils/logger');
const SecurityAuditLogger = require('../utils/securityAudit');
/**
 * Get all active sessions for the current user.
 * Flags the session matching the caller's bearer token as `isCurrent` and
 * strips raw session tokens from the response.
 */
router.get('/my-sessions', authenticate, readLimiter, async (req, res) => {
  try {
    const userId = req.user.userId;
    const currentToken = req.headers.authorization?.split(' ')[1];
    const sessionsSql = `SELECT
        id,
        session_token,
        ip_address,
        user_agent,
        created_at,
        last_activity,
        expires_at
      FROM active_sessions
      WHERE user_id = ? AND expires_at > ?
      ORDER BY last_activity DESC`;
    db.all(sessionsSql, [userId, new Date().toISOString()], (err, sessions) => {
      if (err) {
        logger.error('Error fetching sessions:', err);
        return res.status(500).json({ error: 'Failed to fetch sessions' });
      }
      // Mark the caller's own session and never expose full tokens.
      const sanitized = sessions.map((session) => ({
        ...session,
        isCurrent: session.session_token === currentToken,
        session_token: undefined
      }));
      res.json(sanitized);
    });
  } catch (error) {
    logger.error('Session fetch error:', error);
    res.status(500).json({ error: 'Failed to fetch sessions' });
  }
});
/**
 * GET /all
 * Admin-only listing of every non-expired session, joined with the owning
 * user's username and email, ordered by most recent activity.
 */
router.get('/all', authenticate, requireAdmin, readLimiter, async (req, res) => {
  try {
    const nowIso = new Date().toISOString();

    const onRows = (err, rows) => {
      if (err) {
        logger.error('Error fetching all sessions:', err);
        return res.status(500).json({ error: 'Failed to fetch sessions' });
      }
      res.json(rows);
    };

    db.all(
      `SELECT
        s.id,
        s.user_id,
        s.ip_address,
        s.user_agent,
        s.created_at,
        s.last_activity,
        s.expires_at,
        u.username,
        u.email
      FROM active_sessions s
      JOIN users u ON s.user_id = u.id
      WHERE s.expires_at > ?
      ORDER BY s.last_activity DESC`,
      [nowIso],
      onRows
    );
  } catch (error) {
    logger.error('Session fetch error:', error);
    res.status(500).json({ error: 'Failed to fetch sessions' });
  }
});
/**
 * Terminate a specific session
 *
 * DELETE /:sessionId
 * Ownership rules: a user may kill only their own sessions; an admin may kill
 * any. The session whose token authorized this very request cannot be killed
 * here (the client must use logout), so the auth token stays valid.
 */
router.delete('/:sessionId', authenticate, modifyLimiter, async (req, res) => {
  try {
    const { sessionId } = req.params;
    const userId = req.user.userId;
    // Bearer token of the caller's own session — used below to refuse
    // terminating the session that is making this request.
    const currentToken = req.headers.authorization?.split(' ')[1];
    // Validate session ID
    if (isNaN(parseInt(sessionId))) {
      return res.status(400).json({ error: 'Invalid session ID' });
    }
    // Get session details first (needed for the ownership check and to record
    // the terminated session's IP in the audit log).
    db.get(
      'SELECT * FROM active_sessions WHERE id = ?',
      [sessionId],
      async (err, session) => {
        if (err) {
          logger.error('Error fetching session:', err);
          return res.status(500).json({ error: 'Failed to terminate session' });
        }
        if (!session) {
          return res.status(404).json({ error: 'Session not found' });
        }
        // Only allow users to terminate their own sessions (or admins to terminate any)
        if (session.user_id !== userId && req.user.role !== 'admin') {
          return res.status(403).json({ error: 'Permission denied' });
        }
        // Prevent terminating current session
        if (session.session_token === currentToken) {
          return res.status(400).json({ error: 'Cannot terminate current session. Use logout instead.' });
        }
        // Terminate session
        db.run(
          'DELETE FROM active_sessions WHERE id = ?',
          [sessionId],
          async (err) => {
            if (err) {
              logger.error('Error terminating session:', err);
              return res.status(500).json({ error: 'Failed to terminate session' });
            }
            // Log the event (audit trail: who killed which session, from where)
            await SecurityAuditLogger.logSessionEvent('SESSION_TERMINATED', userId, {
              ip: req.ip || req.headers['x-forwarded-for'],
              userAgent: req.headers['user-agent'],
              sessionId: sessionId,
              terminatedSessionIp: session.ip_address
            });
            logger.info(`Session ${sessionId} terminated by user ${userId}`);
            res.json({ message: 'Session terminated successfully' });
          }
        );
      }
    );
  } catch (error) {
    logger.error('Session termination error:', error);
    res.status(500).json({ error: 'Failed to terminate session' });
  }
});
/**
 * POST /terminate-all-others
 * Delete every session belonging to the caller except the one whose bearer
 * token authorized this request; responds with how many were removed.
 */
router.post('/terminate-all-others', authenticate, modifyLimiter, async (req, res) => {
  try {
    const requesterId = req.user.userId;
    const keepToken = req.headers.authorization?.split(' ')[1];

    db.run(
      'DELETE FROM active_sessions WHERE user_id = ? AND session_token != ?',
      [requesterId, keepToken],
      // Plain function: sqlite3 exposes the affected-row count on `this.changes`.
      async function (err) {
        if (err) {
          logger.error('Error terminating sessions:', err);
          return res.status(500).json({ error: 'Failed to terminate sessions' });
        }
        const removed = this.changes;
        // Audit trail for the bulk termination.
        await SecurityAuditLogger.logSessionEvent('SESSIONS_TERMINATED_BULK', requesterId, {
          ip: req.ip || req.headers['x-forwarded-for'],
          userAgent: req.headers['user-agent'],
          count: removed
        });
        logger.info(`User ${requesterId} terminated ${removed} other sessions`);
        res.json({
          message: `${removed} session(s) terminated successfully`,
          count: removed
        });
      }
    );
  } catch (error) {
    logger.error('Bulk session termination error:', error);
    res.status(500).json({ error: 'Failed to terminate sessions' });
  }
});
/**
 * POST /force-logout/:userId (admin only)
 * Terminates every session of the target user. Looks the user up first so the
 * response and log can include the username, then deletes all their sessions.
 */
router.post('/force-logout/:userId', authenticate, requireAdmin, modifyLimiter, async (req, res) => {
  try {
    const { userId } = req.params;

    // Reject non-numeric target IDs up front.
    if (isNaN(parseInt(userId))) {
      return res.status(400).json({ error: 'Invalid user ID' });
    }

    db.get(
      'SELECT username FROM users WHERE id = ?',
      [userId],
      async (err, user) => {
        // Lookup failure and "no such user" are reported identically.
        if (err || !user) {
          return res.status(404).json({ error: 'User not found' });
        }
        db.run(
          'DELETE FROM active_sessions WHERE user_id = ?',
          [userId],
          // Plain function so sqlite3's `this.changes` row count is available.
          async function (err) {
            if (err) {
              logger.error('Error force logging out user:', err);
              return res.status(500).json({ error: 'Failed to force logout' });
            }
            const terminatedCount = this.changes;
            // Audit: record both the target user and the acting admin.
            await SecurityAuditLogger.logSessionEvent('FORCE_LOGOUT', userId, {
              ip: req.ip || req.headers['x-forwarded-for'],
              userAgent: req.headers['user-agent'],
              adminId: req.user.userId,
              count: terminatedCount
            });
            logger.warn(`Admin ${req.user.userId} force logged out user ${userId} (${user.username}), terminated ${terminatedCount} sessions`);
            res.json({
              message: `User ${user.username} has been logged out`,
              count: terminatedCount
            });
          }
        );
      }
    );
  } catch (error) {
    logger.error('Force logout error:', error);
    res.status(500).json({ error: 'Failed to force logout' });
  }
});
/**
 * Get session statistics (admin only)
 *
 * GET /stats
 * Runs three queries sequentially (nested because the sqlite3 driver is
 * callback-based): total active sessions, top-10 users by active session
 * count, and the 20 most recently created sessions. All three filter on
 * expires_at > the same "now" snapshot.
 */
router.get('/stats', authenticate, requireAdmin, readLimiter, async (req, res) => {
  try {
    // Snapshot "now" once so every query uses the same expiry cutoff.
    const now = new Date().toISOString();
    // Total active sessions
    db.get(
      'SELECT COUNT(*) as total FROM active_sessions WHERE expires_at > ?',
      [now],
      (err, totalResult) => {
        if (err) {
          logger.error('Error fetching session stats:', err);
          return res.status(500).json({ error: 'Failed to fetch statistics' });
        }
        // Sessions by user. LEFT JOIN keeps users with zero active sessions in
        // the result set; those zero-count rows are filtered out of the
        // response payload below.
        db.all(
          `SELECT u.username, u.email, COUNT(s.id) as session_count
          FROM users u
          LEFT JOIN active_sessions s ON u.id = s.user_id AND s.expires_at > ?
          GROUP BY u.id
          ORDER BY session_count DESC
          LIMIT 10`,
          [now],
          (err, userSessions) => {
            if (err) {
              logger.error('Error fetching user sessions:', err);
              return res.status(500).json({ error: 'Failed to fetch statistics' });
            }
            // Recent sessions (newest 20 by creation time)
            db.all(
              `SELECT s.*, u.username
              FROM active_sessions s
              JOIN users u ON s.user_id = u.id
              WHERE s.expires_at > ?
              ORDER BY s.created_at DESC
              LIMIT 20`,
              [now],
              (err, recentSessions) => {
                if (err) {
                  logger.error('Error fetching recent sessions:', err);
                  return res.status(500).json({ error: 'Failed to fetch statistics' });
                }
                res.json({
                  totalActiveSessions: totalResult.total,
                  topUsers: userSessions.filter(u => u.session_count > 0),
                  // Only safe display fields are exposed — no session tokens.
                  recentSessions: recentSessions.map(s => ({
                    username: s.username,
                    ip: s.ip_address,
                    created: s.created_at,
                    lastActive: s.last_activity
                  }))
                });
              }
            );
          }
        );
      }
    );
  } catch (error) {
    logger.error('Session stats error:', error);
    res.status(500).json({ error: 'Failed to fetch statistics' });
  }
});
module.exports = router;

134
backend/routes/settings.js Normal file
View file

@ -0,0 +1,134 @@
const express = require('express');
const router = express.Router();
const { authenticate } = require('../middleware/auth');
const { modifyLimiter, readLimiter } = require('../middleware/rateLimiter');
const { db } = require('../database/db');
const { validateSettings } = require('../middleware/inputValidation');
const logger = require('../utils/logger');
const SecurityAuditLogger = require('../utils/securityAudit');
// Get user settings
// Returns every settings row for the caller collapsed into one object;
// values stored as JSON are decoded, anything else passes through verbatim.
router.get('/', authenticate, readLimiter, async (req, res) => {
  const ip = req.ip || req.headers['x-forwarded-for'] || req.connection.remoteAddress;
  const userAgent = req.headers['user-agent'];
  db.all(
    'SELECT key, value FROM settings WHERE user_id = ?',
    [req.user.userId],
    async (err, settings) => {
      if (err) {
        return res.status(500).json({ error: 'Failed to fetch settings' });
      }
      // CWE-778: Log sensitive data access
      await SecurityAuditLogger.logSensitiveDataAccess(req.user.userId, 'settings', {
        ip,
        userAgent,
        recordCount: settings.length,
        scope: 'own',
        accessMethod: 'view'
      });
      // Fold the rows into a key -> value map.
      const result = {};
      for (const row of settings) {
        try {
          result[row.key] = JSON.parse(row.value);
        } catch {
          result[row.key] = row.value;
        }
      }
      res.json(result);
    }
  );
});
// Update setting
// Upserts a single setting for the caller. Non-string values are serialized
// to JSON before storage (the GET handlers attempt JSON.parse on read).
// Fix: validate the key format, mirroring the check GET /:key already does —
// previously any string was accepted as a key here.
router.put('/:key', authenticate, modifyLimiter, validateSettings, (req, res) => {
  const { key } = req.params;
  const { value } = req.body;
  // Same allow-list as GET /:key; rejects path-like or exotic keys.
  if (!key || !/^[a-zA-Z0-9_.-]+$/.test(key)) {
    return res.status(400).json({ error: 'Invalid setting key' });
  }
  const jsonValue = typeof value === 'string' ? value : JSON.stringify(value);
  db.run(
    `INSERT INTO settings (user_id, key, value, updated_at)
    VALUES (?, ?, ?, CURRENT_TIMESTAMP)
    ON CONFLICT(user_id, key)
    DO UPDATE SET value = ?, updated_at = CURRENT_TIMESTAMP`,
    [req.user.userId, key, jsonValue, jsonValue],
    function(err) {
      if (err) {
        return res.status(500).json({ error: 'Failed to update setting' });
      }
      // Echo back the original (pre-serialization) value.
      res.json({ key, value });
    }
  );
});
// Get specific setting
// 'stream_settings' is special-cased: a missing row yields built-in defaults
// instead of a 404. Stored values are JSON-decoded when possible.
router.get('/:key', authenticate, readLimiter, (req, res) => {
  const { key } = req.params;
  // Validate key format
  if (!key || !/^[a-zA-Z0-9_.-]+$/.test(key)) {
    return res.status(400).json({ error: 'Invalid setting key' });
  }
  // Shared responder: decode stored JSON, fall back to the raw string.
  const sendParsed = (raw) => {
    try {
      res.json({ value: JSON.parse(raw) });
    } catch {
      res.json({ value: raw });
    }
  };
  // Handle stream_settings specially - return defaults if not found
  if (key === 'stream_settings') {
    db.get(
      'SELECT value FROM settings WHERE user_id = ? AND key = ?',
      [req.user.userId, key],
      (err, setting) => {
        if (err) {
          return res.status(500).json({ error: 'Failed to fetch setting' });
        }
        if (!setting) {
          return res.json({
            value: {
              hwaccel: 'auto',
              hwaccel_device: '/dev/dri/renderD128',
              codec: 'h264',
              preset: 'veryfast',
              buffer_size: '2M',
              max_bitrate: '8M'
            }
          });
        }
        sendParsed(setting.value);
      }
    );
    return;
  }
  db.get(
    'SELECT value FROM settings WHERE user_id = ? AND key = ?',
    [req.user.userId, key],
    (err, setting) => {
      if (err) {
        return res.status(500).json({ error: 'Failed to fetch setting' });
      }
      if (!setting) {
        return res.status(404).json({ error: 'Setting not found' });
      }
      sendParsed(setting.value);
    }
  );
});
module.exports = router;

425
backend/routes/siem.js Normal file
View file

@ -0,0 +1,425 @@
/**
* SIEM (Security Information and Event Management) API Routes
* Provides access to centralized log aggregation, security intelligence,
* anomaly detection, and real-time alerts
*/
const express = require('express');
const router = express.Router();
const { authenticate } = require('../middleware/auth');
const { requirePermission } = require('../middleware/rbac');
const { validatePagination, validateIdParam } = require('../middleware/inputValidation');
const logAggregator = require('../utils/logAggregator');
const securityIntelligence = require('../utils/securityIntelligence');
const alertSystem = require('../utils/alertSystem');
const logger = require('../utils/logger');
/**
 * GET /api/siem/logs
 * Query aggregated logs with filtering (source/level/category/user/date
 * range, plus limit/offset pagination). The query itself is audit-logged.
 * Fix: numeric query params now parsed with an explicit radix.
 */
router.get('/logs',
  authenticate,
  requirePermission('security.view_audit'),
  validatePagination,
  async (req, res) => {
    try {
      const filters = {
        source: req.query.source,
        level: req.query.level,
        category: req.query.category,
        userId: req.query.userId ? Number.parseInt(req.query.userId, 10) : undefined,
        startDate: req.query.startDate,
        endDate: req.query.endDate,
        limit: req.query.limit ? Number.parseInt(req.query.limit, 10) : 100,
        offset: req.query.offset ? Number.parseInt(req.query.offset, 10) : 0
      };
      const logs = await logAggregator.query(filters);
      // Record that the SIEM store itself was queried (who + with what filters).
      logAggregator.aggregate('siem', 'info', 'access', 'SIEM logs queried', {
        userId: req.user.id,
        filters
      });
      res.json({
        success: true,
        data: logs,
        filters,
        count: logs.length
      });
    } catch (error) {
      logger.error('[SIEM API] Error querying logs:', error);
      res.status(500).json({
        success: false,
        message: 'Failed to query logs',
        error: error.message
      });
    }
  }
);
/**
 * POST /api/siem/logs/verify
 * Verify log integrity (check for tampering). An empty/omitted `logIds`
 * selection means "verify the entire store". The verification run is itself
 * recorded in the aggregated log.
 */
router.post('/logs/verify',
  authenticate,
  requirePermission('security.manage'),
  async (req, res) => {
    try {
      const ids = req.body.logIds || [];
      // null tells verifyIntegrity to sweep everything.
      const scope = ids.length > 0 ? ids : null;
      const result = await logAggregator.verifyIntegrity(scope);
      logAggregator.aggregate('siem', 'info', 'security', 'Log integrity verification performed', {
        userId: req.user.id,
        checkedCount: result.total,
        tamperedCount: result.tampered
      });
      res.json({ success: true, data: result });
    } catch (error) {
      logger.error('[SIEM API] Error verifying logs:', error);
      res.status(500).json({
        success: false,
        message: 'Failed to verify log integrity',
        error: error.message
      });
    }
  }
);
/**
 * GET /api/siem/statistics
 * Get log statistics over a rolling window (`timeRange` query param, hours;
 * default 24). Fix: parse the hours value with an explicit radix.
 */
router.get('/statistics',
  authenticate,
  requirePermission('security.view_audit'),
  async (req, res) => {
    try {
      const timeRange = req.query.timeRange ? Number.parseInt(req.query.timeRange, 10) : 24;
      const stats = await logAggregator.getStatistics(timeRange);
      res.json({
        success: true,
        data: stats,
        timeRange: `${timeRange} hours`
      });
    } catch (error) {
      logger.error('[SIEM API] Error getting statistics:', error);
      res.status(500).json({
        success: false,
        message: 'Failed to get statistics',
        error: error.message
      });
    }
  }
);
/**
 * GET /api/siem/export
 * Export logs in JSON or CSV format as a file download. The export is
 * audit-logged with the requesting user and the filters used.
 * Fix: the Content-Disposition header contained the literal text
 * `$(unknown)` — the computed `filename` variable was never interpolated,
 * so every download arrived with a junk name. Now uses `${filename}`.
 */
router.get('/export',
  authenticate,
  requirePermission('security.manage'),
  async (req, res) => {
    try {
      const format = req.query.format || 'json';
      const filters = {
        source: req.query.source,
        level: req.query.level,
        startDate: req.query.startDate,
        endDate: req.query.endDate
      };
      const exportData = await logAggregator.export(filters, format);
      logAggregator.aggregate('siem', 'info', 'security', 'Logs exported', {
        userId: req.user.id,
        format,
        filters
      });
      const contentType = format === 'csv' ? 'text/csv' : 'application/json';
      // filename is generated server-side from a timestamp — no user input.
      const filename = `siem_export_${Date.now()}.${format}`;
      res.setHeader('Content-Type', contentType);
      res.setHeader('Content-Disposition', `attachment; filename="${filename}"`);
      res.send(exportData);
    } catch (error) {
      logger.error('[SIEM API] Error exporting logs:', error);
      res.status(500).json({
        success: false,
        message: 'Failed to export logs',
        error: error.message
      });
    }
  }
);
/**
 * GET /api/siem/anomalies
 * Get detected anomalies, filtered by status (default 'open'), severity and
 * type, with limit/offset pagination.
 * Fix: numeric query params now parsed with an explicit radix.
 */
router.get('/anomalies',
  authenticate,
  requirePermission('security.view_audit'),
  validatePagination,
  async (req, res) => {
    try {
      const filters = {
        status: req.query.status || 'open',
        severity: req.query.severity,
        type: req.query.type,
        limit: req.query.limit ? Number.parseInt(req.query.limit, 10) : 100,
        offset: req.query.offset ? Number.parseInt(req.query.offset, 10) : 0
      };
      const anomalies = await securityIntelligence.getAnomalies(filters);
      res.json({
        success: true,
        data: anomalies,
        count: anomalies.length
      });
    } catch (error) {
      logger.error('[SIEM API] Error getting anomalies:', error);
      res.status(500).json({
        success: false,
        message: 'Failed to get anomalies',
        error: error.message
      });
    }
  }
);
/**
 * POST /api/siem/anomalies/:id/resolve
 * Mark an anomaly as resolved, attributing the resolution to the calling
 * user with optional free-text notes; the resolution is audit-logged.
 */
router.post('/anomalies/:id/resolve',
  authenticate,
  requirePermission('security.manage'),
  validateIdParam,
  async (req, res) => {
    try {
      const anomalyId = req.params.id;
      // Notes are optional; default to an empty string.
      const notes = req.body.notes || '';
      await securityIntelligence.resolveAnomaly(anomalyId, req.user.id, notes);
      logAggregator.aggregate('siem', 'info', 'security', 'Anomaly resolved', {
        userId: req.user.id,
        anomalyId,
        notes
      });
      res.json({
        success: true,
        message: 'Anomaly resolved successfully'
      });
    } catch (error) {
      logger.error('[SIEM API] Error resolving anomaly:', error);
      res.status(500).json({
        success: false,
        message: 'Failed to resolve anomaly',
        error: error.message
      });
    }
  }
);
/**
 * GET /api/siem/threats
 * Get threat intelligence data filtered by level/type with a result limit.
 * Fix: `limit` now parsed with an explicit radix.
 */
router.get('/threats',
  authenticate,
  requirePermission('security.view_audit'),
  validatePagination,
  async (req, res) => {
    try {
      const filters = {
        level: req.query.level,
        type: req.query.type,
        limit: req.query.limit ? Number.parseInt(req.query.limit, 10) : 100
      };
      const threats = await securityIntelligence.getThreatIntelligence(filters);
      res.json({
        success: true,
        data: threats,
        count: threats.length
      });
    } catch (error) {
      logger.error('[SIEM API] Error getting threats:', error);
      res.status(500).json({
        success: false,
        message: 'Failed to get threat intelligence',
        error: error.message
      });
    }
  }
);
/**
 * GET /api/siem/alerts
 * Get security alerts filtered by status (default 'active') and severity,
 * with limit/offset pagination.
 * Fix: numeric query params now parsed with an explicit radix.
 */
router.get('/alerts',
  authenticate,
  requirePermission('security.view_audit'),
  validatePagination,
  async (req, res) => {
    try {
      const filters = {
        status: req.query.status || 'active',
        severity: req.query.severity,
        limit: req.query.limit ? Number.parseInt(req.query.limit, 10) : 100,
        offset: req.query.offset ? Number.parseInt(req.query.offset, 10) : 0
      };
      const alerts = await alertSystem.getAlerts(filters);
      res.json({
        success: true,
        data: alerts,
        count: alerts.length
      });
    } catch (error) {
      logger.error('[SIEM API] Error getting alerts:', error);
      res.status(500).json({
        success: false,
        message: 'Failed to get alerts',
        error: error.message
      });
    }
  }
);
/**
 * POST /api/siem/alerts/:id/acknowledge
 * Acknowledge an alert on behalf of the calling user, with optional notes.
 */
router.post('/alerts/:id/acknowledge',
  authenticate,
  requirePermission('security.view_audit'),
  validateIdParam,
  async (req, res) => {
    try {
      const { id: alertId } = req.params;
      const notes = req.body.notes || '';
      await alertSystem.acknowledgeAlert(alertId, req.user.id, notes);
      res.json({
        success: true,
        message: 'Alert acknowledged successfully'
      });
    } catch (error) {
      logger.error('[SIEM API] Error acknowledging alert:', error);
      res.status(500).json({
        success: false,
        message: 'Failed to acknowledge alert',
        error: error.message
      });
    }
  }
);
/**
 * POST /api/siem/alerts/:id/resolve
 * Resolve an alert on behalf of the calling user, with optional notes.
 * Fix: `notes` now defaults to '' like the sibling acknowledge/resolve
 * handlers — previously `undefined` was passed through when the body had
 * no notes field.
 */
router.post('/alerts/:id/resolve',
  authenticate,
  requirePermission('security.manage'),
  validateIdParam,
  async (req, res) => {
    try {
      const alertId = req.params.id;
      const notes = req.body.notes || '';
      await alertSystem.resolveAlert(alertId, req.user.id, notes);
      res.json({
        success: true,
        message: 'Alert resolved successfully'
      });
    } catch (error) {
      logger.error('[SIEM API] Error resolving alert:', error);
      res.status(500).json({
        success: false,
        message: 'Failed to resolve alert',
        error: error.message
      });
    }
  }
);
/**
 * GET /api/siem/dashboard
 * Comprehensive security-intelligence dashboard payload: the intelligence
 * dashboard data merged with alert-system statistics. Both sources are
 * fetched concurrently.
 */
router.get('/dashboard',
  authenticate,
  requirePermission('security.view_audit'),
  async (req, res) => {
    try {
      // Independent lookups — run them in parallel.
      const [intel, alertStats] = await Promise.all([
        securityIntelligence.getDashboardData(),
        alertSystem.getStatistics()
      ]);
      res.json({
        success: true,
        data: { ...intel, alertStats }
      });
    } catch (error) {
      logger.error('[SIEM API] Error getting dashboard data:', error);
      res.status(500).json({
        success: false,
        message: 'Failed to get dashboard data',
        error: error.message
      });
    }
  }
);
/**
 * GET /api/siem/alert-rules
 * List the alert rules currently held in the alert system's in-memory
 * rule map.
 */
router.get('/alert-rules',
  authenticate,
  requirePermission('security.manage'),
  async (req, res) => {
    try {
      // Materialize the Map's values into a plain array for JSON output.
      const ruleList = [...alertSystem.alertRules.values()];
      res.json({
        success: true,
        data: ruleList,
        count: ruleList.length
      });
    } catch (error) {
      logger.error('[SIEM API] Error getting alert rules:', error);
      res.status(500).json({
        success: false,
        message: 'Failed to get alert rules',
        error: error.message
      });
    }
  }
);
module.exports = router;

243
backend/routes/stats.js Normal file
View file

@ -0,0 +1,243 @@
const express = require('express');
const router = express.Router();
const { authenticate, requireAdmin } = require('../middleware/auth');
const { readLimiter } = require('../middleware/rateLimiter');
const { db } = require('../database/db');
const os = require('os');
const logger = require('../utils/logger');
/**
 * Get overall statistics (admin only)
 *
 * GET /overview
 * Aggregates platform-wide counts (users, channels, playlists, watch
 * history), channel health buckets, and process/host resource usage into a
 * single response. Queries run sequentially via nested callbacks; individual
 * query errors degrade to zero counts rather than failing the request.
 * Fix: the channel-health db.all callback called rows.forEach without
 * guarding against a query error (rows === undefined -> TypeError, request
 * hangs). Now falls back to an empty list, matching the best-effort style of
 * the other queries here.
 */
router.get('/overview', readLimiter, authenticate, requireAdmin, (req, res) => {
  const stats = {};
  // Get total counts (each ?.count || 0 tolerates a failed query)
  db.get('SELECT COUNT(*) as count FROM users', [], (err, result) => {
    stats.totalUsers = result?.count || 0;
    db.get('SELECT COUNT(*) as count FROM channels WHERE is_active = 1', [], (err, result) => {
      stats.totalChannels = result?.count || 0;
      db.get('SELECT COUNT(*) as count FROM channels WHERE is_radio = 1 AND is_active = 1', [], (err, result) => {
        stats.totalRadioChannels = result?.count || 0;
        // TV channels are derived, not queried separately.
        stats.totalTvChannels = stats.totalChannels - stats.totalRadioChannels;
        db.get('SELECT COUNT(*) as count FROM playlists', [], (err, result) => {
          stats.totalPlaylists = result?.count || 0;
          db.get('SELECT COUNT(*) as count FROM watch_history', [], (err, result) => {
            stats.totalWatchHistory = result?.count || 0;
            // Channel health stats
            db.all(`
              SELECT health_status, COUNT(*) as count
              FROM channels
              WHERE is_active = 1
              GROUP BY health_status
            `, [], (err, rows) => {
              stats.channelHealth = {
                healthy: 0,
                degraded: 0,
                dead: 0,
                unknown: 0
              };
              // Guard: on query error rows is undefined — keep zero buckets.
              (rows || []).forEach(row => {
                if (row.health_status) {
                  stats.channelHealth[row.health_status] = row.count;
                }
              });
              // System resource usage (Node process + host OS)
              stats.system = {
                cpuUsage: process.cpuUsage(),
                memoryUsage: process.memoryUsage(),
                uptime: process.uptime(),
                platform: os.platform(),
                totalMemory: os.totalmem(),
                freeMemory: os.freemem(),
                cpuCount: os.cpus().length,
                loadAverage: os.loadavg()
              };
              res.json(stats);
            });
          });
        });
      });
    });
  });
});
/**
 * Get most watched channels across all users (admin only)
 *
 * GET /top-channels?limit=20&days=30
 * Ranks channels by watch count within the trailing `days` window.
 * Fix: both numeric query params are now parsed with an explicit radix and
 * fall back to their documented defaults when absent or non-numeric —
 * previously `limit=abc` produced NaN in the SQL LIMIT and `days` was passed
 * through as a raw string.
 */
router.get('/top-channels', readLimiter, authenticate, requireAdmin, (req, res) => {
  const limit = Number.parseInt(req.query.limit, 10) || 20;
  const days = Number.parseInt(req.query.days, 10) || 30;
  // Both values are bound as parameters — never interpolated into the SQL.
  db.all(`
    SELECT
      c.id,
      c.name,
      c.logo,
      c.custom_logo,
      c.group_name,
      c.is_radio,
      COUNT(wh.id) as watch_count,
      COUNT(DISTINCT wh.user_id) as unique_users,
      SUM(wh.duration) as total_duration,
      AVG(wh.duration) as avg_duration,
      MAX(wh.watched_at) as last_watched
    FROM watch_history wh
    INNER JOIN channels c ON wh.channel_id = c.id
    WHERE wh.watched_at >= datetime('now', '-' || ? || ' days')
    GROUP BY c.id
    ORDER BY watch_count DESC
    LIMIT ?
  `, [days, limit], (err, rows) => {
    if (err) {
      logger.error('Error fetching top channels:', err);
      return res.status(500).json({ error: 'Failed to fetch top channels' });
    }
    res.json(rows);
  });
});
/**
 * Get usage patterns by hour (admin only)
 *
 * GET /usage-by-hour?days=7
 * Buckets watch-history rows from the trailing window by hour of day (0-23),
 * reporting view count and distinct viewers per bucket.
 */
router.get('/usage-by-hour', readLimiter, authenticate, requireAdmin, (req, res) => {
  const { days = 7 } = req.query;
  const onRows = (err, buckets) => {
    if (err) {
      logger.error('Error fetching usage by hour:', err);
      return res.status(500).json({ error: 'Failed to fetch usage by hour' });
    }
    res.json(buckets);
  };
  db.all(`
    SELECT
      CAST(strftime('%H', watched_at) AS INTEGER) as hour,
      COUNT(*) as view_count,
      COUNT(DISTINCT user_id) as unique_users
    FROM watch_history
    WHERE watched_at >= datetime('now', '-' || ? || ' days')
    GROUP BY hour
    ORDER BY hour
  `, [days], onRows);
});
/**
 * Get usage trends (admin only)
 *
 * GET /trends?days=30
 * Daily aggregates (views, unique users, unique channels, total duration)
 * over the trailing window, newest day first.
 */
router.get('/trends', readLimiter, authenticate, requireAdmin, (req, res) => {
  const { days = 30 } = req.query;
  const onRows = (err, daily) => {
    if (err) {
      logger.error('Error fetching trends:', err);
      return res.status(500).json({ error: 'Failed to fetch trends' });
    }
    res.json(daily);
  };
  db.all(`
    SELECT
      DATE(watched_at) as date,
      COUNT(*) as view_count,
      COUNT(DISTINCT user_id) as unique_users,
      COUNT(DISTINCT channel_id) as unique_channels,
      SUM(duration) as total_duration
    FROM watch_history
    WHERE watched_at >= datetime('now', '-' || ? || ' days')
    GROUP BY date
    ORDER BY date DESC
  `, [days], onRows);
});
/**
 * Get user activity stats (admin only)
 *
 * GET /user-activity?days=30
 * Per-user watch totals over the trailing window. LEFT JOIN keeps active
 * users with zero watch history in the listing.
 */
router.get('/user-activity', readLimiter, authenticate, requireAdmin, (req, res) => {
  const { days = 30 } = req.query;
  const onRows = (err, users) => {
    if (err) {
      logger.error('Error fetching user activity:', err);
      return res.status(500).json({ error: 'Failed to fetch user activity' });
    }
    res.json(users);
  };
  db.all(`
    SELECT
      u.id,
      u.username,
      u.email,
      COUNT(wh.id) as watch_count,
      SUM(wh.duration) as total_duration,
      MAX(wh.watched_at) as last_active
    FROM users u
    LEFT JOIN watch_history wh ON u.id = wh.user_id
      AND wh.watched_at >= datetime('now', '-' || ? || ' days')
    WHERE u.is_active = 1
    GROUP BY u.id
    ORDER BY watch_count DESC
  `, [days], onRows);
});
/**
 * Get current user's personal stats
 *
 * GET /my-stats?days=30
 * Three sequential queries (nested callbacks): overview totals, the user's
 * most-watched channel group, and a watch-count histogram by day of week.
 * Only the first query aborts the request on error; the two inner queries
 * are best-effort and degrade to null / empty list.
 */
router.get('/my-stats', readLimiter, authenticate, (req, res) => {
  const user_id = req.user.userId;
  const { days = 30 } = req.query;
  const stats = {};
  // Total watch count
  db.get(`
    SELECT
      COUNT(*) as watch_count,
      SUM(duration) as total_duration,
      COUNT(DISTINCT channel_id) as unique_channels
    FROM watch_history
    WHERE user_id = ?
      AND watched_at >= datetime('now', '-' || ? || ' days')
  `, [user_id, days], (err, result) => {
    if (err) {
      return res.status(500).json({ error: 'Failed to fetch stats' });
    }
    stats.overview = result;
    // Most watched group (best-effort: on error favGroup is undefined and
    // favoriteGroup ends up null via the optional chain)
    db.get(`
      SELECT c.group_name, COUNT(*) as count
      FROM watch_history wh
      INNER JOIN channels c ON wh.channel_id = c.id
      WHERE wh.user_id = ?
        AND wh.watched_at >= datetime('now', '-' || ? || ' days')
        AND c.group_name IS NOT NULL
      GROUP BY c.group_name
      ORDER BY count DESC
      LIMIT 1
    `, [user_id, days], (err, favGroup) => {
      stats.favoriteGroup = favGroup?.group_name || null;
      // Watch by day of week (0 = Sunday per strftime('%w'); best-effort,
      // falls back to an empty array on error)
      db.all(`
        SELECT
          CAST(strftime('%w', watched_at) AS INTEGER) as day_of_week,
          COUNT(*) as count
        FROM watch_history
        WHERE user_id = ?
          AND watched_at >= datetime('now', '-' || ? || ' days')
        GROUP BY day_of_week
        ORDER BY day_of_week
      `, [user_id, days], (err, weekData) => {
        stats.byDayOfWeek = weekData || [];
        res.json(stats);
      });
    });
  });
});
module.exports = router;

418
backend/routes/stream.js Normal file
View file

@ -0,0 +1,418 @@
const express = require('express');
const router = express.Router();
const { spawn, exec } = require('child_process');
const axios = require('axios');
const https = require('https');
const { SocksProxyAgent } = require('socks-proxy-agent');
const { HttpsProxyAgent } = require('https-proxy-agent');
const { authenticate } = require('../middleware/auth');
const { heavyLimiter, readLimiter } = require('../middleware/rateLimiter');
const { db } = require('../database/db');
const logger = require('../utils/logger');
// HTTPS agent to bypass SSL certificate verification for IPTV streams.
// SECURITY NOTE(review): rejectUnauthorized:false disables TLS certificate
// checking for every upstream request made with this agent, exposing proxied
// traffic to man-in-the-middle interception. Presumably deliberate to
// tolerate IPTV hosts with self-signed/broken certs — confirm this tradeoff.
const httpsAgent = new https.Agent({
  rejectUnauthorized: false
});
// Check if user has active VPN connection and return appropriate agent.
// The same TLS-lenient agent is returned either way: when a VPN row exists,
// the OS routing table (not the agent) carries traffic over the VPN
// interface, so only an informational log differs.
const getVPNAgent = async (userId) => {
  return new Promise((resolve) => {
    db.get(
      'SELECT connected FROM vpn_settings WHERE user_id = ? AND connected = 1',
      [userId],
      (err, row) => {
        if (!err && row) {
          logger.info('VPN active for user, traffic will route through VPN');
        }
        resolve(httpsAgent);
      }
    );
  });
};
// Check hardware acceleration availability.
// Probes the host for supported video-acceleration back-ends and returns a
// boolean capability map. videotoolbox is never probed here and stays false.
const checkHardwareAcceleration = () => {
  const caps = {
    quicksync: false,
    nvenc: false,
    vaapi: false,
    videotoolbox: false
  };
  // Intel Quick Sync / VA-API: presence of the DRM render node.
  try {
    const fs = require('fs');
    if (fs.existsSync('/dev/dri/renderD128')) {
      caps.quicksync = true;
      caps.vaapi = true;
    }
  } catch (err) {
    logger.debug('Quick Sync not available');
  }
  // NVIDIA NVENC: nvidia-smi exiting 0 indicates a usable driver.
  try {
    const { execSync } = require('child_process');
    execSync('nvidia-smi', { stdio: 'ignore' });
    caps.nvenc = true;
  } catch (err) {
    logger.debug('NVENC not available');
  }
  return caps;
};
// Get hardware acceleration capabilities
// Fresh probe on every request — results are not cached.
router.get('/capabilities', authenticate, readLimiter, (req, res) => {
  res.json(checkHardwareAcceleration());
});
// Get user's stream settings.
// Loads the persisted 'stream_settings' row for a user, falling back to safe
// defaults when the row is missing, the query fails, or the stored value is
// not valid JSON. `callback` is invoked exactly once with a settings object.
// Fix: the default settings object was duplicated verbatim in two branches —
// now a single local constant is the source of truth (a fresh copy is handed
// out each time so callers may mutate their object freely).
const getStreamSettings = (userId, callback) => {
  const DEFAULT_STREAM_SETTINGS = Object.freeze({
    hwaccel: 'auto',
    hwaccel_device: '/dev/dri/renderD128',
    codec: 'h264',
    preset: 'veryfast',
    buffer_size: '2M',
    max_bitrate: '8M'
  });
  db.get(
    'SELECT value FROM settings WHERE user_id = ? AND key = ?',
    [userId, 'stream_settings'],
    (err, result) => {
      if (err || !result) {
        // Missing row or query failure: hand back a copy of the defaults.
        return callback({ ...DEFAULT_STREAM_SETTINGS });
      }
      try {
        callback(JSON.parse(result.value));
      } catch {
        // Corrupt stored JSON: fall back to defaults rather than crashing.
        callback({ ...DEFAULT_STREAM_SETTINGS });
      }
    }
  );
};
// Universal proxy for all streams with geo-blocking bypass.
//
// GET /proxy/:channelId
// Looks up the channel URL, fetches it server-side with Origin/Referer
// spoofed to the upstream's own host, and relays the result. HLS manifests
// (.m3u8) are rewritten line-by-line so every segment URI routes back
// through /api/stream/hls-segment; all other content is piped as-is.
//
// Fix: replaced console.log/console.error with the shared logger (the repo's
// eslint config enables no-console, and the rest of this file uses logger),
// and collapsed the duplicate console+logger error logging in the catch
// block into a single logger call.
router.get('/proxy/:channelId', authenticate, heavyLimiter, async (req, res) => {
  const { channelId } = req.params;
  logger.info(`[STREAM] Proxy request for channel ${channelId}`);
  try {
    // Resolve the channel record; rejects bubble into the catch below.
    const channel = await new Promise((resolve, reject) => {
      db.get('SELECT url, name, is_radio FROM channels WHERE id = ?', [channelId], (err, row) => {
        if (err) {
          logger.error('[STREAM] Database error:', err);
          reject(err);
        } else if (!row) {
          logger.error('[STREAM] Channel not found:', channelId);
          reject(new Error('Channel not found'));
        } else {
          logger.info('[STREAM] Found channel:', row.name);
          resolve(row);
        }
      });
    });
    if (!channel.url) {
      return res.status(400).json({ error: 'Channel has no URL' });
    }
    logger.info(`Proxying ${channel.is_radio ? 'radio' : 'video'} stream: ${channel.name} - ${channel.url}`);
    // Extract origin from URL for proper spoofing: presenting the upstream's
    // own host as Origin/Referer defeats simple referrer/geo checks.
    const urlObj = new URL(channel.url);
    const origin = `${urlObj.protocol}//${urlObj.hostname}`;
    const referer = origin;
    // Get VPN-aware agent for this user (same agent either way; OS routing
    // carries traffic over the VPN interface when one is connected).
    const agent = await getVPNAgent(req.user.userId);
    const requestConfig = {
      method: 'GET',
      url: channel.url,
      // Manifests are rewritten as text; everything else streams through.
      responseType: channel.url.includes('.m3u8') ? 'text' : 'stream',
      timeout: 30000,
      // 4xx is handled explicitly below; only 5xx throws.
      validateStatus: (status) => status < 500,
      httpsAgent: agent,
      headers: {
        'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/120.0.0.0 Safari/537.36',
        'Accept': '*/*',
        'Accept-Language': 'en-US,en;q=0.9',
        'Accept-Encoding': 'identity',
        'Origin': origin,
        'Referer': referer,
        'Connection': 'keep-alive',
        'Sec-Fetch-Dest': 'empty',
        'Sec-Fetch-Mode': 'cors',
        'Sec-Fetch-Site': 'cross-site'
      }
    };
    // Proxy with origin spoofing to bypass geo-blocking
    const response = await axios(requestConfig);
    if (response.status >= 400) {
      logger.error(`Stream returned status ${response.status}`);
      return res.status(response.status).json({ error: 'Stream unavailable' });
    }
    // Handle HLS manifests - rewrite URLs to go through our proxy
    if (channel.url.includes('.m3u8')) {
      const m3u8Content = response.data;
      const baseUrl = channel.url.substring(0, channel.url.lastIndexOf('/') + 1);
      // Rewrite every URI line (tags/#-comments and blanks pass through).
      const rewrittenContent = m3u8Content.split('\n').map(line => {
        if (line.startsWith('#') || line.trim() === '') {
          return line;
        }
        // Convert relative to absolute URL
        let absoluteUrl = line;
        if (!line.startsWith('http')) {
          absoluteUrl = baseUrl + line;
        }
        // NOTE(review): req.query.token is embedded in each rewritten URL —
        // presumably so the player can authenticate segment requests; confirm
        // that tokens in query strings are acceptable (they can reach logs).
        const proxyUrl = `/api/stream/hls-segment?url=${encodeURIComponent(absoluteUrl)}&token=${req.query.token}`;
        return proxyUrl;
      }).join('\n');
      res.setHeader('Content-Type', 'application/vnd.apple.mpegurl');
      res.setHeader('Access-Control-Allow-Origin', '*');
      res.setHeader('Access-Control-Allow-Headers', '*');
      res.setHeader('Cache-Control', 'no-cache');
      return res.send(rewrittenContent);
    }
    // For binary streams, pipe directly
    res.setHeader('Content-Type', response.headers['content-type'] || 'application/octet-stream');
    res.setHeader('Cache-Control', 'no-cache');
    res.setHeader('Access-Control-Allow-Origin', '*');
    res.setHeader('Access-Control-Allow-Headers', '*');
    res.setHeader('Access-Control-Allow-Methods', 'GET, HEAD, OPTIONS');
    if (response.headers['content-length']) {
      res.setHeader('Content-Length', response.headers['content-length']);
    }
    if (response.headers['content-range']) {
      res.setHeader('Content-Range', response.headers['content-range']);
      res.setHeader('Accept-Ranges', 'bytes');
    }
    response.data.pipe(res);
    response.data.on('error', (error) => {
      logger.error('Stream error:', error.message);
      if (!res.headersSent) {
        res.status(500).json({ error: 'Stream failed' });
      }
    });
    // Tear down the upstream stream when the client goes away.
    req.on('close', () => {
      logger.info('Client disconnected from stream');
      if (response.data && !response.data.destroyed) {
        response.data.destroy();
      }
    });
  } catch (error) {
    logger.error('[STREAM] Proxy error:', error);
    if (!res.headersSent) {
      res.status(500).json({ error: 'Failed to proxy stream', details: error.message });
    }
  }
});
// Proxy HLS segments (playlists and .ts chunks)
//
// SECURITY NOTE(review): `url` comes straight from the query string, so any
// authenticated caller can make this server fetch an arbitrary URL (SSRF
// surface). Presumably intended to be reached only via the rewritten
// manifests from /proxy/:channelId — confirm whether `url` should be
// validated against an allow-list of known upstream hosts.
router.get('/hls-segment', authenticate, heavyLimiter, async (req, res) => {
  const { url } = req.query;
  if (!url) {
    return res.status(400).json({ error: 'URL parameter required' });
  }
  try {
    // new URL() throws on malformed input — handled by the catch below.
    const urlObj = new URL(url);
    const origin = `${urlObj.protocol}//${urlObj.hostname}`;
    logger.info(`Proxying HLS segment: ${url}`);
    const response = await axios({
      method: 'GET',
      url: url,
      // Nested playlists are rewritten as text; media chunks are streamed.
      responseType: url.includes('.m3u8') ? 'text' : 'stream',
      timeout: 15000,
      httpsAgent: httpsAgent,
      validateStatus: (status) => status < 500,
      headers: {
        'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36',
        'Accept': '*/*',
        'Origin': origin,
        'Referer': origin,
        'Connection': 'keep-alive'
      }
    });
    if (response.status >= 400) {
      return res.status(response.status).json({ error: 'Segment unavailable' });
    }
    // Handle nested m3u8 playlists (same rewriting as the main proxy:
    // tags/blank lines pass through, URI lines are absolutized and re-routed
    // through this endpoint, propagating the caller's token).
    if (url.includes('.m3u8')) {
      const m3u8Content = response.data;
      const baseUrl = url.substring(0, url.lastIndexOf('/') + 1);
      const rewrittenContent = m3u8Content.split('\n').map(line => {
        if (line.startsWith('#') || line.trim() === '') {
          return line;
        }
        let absoluteUrl = line;
        if (!line.startsWith('http')) {
          absoluteUrl = baseUrl + line;
        }
        return `/api/stream/hls-segment?url=${encodeURIComponent(absoluteUrl)}&token=${req.query.token}`;
      }).join('\n');
      res.setHeader('Content-Type', 'application/vnd.apple.mpegurl');
      res.setHeader('Access-Control-Allow-Origin', '*');
      res.setHeader('Cache-Control', 'no-cache');
      return res.send(rewrittenContent);
    }
    // Stream binary segments (briefly publicly cacheable, unlike manifests)
    res.setHeader('Content-Type', response.headers['content-type'] || 'video/mp2t');
    res.setHeader('Access-Control-Allow-Origin', '*');
    res.setHeader('Cache-Control', 'public, max-age=300');
    if (response.headers['content-length']) {
      res.setHeader('Content-Length', response.headers['content-length']);
    }
    response.data.pipe(res);
    response.data.on('error', (error) => {
      logger.error('HLS segment error:', error.message);
      if (!res.headersSent) {
        res.status(500).end();
      }
    });
  } catch (error) {
    logger.error('HLS segment proxy error:', error.message);
    if (!res.headersSent) {
      res.status(500).json({ error: 'Failed to proxy segment' });
    }
  }
});
// Stream proxy with hardware acceleration (for transcoding if needed).
// GET /proxy-ffmpeg/:channelId — looks up the channel URL, builds an FFmpeg
// remux command ("-c copy" to MPEG-TS), and pipes FFmpeg stdout to the client.
router.get('/proxy-ffmpeg/:channelId', authenticate, heavyLimiter, (req, res) => {
  const { channelId } = req.params;
  db.get(
    'SELECT url FROM channels WHERE id = ?',
    [channelId],
    (err, channel) => {
      if (err || !channel) {
        return res.status(404).json({ error: 'Channel not found' });
      }
      // Per-user stream settings decide which hwaccel flags (if any) to add.
      getStreamSettings(req.user.userId, (settings) => {
        const capabilities = checkHardwareAcceleration();
        // Base command: read at native rate (-re), copy both codecs (no
        // re-encode), and emit MPEG-TS on stdout (pipe:1).
        const ffmpegArgs = [
          '-re',
          '-i', channel.url,
          '-c:v', 'copy',
          '-c:a', 'copy',
          '-f', 'mpegts',
          'pipe:1'
        ];
        // Add hardware acceleration if enabled and available.
        // unshift() places the -hwaccel flags BEFORE -i, where FFmpeg expects
        // input options. NOTE(review): with "-c copy" no decode/encode occurs,
        // so these flags likely have no effect unless transcoding is added.
        if (settings.hwaccel !== 'none') {
          if (settings.hwaccel === 'quicksync' && capabilities.quicksync) {
            ffmpegArgs.unshift(
              '-hwaccel', 'qsv',
              '-hwaccel_device', settings.hwaccel_device || '/dev/dri/renderD128',
              '-hwaccel_output_format', 'qsv'
            );
          } else if (settings.hwaccel === 'vaapi' && capabilities.vaapi) {
            ffmpegArgs.unshift(
              '-hwaccel', 'vaapi',
              '-hwaccel_device', settings.hwaccel_device || '/dev/dri/renderD128',
              '-hwaccel_output_format', 'vaapi'
            );
          } else if (settings.hwaccel === 'nvenc' && capabilities.nvenc) {
            ffmpegArgs.unshift(
              '-hwaccel', 'cuda',
              '-hwaccel_output_format', 'cuda'
            );
          } else if (settings.hwaccel === 'auto') {
            // Auto-detect best available: QuickSync preferred, then NVENC.
            if (capabilities.quicksync) {
              ffmpegArgs.unshift(
                '-hwaccel', 'qsv',
                '-hwaccel_device', '/dev/dri/renderD128',
                '-hwaccel_output_format', 'qsv'
              );
            } else if (capabilities.nvenc) {
              ffmpegArgs.unshift(
                '-hwaccel', 'cuda',
                '-hwaccel_output_format', 'cuda'
              );
            }
          }
        }
        logger.info(`Streaming channel ${channelId} with args:`, ffmpegArgs);
        // spawn with an argv array: channel.url is never shell-interpreted.
        const ffmpeg = spawn('ffmpeg', ffmpegArgs);
        res.setHeader('Content-Type', 'video/mp2t');
        res.setHeader('Cache-Control', 'no-cache');
        ffmpeg.stdout.pipe(res);
        // FFmpeg writes progress/diagnostics to stderr; log at debug only.
        ffmpeg.stderr.on('data', (data) => {
          logger.debug(`FFmpeg: ${data}`);
        });
        ffmpeg.on('error', (error) => {
          logger.error('FFmpeg error:', error);
          if (!res.headersSent) {
            res.status(500).json({ error: 'Stream failed' });
          }
        });
        ffmpeg.on('close', (code) => {
          logger.info(`FFmpeg closed with code ${code}`);
        });
        // Kill the child when the client goes away so FFmpeg doesn't leak.
        req.on('close', () => {
          logger.info('Client disconnected, stopping stream');
          ffmpeg.kill('SIGTERM');
        });
      });
    }
  );
});
module.exports = router;

331
backend/routes/twoFactor.js Normal file
View file

@ -0,0 +1,331 @@
const express = require('express');
const router = express.Router();
const speakeasy = require('speakeasy');
const QRCode = require('qrcode');
const crypto = require('crypto');
const { authenticate } = require('../middleware/auth');
const { modifyLimiter, authLimiter, readLimiter } = require('../middleware/rateLimiter');
const { db } = require('../database/db');
const logger = require('../utils/logger');
const { promisify } = require('util');
const dbRun = promisify(db.run.bind(db));
const dbGet = promisify(db.get.bind(db));
const dbAll = promisify(db.all.bind(db));
// Generate 2FA secret and QR code.
// Stores the TOTP secret but does NOT activate 2FA; /enable does that once a
// valid code proves the authenticator app was provisioned.
router.post('/setup', authenticate, modifyLimiter, async (req, res) => {
  try {
    const userId = req.user.userId;

    // A user with active 2FA must disable it before re-running setup.
    const row = await dbGet('SELECT two_factor_enabled FROM users WHERE id = ?', [userId]);
    if (row.two_factor_enabled) {
      return res.status(400).json({ error: '2FA is already enabled' });
    }

    // Fresh TOTP secret, labelled for authenticator apps.
    const secret = speakeasy.generateSecret({
      name: `StreamFlow IPTV:${req.user.username || 'User'}`,
      issuer: 'StreamFlow'
    });

    // Render the otpauth:// URI as a QR code (high error-correction level).
    const qrOptions = {
      width: 300,
      margin: 2,
      color: { dark: '#000000', light: '#FFFFFF' },
      errorCorrectionLevel: 'H'
    };
    const qrCodeDataURL = await QRCode.toDataURL(secret.otpauth_url, qrOptions);

    // Persist the secret; it only becomes active when /enable verifies a code.
    await dbRun('UPDATE users SET two_factor_secret = ? WHERE id = ?', [secret.base32, userId]);

    res.json({
      secret: secret.base32,
      qrCode: qrCodeDataURL,
      manualEntryKey: secret.base32
    });
  } catch (error) {
    logger.error('2FA setup error:', error);
    res.status(500).json({ error: 'Failed to setup 2FA' });
  }
});
// Verify and enable 2FA.
// Confirms the submitted TOTP code against the stored secret, generates ten
// single-use backup codes, then flips two_factor_enabled on.
router.post('/enable', authenticate, authLimiter, async (req, res) => {
  try {
    const { token } = req.body;
    const userId = req.user.userId;
    if (!token) {
      return res.status(400).json({ error: 'Verification token required' });
    }
    // Get user's secret
    const user = await dbGet(
      'SELECT two_factor_secret FROM users WHERE id = ?',
      [userId]
    );
    // Guard the missing-row case too (account deleted under a live session)
    // so we answer 400 instead of crashing on user.two_factor_secret.
    if (!user || !user.two_factor_secret) {
      return res.status(400).json({ error: '2FA not set up. Call /setup first' });
    }
    // Verify token
    const verified = speakeasy.totp.verify({
      secret: user.two_factor_secret,
      encoding: 'base32',
      token: String(token), // tolerate numeric JSON payloads
      window: 2 // Allow 2 time steps before/after
    });
    if (!verified) {
      return res.status(400).json({ error: 'Invalid verification code' });
    }
    // Generate ten backup codes (8 hex chars each, single-use).
    // NOTE(review): codes are stored in plaintext — consider hashing them.
    const backupCodes = [];
    for (let i = 0; i < 10; i++) {
      const code = crypto.randomBytes(4).toString('hex').toUpperCase();
      backupCodes.push(code);
      await dbRun(
        'INSERT INTO two_factor_backup_codes (user_id, code) VALUES (?, ?)',
        [userId, code]
      );
    }
    // Enable 2FA
    await dbRun(
      'UPDATE users SET two_factor_enabled = 1 WHERE id = ?',
      [userId]
    );
    logger.info(`2FA enabled for user ${userId}`);
    res.json({
      success: true,
      message: '2FA enabled successfully',
      backupCodes: backupCodes
    });
  } catch (error) {
    logger.error('2FA enable error:', error);
    res.status(500).json({ error: 'Failed to enable 2FA' });
  }
});
// Disable 2FA.
// Requires both the account password and a current TOTP code; clears the
// secret and removes all backup codes on success.
router.post('/disable', authenticate, authLimiter, async (req, res) => {
  try {
    const { password, token } = req.body;
    const userId = req.user.userId;
    if (!password) {
      return res.status(400).json({ error: 'Password required to disable 2FA' });
    }
    // Previously a missing token fell through to speakeasy and surfaced as a
    // misleading "Invalid 2FA code"; reject it explicitly instead.
    if (!token) {
      return res.status(400).json({ error: '2FA code required to disable 2FA' });
    }
    // Verify password
    const bcrypt = require('bcrypt');
    const user = await dbGet('SELECT password, two_factor_secret FROM users WHERE id = ?', [userId]);
    // Guard the missing-row case so bcrypt.compare never sees undefined.
    if (!user) {
      return res.status(404).json({ error: 'User not found' });
    }
    const validPassword = await bcrypt.compare(password, user.password);
    if (!validPassword) {
      return res.status(401).json({ error: 'Invalid password' });
    }
    // Verify 2FA token
    const verified = speakeasy.totp.verify({
      secret: user.two_factor_secret,
      encoding: 'base32',
      token: String(token), // tolerate numeric JSON payloads
      window: 2
    });
    if (!verified) {
      return res.status(400).json({ error: 'Invalid 2FA code' });
    }
    // Disable 2FA and remove secret
    await dbRun(
      'UPDATE users SET two_factor_enabled = 0, two_factor_secret = NULL WHERE id = ?',
      [userId]
    );
    // Delete all backup codes
    await dbRun('DELETE FROM two_factor_backup_codes WHERE user_id = ?', [userId]);
    logger.info(`2FA disabled for user ${userId}`);
    res.json({ success: true, message: '2FA disabled successfully' });
  } catch (error) {
    logger.error('2FA disable error:', error);
    res.status(500).json({ error: 'Failed to disable 2FA' });
  }
});
// Verify 2FA token (for login).
// Unauthenticated by design: called during login after the password check.
// Accepts either a single-use backup code or a current TOTP code.
router.post('/verify', authLimiter, async (req, res) => {
  try {
    const { userId, token } = req.body;
    if (!userId || !token) {
      return res.status(400).json({ error: 'User ID and token required' });
    }
    // Coerce once: JSON clients often send the 6-digit code as a number, and
    // calling .toUpperCase() on a number previously threw (-> 500).
    const code = String(token);
    // Get user's secret
    const user = await dbGet(
      'SELECT two_factor_secret, two_factor_enabled FROM users WHERE id = ?',
      [userId]
    );
    if (!user || !user.two_factor_enabled) {
      return res.status(400).json({ error: '2FA not enabled for this user' });
    }
    // Check if it's a backup code (stored upper-cased, single-use)
    const backupCode = await dbGet(
      'SELECT id FROM two_factor_backup_codes WHERE user_id = ? AND code = ? AND used = 0',
      [userId, code.toUpperCase()]
    );
    if (backupCode) {
      // Mark backup code as used
      await dbRun(
        'UPDATE two_factor_backup_codes SET used = 1, used_at = CURRENT_TIMESTAMP WHERE id = ?',
        [backupCode.id]
      );
      logger.info(`Backup code used for user ${userId}`);
      return res.json({ valid: true, method: 'backup_code' });
    }
    // Verify TOTP token (window: 2 time steps of clock drift tolerated)
    const verified = speakeasy.totp.verify({
      secret: user.two_factor_secret,
      encoding: 'base32',
      token: code,
      window: 2
    });
    if (verified) {
      return res.json({ valid: true, method: 'totp' });
    } else {
      return res.status(400).json({ error: 'Invalid 2FA code' });
    }
  } catch (error) {
    logger.error('2FA verify error:', error);
    res.status(500).json({ error: 'Failed to verify 2FA code' });
  }
});
// Get backup codes.
// Returns the caller's backup codes with their used/unused state.
router.get('/backup-codes', authenticate, readLimiter, async (req, res) => {
  try {
    const rows = await dbAll(
      'SELECT code, used, used_at, created_at FROM two_factor_backup_codes WHERE user_id = ? ORDER BY created_at DESC',
      [req.user.userId]
    );
    res.json(rows);
  } catch (error) {
    logger.error('Get backup codes error:', error);
    res.status(500).json({ error: 'Failed to retrieve backup codes' });
  }
});
// Regenerate backup codes.
// Requires password + current TOTP code; wipes the old set and mints ten new
// single-use codes.
router.post('/backup-codes/regenerate', authenticate, modifyLimiter, async (req, res) => {
  try {
    const userId = req.user.userId;
    const { password, token } = req.body;
    if (!password || !token) {
      return res.status(400).json({ error: 'Password and 2FA token required' });
    }
    // Re-authenticate with the account password before touching backup codes.
    const bcrypt = require('bcrypt');
    const user = await dbGet('SELECT password, two_factor_secret FROM users WHERE id = ?', [userId]);
    const passwordOk = await bcrypt.compare(password, user.password);
    if (!passwordOk) {
      return res.status(401).json({ error: 'Invalid password' });
    }
    // A valid TOTP code is also required.
    const tokenOk = speakeasy.totp.verify({
      secret: user.two_factor_secret,
      encoding: 'base32',
      token: token,
      window: 2
    });
    if (!tokenOk) {
      return res.status(400).json({ error: 'Invalid 2FA code' });
    }
    // Replace the whole set: delete old codes, then insert ten fresh ones.
    await dbRun('DELETE FROM two_factor_backup_codes WHERE user_id = ?', [userId]);
    const backupCodes = [];
    while (backupCodes.length < 10) {
      const code = crypto.randomBytes(4).toString('hex').toUpperCase();
      await dbRun(
        'INSERT INTO two_factor_backup_codes (user_id, code) VALUES (?, ?)',
        [userId, code]
      );
      backupCodes.push(code);
    }
    logger.info(`Backup codes regenerated for user ${userId}`);
    res.json({
      success: true,
      message: 'Backup codes regenerated',
      backupCodes: backupCodes
    });
  } catch (error) {
    logger.error('Backup codes regenerate error:', error);
    res.status(500).json({ error: 'Failed to regenerate backup codes' });
  }
});
// Check 2FA status.
// Reports whether 2FA is on plus total/unused backup-code counts.
router.get('/status', authenticate, readLimiter, async (req, res) => {
  try {
    const userId = req.user.userId;
    const user = await dbGet('SELECT two_factor_enabled FROM users WHERE id = ?', [userId]);
    // SUM(...) is NULL when the user has no rows; the || 0 below covers that.
    const counts = await dbGet(
      'SELECT COUNT(*) as total, SUM(CASE WHEN used = 0 THEN 1 ELSE 0 END) as unused FROM two_factor_backup_codes WHERE user_id = ?',
      [userId]
    );
    res.json({
      enabled: !!user.two_factor_enabled,
      backupCodesTotal: counts.total || 0,
      backupCodesUnused: counts.unused || 0
    });
  } catch (error) {
    logger.error('2FA status error:', error);
    res.status(500).json({ error: 'Failed to get 2FA status' });
  }
});
module.exports = router;

434
backend/routes/users.js Normal file
View file

@ -0,0 +1,434 @@
const express = require('express');
const router = express.Router();
const bcrypt = require('bcryptjs');
const { body, validationResult } = require('express-validator');
const { authenticate, requireAdmin } = require('../middleware/auth');
const { modifyLimiter, readLimiter } = require('../middleware/rateLimiter');
const { db } = require('../database/db');
const logger = require('../utils/logger');
const SecurityAuditLogger = require('../utils/securityAudit');
// Get all users (admin only).
// Returns account + lockout metadata (no password hashes) and writes a
// sensitive-data-access audit entry for the listing.
router.get('/', readLimiter, authenticate, requireAdmin, async (req, res) => {
  const ip = req.ip || req.headers['x-forwarded-for'] || req.connection.remoteAddress;
  const userAgent = req.headers['user-agent'];
  const listSql = `SELECT id, username, email, role, is_active, created_at, updated_at, created_by,
    failed_login_attempts, last_failed_login, locked_until, last_login_at, last_login_ip,
    password_changed_at, password_expires_at
    FROM users
    ORDER BY created_at DESC`;
  db.all(listSql, [], async (err, rows) => {
    if (err) {
      logger.error('Error fetching users:', err);
      return res.status(500).json({ error: 'Failed to fetch users' });
    }
    // CWE-778: Log sensitive data access
    await SecurityAuditLogger.logSensitiveDataAccess(req.user.userId, 'user_list', {
      ip,
      userAgent,
      recordCount: rows.length,
      scope: 'all',
      accessMethod: 'view'
    });
    res.json(rows);
  });
});
// Get single user (admin only).
// Returns one account's public fields and audit-logs the access.
router.get('/:id', readLimiter, authenticate, requireAdmin, async (req, res) => {
  const ip = req.ip || req.headers['x-forwarded-for'] || req.connection.remoteAddress;
  const userAgent = req.headers['user-agent'];
  const sql = `SELECT id, username, email, role, is_active, created_at, updated_at, created_by
    FROM users WHERE id = ?`;
  db.get(sql, [req.params.id], async (err, row) => {
    if (err) {
      logger.error('Error fetching user:', err);
      return res.status(500).json({ error: 'Failed to fetch user' });
    }
    if (!row) {
      return res.status(404).json({ error: 'User not found' });
    }
    // CWE-778: Log sensitive data access
    await SecurityAuditLogger.logSensitiveDataAccess(req.user.userId, 'user_details', {
      ip,
      userAgent,
      recordCount: 1,
      scope: 'specific',
      accessMethod: 'view',
      filters: { userId: req.params.id }
    });
    res.json(row);
  });
});
// Create user (admin only).
// POST / — validates input, hashes the password, inserts the row with
// must_change_password = 1, audit-logs the creation, and returns the new row.
router.post('/',
  modifyLimiter,
  authenticate,
  requireAdmin,
  [
    body('username').trim().isLength({ min: 3, max: 50 }).isAlphanumeric(),
    body('email').isEmail().normalizeEmail(),
    body('password').isLength({ min: 8 }),
    body('role').isIn(['user', 'admin'])
  ],
  async (req, res) => {
    const errors = validationResult(req);
    if (!errors.isEmpty()) {
      return res.status(400).json({ errors: errors.array() });
    }
    const { username, email, password, role } = req.body;
    try {
      // bcrypt cost factor 10; only the hash is stored, never the password.
      const hashedPassword = await bcrypt.hash(password, 10);
      const ip = req.ip || req.headers['x-forwarded-for'] || req.connection.remoteAddress;
      const userAgent = req.headers['user-agent'];
      db.run(
        `INSERT INTO users (username, email, password, role, must_change_password, created_by)
        VALUES (?, ?, ?, ?, ?, ?)`,
        [username, email, hashedPassword, role, 1, req.user.userId],
        // Classic function so `this.lastID` refers to the inserted row.
        async function(err) {
          if (err) {
            // SQLite reports duplicate username/email via a UNIQUE constraint error.
            if (err.message.includes('UNIQUE')) {
              return res.status(400).json({ error: 'Username or email already exists' });
            }
            logger.error('User creation error:', err);
            return res.status(500).json({ error: 'Failed to create user' });
          }
          const newUserId = this.lastID;
          // CWE-778: Log admin activity
          await SecurityAuditLogger.logAdminActivity(req.user.userId, 'user_created', {
            ip,
            userAgent,
            targetUserId: newUserId,
            targetUsername: username,
            adminUsername: req.user.username || 'admin',
            changes: { username, email, role }
          });
          // Re-read the row so the response reflects DB defaults (is_active, timestamps).
          db.get(
            `SELECT id, username, email, role, is_active, created_at, created_by
            FROM users WHERE id = ?`,
            [newUserId],
            (err, user) => {
              if (err) {
                return res.status(500).json({ error: 'User created but failed to fetch details' });
              }
              res.status(201).json(user);
            }
          );
        }
      );
    } catch (error) {
      logger.error('User creation error:', error);
      res.status(500).json({ error: 'Failed to create user' });
    }
  }
);
// Update user (admin only).
// PATCH /:id — partial update: only fields present in the body are written.
// Role and active-status transitions additionally get security-audit entries.
router.patch('/:id',
  modifyLimiter,
  authenticate,
  requireAdmin,
  [
    body('username').optional().trim().isLength({ min: 3, max: 50 }).isAlphanumeric(),
    body('email').optional().isEmail().normalizeEmail(),
    body('role').optional().isIn(['user', 'admin']),
    body('is_active').optional().isBoolean()
  ],
  async (req, res) => {
    const errors = validationResult(req);
    if (!errors.isEmpty()) {
      return res.status(400).json({ errors: errors.array() });
    }
    const { id } = req.params;
    const updates = []; // "column = ?" fragments for the SET clause
    const values = [];  // bound parameters, same order as `updates`
    // Build dynamic update query
    if (req.body.username !== undefined) {
      updates.push('username = ?');
      values.push(req.body.username);
    }
    if (req.body.email !== undefined) {
      updates.push('email = ?');
      values.push(req.body.email);
    }
    // Check if role or is_active is being changed (for audit logging)
    const isRoleChange = req.body.role !== undefined;
    const isStatusChange = req.body.is_active !== undefined;
    if (req.body.role !== undefined) {
      updates.push('role = ?');
      values.push(req.body.role);
    }
    if (req.body.is_active !== undefined) {
      updates.push('is_active = ?');
      // Stored as integer 1/0, not a boolean
      values.push(req.body.is_active ? 1 : 0);
    }
    if (updates.length === 0) {
      return res.status(400).json({ error: 'No valid fields to update' });
    }
    // Get current user data for audit logging (previous role/status values)
    db.get('SELECT role, is_active, username FROM users WHERE id = ?', [id], async (err, existingUser) => {
      if (err || !existingUser) {
        return res.status(404).json({ error: 'User not found' });
      }
      updates.push('updated_at = CURRENT_TIMESTAMP');
      values.push(id); // final placeholder belongs to the WHERE clause
      db.run(
        // Only the fixed column fragments above are interpolated; all
        // user-supplied data travels through bound parameters.
        `UPDATE users SET ${updates.join(', ')} WHERE id = ?`,
        values,
        // Classic function so `this.changes` is available.
        async function(err) {
          if (err) {
            if (err.message.includes('UNIQUE')) {
              return res.status(400).json({ error: 'Username or email already exists' });
            }
            logger.error('User update error:', err);
            return res.status(500).json({ error: 'Failed to update user' });
          }
          if (this.changes === 0) {
            return res.status(404).json({ error: 'User not found' });
          }
          const ip = req.ip || req.headers['x-forwarded-for'] || req.connection.remoteAddress;
          const userAgent = req.headers['user-agent'];
          // CWE-778: Log privilege changes if role changed
          if (isRoleChange && req.body.role !== existingUser.role) {
            await SecurityAuditLogger.logPrivilegeChange(parseInt(id), 'role_change', {
              ip,
              userAgent,
              previousRole: existingUser.role,
              newRole: req.body.role,
              changedBy: req.user.userId,
              changedByUsername: req.user.username || 'system',
              targetUsername: existingUser.username
            });
          }
          // CWE-778: Log account status changes (compare against stored 1/0)
          if (isStatusChange && req.body.is_active !== (existingUser.is_active === 1)) {
            const newStatus = req.body.is_active ? 'active' : 'inactive';
            await SecurityAuditLogger.logAccountStatusChange(parseInt(id), newStatus, {
              ip,
              userAgent,
              previousStatus: existingUser.is_active === 1 ? 'active' : 'inactive',
              changedBy: req.user.userId,
              changedByUsername: req.user.username || 'system',
              targetUsername: existingUser.username,
              reason: 'admin_action'
            });
          }
          // Return the freshly-updated row
          db.get(
            `SELECT id, username, email, role, is_active, created_at, updated_at, created_by
            FROM users WHERE id = ?`,
            [id],
            (err, user) => {
              if (err) {
                return res.status(500).json({ error: 'User updated but failed to fetch details' });
              }
              res.json(user);
            }
          );
        }
      );
    });
  }
);
// Reset user password (admin only).
// POST /:id/reset-password — admin sets a temporary password; the account is
// forced to change it at next login (must_change_password = 1).
router.post('/:id/reset-password',
  modifyLimiter,
  authenticate,
  requireAdmin,
  [
    body('newPassword').isLength({ min: 8 })
  ],
  async (req, res) => {
    const errors = validationResult(req);
    if (!errors.isEmpty()) {
      return res.status(400).json({ errors: errors.array() });
    }
    const { id } = req.params;
    const { newPassword } = req.body;
    try {
      const ip = req.ip || req.headers['x-forwarded-for'] || req.connection.remoteAddress;
      const userAgent = req.headers['user-agent'];
      // Get user info first (username feeds the audit record below)
      db.get('SELECT username FROM users WHERE id = ?', [id], async (err, user) => {
        if (err || !user) {
          return res.status(404).json({ error: 'User not found' });
        }
        // bcrypt cost factor 10, matching user creation
        const hashedPassword = await bcrypt.hash(newPassword, 10);
        db.run(
          'UPDATE users SET password = ?, must_change_password = 1, updated_at = CURRENT_TIMESTAMP WHERE id = ?',
          [hashedPassword, id],
          // Classic function so `this.changes` is available.
          async function(err) {
            if (err) {
              logger.error('Password reset error:', err);
              return res.status(500).json({ error: 'Failed to reset password' });
            }
            if (this.changes === 0) {
              return res.status(404).json({ error: 'User not found' });
            }
            // CWE-778: Log admin activity
            await SecurityAuditLogger.logAdminActivity(req.user.userId, 'password_reset', {
              ip,
              userAgent,
              targetUserId: id,
              targetUsername: user.username,
              adminUsername: req.user.username || 'admin',
              reason: 'admin_initiated'
            });
            res.json({ message: 'Password reset successfully. User must change password on next login.' });
          }
        );
      });
    } catch (error) {
      logger.error('Password reset error:', error);
      res.status(500).json({ error: 'Failed to reset password' });
    }
  }
);
// Unlock account (admin only).
// POST /:id/unlock — clears the lockout timestamp and failed-attempt counter,
// then records the action in the security audit log.
router.post('/:id/unlock', modifyLimiter, authenticate, requireAdmin, async (req, res) => {
  const { id } = req.params;
  const ip = req.ip || req.headers['x-forwarded-for'] || req.connection.remoteAddress;
  const userAgent = req.headers['user-agent'];
  try {
    // Get user info first (username and prior locked_until feed the audit entry)
    db.get('SELECT username, locked_until FROM users WHERE id = ?', [id], async (err, user) => {
      if (err || !user) {
        return res.status(404).json({ error: 'User not found' });
      }
      db.run(
        'UPDATE users SET locked_until = NULL, failed_login_attempts = 0 WHERE id = ?',
        [id],
        // Classic function so `this.changes` is available.
        async function(err) {
          if (err) {
            logger.error('Account unlock error:', err);
            return res.status(500).json({ error: 'Failed to unlock account' });
          }
          if (this.changes === 0) {
            return res.status(404).json({ error: 'User not found' });
          }
          // CWE-778: Log admin activity
          await SecurityAuditLogger.logAdminActivity(req.user.userId, 'account_unlocked', {
            ip,
            userAgent,
            targetUserId: id,
            targetUsername: user.username,
            adminUsername: req.user.username || 'admin',
            changes: { locked_until: user.locked_until, failed_login_attempts: 0 },
            reason: 'admin_unlock'
          });
          logger.info(`Admin ${req.user.userId} unlocked account ${id}`);
          res.json({ message: 'Account unlocked successfully' });
        }
      );
    });
  } catch (error) {
    logger.error('Account unlock error:', error);
    res.status(500).json({ error: 'Failed to unlock account' });
  }
});
// Delete user (admin only).
// DELETE /:id — refuses self-deletion and deleting the last active admin;
// otherwise removes the row and writes a security-audit record.
router.delete('/:id', modifyLimiter, authenticate, requireAdmin, async (req, res) => {
  const { id } = req.params;
  const ip = req.ip || req.headers['x-forwarded-for'] || req.connection.remoteAddress;
  const userAgent = req.headers['user-agent'];
  // Prevent deleting yourself. Always pass the radix: bare parseInt() on an
  // untrusted route param can misparse prefixed strings.
  if (Number.parseInt(id, 10) === req.user.userId) {
    return res.status(400).json({ error: 'Cannot delete your own account' });
  }
  // Check if this is the last admin
  db.get(
    "SELECT COUNT(*) as count FROM users WHERE role = 'admin' AND is_active = 1",
    [],
    (err, result) => {
      if (err) {
        logger.error('Error checking admin count:', err);
        return res.status(500).json({ error: 'Failed to delete user' });
      }
      db.get('SELECT username, email, role FROM users WHERE id = ?', [id], async (err, user) => {
        if (err || !user) {
          return res.status(404).json({ error: 'User not found' });
        }
        // Never allow the pool of active admins to drop to zero.
        if (user.role === 'admin' && result.count <= 1) {
          return res.status(400).json({ error: 'Cannot delete the last admin account' });
        }
        // Classic function so `this.changes` is available.
        db.run('DELETE FROM users WHERE id = ?', [id], async function(err) {
          if (err) {
            logger.error('User deletion error:', err);
            return res.status(500).json({ error: 'Failed to delete user' });
          }
          if (this.changes === 0) {
            return res.status(404).json({ error: 'User not found' });
          }
          // CWE-778: Log admin activity - user deletion
          await SecurityAuditLogger.logAdminActivity(req.user.userId, 'user_deleted', {
            ip,
            userAgent,
            targetUserId: id,
            targetUsername: user.username,
            adminUsername: req.user.username || 'admin',
            changes: { deleted: { username: user.username, email: user.email, role: user.role } },
            reason: 'admin_deletion'
          });
          res.json({ message: 'User deleted successfully' });
        });
      });
    }
  );
});
module.exports = router;

View file

@ -0,0 +1,738 @@
const express = require('express');
const router = express.Router();
const { authenticate } = require('../middleware/auth');
const { db } = require('../database/db');
const { spawn } = require('child_process');
const fs = require('fs').promises;
const path = require('path');
const { modifyLimiter, readLimiter } = require('../middleware/rateLimiter');
const encryption = require('../utils/encryption');
// Use centralized encryption module for CWE-311 compliance.
// Thin aliases so route handlers call encrypt()/decrypt() without depending
// on the concrete VPN-payload implementation in ../utils/encryption.
const encrypt = (data) => encryption.encryptVPN(data);
const decrypt = (data) => encryption.decryptVPN(data);
// Parse WireGuard config.
// Turns the text of a .conf file into { type, country, serverName, endpoint,
// data } metadata. Throws on oversized, suspicious, or incomplete input.
function parseWireGuardConfig(content) {
  // Security: Limit content size
  if (content.length > 100000) {
    throw new Error('Configuration file too large');
  }
  // Security: Check for script injection attempts
  if (content.includes('<script') || content.includes('${') || content.includes('eval(')) {
    throw new Error('Invalid configuration content detected');
  }
  const lines = content.split('\n');
  const parsed = {
    type: 'wireguard',
    interface: {},
    peer: {}
  };
  let section = null;
  for (const line of lines) {
    const trimmed = line.trim();
    if (trimmed === '[Interface]') {
      section = 'interface';
    } else if (trimmed === '[Peer]') {
      section = 'peer';
    } else if (trimmed && !trimmed.startsWith('#') && section) {
      // Values may themselves contain '=' (base64 keys), so rejoin the tail.
      const [key, ...valueParts] = trimmed.split('=');
      if (key && valueParts.length > 0) {
        const value = valueParts.join('=').trim();
        // Security: Sanitize values, no shell commands
        if (value.includes('$(') || value.includes('`') || value.includes(';')) {
          throw new Error('Invalid configuration value detected');
        }
        parsed[section][key.trim()] = value;
      }
    }
  }
  // Validate required fields.
  // CWE-532: the previous debug console.log of the parsed structure/field
  // presence was removed — this data describes a VPN credential file and
  // must not reach stdout.
  if (!parsed.interface.PrivateKey || !parsed.peer.PublicKey || !parsed.peer.Endpoint) {
    throw new Error('Missing required WireGuard configuration fields');
  }
  // Extract metadata
  const endpoint = parsed.peer.Endpoint || '';
  const country = extractCountryFromEndpoint(endpoint);
  // NOTE(review): splitting on ':' breaks for bracketed IPv6 endpoints —
  // confirm only hostname/IPv4 endpoints are expected here.
  const serverName = endpoint.split(':')[0];
  return {
    type: 'wireguard',
    country,
    serverName,
    endpoint,
    data: parsed
  };
}
// Parse OpenVPN config.
// Extracts the (last) "remote <host> [port] [proto]" directive plus metadata;
// throws on oversized, suspicious, or remote-less input.
function parseOpenVPNConfig(content) {
  // Security: Limit content size
  if (content.length > 100000) {
    throw new Error('Configuration file too large');
  }
  // Security: Check for script injection attempts
  if (content.includes('<script') || content.includes('${') || content.includes('eval(')) {
    throw new Error('Invalid configuration content detected');
  }
  // Security: Check for dangerous directives
  const dangerousDirectives = ['script-security 3', 'up /bin/sh', 'down /bin/sh', 'route-up /bin/sh'];
  for (const directive of dangerousDirectives) {
    if (content.includes(directive)) {
      throw new Error('Configuration contains potentially dangerous directives');
    }
  }
  // Scan for "remote" lines; when several appear, the last one wins.
  let remote = null;
  let port = null;
  let proto = null;
  for (const rawLine of content.split('\n')) {
    const line = rawLine.trim();
    if (line.startsWith('remote ')) {
      const [, host, portPart, protoPart] = line.split(/\s+/);
      remote = host;
      port = portPart || '1194';
      proto = protoPart || 'udp';
    }
  }
  // Validate required fields
  if (!remote) {
    throw new Error('Missing required OpenVPN remote server');
  }
  // Extract metadata
  return {
    type: 'openvpn',
    country: extractCountryFromEndpoint(remote),
    serverName: remote || 'Unknown',
    endpoint: `${remote}:${port}`,
    proto: proto,
    data: { config: content }
  };
}
// Extract country code from endpoint/hostname.
// Returns an upper-cased two-letter code, or null when the endpoint carries
// no recognizable country hint.
function extractCountryFromEndpoint(endpoint) {
  if (!endpoint) return null;
  // Common patterns: us-01.server.com, node-us-01, 185.107.57.98 (Romania)
  // The code may sit at the very start ("us-01.server.com") or after a
  // separator ("node-us-01"); the old /[-_]([a-z]{2})[-_\d]/ pattern missed
  // the leading-position case its own comment promised to handle.
  const countryMatch = endpoint.match(/(?:^|[-_])([a-z]{2})[-_\d]/i);
  if (countryMatch) {
    return countryMatch[1].toUpperCase();
  }
  // Known IP ranges (basic prefix lookup)
  const ipRanges = {
    '185.107.57': 'RO',
    '185.163.110': 'RO',
    '169.150': 'US',
    '103.107': 'JP'
  };
  for (const [range, country] of Object.entries(ipRanges)) {
    if (endpoint.startsWith(range)) {
      return country;
    }
  }
  return null;
}
// Get all VPN configs for user.
// GET /configs — lists the caller's VPN configs (metadata only, no encrypted
// config_data) and reconciles rows still flagged is_active against the real
// connection state.
router.get('/configs', authenticate, readLimiter, async (req, res) => {
  try {
    const configs = await new Promise((resolve, reject) => {
      db.all(
        `SELECT id, name, config_type, country, server_name, endpoint, is_active, created_at
        FROM vpn_configs
        WHERE user_id = ?
        ORDER BY is_active DESC, created_at DESC`,
        [req.user.userId],
        (err, rows) => {
          if (err) reject(err);
          else resolve(rows || []);
        }
      );
    });
    // Verify actual VPN connection status for active configs.
    // Sequential awaits are deliberate: each stale row is corrected in the DB
    // before the next one is inspected.
    for (const config of configs) {
      if (config.is_active) {
        // NOTE(review): checkVPNConnection is defined elsewhere in this file —
        // presumably probes the live tunnel; confirm its contract.
        const actuallyConnected = await checkVPNConnection(config.config_type, req.user.userId);
        if (!actuallyConnected) {
          // Update database to reflect actual state
          await new Promise((resolve, reject) => {
            db.run(
              'UPDATE vpn_configs SET is_active = 0 WHERE id = ?',
              [config.id],
              (err) => (err ? reject(err) : resolve())
            );
          });
          config.is_active = 0;
          console.log(`[VPN-CONFIG] Reset stale active state for config ${config.id}`);
        }
      }
    }
    res.json({ configs });
  } catch (err) {
    console.error('Error fetching VPN configs:', err);
    res.status(500).json({ error: 'Failed to fetch VPN configurations' });
  }
});
// Get specific config.
// Returns one of the caller's configs including its decrypted payload.
router.get('/configs/:id', authenticate, readLimiter, (req, res) => {
  const sql = `SELECT id, name, config_type, config_data, country, server_name, endpoint, is_active, created_at
    FROM vpn_configs
    WHERE id = ? AND user_id = ?`;
  db.get(sql, [req.params.id, req.user.userId], (err, config) => {
    if (err) {
      console.error('Error fetching VPN config:', err);
      return res.status(500).json({ error: 'Failed to fetch configuration' });
    }
    if (!config) {
      return res.status(404).json({ error: 'Configuration not found' });
    }
    // Config payloads are stored encrypted at rest; decode before returning.
    try {
      config.config_data = JSON.stringify(decrypt(config.config_data));
    } catch (error) {
      console.error('Error decrypting config:', error);
      return res.status(500).json({ error: 'Failed to decrypt configuration' });
    }
    res.json({ config });
  });
});
// Upload VPN config file.
// POST /configs/upload — accepts a .conf (WireGuard) or .ovpn (OpenVPN) file,
// validates name/size/extension, parses it, encrypts the payload, stores it.
router.post('/configs/upload', authenticate, modifyLimiter, async (req, res) => {
  try {
    console.log('[VPN-CONFIG] Upload request received');
    console.log('[VPN-CONFIG] Files:', req.files ? Object.keys(req.files) : 'none');
    // CWE-532: Do not log request body - may contain sensitive VPN credentials
    if (!req.files || !req.files.config) {
      return res.status(400).json({ error: 'No file uploaded' });
    }
    const uploadedFile = req.files.config;
    // Validate file size (1MB max)
    if (uploadedFile.size > 1024 * 1024) {
      return res.status(400).json({ error: 'File too large (max 1MB)' });
    }
    // Validate file extension
    const ext = path.extname(uploadedFile.name).toLowerCase();
    if (ext !== '.conf' && ext !== '.ovpn') {
      return res.status(400).json({ error: 'Only .conf and .ovpn files are allowed' });
    }
    const { name } = req.body;
    if (!name || name.trim().length === 0) {
      return res.status(400).json({ error: 'Configuration name is required' });
    }
    // Validate name
    if (!/^[a-zA-Z0-9\s\-_.()]+$/.test(name)) {
      return res.status(400).json({ error: 'Invalid configuration name. Use only letters, numbers, spaces, and common punctuation.' });
    }
    if (name.length > 100) {
      return res.status(400).json({ error: 'Configuration name too long (max 100 characters)' });
    }
    // Read file content from temp file
    const content = await fs.readFile(uploadedFile.tempFilePath, 'utf8');
    // CWE-532 fix: never log the file content itself — a WireGuard .conf
    // carries the interface PrivateKey in plaintext. Metadata only.
    console.log('[VPN-CONFIG] File extension:', ext);
    console.log('[VPN-CONFIG] Content length:', content.length);
    // Parse based on file type
    let parsed;
    if (ext === '.conf') {
      parsed = parseWireGuardConfig(content);
    } else if (ext === '.ovpn') {
      parsed = parseOpenVPNConfig(content);
    } else {
      return res.status(400).json({ error: 'Unsupported file format' });
    }
    // Encrypt config data using centralized encryption (CWE-311)
    const encryptedData = encrypt(parsed.data);
    // Save to database
    db.run(
      `INSERT INTO vpn_configs (user_id, name, config_type, config_data, country, server_name, endpoint, is_active)
      VALUES (?, ?, ?, ?, ?, ?, ?, 0)`,
      [req.user.userId, name.trim(), parsed.type, encryptedData, parsed.country, parsed.serverName, parsed.endpoint],
      // Classic function so `this.lastID` is available.
      function(err) {
        if (err) {
          console.error('Error saving VPN config:', err);
          return res.status(500).json({ error: 'Failed to save configuration' });
        }
        res.json({
          message: 'Configuration uploaded successfully',
          config: {
            id: this.lastID,
            name: name.trim(),
            type: parsed.type,
            country: parsed.country,
            serverName: parsed.serverName,
            endpoint: parsed.endpoint
          }
        });
      }
    );
  } catch (error) {
    console.error('[VPN-CONFIG] Error processing config upload:', error);
    console.error('[VPN-CONFIG] Error stack:', error.stack);
    res.status(500).json({ error: error.message || 'Failed to process configuration file' });
  }
});
// Delete config
router.delete('/configs/:id', authenticate, modifyLimiter, (req, res) => {
  const configId = req.params.id;
  const ownerId = req.user.userId;
  // An active config must be disconnected before it can be deleted
  db.get(
    'SELECT is_active FROM vpn_configs WHERE id = ? AND user_id = ?',
    [configId, ownerId],
    (err, row) => {
      if (err) {
        console.error('Error checking config:', err);
        return res.status(500).json({ error: 'Failed to delete configuration' });
      }
      if (!row) {
        return res.status(404).json({ error: 'Configuration not found' });
      }
      if (row.is_active) {
        return res.status(400).json({ error: 'Cannot delete active configuration. Disconnect first.' });
      }
      // Safe to remove - row exists and is inactive
      db.run(
        'DELETE FROM vpn_configs WHERE id = ? AND user_id = ?',
        [configId, ownerId],
        function(deleteErr) {
          if (deleteErr) {
            console.error('Error deleting config:', deleteErr);
            return res.status(500).json({ error: 'Failed to delete configuration' });
          }
          res.json({ message: 'Configuration deleted successfully' });
        }
      );
    }
  );
});
// Set active config
// Deactivates every config the user owns, then flags the requested one.
// The two UPDATEs run strictly in sequence (each awaited), matching the
// original db.serialize() ordering.
router.post('/configs/:id/activate', authenticate, modifyLimiter, async (req, res) => {
  const run = (sql, params) =>
    new Promise((resolve, reject) => {
      db.run(sql, params, function(err) {
        if (err) reject(err);
        else resolve(this.changes);
      });
    });
  try {
    await run('UPDATE vpn_configs SET is_active = 0 WHERE user_id = ?', [req.user.userId]);
  } catch (err) {
    console.error('Error deactivating configs:', err);
    return res.status(500).json({ error: 'Failed to activate configuration' });
  }
  try {
    const changes = await run(
      'UPDATE vpn_configs SET is_active = 1 WHERE id = ? AND user_id = ?',
      [req.params.id, req.user.userId]
    );
    if (changes === 0) {
      // Nothing updated: the id does not exist or belongs to another user
      return res.status(404).json({ error: 'Configuration not found' });
    }
    res.json({ message: 'Configuration activated successfully' });
  } catch (err) {
    console.error('Error activating config:', err);
    res.status(500).json({ error: 'Failed to activate configuration' });
  }
});
// Connect using config
// Looks up the user's config, decrypts it, brings the tunnel up, and marks
// it active. Responds 400 for unsupported types, 404 when not found.
router.post('/configs/:id/connect', authenticate, modifyLimiter, async (req, res) => {
  try {
    // Get config (scoped to the requesting user)
    const config = await new Promise((resolve, reject) => {
      db.get(
        'SELECT config_type, config_data FROM vpn_configs WHERE id = ? AND user_id = ?',
        [req.params.id, req.user.userId],
        (err, row) => {
          if (err) reject(err);
          else resolve(row);
        }
      );
    });
    if (!config) {
      return res.status(404).json({ error: 'Configuration not found' });
    }
    // Fix: previously an unknown config_type skipped the connect step but
    // still marked the config active. Reject unsupported types up front.
    if (config.config_type !== 'wireguard' && config.config_type !== 'openvpn') {
      return res.status(400).json({ error: 'Unsupported configuration type' });
    }
    // Decrypt config (stored encrypted at rest, CWE-311)
    const decryptedData = decrypt(config.config_data);
    console.log(`[VPN-CONFIG] Connecting config ${req.params.id} for user ${req.user.userId}`);
    // Connect based on type
    if (config.config_type === 'wireguard') {
      await connectWireGuard(decryptedData, req.user.userId);
    } else {
      await connectOpenVPN(decryptedData, req.user.userId);
    }
    console.log(`[VPN-CONFIG] Successfully connected, updating database`);
    // Deactivate all other configs for this user first
    await new Promise((resolve, reject) => {
      db.run(
        'UPDATE vpn_configs SET is_active = 0 WHERE user_id = ?',
        [req.user.userId],
        (err) => (err ? reject(err) : resolve())
      );
    });
    // Mark this config as active
    await new Promise((resolve, reject) => {
      db.run(
        'UPDATE vpn_configs SET is_active = 1 WHERE id = ? AND user_id = ?',
        [req.params.id, req.user.userId],
        (err) => (err ? reject(err) : resolve())
      );
    });
    console.log(`[VPN-CONFIG] Configuration marked as active for user ${req.user.userId}`);
    res.json({ message: 'Connected to VPN successfully', success: true });
  } catch (error) {
    console.error('Error connecting to VPN:', error);
    res.status(500).json({ error: error.message || 'Failed to connect to VPN' });
  }
});
// Disconnect VPN
// Tears down the active tunnel for this config and clears its active flag.
// When no active row is found, still performs best-effort OS-level cleanup
// so a stale tunnel left over from a crash does not linger.
router.post('/configs/:id/disconnect', authenticate, modifyLimiter, async (req, res) => {
  try {
    // Get config
    const config = await new Promise((resolve, reject) => {
      db.get(
        'SELECT config_type FROM vpn_configs WHERE id = ? AND user_id = ? AND is_active = 1',
        [req.params.id, req.user.userId],
        (err, row) => {
          if (err) reject(err);
          else resolve(row);
        }
      );
    });
    if (!config) {
      // No active config found, but still try to clean up any interfaces
      console.log('[VPN] No active config found, attempting cleanup anyway');
      // Fix: cleanup previously only handled WireGuard, leaving stray
      // OpenVPN daemons running. Attempt both tunnel types, best-effort.
      try {
        await disconnectWireGuard(req.user.userId);
      } catch (e) {
        // Ignore errors during cleanup
      }
      try {
        await disconnectOpenVPN(req.user.userId);
      } catch (e) {
        // Ignore errors during cleanup
      }
      // Mark as inactive regardless
      await new Promise((resolve, reject) => {
        db.run(
          'UPDATE vpn_configs SET is_active = 0 WHERE id = ? AND user_id = ?',
          [req.params.id, req.user.userId],
          (err) => (err ? reject(err) : resolve())
        );
      });
      return res.json({ message: 'VPN state cleaned up' });
    }
    // Disconnect based on type
    if (config.config_type === 'wireguard') {
      await disconnectWireGuard(req.user.userId);
    } else if (config.config_type === 'openvpn') {
      await disconnectOpenVPN(req.user.userId);
    }
    // Mark as inactive
    await new Promise((resolve, reject) => {
      db.run(
        'UPDATE vpn_configs SET is_active = 0 WHERE id = ? AND user_id = ?',
        [req.params.id, req.user.userId],
        (err) => (err ? reject(err) : resolve())
      );
    });
    res.json({ message: 'Disconnected from VPN successfully' });
  } catch (error) {
    console.error('Error disconnecting from VPN:', error);
    res.status(500).json({ error: error.message || 'Failed to disconnect from VPN' });
  }
});
// Helper: Connect WireGuard
// Builds a split-tunnel wg-quick config (Docker/LAN ranges stay on eth0,
// everything else goes through the tunnel) and brings the interface up.
// SECURITY NOTE(review): values parsed from the uploaded config are
// interpolated into a root-executed wg-quick file whose PostUp/PreDown
// lines run as shell commands - parseWireGuardConfig must strictly
// validate keys, addresses, and endpoints to prevent command injection.
// Confirm that validation upstream.
async function connectWireGuard(config, userId) {
  const interfaceName = `wg${userId}`;
  const confPath = `/etc/wireguard/${interfaceName}.conf`;
  // Build WireGuard config with split-tunnel for local network access
  // Note: DNS is not set via wg-quick's DNS= key to avoid conflicts with
  // Docker's DNS; /etc/resolv.conf is swapped manually in PostUp instead.
  // Extract VPN endpoint IP to exclude from tunnel (prevent routing loop)
  const vpnEndpointIP = config.peer.Endpoint ? config.peer.Endpoint.split(':')[0] : null;
  // Use DNS from config (VPN provider's DNS) with public DNS fallbacks
  const dnsServers = config.interface.DNS || '1.1.1.1';
  const primaryDNS = dnsServers.split(',')[0].trim();
  // Fix: removed "PreDown = ip rule del to 10.0.0.0/8 table main priority 52".
  // No PostUp ever added that rule, so the del always failed, which aborts
  // wg-quick's teardown and leaves the interface half-down.
  const wgConfig = `[Interface]
PrivateKey = ${config.interface.PrivateKey}
Address = ${config.interface.Address}
Table = off
FwMark = 0xca6c
PostUp = ip route add default dev %i table 51820${vpnEndpointIP ? `
PostUp = ip route add ${vpnEndpointIP}/32 via 172.20.0.1 dev eth0` : ''}
PostUp = ip rule add to 172.20.0.0/16 table main priority 50
PostUp = ip rule add to 192.168.0.0/16 table main priority 51
PostUp = ip rule add not fwmark 0xca6c table 51820 priority 100
PostUp = ip route replace default via 172.20.0.1 dev eth0 metric 200
PostUp = ip route add default dev %i metric 50
PostUp = cp /etc/resolv.conf /etc/resolv.conf.vpn-backup
PostUp = echo 'nameserver ${primaryDNS}' > /etc/resolv.conf
PostUp = echo 'nameserver 1.1.1.1' >> /etc/resolv.conf
PostUp = echo 'nameserver 8.8.8.8' >> /etc/resolv.conf
PreDown = ip route del default dev %i metric 50
PreDown = ip route replace default via 172.20.0.1 dev eth0 metric 0
PreDown = ip rule del to 172.20.0.0/16 table main priority 50
PreDown = ip rule del to 192.168.0.0/16 table main priority 51
PreDown = ip rule del not fwmark 0xca6c table 51820 priority 100
PreDown = ip route del default dev %i table 51820${vpnEndpointIP ? `
PreDown = ip route del ${vpnEndpointIP}/32 via 172.20.0.1 dev eth0` : ''}
PreDown = mv /etc/resolv.conf.vpn-backup /etc/resolv.conf 2>/dev/null || true

[Peer]
PublicKey = ${config.peer.PublicKey}
AllowedIPs = ${config.peer.AllowedIPs || '0.0.0.0/0, ::/0'}
Endpoint = ${config.peer.Endpoint}
${config.peer.PersistentKeepalive ? `PersistentKeepalive = ${config.peer.PersistentKeepalive}` : ''}
`;
  console.log('[WireGuard] Creating config file at', confPath);
  console.log('[WireGuard] Current user:', process.getuid ? process.getuid() : 'unknown');
  // Write config file with owner-only permissions (contains the private key)
  try {
    await fs.writeFile(confPath, wgConfig, { mode: 0o600 });
    console.log('[WireGuard] Config file created successfully');
  } catch (err) {
    console.error('[WireGuard] Failed to create config file:', err.message);
    throw new Error(`Failed to create WireGuard config: ${err.message}`);
  }
  return new Promise((resolve, reject) => {
    // wg-quick needs root (CAP_NET_ADMIN) to create the interface
    const wg = spawn('wg-quick', ['up', interfaceName], {
      uid: 0, // Run as root
      gid: 0
    });
    let output = '';
    wg.stdout.on('data', (data) => {
      output += data;
      console.log('[WireGuard]', data.toString().trim());
    });
    wg.stderr.on('data', (data) => {
      output += data;
      console.log('[WireGuard]', data.toString().trim());
    });
    wg.on('close', (code) => {
      if (code === 0) {
        console.log('[WireGuard] Connected successfully to', config.peer.Endpoint);
        resolve();
      } else {
        console.error('[WireGuard] Connection failed (code', code, '):', output);
        // Check for Docker networking limitation
        if (output.includes('Nexthop has invalid gateway') || output.includes('Error: Nexthop')) {
          reject(new Error('VPN connection requires host network mode. Docker containers have limited network access. Please use the desktop app for VPN connections.'));
        } else {
          reject(new Error('WireGuard connection failed'));
        }
      }
    });
    wg.on('error', (err) => {
      console.error('[WireGuard] Spawn error:', err.message);
      reject(new Error(`Failed to start wg-quick: ${err.message}`));
    });
  });
}
// Helper: Connect OpenVPN
// Writes the decrypted config to a per-user temp file and launches the
// openvpn daemon against it. Resolves when the daemon forks successfully.
async function connectOpenVPN(config, userId) {
  const confPath = `/tmp/ovpn_${userId}.conf`;
  // Fix (CWE-732): the config may embed credentials/keys - restrict the
  // temp file to owner read/write instead of the default world-readable mode.
  await fs.writeFile(confPath, config.config, { mode: 0o600 });
  return new Promise((resolve, reject) => {
    const ovpn = spawn('openvpn', [
      '--config', confPath,
      '--daemon',
      '--log', `/tmp/ovpn_${userId}.log`
    ]);
    ovpn.on('close', (code) => {
      if (code === 0) {
        console.log('[OpenVPN] Connected successfully');
        resolve();
      } else {
        reject(new Error('OpenVPN connection failed. VPN requires host network mode. Please use the desktop app for VPN connections.'));
      }
    });
    // Fix: without an 'error' handler a missing openvpn binary raised an
    // unhandled 'error' event and left this promise pending forever.
    ovpn.on('error', (err) => {
      console.error('[OpenVPN] Spawn error:', err.message);
      reject(new Error(`Failed to start OpenVPN: ${err.message}`));
    });
  });
}
// Helper: Disconnect WireGuard
// Runs `wg-quick down` for this user's interface; treats an already-missing
// interface as success so disconnect is idempotent.
async function disconnectWireGuard(userId) {
  const interfaceName = `wg${userId}`;
  console.log('[WireGuard] Disconnecting interface:', interfaceName);
  return new Promise((resolve, reject) => {
    const proc = spawn('wg-quick', ['down', interfaceName], { uid: 0, gid: 0 });
    let combined = '';
    const capture = (chunk) => {
      combined += chunk;
      console.log('[WireGuard]', chunk.toString().trim());
    };
    proc.stdout.on('data', capture);
    proc.stderr.on('data', capture);
    proc.on('close', (exitCode) => {
      if (exitCode === 0) {
        console.log('[WireGuard] Disconnected successfully');
        return resolve();
      }
      const alreadyDown =
        combined.includes('is not a WireGuard interface') ||
        combined.includes('does not exist');
      if (alreadyDown) {
        // Interface was never up (or its config is gone) - nothing to do
        console.log('[WireGuard] Interface already disconnected or config file missing');
        return resolve();
      }
      console.error('[WireGuard] Disconnect failed (code', exitCode, '):', combined);
      reject(new Error('WireGuard disconnect failed'));
    });
    proc.on('error', (err) => {
      console.error('[WireGuard] Spawn error:', err.message);
      reject(new Error(`Failed to stop wg-quick: ${err.message}`));
    });
  });
}
// Helper: Disconnect OpenVPN
// Kills any openvpn daemon started for this user's config via pkill.
async function disconnectOpenVPN(userId) {
  console.log('[OpenVPN] Disconnecting...');
  return new Promise((resolve, reject) => {
    const killer = spawn('pkill', ['-f', `openvpn.*ovpn_${userId}`]);
    killer.on('close', (exitCode) => {
      // pkill exit 0 = processes killed, 1 = nothing matched; both are fine
      if (exitCode === 0 || exitCode === 1) {
        console.log('[OpenVPN] Disconnected successfully');
        resolve();
      } else {
        console.error('[OpenVPN] Disconnect failed with code:', exitCode);
        reject(new Error('OpenVPN disconnect failed'));
      }
    });
    killer.on('error', (err) => {
      console.error('[OpenVPN] Spawn error:', err.message);
      reject(new Error(`Failed to stop OpenVPN: ${err.message}`));
    });
  });
}
// Helper: Check if VPN is actually connected
// Probes the OS for a live tunnel; resolves false on any failure so callers
// can treat this as a pure status check.
async function checkVPNConnection(configType, userId) {
  const probe = (cmd, args) =>
    new Promise((resolve) => {
      const child = spawn(cmd, args);
      let sawOutput = false;
      child.stdout.on('data', () => {
        sawOutput = true;
      });
      // Connected only when the probe exits 0 AND printed something
      child.on('close', (code) => resolve(code === 0 && sawOutput));
      child.on('error', () => resolve(false));
    });
  try {
    if (configType === 'wireguard') {
      return probe('wg', ['show', `wg${userId}`]);
    }
    if (configType === 'openvpn') {
      return probe('pgrep', ['-f', `openvpn.*ovpn_${userId}`]);
    }
    return false;
  } catch (error) {
    return false;
  }
}
module.exports = router;

301
backend/server.js Normal file
View file

@ -0,0 +1,301 @@
const express = require('express');
const helmet = require('helmet');
const cors = require('cors');
const compression = require('compression');
const path = require('path');
const dotenv = require('dotenv');
const fileUpload = require('express-fileupload');
const crypto = require('crypto');
const logger = require('./utils/logger');
const { errorMiddleware, ErrorResponses } = require('./utils/errorHandler');
const db = require('./database/db');
const logManagement = require('./jobs/logManagement');
dotenv.config();

const app = express();
const PORT = process.env.PORT || 12345;
const isProduction = process.env.NODE_ENV === 'production';

// Generate nonce for inline scripts
// A fresh 16-byte base64 nonce per request lets the CSP below whitelist
// specific inline scripts via 'nonce-...' instead of relying solely on
// 'unsafe-inline'.
app.use((req, res, next) => {
  res.locals.nonce = crypto.randomBytes(16).toString('base64');
  next();
});
// Security middleware with comprehensive CSP
// NOTE(review): 'unsafe-inline', 'unsafe-eval', and the '*' wildcards on
// media-src/connect-src materially weaken this CSP; they are kept because
// the player must reach arbitrary IPTV stream hosts. Revisit if stream
// sources ever become enumerable. CSP runs report-only outside production.
app.use(helmet({
  contentSecurityPolicy: {
    directives: {
      defaultSrc: ["'self'"],
      scriptSrc: [
        "'self'",
        "'unsafe-inline'", // Required for React/Vite inline scripts
        "'unsafe-eval'", // Required for React DevTools and some libraries
        "https://www.gstatic.com", // Google Cast SDK
        "https://cdn.jsdelivr.net", // HLS.js library
        "blob:", // Required for HLS.js Web Workers
        // Per-request nonce generated by the middleware above
        (req, res) => `'nonce-${res.locals.nonce}'`
      ],
      workerSrc: [
        "'self'",
        "blob:" // Required for HLS.js Web Workers
      ],
      styleSrc: [
        "'self'",
        "'unsafe-inline'", // Required for MUI and inline styles
        "https://fonts.googleapis.com"
      ],
      fontSrc: [
        "'self'",
        "data:",
        "https://fonts.gstatic.com"
      ],
      imgSrc: [
        "'self'",
        "data:",
        "blob:",
        "https:",
        "http:" // Allow external logo URLs
      ],
      mediaSrc: [
        "'self'",
        "blob:",
        "data:",
        "mediastream:",
        "https:",
        "http:", // Required for IPTV streams
        "*" // Allow all media sources for streaming
      ],
      connectSrc: [
        "'self'",
        "https:",
        "http:",
        "ws:",
        "wss:", // WebSocket support
        "blob:",
        "*" // Required for external APIs and streams
      ],
      frameSrc: [
        "'self'",
        "https://www.youtube.com", // For embedded players
        "https://player.vimeo.com"
      ],
      objectSrc: ["'none'"],
      baseUri: ["'self'"],
      formAction: ["'self'"],
      frameAncestors: ["'self'"]
      // upgradeInsecureRequests disabled for HTTP-only deployments
    },
    reportOnly: !isProduction, // Report-only mode in development
    useDefaults: false
  },
  originAgentCluster: false, // Disable to avoid agent cluster warnings
  crossOriginEmbedderPolicy: false,
  crossOriginOpenerPolicy: false,
  // cross-origin so cached logos/streams can be embedded by other origins
  crossOriginResourcePolicy: { policy: "cross-origin" },
  // HSTS only in production - the dev deployment is HTTP-only
  hsts: isProduction ? {
    maxAge: 31536000,
    includeSubDomains: true,
    preload: true
  } : false,
  referrerPolicy: { policy: "strict-origin-when-cross-origin" },
  noSniff: true,
  xssFilter: true,
  hidePoweredBy: true
}));
// CORS configuration to allow local network and HTTPS domain
// Entries may be exact origin strings or RegExps tested against the
// incoming Origin header (RFC 1918 LAN ranges with any port).
const allowedOrigins = [
  'http://localhost:12345',
  'http://localhost:9000',
  'https://tv.iulian.uk',
  'http://tv.iulian.uk',
  /^http:\/\/192\.168\.\d{1,3}\.\d{1,3}(:\d+)?$/, // Local network 192.168.x.x
  /^http:\/\/10\.\d{1,3}\.\d{1,3}\.\d{1,3}(:\d+)?$/, // Local network 10.x.x.x
  /^http:\/\/172\.(1[6-9]|2[0-9]|3[0-1])\.\d{1,3}\.\d{1,3}(:\d+)?$/ // Local network 172.16-31.x.x
];

// Mount logo-proxy BEFORE global CORS to handle public image serving
app.use('/api/logo-proxy', require('./routes/logo-proxy'));

app.use(cors({
  origin: function (origin, callback) {
    // Allow requests with no origin (mobile apps, curl, etc.)
    if (!origin) return callback(null, true);
    // Check if origin matches allowed patterns
    const isAllowed = allowedOrigins.some(allowed => {
      if (typeof allowed === 'string') {
        return origin === allowed;
      } else if (allowed instanceof RegExp) {
        return allowed.test(origin);
      }
      return false;
    });
    if (isAllowed) {
      callback(null, true);
    } else {
      // Rejection surfaces as an error through Express's error pipeline
      console.warn(`[CORS] Rejected origin: ${origin}`);
      callback(new Error('Not allowed by CORS'));
    }
  },
  credentials: true, // Allow cookies and authentication headers
  methods: ['GET', 'POST', 'PUT', 'DELETE', 'PATCH', 'OPTIONS'],
  allowedHeaders: ['Content-Type', 'Authorization', 'X-Requested-With']
}));
app.use(compression());
// Body parsers: 10mb limit accommodates large playlist payloads
app.use(express.json({ limit: '10mb' }));
app.use(express.urlencoded({ extended: true, limit: '10mb' }));
// Uploads are streamed to temp files rather than buffered in memory
app.use(fileUpload({
  limits: { fileSize: 100 * 1024 * 1024 }, // 100MB max file size
  useTempFiles: true,
  tempFileDir: '/tmp/'
}));

// Initialize database (creates tables and seeds RBAC roles)
db.initialize();

// Serve static files (uploaded logos)
app.use('/uploads', express.static(path.join(__dirname, 'uploads')));

// Serve cached logos from IPTV-org
app.use('/logos', express.static(path.join('/app', 'data', 'logo-cache')));

// Health check endpoint (no rate limiting)
app.get('/api/health', (req, res) => {
  res.status(200).json({ status: 'ok', timestamp: new Date().toISOString() });
});
// API Routes
// Each router module is self-contained (auth/rate-limit middleware inside).
app.use('/api/auth', require('./routes/auth'));
app.use('/api/users', require('./routes/users'));
app.use('/api/playlists', require('./routes/playlists'));
app.use('/api/channels', require('./routes/channels'));
app.use('/api/recordings', require('./routes/recordings'));
app.use('/api/two-factor', require('./routes/twoFactor'));
app.use('/api/profiles', require('./routes/profiles'));
app.use('/api/radio', require('./routes/radio'));
app.use('/api/groups', require('./routes/groups'));
app.use('/api/settings', require('./routes/settings'));
app.use('/api/stream', require('./routes/stream'));
app.use('/api/stats', require('./routes/stats'));
app.use('/api/m3u-files', require('./routes/m3u-files'));
app.use('/api/vpn-configs', require('./routes/vpn-configs'));
app.use('/api/search', require('./routes/search'));
app.use('/api/favorites', require('./routes/favorites'));
app.use('/api/backup', require('./routes/backup'));
app.use('/api/metadata', require('./routes/metadata'));
app.use('/api/history', require('./routes/history'));
app.use('/api/logo-cache', require('./routes/logo-cache'));
app.use('/api/sessions', require('./routes/sessions'));
app.use('/api/csp', require('./routes/csp'));
app.use('/api/rbac', require('./routes/rbac'));
app.use('/api/security-monitor', require('./routes/security-monitor'));
app.use('/api/security-headers', require('./routes/security-headers'));
app.use('/api/security-testing', require('./routes/security-testing'));
app.use('/api/security-config', require('./routes/security-config'));
app.use('/api/siem', require('./routes/siem'));
app.use('/api/log-management', require('./routes/log-management'));
app.use('/api/encryption', require('./routes/encryption-management'));

// Serve static files from frontend
app.use(express.static(path.join(__dirname, '../frontend/dist')));

// Serve uploaded files
// NOTE(review): '/uploads' and '/logos' are also mounted earlier with
// different directories; Express tries mounts in order, so these act as
// fallbacks. Confirm the duplication is intentional.
app.use('/uploads', express.static(path.join(__dirname, '../data/uploads')));
app.use('/logos', express.static(path.join(__dirname, '../data/logos')));
app.use('/data/logo-cache', express.static(path.join(__dirname, '../data/logo-cache')));

// Handle SPA routing (must be before error handler)
// Any unmatched GET falls through to the frontend's index.html
app.get('*', (req, res) => {
  res.sendFile(path.join(__dirname, '../frontend/dist/index.html'));
});

// Secure error handling middleware (CWE-209 protection)
// This MUST be the last middleware - catches all errors and sanitizes them
app.use(errorMiddleware);
// Reset VPN states on startup (connections are lost on restart)
// Tunnels do not survive a process restart, so any is_active=1 row is stale.
db.db.run('UPDATE vpn_configs SET is_active = 0', (err) => {
  if (err) {
    logger.error('Failed to reset VPN states on startup:', err);
  } else {
    logger.info('Reset all VPN connection states on startup');
  }
});

// Start background jobs
// Side-effectful requires: each module schedules its own timers on load.
require('./jobs/channelHealth');
require('./jobs/recordingScheduler');
require('./jobs/logoCacher');

// Process-level error handlers (CWE-391 protection)

// Handle uncaught exceptions
// Exits after a short delay so the logger can flush the fatal entry.
process.on('uncaughtException', (error) => {
  logger.error('UNCAUGHT EXCEPTION - Process will exit', {
    error: error.message,
    stack: error.stack,
    timestamp: new Date().toISOString()
  });
  // Give logger time to write
  setTimeout(() => {
    process.exit(1);
  }, 1000);
});

// Handle unhandled promise rejections
// Logged but non-fatal: the process keeps running.
process.on('unhandledRejection', (reason, promise) => {
  logger.error('UNHANDLED PROMISE REJECTION', {
    reason: reason instanceof Error ? reason.message : reason,
    stack: reason instanceof Error ? reason.stack : undefined,
    promise: promise.toString(),
    timestamp: new Date().toISOString()
  });
});
// Handle SIGTERM gracefully
process.on('SIGTERM', () => {
  logger.info('SIGTERM received, closing server gracefully');
  // Close database connections, clear intervals, etc.
  db.db.close((err) => {
    if (err) {
      logger.error('Error closing database:', err);
    }
    process.exit(0);
  });
});

// Handle SIGINT (Ctrl+C) gracefully
process.on('SIGINT', () => {
  logger.info('SIGINT received, closing server gracefully');
  // Consistency fix: SIGINT previously exited without closing the SQLite
  // handle, unlike SIGTERM, risking an unflushed journal on Ctrl+C.
  // Mirror the SIGTERM shutdown path.
  db.db.close((err) => {
    if (err) {
      logger.error('Error closing database:', err);
    }
    process.exit(0);
  });
});
// Start server
// Binds on all interfaces so LAN clients (and Docker port mapping) can reach it.
const server = app.listen(PORT, '0.0.0.0', () => {
  logger.info(`StreamFlow server running on port ${PORT}`);
  console.log(`StreamFlow server running on port ${PORT}`);
  // Initialize log management system (CWE-53 compliance)
  // Async init; failure is logged but does not prevent serving traffic.
  logManagement.initialize().catch(err => {
    logger.error('Failed to initialize log management:', err);
  });
});

// Handle server errors
// EADDRINUSE is fatal (another instance holds the port); other errors are
// logged and the process keeps running.
server.on('error', (error) => {
  logger.error('Server error:', {
    error: error.message,
    code: error.code,
    stack: error.stack
  });
  if (error.code === 'EADDRINUSE') {
    logger.error(`Port ${PORT} is already in use`);
    process.exit(1);
  }
});

module.exports = app;

View file

@ -0,0 +1,704 @@
/**
* Real-time Alert System
* Automated notification system for security events
* Supports multiple notification channels and alert rules
*/
const logger = require('./logger');
const logAggregator = require('./logAggregator');
const { db } = require('../database/db');
const EventEmitter = require('events');
const responseProtocolManager = require('./responseProtocolManager');
class AlertSystem extends EventEmitter {
constructor() {
super();
this.activeAlerts = new Map();
this.alertRules = new Map();
this.initialize();
}
/**
 * Initialize alert system: ensure the alerts and rules tables exist, seed
 * default rules, then load enabled rules into memory.
 * NOTE(review): invoked from the constructor without being awaited, so
 * callers cannot assume rules are loaded immediately after construction -
 * confirm no caller relies on synchronous readiness.
 * @returns {Promise<void>}
 */
async initialize() {
  await this.createAlertsTable();
  await this.createAlertRulesTable();
  await this.loadAlertRules();
  logger.info('[AlertSystem] Initialized - Real-time monitoring enabled');
}
/**
 * Create the security_alerts table and its lookup indexes (idempotent via
 * IF NOT EXISTS).
 * @returns {Promise<void>} rejects only if the table creation itself fails
 */
async createAlertsTable() {
  return new Promise((resolve, reject) => {
    db.run(`
      CREATE TABLE IF NOT EXISTS security_alerts (
        id INTEGER PRIMARY KEY AUTOINCREMENT,
        alert_id TEXT UNIQUE NOT NULL,
        rule_id TEXT,
        severity TEXT NOT NULL,
        title TEXT NOT NULL,
        description TEXT NOT NULL,
        affected_entity TEXT,
        source_data TEXT,
        status TEXT DEFAULT 'active',
        acknowledged_at DATETIME,
        acknowledged_by INTEGER,
        resolved_at DATETIME,
        resolved_by INTEGER,
        resolution_notes TEXT,
        notification_sent INTEGER DEFAULT 0,
        created_at DATETIME DEFAULT CURRENT_TIMESTAMP
      )
    `, (err) => {
      if (err) reject(err);
      else {
        // Index creation is fire-and-forget: results are not awaited and
        // errors are not surfaced (IF NOT EXISTS makes failure unlikely).
        db.run(`CREATE INDEX IF NOT EXISTS idx_alerts_severity ON security_alerts(severity, created_at DESC)`);
        db.run(`CREATE INDEX IF NOT EXISTS idx_alerts_status ON security_alerts(status, created_at DESC)`);
        db.run(`CREATE INDEX IF NOT EXISTS idx_alerts_rule ON security_alerts(rule_id)`);
        resolve();
      }
    });
  });
}
/**
* Create alert rules table
*/
async createAlertRulesTable() {
return new Promise((resolve, reject) => {
db.run(`
CREATE TABLE IF NOT EXISTS alert_rules (
id INTEGER PRIMARY KEY AUTOINCREMENT,
rule_id TEXT UNIQUE NOT NULL,
name TEXT NOT NULL,
description TEXT,
rule_type TEXT NOT NULL,
condition TEXT NOT NULL,
severity TEXT NOT NULL,
enabled INTEGER DEFAULT 1,
notification_channels TEXT,
cooldown_minutes INTEGER DEFAULT 10,
created_at DATETIME DEFAULT CURRENT_TIMESTAMP,
updated_at DATETIME DEFAULT CURRENT_TIMESTAMP
)
`, async (err) => {
if (err) reject(err);
else {
await this.createDefaultRules();
resolve();
}
});
});
}
/**
 * Seed the alert_rules table with the built-in detection rules.
 * Uses INSERT OR IGNORE keyed on rule_id, so rules that already exist
 * (possibly edited by an operator) are never overwritten on restart.
 * Conditions and notification channel lists are stored as JSON strings.
 * @returns {Promise<void>}
 */
async createDefaultRules() {
  const defaultRules = [
    {
      rule_id: 'RULE-BRUTE-FORCE',
      name: 'Brute Force Detection',
      description: 'Alert on brute force attack patterns',
      rule_type: 'anomaly',
      condition: JSON.stringify({ anomaly_type: 'brute_force_attack' }),
      severity: 'critical',
      notification_channels: JSON.stringify(['in_app', 'email']),
      cooldown_minutes: 10
    },
    {
      rule_id: 'RULE-PRIVILEGE-ESC',
      name: 'Privilege Escalation',
      description: 'Alert on privilege escalation attempts',
      rule_type: 'anomaly',
      condition: JSON.stringify({ anomaly_type: 'privilege_escalation' }),
      severity: 'critical',
      notification_channels: JSON.stringify(['in_app', 'email']),
      cooldown_minutes: 5
    },
    {
      rule_id: 'RULE-DATA-EXFIL',
      name: 'Data Exfiltration',
      description: 'Alert on potential data exfiltration',
      rule_type: 'anomaly',
      condition: JSON.stringify({ anomaly_type: 'data_exfiltration' }),
      severity: 'high',
      notification_channels: JSON.stringify(['in_app', 'email']),
      cooldown_minutes: 15
    },
    {
      rule_id: 'RULE-THREAT-CRITICAL',
      name: 'Critical Threat Level',
      description: 'Alert when threat score exceeds 80',
      rule_type: 'threshold',
      condition: JSON.stringify({ metric: 'threat_score', operator: '>=', value: 80 }),
      severity: 'critical',
      notification_channels: JSON.stringify(['in_app', 'email']),
      cooldown_minutes: 30
    },
    {
      rule_id: 'RULE-SUSPICIOUS-IP',
      name: 'Suspicious IP Activity',
      description: 'Alert on suspicious IP behavior',
      rule_type: 'anomaly',
      condition: JSON.stringify({ anomaly_type: 'suspicious_ip' }),
      severity: 'high',
      notification_channels: JSON.stringify(['in_app']),
      cooldown_minutes: 20
    },
    {
      rule_id: 'RULE-SESSION-ANOMALY',
      name: 'Session Anomaly',
      description: 'Alert on unusual session patterns',
      rule_type: 'anomaly',
      condition: JSON.stringify({ anomaly_type: 'session_anomaly' }),
      severity: 'medium',
      notification_channels: JSON.stringify(['in_app']),
      cooldown_minutes: 30
    }
  ];
  // Insert sequentially; any DB error rejects and aborts the remainder
  for (const rule of defaultRules) {
    await new Promise((resolve, reject) => {
      db.run(
        `INSERT OR IGNORE INTO alert_rules
         (rule_id, name, description, rule_type, condition, severity, notification_channels, cooldown_minutes)
         VALUES (?, ?, ?, ?, ?, ?, ?, ?)`,
        [
          rule.rule_id,
          rule.name,
          rule.description,
          rule.rule_type,
          rule.condition,
          rule.severity,
          rule.notification_channels,
          rule.cooldown_minutes
        ],
        (err) => {
          if (err) reject(err);
          else resolve();
        }
      );
    });
  }
  logger.info('[AlertSystem] Default alert rules created');
}
/**
* Load alert rules into memory
*/
async loadAlertRules() {
return new Promise((resolve, reject) => {
db.all(
'SELECT * FROM alert_rules WHERE enabled = 1',
[],
(err, rows) => {
if (err) {
reject(err);
return;
}
this.alertRules.clear();
for (const row of rows) {
this.alertRules.set(row.rule_id, {
...row,
condition: JSON.parse(row.condition),
notification_channels: JSON.parse(row.notification_channels)
});
}
logger.info(`[AlertSystem] Loaded ${rows.length} alert rules`);
resolve(rows.length);
}
);
});
}
/**
 * Trigger alert(s) for a detected anomaly.
 * Matches the anomaly against enabled 'anomaly'-type rules, honours each
 * rule's cooldown window, persists an alert, sends notifications, and
 * executes automated response protocols (CWE-778 logging).
 * @param {object} anomaly - reads: type, description, confidence,
 *   anomaly_id, pattern_data (JSON string), affected_user_id, affected_ip
 * @returns {Promise<void>}
 */
async triggerAnomalyAlert(anomaly) {
  // Find matching rules
  const matchingRules = [];
  for (const [ruleId, rule] of this.alertRules.entries()) {
    if (rule.rule_type === 'anomaly' && rule.condition.anomaly_type === anomaly.type) {
      matchingRules.push(rule);
    }
  }
  if (matchingRules.length === 0) {
    return;
  }
  // Create alerts for matching rules
  for (const rule of matchingRules) {
    // Check cooldown (per rule + affected entity, to avoid alert storms)
    if (await this.isInCooldown(rule.rule_id, anomaly.affected_ip || anomaly.affected_user_id)) {
      logger.debug(`[AlertSystem] Alert ${rule.rule_id} in cooldown period`);
      continue;
    }
    const alertId = await this.createAlert({
      rule_id: rule.rule_id,
      severity: rule.severity,
      title: rule.name,
      description: anomaly.description,
      affected_entity: anomaly.affected_user_id || anomaly.affected_ip,
      source_data: JSON.stringify({
        anomalyId: anomaly.anomaly_id,
        type: anomaly.type,
        confidence: anomaly.confidence,
        patternData: JSON.parse(anomaly.pattern_data || '{}')
      })
    });
    // Send notifications
    await this.sendNotifications(alertId, rule.notification_channels);
    // Execute automated response protocols (CWE-778)
    await this.executeResponseProtocols('anomaly', {
      anomaly_type: anomaly.type,
      severity: rule.severity
    }, {
      alertId,
      ip_address: anomaly.affected_ip,
      user_id: anomaly.affected_user_id,
      confidence: anomaly.confidence
    });
  }
}
/**
 * Execute automated response protocols for a trigger event.
 * CWE-778: every execution (and failure) is written to the aggregated log.
 * Errors from the protocol manager are caught and logged, never rethrown,
 * so a failing protocol cannot abort alert processing.
 * @param {string} triggerType - e.g. 'anomaly'
 * @param {object} triggerEvent - event payload matched against protocols
 * @param {object} [context] - extra data (alertId, ip_address, user_id, ...)
 * @returns {Promise<object|undefined>} protocol manager result, or
 *   undefined when execution failed
 */
async executeResponseProtocols(triggerType, triggerEvent, context = {}) {
  try {
    const result = await responseProtocolManager.executeProtocols(triggerType, triggerEvent, context);
    if (result.executed) {
      logger.warn(`[AlertSystem] Executed ${result.protocols.length} response protocol(s)`);
      // Log protocol execution (CWE-778)
      logAggregator.aggregate('alert_system', 'warn', 'security', 'Response protocols executed', {
        triggerType,
        protocolsExecuted: result.protocols.length,
        protocols: result.protocols.map(p => ({
          protocolId: p.protocolId,
          protocolName: p.protocolName,
          status: p.status,
          actionsExecuted: p.actionsExecuted
        })),
        context
      });
    }
    return result;
  } catch (error) {
    logger.error('[AlertSystem] Response protocol execution failed:', error);
    // Log execution failure (CWE-778)
    logAggregator.aggregate('alert_system', 'error', 'security', 'Response protocol execution failed', {
      triggerType,
      error: error.message,
      context
    });
  }
}
/**
 * Trigger alert(s) when a numeric metric crosses a rule's threshold.
 * Scans enabled 'threshold'-type rules for the metric, evaluates each
 * rule's condition, honours cooldowns (keyed by metric name), persists an
 * alert, and sends notifications.
 * @param {string} metric - metric name, e.g. 'threat_score'
 * @param {number} value - current metric value
 * @returns {Promise<void>}
 */
async triggerThresholdAlert(metric, value) {
  for (const [ruleId, rule] of this.alertRules.entries()) {
    if (rule.rule_type !== 'threshold' || rule.condition.metric !== metric) {
      continue;
    }
    // Evaluate condition
    const passed = this.evaluateCondition(rule.condition, value);
    if (!passed) continue;
    // Check cooldown
    if (await this.isInCooldown(rule.rule_id, metric)) {
      continue;
    }
    const alertId = await this.createAlert({
      rule_id: rule.rule_id,
      severity: rule.severity,
      title: rule.name,
      description: `${rule.description}: ${metric} = ${value}`,
      affected_entity: metric,
      source_data: JSON.stringify({ metric, value, threshold: rule.condition.value })
    });
    await this.sendNotifications(alertId, rule.notification_channels);
  }
}
/**
* Evaluate threshold condition
*/
evaluateCondition(condition, value) {
const { operator, value: threshold } = condition;
switch (operator) {
case '>=': return value >= threshold;
case '>': return value > threshold;
case '<=': return value <= threshold;
case '<': return value < threshold;
case '==': return value == threshold;
case '!=': return value != threshold;
default: return false;
}
}
/**
* Check if alert is in cooldown period
*/
async isInCooldown(ruleId, affectedEntity) {
const rule = this.alertRules.get(ruleId);
if (!rule) return false;
const cooldownMinutes = rule.cooldown_minutes || 10;
const cooldownTime = new Date(Date.now() - cooldownMinutes * 60 * 1000).toISOString();
return new Promise((resolve, reject) => {
db.get(
`SELECT COUNT(*) as count FROM security_alerts
WHERE rule_id = ?
AND affected_entity = ?
AND created_at >= ?`,
[ruleId, affectedEntity, cooldownTime],
(err, row) => {
if (err) reject(err);
else resolve(row.count > 0);
}
);
});
}
/**
* Create alert
*/
async createAlert(details) {
const alertId = `ALERT-${Date.now()}-${Math.random().toString(36).substr(2, 9)}`;
return new Promise((resolve, reject) => {
db.run(
`INSERT INTO security_alerts
(alert_id, rule_id, severity, title, description, affected_entity, source_data)
VALUES (?, ?, ?, ?, ?, ?, ?)`,
[
alertId,
details.rule_id,
details.severity,
details.title,
details.description,
details.affected_entity,
details.source_data
],
(err) => {
if (err) {
reject(err);
} else {
logger.warn(`[AlertSystem] Alert triggered: ${alertId} - ${details.title} (${details.severity})`);
// Log to aggregated logs
logAggregator.aggregate('alert_system', 'warn', 'security', details.title, {
alertId,
ruleId: details.rule_id,
severity: details.severity,
affectedEntity: details.affected_entity
});
// Emit event for real-time notifications
this.emit('alert', {
alertId,
...details,
timestamp: new Date().toISOString()
});
this.activeAlerts.set(alertId, details);
resolve(alertId);
}
}
);
});
}
/**
* Send notifications through configured channels
*/
async sendNotifications(alertId, channels) {
for (const channel of channels) {
try {
switch (channel) {
case 'in_app':
await this.sendInAppNotification(alertId);
break;
case 'email':
await this.sendEmailNotification(alertId);
break;
case 'webhook':
await this.sendWebhookNotification(alertId);
break;
default:
logger.debug(`[AlertSystem] Unknown notification channel: ${channel}`);
}
} catch (error) {
logger.error(`[AlertSystem] Failed to send ${channel} notification:`, error);
}
}
// Mark notification as sent
await new Promise((resolve) => {
db.run(
'UPDATE security_alerts SET notification_sent = 1 WHERE alert_id = ?',
[alertId],
() => resolve()
);
});
}
/**
* Send in-app notification (emit event)
*/
async sendInAppNotification(alertId) {
const alert = await this.getAlert(alertId);
if (!alert) return;
this.emit('notification', {
type: 'security_alert',
alertId: alert.alert_id,
severity: alert.severity,
title: alert.title,
description: alert.description,
timestamp: alert.created_at
});
logger.debug(`[AlertSystem] In-app notification sent: ${alertId}`);
}
  /**
   * Send email notification for an alert (placeholder implementation).
   *
   * Currently only logs what would be sent; no email is dispatched until a
   * mailer (e.g. nodemailer) is wired in. Silently returns when the alert
   * id cannot be resolved.
   *
   * @param {String} alertId - Alert identifier
   */
  async sendEmailNotification(alertId) {
    const alert = await this.getAlert(alertId);
    if (!alert) return;
    // TODO: Implement email sending (nodemailer)
    logger.info(`[AlertSystem] Email notification (stub): ${alertId}`);
    // For now, just log it
    logger.info(`[AlertSystem] Email would be sent for alert ${alertId}: ${alert.title}`);
  }
  /**
   * Send webhook notification for an alert (placeholder implementation).
   *
   * Currently only logs; no HTTP POST is made yet. Silently returns when
   * the alert id cannot be resolved.
   *
   * @param {String} alertId - Alert identifier
   */
  async sendWebhookNotification(alertId) {
    const alert = await this.getAlert(alertId);
    if (!alert) return;
    // TODO: Implement webhook HTTP POST
    logger.info(`[AlertSystem] Webhook notification (stub): ${alertId}`);
  }
/**
* Get alert by ID
*/
async getAlert(alertId) {
return new Promise((resolve, reject) => {
db.get(
'SELECT * FROM security_alerts WHERE alert_id = ?',
[alertId],
(err, row) => {
if (err) reject(err);
else resolve(row);
}
);
});
}
/**
* Get active alerts
*/
async getAlerts(filters = {}) {
const {
status = 'active',
severity,
limit = 100,
offset = 0
} = filters;
let whereClause = ['status = ?'];
let params = [status];
if (severity) {
whereClause.push('severity = ?');
params.push(severity);
}
params.push(limit, offset);
return new Promise((resolve, reject) => {
db.all(
`SELECT * FROM security_alerts
WHERE ${whereClause.join(' AND ')}
ORDER BY created_at DESC
LIMIT ? OFFSET ?`,
params,
(err, rows) => {
if (err) reject(err);
else resolve(rows);
}
);
});
}
  /**
   * Mark an alert as acknowledged by a user.
   *
   * Updates status/acknowledged_at/acknowledged_by on the alert row, records
   * the action in the aggregated security log, and emits 'alert_acknowledged'.
   *
   * @param {String} alertId - Alert identifier
   * @param {Number|String} userId - Acknowledging user's id
   * @param {String} notes - Free-form notes. NOTE(review): currently only
   *   written to the aggregated log, not persisted on the alert row (unlike
   *   resolveAlert's resolution_notes) — confirm whether a column is intended.
   * @returns {Promise<void>}
   */
  async acknowledgeAlert(alertId, userId, notes = '') {
    return new Promise((resolve, reject) => {
      db.run(
        `UPDATE security_alerts
       SET status = 'acknowledged',
       acknowledged_at = CURRENT_TIMESTAMP,
       acknowledged_by = ?
       WHERE alert_id = ?`,
        [userId, alertId],
        (err) => {
          if (err) {
            reject(err);
          } else {
            logger.info(`[AlertSystem] Alert acknowledged: ${alertId} by user ${userId}`);
            logAggregator.aggregate('alert_system', 'info', 'security', 'Alert acknowledged', {
              alertId,
              userId,
              notes
            });
            this.emit('alert_acknowledged', { alertId, userId });
            resolve();
          }
        }
      );
    });
  }
/**
* Resolve alert
*/
async resolveAlert(alertId, userId, notes) {
return new Promise((resolve, reject) => {
db.run(
`UPDATE security_alerts
SET status = 'resolved',
resolved_at = CURRENT_TIMESTAMP,
resolved_by = ?,
resolution_notes = ?
WHERE alert_id = ?`,
[userId, notes, alertId],
(err) => {
if (err) {
reject(err);
} else {
logger.info(`[AlertSystem] Alert resolved: ${alertId} by user ${userId}`);
logAggregator.aggregate('alert_system', 'info', 'security', 'Alert resolved', {
alertId,
userId,
notes
});
this.activeAlerts.delete(alertId);
this.emit('alert_resolved', { alertId, userId });
resolve();
}
}
);
});
}
/**
* Get alert statistics
*/
async getStatistics() {
return new Promise((resolve, reject) => {
db.all(
`SELECT
status,
severity,
COUNT(*) as count
FROM security_alerts
WHERE created_at >= datetime('now', '-24 hours')
GROUP BY status, severity`,
[],
(err, rows) => {
if (err) {
reject(err);
return;
}
const stats = {
total: 0,
byStatus: {},
bySeverity: {}
};
for (const row of rows) {
stats.total += row.count;
if (!stats.byStatus[row.status]) {
stats.byStatus[row.status] = 0;
}
stats.byStatus[row.status] += row.count;
if (!stats.bySeverity[row.severity]) {
stats.bySeverity[row.severity] = 0;
}
stats.bySeverity[row.severity] += row.count;
}
resolve(stats);
}
);
});
}
}
// Create singleton instance shared by every require of this module
const alertSystem = new AlertSystem();
// Connect security intelligence to alert system
const securityIntelligence = require('./securityIntelligence');
// Periodic bridge: every minute, surface open anomalies from the security
// intelligence layer as alerts, and raise a threshold alert when the global
// threat score reaches 80.
const anomalyPollTimer = setInterval(async () => {
  try {
    const anomalies = await securityIntelligence.getAnomalies({ status: 'open', limit: 50 });
    for (const anomaly of anomalies) {
      await alertSystem.triggerAnomalyAlert(anomaly);
    }
    // Check threat score
    const threatScore = securityIntelligence.threatScore;
    if (threatScore >= 80) {
      await alertSystem.triggerThresholdAlert('threat_score', threatScore);
    }
  } catch (error) {
    logger.error('[AlertSystem] Error checking for alerts:', error);
  }
}, 60000); // Check every minute
// Fix: unref the poll timer so this background check does not keep a
// short-lived process (tests, CLI tools) alive once all other work is done.
anomalyPollTimer.unref?.();
module.exports = alertSystem;

View file

@ -0,0 +1,156 @@
/**
* Data Sanitization Utility
* CWE-532: Prevents logging of sensitive data
* Ensures compliance with HIPAA, PCI, SOX regulations
*/
// Field names (matched case-insensitively, by substring) whose values must
// never reach log output. Exported below so callers can extend the list.
const SENSITIVE_FIELDS = [
  'password',
  'newPassword',
  'oldPassword',
  'currentPassword',
  'confirmPassword',
  'token',
  'accessToken',
  'refreshToken',
  'jwt',
  'secret',
  'apiKey',
  'api_key',
  'privateKey',
  'private_key',
  'two_factor_secret',
  'twoFactorSecret',
  'backup_codes',
  'backupCodes',
  'creditCard',
  'credit_card',
  'cvv',
  'ssn',
  'social_security',
  'pin',
  'authCode',
  'auth_code'
];
/**
 * Sanitize object by removing or masking sensitive fields.
 * Recurses into nested objects/arrays; any value whose key matches a
 * sensitive field (substring, case-insensitive) becomes '[REDACTED]'.
 * The input is never mutated — a sanitized copy is returned.
 *
 * Fix: tracks objects on the current recursion path so cyclic structures
 * (e.g. Express req/res or Error objects with circular references) no
 * longer cause infinite recursion; a cycle is rendered as '[CIRCULAR]'.
 *
 * @param {Object} data - Object to sanitize
 * @param {Array} additionalFields - Additional fields to sanitize
 * @param {WeakSet} [seen] - Internal: objects already visited on this path
 * @returns {Object} Sanitized object
 */
function sanitizeForLogging(data, additionalFields = [], seen = new WeakSet()) {
  if (!data || typeof data !== 'object') {
    return data;
  }
  if (seen.has(data)) {
    return '[CIRCULAR]';
  }
  seen.add(data);
  const sensitiveFields = [...SENSITIVE_FIELDS, ...additionalFields];
  const sanitized = Array.isArray(data) ? [] : {};
  for (const [key, value] of Object.entries(data)) {
    const lowerKey = key.toLowerCase();
    const isSensitive = sensitiveFields.some(field =>
      lowerKey.includes(field.toLowerCase())
    );
    if (isSensitive) {
      sanitized[key] = '[REDACTED]';
    } else if (value && typeof value === 'object') {
      sanitized[key] = sanitizeForLogging(value, additionalFields, seen);
    } else {
      sanitized[key] = value;
    }
  }
  // Remove from the path set so shared (non-cyclic) references are still
  // sanitized normally when they appear again elsewhere in the structure.
  seen.delete(data);
  return sanitized;
}
/**
 * Strip credential fields from a user record before export.
 * Removes the password hash, TOTP secret, and 2FA backup codes; all other
 * fields are copied through unchanged.
 *
 * @param {Object} user - User object from database
 * @returns {Object} Shallow copy without credential fields
 */
function sanitizeUserForExport(user) {
  if (!user) return user;
  const { password, two_factor_secret, backup_codes, ...safe } = user;
  return safe;
}
/**
 * Apply sanitizeUserForExport to every element of a user list.
 *
 * @param {Array} users - Array of user objects
 * @returns {Array} Sanitized user array (non-arrays are returned unchanged)
 */
function sanitizeUsersForExport(users) {
  if (!Array.isArray(users)) return users;
  return users.map((user) => sanitizeUserForExport(user));
}
/**
 * Mask a token for logging, keeping only its last 8 characters.
 *
 * @param {String} token - Token to mask
 * @returns {String} '...xxxxxxxx' for long tokens, '***' for short ones,
 *   '[INVALID_TOKEN]' for empty or non-string input
 */
function maskToken(token) {
  if (typeof token !== 'string' || token.length === 0) {
    return '[INVALID_TOKEN]';
  }
  return token.length <= 8 ? '***' : `...${token.slice(-8)}`;
}
/**
 * Mask an email address for logging, keeping only the domain part.
 *
 * @param {String} email - Email to mask
 * @returns {String} '***@domain', or '[INVALID_EMAIL]' when the input is
 *   not a string with exactly one '@'
 */
function maskEmail(email) {
  if (typeof email !== 'string' || email.length === 0) {
    return '[INVALID_EMAIL]';
  }
  const pieces = email.split('@');
  return pieces.length === 2 ? `***@${pieces[1]}` : '[INVALID_EMAIL]';
}
/**
 * Sanitize request body for logging.
 * Thin convenience wrapper over sanitizeForLogging() using the default
 * sensitive-field list.
 *
 * @param {Object} body - Request body
 * @returns {Object} Sanitized body
 */
function sanitizeRequestBody(body) {
  return sanitizeForLogging(body);
}
/**
 * Create safe metadata object for audit logging.
 * Ensures no sensitive data is included in audit logs (CWE-532).
 *
 * Fix: guards against non-object input. sanitizeForLogging() passes
 * primitives — including null — straight through, and the property checks
 * below would previously throw a TypeError on null.
 *
 * @param {Object} data - Data to include in audit metadata
 * @returns {Object} Safe metadata object
 */
function createSafeAuditMetadata(data) {
  const safe = sanitizeForLogging(data);
  if (!safe || typeof safe !== 'object') {
    return safe;
  }
  // Specifically handle common patterns
  if (safe.user && typeof safe.user === 'object') {
    safe.user = sanitizeUserForExport(safe.user);
  }
  if (safe.changes && typeof safe.changes === 'object') {
    safe.changes = sanitizeForLogging(safe.changes);
  }
  return safe;
}
// Public API: recursive log sanitizers, user-export scrubbers, masking
// helpers, and the sensitive-field list (exported so callers can extend it).
module.exports = {
  sanitizeForLogging,
  sanitizeUserForExport,
  sanitizeUsersForExport,
  maskToken,
  maskEmail,
  sanitizeRequestBody,
  createSafeAuditMetadata,
  SENSITIVE_FIELDS
};

293
backend/utils/encryption.js Normal file
View file

@ -0,0 +1,293 @@
/**
* Centralized Encryption Utility for CWE-311 Compliance
* Provides AES-256-GCM encryption for sensitive data at rest
*
* Security Features:
* - AES-256-GCM authenticated encryption
* - Unique IV per encryption operation
* - HMAC authentication tags
* - Key rotation support
* - Secure key derivation from master secret
*/
const crypto = require('crypto');
const logger = require('./logger');
// Encryption configuration: AES-256-GCM with a per-call random salt and IV;
// keys are derived via PBKDF2 (see encrypt/decrypt below).
const ALGORITHM = 'aes-256-gcm';
const KEY_LENGTH = 32; // 256 bits
const IV_LENGTH = 16; // 128 bits for GCM
const AUTH_TAG_LENGTH = 16; // GCM authentication tag, in bytes
const SALT_LENGTH = 32; // per-encryption PBKDF2 salt, in bytes
/**
 * Resolve the master encryption key.
 *
 * Prefers ENCRYPTION_MASTER_KEY; otherwise falls back to JWT_SECRET (or a
 * hard-coded default) and logs a warning — the fallback is NOT safe for
 * production use. The result is always a 32-byte SHA-256 digest.
 *
 * @returns {Buffer} 32-byte key material
 */
function getMasterKey() {
  const envKey = process.env.ENCRYPTION_MASTER_KEY;
  if (envKey) {
    // Derive proper key from master secret
    return crypto.createHash('sha256').update(envKey).digest();
  }
  logger.warn('⚠️ ENCRYPTION_MASTER_KEY not set - using default (insecure for production)');
  // Use JWT_SECRET as fallback, but warn about it
  const fallbackKey = process.env.JWT_SECRET || 'default-insecure-key-change-in-production';
  return crypto.createHash('sha256').update(fallbackKey + '-encryption-v1').digest();
}
/**
 * Derive encryption key for specific purpose using HKDF-like approach
 * (HMAC-SHA256 over a versioned purpose label, keyed by the master key).
 *
 * NOTE(review): encrypt()/decrypt() below derive their keys via PBKDF2
 * directly from the master key and do NOT call this helper, so purpose
 * separation only applies to direct callers of deriveKey() — confirm
 * whether encrypt() was meant to use it.
 *
 * @param {String} purpose - Purpose identifier (e.g. 'settings', 'vpn', 'api-tokens')
 * @returns {Buffer} Derived encryption key
 */
function deriveKey(purpose) {
  const masterKey = getMasterKey();
  const info = Buffer.from(purpose + '-v1', 'utf8');
  return crypto.createHmac('sha256', masterKey)
    .update(info)
    .digest();
}
/**
 * Encrypt sensitive data with AES-256-GCM.
 *
 * A fresh random salt and IV are generated per call; the AES key is derived
 * from the master key with PBKDF2 (100,000 iterations, SHA-256) over that
 * salt, so identical plaintexts never produce identical ciphertexts.
 * NOTE(review): `purpose` is only used in error logging here — it does not
 * affect key derivation, so ciphertexts are not cryptographically bound to
 * their purpose. Confirm whether that binding was intended (see deriveKey).
 *
 * @param {String} plaintext - Data to encrypt
 * @param {String} purpose - Purpose identifier for key derivation
 * @returns {String|null} Encrypted data in format: salt:iv:authTag:ciphertext
 *   (all hex encoded); null when plaintext is falsy
 * @throws {Error} Generic 'Failed to encrypt data' (details go to logs only,
 *   so internals are never leaked to callers/clients)
 */
function encrypt(plaintext, purpose = 'default') {
  try {
    if (!plaintext) {
      return null;
    }
    // Generate random salt and IV for this encryption operation
    const salt = crypto.randomBytes(SALT_LENGTH);
    const iv = crypto.randomBytes(IV_LENGTH);
    // Derive encryption key with salt for additional security
    const masterKey = getMasterKey();
    const derivedKey = crypto.pbkdf2Sync(
      masterKey,
      salt,
      100000, // iterations
      KEY_LENGTH,
      'sha256'
    );
    // Create cipher
    const cipher = crypto.createCipheriv(ALGORITHM, derivedKey, iv);
    // Encrypt
    let encrypted = cipher.update(plaintext, 'utf8', 'hex');
    encrypted += cipher.final('hex');
    // Get authentication tag (GCM provides authenticated encryption)
    const authTag = cipher.getAuthTag();
    // Return format: salt:iv:authTag:ciphertext
    return [
      salt.toString('hex'),
      iv.toString('hex'),
      authTag.toString('hex'),
      encrypted
    ].join(':');
  } catch (error) {
    logger.error('Encryption error:', { purpose, error: error.message });
    throw new Error('Failed to encrypt data');
  }
}
/**
 * Decrypt data encrypted with encrypt().
 *
 * Parses the salt:iv:authTag:ciphertext layout, re-derives the PBKDF2 key
 * from the embedded salt, and verifies the GCM authentication tag — any
 * tampering makes decipher.final() throw.
 * NOTE(review): as in encrypt(), `purpose` is only used for logging here.
 *
 * @param {String} encryptedData - Encrypted data in format: salt:iv:authTag:ciphertext
 * @param {String} purpose - Purpose identifier (must match encryption purpose)
 * @returns {String|null} Decrypted plaintext; null for falsy input
 * @throws {Error} Generic 'Failed to decrypt data' (details go to logs only)
 */
function decrypt(encryptedData, purpose = 'default') {
  try {
    if (!encryptedData) {
      return null;
    }
    // Parse encrypted data
    const parts = encryptedData.split(':');
    if (parts.length !== 4) {
      throw new Error('Invalid encrypted data format');
    }
    const salt = Buffer.from(parts[0], 'hex');
    const iv = Buffer.from(parts[1], 'hex');
    const authTag = Buffer.from(parts[2], 'hex');
    const encrypted = parts[3];
    // Validate lengths
    if (salt.length !== SALT_LENGTH || iv.length !== IV_LENGTH || authTag.length !== AUTH_TAG_LENGTH) {
      throw new Error('Invalid encrypted data structure');
    }
    // Derive the same key used for encryption
    const masterKey = getMasterKey();
    const derivedKey = crypto.pbkdf2Sync(
      masterKey,
      salt,
      100000,
      KEY_LENGTH,
      'sha256'
    );
    // Create decipher
    const decipher = crypto.createDecipheriv(ALGORITHM, derivedKey, iv);
    decipher.setAuthTag(authTag);
    // Decrypt
    let decrypted = decipher.update(encrypted, 'hex', 'utf8');
    decrypted += decipher.final('utf8');
    return decrypted;
  } catch (error) {
    logger.error('Decryption error:', { purpose, error: error.message });
    throw new Error('Failed to decrypt data');
  }
}
/**
 * Encrypt a sensitive settings value, skipping values that already look
 * encrypted (salt:iv:authTag:ciphertext with correctly sized hex parts).
 * Non-string values are JSON-stringified before encryption.
 *
 * @param {*} value - Raw setting value
 * @param {String} key - Setting key, used to scope the encryption purpose
 * @returns {String|null} Encrypted value, or null for falsy input
 */
function encryptSetting(value, key) {
  if (!value) return null;
  if (typeof value === 'string') {
    const segments = value.split(':');
    const looksEncrypted =
      segments.length === 4 &&
      segments[0].length === SALT_LENGTH * 2 &&
      segments[1].length === IV_LENGTH * 2;
    if (looksEncrypted) {
      return value; // Already encrypted
    }
  }
  const plaintext = typeof value === 'string' ? value : JSON.stringify(value);
  return encrypt(plaintext, `setting:${key}`);
}
/**
 * Decrypt a sensitive setting value. Plaintext that looks like JSON is
 * parsed back into an object/array; on decryption failure the raw input is
 * returned as-is to tolerate values written before encryption was enabled.
 *
 * @param {String} encryptedValue - Stored (possibly encrypted) value
 * @param {String} key - Setting key used at encryption time
 * @returns {*} Decrypted value, parsed JSON, or the raw input on failure
 */
function decryptSetting(encryptedValue, key) {
  if (!encryptedValue) return null;
  try {
    const decrypted = decrypt(encryptedValue, `setting:${key}`);
    const looksLikeJson =
      decrypted && (decrypted.startsWith('{') || decrypted.startsWith('['));
    if (!looksLikeJson) {
      return decrypted;
    }
    try {
      return JSON.parse(decrypted);
    } catch {
      return decrypted;
    }
  } catch (error) {
    // Deliberate best-effort fallback: the value may predate encryption
    // (migration scenario), so return it unchanged rather than failing.
    logger.warn(`Failed to decrypt setting ${key}, returning as-is`);
    return encryptedValue;
  }
}
/**
 * Report whether a dedicated master key is configured via the
 * ENCRYPTION_MASTER_KEY environment variable.
 *
 * @returns {Boolean} True when the variable is set and non-empty
 */
function isEncryptionConfigured() {
  return Boolean(process.env.ENCRYPTION_MASTER_KEY);
}
/**
 * Build a health/status report for the encryption subsystem, including
 * remediation advice when no dedicated master key is configured.
 *
 * @returns {Object} { configured, algorithm, keySize, status, warning, recommendations }
 */
function getEncryptionStatus() {
  const configured = isEncryptionConfigured();
  const report = {
    configured,
    algorithm: ALGORITHM,
    keySize: KEY_LENGTH * 8, // bits
    status: configured ? 'secure' : 'default-key',
    warning: null,
    recommendations: []
  };
  if (!configured) {
    report.warning = 'Using default encryption key - set ENCRYPTION_MASTER_KEY in production';
    report.recommendations = [
      'Set ENCRYPTION_MASTER_KEY environment variable',
      'Use a strong random key (at least 32 characters)',
      'Store the key securely (e.g., Docker secrets, AWS Secrets Manager)'
    ];
  }
  return report;
}
/**
 * Re-encrypt data with the current master key (for key rotation).
 *
 * Fixes over the previous version:
 * - The original env key is restored in a `finally` block, so a decryption
 *   failure can no longer leave the process running with the OLD key still
 *   installed in process.env.ENCRYPTION_MASTER_KEY.
 * - When ENCRYPTION_MASTER_KEY was originally unset, restoring no longer
 *   writes the literal string "undefined" into the environment.
 *
 * @param {String} oldEncryptedData - Data encrypted with old key
 * @param {String} purpose - Purpose identifier
 * @param {String} oldMasterKey - Old master key (optional, uses current env if not provided)
 * @returns {String} Data re-encrypted with current master key
 * @throws {Error} Generic 'Failed to re-encrypt data' (details logged only)
 */
function reEncrypt(oldEncryptedData, purpose = 'default', oldMasterKey = null) {
  try {
    const originalKey = process.env.ENCRYPTION_MASTER_KEY;
    let plaintext;
    if (oldMasterKey) {
      // Temporarily swap in the old master key for decryption only
      process.env.ENCRYPTION_MASTER_KEY = oldMasterKey;
      try {
        plaintext = decrypt(oldEncryptedData, purpose);
      } finally {
        // Always restore, even when decrypt() throws
        if (originalKey === undefined) {
          delete process.env.ENCRYPTION_MASTER_KEY;
        } else {
          process.env.ENCRYPTION_MASTER_KEY = originalKey;
        }
      }
    } else {
      plaintext = decrypt(oldEncryptedData, purpose);
    }
    // Encrypt with current key
    return encrypt(plaintext, purpose);
  } catch (error) {
    logger.error('Re-encryption error:', { purpose, error: error.message });
    throw new Error('Failed to re-encrypt data');
  }
}
/**
 * One-way SHA-256 hash for compare-only secrets (e.g. backup codes).
 * The result cannot be reversed; use encrypt() when retrieval is needed.
 *
 * @param {String|Buffer} data - Data to hash
 * @returns {String} Hex-encoded SHA-256 digest
 */
function hashSensitiveData(data) {
  const hasher = crypto.createHash('sha256');
  hasher.update(data);
  return hasher.digest('hex');
}
/**
 * Generate a cryptographically secure random token.
 *
 * @param {Number} length - Number of random bytes (hex output is 2x this)
 * @returns {String} Hex-encoded token
 */
function generateSecureToken(length = 32) {
  const bytes = crypto.randomBytes(length);
  return bytes.toString('hex');
}
// Public API. encryptVPN/decryptVPN keep legacy VPN call sites working by
// JSON-round-tripping values through the 'vpn' purpose.
module.exports = {
  encrypt,
  decrypt,
  encryptSetting,
  decryptSetting,
  deriveKey,
  isEncryptionConfigured,
  getEncryptionStatus,
  reEncrypt,
  hashSensitiveData,
  generateSecureToken,
  // Export for legacy VPN compatibility
  encryptVPN: (data) => encrypt(JSON.stringify(data), 'vpn'),
  decryptVPN: (data) => JSON.parse(decrypt(data, 'vpn'))
};

View file

@ -0,0 +1,340 @@
/**
* Secure Error Handler Utility
* Prevents CWE-209: Information Exposure Through Error Messages
*
* This utility sanitizes error messages before sending them to clients,
* ensuring that internal system details, file paths, stack traces, and
* other sensitive information are never exposed to end users.
*/
const logger = require('./logger');
/**
 * Error types with user-friendly messages.
 * These are the only strings that should ever reach a client; full internal
 * error details stay in server-side logs (see logError / errorMiddleware).
 */
const ERROR_TYPES = {
  // Authentication & Authorization
  AUTH_FAILED: 'Authentication failed',
  AUTH_REQUIRED: 'Authentication required',
  AUTH_INVALID_TOKEN: 'Invalid or expired authentication token',
  AUTH_INSUFFICIENT_PERMISSIONS: 'Insufficient permissions',
  // User Management
  USER_NOT_FOUND: 'User not found',
  USER_ALREADY_EXISTS: 'User already exists',
  USER_CREATION_FAILED: 'Failed to create user',
  USER_UPDATE_FAILED: 'Failed to update user',
  USER_DELETE_FAILED: 'Failed to delete user',
  // Data Validation
  VALIDATION_FAILED: 'Validation failed',
  INVALID_INPUT: 'Invalid input provided',
  INVALID_FILE_TYPE: 'Invalid file type',
  FILE_TOO_LARGE: 'File size exceeds limit',
  MISSING_REQUIRED_FIELD: 'Required field is missing',
  // Database Operations
  DATABASE_ERROR: 'Database operation failed',
  RECORD_NOT_FOUND: 'Record not found',
  DUPLICATE_ENTRY: 'Duplicate entry exists',
  // File Operations
  FILE_NOT_FOUND: 'File not found',
  FILE_UPLOAD_FAILED: 'File upload failed',
  FILE_DELETE_FAILED: 'Failed to delete file',
  FILE_READ_FAILED: 'Failed to read file',
  FILE_WRITE_FAILED: 'Failed to write file',
  // Network & External Services
  NETWORK_ERROR: 'Network request failed',
  EXTERNAL_SERVICE_ERROR: 'External service unavailable',
  TIMEOUT_ERROR: 'Request timeout',
  // Rate Limiting
  RATE_LIMIT_EXCEEDED: 'Too many requests. Please try again later',
  // Generic
  INTERNAL_ERROR: 'An internal error occurred',
  NOT_FOUND: 'Resource not found',
  FORBIDDEN: 'Access forbidden',
  BAD_REQUEST: 'Bad request',
  CONFLICT: 'Resource conflict',
  UNPROCESSABLE_ENTITY: 'Unable to process request',
  SERVICE_UNAVAILABLE: 'Service temporarily unavailable'
};
/**
 * Sanitize error for client response.
 * Removes sensitive information like stack traces, file paths, and internal
 * details by mapping known error-message patterns to the safe strings in
 * ERROR_TYPES (CWE-209 mitigation).
 *
 * NOTE: the pattern checks below are evaluated in order — more specific
 * patterns (e.g. SQLite constraint names) must stay above generic ones.
 *
 * @param {Error|string} error - The error to sanitize
 * @param {string} defaultMessage - Default message if error cannot be parsed
 * @returns {Object} Sanitized error object with safe message and a code
 */
function sanitizeError(error, defaultMessage = ERROR_TYPES.INTERNAL_ERROR) {
  // If error is a string, return it as is (assuming it's already safe)
  if (typeof error === 'string') {
    return {
      message: error,
      code: 'CUSTOM_ERROR'
    };
  }
  // Extract error message
  const errorMessage = error?.message || defaultMessage;
  // Check for known error patterns and map to safe messages
  // Database errors
  if (errorMessage.includes('UNIQUE constraint') || errorMessage.includes('UNIQUE')) {
    return {
      message: ERROR_TYPES.DUPLICATE_ENTRY,
      code: 'DUPLICATE_ENTRY'
    };
  }
  if (errorMessage.includes('FOREIGN KEY constraint')) {
    return {
      message: ERROR_TYPES.CONFLICT,
      code: 'FOREIGN_KEY_CONSTRAINT'
    };
  }
  if (errorMessage.includes('NOT NULL constraint')) {
    return {
      message: ERROR_TYPES.MISSING_REQUIRED_FIELD,
      code: 'MISSING_FIELD'
    };
  }
  // File system errors
  if (errorMessage.includes('ENOENT') || errorMessage.includes('no such file')) {
    return {
      message: ERROR_TYPES.FILE_NOT_FOUND,
      code: 'FILE_NOT_FOUND'
    };
  }
  if (errorMessage.includes('EACCES') || errorMessage.includes('permission denied')) {
    return {
      message: ERROR_TYPES.FORBIDDEN,
      code: 'PERMISSION_DENIED'
    };
  }
  if (errorMessage.includes('ENOSPC') || errorMessage.includes('no space')) {
    return {
      message: ERROR_TYPES.SERVICE_UNAVAILABLE,
      code: 'DISK_FULL'
    };
  }
  // Network errors
  if (errorMessage.includes('ECONNREFUSED') || errorMessage.includes('connection refused')) {
    return {
      message: ERROR_TYPES.EXTERNAL_SERVICE_ERROR,
      code: 'CONNECTION_REFUSED'
    };
  }
  if (errorMessage.includes('ETIMEDOUT') || errorMessage.includes('timeout')) {
    return {
      message: ERROR_TYPES.TIMEOUT_ERROR,
      code: 'TIMEOUT'
    };
  }
  if (errorMessage.includes('ENOTFOUND') || errorMessage.includes('getaddrinfo')) {
    return {
      message: ERROR_TYPES.NETWORK_ERROR,
      code: 'DNS_ERROR'
    };
  }
  // Authentication errors
  if (errorMessage.toLowerCase().includes('unauthorized') ||
      errorMessage.toLowerCase().includes('authentication')) {
    return {
      message: ERROR_TYPES.AUTH_FAILED,
      code: 'AUTH_ERROR'
    };
  }
  if (errorMessage.toLowerCase().includes('forbidden') ||
      errorMessage.toLowerCase().includes('permission')) {
    return {
      message: ERROR_TYPES.AUTH_INSUFFICIENT_PERMISSIONS,
      code: 'PERMISSION_ERROR'
    };
  }
  // Validation errors (pass through if they seem safe)
  if (errorMessage.toLowerCase().includes('validation') ||
      errorMessage.toLowerCase().includes('invalid')) {
    // Check if message is reasonably safe (no paths, no system info)
    if (!containsSensitiveInfo(errorMessage)) {
      return {
        message: errorMessage,
        code: 'VALIDATION_ERROR'
      };
    }
    return {
      message: ERROR_TYPES.VALIDATION_FAILED,
      code: 'VALIDATION_ERROR'
    };
  }
  // Default to generic error message
  return {
    message: defaultMessage,
    code: 'INTERNAL_ERROR'
  };
}
/**
 * Heuristically detect whether an error message leaks internal details:
 * file paths, stack-frame patterns, SQLite internals, OS directories, or
 * credential-related keywords.
 *
 * @param {string} message - Error message to check
 * @returns {boolean} True if message contains sensitive info
 */
function containsSensitiveInfo(message) {
  const sensitivePatterns = [
    /\/[a-z0-9_\-\/]+\.(js|json|db|log|conf|env)/i, // File paths
    /at\s+[a-zA-Z0-9_]+\s+\(/i, // Stack trace patterns
    /line\s+\d+/i, // Line numbers
    /column\s+\d+/i, // Column numbers
    /Error:\s+SQLITE_/i, // SQLite internal errors
    /node_modules/i, // Node modules paths
    /\/home\//i, // Unix home directory
    /\/usr\//i, // Unix system paths
    /\/var\//i, // Unix var paths
    /\/tmp\//i, // Temp directory
    /C:\\/i, // Windows paths
    /\\Users\\/i, // Windows user paths
    /password/i, // Password references
    /secret/i, // Secret references
    /token/i, // Token references
    /key/i // Key references (be careful with "keyboard" etc.)
  ];
  for (const pattern of sensitivePatterns) {
    if (pattern.test(message)) {
      return true;
    }
  }
  return false;
}
/**
 * Record full error details in the server-side log. Internal logs MAY
 * contain stack traces and context that must never reach clients — the
 * client-facing side is handled by sanitizeError()/errorMiddleware().
 *
 * @param {Error|string} error - Error to log
 * @param {Object} context - Additional context (route, user id, etc.)
 */
function logError(error, context = {}) {
  const details = {
    message: error?.message || error,
    stack: error?.stack,
    code: error?.code,
    ...context
  };
  logger.error('Application error:', details);
}
/**
 * Express error handler middleware.
 * Catches all errors, logs the full details server-side, and returns a
 * sanitized response to the client (CWE-209 mitigation).
 *
 * NOTE: the 4-parameter signature must be kept even though `next` is
 * unused — Express identifies error-handling middleware by function arity.
 *
 * @param {Error} err - Error object
 * @param {Object} req - Express request
 * @param {Object} res - Express response
 * @param {Function} next - Express next function (required by Express, unused)
 */
function errorMiddleware(err, req, res, next) {
  // Log full error details internally
  logError(err, {
    method: req.method,
    path: req.path,
    userId: req.user?.id,
    ip: req.ip
  });
  // Determine status code
  const statusCode = err.statusCode || err.status || 500;
  // Sanitize error for client
  const sanitized = sanitizeError(err, ERROR_TYPES.INTERNAL_ERROR);
  // Send sanitized response
  res.status(statusCode).json({
    error: sanitized.message,
    code: sanitized.code,
    timestamp: new Date().toISOString()
  });
}
/**
 * Wrap an async route handler so rejected promises are routed into
 * Express's error pipeline instead of becoming unhandled rejections.
 * Synchronous throws propagate exactly as they would without wrapping.
 *
 * @param {Function} fn - Async route handler
 * @returns {Function} Wrapped function
 */
function asyncHandler(fn) {
  return (req, res, next) => {
    const result = fn(req, res, next);
    Promise.resolve(result).catch(next);
  };
}
/**
 * Build an Error carrying an HTTP status code and a machine-readable code,
 * for consumption by errorMiddleware().
 *
 * @param {string} message - Error message (should be user-safe)
 * @param {number} statusCode - HTTP status code
 * @param {string} code - Error code
 * @returns {Object} Error response object with statusCode/code attached
 */
function createError(message, statusCode = 500, code = 'ERROR') {
  return Object.assign(new Error(message), { statusCode, code });
}
/**
 * Standard error responses.
 * Factory helpers producing pre-coded errors for common HTTP statuses;
 * pass the result to next() or throw it inside an asyncHandler-wrapped
 * route. Each accepts an optional user-safe message override.
 */
const ErrorResponses = {
  badRequest: (message = ERROR_TYPES.BAD_REQUEST) =>
    createError(message, 400, 'BAD_REQUEST'),
  unauthorized: (message = ERROR_TYPES.AUTH_REQUIRED) =>
    createError(message, 401, 'UNAUTHORIZED'),
  forbidden: (message = ERROR_TYPES.FORBIDDEN) =>
    createError(message, 403, 'FORBIDDEN'),
  notFound: (message = ERROR_TYPES.NOT_FOUND) =>
    createError(message, 404, 'NOT_FOUND'),
  conflict: (message = ERROR_TYPES.CONFLICT) =>
    createError(message, 409, 'CONFLICT'),
  unprocessable: (message = ERROR_TYPES.UNPROCESSABLE_ENTITY) =>
    createError(message, 422, 'UNPROCESSABLE_ENTITY'),
  tooManyRequests: (message = ERROR_TYPES.RATE_LIMIT_EXCEEDED) =>
    createError(message, 429, 'TOO_MANY_REQUESTS'),
  internal: (message = ERROR_TYPES.INTERNAL_ERROR) =>
    createError(message, 500, 'INTERNAL_ERROR'),
  serviceUnavailable: (message = ERROR_TYPES.SERVICE_UNAVAILABLE) =>
    createError(message, 503, 'SERVICE_UNAVAILABLE')
};
// Public API: safe-message catalogue, sanitization helpers, the Express
// error middleware, and error-construction utilities.
module.exports = {
  ERROR_TYPES,
  sanitizeError,
  containsSensitiveInfo,
  logError,
  errorMiddleware,
  asyncHandler,
  createError,
  ErrorResponses
};

View file

@ -0,0 +1,486 @@
/**
* Comprehensive Input Validation Utility
* Implements whitelist-based validation for all user inputs
*/
const validator = require('validator');
/**
 * Validation Rules Configuration.
 * Whitelist-based limits per field type: `pattern` restricts the allowed
 * character set, `sanitize` additionally runs the value through
 * sanitizeString() before it is returned to callers.
 */
const VALIDATION_RULES = {
  // User-related
  username: {
    minLength: 3,
    maxLength: 50,
    pattern: /^[a-zA-Z0-9_-]+$/,
    sanitize: true
  },
  email: {
    maxLength: 255,
    sanitize: true
  },
  password: {
    minLength: 8,
    maxLength: 128
  },
  // Content-related
  playlistName: {
    minLength: 1,
    maxLength: 200,
    pattern: /^[a-zA-Z0-9\s\-_.,()!]+$/,
    sanitize: true
  },
  channelName: {
    minLength: 1,
    maxLength: 200,
    sanitize: true
  },
  url: {
    maxLength: 2048,
    // Streaming-friendly protocol whitelist
    protocols: ['http', 'https', 'rtmp', 'rtsp', 'udp', 'rtp']
  },
  // Generic text fields
  description: {
    maxLength: 1000,
    sanitize: true
  },
  // File names
  filename: {
    maxLength: 255,
    pattern: /^[a-zA-Z0-9\s\-_.,()]+$/,
    sanitize: true
  },
  // Settings keys
  settingKey: {
    maxLength: 100,
    pattern: /^[a-zA-Z0-9_.-]+$/
  }
};
/**
 * Strip HTML tags and script-like fragments from a string, then HTML-escape
 * the remainder (XSS defense-in-depth for values that may be re-rendered).
 * Non-string input is returned untouched.
 *
 * @param {*} str - Value to sanitize
 * @returns {*} Escaped string, or the input unchanged if not a string
 */
function sanitizeString(str) {
  if (typeof str !== 'string') return str;
  const stripped = str
    .replace(/<[^>]*>/g, '') // drop HTML tags
    .replace(/javascript:/gi, '') // drop javascript: URL fragments
    .replace(/on\w+\s*=/gi, ''); // drop inline event-handler attributes
  return validator.escape(stripped);
}
// Export sanitizeString
module.exports.sanitizeString = sanitizeString;
/**
 * Validate a username against length and character-set rules.
 *
 * @param {*} username - Candidate username
 * @returns {{valid: boolean, errors: string[], sanitized: string|null}}
 */
function validateUsername(username) {
  const rules = VALIDATION_RULES.username;
  if (!username || typeof username !== 'string') {
    return { valid: false, errors: ['Username is required'], sanitized: null };
  }
  const trimmed = username.trim();
  const errors = [];
  if (trimmed.length < rules.minLength) {
    errors.push(`Username must be at least ${rules.minLength} characters`);
  }
  if (trimmed.length > rules.maxLength) {
    errors.push(`Username must not exceed ${rules.maxLength} characters`);
  }
  if (!rules.pattern.test(trimmed)) {
    errors.push('Username can only contain letters, numbers, hyphens, and underscores');
  }
  return {
    valid: errors.length === 0,
    errors,
    sanitized: rules.sanitize ? sanitizeString(trimmed) : trimmed
  };
}
/**
 * Validate an email address (format via validator.isEmail, plus a length
 * cap). The address is lowercased before validation and sanitization.
 *
 * @param {*} email - Candidate email
 * @returns {{valid: boolean, errors: string[], sanitized: string|null}}
 */
function validateEmail(email) {
  const rules = VALIDATION_RULES.email;
  if (!email || typeof email !== 'string') {
    return { valid: false, errors: ['Email is required'], sanitized: null };
  }
  const trimmed = email.trim().toLowerCase();
  const errors = [];
  if (!validator.isEmail(trimmed)) {
    errors.push('Invalid email format');
  }
  if (trimmed.length > rules.maxLength) {
    errors.push(`Email must not exceed ${rules.maxLength} characters`);
  }
  return {
    valid: errors.length === 0,
    errors,
    sanitized: rules.sanitize ? sanitizeString(trimmed) : trimmed
  };
}
/**
 * Validate a URL against length, protocol-whitelist, and format rules.
 *
 * Fix: the `allowLocalhost` flag previously had no effect — it set
 * `disallow_auth: false`, which is validator.isURL's default and is
 * unrelated to hostnames. It now relaxes `require_tld` so TLD-less hosts
 * (e.g. http://localhost:8080) validate when explicitly allowed; with the
 * default (false) such hosts are rejected, as before.
 *
 * @param {*} url - Candidate URL
 * @param {Boolean} allowLocalhost - Accept TLD-less hosts such as localhost
 * @returns {{valid: boolean, errors: string[], sanitized: string|null}}
 */
function validateUrl(url, allowLocalhost = false) {
  const errors = [];
  const rules = VALIDATION_RULES.url;
  if (!url || typeof url !== 'string') {
    errors.push('URL is required');
    return { valid: false, errors, sanitized: null };
  }
  const trimmed = url.trim();
  if (trimmed.length > rules.maxLength) {
    errors.push(`URL must not exceed ${rules.maxLength} characters`);
  }
  // Check if URL is valid and uses allowed protocols
  const options = {
    protocols: rules.protocols,
    require_protocol: true,
    allow_underscores: true
  };
  if (allowLocalhost) {
    // Permit hosts without a TLD (localhost, bare hostnames)
    options.require_tld = false;
  }
  if (!validator.isURL(trimmed, options)) {
    errors.push('Invalid URL format');
  }
  // Additional security checks
  if (trimmed.includes('javascript:')) {
    errors.push('URL contains invalid content');
  }
  return {
    valid: errors.length === 0,
    errors,
    sanitized: trimmed
  };
}
/**
 * Validate a playlist name (length bounds plus a whitelist character set).
 *
 * @param {*} name - Candidate playlist name
 * @returns {{valid: boolean, errors: string[], sanitized: string|null}}
 */
function validatePlaylistName(name) {
  const rules = VALIDATION_RULES.playlistName;
  if (!name || typeof name !== 'string') {
    return { valid: false, errors: ['Playlist name is required'], sanitized: null };
  }
  const trimmed = name.trim();
  const errors = [];
  if (trimmed.length < rules.minLength) {
    errors.push(`Playlist name must be at least ${rules.minLength} character`);
  }
  if (trimmed.length > rules.maxLength) {
    errors.push(`Playlist name must not exceed ${rules.maxLength} characters`);
  }
  if (!rules.pattern.test(trimmed)) {
    errors.push('Playlist name contains invalid characters');
  }
  return {
    valid: errors.length === 0,
    errors,
    sanitized: rules.sanitize ? sanitizeString(trimmed) : trimmed
  };
}
/**
 * Validate a channel name. Only length is constrained (no character
 * whitelist, since channel names come from external playlists), but the
 * result is still sanitized for safe re-rendering.
 *
 * @param {*} name - Candidate channel name
 * @returns {{valid: boolean, errors: string[], sanitized: string|null}}
 */
function validateChannelName(name) {
  const rules = VALIDATION_RULES.channelName;
  if (!name || typeof name !== 'string') {
    return { valid: false, errors: ['Channel name is required'], sanitized: null };
  }
  const trimmed = name.trim();
  const errors = [];
  if (trimmed.length < rules.minLength) {
    errors.push(`Channel name must be at least ${rules.minLength} character`);
  }
  if (trimmed.length > rules.maxLength) {
    errors.push(`Channel name must not exceed ${rules.maxLength} characters`);
  }
  return {
    valid: errors.length === 0,
    errors,
    sanitized: rules.sanitize ? sanitizeString(trimmed) : trimmed
  };
}
/**
 * Validate description/text field
 * Absent/empty descriptions are valid and normalize to ''.
 * @param {?string} description
 * @returns {{valid: boolean, errors: string[], sanitized: ?string}}
 */
function validateDescription(description) {
  if (!description) {
    return { valid: true, errors: [], sanitized: '' };
  }
  if (typeof description !== 'string') {
    return { valid: false, errors: ['Description must be a string'], sanitized: null };
  }
  const rules = VALIDATION_RULES.description;
  const trimmed = description.trim();
  const errors = [];
  if (trimmed.length > rules.maxLength) {
    errors.push(`Description must not exceed ${rules.maxLength} characters`);
  }
  return {
    valid: errors.length === 0,
    errors,
    sanitized: rules.sanitize ? sanitizeString(trimmed) : trimmed
  };
}
/**
 * Validate filename
 * Rejects names containing path separators or '..' (path traversal).
 * @param {string} filename
 * @returns {{valid: boolean, errors: string[], sanitized: ?string}}
 */
function validateFilename(filename) {
  if (!filename || typeof filename !== 'string') {
    return { valid: false, errors: ['Filename is required'], sanitized: null };
  }
  const rules = VALIDATION_RULES.filename;
  const trimmed = filename.trim();
  const errors = [];
  if (trimmed.length > rules.maxLength) {
    errors.push(`Filename must not exceed ${rules.maxLength} characters`);
  }
  if (!rules.pattern.test(trimmed)) {
    errors.push('Filename contains invalid characters');
  }
  // Belt-and-braces check for path traversal tokens.
  const hasPathTokens =
    trimmed.includes('..') || trimmed.includes('/') || trimmed.includes('\\');
  if (hasPathTokens) {
    errors.push('Filename contains invalid path characters');
  }
  return {
    valid: errors.length === 0,
    errors,
    sanitized: rules.sanitize ? sanitizeString(trimmed) : trimmed
  };
}
/**
 * Validate setting key
 * @param {string} key
 * @returns {{valid: boolean, errors: string[], sanitized: ?string}}
 */
function validateSettingKey(key) {
  if (!key || typeof key !== 'string') {
    return { valid: false, errors: ['Setting key is required'], sanitized: null };
  }
  const rules = VALIDATION_RULES.settingKey;
  const trimmed = key.trim();
  const errors = [];
  if (trimmed.length > rules.maxLength) {
    errors.push(`Setting key must not exceed ${rules.maxLength} characters`);
  }
  if (!rules.pattern.test(trimmed)) {
    errors.push('Setting key contains invalid characters');
  }
  return {
    valid: errors.length === 0,
    errors,
    sanitized: trimmed
  };
}
/**
 * Validate integer within an optional [min, max] range.
 * Non-numeric prefixes are handled by parseInt semantics: "12abc" parses
 * as 12 and "12.9" truncates to 12 (pre-existing behavior, preserved).
 *
 * @param {*} value - Value to coerce and check.
 * @param {number} [min] - Inclusive lower bound.
 * @param {number} [max] - Inclusive upper bound.
 * @returns {{valid: boolean, errors: string[], sanitized: ?number}}
 */
function validateInteger(value, min = Number.MIN_SAFE_INTEGER, max = Number.MAX_SAFE_INTEGER) {
  const errors = [];
  // Number.parseInt with explicit radix; Number.isNaN avoids the coercing
  // global isNaN.
  const num = Number.parseInt(value, 10);
  if (Number.isNaN(num)) {
    errors.push('Must be a valid integer');
    return { valid: false, errors, sanitized: null };
  }
  if (num < min) {
    errors.push(`Must be at least ${min}`);
  }
  if (num > max) {
    errors.push(`Must not exceed ${max}`);
  }
  return {
    valid: errors.length === 0,
    errors,
    sanitized: num
  };
}
/**
 * Validate boolean
 * Accepts real booleans plus the string/number forms 'true'/'false',
 * '1'/'0' and 1/0.
 * @param {*} value
 * @returns {{valid: boolean, errors: string[], sanitized: ?boolean}}
 */
function validateBoolean(value) {
  const accept = (sanitized) => ({ valid: true, errors: [], sanitized });
  if (value === true || value === false) {
    return accept(value);
  }
  if (value === 'true' || value === '1' || value === 1) {
    return accept(true);
  }
  if (value === 'false' || value === '0' || value === 0) {
    return accept(false);
  }
  return {
    valid: false,
    errors: ['Must be a valid boolean'],
    sanitized: null
  };
}
/**
 * Validate JSON input (either an already-parsed object or a JSON string).
 *
 * @param {*} value - Object, or string to parse.
 * @param {number} [maxSize=10000] - Max serialized length in characters.
 * @returns {{valid: boolean, errors: string[], sanitized: *}}
 */
function validateJSON(value, maxSize = 10000) {
  const errors = [];
  // NOTE: typeof null === 'object', so null takes this branch and is
  // treated as valid JSON (pre-existing behavior, preserved).
  if (typeof value === 'object') {
    let jsonString;
    try {
      jsonString = JSON.stringify(value);
    } catch (e) {
      // Circular references or BigInt values make JSON.stringify throw;
      // previously this escaped as an uncaught TypeError.
      return { valid: false, errors: ['Invalid JSON format'], sanitized: null };
    }
    if (jsonString.length > maxSize) {
      errors.push(`JSON data exceeds maximum size of ${maxSize} characters`);
    }
    return {
      valid: errors.length === 0,
      errors,
      sanitized: value
    };
  }
  if (typeof value !== 'string') {
    errors.push('Must be valid JSON');
    return { valid: false, errors, sanitized: null };
  }
  try {
    const parsed = JSON.parse(value);
    if (value.length > maxSize) {
      errors.push(`JSON data exceeds maximum size of ${maxSize} characters`);
    }
    return {
      valid: errors.length === 0,
      errors,
      sanitized: parsed
    };
  } catch (e) {
    errors.push('Invalid JSON format');
    return { valid: false, errors, sanitized: null };
  }
}
/**
 * Sanitize object with multiple fields
 * Runs each field of `obj` through the matching validator in `schema`.
 * Fields that pass land in `sanitized`; fields that fail land in `errors`.
 *
 * @param {Object} obj - Raw input object.
 * @param {Object<string, function>} schema - Map of field name -> validator.
 * @returns {{valid: boolean, errors: Object, sanitized: Object}}
 */
function sanitizeObject(obj, schema) {
  const sanitized = {};
  const errors = {};
  // `validate` (not `validator`) avoids shadowing the module-level
  // validator library import.
  for (const [field, validate] of Object.entries(schema)) {
    const result = validate(obj[field]);
    if (result.valid) {
      sanitized[field] = result.sanitized;
    } else {
      errors[field] = result.errors;
    }
  }
  return {
    valid: Object.keys(errors).length === 0,
    errors,
    sanitized
  };
}
module.exports = {
validateUsername,
validateEmail,
validateUrl,
validatePlaylistName,
validateChannelName,
validateDescription,
validateFilename,
validateSettingKey,
validateInteger,
validateBoolean,
validateJSON,
sanitizeString,
sanitizeObject,
VALIDATION_RULES
};

View file

@ -0,0 +1,444 @@
/**
* Centralized Log Aggregation System (SIEM)
* Consolidates logs from multiple sources into a protected repository
* Provides holistic visibility across the infrastructure
*/
const logger = require('./logger');
const { db } = require('../database/db');
const crypto = require('crypto');
const fs = require('fs').promises;
const path = require('path');
class LogAggregator {
  /**
   * Centralized log aggregation (SIEM-style).
   * Buffers entries in memory, bulk-inserts them into SQLite, HMAC-signs
   * each entry so tampering can be detected later, and supports querying,
   * statistics, integrity verification, retention cleanup and export.
   */
  constructor() {
    this.logSources = new Map(); // sourceName -> source config
    this.aggregationBuffer = []; // pending entries awaiting bulk insert
    this.bufferSize = 100; // Batch size for bulk insert
    this.flushInterval = 5000; // 5 seconds
    // Fire-and-forget: table creation is async; entries aggregated before
    // it completes simply wait in the buffer for the first flush.
    this.initializeAggregation();
  }

  /**
   * Initialize aggregation system: create the backing table, then start
   * the periodic buffer flush.
   */
  async initializeAggregation() {
    await this.createAggregatedLogsTable();
    // Start periodic flush
    setInterval(() => this.flushBuffer(), this.flushInterval);
    logger.info('[LogAggregator] Initialized - SIEM mode active');
  }

  /**
   * Create database table (and query indexes) for aggregated logs.
   * @returns {Promise<void>}
   */
  async createAggregatedLogsTable() {
    return new Promise((resolve, reject) => {
      db.run(`
        CREATE TABLE IF NOT EXISTS aggregated_logs (
          id INTEGER PRIMARY KEY AUTOINCREMENT,
          log_id TEXT UNIQUE NOT NULL,
          source TEXT NOT NULL,
          level TEXT NOT NULL,
          category TEXT NOT NULL,
          message TEXT NOT NULL,
          metadata TEXT,
          user_id INTEGER,
          ip_address TEXT,
          user_agent TEXT,
          signature TEXT NOT NULL,
          timestamp DATETIME DEFAULT CURRENT_TIMESTAMP,
          created_at DATETIME DEFAULT CURRENT_TIMESTAMP
        )
      `, (err) => {
        if (err) {
          logger.error('[LogAggregator] Failed to create aggregated_logs table:', err);
          reject(err);
        } else {
          // Create indexes for fast querying
          db.run(`CREATE INDEX IF NOT EXISTS idx_aggregated_logs_source ON aggregated_logs(source, timestamp DESC)`);
          db.run(`CREATE INDEX IF NOT EXISTS idx_aggregated_logs_level ON aggregated_logs(level, timestamp DESC)`);
          db.run(`CREATE INDEX IF NOT EXISTS idx_aggregated_logs_category ON aggregated_logs(category, timestamp DESC)`);
          db.run(`CREATE INDEX IF NOT EXISTS idx_aggregated_logs_user ON aggregated_logs(user_id, timestamp DESC)`);
          db.run(`CREATE INDEX IF NOT EXISTS idx_aggregated_logs_timestamp ON aggregated_logs(timestamp DESC)`);
          resolve();
        }
      });
    });
  }

  /**
   * Register a log source
   * @param {String} sourceName - Name of the log source
   * @param {Object} config - Source configuration (enabled, priority, retention in days)
   */
  registerSource(sourceName, config = {}) {
    this.logSources.set(sourceName, {
      name: sourceName,
      enabled: config.enabled !== false,
      priority: config.priority || 'medium',
      retention: config.retention || 90, // days
      ...config
    });
    logger.info(`[LogAggregator] Registered source: ${sourceName}`);
  }

  /**
   * Aggregate log entry with cryptographic signature
   * @param {String} source - Log source identifier
   * @param {String} level - Log level (info, warn, error, critical)
   * @param {String} category - Log category (auth, access, security, system, application)
   * @param {String} message - Log message
   * @param {Object} details - Additional details (userId, ip, userAgent are lifted to columns)
   * @returns {Promise<String>} The generated log id
   */
  async aggregate(source, level, category, message, details = {}) {
    const logId = this.generateLogId();
    const timestamp = new Date().toISOString();
    const logEntry = {
      log_id: logId,
      source,
      level,
      category,
      message,
      metadata: JSON.stringify({
        ...details,
        aggregatedAt: timestamp
      }),
      user_id: details.userId || null,
      ip_address: details.ip || null,
      user_agent: details.userAgent || null,
      timestamp
    };
    // Generate cryptographic signature for log integrity
    logEntry.signature = this.generateSignature(logEntry);
    // Add to buffer
    this.aggregationBuffer.push(logEntry);
    // Flush if buffer is full
    if (this.aggregationBuffer.length >= this.bufferSize) {
      await this.flushBuffer();
    }
    return logId;
  }

  /**
   * Generate unique log ID (timestamp + 8 random bytes).
   */
  generateLogId() {
    const timestamp = Date.now();
    const random = crypto.randomBytes(8).toString('hex');
    return `LOG-${timestamp}-${random}`;
  }

  /**
   * Generate cryptographic signature for log entry
   * SHA-256 HMAC with secret key for integrity verification.
   * SECURITY: the fallback secret provides no real protection — set
   * LOG_SIGNATURE_SECRET in production.
   */
  generateSignature(logEntry) {
    const secret = process.env.LOG_SIGNATURE_SECRET || 'default-secret-change-in-production';
    const data = `${logEntry.log_id}|${logEntry.source}|${logEntry.level}|${logEntry.category}|${logEntry.message}|${logEntry.timestamp}`;
    return crypto
      .createHmac('sha256', secret)
      .update(data)
      .digest('hex');
  }

  /**
   * Verify log entry signature by recomputing it from the entry's fields.
   */
  verifySignature(logEntry) {
    const expectedSignature = this.generateSignature(logEntry);
    return logEntry.signature === expectedSignature;
  }

  /**
   * Flush aggregation buffer to database.
   * On failure the batch is re-queued at the front of the buffer.
   * NOTE(review): a persistently failing DB makes the buffer grow without
   * bound — consider a cap if that becomes a concern.
   */
  async flushBuffer() {
    if (this.aggregationBuffer.length === 0) return;
    const batch = [...this.aggregationBuffer];
    this.aggregationBuffer = [];
    try {
      await this.bulkInsert(batch);
      logger.debug(`[LogAggregator] Flushed ${batch.length} log entries`);
    } catch (error) {
      logger.error('[LogAggregator] Failed to flush buffer:', error);
      // Re-add failed entries to buffer
      this.aggregationBuffer.unshift(...batch);
    }
  }

  /**
   * Bulk insert log entries in a single multi-row INSERT.
   */
  async bulkInsert(entries) {
    if (entries.length === 0) return;
    const placeholders = entries.map(() => '(?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)').join(',');
    const values = entries.flatMap(entry => [
      entry.log_id,
      entry.source,
      entry.level,
      entry.category,
      entry.message,
      entry.metadata,
      entry.user_id,
      entry.ip_address,
      entry.user_agent,
      entry.signature,
      entry.timestamp
    ]);
    return new Promise((resolve, reject) => {
      db.run(
        `INSERT INTO aggregated_logs
         (log_id, source, level, category, message, metadata, user_id, ip_address, user_agent, signature, timestamp)
         VALUES ${placeholders}`,
        values,
        (err) => {
          if (err) reject(err);
          else resolve();
        }
      );
    });
  }

  /**
   * Query aggregated logs with optional filters.
   * All filter values are bound as parameters; the ORDER BY column and
   * direction are validated against whitelists because SQL identifiers
   * cannot be bound as parameters.
   */
  async query(filters = {}) {
    const {
      source,
      level,
      category,
      userId,
      startDate,
      endDate,
      limit = 1000,
      offset = 0,
      orderBy = 'timestamp',
      order = 'DESC'
    } = filters;
    const whereClause = [];
    const params = [];
    if (source) {
      whereClause.push('source = ?');
      params.push(source);
    }
    if (level) {
      if (Array.isArray(level)) {
        whereClause.push(`level IN (${level.map(() => '?').join(',')})`);
        params.push(...level);
      } else {
        whereClause.push('level = ?');
        params.push(level);
      }
    }
    if (category) {
      whereClause.push('category = ?');
      params.push(category);
    }
    if (userId) {
      whereClause.push('user_id = ?');
      params.push(userId);
    }
    if (startDate) {
      whereClause.push('timestamp >= ?');
      params.push(startDate);
    }
    if (endDate) {
      whereClause.push('timestamp <= ?');
      params.push(endDate);
    }
    const where = whereClause.length > 0 ? `WHERE ${whereClause.join(' AND ')}` : '';
    // SECURITY FIX: orderBy/order were previously interpolated verbatim into
    // the SQL string, allowing SQL injection through filter input. Restrict
    // them to known-safe values and fall back to the defaults otherwise.
    const sortableColumns = new Set([
      'id', 'log_id', 'source', 'level', 'category',
      'user_id', 'ip_address', 'timestamp', 'created_at'
    ]);
    const safeOrderBy = sortableColumns.has(orderBy) ? orderBy : 'timestamp';
    const safeOrder = String(order).toUpperCase() === 'ASC' ? 'ASC' : 'DESC';
    params.push(limit, offset);
    return new Promise((resolve, reject) => {
      db.all(
        `SELECT * FROM aggregated_logs ${where}
         ORDER BY ${safeOrderBy} ${safeOrder}
         LIMIT ? OFFSET ?`,
        params,
        (err, rows) => {
          if (err) reject(err);
          else resolve(rows);
        }
      );
    });
  }

  /**
   * Get log statistics grouped by source/level/category over the last
   * `timeRange` hours.
   */
  async getStatistics(timeRange = 24) {
    const startTime = new Date(Date.now() - timeRange * 60 * 60 * 1000).toISOString();
    return new Promise((resolve, reject) => {
      db.all(
        `SELECT
          source,
          level,
          category,
          COUNT(*) as count,
          MIN(timestamp) as first_seen,
          MAX(timestamp) as last_seen
        FROM aggregated_logs
        WHERE timestamp >= ?
        GROUP BY source, level, category
        ORDER BY count DESC`,
        [startTime],
        (err, rows) => {
          if (err) {
            reject(err);
            return;
          }
          const stats = {
            timeRange: `${timeRange} hours`,
            totalLogs: rows.reduce((sum, row) => sum + row.count, 0),
            bySource: {},
            byLevel: {},
            byCategory: {},
            breakdown: rows
          };
          // Roll the grouped rows up into per-dimension totals.
          rows.forEach(row => {
            stats.bySource[row.source] = (stats.bySource[row.source] || 0) + row.count;
            stats.byLevel[row.level] = (stats.byLevel[row.level] || 0) + row.count;
            stats.byCategory[row.category] = (stats.byCategory[row.category] || 0) + row.count;
          });
          resolve(stats);
        }
      );
    });
  }

  /**
   * Verify log integrity
   * Checks if log entries have been tampered with by recomputing the HMAC.
   * @param {?String[]} logIds - Specific ids to check; defaults to the most
   *   recent 1000 entries.
   */
  async verifyIntegrity(logIds = null) {
    const query = logIds
      ? `SELECT * FROM aggregated_logs WHERE log_id IN (${logIds.map(() => '?').join(',')})`
      : `SELECT * FROM aggregated_logs ORDER BY timestamp DESC LIMIT 1000`;
    const params = logIds || [];
    return new Promise((resolve, reject) => {
      db.all(query, params, (err, rows) => {
        if (err) {
          reject(err);
          return;
        }
        const results = {
          total: rows.length,
          verified: 0,
          tampered: 0,
          tamperedLogs: []
        };
        rows.forEach(row => {
          if (this.verifySignature(row)) {
            results.verified++;
          } else {
            results.tampered++;
            results.tamperedLogs.push({
              log_id: row.log_id,
              timestamp: row.timestamp,
              source: row.source
            });
          }
        });
        resolve(results);
      });
    });
  }

  /**
   * Cleanup old logs based on retention policy.
   * @returns {Promise<number>} Number of deleted rows.
   */
  async cleanup(retentionDays = 90) {
    const cutoffDate = new Date(Date.now() - retentionDays * 24 * 60 * 60 * 1000).toISOString();
    return new Promise((resolve, reject) => {
      db.run(
        'DELETE FROM aggregated_logs WHERE timestamp < ?',
        [cutoffDate],
        // Plain function: sqlite3 exposes the change count on `this`.
        function(err) {
          if (err) reject(err);
          else {
            logger.info(`[LogAggregator] Cleaned up ${this.changes} old log entries (retention: ${retentionDays} days)`);
            resolve(this.changes);
          }
        }
      );
    });
  }

  /**
   * Export logs to external SIEM system
   * @param {Object} filters - Same filters as query(); capped at 10000 rows.
   * @param {String} format - 'json' or 'csv'.
   * @throws {Error} On unsupported formats.
   */
  async export(filters = {}, format = 'json') {
    const logs = await this.query({ ...filters, limit: 10000 });
    if (format === 'json') {
      return JSON.stringify(logs, null, 2);
    } else if (format === 'csv') {
      const headers = ['log_id', 'source', 'level', 'category', 'message', 'timestamp', 'ip_address', 'user_id'];
      const csv = [headers.join(',')];
      logs.forEach(log => {
        const row = headers.map(header => {
          const value = log[header] || '';
          // Quote every field and escape embedded quotes for CSV safety.
          return `"${String(value).replace(/"/g, '""')}"`;
        });
        csv.push(row.join(','));
      });
      return csv.join('\n');
    }
    throw new Error(`Unsupported export format: ${format}`);
  }
}
// Create singleton instance (side effect on require: creates the table and
// starts the periodic flush timer)
const logAggregator = new LogAggregator();
// Register default sources with per-source priority and retention (in days)
logAggregator.registerSource('authentication', { priority: 'critical', retention: 365 });
logAggregator.registerSource('authorization', { priority: 'high', retention: 365 });
logAggregator.registerSource('security_audit', { priority: 'critical', retention: 365 });
logAggregator.registerSource('application', { priority: 'medium', retention: 90 });
logAggregator.registerSource('system', { priority: 'high', retention: 180 });
logAggregator.registerSource('access', { priority: 'low', retention: 30 });
module.exports = logAggregator;

124
backend/utils/logger.js Normal file
View file

@ -0,0 +1,124 @@
const winston = require('winston');
const path = require('path');
const fs = require('fs');
// Ensure logs directory exists
const logsDir = path.join(__dirname, '../../logs');
if (!fs.existsSync(logsDir)) {
fs.mkdirSync(logsDir, { recursive: true, mode: 0o755 });
}
/**
 * Custom format to sanitize sensitive data from logs
 * Removes passwords, tokens, secrets, and other sensitive information
 * by serializing the entry, redacting known key patterns, and re-parsing.
 */
const sanitizeFormat = winston.format((info) => {
  // Serialize the entry for pattern matching. Circular metadata (common on
  // augmented Error objects) would make JSON.stringify throw and take down
  // the logging pipeline, so fail open and emit the entry unmodified.
  let infoStr;
  try {
    infoStr = JSON.stringify(info);
  } catch (e) {
    return info;
  }
  // Patterns to redact
  const sensitivePatterns = [
    { pattern: /"password"\s*:\s*"[^"]*"/gi, replacement: '"password":"[REDACTED]"' },
    { pattern: /"token"\s*:\s*"[^"]*"/gi, replacement: '"token":"[REDACTED]"' },
    { pattern: /"secret"\s*:\s*"[^"]*"/gi, replacement: '"secret":"[REDACTED]"' },
    { pattern: /"apiKey"\s*:\s*"[^"]*"/gi, replacement: '"apiKey":"[REDACTED]"' },
    { pattern: /"api_key"\s*:\s*"[^"]*"/gi, replacement: '"api_key":"[REDACTED]"' },
    { pattern: /"authorization"\s*:\s*"Bearer\s+[^"]*"/gi, replacement: '"authorization":"Bearer [REDACTED]"' },
    { pattern: /"privateKey"\s*:\s*"[^"]*"/gi, replacement: '"privateKey":"[REDACTED]"' },
    { pattern: /"private_key"\s*:\s*"[^"]*"/gi, replacement: '"private_key":"[REDACTED]"' }
  ];
  let sanitized = infoStr;
  sensitivePatterns.forEach(({ pattern, replacement }) => {
    sanitized = sanitized.replace(pattern, replacement);
  });
  try {
    return JSON.parse(sanitized);
  } catch (e) {
    return info; // Return original if parsing fails
  }
});
/**
 * Production format: Structured JSON logs without sensitive data
 */
const productionFormat = winston.format.combine(
  winston.format.timestamp({ format: 'YYYY-MM-DD HH:mm:ss' }),
  winston.format.errors({ stack: true }),
  sanitizeFormat(),
  winston.format.json()
);
/**
 * Development format: Human-readable with colors
 */
const developmentFormat = winston.format.combine(
  winston.format.timestamp({ format: 'YYYY-MM-DD HH:mm:ss' }),
  winston.format.errors({ stack: true }),
  sanitizeFormat(),
  winston.format.colorize(),
  winston.format.printf(({ timestamp, level, message, ...meta }) => {
    let msg = `${timestamp} [${level}]: ${message}`;
    // Append remaining metadata (pretty-printed) when present.
    if (Object.keys(meta).length > 0) {
      msg += ` ${JSON.stringify(meta, null, 2)}`;
    }
    return msg;
  })
);
const isProduction = process.env.NODE_ENV === 'production';
// NOTE: the file transports always use productionFormat (JSON); only the
// console transport added below switches to the human-readable dev format.
const logger = winston.createLogger({
  level: process.env.LOG_LEVEL || (isProduction ? 'info' : 'debug'),
  format: productionFormat,
  defaultMeta: { service: 'streamflow-iptv' },
  transports: [
    // Error logs - separate file for errors only
    new winston.transports.File({
      filename: path.join(logsDir, 'error.log'),
      level: 'error',
      maxsize: 5242880, // 5MB
      maxFiles: 5,
      tailable: true
    }),
    // Combined logs - all levels
    new winston.transports.File({
      filename: path.join(logsDir, 'combined.log'),
      maxsize: 5242880, // 5MB
      maxFiles: 5,
      tailable: true
    })
  ],
  // Don't exit on uncaught exceptions
  exitOnError: false
});
// Console transport for development
if (!isProduction) {
  logger.add(new winston.transports.Console({
    format: developmentFormat,
    handleExceptions: true
  }));
}
// Security audit log helper
// Emits a structured SECURITY_EVENT record at info level. `details` is
// spread last, so a details.timestamp/details.action would override the
// generated fields.
logger.security = (action, details) => {
  logger.info('SECURITY_EVENT', {
    action,
    timestamp: new Date().toISOString(),
    ...details
  });
};
// Performance monitoring helper
// Records an operation name and its duration in milliseconds.
logger.performance = (operation, duration, details = {}) => {
  logger.info('PERFORMANCE', {
    operation,
    duration_ms: duration,
    timestamp: new Date().toISOString(),
    ...details
  });
};
module.exports = logger;

View file

@ -0,0 +1,94 @@
const axios = require('axios');
const fs = require('fs');
const path = require('path');
const { db } = require('../database/db');
const logger = require('./logger');
/**
 * Parse an M3U/M3U8 playlist and insert its channels into the database.
 *
 * @param {string} source - File path (when isFile) or HTTP(S) URL of the playlist.
 * @param {number} playlistId - Owning playlist row id.
 * @param {boolean} [isFile=false] - Read from disk instead of fetching over HTTP.
 * @returns {Promise<number>} Number of channels parsed.
 * @throws Re-throws fetch/read errors after logging them.
 */
const parseM3U = async (source, playlistId, isFile = false) => {
  try {
    let content;
    if (isFile) {
      // NOTE(review): synchronous read inside an async function blocks the
      // event loop for large playlists; fs.promises.readFile would be nicer.
      content = fs.readFileSync(source, 'utf8');
    } else {
      const response = await axios.get(source, { timeout: 30000 });
      content = response.data;
    }
    const lines = content.split('\n').map(line => line.trim());
    const channels = [];
    for (let i = 0; i < lines.length; i++) {
      if (!lines[i].startsWith('#EXTINF:')) continue;
      const info = lines[i];
      // FIX: the stream URL is not always on the very next line — real-world
      // playlists interleave blank lines and other #EXT directives (e.g.
      // #EXTVLCOPT) between #EXTINF and the URL. Scan forward to the next
      // non-empty, non-comment line, stopping if a new #EXTINF begins.
      let j = i + 1;
      while (j < lines.length && (lines[j] === '' || lines[j].startsWith('#'))) {
        if (lines[j].startsWith('#EXTINF:')) break; // entry had no URL
        j++;
      }
      const url = j < lines.length ? lines[j] : null;
      if (!url || url === '' || url.startsWith('#')) continue;
      // Display name is everything after the last comma of the EXTINF line.
      const nameMatch = info.match(/,(.+)$/);
      const name = nameMatch ? nameMatch[1].trim() : 'Unknown';
      // Optional tvg-* / group attributes.
      const tvgIdMatch = info.match(/tvg-id="([^"]*)"/);
      const tvgNameMatch = info.match(/tvg-name="([^"]*)"/);
      const tvgLogoMatch = info.match(/tvg-logo="([^"]*)"/);
      const groupTitleMatch = info.match(/group-title="([^"]*)"/);
      const languageMatch = info.match(/tvg-language="([^"]*)"/);
      const countryMatch = info.match(/tvg-country="([^"]*)"/);
      // Heuristic: treat entries mentioning "radio" anywhere as radio streams.
      const isRadio = info.toLowerCase().includes('radio') ||
        groupTitleMatch?.[1]?.toLowerCase().includes('radio') ||
        url.toLowerCase().includes('radio');
      channels.push({
        playlistId,
        name,
        url,
        logo: tvgLogoMatch ? tvgLogoMatch[1] : null,
        groupName: groupTitleMatch ? groupTitleMatch[1] : 'Uncategorized',
        tvgId: tvgIdMatch ? tvgIdMatch[1] : null,
        tvgName: tvgNameMatch ? tvgNameMatch[1] : null,
        language: languageMatch ? languageMatch[1] : null,
        country: countryMatch ? countryMatch[1] : null,
        isRadio: isRadio ? 1 : 0
      });
      i = j; // Resume after the URL line
    }
    // Insert channels in batches.
    // NOTE(review): sqlite3 stmt.run is fire-and-forget here; insert errors
    // surface only via sqlite3's default error handling, not this promise.
    const batchSize = 100;
    for (let i = 0; i < channels.length; i += batchSize) {
      const batch = channels.slice(i, i + batchSize);
      const stmt = db.prepare(`
        INSERT INTO channels (playlist_id, name, url, logo, group_name, tvg_id, tvg_name, language, country, is_radio)
        VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
      `);
      for (const channel of batch) {
        stmt.run(
          channel.playlistId,
          channel.name,
          channel.url,
          channel.logo,
          channel.groupName,
          channel.tvgId,
          channel.tvgName,
          channel.language,
          channel.country,
          channel.isRadio
        );
      }
      stmt.finalize();
    }
    logger.info(`Parsed ${channels.length} channels for playlist ${playlistId}`);
    return channels.length;
  } catch (error) {
    logger.error('M3U parsing error:', error);
    throw error;
  }
};
module.exports = { parseM3U };

View file

@ -0,0 +1,165 @@
/**
 * Password Policy Configuration
 * Enforces strong password requirements
 */
const PASSWORD_POLICY = {
  minLength: 12,
  maxLength: 128,
  requireUppercase: true,
  requireLowercase: true,
  requireNumbers: true,
  requireSpecialChars: true,
  // Characters accepted as "special" by validatePassword()
  specialChars: '!@#$%^&*()_+-=[]{}|;:,.<>?',
  preventCommonPasswords: true,
  preventUserInfo: true, // Don't allow username/email in password
  // Max allowed run of the same character (see validatePassword)
  maxRepeatingChars: 3,
  historyCount: 5 // Remember last 5 passwords
};
// Failed-login lockout settings (durations in milliseconds)
const ACCOUNT_LOCKOUT = {
  maxFailedAttempts: 5,
  lockoutDuration: 30 * 60 * 1000, // 30 minutes
  resetAfterSuccess: true,
  notifyOnLockout: true
};
// Password rotation settings (all values in days)
const PASSWORD_EXPIRY = {
  enabled: true,
  expiryDays: 90,
  warningDays: 14,
  gracePeriodDays: 7
};
// Session limits (timeouts in milliseconds)
const SESSION_POLICY = {
  maxConcurrentSessions: 3,
  absoluteTimeout: 24 * 60 * 60 * 1000, // 24 hours
  idleTimeout: 2 * 60 * 60 * 1000, // 2 hours
  refreshTokenRotation: true
};
// Common passwords to block (a selection of the most frequently used ones;
// matched as substrings, case-insensitively, in validatePassword)
const COMMON_PASSWORDS = [
  '123456', 'password', '12345678', 'qwerty', '123456789', '12345', '1234', '111111',
  '1234567', 'dragon', '123123', 'baseball', 'iloveyou', 'trustno1', '1234567890',
  'sunshine', 'master', '123321', '666666', 'photoshop', '1111111', 'princess', 'azerty',
  '000000', 'access', '696969', 'batman', '121212', 'letmein', 'qwertyuiop', 'admin',
  'welcome', 'monkey', 'login', 'abc123', 'starwars', 'shadow', 'ashley', 'football',
  'superman', 'michael', 'ninja', 'mustang', 'password1', 'passw0rd', 'password123'
];
/**
 * Validates password against policy
 * @param {string} password - Password to validate
 * @param {object} userData - User data (username, email) to prevent personal info
 * @returns {object} - {valid: boolean, errors: string[], strength: number}
 */
function validatePassword(password, userData = {}) {
  // Guard: non-string input previously threw a TypeError at password.length.
  if (typeof password !== 'string') {
    return { valid: false, errors: ['Password is required'], strength: 0 };
  }
  const errors = [];
  // Length check
  if (password.length < PASSWORD_POLICY.minLength) {
    errors.push(`Password must be at least ${PASSWORD_POLICY.minLength} characters long`);
  }
  if (password.length > PASSWORD_POLICY.maxLength) {
    errors.push(`Password must not exceed ${PASSWORD_POLICY.maxLength} characters`);
  }
  // Character requirements
  if (PASSWORD_POLICY.requireUppercase && !/[A-Z]/.test(password)) {
    errors.push('Password must contain at least one uppercase letter');
  }
  if (PASSWORD_POLICY.requireLowercase && !/[a-z]/.test(password)) {
    errors.push('Password must contain at least one lowercase letter');
  }
  if (PASSWORD_POLICY.requireNumbers && !/\d/.test(password)) {
    errors.push('Password must contain at least one number');
  }
  if (PASSWORD_POLICY.requireSpecialChars) {
    // Build a character class from the configured special chars, escaping
    // regex metacharacters.
    const specialCharsRegex = new RegExp(`[${PASSWORD_POLICY.specialChars.replace(/[.*+?^${}()|[\]\\]/g, '\\$&')}]`);
    if (!specialCharsRegex.test(password)) {
      errors.push('Password must contain at least one special character (!@#$%^&*...)');
    }
  }
  // Repeating characters: backreference to the captured char.
  const repeatingRegex = new RegExp(`(.)\\1{${PASSWORD_POLICY.maxRepeatingChars},}`);
  if (repeatingRegex.test(password)) {
    errors.push(`Password cannot contain more than ${PASSWORD_POLICY.maxRepeatingChars} repeating characters`);
  }
  // Common passwords (substring match, case-insensitive)
  if (PASSWORD_POLICY.preventCommonPasswords) {
    const lowerPassword = password.toLowerCase();
    if (COMMON_PASSWORDS.some(common => lowerPassword.includes(common))) {
      errors.push('Password is too common or easily guessable');
    }
  }
  // User info in password
  if (PASSWORD_POLICY.preventUserInfo && userData) {
    const lowerPassword = password.toLowerCase();
    if (userData.username && lowerPassword.includes(userData.username.toLowerCase())) {
      errors.push('Password cannot contain your username');
    }
    if (userData.email) {
      // Only the local part (before '@') is checked against the password.
      const emailParts = userData.email.split('@')[0].toLowerCase();
      if (lowerPassword.includes(emailParts)) {
        errors.push('Password cannot contain your email address');
      }
    }
  }
  return {
    valid: errors.length === 0,
    errors,
    strength: calculatePasswordStrength(password)
  };
}
/**
* Calculate password strength score (0-100)
*/
function calculatePasswordStrength(password) {
let score = 0;
// Length score (0-30 points)
score += Math.min(30, password.length * 2);
// Character variety (0-40 points)
if (/[a-z]/.test(password)) score += 10;
if (/[A-Z]/.test(password)) score += 10;
if (/\d/.test(password)) score += 10;
if (/[^a-zA-Z0-9]/.test(password)) score += 10;
// Patterns (0-30 points)
const hasNoRepeats = !/(.)\\1{2,}/.test(password);
const hasNoSequence = !/(?:abc|bcd|cde|123|234|345)/i.test(password);
const hasMixedCase = /[a-z]/.test(password) && /[A-Z]/.test(password);
if (hasNoRepeats) score += 10;
if (hasNoSequence) score += 10;
if (hasMixedCase) score += 10;
return Math.min(100, score);
}
/**
 * Get password strength label
 * Maps a 0-100 strength score to a UI label and color band.
 * @param {number} score
 * @returns {{label: string, color: string}}
 */
function getStrengthLabel(score) {
  const bands = [
    [80, { label: 'Strong', color: 'success' }],
    [60, { label: 'Good', color: 'info' }],
    [40, { label: 'Fair', color: 'warning' }]
  ];
  for (const [threshold, info] of bands) {
    if (score >= threshold) {
      return info;
    }
  }
  return { label: 'Weak', color: 'error' };
}
module.exports = {
PASSWORD_POLICY,
ACCOUNT_LOCKOUT,
PASSWORD_EXPIRY,
SESSION_POLICY,
validatePassword,
calculatePasswordStrength,
getStrengthLabel
};

View file

@ -0,0 +1,324 @@
const axios = require('axios');
const logger = require('./logger');
/**
* Radio station metadata providers
* Maps station names/URLs to their API endpoints or scraping methods
*/
// Europa FM API
// Scrapes europafm.ro's now-playing page for the current track, then falls
// back to a JSON API endpoint. Resolves to { title, artist, source } or
// null when nothing could be extracted; never throws.
async function getEuropaFMMetadata() {
  try {
    const response = await axios.get('https://www.europafm.ro/now-playing/', {
      timeout: 5000,
      headers: {
        'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36'
      }
    });
    // Parse the HTML or JSON response
    const html = response.data;
    // Look for common patterns in Europa FM's page
    const titleMatch = html.match(/<div class="now-playing-title">([^<]+)<\/div>/i) ||
      html.match(/<span class="track-title">([^<]+)<\/span>/i) ||
      html.match(/"title":"([^"]+)"/);
    const artistMatch = html.match(/<div class="now-playing-artist">([^<]+)<\/div>/i) ||
      html.match(/<span class="track-artist">([^<]+)<\/span>/i) ||
      html.match(/"artist":"([^"]+)"/);
    if (titleMatch || artistMatch) {
      return {
        title: titleMatch ? titleMatch[1].trim() : null,
        artist: artistMatch ? artistMatch[1].trim() : null,
        source: 'Europa FM Website'
      };
    }
    // Try API endpoint
    try {
      const apiResponse = await axios.get('https://www.europafm.ro/api/now-playing', {
        timeout: 3000,
        headers: { 'User-Agent': 'Mozilla/5.0' }
      });
      if (apiResponse.data && apiResponse.data.title) {
        return {
          title: apiResponse.data.title,
          artist: apiResponse.data.artist || null,
          source: 'Europa FM API'
        };
      }
    } catch (apiError) {
      // API might not exist, continue
    }
    return null;
  } catch (error) {
    // Best-effort scraper: log and return null, metadata is optional.
    logger.error('Europa FM metadata fetch error:', error.message);
    return null;
  }
}
// Radio Romania (various stations)
// Scrapes the station's live page for currentSong/currentArtist JSON blobs.
// Resolves to { title, artist, source } or null; never throws.
async function getRadioRomaniaMetadata(stationId = 'actualitati') {
  try {
    const { data: html } = await axios.get(`https://www.radioromania.ro/live/${stationId}/`, {
      timeout: 5000,
      headers: {
        'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36'
      }
    });
    const song = html.match(/"currentSong":"([^"]+)"/);
    const performer = html.match(/"currentArtist":"([^"]+)"/);
    if (!song && !performer) {
      return null;
    }
    return {
      title: song ? song[1].trim() : null,
      artist: performer ? performer[1].trim() : null,
      source: 'Radio Romania'
    };
  } catch (error) {
    logger.error('Radio Romania metadata fetch error:', error.message);
    return null;
  }
}
// Magic FM
// Scrapes the Magic FM now-playing page for title/artist JSON blobs.
// Resolves to { title, artist, source } or null; never throws.
async function getMagicFMMetadata() {
  try {
    const { data: html } = await axios.get('https://www.magicfm.ro/now-playing/', {
      timeout: 5000,
      headers: {
        'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36'
      }
    });
    const title = html.match(/"title":"([^"]+)"/);
    const artist = html.match(/"artist":"([^"]+)"/);
    if (!title && !artist) {
      return null;
    }
    return {
      title: title ? title[1].trim() : null,
      artist: artist ? artist[1].trim() : null,
      source: 'Magic FM'
    };
  } catch (error) {
    logger.error('Magic FM metadata fetch error:', error.message);
    return null;
  }
}
// Kiss FM
// Scrapes the Kiss FM now-playing page for title/artist JSON blobs.
// Resolves to { title, artist, source } or null; never throws.
async function getKissFMMetadata() {
  try {
    const { data: html } = await axios.get('https://www.kissfm.ro/now-playing/', {
      timeout: 5000,
      headers: {
        'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36'
      }
    });
    const title = html.match(/"title":"([^"]+)"/);
    const artist = html.match(/"artist":"([^"]+)"/);
    if (!title && !artist) {
      return null;
    }
    return {
      title: title ? title[1].trim() : null,
      artist: artist ? artist[1].trim() : null,
      source: 'Kiss FM'
    };
  } catch (error) {
    logger.error('Kiss FM metadata fetch error:', error.message);
    return null;
  }
}
// Pro FM
// Scrapes the Pro FM now-playing page for title/artist JSON blobs.
// Resolves to { title, artist, source } or null; never throws.
async function getProFMMetadata() {
  try {
    const { data: html } = await axios.get('https://www.profm.ro/now-playing/', {
      timeout: 5000,
      headers: {
        'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36'
      }
    });
    const title = html.match(/"title":"([^"]+)"/);
    const artist = html.match(/"artist":"([^"]+)"/);
    if (!title && !artist) {
      return null;
    }
    return {
      title: title ? title[1].trim() : null,
      artist: artist ? artist[1].trim() : null,
      source: 'Pro FM'
    };
  } catch (error) {
    logger.error('Pro FM metadata fetch error:', error.message);
    return null;
  }
}
// Generic RadioBrowser API fallback
// Looks the station up in the community RadioBrowser directory, then tries
// to scrape the station's own homepage for a now-playing string. Resolves
// to { title, artist, source } or null; never throws.
async function getRadioBrowserMetadata(stationName) {
  try {
    // Search for station
    const searchResponse = await axios.get('https://de1.api.radio-browser.info/json/stations/search', {
      params: {
        name: stationName,
        limit: 1
      },
      timeout: 5000,
      headers: {
        'User-Agent': 'StreamFlow/1.0'
      }
    });
    if (searchResponse.data && searchResponse.data.length > 0) {
      const station = searchResponse.data[0];
      // Radio Browser doesn't provide real-time metadata, but we can try the station's homepage
      if (station.homepage) {
        try {
          const homepageResponse = await axios.get(station.homepage, {
            timeout: 3000,
            headers: { 'User-Agent': 'Mozilla/5.0' }
          });
          const html = homepageResponse.data;
          // Try common patterns
          const patterns = [
            /"nowPlaying":"([^"]+)"/,
            /"current_track":"([^"]+)"/,
            /<div[^>]*class="[^"]*now-playing[^"]*"[^>]*>([^<]+)</i,
            /"title":"([^"]+)"/
          ];
          for (const pattern of patterns) {
            const match = html.match(pattern);
            if (match && match[1]) {
              // "Artist - Title" strings are split on the first separator;
              // extra " - " segments stay in the title.
              const parts = match[1].split(' - ');
              if (parts.length >= 2) {
                return {
                  artist: parts[0].trim(),
                  title: parts.slice(1).join(' - ').trim(),
                  source: 'Radio Browser + Website'
                };
              }
              return {
                title: match[1].trim(),
                artist: null,
                source: 'Radio Browser + Website'
              };
            }
          }
        } catch (homepageError) {
          // Homepage fetch failed, ignore
        }
      }
    }
    return null;
  } catch (error) {
    logger.error('Radio Browser metadata fetch error:', error.message);
    return null;
  }
}
/**
 * Main function to get metadata for a radio station.
 * Identifies the station from its name and dispatches to the matching
 * provider, falling back to the RadioBrowser directory.
 *
 * @param {string} channelName - Display name of the channel.
 * @param {?string} channelUrl - Stream URL (currently unused; kept for interface stability).
 * @returns {Promise<?{title: ?string, artist: ?string, source: string}>}
 */
async function getRadioStationMetadata(channelName, channelUrl) {
  const nameLower = channelName.toLowerCase();
  logger.info(`[RadioMetadata] Fetching metadata for: ${channelName}`);
  // Try specific providers based on station name
  try {
    // Europa FM
    if (nameLower.includes('europa') && nameLower.includes('fm')) {
      const metadata = await getEuropaFMMetadata();
      if (metadata) {
        logger.info(`[RadioMetadata] Found metadata from ${metadata.source}`);
        return metadata;
      }
    }
    // Radio Romania
    if (nameLower.includes('radio') && nameLower.includes('romania')) {
      let stationId = 'actualitati';
      if (nameLower.includes('muzical')) stationId = 'muzical';
      if (nameLower.includes('cultural')) stationId = 'cultural';
      // BUGFIX: nameLower is lowercased, so the previous check for 'Cluj'
      // (capital C) could never match.
      if (nameLower.includes('cluj')) stationId = 'cluj';
      const metadata = await getRadioRomaniaMetadata(stationId);
      if (metadata) {
        logger.info(`[RadioMetadata] Found metadata from ${metadata.source}`);
        return metadata;
      }
    }
    // Magic FM
    if (nameLower.includes('magic') && nameLower.includes('fm')) {
      const metadata = await getMagicFMMetadata();
      if (metadata) {
        logger.info(`[RadioMetadata] Found metadata from ${metadata.source}`);
        return metadata;
      }
    }
    // Kiss FM
    if (nameLower.includes('kiss') && nameLower.includes('fm')) {
      const metadata = await getKissFMMetadata();
      if (metadata) {
        logger.info(`[RadioMetadata] Found metadata from ${metadata.source}`);
        return metadata;
      }
    }
    // Pro FM
    if (nameLower.includes('pro') && nameLower.includes('fm')) {
      const metadata = await getProFMMetadata();
      if (metadata) {
        logger.info(`[RadioMetadata] Found metadata from ${metadata.source}`);
        return metadata;
      }
    }
    // Fallback to RadioBrowser
    const metadata = await getRadioBrowserMetadata(channelName);
    if (metadata) {
      logger.info(`[RadioMetadata] Found metadata from ${metadata.source}`);
      return metadata;
    }
  } catch (error) {
    logger.error(`[RadioMetadata] Error fetching metadata: ${error.message}`);
  }
  logger.info(`[RadioMetadata] No external metadata found for ${channelName}`);
  return null;
}
// Public API: only the aggregate lookup is exported; the per-provider
// helpers above are internal implementation details.
module.exports = {
getRadioStationMetadata
};

View file

@ -0,0 +1,858 @@
/**
* Response Protocol Manager
* Automated response protocols for security incident handling
* CWE-778 Compliance: Logs all automated responses and protocol executions
*/
const logger = require('./logger');
const logAggregator = require('./logAggregator');
const { db } = require('../database/db');
const EventEmitter = require('events');
class ResponseProtocolManager extends EventEmitter {
// protocols: protocol_id -> protocol row (with parsed trigger_condition/actions).
// executionHistory: protocol_id -> { lastExecuted, cooldownUntil } (cooldown only).
constructor() {
super();
this.protocols = new Map();
this.executionHistory = new Map();
// NOTE(review): initialize() is async and not awaited; methods invoked
// before it finishes may see empty tables/protocols — confirm acceptable.
this.initialize();
}
/**
 * Initialize response protocol manager.
 * Creates the backing tables, loads enabled protocols into memory and logs
 * the startup (CWE-778). Invoked fire-and-forget from the constructor.
 */
async initialize() {
await this.createProtocolsTable();
await this.createExecutionHistoryTable();
await this.loadProtocols();
logger.info('[ResponseProtocolManager] Initialized with automated response protocols');
// Log initialization (CWE-778)
logAggregator.aggregate('response_protocol_manager', 'info', 'security', 'Response protocol manager initialized', {
totalProtocols: this.protocols.size
});
}
/**
 * Create protocols table.
 * Creates the response_protocols table (if missing), its two lookup indexes,
 * and then seeds the default protocols.
 */
async createProtocolsTable() {
return new Promise((resolve, reject) => {
db.run(`
CREATE TABLE IF NOT EXISTS response_protocols (
id INTEGER PRIMARY KEY AUTOINCREMENT,
protocol_id TEXT UNIQUE NOT NULL,
name TEXT NOT NULL,
description TEXT,
trigger_type TEXT NOT NULL,
trigger_condition TEXT NOT NULL,
actions TEXT NOT NULL,
severity TEXT NOT NULL,
enabled INTEGER DEFAULT 1,
auto_execute INTEGER DEFAULT 0,
cooldown_minutes INTEGER DEFAULT 60,
created_at DATETIME DEFAULT CURRENT_TIMESTAMP,
updated_at DATETIME DEFAULT CURRENT_TIMESTAMP
)
`, async (err) => {
if (err) reject(err);
else {
// Index creation is fire-and-forget; errors are not surfaced.
db.run(`CREATE INDEX IF NOT EXISTS idx_protocols_trigger ON response_protocols(trigger_type, enabled)`);
db.run(`CREATE INDEX IF NOT EXISTS idx_protocols_severity ON response_protocols(severity, enabled)`);
// NOTE(review): if createDefaultProtocols() rejects, the rejection is
// swallowed by this async callback and the outer promise never
// settles — confirm whether that is acceptable.
await this.createDefaultProtocols();
resolve();
}
});
});
}
/**
 * Create execution history table.
 * One row per protocol execution; execution_result stores the per-action
 * outcomes as a JSON string.
 */
async createExecutionHistoryTable() {
return new Promise((resolve, reject) => {
db.run(`
CREATE TABLE IF NOT EXISTS protocol_executions (
id INTEGER PRIMARY KEY AUTOINCREMENT,
execution_id TEXT UNIQUE NOT NULL,
protocol_id TEXT NOT NULL,
trigger_event TEXT NOT NULL,
actions_executed TEXT NOT NULL,
execution_status TEXT NOT NULL,
execution_result TEXT,
executed_by TEXT DEFAULT 'system',
executed_at DATETIME DEFAULT CURRENT_TIMESTAMP
)
`, (err) => {
if (err) reject(err);
else {
// Index creation is fire-and-forget; errors are not surfaced.
db.run(`CREATE INDEX IF NOT EXISTS idx_executions_protocol ON protocol_executions(protocol_id)`);
db.run(`CREATE INDEX IF NOT EXISTS idx_executions_status ON protocol_executions(execution_status)`);
resolve();
}
});
});
}
/**
 * Create default response protocols.
 * Seeds the response_protocols table with a baseline protocol set. Uses
 * INSERT OR IGNORE so rows already present (possibly customized by an
 * admin) are never overwritten.
 */
async createDefaultProtocols() {
const defaultProtocols = [
{
protocol_id: 'PROTOCOL-BRUTE-FORCE-RESPONSE',
name: 'Brute Force Attack Response',
description: 'Automated response to brute force attacks',
trigger_type: 'anomaly',
trigger_condition: JSON.stringify({ anomaly_type: 'brute_force_attack', severity: 'critical' }),
actions: JSON.stringify([
{ action: 'block_ip', duration_minutes: 60, reason: 'brute_force_attack' },
{ action: 'notify_admin', channel: 'email', priority: 'high' },
{ action: 'log_incident', category: 'security_breach' }
]),
severity: 'critical',
auto_execute: 1,
cooldown_minutes: 30
},
{
protocol_id: 'PROTOCOL-CREDENTIAL-STUFFING-RESPONSE',
name: 'Credential Stuffing Response',
description: 'Automated response to credential stuffing attacks',
trigger_type: 'anomaly',
trigger_condition: JSON.stringify({ anomaly_type: 'credential_stuffing', severity: 'critical' }),
actions: JSON.stringify([
{ action: 'block_ip', duration_minutes: 120, reason: 'credential_stuffing' },
{ action: 'require_2fa', target: 'affected_accounts' },
{ action: 'notify_admin', channel: 'email', priority: 'high' },
{ action: 'log_incident', category: 'account_compromise' }
]),
severity: 'critical',
auto_execute: 1,
cooldown_minutes: 60
},
{
protocol_id: 'PROTOCOL-PRIVILEGE-ESC-RESPONSE',
name: 'Privilege Escalation Response',
description: 'Automated response to privilege escalation attempts',
trigger_type: 'anomaly',
trigger_condition: JSON.stringify({ anomaly_type: 'privilege_escalation', severity: 'critical' }),
actions: JSON.stringify([
{ action: 'lock_account', target: 'attacker', duration_minutes: 240 },
{ action: 'revoke_sessions', target: 'attacker' },
{ action: 'notify_admin', channel: 'email', priority: 'critical' },
{ action: 'escalate_incident', level: 'security_team' },
{ action: 'log_incident', category: 'privilege_violation' }
]),
severity: 'critical',
auto_execute: 1,
cooldown_minutes: 15
},
{
protocol_id: 'PROTOCOL-SUSPICIOUS-IP-RESPONSE',
name: 'Suspicious IP Response',
description: 'Automated response to suspicious IP activity',
trigger_type: 'anomaly',
trigger_condition: JSON.stringify({ anomaly_type: 'suspicious_ip', severity: 'high' }),
actions: JSON.stringify([
{ action: 'rate_limit_ip', limit: 10, window_minutes: 10 },
{ action: 'notify_admin', channel: 'in_app', priority: 'medium' },
{ action: 'log_incident', category: 'suspicious_activity' }
]),
severity: 'high',
auto_execute: 1,
cooldown_minutes: 60
},
{
protocol_id: 'PROTOCOL-DATA-EXFIL-RESPONSE',
name: 'Data Exfiltration Response',
description: 'Automated response to data exfiltration attempts',
trigger_type: 'anomaly',
trigger_condition: JSON.stringify({ anomaly_type: 'data_exfiltration', severity: 'high' }),
actions: JSON.stringify([
{ action: 'block_ip', duration_minutes: 180, reason: 'data_exfiltration' },
{ action: 'lock_account', target: 'attacker', duration_minutes: 360 },
{ action: 'notify_admin', channel: 'email', priority: 'critical' },
{ action: 'escalate_incident', level: 'data_protection_team' },
{ action: 'log_incident', category: 'data_breach' }
]),
severity: 'high',
// Deliberately manual (auto_execute: 0): locking accounts on a possible
// false positive is considered too disruptive to automate.
auto_execute: 0,
cooldown_minutes: 120
},
{
protocol_id: 'PROTOCOL-MALICIOUS-SIGNATURE-RESPONSE',
name: 'Malicious Signature Response',
description: 'Automated response to malicious signature matches',
trigger_type: 'signature',
trigger_condition: JSON.stringify({ signature_type: 'attack_pattern', threat_level: 'critical', auto_block: true }),
actions: JSON.stringify([
{ action: 'block_ip', duration_minutes: 240, reason: 'malicious_signature' },
{ action: 'notify_admin', channel: 'email', priority: 'high' },
{ action: 'log_incident', category: 'attack_detected' }
]),
severity: 'critical',
auto_execute: 1,
cooldown_minutes: 30
},
{
protocol_id: 'PROTOCOL-THREAT-SCORE-CRITICAL',
name: 'Critical Threat Score Response',
description: 'Automated response when threat score reaches critical level',
trigger_type: 'threshold',
trigger_condition: JSON.stringify({ metric: 'threat_score', operator: '>=', value: 80 }),
actions: JSON.stringify([
{ action: 'notify_admin', channel: 'email', priority: 'critical' },
{ action: 'escalate_incident', level: 'security_team' },
{ action: 'enable_enhanced_monitoring', duration_minutes: 120 },
{ action: 'log_incident', category: 'threat_escalation' }
]),
severity: 'critical',
auto_execute: 1,
cooldown_minutes: 60
}
];
// Inserted sequentially; enabled defaults to 1 via the table schema.
for (const protocol of defaultProtocols) {
await new Promise((resolve, reject) => {
db.run(
`INSERT OR IGNORE INTO response_protocols
(protocol_id, name, description, trigger_type, trigger_condition, actions, severity, auto_execute, cooldown_minutes)
VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?)`,
[
protocol.protocol_id,
protocol.name,
protocol.description,
protocol.trigger_type,
protocol.trigger_condition,
protocol.actions,
protocol.severity,
protocol.auto_execute,
protocol.cooldown_minutes
],
(err) => {
if (err) reject(err);
else resolve();
}
);
});
}
logger.info(`[ResponseProtocolManager] Created ${defaultProtocols.length} default protocols`);
}
/**
 * Load protocols from database into memory.
 * Replaces the in-memory cache with all enabled protocols; trigger_condition
 * and actions are parsed from their JSON columns.
 */
async loadProtocols() {
return new Promise((resolve, reject) => {
db.all(
`SELECT * FROM response_protocols WHERE enabled = 1`,
[],
(err, rows) => {
if (err) {
reject(err);
} else {
this.protocols.clear();
rows.forEach(row => {
// NOTE(review): JSON.parse throws on corrupt rows; that throw
// happens inside the sqlite callback and is not routed to
// reject() — confirm stored JSON is always trusted/valid.
row.trigger_condition = JSON.parse(row.trigger_condition);
row.actions = JSON.parse(row.actions);
this.protocols.set(row.protocol_id, row);
});
logger.info(`[ResponseProtocolManager] Loaded ${rows.length} active protocols`);
resolve();
}
}
);
});
}
/**
 * Execute protocols based on trigger event.
 * CWE-778: Logs all protocol executions.
 *
 * Only cached (enabled) protocols whose trigger_type matches and whose
 * condition key/value pairs all strictly equal the trigger event's are
 * considered. Protocols in cooldown or without auto_execute are skipped.
 * Matching protocols run sequentially, each awaited in turn.
 *
 * @param {string} triggerType - 'anomaly' | 'signature' | 'threshold' | ...
 * @param {object} triggerEvent - Event fields matched against conditions.
 * @param {object} [context] - Extra data (e.g. ip_address, user_id) passed
 *   through to action handlers.
 * @returns {Promise<{executed: boolean, protocols: Array}>} per-protocol
 *   execution summaries.
 */
async executeProtocols(triggerType, triggerEvent, context = {}) {
const matchingProtocols = Array.from(this.protocols.values()).filter(
p => p.trigger_type === triggerType && this.matchesTriggerCondition(p.trigger_condition, triggerEvent)
);
if (matchingProtocols.length === 0) {
return { executed: false, protocols: [] };
}
const executedProtocols = [];
for (const protocol of matchingProtocols) {
// Check cooldown
if (this.isInCooldown(protocol.protocol_id)) {
logger.info(`[ResponseProtocolManager] Protocol ${protocol.protocol_id} in cooldown, skipping`);
continue;
}
// Check if auto-execute is enabled
if (!protocol.auto_execute) {
logger.info(`[ResponseProtocolManager] Protocol ${protocol.protocol_id} requires manual execution, skipping`);
continue;
}
// Execute protocol
const executionResult = await this.executeProtocolActions(protocol, triggerEvent, context);
executedProtocols.push(executionResult);
// Set cooldown (even for partial failures; cooldown is per protocol)
this.setCooldown(protocol.protocol_id, protocol.cooldown_minutes);
// Log protocol execution (CWE-778)
logAggregator.aggregate('response_protocol_manager', 'warn', 'security', 'Response protocol executed', {
protocolId: protocol.protocol_id,
protocolName: protocol.name,
triggerType,
triggerEvent: JSON.stringify(triggerEvent).substring(0, 200),
actionsExecuted: executionResult.actionsExecuted.length,
executionStatus: executionResult.status,
context
});
logger.warn(`[ResponseProtocolManager] Protocol executed: ${protocol.name} (${executionResult.status})`);
}
return {
executed: executedProtocols.length > 0,
protocols: executedProtocols
};
}
/**
* Execute protocol actions
*/
async executeProtocolActions(protocol, triggerEvent, context) {
const executionId = `EXEC-${Date.now()}-${Math.random().toString(36).substr(2, 9).toUpperCase()}`;
const actionsExecuted = [];
const actionResults = [];
for (const action of protocol.actions) {
try {
const result = await this.executeAction(action, triggerEvent, context);
actionsExecuted.push(action.action);
actionResults.push({ action: action.action, status: 'success', result });
// Emit event for action execution
this.emit('action_executed', {
protocolId: protocol.protocol_id,
action: action.action,
result
});
} catch (error) {
logger.error(`[ResponseProtocolManager] Action execution failed: ${action.action}`, error);
actionResults.push({ action: action.action, status: 'failed', error: error.message });
}
}
const executionStatus = actionResults.every(r => r.status === 'success') ? 'success' : 'partial';
// Save execution history
await this.saveExecutionHistory({
executionId,
protocolId: protocol.protocol_id,
triggerEvent: JSON.stringify(triggerEvent),
actionsExecuted: JSON.stringify(actionsExecuted),
executionStatus,
executionResult: JSON.stringify(actionResults)
});
return {
executionId,
protocolId: protocol.protocol_id,
protocolName: protocol.name,
actionsExecuted,
actionResults,
status: executionStatus
};
}
/**
* Execute individual action
*/
async executeAction(action, triggerEvent, context) {
switch (action.action) {
case 'block_ip':
return await this.actionBlockIP(action, triggerEvent, context);
case 'lock_account':
return await this.actionLockAccount(action, triggerEvent, context);
case 'revoke_sessions':
return await this.actionRevokeSessions(action, triggerEvent, context);
case 'require_2fa':
return await this.actionRequire2FA(action, triggerEvent, context);
case 'rate_limit_ip':
return await this.actionRateLimitIP(action, triggerEvent, context);
case 'notify_admin':
return await this.actionNotifyAdmin(action, triggerEvent, context);
case 'escalate_incident':
return await this.actionEscalateIncident(action, triggerEvent, context);
case 'log_incident':
return await this.actionLogIncident(action, triggerEvent, context);
case 'enable_enhanced_monitoring':
return await this.actionEnableEnhancedMonitoring(action, triggerEvent, context);
default:
throw new Error(`Unknown action: ${action.action}`);
}
}
/**
* Action: Block IP address
*/
async actionBlockIP(action, triggerEvent, context) {
const ipAddress = context.ip_address || triggerEvent.ip_address;
const duration = action.duration_minutes || 60;
const reason = action.reason || 'security_violation';
if (!ipAddress) {
throw new Error('No IP address provided for blocking');
}
// TODO: Implement actual IP blocking (firewall rules, rate limiter, etc.)
logger.warn(`[ResponseProtocolManager] Action: Block IP ${ipAddress} for ${duration} minutes (reason: ${reason})`);
return {
action: 'block_ip',
ipAddress,
duration,
reason,
expiresAt: new Date(Date.now() + duration * 60 * 1000).toISOString()
};
}
/**
* Action: Lock user account
*/
async actionLockAccount(action, triggerEvent, context) {
const userId = context.user_id || triggerEvent.user_id;
const duration = action.duration_minutes || 120;
if (!userId) {
throw new Error('No user ID provided for account locking');
}
// TODO: Implement actual account locking
logger.warn(`[ResponseProtocolManager] Action: Lock account ${userId} for ${duration} minutes`);
return {
action: 'lock_account',
userId,
duration,
lockedUntil: new Date(Date.now() + duration * 60 * 1000).toISOString()
};
}
/**
* Action: Revoke all sessions
*/
async actionRevokeSessions(action, triggerEvent, context) {
const userId = context.user_id || triggerEvent.user_id;
if (!userId) {
throw new Error('No user ID provided for session revocation');
}
// TODO: Implement actual session revocation
logger.warn(`[ResponseProtocolManager] Action: Revoke all sessions for user ${userId}`);
return { action: 'revoke_sessions', userId, revokedAt: new Date().toISOString() };
}
/**
* Action: Require 2FA for affected accounts
*/
async actionRequire2FA(action, triggerEvent, context) {
const target = action.target || 'affected_accounts';
// TODO: Implement 2FA requirement
logger.warn(`[ResponseProtocolManager] Action: Require 2FA for ${target}`);
return { action: 'require_2fa', target, enabledAt: new Date().toISOString() };
}
/**
* Action: Rate limit IP
*/
async actionRateLimitIP(action, triggerEvent, context) {
const ipAddress = context.ip_address || triggerEvent.ip_address;
const limit = action.limit || 10;
const windowMinutes = action.window_minutes || 10;
if (!ipAddress) {
throw new Error('No IP address provided for rate limiting');
}
// TODO: Implement actual rate limiting
logger.warn(`[ResponseProtocolManager] Action: Rate limit IP ${ipAddress} to ${limit} requests per ${windowMinutes} minutes`);
return { action: 'rate_limit_ip', ipAddress, limit, windowMinutes };
}
/**
* Action: Notify administrator
*/
async actionNotifyAdmin(action, triggerEvent, context) {
const channel = action.channel || 'in_app';
const priority = action.priority || 'medium';
// TODO: Implement actual admin notification (email, SMS, webhook)
logger.warn(`[ResponseProtocolManager] Action: Notify admin via ${channel} (priority: ${priority})`);
return { action: 'notify_admin', channel, priority, notifiedAt: new Date().toISOString() };
}
/**
* Action: Escalate incident
*/
async actionEscalateIncident(action, triggerEvent, context) {
const level = action.level || 'security_team';
// TODO: Implement actual incident escalation
logger.warn(`[ResponseProtocolManager] Action: Escalate incident to ${level}`);
return { action: 'escalate_incident', level, escalatedAt: new Date().toISOString() };
}
/**
* Action: Log incident
*/
async actionLogIncident(action, triggerEvent, context) {
const category = action.category || 'security_incident';
logAggregator.aggregate('response_protocol_manager', 'error', category, 'Security incident logged', {
triggerEvent,
context
});
return { action: 'log_incident', category, loggedAt: new Date().toISOString() };
}
/**
* Action: Enable enhanced monitoring
*/
async actionEnableEnhancedMonitoring(action, triggerEvent, context) {
const duration = action.duration_minutes || 120;
// TODO: Implement enhanced monitoring mode
logger.warn(`[ResponseProtocolManager] Action: Enable enhanced monitoring for ${duration} minutes`);
return {
action: 'enable_enhanced_monitoring',
duration,
expiresAt: new Date(Date.now() + duration * 60 * 1000).toISOString()
};
}
/**
* Check if trigger condition matches event
*/
matchesTriggerCondition(condition, event) {
for (const [key, value] of Object.entries(condition)) {
if (event[key] !== value) {
return false;
}
}
return true;
}
/**
* Check if protocol is in cooldown
*/
isInCooldown(protocolId) {
const lastExecution = this.executionHistory.get(protocolId);
if (!lastExecution) return false;
const cooldownEnd = new Date(lastExecution.cooldownUntil);
return Date.now() < cooldownEnd.getTime();
}
/**
* Set cooldown for protocol
*/
setCooldown(protocolId, cooldownMinutes) {
this.executionHistory.set(protocolId, {
lastExecuted: new Date().toISOString(),
cooldownUntil: new Date(Date.now() + cooldownMinutes * 60 * 1000).toISOString()
});
}
/**
 * Save execution history to database.
 * @param {object} data - Pre-serialized fields (triggerEvent/actionsExecuted/
 *   executionResult are JSON strings); executed_by defaults to 'system' at
 *   the schema level.
 */
async saveExecutionHistory(data) {
return new Promise((resolve, reject) => {
db.run(
`INSERT INTO protocol_executions
(execution_id, protocol_id, trigger_event, actions_executed, execution_status, execution_result)
VALUES (?, ?, ?, ?, ?, ?)`,
[
data.executionId,
data.protocolId,
data.triggerEvent,
data.actionsExecuted,
data.executionStatus,
data.executionResult
],
(err) => {
if (err) reject(err);
else resolve();
}
);
});
}
/**
* Get all protocols
*/
async getProtocols(filters = {}) {
const { triggerType, severity, enabled, limit = 100 } = filters;
let whereClause = [];
let params = [];
if (triggerType) {
whereClause.push('trigger_type = ?');
params.push(triggerType);
}
if (severity) {
whereClause.push('severity = ?');
params.push(severity);
}
if (enabled !== undefined) {
whereClause.push('enabled = ?');
params.push(enabled ? 1 : 0);
}
const where = whereClause.length > 0 ? `WHERE ${whereClause.join(' AND ')}` : '';
params.push(limit);
return new Promise((resolve, reject) => {
db.all(
`SELECT * FROM response_protocols ${where}
ORDER BY severity DESC, trigger_type
LIMIT ?`,
params,
(err, rows) => {
if (err) reject(err);
else resolve(rows);
}
);
});
}
/**
* Get protocol by ID
*/
async getProtocolById(protocolId) {
return new Promise((resolve, reject) => {
db.get(
`SELECT * FROM response_protocols WHERE protocol_id = ?`,
[protocolId],
(err, row) => {
if (err) reject(err);
else resolve(row);
}
);
});
}
/**
* Get execution history
*/
async getExecutionHistory(filters = {}) {
const { protocolId, status, limit = 100 } = filters;
let whereClause = [];
let params = [];
if (protocolId) {
whereClause.push('protocol_id = ?');
params.push(protocolId);
}
if (status) {
whereClause.push('execution_status = ?');
params.push(status);
}
const where = whereClause.length > 0 ? `WHERE ${whereClause.join(' AND ')}` : '';
params.push(limit);
return new Promise((resolve, reject) => {
db.all(
`SELECT * FROM protocol_executions ${where}
ORDER BY executed_at DESC
LIMIT ?`,
params,
(err, rows) => {
if (err) reject(err);
else resolve(rows);
}
);
});
}
/**
* Create new protocol
* CWE-778: Logs protocol creation
*/
async createProtocol(data, userId) {
const protocolId = `PROTOCOL-${Date.now()}-${Math.random().toString(36).substr(2, 9).toUpperCase()}`;
return new Promise((resolve, reject) => {
db.run(
`INSERT INTO response_protocols
(protocol_id, name, description, trigger_type, trigger_condition, actions, severity, enabled, auto_execute, cooldown_minutes)
VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?)`,
[
protocolId,
data.name,
data.description || '',
data.trigger_type,
JSON.stringify(data.trigger_condition),
JSON.stringify(data.actions),
data.severity,
data.enabled !== undefined ? (data.enabled ? 1 : 0) : 1,
data.auto_execute !== undefined ? (data.auto_execute ? 1 : 0) : 0,
data.cooldown_minutes || 60
],
async (err) => {
if (err) {
reject(err);
} else {
await this.loadProtocols();
// Log protocol creation (CWE-778)
logAggregator.aggregate('response_protocol_manager', 'info', 'security', 'Response protocol created', {
protocolId,
userId,
name: data.name,
triggerType: data.trigger_type,
severity: data.severity,
autoExecute: data.auto_execute === 1
});
logger.info(`[ResponseProtocolManager] Protocol created: ${protocolId} by user ${userId}`);
resolve({ protocolId });
}
}
);
});
}
/**
 * Update protocol.
 * CWE-778: Logs protocol modifications.
 *
 * Only fields in the allowlist are applied; column names in the SET clause
 * come exclusively from that allowlist, so user input never reaches the SQL
 * text (values are bound as parameters). JSON fields are re-serialized and
 * boolean flags normalized to 0/1. Throws when no valid field is supplied.
 */
async updateProtocol(protocolId, updates, userId) {
const allowedFields = ['name', 'description', 'trigger_condition', 'actions', 'severity', 'enabled', 'auto_execute', 'cooldown_minutes'];
const setClause = [];
const params = [];
for (const [key, value] of Object.entries(updates)) {
if (allowedFields.includes(key)) {
setClause.push(`${key} = ?`);
if (key === 'trigger_condition' || key === 'actions') {
params.push(JSON.stringify(value));
} else if (key === 'enabled' || key === 'auto_execute') {
params.push(value ? 1 : 0);
} else {
params.push(value);
}
}
}
if (setClause.length === 0) {
throw new Error('No valid fields to update');
}
setClause.push('updated_at = CURRENT_TIMESTAMP');
params.push(protocolId);
return new Promise((resolve, reject) => {
db.run(
`UPDATE response_protocols
SET ${setClause.join(', ')}
WHERE protocol_id = ?`,
params,
async (err) => {
if (err) {
reject(err);
} else {
// Refresh the in-memory cache so changes take effect immediately.
await this.loadProtocols();
// Log protocol update (CWE-778)
// NOTE(review): `updates` is logged verbatim — confirm it can never
// carry sensitive values.
logAggregator.aggregate('response_protocol_manager', 'info', 'security', 'Response protocol updated', {
protocolId,
userId,
updates
});
logger.info(`[ResponseProtocolManager] Protocol updated: ${protocolId} by user ${userId}`);
resolve({ success: true });
}
}
);
});
}
/**
* Delete protocol
* CWE-778: Logs protocol deletion
*/
async deleteProtocol(protocolId, userId) {
return new Promise((resolve, reject) => {
db.run(
`DELETE FROM response_protocols WHERE protocol_id = ?`,
[protocolId],
async (err) => {
if (err) {
reject(err);
} else {
await this.loadProtocols();
// Log protocol deletion (CWE-778)
logAggregator.aggregate('response_protocol_manager', 'warn', 'security', 'Response protocol deleted', {
protocolId,
userId
});
logger.info(`[ResponseProtocolManager] Protocol deleted: ${protocolId} by user ${userId}`);
resolve({ success: true });
}
}
);
});
}
/**
 * Get protocol statistics.
 * Single aggregate row: totals, enabled/disabled split, auto-execute count,
 * and the number of distinct trigger types and severities.
 */
async getStatistics() {
return new Promise((resolve, reject) => {
db.get(
`SELECT
COUNT(*) as total,
SUM(CASE WHEN enabled = 1 THEN 1 ELSE 0 END) as enabled,
SUM(CASE WHEN enabled = 0 THEN 1 ELSE 0 END) as disabled,
SUM(CASE WHEN auto_execute = 1 THEN 1 ELSE 0 END) as auto_execute_enabled,
COUNT(DISTINCT trigger_type) as unique_triggers,
COUNT(DISTINCT severity) as unique_severities
FROM response_protocols`,
[],
(err, row) => {
if (err) reject(err);
else resolve(row);
}
);
});
}
}
// Create singleton instance
// The constructor kicks off async table creation; see the constructor note.
const responseProtocolManager = new ResponseProtocolManager();
module.exports = responseProtocolManager;

View file

@ -0,0 +1,565 @@
/**
* Risk Signature Manager
* Predefined risk signatures for threat detection
* CWE-778 Compliance: Logs all signature matches and management operations
*/
const logger = require('./logger');
const logAggregator = require('./logAggregator');
const { db } = require('../database/db');
class RiskSignatureManager {
// signatures: in-memory cache of enabled rows, keyed by signature_id.
constructor() {
this.signatures = new Map();
// NOTE(review): initialize() is async and not awaited; matchSignatures()
// calls made before it finishes will see an empty cache — confirm
// acceptable.
this.initialize();
}
/**
 * Initialize risk signature manager.
 * Creates/seeds the signatures table, loads enabled signatures into memory
 * and logs the startup (CWE-778). Invoked fire-and-forget by the constructor.
 */
async initialize() {
await this.createSignaturesTable();
await this.loadSignatures();
logger.info('[RiskSignatureManager] Initialized with predefined risk signatures');
// Log initialization (CWE-778)
logAggregator.aggregate('risk_signature_manager', 'info', 'security', 'Risk signature manager initialized', {
totalSignatures: this.signatures.size
});
}
/**
 * Create risk signatures table.
 * Creates the table (if missing), its two lookup indexes, and seeds the
 * default signatures.
 */
async createSignaturesTable() {
return new Promise((resolve, reject) => {
db.run(`
CREATE TABLE IF NOT EXISTS risk_signatures (
id INTEGER PRIMARY KEY AUTOINCREMENT,
signature_id TEXT UNIQUE NOT NULL,
name TEXT NOT NULL,
description TEXT,
signature_type TEXT NOT NULL,
pattern TEXT NOT NULL,
match_type TEXT NOT NULL,
threat_level TEXT NOT NULL,
confidence REAL DEFAULT 0.8,
enabled INTEGER DEFAULT 1,
auto_block INTEGER DEFAULT 0,
created_at DATETIME DEFAULT CURRENT_TIMESTAMP,
updated_at DATETIME DEFAULT CURRENT_TIMESTAMP
)
`, async (err) => {
if (err) reject(err);
else {
// Index creation is fire-and-forget; errors are not surfaced.
db.run(`CREATE INDEX IF NOT EXISTS idx_signatures_type ON risk_signatures(signature_type, enabled)`);
db.run(`CREATE INDEX IF NOT EXISTS idx_signatures_threat ON risk_signatures(threat_level, enabled)`);
// NOTE(review): if createDefaultSignatures() rejects, the rejection
// is swallowed by this async callback and the outer promise never
// settles — confirm whether that is acceptable.
await this.createDefaultSignatures();
resolve();
}
});
});
}
/**
 * Create default risk signatures.
 * Seeds the risk_signatures table with a baseline set. Uses INSERT OR IGNORE
 * so rows already present (possibly tuned by an admin) are never overwritten.
 * match_type 'custom' patterns are NOT evaluated by testPattern(); they are
 * placeholders handled by dedicated detection code elsewhere.
 */
async createDefaultSignatures() {
const defaultSignatures = [
// IP-based signatures
{
signature_id: 'SIG-IP-TOR',
name: 'TOR Exit Node',
description: 'Known TOR exit node IP address',
signature_type: 'ip_address',
// NOTE(review): this regex matches RFC1918 private ranges (10/8,
// 172.16/12, 192.168/16), not TOR exit nodes — the name/description
// appear mislabeled; confirm intended behavior before relying on it.
pattern: '(^10\\.\\d+\\.\\d+\\.\\d+|^172\\.(1[6-9]|2[0-9]|3[01])\\.\\d+\\.\\d+|^192\\.168\\.\\d+\\.\\d+)',
match_type: 'regex',
threat_level: 'high',
confidence: 0.9,
auto_block: 0
},
{
signature_id: 'SIG-IP-SUSPICIOUS',
name: 'Suspicious IP Range',
description: 'IP from suspicious geographic region',
signature_type: 'ip_address',
// Empty pattern: evaluated by custom geo logic, not by testPattern().
pattern: '',
match_type: 'custom',
threat_level: 'medium',
confidence: 0.7,
auto_block: 0
},
// User-agent signatures
{
signature_id: 'SIG-UA-BOT-MALICIOUS',
name: 'Malicious Bot User-Agent',
description: 'Known malicious bot signatures',
signature_type: 'user_agent',
pattern: '(scrapy|python-requests|curl|wget|nikto|sqlmap|havij|acunetix|nessus|openvas)',
match_type: 'regex_case_insensitive',
threat_level: 'high',
confidence: 0.95,
auto_block: 1
},
{
signature_id: 'SIG-UA-VULNERABILITY-SCANNER',
name: 'Vulnerability Scanner',
description: 'Automated vulnerability scanning tools',
signature_type: 'user_agent',
pattern: '(nmap|masscan|zap|burp|metasploit|w3af|arachni)',
match_type: 'regex_case_insensitive',
threat_level: 'critical',
confidence: 0.99,
auto_block: 1
},
// Attack pattern signatures
{
signature_id: 'SIG-ATTACK-SQL-INJECTION',
name: 'SQL Injection Pattern',
description: 'Common SQL injection attack patterns',
signature_type: 'attack_pattern',
pattern: '(union.*select|select.*from|insert.*into|delete.*from|drop.*table|exec.*xp_|script.*alert)',
match_type: 'regex_case_insensitive',
threat_level: 'critical',
confidence: 0.85,
auto_block: 1
},
{
signature_id: 'SIG-ATTACK-XSS',
name: 'Cross-Site Scripting Pattern',
description: 'XSS attack patterns',
signature_type: 'attack_pattern',
pattern: '(<script|javascript:|onerror=|onload=|<iframe|eval\\(|alert\\()',
match_type: 'regex_case_insensitive',
threat_level: 'high',
confidence: 0.8,
auto_block: 1
},
{
signature_id: 'SIG-ATTACK-PATH-TRAVERSAL',
name: 'Path Traversal Pattern',
description: 'Directory traversal attack patterns',
signature_type: 'attack_pattern',
pattern: '(\\.\\./|\\.\\.\\\\/|%2e%2e/|%252e%252e/)',
match_type: 'regex_case_insensitive',
threat_level: 'high',
confidence: 0.9,
auto_block: 1
},
{
signature_id: 'SIG-ATTACK-COMMAND-INJECTION',
name: 'Command Injection Pattern',
description: 'OS command injection patterns',
signature_type: 'attack_pattern',
pattern: '(;\\s*(rm|cat|ls|wget|curl|bash|sh|cmd|powershell)|\\|\\s*(nc|netcat))',
match_type: 'regex_case_insensitive',
threat_level: 'critical',
confidence: 0.95,
auto_block: 1
},
// Behavioral signatures (patterns are metric names for custom detectors)
{
signature_id: 'SIG-BEHAVIOR-BRUTE-FORCE',
name: 'Brute Force Behavior',
description: 'Rapid repeated authentication attempts',
signature_type: 'behavior',
pattern: 'failed_login_rate',
match_type: 'custom',
threat_level: 'critical',
confidence: 0.9,
auto_block: 1
},
{
signature_id: 'SIG-BEHAVIOR-CREDENTIAL-STUFFING',
name: 'Credential Stuffing Behavior',
description: 'Multiple username attempts from single source',
signature_type: 'behavior',
pattern: 'unique_username_rate',
match_type: 'custom',
threat_level: 'high',
confidence: 0.85,
auto_block: 1
},
{
signature_id: 'SIG-BEHAVIOR-PRIVILEGE-ESC',
name: 'Privilege Escalation Behavior',
description: 'Repeated unauthorized access attempts',
signature_type: 'behavior',
pattern: 'authorization_failure_rate',
match_type: 'custom',
threat_level: 'critical',
confidence: 0.95,
auto_block: 1
},
{
signature_id: 'SIG-BEHAVIOR-DATA-EXFIL',
name: 'Data Exfiltration Behavior',
description: 'Unusual data download patterns',
signature_type: 'behavior',
pattern: 'download_volume_rate',
match_type: 'custom',
threat_level: 'high',
confidence: 0.8,
auto_block: 0
}
];
// Inserted sequentially; enabled defaults to 1 via the table schema.
for (const signature of defaultSignatures) {
await new Promise((resolve, reject) => {
db.run(
`INSERT OR IGNORE INTO risk_signatures
(signature_id, name, description, signature_type, pattern, match_type, threat_level, confidence, auto_block)
VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?)`,
[
signature.signature_id,
signature.name,
signature.description,
signature.signature_type,
signature.pattern,
signature.match_type,
signature.threat_level,
signature.confidence,
signature.auto_block
],
(err) => {
if (err) reject(err);
else resolve();
}
);
});
}
logger.info(`[RiskSignatureManager] Created ${defaultSignatures.length} default signatures`);
}
/**
 * Load signatures from database into memory.
 * Replaces the in-memory cache with all enabled signatures; patterns stay as
 * raw strings and are compiled on demand by testPattern().
 */
async loadSignatures() {
return new Promise((resolve, reject) => {
db.all(
`SELECT * FROM risk_signatures WHERE enabled = 1`,
[],
(err, rows) => {
if (err) {
reject(err);
} else {
this.signatures.clear();
rows.forEach(row => {
this.signatures.set(row.signature_id, row);
});
logger.info(`[RiskSignatureManager] Loaded ${rows.length} active signatures`);
resolve();
}
}
);
});
}
/**
 * Match input against risk signatures.
 * CWE-778: Logs all signature matches.
 *
 * Tests `input` against every cached signature of the given type and reports
 * all hits, the highest threat level among them, and whether any matched
 * signature requests an automatic block.
 *
 * @param {string} input - Value to test (IP, user-agent, request fragment...).
 * @param {string} signatureType - e.g. 'ip_address', 'user_agent',
 *   'attack_pattern'; 'behavior'/'custom' signatures never match here.
 * @param {object} [context] - Extra data carried into matches and logs.
 * @returns {Promise<object>} { matched, signatures, highestThreat?,
 *   shouldAutoBlock? }.
 */
async matchSignatures(input, signatureType, context = {}) {
const matchingSignatures = Array.from(this.signatures.values()).filter(
s => s.signature_type === signatureType
);
if (matchingSignatures.length === 0) {
return { matched: false, signatures: [] };
}
const matches = [];
for (const signature of matchingSignatures) {
const matched = this.testPattern(input, signature.pattern, signature.match_type);
if (matched) {
matches.push({
...signature,
matchedInput: input,
context
});
// Log signature match (CWE-778)
logAggregator.aggregate('risk_signature_manager', 'warn', 'security', 'Risk signature matched', {
signatureId: signature.signature_id,
signatureName: signature.name,
signatureType,
threatLevel: signature.threat_level,
confidence: signature.confidence,
autoBlock: signature.auto_block === 1,
matchedInput: input.substring(0, 100), // Truncate for logging
context
});
logger.warn(`[RiskSignatureManager] Signature matched: ${signature.name} (${signature.threat_level})`);
}
}
return {
matched: matches.length > 0,
signatures: matches,
highestThreat: matches.length > 0 ? this.getHighestThreatLevel(matches) : null,
shouldAutoBlock: matches.some(m => m.auto_block === 1)
};
}
/**
* Test pattern against input
*/
testPattern(input, pattern, matchType) {
try {
switch (matchType) {
case 'regex':
return new RegExp(pattern).test(input);
case 'regex_case_insensitive':
return new RegExp(pattern, 'i').test(input);
case 'exact':
return input === pattern;
case 'contains':
return input.includes(pattern);
case 'custom':
// Custom patterns handled by specific detection methods
return false;
default:
return false;
}
} catch (error) {
logger.error(`[RiskSignatureManager] Pattern test error: ${error.message}`);
return false;
}
}
/**
* Get highest threat level from matches
*/
getHighestThreatLevel(matches) {
const threatLevels = { critical: 4, high: 3, medium: 2, low: 1 };
let highest = 'low';
let highestScore = 0;
for (const match of matches) {
const score = threatLevels[match.threat_level] || 0;
if (score > highestScore) {
highestScore = score;
highest = match.threat_level;
}
}
return highest;
}
/**
* Get all signatures
*/
async getSignatures(filters = {}) {
const { signatureType, threatLevel, enabled, limit = 100 } = filters;
let whereClause = [];
let params = [];
if (signatureType) {
whereClause.push('signature_type = ?');
params.push(signatureType);
}
if (threatLevel) {
whereClause.push('threat_level = ?');
params.push(threatLevel);
}
if (enabled !== undefined) {
whereClause.push('enabled = ?');
params.push(enabled ? 1 : 0);
}
const where = whereClause.length > 0 ? `WHERE ${whereClause.join(' AND ')}` : '';
params.push(limit);
return new Promise((resolve, reject) => {
db.all(
`SELECT * FROM risk_signatures ${where}
ORDER BY threat_level DESC, confidence DESC
LIMIT ?`,
params,
(err, rows) => {
if (err) reject(err);
else resolve(rows);
}
);
});
}
/**
* Get signature by ID
*/
async getSignatureById(signatureId) {
return new Promise((resolve, reject) => {
db.get(
`SELECT * FROM risk_signatures WHERE signature_id = ?`,
[signatureId],
(err, row) => {
if (err) reject(err);
else resolve(row);
}
);
});
}
/**
* Create new signature
* CWE-778: Logs signature creation
*/
async createSignature(data, userId) {
const signatureId = `SIG-${Date.now()}-${Math.random().toString(36).substr(2, 9).toUpperCase()}`;
return new Promise((resolve, reject) => {
db.run(
`INSERT INTO risk_signatures
(signature_id, name, description, signature_type, pattern, match_type, threat_level, confidence, enabled, auto_block)
VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?)`,
[
signatureId,
data.name,
data.description || '',
data.signature_type,
data.pattern,
data.match_type,
data.threat_level,
data.confidence || 0.8,
data.enabled !== undefined ? (data.enabled ? 1 : 0) : 1,
data.auto_block !== undefined ? (data.auto_block ? 1 : 0) : 0
],
async (err) => {
if (err) {
reject(err);
} else {
await this.loadSignatures();
// Log signature creation (CWE-778)
logAggregator.aggregate('risk_signature_manager', 'info', 'security', 'Risk signature created', {
signatureId,
userId,
name: data.name,
signatureType: data.signature_type,
threatLevel: data.threat_level,
autoBlock: data.auto_block === 1
});
logger.info(`[RiskSignatureManager] Signature created: ${signatureId} by user ${userId}`);
resolve({ signatureId });
}
}
);
});
}
/**
* Update signature
* CWE-778: Logs signature modifications
*/
async updateSignature(signatureId, updates, userId) {
const allowedFields = ['name', 'description', 'pattern', 'match_type', 'threat_level', 'confidence', 'enabled', 'auto_block'];
const setClause = [];
const params = [];
for (const [key, value] of Object.entries(updates)) {
if (allowedFields.includes(key)) {
setClause.push(`${key} = ?`);
params.push((key === 'enabled' || key === 'auto_block') ? (value ? 1 : 0) : value);
}
}
if (setClause.length === 0) {
throw new Error('No valid fields to update');
}
setClause.push('updated_at = CURRENT_TIMESTAMP');
params.push(signatureId);
return new Promise((resolve, reject) => {
db.run(
`UPDATE risk_signatures
SET ${setClause.join(', ')}
WHERE signature_id = ?`,
params,
async (err) => {
if (err) {
reject(err);
} else {
await this.loadSignatures();
// Log signature update (CWE-778)
logAggregator.aggregate('risk_signature_manager', 'info', 'security', 'Risk signature updated', {
signatureId,
userId,
updates
});
logger.info(`[RiskSignatureManager] Signature updated: ${signatureId} by user ${userId}`);
resolve({ success: true });
}
}
);
});
}
/**
* Delete signature
* CWE-778: Logs signature deletion
*/
async deleteSignature(signatureId, userId) {
return new Promise((resolve, reject) => {
db.run(
`DELETE FROM risk_signatures WHERE signature_id = ?`,
[signatureId],
async (err) => {
if (err) {
reject(err);
} else {
await this.loadSignatures();
// Log signature deletion (CWE-778)
logAggregator.aggregate('risk_signature_manager', 'warn', 'security', 'Risk signature deleted', {
signatureId,
userId
});
logger.info(`[RiskSignatureManager] Signature deleted: ${signatureId} by user ${userId}`);
resolve({ success: true });
}
}
);
});
}
/**
* Get signature statistics
*/
async getStatistics() {
return new Promise((resolve, reject) => {
db.get(
`SELECT
COUNT(*) as total,
SUM(CASE WHEN enabled = 1 THEN 1 ELSE 0 END) as enabled,
SUM(CASE WHEN enabled = 0 THEN 1 ELSE 0 END) as disabled,
SUM(CASE WHEN auto_block = 1 THEN 1 ELSE 0 END) as auto_block_enabled,
COUNT(DISTINCT signature_type) as unique_types,
COUNT(DISTINCT threat_level) as unique_threat_levels
FROM risk_signatures`,
[],
(err, row) => {
if (err) reject(err);
else resolve(row);
}
);
});
}
}
// Create singleton instance
// Shared singleton so every consumer reads the same in-memory signature cache.
const riskSignatureManager = new RiskSignatureManager();
module.exports = riskSignatureManager;

View file

@ -0,0 +1,227 @@
/**
* Route Protection Utility
* Ensures all routes have proper error handling (CWE-391 compliance)
* Wraps database callbacks and async operations with error handlers
*/
const logger = require('./logger');
const { sanitizeError } = require('./errorHandler');
/**
* Wrap database callback to ensure errors are caught
*
* @param {Function} callback - Database callback function
* @param {Object} res - Express response object
* @param {string} context - Context for error logging
* @returns {Function} Wrapped callback
*/
/**
 * Wrap a database callback so driver errors and callback exceptions become
 * sanitized 500 responses instead of unhandled throws (CWE-391).
 *
 * @param {Function} callback - Database callback function
 * @param {Object} res - Express response object
 * @param {string} context - Context for error logging
 * @returns {Function} Wrapped callback
 */
function wrapDbCallback(callback, res, context = 'Database operation') {
  // Shared error responder. The headersSent guard fixes a latent crash:
  // if the callback had already sent a response before throwing, the old
  // code's res.status(500) raised "Cannot set headers after they are sent".
  const respondWithError = (err, label) => {
    logger.error(`${context} ${label}:`, {
      error: err.message,
      stack: err.stack,
      context
    });
    const sanitized = sanitizeError(err);
    if (res.headersSent) {
      return undefined;
    }
    return res.status(500).json({
      error: sanitized.message,
      code: sanitized.code,
      timestamp: new Date().toISOString()
    });
  };
  return function(err, ...args) {
    if (err) {
      return respondWithError(err, 'error');
    }
    try {
      return callback(err, ...args);
    } catch (callbackError) {
      return respondWithError(callbackError, 'callback error');
    }
  };
}
/**
* Wrap sync route handler to catch any errors
*
* @param {Function} handler - Route handler function
* @returns {Function} Wrapped handler
*/
/**
 * Wrap a synchronous route handler so thrown errors are logged and
 * forwarded to next() instead of escaping (CWE-391).
 *
 * @param {Function} handler - Route handler function
 * @returns {Function} Wrapped handler
 */
function wrapSyncHandler(handler) {
  const wrapped = (req, res, next) => {
    try {
      return handler(req, res, next);
    } catch (error) {
      const details = {
        error: error.message,
        stack: error.stack,
        method: req.method,
        path: req.path
      };
      logger.error('Sync route handler error:', details);
      next(error);
    }
  };
  return wrapped;
}
/**
* Wrap async route handler to catch promise rejections
*
* @param {Function} handler - Async route handler function
* @returns {Function} Wrapped handler
*/
/**
 * Wrap an async route handler so rejected promises are logged and
 * forwarded to next() instead of becoming unhandled rejections (CWE-391).
 *
 * @param {Function} handler - Async route handler function
 * @returns {Function} Wrapped handler
 */
function wrapAsyncHandler(handler) {
  const wrapped = (req, res, next) => {
    const pending = Promise.resolve(handler(req, res, next));
    pending.catch((error) => {
      const details = {
        error: error.message,
        stack: error.stack,
        method: req.method,
        path: req.path
      };
      logger.error('Async route handler error:', details);
      next(error);
    });
  };
  return wrapped;
}
/**
* Smart route wrapper - automatically detects and wraps handlers
*
* @param {Function} handler - Route handler (sync or async)
* @returns {Function} Wrapped handler
*/
/**
 * Smart route wrapper — catches sync throws, returned promise rejections,
 * and async-function rejections, forwarding all of them to next().
 *
 * Fix: the old AsyncFunction-name check routed every plain function to the
 * sync wrapper, so a plain handler that RETURNED a promise (transpiled
 * async code, promise chains) had its rejections silently dropped.
 *
 * @param {Function} handler - Route handler (sync or async)
 * @returns {Function} Wrapped handler
 */
function protectRoute(handler) {
  // Genuine async functions keep the dedicated promise-aware wrapper.
  if (handler.constructor.name === 'AsyncFunction') {
    return wrapAsyncHandler(handler);
  }
  return function(req, res, next) {
    let result;
    try {
      result = handler(req, res, next);
    } catch (error) {
      logger.error('Sync route handler error:', {
        error: error.message,
        stack: error.stack,
        method: req.method,
        path: req.path
      });
      return next(error);
    }
    // Thenable result: attach a rejection handler so it is never dropped.
    if (result && typeof result.then === 'function') {
      Promise.resolve(result).catch((error) => {
        logger.error('Async route handler error:', {
          error: error.message,
          stack: error.stack,
          method: req.method,
          path: req.path
        });
        next(error);
      });
    }
    return result;
  };
}
/**
* Protect all routes in a router
*
* @param {Object} router - Express router
* @param {Array} routes - Array of route definitions
*/
/**
 * Register an array of route definitions on a router, wrapping every
 * handler with protectRoute before mounting it.
 *
 * @param {Object} router - Express router
 * @param {Array} routes - Array of { method, path, middleware, handler }
 */
function protectAllRoutes(router, routes) {
  for (const route of routes) {
    const { method, path, middleware = [], handler } = route;
    router[method](path, ...middleware, protectRoute(handler));
  }
}
/**
* Create a safe database wrapper with automatic error handling
*
* @param {Object} db - Database connection
* @returns {Object} Wrapped database object
*/
/**
 * Create a safe database wrapper with automatic error handling.
 * run/get/all log driver errors and contain callback exceptions so a bad
 * route callback can never crash the process (CWE-391).
 *
 * Fix: sqlite3 allows the optional-params call form db.run(sql, cb); the
 * old wrapper forwarded the callback in the params position and never
 * invoked it itself. Arguments are now normalized first.
 *
 * @param {Object} db - Database connection
 * @returns {Object} Wrapped database object
 */
function createSafeDb(db) {
  // Normalize (sql, params?, callback?) into a definite [params, callback].
  const normalizeArgs = (params, callback) =>
    typeof params === 'function' ? [[], params] : [params, callback];

  // One guarded wrapper shared by run/get/all (they only differ by name).
  const wrapMethod = (method) => (sql, params, callback) => {
    const [boundParams, cb] = normalizeArgs(params, callback);
    try {
      return db[method](sql, boundParams, (err, ...args) => {
        if (err) {
          logger.error(`Database ${method} error:`, {
            error: err.message,
            sql: sql.substring(0, 100) // Log first 100 chars only
          });
        }
        if (cb) {
          try {
            return cb(err, ...args);
          } catch (callbackError) {
            logger.error(`Database ${method} callback error:`, callbackError);
          }
        }
      });
    } catch (error) {
      logger.error(`Database ${method} exception:`, error);
      if (cb) {
        cb(error);
      }
    }
  };

  return {
    run: wrapMethod('run'),
    get: wrapMethod('get'),
    all: wrapMethod('all'),
    // Pass through other methods
    close: (...args) => db.close(...args),
    serialize: (...args) => db.serialize(...args),
    parallelize: (...args) => db.parallelize(...args)
  };
}
// Public API: per-callback/handler wrappers plus the safe-db factory.
module.exports = {
  wrapDbCallback,
  wrapSyncHandler,
  wrapAsyncHandler,
  protectRoute,
  protectAllRoutes,
  createSafeDb
};

View file

@ -0,0 +1,531 @@
/**
* Security Audit Logger
* Tracks security-related events for compliance and forensics
* Integrated with SIEM for centralized log aggregation
*/
const logger = require('./logger');
const { db } = require('../database/db');
const logAggregator = require('./logAggregator');
class SecurityAuditLogger {
  /**
   * Log authentication events.
   * Core writer: every other log* helper funnels through here. Writes one
   * row to security_audit_log and mirrors it to the SIEM aggregator.
   * Failures are logged and swallowed so auditing never crashes the caller.
   */
  static async logAuthEvent(eventType, userId, details = {}) {
    const event = {
      event_type: eventType,
      user_id: userId,
      ip_address: details.ip || 'unknown',
      user_agent: details.userAgent || 'unknown',
      success: details.success !== false,
      failure_reason: details.failureReason || null,
      metadata: JSON.stringify(details.metadata || {}),
      timestamp: new Date().toISOString()
    };
    try {
      await new Promise((resolve, reject) => {
        db.run(
          `INSERT INTO security_audit_log (event_type, user_id, ip_address, user_agent, success, failure_reason, metadata, timestamp)
           VALUES (?, ?, ?, ?, ?, ?, ?, ?)`,
          [event.event_type, event.user_id, event.ip_address, event.user_agent, event.success ? 1 : 0, event.failure_reason, event.metadata, event.timestamp],
          (err) => err ? reject(err) : resolve()
        );
      });
      logger.info(`[SECURITY] ${eventType}: user=${userId}, ip=${event.ip_address}, success=${event.success}`);
      // Aggregate to SIEM
      const level = event.success ? 'info' : 'warn';
      const source = eventType.startsWith('LOGIN') || eventType.includes('PASSWORD') ? 'authentication' : 'security_audit';
      logAggregator.aggregate(source, level, 'authentication', `${eventType}: ${event.success ? 'success' : 'failure'}`, {
        userId: event.user_id,
        ip: event.ip_address,
        userAgent: event.user_agent,
        failureReason: event.failure_reason,
        metadata: details.metadata || {}
      });
    } catch (error) {
      logger.error('Failed to log security event:', error);
    }
  }
  /**
   * Log login attempt (username recorded in metadata; no user id yet)
   */
  static async logLoginAttempt(username, success, details = {}) {
    return this.logAuthEvent('LOGIN_ATTEMPT', null, {
      success,
      failureReason: success ? null : (details.reason || 'Invalid credentials'),
      ip: details.ip,
      userAgent: details.userAgent,
      metadata: { username }
    });
  }
  /**
   * Log successful login
   */
  static async logLoginSuccess(userId, details = {}) {
    return this.logAuthEvent('LOGIN_SUCCESS', userId, {
      success: true,
      ip: details.ip,
      userAgent: details.userAgent,
      metadata: { twoFactorUsed: details.twoFactorUsed || false }
    });
  }
  /**
   * Log failed login
   */
  static async logLoginFailure(username, reason, details = {}) {
    return this.logAuthEvent('LOGIN_FAILURE', null, {
      success: false,
      failureReason: reason,
      ip: details.ip,
      userAgent: details.userAgent,
      metadata: { username }
    });
  }
  /**
   * Log account lockout
   */
  static async logAccountLockout(userId, details = {}) {
    return this.logAuthEvent('ACCOUNT_LOCKOUT', userId, {
      success: false,
      failureReason: 'Too many failed login attempts',
      ip: details.ip,
      userAgent: details.userAgent,
      metadata: { failedAttempts: details.failedAttempts }
    });
  }
  /**
   * Log password change
   */
  static async logPasswordChange(userId, details = {}) {
    return this.logAuthEvent('PASSWORD_CHANGE', userId, {
      success: true,
      ip: details.ip,
      userAgent: details.userAgent,
      metadata: { forced: details.forced || false, expired: details.expired || false }
    });
  }
  /**
   * Log 2FA events (eventType supplied by caller, e.g. verification result)
   */
  static async log2FAEvent(eventType, userId, success, details = {}) {
    return this.logAuthEvent(eventType, userId, {
      success,
      failureReason: success ? null : (details.reason || 'Invalid code'),
      ip: details.ip,
      userAgent: details.userAgent,
      metadata: details.metadata || {}
    });
  }
  /**
   * Log session events
   */
  static async logSessionEvent(eventType, userId, details = {}) {
    return this.logAuthEvent(eventType, userId, {
      success: true,
      ip: details.ip,
      userAgent: details.userAgent,
      metadata: { sessionId: details.sessionId }
    });
  }
  /**
   * Log privilege escalation
   */
  static async logPrivilegeEscalation(userId, details = {}) {
    return this.logAuthEvent('PRIVILEGE_ESCALATION', userId, {
      success: true,
      ip: details.ip,
      userAgent: details.userAgent,
      metadata: {
        fromRole: details.fromRole,
        toRole: details.toRole,
        grantedBy: details.grantedBy
      }
    });
  }
  /**
   * Get recent security events for a user
   */
  static async getUserSecurityEvents(userId, limit = 50) {
    return new Promise((resolve, reject) => {
      db.all(
        `SELECT * FROM security_audit_log
         WHERE user_id = ?
         ORDER BY timestamp DESC
         LIMIT ?`,
        [userId, limit],
        (err, rows) => err ? reject(err) : resolve(rows)
      );
    });
  }
  /**
   * Get failed login attempts for an IP within the time window
   */
  static async getFailedAttemptsForIP(ipAddress, timeWindowMinutes = 30) {
    const cutoffTime = new Date(Date.now() - timeWindowMinutes * 60 * 1000).toISOString();
    return new Promise((resolve, reject) => {
      db.get(
        `SELECT COUNT(*) as count
         FROM security_audit_log
         WHERE event_type IN ('LOGIN_FAILURE', 'LOGIN_ATTEMPT')
         AND success = 0
         AND ip_address = ?
         AND timestamp > ?`,
        [ipAddress, cutoffTime],
        (err, row) => err ? reject(err) : resolve(row.count || 0)
      );
    });
  }
  /**
   * Get failed login attempts for a user within the time window.
   * NOTE(review): matches the username via a JSON substring LIKE against the
   * metadata column; a username containing quote characters would not match
   * its own rows — confirm usernames are restricted upstream.
   */
  static async getFailedAttemptsForUser(username, timeWindowMinutes = 30) {
    const cutoffTime = new Date(Date.now() - timeWindowMinutes * 60 * 1000).toISOString();
    return new Promise((resolve, reject) => {
      db.get(
        `SELECT COUNT(*) as count
         FROM security_audit_log
         WHERE event_type IN ('LOGIN_FAILURE', 'LOGIN_ATTEMPT')
         AND success = 0
         AND metadata LIKE ?
         AND timestamp > ?`,
        [`%"username":"${username}"%`, cutoffTime],
        (err, row) => err ? reject(err) : resolve(row.count || 0)
      );
    });
  }
  /**
   * Clear old audit logs (data retention); resolves with rows deleted
   */
  static async cleanupOldLogs(retentionDays = 90) {
    const cutoffDate = new Date(Date.now() - retentionDays * 24 * 60 * 60 * 1000).toISOString();
    return new Promise((resolve, reject) => {
      db.run(
        'DELETE FROM security_audit_log WHERE timestamp < ?',
        [cutoffDate],
        function(err) {
          // function(): sqlite3 exposes this.changes on the callback context
          if (err) reject(err);
          else {
            logger.info(`Cleaned up ${this.changes} old security audit logs`);
            resolve(this.changes);
          }
        }
      );
    });
  }
  /**
   * CWE-778: Log token issuance (JWT, OAuth, etc.)
   * Includes relevant metadata such as client ID, IP address, device info
   */
  static async logTokenIssuance(userId, tokenType, details = {}) {
    return this.logAuthEvent('TOKEN_ISSUED', userId, {
      success: true,
      ip: details.ip,
      userAgent: details.userAgent,
      metadata: {
        tokenType: tokenType, // 'JWT', 'REFRESH', 'TEMP_2FA', 'OAUTH'
        clientId: details.clientId,
        deviceInfo: details.deviceInfo || this.extractDeviceInfo(details.userAgent),
        expiresIn: details.expiresIn,
        purpose: details.purpose, // 'login', '2fa', 'registration', 'password_reset'
        scope: details.scope,
        grantType: details.grantType
      }
    });
  }
  /**
   * CWE-778: Log token refresh
   */
  static async logTokenRefresh(userId, details = {}) {
    return this.logAuthEvent('TOKEN_REFRESHED', userId, {
      success: true,
      ip: details.ip,
      userAgent: details.userAgent,
      metadata: {
        tokenType: details.tokenType || 'JWT',
        clientId: details.clientId,
        deviceInfo: details.deviceInfo || this.extractDeviceInfo(details.userAgent),
        oldTokenExpiry: details.oldTokenExpiry,
        newTokenExpiry: details.newTokenExpiry
      }
    });
  }
  /**
   * CWE-778: Log token revocation
   */
  static async logTokenRevocation(userId, reason, details = {}) {
    return this.logAuthEvent('TOKEN_REVOKED', userId, {
      success: true,
      ip: details.ip,
      userAgent: details.userAgent,
      metadata: {
        reason: reason, // 'logout', 'password_change', 'security_breach', 'admin_action'
        tokenType: details.tokenType || 'JWT',
        sessionId: details.sessionId,
        revokedBy: details.revokedBy // user_id of admin who revoked it
      }
    });
  }
  /**
   * CWE-778: Log privilege changes
   * Any activities where user's privilege level changes
   */
  static async logPrivilegeChange(userId, action, details = {}) {
    return this.logAuthEvent('PRIVILEGE_CHANGE', userId, {
      success: true,
      ip: details.ip,
      userAgent: details.userAgent,
      metadata: {
        action: action, // 'role_assigned', 'role_removed', 'permission_granted', 'permission_revoked'
        previousRole: details.previousRole,
        newRole: details.newRole,
        previousPermissions: details.previousPermissions,
        newPermissions: details.newPermissions,
        changedBy: details.changedBy, // user_id of admin who made the change
        changedByUsername: details.changedByUsername,
        reason: details.reason,
        affectedUser: details.affectedUser // username of the user being changed
      }
    });
  }
  /**
   * CWE-778: Log permission grant
   */
  static async logPermissionGrant(userId, permission, details = {}) {
    return this.logAuthEvent('PERMISSION_GRANTED', userId, {
      success: true,
      ip: details.ip,
      userAgent: details.userAgent,
      metadata: {
        permission: permission,
        grantedBy: details.grantedBy,
        grantedByUsername: details.grantedByUsername,
        scope: details.scope,
        expiresAt: details.expiresAt
      }
    });
  }
  /**
   * CWE-778: Log permission revocation
   */
  static async logPermissionRevocation(userId, permission, details = {}) {
    return this.logAuthEvent('PERMISSION_REVOKED', userId, {
      success: true,
      ip: details.ip,
      userAgent: details.userAgent,
      metadata: {
        permission: permission,
        revokedBy: details.revokedBy,
        revokedByUsername: details.revokedByUsername,
        reason: details.reason
      }
    });
  }
  /**
   * CWE-778: Log user activation/deactivation
   */
  static async logAccountStatusChange(userId, newStatus, details = {}) {
    return this.logAuthEvent('ACCOUNT_STATUS_CHANGED', userId, {
      success: true,
      ip: details.ip,
      userAgent: details.userAgent,
      metadata: {
        previousStatus: details.previousStatus,
        newStatus: newStatus, // 'active', 'inactive', 'suspended', 'locked'
        changedBy: details.changedBy,
        changedByUsername: details.changedByUsername,
        reason: details.reason
      }
    });
  }
  /**
   * CWE-778: Log sensitive data access
   * For accessing user lists, settings, VPN configs, backups, etc.
   */
  static async logSensitiveDataAccess(userId, dataType, details = {}) {
    return this.logAuthEvent('SENSITIVE_DATA_ACCESS', userId, {
      success: true,
      ip: details.ip,
      userAgent: details.userAgent,
      metadata: {
        dataType: dataType, // 'user_list', 'user_details', 'settings', 'vpn_configs', 'backups', 'audit_logs'
        accessMethod: details.accessMethod || 'view', // 'view', 'export', 'download'
        recordCount: details.recordCount,
        filters: details.filters,
        scope: details.scope, // 'own', 'all', 'specific'
        deviceInfo: details.deviceInfo || this.extractDeviceInfo(details.userAgent)
      }
    });
  }
  /**
   * Extract coarse device info (type/OS/browser) from a user-agent string.
   * Pure string matching; never throws on empty/missing input.
   */
  static extractDeviceInfo(userAgent = '') {
    if (!userAgent) return { deviceType: 'unknown', os: 'unknown', browser: 'unknown' };
    const ua = userAgent.toLowerCase();
    // Device type
    let deviceType = 'desktop';
    if (/bot|crawler|spider/.test(ua)) deviceType = 'bot';
    else if (/mobile|android|iphone|ipod/.test(ua)) deviceType = 'mobile';
    else if (/tablet|ipad/.test(ua)) deviceType = 'tablet';
    // Operating System
    let os = 'unknown';
    if (/windows/.test(ua)) os = 'Windows';
    else if (/mac os|macos/.test(ua)) os = 'macOS';
    else if (/linux/.test(ua)) os = 'Linux';
    else if (/android/.test(ua)) os = 'Android';
    else if (/ios|iphone|ipad/.test(ua)) os = 'iOS';
    // Browser
    let browser = 'unknown';
    if (/firefox/.test(ua)) browser = 'Firefox';
    else if (/chrome/.test(ua) && !/edge|edg/.test(ua)) browser = 'Chrome';
    else if (/safari/.test(ua) && !/chrome/.test(ua)) browser = 'Safari';
    else if (/edge|edg/.test(ua)) browser = 'Edge';
    else if (/opera|opr/.test(ua)) browser = 'Opera';
    return { deviceType, os, browser };
  }
  /**
   * Get comprehensive audit statistics
   */
  static async getAuditStatistics(timeRangeDays = 30) {
    const cutoffDate = new Date(Date.now() - timeRangeDays * 24 * 60 * 60 * 1000).toISOString();
    return new Promise((resolve, reject) => {
      db.all(
        `SELECT
           event_type,
           success,
           COUNT(*) as count,
           COUNT(DISTINCT user_id) as unique_users,
           COUNT(DISTINCT ip_address) as unique_ips
         FROM security_audit_log
         WHERE timestamp > ?
         GROUP BY event_type, success
         ORDER BY count DESC`,
        [cutoffDate],
        (err, rows) => err ? reject(err) : resolve(rows)
      );
    });
  }
  /**
   * Log system events (startup, shutdown, cleanup, etc.)
   */
  static async logSystemEvent(eventType, success, details = {}) {
    return this.logAuthEvent(`SYSTEM_${eventType.toUpperCase()}`, null, {
      success,
      failureReason: success ? null : (details.error || 'System event failed'),
      metadata: details
    });
  }
  /**
   * Log security incidents (tampering, breaches, etc.)
   * Writes directly (not via logAuthEvent) so it can force success = 0 and
   * mirror to SIEM at critical level.
   */
  static async logSecurityIncident(incidentType, details = {}) {
    const event = {
      event_type: `SECURITY_INCIDENT_${incidentType.toUpperCase()}`,
      user_id: details.userId || null,
      ip_address: details.ip || 'system',
      user_agent: details.userAgent || 'system',
      success: false,
      failure_reason: `Security incident: ${incidentType}`,
      metadata: JSON.stringify(details),
      timestamp: new Date().toISOString()
    };
    try {
      await new Promise((resolve, reject) => {
        db.run(
          `INSERT INTO security_audit_log (event_type, user_id, ip_address, user_agent, success, failure_reason, metadata, timestamp)
           VALUES (?, ?, ?, ?, ?, ?, ?, ?)`,
          [event.event_type, event.user_id, event.ip_address, event.user_agent, 0, event.failure_reason, event.metadata, event.timestamp],
          (err) => err ? reject(err) : resolve()
        );
      });
      logger.error(`[SECURITY INCIDENT] ${incidentType}: ${JSON.stringify(details)}`);
      // Aggregate to SIEM with CRITICAL level
      logAggregator.aggregate('security_audit', 'critical', 'security_incident', `Security incident: ${incidentType}`, {
        incidentType,
        ...details
      });
    } catch (error) {
      logger.error('Failed to log security incident:', error);
    }
  }
  /**
   * Log admin activities (user management, config changes, etc.)
   *
   * NOTE: this class previously contained a SECOND logAdminActivity
   * definition earlier in the body (event_type 'ADMIN_ACTIVITY'); in a JS
   * class body the later definition silently overrides the earlier, so that
   * copy was dead code and has been removed. Runtime behavior — event_type
   * `ADMIN_${action}` with the metadata below — is unchanged.
   */
  static async logAdminActivity(adminId, action, details = {}) {
    return this.logAuthEvent(`ADMIN_${action.toUpperCase()}`, adminId, {
      success: true,
      ip: details.ip,
      userAgent: details.userAgent,
      metadata: {
        action,
        target: details.target || details.targetUserId,
        changes: details.changes,
        ...details
      }
    });
  }
}
// Static-only class: consumers call SecurityAuditLogger.logX(...) directly.
module.exports = SecurityAuditLogger;

View file

@ -0,0 +1,853 @@
/**
* Security Intelligence & Pattern Analysis
* Algorithm-driven surveillance system for automatic pattern detection
* Includes anomaly detection, threat intelligence, and predictive analysis
* Enhanced with configurable thresholds and risk signatures (CWE-778)
*/
const logger = require('./logger');
const logAggregator = require('./logAggregator');
const { db } = require('../database/db');
const thresholdManager = require('./thresholdManager');
const riskSignatureManager = require('./riskSignatureManager');
class SecurityIntelligence {
constructor() {
this.patterns = new Map();
this.anomalies = [];
this.threatScore = 0;
this.analysisInterval = 60000; // 1 minute
this.initialize();
}
/**
* Initialize security intelligence system
*/
async initialize() {
await this.createAnomaliesTable();
await this.createThreatIntelligenceTable();
// Start continuous monitoring
setInterval(() => this.analyze(), this.analysisInterval);
logger.info('[SecurityIntelligence] Initialized - Active monitoring enabled');
}
/**
* Create anomalies table
*/
/**
 * Create the security_anomalies table (idempotent via IF NOT EXISTS)
 * plus three query indexes on type/severity/status.
 * NOTE(review): the CREATE INDEX calls are fire-and-forget — resolve()
 * runs without awaiting them and their errors are not surfaced; confirm
 * that is acceptable.
 */
async createAnomaliesTable() {
  return new Promise((resolve, reject) => {
    db.run(`
      CREATE TABLE IF NOT EXISTS security_anomalies (
        id INTEGER PRIMARY KEY AUTOINCREMENT,
        anomaly_id TEXT UNIQUE NOT NULL,
        type TEXT NOT NULL,
        severity TEXT NOT NULL,
        description TEXT NOT NULL,
        confidence REAL NOT NULL,
        affected_user_id INTEGER,
        affected_ip TEXT,
        pattern_data TEXT,
        related_logs TEXT,
        status TEXT DEFAULT 'open',
        resolved_at DATETIME,
        resolved_by INTEGER,
        resolution_notes TEXT,
        created_at DATETIME DEFAULT CURRENT_TIMESTAMP
      )
    `, (err) => {
      if (err) reject(err);
      else {
        db.run(`CREATE INDEX IF NOT EXISTS idx_anomalies_type ON security_anomalies(type, created_at DESC)`);
        db.run(`CREATE INDEX IF NOT EXISTS idx_anomalies_severity ON security_anomalies(severity, created_at DESC)`);
        db.run(`CREATE INDEX IF NOT EXISTS idx_anomalies_status ON security_anomalies(status, created_at DESC)`);
        resolve();
      }
    });
  });
}
/**
* Create threat intelligence table
*/
/**
 * Create the threat_intelligence table (idempotent via IF NOT EXISTS).
 * The unique (indicator, indicator_type) index supports upsert-style
 * occurrence counting by callers.
 * NOTE(review): as with createAnomaliesTable, the CREATE INDEX calls are
 * not awaited before resolve().
 */
async createThreatIntelligenceTable() {
  return new Promise((resolve, reject) => {
    db.run(`
      CREATE TABLE IF NOT EXISTS threat_intelligence (
        id INTEGER PRIMARY KEY AUTOINCREMENT,
        indicator TEXT NOT NULL,
        indicator_type TEXT NOT NULL,
        threat_level TEXT NOT NULL,
        description TEXT,
        source TEXT,
        confidence REAL NOT NULL,
        first_seen DATETIME DEFAULT CURRENT_TIMESTAMP,
        last_seen DATETIME DEFAULT CURRENT_TIMESTAMP,
        occurrence_count INTEGER DEFAULT 1,
        metadata TEXT
      )
    `, (err) => {
      if (err) reject(err);
      else {
        db.run(`CREATE UNIQUE INDEX IF NOT EXISTS idx_threat_indicator ON threat_intelligence(indicator, indicator_type)`);
        db.run(`CREATE INDEX IF NOT EXISTS idx_threat_level ON threat_intelligence(threat_level, last_seen DESC)`);
        resolve();
      }
    });
  });
}
/**
* Main analysis loop - runs continuously
*/
async analyze() {
try {
logger.debug('[SecurityIntelligence] Running analysis cycle');
// Run all detection algorithms in parallel
await Promise.all([
this.detectBruteForceAttacks(),
this.detectAccountEnumeration(),
this.detectPrivilegeEscalation(),
this.detectAnomalousAccess(),
this.detectSuspiciousIPs(),
this.detectDataExfiltration(),
this.detectSessionAnomalies(),
this.detectRateLimitAbuse()
]);
// Calculate overall threat score
await this.calculateThreatScore();
} catch (error) {
logger.error('[SecurityIntelligence] Analysis cycle failed:', error);
}
}
/**
* Detect brute force authentication attacks
* Enhanced with configurable thresholds
*/
/**
 * Detect brute force authentication attacks.
 * Enhanced with configurable thresholds: window/count come from the
 * 'brute_force_attack' threshold config, falling back to 10 minutes /
 * 10 attempts. Each offending IP becomes a security anomaly and a threat
 * indicator.
 * @returns {Promise<number>} number of IPs the SQL threshold flagged
 */
async detectBruteForceAttacks() {
  // Get configured threshold or use default
  const thresholdConfig = await thresholdManager.getThresholds({ patternType: 'brute_force_attack' });
  const timeWindow = thresholdConfig[0]?.time_window_minutes || 10;
  const threshold = thresholdConfig[0]?.threshold_value || 10;
  const startTime = new Date(Date.now() - timeWindow * 60 * 1000).toISOString();
  return new Promise((resolve, reject) => {
    db.all(
      // Failed-auth rows grouped per IP inside the window; HAVING applies
      // the configured count threshold in SQL before any JS work.
      `SELECT ip_address, COUNT(*) as attempt_count,
              MAX(timestamp) as last_attempt,
              GROUP_CONCAT(log_id) as log_ids
       FROM aggregated_logs
       WHERE category = 'authentication'
       AND level IN ('warn', 'error')
       AND message LIKE '%failed%'
       AND timestamp >= ?
       AND ip_address IS NOT NULL
       GROUP BY ip_address
       HAVING attempt_count >= ?`,
      [startTime, threshold],
      async (err, rows) => {
        if (err) {
          reject(err);
          return;
        }
        // NOTE(review): the awaits below are unguarded inside this async
        // callback; a rejection from evaluateThreshold/createAnomaly would
        // leave the outer promise unsettled — confirm acceptable.
        for (const row of rows) {
          // Evaluate threshold
          const thresholdResult = await thresholdManager.evaluateThreshold(
            'brute_force_attack',
            'failed_login_count',
            row.attempt_count,
            { ip_address: row.ip_address, timeWindow }
          );
          if (!thresholdResult.exceeded) continue;
          const configuredThreshold = thresholdResult.thresholds[0];
          // Severity from config when present; otherwise >20 attempts = critical
          const severity = configuredThreshold?.severity || (row.attempt_count > 20 ? 'critical' : 'high');
          await this.createAnomaly({
            type: 'brute_force_attack',
            severity: severity,
            description: `Brute force attack detected from IP ${row.ip_address}: ${row.attempt_count} failed login attempts in ${timeWindow} minutes (threshold: ${threshold})`,
            confidence: Math.min(row.attempt_count / threshold, 1.0), // capped at 1.0
            affected_ip: row.ip_address,
            pattern_data: JSON.stringify({
              attemptCount: row.attempt_count,
              timeWindow: `${timeWindow} minutes`,
              threshold: threshold,
              thresholdExceeded: thresholdResult.exceeded,
              lastAttempt: row.last_attempt
            }),
            related_logs: row.log_ids
          });
          // Add to threat intelligence
          await this.addThreatIndicator(row.ip_address, 'ip', severity === 'critical' ? 'critical' : 'high', 'Brute force attack source');
        }
        resolve(rows.length);
      }
    );
  });
}
/**
* Detect account enumeration attempts
* Enhanced with configurable thresholds
*/
/**
 * Detect account enumeration attempts: one IP probing many distinct
 * usernames in a short window.
 * NOTE(review): threshold config is read from pattern type
 * 'credential_stuffing' while the anomaly created is 'account_enumeration'
 * — confirm this cross-use of config is intentional.
 * @returns {Promise<number>} number of IPs flagged
 */
async detectAccountEnumeration() {
  // Get configured threshold or use default (5 usernames in 5 minutes)
  const thresholdConfig = await thresholdManager.getThresholds({ patternType: 'credential_stuffing' });
  const timeWindow = thresholdConfig[0]?.time_window_minutes || 5;
  const threshold = thresholdConfig[0]?.threshold_value || 5;
  const startTime = new Date(Date.now() - timeWindow * 60 * 1000).toISOString();
  return new Promise((resolve, reject) => {
    db.all(
      // Counts DISTINCT usernames extracted from the JSON metadata column
      `SELECT ip_address,
              COUNT(DISTINCT json_extract(metadata, '$.username')) as unique_usernames,
              COUNT(*) as total_attempts,
              GROUP_CONCAT(log_id) as log_ids
       FROM aggregated_logs
       WHERE category = 'authentication'
       AND level = 'warn'
       AND timestamp >= ?
       AND ip_address IS NOT NULL
       AND metadata LIKE '%username%'
       GROUP BY ip_address
       HAVING unique_usernames >= ?`,
      [startTime, threshold],
      async (err, rows) => {
        if (err) {
          reject(err);
          return;
        }
        for (const row of rows) {
          await this.createAnomaly({
            type: 'account_enumeration',
            severity: 'medium',
            description: `Account enumeration detected from IP ${row.ip_address}: ${row.unique_usernames} different usernames tried in ${timeWindow} minutes`,
            // Confidence reaches 1.0 at twice the trigger threshold
            confidence: Math.min(row.unique_usernames / (threshold * 2), 1.0),
            affected_ip: row.ip_address,
            pattern_data: JSON.stringify({
              uniqueUsernames: row.unique_usernames,
              totalAttempts: row.total_attempts,
              timeWindow: `${timeWindow} minutes`
            }),
            related_logs: row.log_ids
          });
        }
        resolve(rows.length);
      }
    );
  });
}
/**
* Detect privilege escalation attempts
*/
async detectPrivilegeEscalation() {
const timeWindow = 30; // minutes
const startTime = new Date(Date.now() - timeWindow * 60 * 1000).toISOString();
return new Promise((resolve, reject) => {
db.all(
`SELECT user_id, ip_address,
COUNT(*) as escalation_attempts,
GROUP_CONCAT(log_id) as log_ids
FROM aggregated_logs
WHERE category = 'authorization'
AND (message LIKE '%denied%' OR message LIKE '%unauthorized%')
AND timestamp >= ?
AND user_id IS NOT NULL
GROUP BY user_id, ip_address
HAVING escalation_attempts >= 3`,
[startTime],
async (err, rows) => {
if (err) {
reject(err);
return;
}
for (const row of rows) {
await this.createAnomaly({
type: 'privilege_escalation',
severity: 'critical',
description: `Privilege escalation attempt detected: User ${row.user_id} attempted ${row.escalation_attempts} unauthorized actions`,
confidence: 0.85,
affected_user_id: row.user_id,
affected_ip: row.ip_address,
pattern_data: JSON.stringify({
escalationAttempts: row.escalation_attempts,
timeWindow: `${timeWindow} minutes`
}),
related_logs: row.log_ids
});
}
resolve(rows.length);
}
);
});
}
/**
* Detect anomalous access patterns
*/
async detectAnomalousAccess() {
const timeWindow = 60; // minutes
const startTime = new Date(Date.now() - timeWindow * 60 * 1000).toISOString();
// Detect access from unusual hours (2 AM - 5 AM)
return new Promise((resolve, reject) => {
db.all(
`SELECT user_id, ip_address,
COUNT(*) as access_count,
GROUP_CONCAT(log_id) as log_ids
FROM aggregated_logs
WHERE category IN ('access', 'security_audit')
AND timestamp >= ?
AND CAST(strftime('%H', timestamp) AS INTEGER) BETWEEN 2 AND 5
AND user_id IS NOT NULL
GROUP BY user_id, ip_address
HAVING access_count >= 3`,
[startTime],
async (err, rows) => {
if (err) {
reject(err);
return;
}
for (const row of rows) {
await this.createAnomaly({
type: 'anomalous_access',
severity: 'medium',
description: `Unusual access pattern: User ${row.user_id} accessed system during off-hours (${row.access_count} times)`,
confidence: 0.7,
affected_user_id: row.user_id,
affected_ip: row.ip_address,
pattern_data: JSON.stringify({
accessCount: row.access_count,
timeRange: '2 AM - 5 AM'
}),
related_logs: row.log_ids
});
}
resolve(rows.length);
}
);
});
}
/**
 * Detect suspicious IP addresses.
 *
 * Flags any IP that, within the last hour, produced an unusually high
 * request volume, touched 10+ distinct user accounts, or generated a high
 * error rate. Severity escalates with the strength of the signal, and
 * high-severity sources are additionally recorded as threat indicators.
 * @returns {Promise<number>} number of IPs examined/flagged by the query
 */
async detectSuspiciousIPs() {
  const timeWindow = 60; // minutes
  const threshold = 100; // requests
  const startTime = new Date(Date.now() - timeWindow * 60 * 1000).toISOString();
  return new Promise((resolve, reject) => {
    db.all(
      `SELECT ip_address,
              COUNT(*) as request_count,
              COUNT(DISTINCT user_id) as unique_users,
              COUNT(CASE WHEN level = 'error' THEN 1 END) as error_count,
              GROUP_CONCAT(DISTINCT source) as sources
       FROM aggregated_logs
       WHERE timestamp >= ?
         AND ip_address IS NOT NULL
       GROUP BY ip_address
       HAVING request_count >= ? OR unique_users >= 10`,
      [startTime, threshold],
      async (err, rows) => {
        if (err) {
          reject(err);
          return;
        }
        for (const row of rows) {
          // Build severity plus a human-readable reason list; the strongest
          // matching signal determines the final severity.
          let severity = 'low';
          let reason = [];
          if (row.request_count >= threshold * 2) {
            severity = 'high';
            reason.push(`excessive requests (${row.request_count})`);
          } else if (row.request_count >= threshold) {
            severity = 'medium';
            reason.push(`high request volume (${row.request_count})`);
          }
          if (row.unique_users >= 10) {
            severity = 'high';
            reason.push(`multiple user accounts (${row.unique_users})`);
          }
          // More than 30% of requests erroring suggests scanning/probing
          // rather than legitimate traffic.
          if (row.error_count > row.request_count * 0.3) {
            severity = 'high';
            reason.push(`high error rate (${row.error_count})`);
          }
          await this.createAnomaly({
            type: 'suspicious_ip',
            severity,
            description: `Suspicious IP activity from ${row.ip_address}: ${reason.join(', ')}`,
            confidence: 0.75,
            affected_ip: row.ip_address,
            pattern_data: JSON.stringify({
              requestCount: row.request_count,
              uniqueUsers: row.unique_users,
              errorCount: row.error_count,
              sources: row.sources
            }),
            // No per-log linkage here; the aggregate counts are the evidence.
            related_logs: null
          });
          // Only the strongest offenders are promoted to threat intel.
          if (severity === 'high') {
            await this.addThreatIndicator(row.ip_address, 'ip', severity, reason.join(', '));
          }
        }
        resolve(rows.length);
      }
    );
  });
}
/**
* Detect potential data exfiltration
*/
async detectDataExfiltration() {
const timeWindow = 30; // minutes
const downloadThreshold = 5; // Large downloads
const startTime = new Date(Date.now() - timeWindow * 60 * 1000).toISOString();
return new Promise((resolve, reject) => {
db.all(
`SELECT user_id, ip_address,
COUNT(*) as download_count,
GROUP_CONCAT(log_id) as log_ids
FROM aggregated_logs
WHERE category = 'access'
AND (message LIKE '%download%' OR message LIKE '%export%' OR message LIKE '%backup%')
AND timestamp >= ?
AND user_id IS NOT NULL
GROUP BY user_id, ip_address
HAVING download_count >= ?`,
[startTime, downloadThreshold],
async (err, rows) => {
if (err) {
reject(err);
return;
}
for (const row of rows) {
await this.createAnomaly({
type: 'data_exfiltration',
severity: 'high',
description: `Potential data exfiltration: User ${row.user_id} performed ${row.download_count} download/export operations in ${timeWindow} minutes`,
confidence: 0.8,
affected_user_id: row.user_id,
affected_ip: row.ip_address,
pattern_data: JSON.stringify({
downloadCount: row.download_count,
timeWindow: `${timeWindow} minutes`
}),
related_logs: row.log_ids
});
}
resolve(rows.length);
}
);
});
}
/**
* Detect session anomalies
*/
async detectSessionAnomalies() {
const timeWindow = 24; // hours
const startTime = new Date(Date.now() - timeWindow * 60 * 60 * 1000).toISOString();
// Detect impossible travel (same user, different locations in short time)
return new Promise((resolve, reject) => {
db.all(
`SELECT user_id,
COUNT(DISTINCT ip_address) as unique_ips,
GROUP_CONCAT(DISTINCT ip_address) as ips,
COUNT(*) as session_count
FROM aggregated_logs
WHERE category = 'authentication'
AND message LIKE '%login%success%'
AND timestamp >= ?
AND user_id IS NOT NULL
GROUP BY user_id
HAVING unique_ips >= 5`,
[startTime],
async (err, rows) => {
if (err) {
reject(err);
return;
}
for (const row of rows) {
await this.createAnomaly({
type: 'session_anomaly',
severity: 'medium',
description: `Session anomaly: User ${row.user_id} logged in from ${row.unique_ips} different IP addresses in ${timeWindow} hours`,
confidence: 0.7,
affected_user_id: row.user_id,
pattern_data: JSON.stringify({
uniqueIPs: row.unique_ips,
ipAddresses: row.ips.split(','),
sessionCount: row.session_count,
timeWindow: `${timeWindow} hours`
}),
related_logs: null
});
}
resolve(rows.length);
}
);
});
}
/**
* Detect rate limit abuse
*/
async detectRateLimitAbuse() {
const timeWindow = 15; // minutes
const startTime = new Date(Date.now() - timeWindow * 60 * 1000).toISOString();
return new Promise((resolve, reject) => {
db.all(
`SELECT ip_address,
COUNT(*) as blocked_count,
GROUP_CONCAT(log_id) as log_ids
FROM aggregated_logs
WHERE category = 'system'
AND message LIKE '%rate limit%'
AND timestamp >= ?
AND ip_address IS NOT NULL
GROUP BY ip_address
HAVING blocked_count >= 5`,
[startTime],
async (err, rows) => {
if (err) {
reject(err);
return;
}
for (const row of rows) {
await this.createAnomaly({
type: 'rate_limit_abuse',
severity: 'medium',
description: `Rate limit abuse: IP ${row.ip_address} was rate-limited ${row.blocked_count} times in ${timeWindow} minutes`,
confidence: 0.9,
affected_ip: row.ip_address,
pattern_data: JSON.stringify({
blockedCount: row.blocked_count,
timeWindow: `${timeWindow} minutes`
}),
related_logs: row.log_ids
});
await this.addThreatIndicator(row.ip_address, 'ip', 'medium', 'Rate limit abuse');
}
resolve(rows.length);
}
);
});
}
/**
* Create anomaly record
*/
async createAnomaly(details) {
const anomalyId = `ANOM-${Date.now()}-${Math.random().toString(36).substr(2, 9)}`;
// Check if similar anomaly exists (deduplication)
const existing = await this.findSimilarAnomaly(details);
if (existing) {
logger.debug(`[SecurityIntelligence] Similar anomaly exists: ${existing.anomaly_id}`);
return existing.anomaly_id;
}
return new Promise((resolve, reject) => {
db.run(
`INSERT INTO security_anomalies
(anomaly_id, type, severity, description, confidence, affected_user_id, affected_ip, pattern_data, related_logs)
VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?)`,
[
anomalyId,
details.type,
details.severity,
details.description,
details.confidence,
details.affected_user_id || null,
details.affected_ip || null,
details.pattern_data,
details.related_logs
],
(err) => {
if (err) {
reject(err);
} else {
logger.warn(`[SecurityIntelligence] Anomaly detected: ${details.type} - ${details.severity} - ${details.description}`);
// Log to aggregated logs as well
logAggregator.aggregate('security_intelligence', 'warn', 'security', details.description, {
anomalyId,
type: details.type,
severity: details.severity,
confidence: details.confidence
});
resolve(anomalyId);
}
}
);
});
}
/**
* Find similar anomaly (deduplication)
*/
async findSimilarAnomaly(details) {
const recentTime = new Date(Date.now() - 10 * 60 * 1000).toISOString(); // Last 10 minutes
return new Promise((resolve, reject) => {
db.get(
`SELECT * FROM security_anomalies
WHERE type = ?
AND severity = ?
AND (affected_user_id = ? OR affected_ip = ?)
AND status = 'open'
AND created_at >= ?
ORDER BY created_at DESC
LIMIT 1`,
[details.type, details.severity, details.affected_user_id, details.affected_ip, recentTime],
(err, row) => {
if (err) reject(err);
else resolve(row);
}
);
});
}
/**
* Add threat indicator to intelligence database
*/
async addThreatIndicator(indicator, type, level, description) {
  return new Promise((resolve, reject) => {
    // Upsert: first sighting inserts with fixed confidence 0.8 and source
    // 'internal_detection'; repeat sightings bump occurrence_count, refresh
    // last_seen, and overwrite the level/description with the latest values.
    // NOTE(review): ON CONFLICT(indicator, indicator_type) requires a UNIQUE
    // index on that column pair in the threat_intelligence schema — confirm
    // it exists, otherwise this statement fails with SQLITE_ERROR.
    db.run(
      `INSERT INTO threat_intelligence (indicator, indicator_type, threat_level, description, confidence, source)
       VALUES (?, ?, ?, ?, 0.8, 'internal_detection')
       ON CONFLICT(indicator, indicator_type) DO UPDATE SET
         last_seen = CURRENT_TIMESTAMP,
         occurrence_count = occurrence_count + 1,
         threat_level = ?,
         description = ?`,
      // level/description appear twice: once for INSERT, once for the UPDATE arm.
      [indicator, type, level, description, level, description],
      (err) => {
        if (err) reject(err);
        else {
          logger.info(`[SecurityIntelligence] Threat indicator added: ${type}=${indicator} (${level})`);
          resolve();
        }
      }
    );
  });
}
/**
* Calculate overall threat score (0-100)
*/
async calculateThreatScore() {
return new Promise((resolve, reject) => {
db.get(
`SELECT
COUNT(CASE WHEN severity = 'critical' AND status = 'open' THEN 1 END) as critical_count,
COUNT(CASE WHEN severity = 'high' AND status = 'open' THEN 1 END) as high_count,
COUNT(CASE WHEN severity = 'medium' AND status = 'open' THEN 1 END) as medium_count,
COUNT(CASE WHEN severity = 'low' AND status = 'open' THEN 1 END) as low_count
FROM security_anomalies
WHERE created_at >= datetime('now', '-24 hours')`,
[],
(err, row) => {
if (err) {
reject(err);
return;
}
// Weight severity levels
const score = Math.min(
(row.critical_count * 40) +
(row.high_count * 20) +
(row.medium_count * 10) +
(row.low_count * 5),
100
);
this.threatScore = score;
if (score >= 80) {
logger.error(`[SecurityIntelligence] CRITICAL THREAT LEVEL: ${score}/100`);
} else if (score >= 50) {
logger.warn(`[SecurityIntelligence] HIGH THREAT LEVEL: ${score}/100`);
} else if (score >= 20) {
logger.info(`[SecurityIntelligence] MEDIUM THREAT LEVEL: ${score}/100`);
} else {
logger.debug(`[SecurityIntelligence] LOW THREAT LEVEL: ${score}/100`);
}
resolve(score);
}
);
});
}
/**
* Get active anomalies
*/
async getAnomalies(filters = {}) {
const {
status = 'open',
severity,
type,
limit = 100,
offset = 0
} = filters;
let whereClause = ['status = ?'];
let params = [status];
if (severity) {
whereClause.push('severity = ?');
params.push(severity);
}
if (type) {
whereClause.push('type = ?');
params.push(type);
}
params.push(limit, offset);
return new Promise((resolve, reject) => {
db.all(
`SELECT * FROM security_anomalies
WHERE ${whereClause.join(' AND ')}
ORDER BY created_at DESC
LIMIT ? OFFSET ?`,
params,
(err, rows) => {
if (err) reject(err);
else resolve(rows);
}
);
});
}
/**
* Resolve anomaly
*/
async resolveAnomaly(anomalyId, resolvedBy, notes) {
return new Promise((resolve, reject) => {
db.run(
`UPDATE security_anomalies
SET status = 'resolved',
resolved_at = CURRENT_TIMESTAMP,
resolved_by = ?,
resolution_notes = ?
WHERE anomaly_id = ?`,
[resolvedBy, notes, anomalyId],
(err) => {
if (err) reject(err);
else {
logger.info(`[SecurityIntelligence] Anomaly resolved: ${anomalyId} by user ${resolvedBy}`);
resolve();
}
}
);
});
}
/**
* Get threat intelligence
*/
async getThreatIntelligence(filters = {}) {
const { level, type, limit = 100 } = filters;
let whereClause = [];
let params = [];
if (level) {
whereClause.push('threat_level = ?');
params.push(level);
}
if (type) {
whereClause.push('indicator_type = ?');
params.push(type);
}
const where = whereClause.length > 0 ? `WHERE ${whereClause.join(' AND ')}` : '';
params.push(limit);
return new Promise((resolve, reject) => {
db.all(
`SELECT * FROM threat_intelligence ${where}
ORDER BY last_seen DESC, occurrence_count DESC
LIMIT ?`,
params,
(err, rows) => {
if (err) reject(err);
else resolve(rows);
}
);
});
}
/**
* Get security intelligence dashboard data
*/
async getDashboardData() {
const [anomalies, threats, score] = await Promise.all([
this.getAnomalies({ status: 'open', limit: 50 }),
this.getThreatIntelligence({ limit: 20 }),
this.calculateThreatScore()
]);
const anomalyStats = {
critical: anomalies.filter(a => a.severity === 'critical').length,
high: anomalies.filter(a => a.severity === 'high').length,
medium: anomalies.filter(a => a.severity === 'medium').length,
low: anomalies.filter(a => a.severity === 'low').length
};
return {
threatScore: score,
anomalies: anomalies.slice(0, 10),
anomalyStats,
threats: threats.slice(0, 10),
timestamp: new Date().toISOString()
};
}
}
// Create singleton instance — the module exports one shared detector so all
// consumers see the same cached threat score (this.threatScore).
const securityIntelligence = new SecurityIntelligence();
module.exports = securityIntelligence;

View file

@ -0,0 +1,481 @@
/**
* Threshold Manager
* Configurable notification thresholds for security threat detection
* CWE-778 Compliance: Logs all threshold configurations and evaluations
*/
const logger = require('./logger');
const logAggregator = require('./logAggregator');
const { db } = require('../database/db');
class ThresholdManager {
  /**
   * Manages configurable security-detection thresholds.
   *
   * Thresholds are persisted in the `security_thresholds` table and mirrored
   * into an in-memory Map (`this.thresholds`, keyed by threshold_id) for
   * fast evaluation. CWE-778 compliance: every configuration change and
   * every evaluation is written to the aggregated security log.
   */
  constructor() {
    this.thresholds = new Map();
    // Constructors cannot be async, so initialization is fire-and-forget.
    // Attach a catch handler so a failed init is logged instead of becoming
    // an unhandled promise rejection (which aborts modern Node processes).
    this.initialize().catch((err) => {
      logger.error('[ThresholdManager] Initialization failed:', err);
    });
  }

  /**
   * Initialize threshold manager: ensure the schema exists, seed default
   * rows, and load all enabled thresholds into memory.
   */
  async initialize() {
    await this.createThresholdsTable();
    await this.loadThresholds();
    logger.info('[ThresholdManager] Initialized with configurable thresholds');
    // Log initialization (CWE-778)
    logAggregator.aggregate('threshold_manager', 'info', 'security', 'Threshold manager initialized', {
      totalThresholds: this.thresholds.size
    });
  }

  /**
   * Create the thresholds table (and its indexes) if missing, then seed the
   * default threshold set.
   * @returns {Promise<void>}
   */
  async createThresholdsTable() {
    return new Promise((resolve, reject) => {
      db.run(`
        CREATE TABLE IF NOT EXISTS security_thresholds (
          id INTEGER PRIMARY KEY AUTOINCREMENT,
          threshold_id TEXT UNIQUE NOT NULL,
          name TEXT NOT NULL,
          description TEXT,
          pattern_type TEXT NOT NULL,
          metric_name TEXT NOT NULL,
          operator TEXT NOT NULL,
          threshold_value INTEGER NOT NULL,
          time_window_minutes INTEGER DEFAULT 30,
          severity TEXT NOT NULL,
          enabled INTEGER DEFAULT 1,
          created_at DATETIME DEFAULT CURRENT_TIMESTAMP,
          updated_at DATETIME DEFAULT CURRENT_TIMESTAMP
        )
      `, async (err) => {
        if (err) {
          reject(err);
          return;
        }
        try {
          db.run(`CREATE INDEX IF NOT EXISTS idx_thresholds_pattern ON security_thresholds(pattern_type, enabled)`);
          db.run(`CREATE INDEX IF NOT EXISTS idx_thresholds_enabled ON security_thresholds(enabled)`);
          await this.createDefaultThresholds();
          resolve();
        } catch (seedErr) {
          // Without this catch a failure while seeding defaults would be an
          // unhandled rejection and the outer promise would never settle.
          reject(seedErr);
        }
      });
    });
  }

  /**
   * Seed default thresholds for common security patterns.
   * Uses INSERT OR IGNORE so existing (possibly admin-tuned) rows with the
   * same threshold_id are never overwritten.
   */
  async createDefaultThresholds() {
    const defaultThresholds = [
      {
        threshold_id: 'THRESHOLD-BRUTE-FORCE',
        name: 'Brute Force Attack Threshold',
        description: 'Alert when failed login attempts exceed threshold',
        pattern_type: 'brute_force_attack',
        metric_name: 'failed_login_count',
        operator: '>=',
        threshold_value: 5,
        time_window_minutes: 10,
        severity: 'critical'
      },
      {
        threshold_id: 'THRESHOLD-CREDENTIAL-STUFFING',
        name: 'Credential Stuffing Threshold',
        description: 'Alert on multiple username attempts from same IP',
        pattern_type: 'credential_stuffing',
        metric_name: 'unique_username_count',
        operator: '>=',
        threshold_value: 5,
        time_window_minutes: 5,
        severity: 'critical'
      },
      {
        threshold_id: 'THRESHOLD-PRIVILEGE-ESC',
        name: 'Privilege Escalation Threshold',
        description: 'Alert on repeated unauthorized access attempts',
        pattern_type: 'privilege_escalation',
        metric_name: 'escalation_attempt_count',
        operator: '>=',
        threshold_value: 3,
        time_window_minutes: 30,
        severity: 'critical'
      },
      {
        threshold_id: 'THRESHOLD-SUSPICIOUS-IP',
        name: 'Suspicious IP Activity Threshold',
        description: 'Alert on excessive requests from single IP',
        pattern_type: 'suspicious_ip',
        metric_name: 'request_count',
        operator: '>=',
        threshold_value: 100,
        time_window_minutes: 15,
        severity: 'high'
      },
      {
        threshold_id: 'THRESHOLD-DATA-EXFIL',
        name: 'Data Exfiltration Threshold',
        description: 'Alert on excessive data downloads',
        pattern_type: 'data_exfiltration',
        metric_name: 'download_count',
        operator: '>=',
        threshold_value: 10,
        time_window_minutes: 60,
        severity: 'high'
      },
      {
        threshold_id: 'THRESHOLD-SESSION-ANOMALY',
        name: 'Session Anomaly Threshold',
        description: 'Alert on unusual session patterns',
        pattern_type: 'session_anomaly',
        metric_name: 'anomaly_score',
        operator: '>=',
        threshold_value: 70,
        time_window_minutes: 30,
        severity: 'medium'
      },
      {
        threshold_id: 'THRESHOLD-IMPOSSIBLE-TRAVEL',
        name: 'Impossible Travel Threshold',
        description: 'Alert on geographically impossible travel speed',
        pattern_type: 'impossible_travel',
        metric_name: 'travel_speed_kmh',
        operator: '>=',
        threshold_value: 800,
        time_window_minutes: 60,
        severity: 'high'
      },
      {
        threshold_id: 'THRESHOLD-THREAT-SCORE',
        name: 'Critical Threat Score Threshold',
        description: 'Alert when overall threat score is critical',
        pattern_type: 'threat_score',
        metric_name: 'threat_score',
        operator: '>=',
        threshold_value: 80,
        time_window_minutes: 60,
        severity: 'critical'
      }
    ];
    for (const threshold of defaultThresholds) {
      await new Promise((resolve, reject) => {
        db.run(
          `INSERT OR IGNORE INTO security_thresholds
           (threshold_id, name, description, pattern_type, metric_name, operator, threshold_value, time_window_minutes, severity)
           VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?)`,
          [
            threshold.threshold_id,
            threshold.name,
            threshold.description,
            threshold.pattern_type,
            threshold.metric_name,
            threshold.operator,
            threshold.threshold_value,
            threshold.time_window_minutes,
            threshold.severity
          ],
          (err) => {
            if (err) reject(err);
            else resolve();
          }
        );
      });
    }
    logger.info(`[ThresholdManager] Created ${defaultThresholds.length} default thresholds`);
  }

  /**
   * Load all enabled thresholds from the database into the in-memory cache.
   */
  async loadThresholds() {
    return new Promise((resolve, reject) => {
      db.all(
        `SELECT * FROM security_thresholds WHERE enabled = 1`,
        [],
        (err, rows) => {
          if (err) {
            reject(err);
          } else {
            this.thresholds.clear();
            rows.forEach(row => {
              this.thresholds.set(row.threshold_id, row);
            });
            logger.info(`[ThresholdManager] Loaded ${rows.length} active thresholds`);
            resolve();
          }
        }
      );
    });
  }

  /**
   * Evaluate whether a metric value trips any configured threshold for the
   * given pattern/metric pair.
   * CWE-778: every evaluation (hit or miss) is written to the audit log.
   * @param {string} patternType e.g. 'brute_force_attack'
   * @param {string} metricName e.g. 'failed_login_count'
   * @param {number} value observed metric value
   * @param {Object} [context] extra data recorded with the audit entry
   * @returns {Promise<{exceeded: boolean, thresholds: Array}>} all exceeded
   *   thresholds, each annotated with actualValue and context
   */
  async evaluateThreshold(patternType, metricName, value, context = {}) {
    const matchingThresholds = Array.from(this.thresholds.values()).filter(
      t => t.pattern_type === patternType && t.metric_name === metricName
    );
    if (matchingThresholds.length === 0) {
      return { exceeded: false, thresholds: [] };
    }
    const exceededThresholds = [];
    for (const threshold of matchingThresholds) {
      const exceeded = this.compareValue(value, threshold.operator, threshold.threshold_value);
      // Log threshold evaluation (CWE-778)
      logAggregator.aggregate('threshold_manager', 'info', 'security', 'Threshold evaluated', {
        thresholdId: threshold.threshold_id,
        patternType,
        metricName,
        value,
        operator: threshold.operator,
        thresholdValue: threshold.threshold_value,
        exceeded,
        severity: threshold.severity,
        context
      });
      if (exceeded) {
        exceededThresholds.push({
          ...threshold,
          actualValue: value,
          context
        });
        logger.warn(`[ThresholdManager] Threshold exceeded: ${threshold.name} (${value} ${threshold.operator} ${threshold.threshold_value})`);
      }
    }
    return {
      exceeded: exceededThresholds.length > 0,
      thresholds: exceededThresholds
    };
  }

  /**
   * Compare a value against a threshold using the configured operator.
   * NOTE(review): '==' / '!=' use loose comparison, which makes numeric
   * strings compare equal to stored integers — preserved as-is; confirm
   * before tightening to strict equality.
   * @returns {boolean} true when the comparison holds; false for unknown operators
   */
  compareValue(value, operator, threshold) {
    switch (operator) {
      case '>=': return value >= threshold;
      case '>': return value > threshold;
      case '<=': return value <= threshold;
      case '<': return value < threshold;
      case '==': return value == threshold;
      case '!=': return value != threshold;
      default: return false;
    }
  }

  /**
   * Get thresholds from the database, optionally filtered by pattern type
   * and/or enabled flag.
   */
  async getThresholds(filters = {}) {
    const { patternType, enabled, limit = 100 } = filters;
    let whereClause = [];
    let params = [];
    if (patternType) {
      whereClause.push('pattern_type = ?');
      params.push(patternType);
    }
    if (enabled !== undefined) {
      whereClause.push('enabled = ?');
      params.push(enabled ? 1 : 0);
    }
    const where = whereClause.length > 0 ? `WHERE ${whereClause.join(' AND ')}` : '';
    params.push(limit);
    return new Promise((resolve, reject) => {
      db.all(
        `SELECT * FROM security_thresholds ${where}
         ORDER BY pattern_type, threshold_value DESC
         LIMIT ?`,
        params,
        (err, rows) => {
          if (err) reject(err);
          else resolve(rows);
        }
      );
    });
  }

  /**
   * Get a single threshold row by its threshold_id (undefined if absent).
   */
  async getThresholdById(thresholdId) {
    return new Promise((resolve, reject) => {
      db.get(
        `SELECT * FROM security_thresholds WHERE threshold_id = ?`,
        [thresholdId],
        (err, row) => {
          if (err) reject(err);
          else resolve(row);
        }
      );
    });
  }

  /**
   * Create a new threshold and reload the in-memory cache.
   * CWE-778: logs threshold creation.
   * @returns {Promise<{thresholdId: string}>}
   */
  async createThreshold(data, userId) {
    // Timestamp + random suffix; `slice(2, 11)` replaces the deprecated
    // String#substr(2, 9) with identical output.
    const thresholdId = `THRESHOLD-${Date.now()}-${Math.random().toString(36).slice(2, 11).toUpperCase()}`;
    return new Promise((resolve, reject) => {
      db.run(
        `INSERT INTO security_thresholds
         (threshold_id, name, description, pattern_type, metric_name, operator, threshold_value, time_window_minutes, severity, enabled)
         VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?)`,
        [
          thresholdId,
          data.name,
          data.description || '',
          data.pattern_type,
          data.metric_name,
          data.operator,
          data.threshold_value,
          data.time_window_minutes || 30,
          data.severity,
          data.enabled !== undefined ? (data.enabled ? 1 : 0) : 1
        ],
        async (err) => {
          if (err) {
            reject(err);
            return;
          }
          try {
            await this.loadThresholds();
            // Log threshold creation (CWE-778)
            logAggregator.aggregate('threshold_manager', 'info', 'security', 'Threshold created', {
              thresholdId,
              userId,
              name: data.name,
              patternType: data.pattern_type,
              metricName: data.metric_name,
              thresholdValue: data.threshold_value,
              severity: data.severity
            });
            logger.info(`[ThresholdManager] Threshold created: ${thresholdId} by user ${userId}`);
            resolve({ thresholdId });
          } catch (reloadErr) {
            // Surface cache-reload failures instead of leaving the caller
            // hanging on a never-settling promise.
            reject(reloadErr);
          }
        }
      );
    });
  }

  /**
   * Update an existing threshold. Only a fixed allow-list of columns can be
   * changed, which also keeps the dynamically built SET clause free of
   * injectable identifiers.
   * CWE-778: logs threshold modifications.
   * @throws {Error} when no updatable field is supplied
   */
  async updateThreshold(thresholdId, updates, userId) {
    const allowedFields = ['name', 'description', 'operator', 'threshold_value', 'time_window_minutes', 'severity', 'enabled'];
    const setClause = [];
    const params = [];
    for (const [key, value] of Object.entries(updates)) {
      if (allowedFields.includes(key)) {
        setClause.push(`${key} = ?`);
        params.push(key === 'enabled' ? (value ? 1 : 0) : value);
      }
    }
    if (setClause.length === 0) {
      throw new Error('No valid fields to update');
    }
    setClause.push('updated_at = CURRENT_TIMESTAMP');
    params.push(thresholdId);
    return new Promise((resolve, reject) => {
      db.run(
        `UPDATE security_thresholds
         SET ${setClause.join(', ')}
         WHERE threshold_id = ?`,
        params,
        async (err) => {
          if (err) {
            reject(err);
            return;
          }
          try {
            await this.loadThresholds();
            // Log threshold update (CWE-778)
            logAggregator.aggregate('threshold_manager', 'info', 'security', 'Threshold updated', {
              thresholdId,
              userId,
              updates
            });
            logger.info(`[ThresholdManager] Threshold updated: ${thresholdId} by user ${userId}`);
            resolve({ success: true });
          } catch (reloadErr) {
            // Propagate cache-reload failures instead of swallowing them.
            reject(reloadErr);
          }
        }
      );
    });
  }

  /**
   * Delete a threshold and reload the in-memory cache.
   * CWE-778: logs threshold deletion (at warn level).
   */
  async deleteThreshold(thresholdId, userId) {
    return new Promise((resolve, reject) => {
      db.run(
        `DELETE FROM security_thresholds WHERE threshold_id = ?`,
        [thresholdId],
        async (err) => {
          if (err) {
            reject(err);
            return;
          }
          try {
            await this.loadThresholds();
            // Log threshold deletion (CWE-778)
            logAggregator.aggregate('threshold_manager', 'warn', 'security', 'Threshold deleted', {
              thresholdId,
              userId
            });
            logger.info(`[ThresholdManager] Threshold deleted: ${thresholdId} by user ${userId}`);
            resolve({ success: true });
          } catch (reloadErr) {
            // Propagate cache-reload failures instead of swallowing them.
            reject(reloadErr);
          }
        }
      );
    });
  }

  /**
   * Get aggregate statistics about configured thresholds (totals, enabled/
   * disabled counts, distinct patterns and severities).
   */
  async getStatistics() {
    return new Promise((resolve, reject) => {
      db.get(
        `SELECT
           COUNT(*) as total,
           SUM(CASE WHEN enabled = 1 THEN 1 ELSE 0 END) as enabled,
           SUM(CASE WHEN enabled = 0 THEN 1 ELSE 0 END) as disabled,
           COUNT(DISTINCT pattern_type) as unique_patterns,
           COUNT(DISTINCT severity) as unique_severities
         FROM security_thresholds`,
        [],
        (err, row) => {
          if (err) reject(err);
          else resolve(row);
        }
      );
    });
  }
}
// Create singleton instance — one shared threshold cache (this.thresholds)
// for the whole process.
const thresholdManager = new ThresholdManager();
module.exports = thresholdManager;

View file

@ -0,0 +1,245 @@
const axios = require('axios');
const { exec } = require('child_process');
const { promisify } = require('util');
const fs = require('fs').promises;
const execAsync = promisify(exec);
/**
* VPN Diagnostics - Check for IP and DNS leaks
*/
class VPNDiagnostics {
  /**
   * Get the current public IP address via api.ipify.org.
   * @returns {Promise<string>} the public IP reported by the service
   * @throws {Error} when the request fails or no IP is returned
   */
  static async getPublicIP() {
    try {
      const response = await axios.get('https://api.ipify.org?format=json', {
        timeout: 10000,
        validateStatus: () => true
      });
      // validateStatus accepts every HTTP status, so verify we actually got
      // an IP back instead of silently returning undefined on an error body.
      const ip = response.data?.ip;
      if (!ip) {
        throw new Error(`unexpected response (HTTP ${response.status})`);
      }
      return ip;
    } catch (error) {
      throw new Error(`Failed to get public IP: ${error.message}`);
    }
  }

  /**
   * Get detailed IP information (location, ISP, etc.) from ipinfo.io.
   * Best-effort: returns null on failure instead of throwing.
   */
  static async getIPInfo(ip) {
    try {
      const response = await axios.get(`https://ipinfo.io/${ip || ''}/json`, {
        timeout: 10000,
        validateStatus: () => true
      });
      return response.data;
    } catch (error) {
      console.warn('Could not get IP info:', error.message);
      return null;
    }
  }

  /**
   * Get current DNS servers from /etc/resolv.conf (Linux only).
   * @returns {Promise<string[]>} nameserver addresses in file order
   * @throws {Error} when the file cannot be read
   */
  static async getDNSServers() {
    try {
      const resolvConf = await fs.readFile('/etc/resolv.conf', 'utf8');
      const nameservers = resolvConf
        .split('\n')
        .filter(line => line.trim().startsWith('nameserver'))
        .map(line => line.split(/\s+/)[1]);
      return nameservers;
    } catch (error) {
      throw new Error(`Failed to read DNS servers: ${error.message}`);
    }
  }

  /**
   * Check if the VPN tunnel interface (tun0) exists.
   * @returns {Promise<{exists: boolean, details: string|null}>}
   */
  static async checkVPNInterface() {
    try {
      const { stdout } = await execAsync('ip addr show tun0 2>/dev/null');
      return {
        exists: true,
        details: stdout.trim()
      };
    } catch (error) {
      // `ip` exits non-zero when the interface is absent; treat as "not up".
      return {
        exists: false,
        details: null
      };
    }
  }

  /**
   * Get the kernel routing table as an array of `ip route` output lines.
   */
  static async getRoutingTable() {
    try {
      const { stdout } = await execAsync('ip route');
      return stdout.trim().split('\n');
    } catch (error) {
      throw new Error(`Failed to get routing table: ${error.message}`);
    }
  }

  /**
   * Test DNS resolution through the current DNS servers.
   * @param {string} [domain='google.com'] hostname to resolve; validated
   *   before use because it is interpolated into a shell command.
   * @returns {Promise<{success: boolean, result?: string, error?: string}>}
   */
  static async testDNSResolution(domain = 'google.com') {
    // SECURITY: the lookup runs through `exec` (a shell), so an unvalidated
    // domain would allow command injection (e.g. "x.com; rm -rf /").
    // Accept only plain hostnames: letters, digits, dots and hyphens.
    if (typeof domain !== 'string' ||
        !/^[A-Za-z0-9]([A-Za-z0-9.-]{0,252}[A-Za-z0-9])?$/.test(domain)) {
      return {
        success: false,
        error: `Invalid domain name: ${String(domain)}`
      };
    }
    try {
      const { stdout } = await execAsync(`nslookup ${domain} | grep -A1 "Name:" | tail -1`);
      return {
        success: true,
        result: stdout.trim()
      };
    } catch (error) {
      return {
        success: false,
        error: error.message
      };
    }
  }

  /**
   * Check for DNS leaks by classifying the configured resolvers.
   * A "potential leak" means no VPN-range resolver is configured while a
   * well-known public resolver is.
   */
  static async checkDNSLeaks() {
    const dnsServers = await this.getDNSServers();
    // Check if DNS servers are common VPN DNS servers (or any 10.x address).
    const commonVPNDNS = ['10.2.0.1', '10.2.0.2', '10.8.0.1', '10.0.0.1'];
    const isUsingVPNDNS = dnsServers.some(dns => commonVPNDNS.includes(dns) || dns.startsWith('10.'));
    // Check if DNS servers match common public DNS (potential leak)
    const commonPublicDNS = [
      '8.8.8.8', '8.8.4.4', // Google
      '1.1.1.1', '1.0.0.1', // Cloudflare
      '9.9.9.9', // Quad9
    ];
    const isUsingPublicDNS = dnsServers.some(dns => commonPublicDNS.includes(dns));
    return {
      dnsServers,
      isUsingVPNDNS,
      isUsingPublicDNS,
      potentialLeak: !isUsingVPNDNS && isUsingPublicDNS
    };
  }

  /**
   * Run comprehensive VPN diagnostics: public IP, IP info, DNS servers,
   * tunnel interface, routing table, and DNS-leak classification.
   * Progress is written to the console; errors are collected into
   * `summary.issues` rather than thrown.
   * @returns {Promise<Object>} full results object with a summary section
   */
  static async runFullDiagnostics() {
    console.log('🔍 Running VPN diagnostics...');
    const results = {
      timestamp: new Date().toISOString(),
      publicIP: null,
      ipInfo: null,
      dnsServers: [],
      vpnInterface: null,
      routingTable: [],
      dnsLeakCheck: null,
      summary: {
        vpnActive: false,
        ipLeak: false,
        dnsLeak: false,
        issues: []
      }
    };
    try {
      // Get public IP
      results.publicIP = await this.getPublicIP();
      console.log(`✓ Public IP: ${results.publicIP}`);
      // Get IP info (best-effort; may be null)
      results.ipInfo = await this.getIPInfo(results.publicIP);
      if (results.ipInfo) {
        console.log(`✓ Location: ${results.ipInfo.city}, ${results.ipInfo.country}`);
        console.log(`✓ ISP: ${results.ipInfo.org}`);
      }
      // Get DNS servers
      results.dnsServers = await this.getDNSServers();
      console.log(`✓ DNS Servers: ${results.dnsServers.join(', ')}`);
      // Check VPN interface
      results.vpnInterface = await this.checkVPNInterface();
      if (results.vpnInterface.exists) {
        console.log('✓ VPN interface (tun0) is UP');
        results.summary.vpnActive = true;
      } else {
        console.log('✗ VPN interface (tun0) NOT found');
        results.summary.issues.push('VPN interface not found');
      }
      // Get routing table and verify the default route uses the tunnel
      results.routingTable = await this.getRoutingTable();
      const defaultRoute = results.routingTable.find(route => route.startsWith('default'));
      if (defaultRoute) {
        console.log(`✓ Default route: ${defaultRoute}`);
        if (defaultRoute.includes('tun')) {
          console.log('✓ Traffic is routed through VPN');
        } else {
          console.log('✗ Traffic NOT routed through VPN');
          results.summary.ipLeak = true;
          results.summary.issues.push('Traffic not routed through VPN interface');
        }
      }
      // DNS leak check
      results.dnsLeakCheck = await this.checkDNSLeaks();
      if (results.dnsLeakCheck.isUsingVPNDNS) {
        console.log('✓ Using VPN DNS servers');
      } else if (results.dnsLeakCheck.potentialLeak) {
        console.log('✗ Potential DNS leak detected');
        results.summary.dnsLeak = true;
        results.summary.issues.push('Using non-VPN DNS servers');
      }
      // Overall status
      if (results.summary.vpnActive && !results.summary.ipLeak && !results.summary.dnsLeak) {
        console.log('\n✓ VPN is working correctly - No leaks detected');
      } else {
        console.log('\n✗ VPN issues detected:');
        results.summary.issues.forEach(issue => console.log(`  - ${issue}`));
      }
    } catch (error) {
      console.error('Error during diagnostics:', error);
      results.summary.issues.push(error.message);
    }
    return results;
  }

  /**
   * Quick check if VPN is working (used by status endpoint).
   * Fetches public IP, DNS servers, and tunnel state in parallel.
   * @throws {Error} when any of the three probes fails
   */
  static async quickCheck() {
    try {
      const [publicIP, dnsServers, vpnInterface] = await Promise.all([
        this.getPublicIP(),
        this.getDNSServers(),
        this.checkVPNInterface()
      ]);
      return {
        publicIP,
        dnsServers,
        vpnInterfaceActive: vpnInterface.exists,
        timestamp: new Date().toISOString()
      };
    } catch (error) {
      throw new Error(`Quick check failed: ${error.message}`);
    }
  }
}
// Static-only utility class; exported directly (no instantiation required).
module.exports = VPNDiagnostics;