Initial commit: StreamFlow IPTV platform
This commit is contained in:
commit
73a8ae9ffd
1240 changed files with 278451 additions and 0 deletions
776
backend/routes/auth.js
Normal file
776
backend/routes/auth.js
Normal file
|
|
@ -0,0 +1,776 @@
|
|||
const express = require('express');
|
||||
const router = express.Router();
|
||||
const bcrypt = require('bcryptjs');
|
||||
const jwt = require('jsonwebtoken');
|
||||
const { body, validationResult } = require('express-validator');
|
||||
const { authLimiter } = require('../middleware/rateLimiter');
|
||||
const { authenticate } = require('../middleware/auth');
|
||||
const { db } = require('../database/db');
|
||||
const logger = require('../utils/logger');
|
||||
const { validatePassword, calculatePasswordStrength } = require('../utils/passwordPolicy');
|
||||
const SecurityAuditLogger = require('../utils/securityAudit');
|
||||
const {
|
||||
enforceAccountLockout,
|
||||
recordFailedLogin,
|
||||
clearFailedAttempts,
|
||||
updatePasswordExpiry,
|
||||
createSession,
|
||||
checkPasswordExpiry
|
||||
} = require('../middleware/securityEnhancements');
|
||||
|
||||
const JWT_SECRET = process.env.JWT_SECRET || 'change_this_in_production';
|
||||
const JWT_EXPIRES_IN = '7d';
|
||||
|
||||
// Register - Controlled by DISABLE_SIGNUPS environment variable
//
// POST /register
// Body: { username (3-50 chars, alphanumeric), email, password }
// Success: 201 with a JWT (also set as an HTTP-only 'auth_token' cookie) and
// the created user. Registration is rejected with 403 unless DISABLE_SIGNUPS
// is explicitly set to the string "false".
router.post('/register',
  [
    body('username').trim().isLength({ min: 3, max: 50 }).isAlphanumeric(),
    body('email').isEmail().normalizeEmail(),
    body('password').notEmpty()
  ],
  async (req, res) => {
    // Client identity captured for audit logging on every outcome below.
    const ip = req.ip || req.headers['x-forwarded-for'] || req.connection.remoteAddress;
    const userAgent = req.headers['user-agent'];

    // Check if signups are disabled (default: true)
    const disableSignups = process.env.DISABLE_SIGNUPS !== 'false';

    if (disableSignups) {
      await SecurityAuditLogger.logAuthEvent(null, 'registration_attempt', 'blocked', {
        ip,
        userAgent,
        reason: 'Registration disabled'
      });
      return res.status(403).json({
        error: 'Registration is disabled. Contact an administrator to create your account.'
      });
    }

    // If signups are enabled, proceed with registration (fallback for future flexibility)
    const errors = validationResult(req);
    if (!errors.isEmpty()) {
      return res.status(400).json({ errors: errors.array() });
    }

    const { username, email, password } = req.body;

    try {
      // Validate password against policy
      const passwordValidation = validatePassword(password, username, email);
      if (!passwordValidation.valid) {
        await SecurityAuditLogger.logAuthEvent(null, 'registration_attempt', 'failed', {
          ip,
          userAgent,
          username,
          reason: passwordValidation.errors.join(', ')
        });
        return res.status(400).json({
          error: 'Password does not meet requirements',
          details: passwordValidation.errors,
          strength: calculatePasswordStrength(password)
        });
      }

      const hashedPassword = await bcrypt.hash(password, 10);
      const now = new Date().toISOString();

      db.run(
        'INSERT INTO users (username, email, password, role, must_change_password, password_changed_at, password_expires_at) VALUES (?, ?, ?, ?, ?, ?, ?)',
        [username, email, hashedPassword, 'user', 0, now, null],
        // Classic function expression (not an arrow) so sqlite's `this.lastID`
        // is available inside the callback.
        async function(err) {
          if (err) {
            // UNIQUE constraint -> duplicate username/email; anything else is a
            // generic server-side failure.
            if (err.message.includes('UNIQUE')) {
              await SecurityAuditLogger.logAuthEvent(null, 'registration_attempt', 'failed', {
                ip,
                userAgent,
                username,
                reason: 'Duplicate username or email'
              });
              return res.status(400).json({ error: 'Username or email already exists' });
            }
            logger.error('Registration error:', err);
            return res.status(500).json({ error: 'Registration failed' });
          }

          const userId = this.lastID;

          // Update password expiry
          await updatePasswordExpiry(userId);

          // Log successful registration
          await SecurityAuditLogger.logAuthEvent(userId, 'registration', 'success', {
            ip,
            userAgent,
            username
          });

          const token = jwt.sign(
            { userId, role: 'user' },
            JWT_SECRET,
            { expiresIn: JWT_EXPIRES_IN }
          );

          // CWE-778: Log token issuance
          await SecurityAuditLogger.logTokenIssuance(userId, 'JWT', {
            ip,
            userAgent,
            expiresIn: JWT_EXPIRES_IN,
            purpose: 'registration'
          });

          // Create session
          await createSession(userId, token, req);

          // Set secure HTTP-only cookie
          res.cookie('auth_token', token, {
            httpOnly: true,
            secure: process.env.NODE_ENV === 'production',
            sameSite: 'strict',
            maxAge: 7 * 24 * 60 * 60 * 1000 // 7 days
          });

          res.status(201).json({
            message: 'Registration successful',
            token,
            user: {
              id: userId,
              username,
              email,
              role: 'user'
            }
          });
        }
      );
    } catch (error) {
      // NOTE(review): rejections thrown inside the db.run callback above do
      // not reach this catch (the callback runs after this try has exited) —
      // confirm error handling expectations with the db wrapper.
      logger.error('Registration error:', error);
      await SecurityAuditLogger.logAuthEvent(null, 'registration_attempt', 'error', {
        ip,
        userAgent,
        error: error.message
      });
      res.status(500).json({ error: 'Registration failed' });
    }
  }
);
|
||||
|
||||
// Login with strict rate limiting and account lockout
//
// POST /login
// Body: { username (username OR email), password }
// Flow: lockout check (middleware) -> user lookup -> active check ->
// bcrypt compare -> clear failed attempts -> password-expiry check ->
// 2FA branch (returns a short-lived temp token) or full JWT + session + cookie.
router.post('/login', authLimiter, enforceAccountLockout,
  [
    body('username').trim().notEmpty(),
    body('password').notEmpty()
  ],
  async (req, res) => {
    const ip = req.ip || req.headers['x-forwarded-for'] || req.connection.remoteAddress;
    const userAgent = req.headers['user-agent'];

    const errors = validationResult(req);
    if (!errors.isEmpty()) {
      return res.status(400).json({ errors: errors.array() });
    }

    const { username, password } = req.body;

    try {
      // The same value is matched against both username and email columns.
      db.get(
        'SELECT * FROM users WHERE username = ? OR email = ?',
        [username, username],
        async (err, user) => {
          if (err) {
            logger.error('Login error:', err);
            return res.status(500).json({ error: 'Login failed' });
          }

          // Unknown user and bad password both return the same generic 401
          // so usernames cannot be enumerated from the response text.
          if (!user) {
            await recordFailedLogin(username, ip, userAgent);
            return res.status(401).json({ error: 'Invalid credentials' });
          }

          // Check if user is active
          if (!user.is_active) {
            await SecurityAuditLogger.logLoginFailure(username, 'Account disabled', { ip, userAgent });
            return res.status(403).json({ error: 'Account is disabled. Contact an administrator.' });
          }

          const isValidPassword = await bcrypt.compare(password, user.password);
          if (!isValidPassword) {
            await recordFailedLogin(username, ip, userAgent);
            return res.status(401).json({ error: 'Invalid credentials' });
          }

          // Clear failed attempts on successful password check
          await clearFailedAttempts(user.id);

          // Check password expiry
          const expiryStatus = await checkPasswordExpiry(user.id);
          if (expiryStatus.expired) {
            await SecurityAuditLogger.logLoginFailure(username, 'Password expired', { ip, userAgent });
            return res.status(403).json({
              error: expiryStatus.message,
              passwordExpired: true,
              requirePasswordChange: true
            });
          }

          // Check if 2FA is enabled
          if (user.two_factor_enabled) {
            // Create temporary token for 2FA verification
            const tempToken = jwt.sign(
              { userId: user.id, temp: true, purpose: '2fa' },
              JWT_SECRET,
              { expiresIn: '10m' }
            );

            // CWE-778: Log temp token issuance for 2FA
            await SecurityAuditLogger.logTokenIssuance(user.id, 'TEMP_2FA', {
              ip,
              userAgent,
              expiresIn: '10m',
              purpose: '2fa'
            });

            await SecurityAuditLogger.logAuthEvent(user.id, '2fa_required', 'pending', { ip, userAgent });

            return res.json({
              require2FA: true,
              tempToken,
              userId: user.id,
              passwordWarning: expiryStatus.warning ? expiryStatus.message : null
            });
          }

          const token = jwt.sign(
            { userId: user.id, role: user.role },
            JWT_SECRET,
            { expiresIn: JWT_EXPIRES_IN }
          );

          // CWE-778: Log token issuance
          await SecurityAuditLogger.logTokenIssuance(user.id, 'JWT', {
            ip,
            userAgent,
            expiresIn: JWT_EXPIRES_IN,
            purpose: 'login'
          });

          // Update last login (fire-and-forget: no callback, result unchecked)
          db.run(
            'UPDATE users SET last_login_at = ?, last_login_ip = ? WHERE id = ?',
            [new Date().toISOString(), ip, user.id]
          );

          // Create session
          await createSession(user.id, token, req);

          // Log successful login
          await SecurityAuditLogger.logLoginSuccess(user.id, { ip, userAgent });

          // Set secure HTTP-only cookie
          res.cookie('auth_token', token, {
            httpOnly: true,
            secure: process.env.NODE_ENV === 'production',
            sameSite: 'strict',
            maxAge: 7 * 24 * 60 * 60 * 1000 // 7 days
          });

          res.json({
            message: 'Login successful',
            token,
            user: {
              id: user.id,
              username: user.username,
              email: user.email,
              role: user.role,
              must_change_password: user.must_change_password === 1
            },
            passwordWarning: expiryStatus.warning ? expiryStatus.message : null
          });
        }
      );
    } catch (error) {
      logger.error('Login error:', error);
      await SecurityAuditLogger.logAuthEvent(null, 'login_attempt', 'error', {
        ip,
        userAgent,
        error: error.message
      });
      res.status(500).json({ error: 'Login failed' });
    }
  }
);
|
||||
|
||||
// Verify 2FA and complete login
//
// POST /verify-2fa
// Body: { tempToken (10-minute JWT issued by /login), code (TOTP or backup code) }
// First tries the code as a one-time backup code; if no unused backup code
// matches, falls through to TOTP verification. On success issues the full JWT,
// creates a session, and sets the HTTP-only cookie — mirroring /login.
router.post('/verify-2fa', authLimiter,
  [
    body('tempToken').notEmpty(),
    body('code').notEmpty()
  ],
  async (req, res) => {
    const ip = req.ip || req.headers['x-forwarded-for'] || req.connection.remoteAddress;
    const userAgent = req.headers['user-agent'];

    const errors = validationResult(req);
    if (!errors.isEmpty()) {
      return res.status(400).json({ errors: errors.array() });
    }

    const { tempToken, code } = req.body;

    try {
      // Verify temp token — must be the temp/'2fa' token minted by /login.
      const decoded = jwt.verify(tempToken, JWT_SECRET);

      if (!decoded.temp || decoded.purpose !== '2fa') {
        return res.status(401).json({ error: 'Invalid token' });
      }

      const speakeasy = require('speakeasy');

      db.get(
        'SELECT * FROM users WHERE id = ?',
        [decoded.userId],
        async (err, user) => {
          if (err || !user) {
            logger.error('2FA verify - user not found:', err);
            return res.status(401).json({ error: 'Invalid token' });
          }

          if (!user.two_factor_enabled) {
            return res.status(400).json({ error: '2FA not enabled for this user' });
          }

          // Check if it's a backup code (codes are stored uppercased; each
          // may be used only once — used = 0 filters spent codes).
          db.get(
            'SELECT id FROM two_factor_backup_codes WHERE user_id = ? AND code = ? AND used = 0',
            [user.id, code.toUpperCase()],
            // NOTE(review): a lookup error (err) is not handled here — the code
            // silently falls through to the TOTP path below. Confirm intended.
            async (err, backupCode) => {
              if (backupCode) {
                // Mark backup code as used
                db.run(
                  'UPDATE two_factor_backup_codes SET used = 1, used_at = CURRENT_TIMESTAMP WHERE id = ?',
                  [backupCode.id]
                );

                logger.info(`Backup code used for user ${user.id}`);

                // Log 2FA success with backup code
                await SecurityAuditLogger.log2FAEvent(user.id, 'backup_code_used', 'success', { ip, userAgent });

                // Generate full token
                const token = jwt.sign(
                  { userId: user.id, role: user.role },
                  JWT_SECRET,
                  { expiresIn: JWT_EXPIRES_IN }
                );

                // CWE-778: Log token issuance after 2FA backup code
                await SecurityAuditLogger.logTokenIssuance(user.id, 'JWT', {
                  ip,
                  userAgent,
                  expiresIn: JWT_EXPIRES_IN,
                  purpose: '2fa_backup_verification'
                });

                // Update last login
                db.run(
                  'UPDATE users SET last_login_at = ?, last_login_ip = ? WHERE id = ?',
                  [new Date().toISOString(), ip, user.id]
                );

                // Create session
                await createSession(user.id, token, req);

                // Log successful login
                await SecurityAuditLogger.logLoginSuccess(user.id, { ip, userAgent, method: '2fa_backup' });

                // Set secure HTTP-only cookie
                res.cookie('auth_token', token, {
                  httpOnly: true,
                  secure: process.env.NODE_ENV === 'production',
                  sameSite: 'strict',
                  maxAge: 7 * 24 * 60 * 60 * 1000 // 7 days
                });

                return res.json({
                  message: 'Login successful with backup code',
                  token,
                  user: {
                    id: user.id,
                    username: user.username,
                    email: user.email,
                    role: user.role,
                    must_change_password: user.must_change_password === 1
                  }
                });
              }

              // Verify TOTP code (window: 2 tolerates +/- 2 time steps of drift)
              const verified = speakeasy.totp.verify({
                secret: user.two_factor_secret,
                encoding: 'base32',
                token: code,
                window: 2
              });

              if (!verified) {
                await SecurityAuditLogger.log2FAEvent(user.id, 'totp_verification', 'failed', {
                  ip,
                  userAgent,
                  reason: 'Invalid code'
                });
                return res.status(400).json({ error: 'Invalid 2FA code' });
              }

              // Log 2FA success
              await SecurityAuditLogger.log2FAEvent(user.id, 'totp_verification', 'success', { ip, userAgent });

              // Generate full token
              const token = jwt.sign(
                { userId: user.id, role: user.role },
                JWT_SECRET,
                { expiresIn: JWT_EXPIRES_IN }
              );

              // CWE-778: Log token issuance after TOTP 2FA
              await SecurityAuditLogger.logTokenIssuance(user.id, 'JWT', {
                ip,
                userAgent,
                expiresIn: JWT_EXPIRES_IN,
                purpose: '2fa_totp_verification'
              });

              // Update last login
              db.run(
                'UPDATE users SET last_login_at = ?, last_login_ip = ? WHERE id = ?',
                [new Date().toISOString(), ip, user.id]
              );

              // Create session
              await createSession(user.id, token, req);

              // Log successful login
              await SecurityAuditLogger.logLoginSuccess(user.id, { ip, userAgent, method: '2fa_totp' });

              // Set secure HTTP-only cookie
              res.cookie('auth_token', token, {
                httpOnly: true,
                secure: process.env.NODE_ENV === 'production',
                sameSite: 'strict',
                maxAge: 7 * 24 * 60 * 60 * 1000 // 7 days
              });

              res.json({
                message: 'Login successful',
                token,
                user: {
                  id: user.id,
                  username: user.username,
                  email: user.email,
                  role: user.role,
                  must_change_password: user.must_change_password === 1
                }
              });
            }
          );
        }
      );
    } catch (error) {
      logger.error('2FA verify error:', error);
      // jwt.verify throws TokenExpiredError once the 10-minute window lapses.
      if (error.name === 'TokenExpiredError') {
        return res.status(401).json({ error: '2FA session expired. Please login again' });
      }
      res.status(500).json({ error: 'Failed to verify 2FA' });
    }
  }
);
|
||||
|
||||
// Change password with enhanced security
//
// POST /change-password
// Auth: Bearer token in the Authorization header (verified manually here,
// not via the authenticate middleware).
// Body: { currentPassword, newPassword }
// Enforces: correct current password, password policy, and no reuse of the
// last 5 passwords (checked against bcrypt hashes in password_history).
router.post('/change-password',
  [
    body('currentPassword').notEmpty(),
    body('newPassword').notEmpty()
  ],
  async (req, res) => {
    const token = req.headers.authorization?.split(' ')[1];
    const ip = req.ip || req.headers['x-forwarded-for'] || req.connection.remoteAddress;
    const userAgent = req.headers['user-agent'];

    if (!token) {
      return res.status(401).json({ error: 'No token provided' });
    }

    try {
      const decoded = jwt.verify(token, JWT_SECRET);
      const { currentPassword, newPassword } = req.body;

      db.get(
        'SELECT * FROM users WHERE id = ?',
        [decoded.userId],
        async (err, user) => {
          if (err || !user) {
            return res.status(404).json({ error: 'User not found' });
          }

          const isValidPassword = await bcrypt.compare(currentPassword, user.password);
          if (!isValidPassword) {
            await SecurityAuditLogger.logPasswordChange(user.id, 'failed', {
              ip,
              userAgent,
              reason: 'Incorrect current password'
            });
            return res.status(401).json({ error: 'Current password is incorrect' });
          }

          // Validate new password
          const passwordValidation = validatePassword(newPassword, user.username, user.email);
          if (!passwordValidation.valid) {
            await SecurityAuditLogger.logPasswordChange(user.id, 'failed', {
              ip,
              userAgent,
              reason: passwordValidation.errors.join(', ')
            });
            return res.status(400).json({
              error: 'New password does not meet requirements',
              details: passwordValidation.errors,
              strength: calculatePasswordStrength(newPassword)
            });
          }

          // Check password history (prevent reuse of last 5 passwords)
          db.all(
            'SELECT password_hash FROM password_history WHERE user_id = ? ORDER BY changed_at DESC LIMIT 5',
            [user.id],
            async (err, history) => {
              if (err) {
                logger.error('Password history check error:', err);
                return res.status(500).json({ error: 'Failed to change password' });
              }

              // Check if new password matches any recent password
              for (const record of history || []) {
                const matches = await bcrypt.compare(newPassword, record.password_hash);
                if (matches) {
                  await SecurityAuditLogger.logPasswordChange(user.id, 'failed', {
                    ip,
                    userAgent,
                    reason: 'Password reused from history'
                  });
                  return res.status(400).json({
                    error: 'Cannot reuse any of your last 5 passwords'
                  });
                }
              }

              const hashedPassword = await bcrypt.hash(newPassword, 10);

              // Save old password to history (fire-and-forget insert)
              db.run(
                'INSERT INTO password_history (user_id, password_hash) VALUES (?, ?)',
                [user.id, user.password]
              );

              // Update password
              db.run(
                'UPDATE users SET password = ?, must_change_password = 0, updated_at = CURRENT_TIMESTAMP WHERE id = ?',
                [hashedPassword, user.id],
                async (err) => {
                  if (err) {
                    logger.error('Password change error:', err);
                    return res.status(500).json({ error: 'Failed to change password' });
                  }

                  // Update password expiry
                  await updatePasswordExpiry(user.id);

                  // CWE-778: Revoke all tokens on password change (security best practice)
                  // NOTE(review): only the revocation *event* is logged here —
                  // no session rows are visibly deleted in this handler.
                  // Confirm actual revocation happens elsewhere.
                  await SecurityAuditLogger.logTokenRevocation(user.id, 'password_change', {
                    ip,
                    userAgent
                  });

                  // Log successful password change
                  await SecurityAuditLogger.logPasswordChange(user.id, 'success', { ip, userAgent });

                  res.json({ message: 'Password changed successfully' });
                }
              );
            }
          );
        }
      );
    } catch (error) {
      logger.error('Password change error:', error);
      res.status(401).json({ error: 'Invalid token' });
    }
  }
);
|
||||
|
||||
// Verify token
//
// GET /verify
// Auth: Bearer token in the Authorization header.
// Returns the current user (minus sensitive columns) plus a password-expiry
// warning, or 401/403 for invalid tokens / disabled accounts.
router.get('/verify', async (req, res) => {
  const token = req.headers.authorization?.split(' ')[1];

  if (!token) {
    return res.status(401).json({ error: 'No token provided' });
  }

  try {
    const decoded = jwt.verify(token, JWT_SECRET);
    db.get(
      'SELECT id, username, email, role, must_change_password, is_active FROM users WHERE id = ?',
      [decoded.userId],
      async (err, user) => {
        if (err || !user) {
          return res.status(401).json({ error: 'Invalid token' });
        }

        if (!user.is_active) {
          return res.status(403).json({ error: 'Account is disabled' });
        }

        // Check password expiry for warning
        const expiryStatus = await checkPasswordExpiry(user.id);

        res.json({
          valid: true,
          user: {
            ...user,
            // Normalize sqlite's 0/1 integer to a boolean for clients.
            must_change_password: user.must_change_password === 1
          },
          passwordWarning: expiryStatus.warning ? expiryStatus.message : null,
          daysUntilExpiry: expiryStatus.daysRemaining || null
        });
      }
    );
  } catch (error) {
    res.status(401).json({ error: 'Invalid token' });
  }
});
|
||||
|
||||
// Check password strength
//
// POST /check-password-strength
// Body: { password (required), username?, email? }
// Stateless helper for the UI: runs the candidate password through the
// policy validator and the strength calculator, and reports both.
router.post('/check-password-strength',
  [body('password').notEmpty()],
  (req, res) => {
    const { password, username, email } = req.body;

    const policyResult = validatePassword(password, username, email);
    const strengthReport = calculatePasswordStrength(password);

    const payload = {
      valid: policyResult.valid,
      errors: policyResult.errors,
      strength: {
        score: strengthReport.score,
        level: strengthReport.level,
        feedback: strengthReport.feedback
      }
    };

    res.json(payload);
  }
);
|
||||
|
||||
// Get security status for current user
//
// GET /security-status
// Auth: Bearer token in the Authorization header.
// Aggregates 2FA state, password age/expiry, last login, active session
// count, failed-login count, and the 10 most recent security events.
router.get('/security-status', async (req, res) => {
  const token = req.headers.authorization?.split(' ')[1];

  if (!token) {
    return res.status(401).json({ error: 'No token provided' });
  }

  try {
    const decoded = jwt.verify(token, JWT_SECRET);

    db.get(
      'SELECT id, username, two_factor_enabled, password_changed_at, password_expires_at, last_login_at, last_login_ip, failed_login_attempts FROM users WHERE id = ?',
      [decoded.userId],
      async (err, user) => {
        if (err || !user) {
          return res.status(401).json({ error: 'Invalid token' });
        }

        // Get active sessions count (promisified so it can be awaited here)
        const sessions = await new Promise((resolve, reject) => {
          db.all(
            'SELECT COUNT(*) as count, MAX(last_activity) as last_activity FROM active_sessions WHERE user_id = ? AND expires_at > ?',
            [user.id, new Date().toISOString()],
            (err, rows) => err ? reject(err) : resolve(rows[0])
          );
        });

        // Get recent security events (best-effort: failures are logged,
        // the response still succeeds with an empty list)
        let recentEvents = [];
        try {
          recentEvents = await SecurityAuditLogger.getUserSecurityEvents(user.id, 10);
        } catch (eventErr) {
          logger.error('Error fetching security events:', eventErr);
        }

        // Check password expiry
        const expiryStatus = await checkPasswordExpiry(user.id);

        res.json({
          twoFactorEnabled: user.two_factor_enabled === 1,
          // Whole days since the password was last changed, or null if unknown.
          passwordAge: user.password_changed_at ?
            Math.floor((Date.now() - new Date(user.password_changed_at)) / (24 * 60 * 60 * 1000)) : null,
          passwordExpiry: expiryStatus,
          lastLogin: {
            timestamp: user.last_login_at,
            ip: user.last_login_ip
          },
          activeSessions: sessions.count || 0,
          lastActivity: sessions.last_activity,
          failedLoginAttempts: user.failed_login_attempts || 0,
          recentEvents: Array.isArray(recentEvents) ? recentEvents.map(e => ({
            type: e.event_type,
            status: e.status,
            timestamp: e.created_at
          })) : []
        });
      }
    );
  } catch (error) {
    logger.error('Security status error:', error);
    res.status(401).json({ error: 'Invalid token' });
  }
});
|
||||
|
||||
// Logout endpoint - invalidate session and clear cookie
//
// POST /logout
// Auth: authenticate middleware (req.user populated).
// Deletes the matching active_sessions row, audit-logs the revocation and
// logout, and clears the 'auth_token' HTTP-only cookie.
router.post('/logout', authenticate, async (req, res) => {
  const token = req.headers.authorization?.replace('Bearer ', '');
  const ip = req.ip || req.headers['x-forwarded-for'] || req.connection.remoteAddress;
  const userAgent = req.headers['user-agent'];

  try {
    // Delete the session from database
    await new Promise((resolve, reject) => {
      db.run(
        'DELETE FROM active_sessions WHERE session_token = ?',
        [token],
        (err) => err ? reject(err) : resolve()
      );
    });

    // CWE-778: Log token revocation on logout
    await SecurityAuditLogger.logTokenRevocation(req.user.userId, 'user_logout', {
      ip,
      userAgent
    });

    // Log logout event
    await SecurityAuditLogger.logAuthEvent(req.user.userId, 'logout', 'success', {
      ip,
      userAgent
    });

    // Clear the HTTP-only cookie (options must match those used when setting
    // it, or some browsers will not remove the cookie)
    res.clearCookie('auth_token', {
      httpOnly: true,
      secure: process.env.NODE_ENV === 'production',
      sameSite: 'strict'
    });

    res.json({ message: 'Logout successful' });
  } catch (error) {
    logger.error('Logout error:', error);
    res.status(500).json({ error: 'Logout failed' });
  }
});

module.exports = router;
|
||||
471
backend/routes/backup.js
Normal file
471
backend/routes/backup.js
Normal file
|
|
@ -0,0 +1,471 @@
|
|||
const express = require('express');
|
||||
const router = express.Router();
|
||||
const { db } = require('../database/db');
|
||||
const { authenticate } = require('../middleware/auth');
|
||||
const { backupLimiter, heavyLimiter, readLimiter } = require('../middleware/rateLimiter');
|
||||
const fs = require('fs').promises;
|
||||
const path = require('path');
|
||||
const archiver = require('archiver');
|
||||
const unzipper = require('unzipper');
|
||||
const { promisify } = require('util');
|
||||
|
||||
const dbAll = promisify(db.all.bind(db));
|
||||
const dbRun = promisify(db.run.bind(db));
|
||||
|
||||
const BACKUP_DIR = path.join(__dirname, '../../data/backups');
|
||||
|
||||
// Ensure backup directory exists (best-effort: a failure is logged, not thrown,
// so callers proceed and surface the error at first actual disk access).
const ensureBackupDir = async () =>
  fs.mkdir(BACKUP_DIR, { recursive: true }).catch((mkdirError) => {
    console.error('Failed to create backup directory:', mkdirError);
  });
|
||||
|
||||
/**
 * GET /api/backup/list
 * List all backups for the current user
 *
 * Scans BACKUP_DIR for .zip files whose name matches either the current
 * (StreamFlow_Backup_{username}_{ts}.zip) or legacy (backup_user{id}_{ts}.zip)
 * naming scheme, and returns { filename, size, created, timestamp } entries
 * sorted newest-first.
 */
router.get('/list', authenticate, readLimiter, async (req, res) => {
  try {
    await ensureBackupDir();
    const userId = req.user.id;
    const username = req.user.username;

    const files = await fs.readdir(BACKUP_DIR);
    const userBackups = files.filter(f =>
      (f.startsWith(`StreamFlow_Backup_${username}_`) || f.startsWith(`backup_user${userId}_`)) && f.endsWith('.zip')
    );

    const backupList = await Promise.all(
      userBackups.map(async (filename) => {
        const filePath = path.join(BACKUP_DIR, filename);
        const stats = await fs.stat(filePath);

        // Parse filename: StreamFlow_Backup_{username}_{timestamp}.zip or old format backup_user{id}_{timestamp}.zip
        let match = filename.match(/StreamFlow_Backup_[^_]+_(\d+)\.zip/);
        if (!match) {
          match = filename.match(/backup_user\d+_(\d+)\.zip/);
        }
        // Parsed but currently unused — mtime below is preferred.
        const filenameTimestamp = match ? parseInt(match[1]) : 0;

        // Use file's actual mtime (modification time) which is more reliable
        const actualTimestamp = stats.mtimeMs;

        return {
          filename,
          size: stats.size,
          created: new Date(actualTimestamp).toISOString(),
          timestamp: actualTimestamp
        };
      })
    );

    // Sort by timestamp descending (newest first)
    backupList.sort((a, b) => b.timestamp - a.timestamp);

    res.json(backupList);
  } catch (error) {
    console.error('Failed to list backups:', error);
    res.status(500).json({ error: 'Failed to list backups' });
  }
});
|
||||
|
||||
/**
 * POST /api/backup/create
 * Create a new backup of user data
 *
 * Streams a zip (user record minus sensitive columns, playlists, channels,
 * favorites, logo files, m3u metadata + files, and a backup_info manifest)
 * into BACKUP_DIR. The JSON response is sent from the write stream's 'close'
 * event once the archive is fully flushed to disk.
 */
router.post('/create', authenticate, backupLimiter, async (req, res) => {
  try {
    await ensureBackupDir();
    const userId = req.user.id;
    const username = req.user.username;
    const timestamp = Date.now();
    const filename = `StreamFlow_Backup_${username}_${timestamp}.zip`;
    const backupPath = path.join(BACKUP_DIR, filename);

    // Create write stream for zip
    const output = require('fs').createWriteStream(backupPath);
    const archive = archiver('zip', { zlib: { level: 9 } });

    output.on('close', () => {
      res.json({
        success: true,
        filename,
        size: archive.pointer(),
        created: new Date(timestamp).toISOString()
      });
    });

    // BUGFIX: this handler previously did `throw err`. An 'error' event fires
    // asynchronously, so the throw escaped the surrounding try/catch and became
    // an unhandled exception (crashing the process) while the client request
    // hung. Respond with a 500 instead, guarded in case a reply was already sent.
    archive.on('error', (err) => {
      console.error('Failed to create backup:', err);
      if (!res.headersSent) {
        res.status(500).json({ error: 'Failed to create backup' });
      }
    });

    archive.pipe(output);

    // Export user data - CWE-532: Exclude password and sensitive fields
    const userData = await dbAll(
      `SELECT id, username, email, role, two_factor_enabled, is_active,
              created_at, updated_at, last_login_at, last_login_ip,
              password_changed_at, password_expires_at
       FROM users WHERE id = ?`,
      [userId]
    );
    archive.append(JSON.stringify(userData, null, 2), { name: 'user.json' });

    // Export playlists
    const playlists = await dbAll('SELECT * FROM playlists WHERE user_id = ?', [userId]);
    archive.append(JSON.stringify(playlists, null, 2), { name: 'playlists.json' });

    // Export channels
    const channels = await dbAll(
      `SELECT DISTINCT c.* FROM channels c
       JOIN playlists p ON c.playlist_id = p.id
       WHERE p.user_id = ?`,
      [userId]
    );
    archive.append(JSON.stringify(channels, null, 2), { name: 'channels.json' });

    // Export favorites
    const favorites = await dbAll('SELECT * FROM favorites WHERE user_id = ?', [userId]);
    archive.append(JSON.stringify(favorites, null, 2), { name: 'favorites.json' });

    // Export custom channel logos
    const customLogos = await dbAll(
      `SELECT c.id, c.name, c.custom_logo FROM channels c
       JOIN playlists p ON c.playlist_id = p.id
       WHERE p.user_id = ? AND c.custom_logo IS NOT NULL`,
      [userId]
    );

    // Add custom logo files (missing files are skipped silently)
    for (const channel of customLogos) {
      if (channel.custom_logo) {
        const logoPath = path.join(__dirname, '../../data/logos', channel.custom_logo);
        try {
          const logoExists = await fs.access(logoPath).then(() => true).catch(() => false);
          if (logoExists) {
            archive.file(logoPath, { name: `logos/${channel.custom_logo}` });
          }
        } catch (err) {
          console.error(`Failed to add logo ${channel.custom_logo}:`, err);
        }
      }
    }

    // Export m3u files
    const m3uFiles = await dbAll('SELECT * FROM m3u_files WHERE user_id = ?', [userId]);
    archive.append(JSON.stringify(m3uFiles, null, 2), { name: 'm3u_files.json' });

    // Add actual m3u files (missing files are skipped silently)
    for (const m3uFile of m3uFiles) {
      if (m3uFile.file_path) {
        const m3uPath = path.join(__dirname, '../../data/playlists', m3uFile.file_path);
        try {
          const m3uExists = await fs.access(m3uPath).then(() => true).catch(() => false);
          if (m3uExists) {
            archive.file(m3uPath, { name: `m3u_files/${m3uFile.file_path}` });
          }
        } catch (err) {
          console.error(`Failed to add m3u file ${m3uFile.file_path}:`, err);
        }
      }
    }

    // Export user settings (stream settings, preferences)
    const settings = {
      created: new Date(timestamp).toISOString(),
      version: '1.0',
      userId: userId,
      username: req.user.username
    };
    archive.append(JSON.stringify(settings, null, 2), { name: 'backup_info.json' });

    // Finalize the archive; the response itself is sent by output's 'close'.
    await archive.finalize();

  } catch (error) {
    console.error('Failed to create backup:', error);
    // Guarded: the 'close' handler may already have replied.
    if (!res.headersSent) {
      res.status(500).json({ error: 'Failed to create backup' });
    }
  }
});
|
||||
|
||||
/**
 * GET /api/backup/download/:filename
 * Download a backup file belonging to the authenticated user.
 *
 * Fix: the raw :filename parameter was joined into BACKUP_DIR, so a value
 * containing ".." could escape the backup directory despite the prefix
 * check (path traversal). path.basename strips any directory components.
 */
router.get('/download/:filename', authenticate, heavyLimiter, async (req, res) => {
  try {
    const userId = req.user.id;
    const username = req.user.username;
    // Strip directory components so "../" cannot escape BACKUP_DIR.
    const filename = path.basename(req.params.filename);

    // Validate filename belongs to user (support both old and new format)
    const isOldFormat = filename.startsWith(`backup_user${userId}_`);
    const isNewFormat = filename.startsWith(`StreamFlow_Backup_${username}_`);
    if ((!isOldFormat && !isNewFormat) || !filename.endsWith('.zip')) {
      return res.status(403).json({ error: 'Unauthorized' });
    }

    const filePath = path.join(BACKUP_DIR, filename);

    // Check if file exists before handing it to res.download
    try {
      await fs.access(filePath);
    } catch (err) {
      return res.status(404).json({ error: 'Backup not found' });
    }

    res.download(filePath, filename);
  } catch (error) {
    console.error('Failed to download backup:', error);
    res.status(500).json({ error: 'Failed to download backup' });
  }
});
|
||||
|
||||
/**
 * DELETE /api/backup/:filename
 * Delete a backup file belonging to the authenticated user.
 *
 * Fixes:
 *  - path traversal: the raw :filename parameter was joined into
 *    BACKUP_DIR; path.basename strips any directory components.
 *  - a missing file made fs.unlink throw and returned 500; it now
 *    returns 404 like the download route.
 */
router.delete('/:filename', authenticate, readLimiter, async (req, res) => {
  try {
    const userId = req.user.id;
    const username = req.user.username;
    // Strip directory components so "../" cannot escape BACKUP_DIR.
    const filename = path.basename(req.params.filename);

    // Validate filename belongs to user (support both old and new format)
    const isOldFormat = filename.startsWith(`backup_user${userId}_`);
    const isNewFormat = filename.startsWith(`StreamFlow_Backup_${username}_`);
    if ((!isOldFormat && !isNewFormat) || !filename.endsWith('.zip')) {
      return res.status(403).json({ error: 'Unauthorized' });
    }

    const filePath = path.join(BACKUP_DIR, filename);
    try {
      await fs.unlink(filePath);
    } catch (err) {
      // A backup that is already gone is a 404, not a server error.
      if (err.code === 'ENOENT') {
        return res.status(404).json({ error: 'Backup not found' });
      }
      throw err;
    }

    res.json({ success: true, message: 'Backup deleted' });
  } catch (error) {
    console.error('Failed to delete backup:', error);
    res.status(500).json({ error: 'Failed to delete backup' });
  }
});
|
||||
|
||||
/**
 * POST /api/backup/upload
 * Upload a backup archive for later restoration.
 *
 * Fix: the restore endpoint only accepts .zip archives, so reject any
 * other upload up front instead of storing a file that can never be
 * restored. The stored name is always server-generated, so the client
 * filename is used only for this validation.
 */
router.post('/upload', authenticate, heavyLimiter, async (req, res) => {
  try {
    await ensureBackupDir();

    if (!req.files || !req.files.backup) {
      return res.status(400).json({ error: 'No backup file provided' });
    }

    const username = req.user.username;
    const backupFile = req.files.backup;

    // Only .zip archives are restorable — reject anything else early.
    if (typeof backupFile.name !== 'string' || !backupFile.name.toLowerCase().endsWith('.zip')) {
      return res.status(400).json({ error: 'Backup must be a .zip archive' });
    }

    const timestamp = Date.now();
    const filename = `StreamFlow_Backup_${username}_${timestamp}.zip`;
    const uploadPath = path.join(BACKUP_DIR, filename);

    // mv() is provided by the upload middleware (presumably
    // express-fileupload — confirm against server setup).
    await backupFile.mv(uploadPath);

    const stats = await fs.stat(uploadPath);

    res.json({
      success: true,
      filename,
      size: stats.size,
      created: new Date(timestamp).toISOString()
    });
  } catch (error) {
    console.error('Failed to upload backup:', error);
    res.status(500).json({ error: 'Failed to upload backup' });
  }
});
|
||||
|
||||
/**
 * POST /api/backup/restore/:filename
 * Restore data from a backup file.
 *
 * Flow: validate ownership of the archive -> extract it into a temp dir
 * under BACKUP_DIR -> replay playlists, channels, logos, favorites and
 * M3U files into the current user's account (INSERT OR IGNORE, so
 * existing rows are kept) -> remove the temp dir -> report counts.
 *
 * NOTE(review): the raw :filename is joined into BACKUP_DIR; a value
 * containing ".." passes the prefix/suffix checks and can reference a
 * zip outside the backup directory — consider path.basename here.
 * NOTE(review): archive entry names and m3u file_path values come from
 * the (user-supplied) zip and are joined into target paths; verify
 * unzipper and these joins are safe against zip-slip / "..".
 */
router.post('/restore/:filename', authenticate, backupLimiter, async (req, res) => {
  try {
    const userId = req.user.id;
    const username = req.user.username;
    const { filename } = req.params;

    // Validate filename belongs to user (support both old and new format)
    const isOldFormat = filename.startsWith(`backup_user${userId}_`);
    const isNewFormat = filename.startsWith(`StreamFlow_Backup_${username}_`);
    if ((!isOldFormat && !isNewFormat) || !filename.endsWith('.zip')) {
      return res.status(403).json({ error: 'Unauthorized' });
    }

    const backupPath = path.join(BACKUP_DIR, filename);
    // Timestamped per-user temp dir; removed again at the end.
    const extractPath = path.join(BACKUP_DIR, `extract_${userId}_${Date.now()}`);

    // Check if backup exists
    try {
      await fs.access(backupPath);
    } catch (err) {
      return res.status(404).json({ error: 'Backup not found' });
    }

    // Extract backup
    await fs.mkdir(extractPath, { recursive: true });

    // fs here is the promise API; the callback API is required inline
    // because unzipper consumes a classic readable stream.
    await require('fs')
      .createReadStream(backupPath)
      .pipe(unzipper.Extract({ path: extractPath }))
      .promise();

    // Read backup data — these three files are mandatory; a malformed
    // archive throws here and is reported as a 500 by the outer catch.
    const playlistsData = JSON.parse(await fs.readFile(path.join(extractPath, 'playlists.json'), 'utf8'));
    const channelsData = JSON.parse(await fs.readFile(path.join(extractPath, 'channels.json'), 'utf8'));
    const favoritesData = JSON.parse(await fs.readFile(path.join(extractPath, 'favorites.json'), 'utf8'));

    // Start transaction-like operations (each step is best-effort;
    // per-row failures are logged and skipped, not rolled back)
    let restoredPlaylists = 0;
    let restoredChannels = 0;
    let restoredFavorites = 0;

    // Restore playlists — always into the CURRENT user's account,
    // regardless of the user_id stored in the backup.
    for (const playlist of playlistsData) {
      try {
        await dbRun(
          `INSERT OR IGNORE INTO playlists (name, url, username, password, user_id, created_at)
           VALUES (?, ?, ?, ?, ?, ?)`,
          [playlist.name, playlist.url, playlist.username, playlist.password, userId, playlist.created_at]
        );
        restoredPlaylists++;
      } catch (err) {
        console.error('Failed to restore playlist:', err);
      }
    }

    // Get new playlist IDs mapping: old backup id -> freshly inserted id,
    // matched by playlist name (assumes names are unique per user —
    // duplicates would map to the first match).
    const newPlaylists = await dbAll('SELECT id, name FROM playlists WHERE user_id = ?', [userId]);
    const playlistMap = {};
    playlistsData.forEach((oldP, idx) => {
      const newP = newPlaylists.find(p => p.name === oldP.name);
      if (newP) playlistMap[oldP.id] = newP.id;
    });

    // Restore channels — skipped when their playlist could not be mapped.
    for (const channel of channelsData) {
      const newPlaylistId = playlistMap[channel.playlist_id];
      if (!newPlaylistId) continue;

      try {
        await dbRun(
          `INSERT OR IGNORE INTO channels
           (name, url, logo, group_name, playlist_id, custom_logo, is_radio, tvg_id, tvg_name)
           VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?)`,
          [
            channel.name, channel.url, channel.logo, channel.group_name,
            newPlaylistId, channel.custom_logo, channel.is_radio,
            channel.tvg_id, channel.tvg_name
          ]
        );
        restoredChannels++;
      } catch (err) {
        console.error('Failed to restore channel:', err);
      }
    }

    // Restore custom logos (copied wholesale into the shared logos dir)
    const logosDir = path.join(extractPath, 'logos');
    try {
      const logoFiles = await fs.readdir(logosDir);
      const targetLogosDir = path.join(__dirname, '../../data/logos');
      await fs.mkdir(targetLogosDir, { recursive: true });

      for (const logoFile of logoFiles) {
        const src = path.join(logosDir, logoFile);
        const dest = path.join(targetLogosDir, logoFile);
        await fs.copyFile(src, dest);
      }
    } catch (err) {
      // Logos directory might not exist in older backups
      console.log('No custom logos to restore');
    }

    // Restore favorites (map to new channel IDs). Channels are matched
    // by (name, url) since ids changed on re-insert.
    const newChannels = await dbAll(
      `SELECT c.id, c.name, c.url FROM channels c
       JOIN playlists p ON c.playlist_id = p.id
       WHERE p.user_id = ?`,
      [userId]
    );

    for (const fav of favoritesData) {
      const oldChannel = channelsData.find(c => c.id === fav.channel_id);
      if (!oldChannel) continue;

      const newChannel = newChannels.find(c => c.name === oldChannel.name && c.url === oldChannel.url);
      if (!newChannel) continue;

      try {
        await dbRun(
          `INSERT OR IGNORE INTO favorites (user_id, channel_id, custom_logo, is_radio)
           VALUES (?, ?, ?, ?)`,
          [userId, newChannel.id, fav.custom_logo, fav.is_radio]
        );
        restoredFavorites++;
      } catch (err) {
        console.error('Failed to restore favorite:', err);
      }
    }

    // Restore M3U files (optional section — older backups lack it, in
    // which case the readFile throws and we fall through to the log)
    try {
      const m3uFilesData = JSON.parse(await fs.readFile(path.join(extractPath, 'm3u_files.json'), 'utf8'));
      const m3uFilesDir = path.join(extractPath, 'm3u_files');
      const targetM3uDir = path.join(__dirname, '../../data/playlists');
      await fs.mkdir(targetM3uDir, { recursive: true });

      for (const m3uFile of m3uFilesData) {
        try {
          // Copy physical file
          if (m3uFile.file_path) {
            const src = path.join(m3uFilesDir, m3uFile.file_path);
            const dest = path.join(targetM3uDir, m3uFile.file_path);
            await fs.copyFile(src, dest);
          }

          // Insert database record
          await dbRun(
            `INSERT OR IGNORE INTO m3u_files (user_id, name, file_path, uploaded_at)
             VALUES (?, ?, ?, ?)`,
            [userId, m3uFile.name, m3uFile.file_path, m3uFile.uploaded_at]
          );
        } catch (err) {
          console.error('Failed to restore m3u file:', err);
        }
      }
    } catch (err) {
      console.log('No M3U files to restore');
    }

    // Cleanup extraction directory
    await fs.rm(extractPath, { recursive: true, force: true });

    res.json({
      success: true,
      message: 'Backup restored successfully',
      stats: {
        playlists: restoredPlaylists,
        channels: restoredChannels,
        favorites: restoredFavorites
      }
    });

  } catch (error) {
    console.error('Failed to restore backup:', error);
    res.status(500).json({ error: 'Failed to restore backup' });
  }
});
|
||||
|
||||
module.exports = router;
|
||||
312
backend/routes/channels.js
Normal file
312
backend/routes/channels.js
Normal file
|
|
@ -0,0 +1,312 @@
|
|||
const express = require('express');
|
||||
const router = express.Router();
|
||||
const multer = require('multer');
|
||||
const path = require('path');
|
||||
const fs = require('fs');
|
||||
const { authenticate } = require('../middleware/auth');
|
||||
const { modifyLimiter, readLimiter } = require('../middleware/rateLimiter');
|
||||
const { db } = require('../database/db');
|
||||
const logger = require('../utils/logger');
|
||||
const {
|
||||
validateIdParam,
|
||||
validateChannelUpdate,
|
||||
validatePagination,
|
||||
validateSearch
|
||||
} = require('../middleware/inputValidation');
|
||||
|
||||
// Configure multer for logo uploads.
// Files land in backend/uploads/logos under a collision-resistant
// "channel-<timestamp>-<random>" name; the original extension is kept.
const storage = multer.diskStorage({
  destination: (req, file, cb) => {
    const uploadDir = path.join(__dirname, '../uploads/logos');
    // Created lazily so a fresh checkout works without manual setup.
    if (!fs.existsSync(uploadDir)) {
      fs.mkdirSync(uploadDir, { recursive: true });
    }
    cb(null, uploadDir);
  },
  filename: (req, file, cb) => {
    // Math.random is fine here: the goal is filename uniqueness,
    // not cryptographic unpredictability.
    const uniqueSuffix = Date.now() + '-' + Math.round(Math.random() * 1E9);
    cb(null, 'channel-' + uniqueSuffix + path.extname(file.originalname));
  }
});

// Accept only image uploads up to 5MB; BOTH the file extension and the
// client-reported MIME type must match the allow-list.
// NOTE(review): 'svg' is allowed — SVG can embed scripts, so if these
// logos are ever served inline this is a potential stored-XSS vector;
// confirm how they are delivered to browsers.
const upload = multer({
  storage: storage,
  limits: { fileSize: 5 * 1024 * 1024 }, // 5MB limit
  fileFilter: (req, file, cb) => {
    const allowedTypes = /jpeg|jpg|png|gif|svg|webp/;
    const extname = allowedTypes.test(path.extname(file.originalname).toLowerCase());
    const mimetype = allowedTypes.test(file.mimetype);
    if (mimetype && extname) {
      return cb(null, true);
    }
    cb(new Error('Only image files are allowed'));
  }
});
|
||||
|
||||
// List channels visible to the authenticated user, with optional
// playlist / radio-vs-tv / group / free-text filters and pagination.
// The effective logo is the custom one when set, else the playlist logo.
router.get('/', authenticate, readLimiter, validatePagination, validateSearch, (req, res) => {
  const { playlistId, isRadio, groupName, search } = req.query;
  const limit = req.sanitizedQuery?.limit || 100;
  const offset = req.sanitizedQuery?.offset || 0;

  // Collect WHERE fragments and their bind values side by side.
  const conditions = [];
  const params = [req.user.userId];

  if (playlistId) {
    conditions.push('c.playlist_id = ?');
    params.push(playlistId);
  }
  if (isRadio !== undefined) {
    conditions.push('c.is_radio = ?');
    params.push(isRadio === 'true' ? 1 : 0);
  }
  if (groupName) {
    conditions.push('c.group_name = ?');
    params.push(groupName);
  }
  if (search) {
    conditions.push('(c.name LIKE ? OR c.group_name LIKE ?)');
    params.push(`%${search}%`, `%${search}%`);
  }

  let query = `
    SELECT c.*, p.name as playlist_name,
           COALESCE(c.custom_logo, c.logo) as logo
    FROM channels c
    JOIN playlists p ON c.playlist_id = p.id
    WHERE p.user_id = ? AND c.is_active = 1
  `;
  for (const condition of conditions) {
    query += ` AND ${condition}`;
  }
  query += ' ORDER BY c.name LIMIT ? OFFSET ?';
  params.push(parseInt(limit), parseInt(offset));

  db.all(query, params, (dbErr, channels) => {
    if (dbErr) {
      return res.status(500).json({ error: 'Failed to fetch channels' });
    }
    res.json(channels);
  });
});
|
||||
|
||||
// Return the distinct channel groups visible to the caller, each with a
// channel count, optionally narrowed to one playlist and/or radio flag.
router.get('/groups', authenticate, readLimiter, (req, res) => {
  const { playlistId, isRadio } = req.query;

  // Reject non-numeric / non-positive playlist ids before querying.
  if (playlistId && (isNaN(parseInt(playlistId)) || parseInt(playlistId) < 1)) {
    return res.status(400).json({ error: 'Invalid playlist ID' });
  }

  // Pair each optional SQL fragment with its bind value.
  const filters = [];
  if (playlistId) {
    filters.push({ clause: 'c.playlist_id = ?', value: playlistId });
  }
  if (isRadio !== undefined) {
    filters.push({ clause: 'c.is_radio = ?', value: isRadio === 'true' ? 1 : 0 });
  }

  let sql = `
    SELECT DISTINCT c.group_name, COUNT(*) as count
    FROM channels c
    JOIN playlists p ON c.playlist_id = p.id
    WHERE p.user_id = ? AND c.is_active = 1
  `;
  const params = [req.user.userId];
  for (const { clause, value } of filters) {
    sql += ` AND ${clause}`;
    params.push(value);
  }
  sql += ' GROUP BY c.group_name ORDER BY c.group_name';

  db.all(sql, params, (dbErr, groups) => {
    if (dbErr) {
      return res.status(500).json({ error: 'Failed to fetch groups' });
    }
    res.json(groups);
  });
});
|
||||
|
||||
// Upload a custom logo for a channel the caller owns, replacing (and
// removing from disk) any previous custom logo.
// Fix: the bare fs.unlinkSync calls could throw (e.g. permission error,
// file vanished) and crash the request with an unhandled exception; the
// unlinks are best-effort cleanup, so failures are now logged instead.
router.post('/:id/logo', authenticate, modifyLimiter, validateIdParam, upload.single('logo'), (req, res) => {
  const channelId = req.params.id;
  const userId = req.user.userId;

  if (!req.file) {
    return res.status(400).json({ error: 'No file uploaded' });
  }

  // Verify channel belongs to user
  db.get(
    `SELECT c.*, c.custom_logo as old_logo
     FROM channels c
     JOIN playlists p ON c.playlist_id = p.id
     WHERE c.id = ? AND p.user_id = ?`,
    [channelId, userId],
    (err, channel) => {
      if (err) {
        return res.status(500).json({ error: 'Database error' });
      }
      if (!channel) {
        // Delete the just-uploaded file if the channel isn't the caller's;
        // a failed unlink must not turn this into a crash.
        try {
          fs.unlinkSync(req.file.path);
        } catch (unlinkErr) {
          logger.error('Error removing orphaned upload:', unlinkErr);
        }
        return res.status(404).json({ error: 'Channel not found' });
      }

      // Best-effort delete of the previous custom logo, if any.
      if (channel.old_logo) {
        const oldLogoPath = path.join(__dirname, '..', channel.old_logo);
        if (fs.existsSync(oldLogoPath)) {
          try {
            fs.unlinkSync(oldLogoPath);
          } catch (unlinkErr) {
            logger.error('Error deleting old logo file:', unlinkErr);
          }
        }
      }

      // Save new logo path (public URL path, relative to the backend dir)
      const logoPath = `/uploads/logos/${req.file.filename}`;
      db.run(
        'UPDATE channels SET custom_logo = ? WHERE id = ?',
        [logoPath, channelId],
        (updateErr) => {
          if (updateErr) {
            return res.status(500).json({ error: 'Failed to update logo' });
          }
          res.json({
            message: 'Logo uploaded successfully',
            logoUrl: logoPath
          });
        }
      );
    }
  );
});
|
||||
|
||||
// Delete the custom logo for a channel the caller owns (file + DB column).
// Fixes:
//  - added validateIdParam, matching the POST /:id/logo and DELETE /:id
//    routes, so malformed ids are rejected with 400 up front;
//  - the bare fs.unlinkSync could throw and crash the request; the unlink
//    is best-effort cleanup, so a failure is now logged and the DB column
//    is still cleared.
router.delete('/:id/logo', authenticate, modifyLimiter, validateIdParam, (req, res) => {
  const channelId = req.params.id;
  const userId = req.user.userId;

  // Verify channel belongs to user and get current logo
  db.get(
    `SELECT c.*, c.custom_logo
     FROM channels c
     JOIN playlists p ON c.playlist_id = p.id
     WHERE c.id = ? AND p.user_id = ?`,
    [channelId, userId],
    (err, channel) => {
      if (err) {
        return res.status(500).json({ error: 'Database error' });
      }
      if (!channel) {
        return res.status(404).json({ error: 'Channel not found' });
      }

      if (!channel.custom_logo) {
        return res.status(400).json({ error: 'No custom logo to delete' });
      }

      // Best-effort removal of the file from disk.
      const logoPath = path.join(__dirname, '..', channel.custom_logo);
      if (fs.existsSync(logoPath)) {
        try {
          fs.unlinkSync(logoPath);
        } catch (unlinkErr) {
          logger.error('Error deleting logo file:', unlinkErr);
        }
      }

      // Remove logo from database
      db.run(
        'UPDATE channels SET custom_logo = NULL WHERE id = ?',
        [channelId],
        (updateErr) => {
          if (updateErr) {
            return res.status(500).json({ error: 'Failed to delete logo' });
          }
          res.json({ message: 'Logo deleted successfully' });
        }
      );
    }
  );
});
|
||||
|
||||
// Get a single channel by ID; 404 unless it belongs to one of the
// caller's playlists. The returned "logo" prefers the custom logo.
// Fix: added validateIdParam for consistency with the sibling /:id
// routes, so non-numeric ids get a 400 instead of hitting the database.
router.get('/:id', authenticate, readLimiter, validateIdParam, (req, res) => {
  const channelId = req.params.id;
  const userId = req.user.userId;

  db.get(
    `SELECT c.*, p.name as playlist_name,
            COALESCE(c.custom_logo, c.logo) as logo
     FROM channels c
     JOIN playlists p ON c.playlist_id = p.id
     WHERE c.id = ? AND p.user_id = ?`,
    [channelId, userId],
    (err, channel) => {
      if (err) {
        return res.status(500).json({ error: 'Database error' });
      }
      if (!channel) {
        return res.status(404).json({ error: 'Channel not found' });
      }
      res.json(channel);
    }
  );
});
|
||||
|
||||
// Delete a channel from one of the caller's playlists: remove its custom
// logo file (best-effort), delete the row, then refresh the owning
// playlist's cached channel_count.
router.delete('/:id', authenticate, modifyLimiter, validateIdParam, (req, res) => {
  const channelId = req.params.id;
  const userId = req.user.userId;

  // Verify channel belongs to user (join through playlists for ownership)
  db.get(
    `SELECT c.id, c.playlist_id, c.custom_logo
     FROM channels c
     JOIN playlists p ON c.playlist_id = p.id
     WHERE c.id = ? AND p.user_id = ?`,
    [channelId, userId],
    (err, channel) => {
      if (err) {
        logger.error('Error fetching channel for deletion:', err);
        return res.status(500).json({ error: 'Database error' });
      }
      if (!channel) {
        return res.status(404).json({ error: 'Channel not found' });
      }

      // Delete custom logo file if exists; failures are logged and
      // deliberately do not block deleting the channel itself.
      if (channel.custom_logo) {
        const logoPath = path.join(__dirname, '..', channel.custom_logo);
        if (fs.existsSync(logoPath)) {
          try {
            fs.unlinkSync(logoPath);
          } catch (err) {
            logger.error('Error deleting custom logo file:', err);
          }
        }
      }

      // Delete channel from database
      db.run('DELETE FROM channels WHERE id = ?', [channelId], function(err) {
        if (err) {
          logger.error('Error deleting channel:', err);
          return res.status(500).json({ error: 'Failed to delete channel' });
        }

        // Update playlist channel count. Fire-and-forget: the response
        // below is sent without waiting for this UPDATE to finish, so the
        // count may lag momentarily; errors are only logged.
        db.run(
          'UPDATE playlists SET channel_count = (SELECT COUNT(*) FROM channels WHERE playlist_id = ?) WHERE id = ?',
          [channel.playlist_id, channel.playlist_id],
          (updateErr) => {
            if (updateErr) {
              logger.error('Error updating playlist count:', updateErr);
            }
          }
        );

        logger.info(`Channel ${channelId} deleted by user ${userId}`);
        res.json({
          message: 'Channel deleted successfully',
          deletedId: channelId
        });
      });
    }
  );
});
|
||||
|
||||
module.exports = router;
|
||||
218
backend/routes/csp.js
Normal file
218
backend/routes/csp.js
Normal file
|
|
@ -0,0 +1,218 @@
|
|||
const express = require('express');
|
||||
const router = express.Router();
|
||||
const { authenticate, requireAdmin } = require('../middleware/auth');
|
||||
const logger = require('../utils/logger');
|
||||
const { db } = require('../database/db');
|
||||
|
||||
// Store CSP violations in database.
// Runs once at module load; IF NOT EXISTS makes it idempotent across
// restarts. Columns mirror the fields of a browser CSP report plus the
// reporter's user agent / IP.
db.run(`
  CREATE TABLE IF NOT EXISTS csp_violations (
    id INTEGER PRIMARY KEY AUTOINCREMENT,
    document_uri TEXT,
    violated_directive TEXT,
    blocked_uri TEXT,
    source_file TEXT,
    line_number INTEGER,
    column_number INTEGER,
    user_agent TEXT,
    ip_address TEXT,
    created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
  )
`);
|
||||
|
||||
// CSP violation reporting endpoint (no auth required — browsers POST
// reports here automatically and cannot attach a session).
// NOTE(review): unlike the other routes in this project, this endpoint
// has no rate limiter; a flood of forged reports could bloat the
// csp_violations table — consider adding one.
router.post('/report', express.json({ type: 'application/csp-report' }), (req, res) => {
  // Browsers wrap the payload in a top-level "csp-report" key.
  const report = req.body['csp-report'];
  const ip = req.ip || req.headers['x-forwarded-for'] || req.connection.remoteAddress;
  const userAgent = req.headers['user-agent'];

  if (!report) {
    return res.status(400).json({ error: 'Invalid CSP report' });
  }

  logger.warn('CSP Violation:', {
    documentUri: report['document-uri'],
    violatedDirective: report['violated-directive'],
    blockedUri: report['blocked-uri'],
    sourceFile: report['source-file'],
    lineNumber: report['line-number'],
    columnNumber: report['column-number'],
    ip,
    userAgent
  });

  // Store in database. Fire-and-forget: the 204 below is sent without
  // waiting for the insert; an insert failure is only logged.
  db.run(
    `INSERT INTO csp_violations
     (document_uri, violated_directive, blocked_uri, source_file, line_number, column_number, user_agent, ip_address)
     VALUES (?, ?, ?, ?, ?, ?, ?, ?)`,
    [
      report['document-uri'],
      report['violated-directive'],
      report['blocked-uri'],
      report['source-file'],
      report['line-number'],
      report['column-number'],
      userAgent,
      ip
    ],
    (err) => {
      if (err) {
        logger.error('Failed to store CSP violation:', err);
      }
    }
  );

  // Report endpoints conventionally return an empty 204.
  res.status(204).end();
});
|
||||
|
||||
// Get CSP violations (admin only), newest first, paginated.
// Fix: parseInt on a malformed limit/offset query param yields NaN,
// which was bound directly into the SQL LIMIT/OFFSET. Values are now
// sanitized with sane defaults and bounds, and the sanitized values are
// the ones echoed back in the response.
router.get('/violations', authenticate, requireAdmin, (req, res) => {
  const rawLimit = parseInt(req.query.limit, 10);
  const rawOffset = parseInt(req.query.offset, 10);
  // Defaults: limit 100 (capped at 1000), offset 0; NaN falls back.
  const limit = Math.min(Math.max(Number.isNaN(rawLimit) ? 100 : rawLimit, 1), 1000);
  const offset = Math.max(Number.isNaN(rawOffset) ? 0 : rawOffset, 0);

  db.all(
    `SELECT * FROM csp_violations
     ORDER BY created_at DESC
     LIMIT ? OFFSET ?`,
    [limit, offset],
    (err, violations) => {
      if (err) {
        logger.error('Failed to fetch CSP violations:', err);
        return res.status(500).json({ error: 'Failed to fetch violations' });
      }

      // Get total count so clients can page through all rows.
      db.get('SELECT COUNT(*) as total FROM csp_violations', (countErr, countResult) => {
        if (countErr) {
          logger.error('Failed to count CSP violations:', countErr);
          return res.status(500).json({ error: 'Failed to count violations' });
        }

        res.json({
          violations,
          total: countResult.total,
          limit,
          offset
        });
      });
    }
  );
});
|
||||
|
||||
// Get CSP violation statistics (admin only) for the last N days:
// total count, top-10 violated directives, top-10 blocked URIs, and the
// 20 most recent reports.
// Fix: parseInt on a malformed ?days= param yields NaN, which made
// cutoffDate invalid and toISOString() throw; days is now sanitized
// (default 7, minimum 1) and the cutoff string is computed once.
router.get('/stats', authenticate, requireAdmin, (req, res) => {
  const rawDays = parseInt(req.query.days, 10);
  const days = Math.max(Number.isNaN(rawDays) ? 7 : rawDays, 1);
  const cutoffDate = new Date();
  cutoffDate.setDate(cutoffDate.getDate() - days);
  const cutoff = cutoffDate.toISOString();

  Promise.all([
    // Total violations in the window
    new Promise((resolve, reject) => {
      db.get(
        'SELECT COUNT(*) as total FROM csp_violations WHERE created_at >= ?',
        [cutoff],
        (err, row) => err ? reject(err) : resolve(row.total)
      );
    }),
    // Top 10 by violated directive
    new Promise((resolve, reject) => {
      db.all(
        `SELECT violated_directive, COUNT(*) as count
         FROM csp_violations
         WHERE created_at >= ?
         GROUP BY violated_directive
         ORDER BY count DESC
         LIMIT 10`,
        [cutoff],
        (err, rows) => err ? reject(err) : resolve(rows)
      );
    }),
    // Top 10 by blocked URI
    new Promise((resolve, reject) => {
      db.all(
        `SELECT blocked_uri, COUNT(*) as count
         FROM csp_violations
         WHERE created_at >= ?
         GROUP BY blocked_uri
         ORDER BY count DESC
         LIMIT 10`,
        [cutoff],
        (err, rows) => err ? reject(err) : resolve(rows)
      );
    }),
    // 20 most recent violations
    new Promise((resolve, reject) => {
      db.all(
        `SELECT * FROM csp_violations
         WHERE created_at >= ?
         ORDER BY created_at DESC
         LIMIT 20`,
        [cutoff],
        (err, rows) => err ? reject(err) : resolve(rows)
      );
    })
  ])
    .then(([total, byDirective, byUri, recent]) => {
      res.json({
        total,
        byDirective,
        byUri,
        recent,
        days
      });
    })
    .catch((err) => {
      logger.error('Failed to fetch CSP stats:', err);
      res.status(500).json({ error: 'Failed to fetch statistics' });
    });
});
|
||||
|
||||
// Clear CSP violations older than N days (admin only).
// Fix: parseInt on a malformed ?days= param yields NaN, producing an
// invalid cutoff date; days is now sanitized (default 30, minimum 1).
router.delete('/violations', authenticate, requireAdmin, (req, res) => {
  const rawDays = parseInt(req.query.days, 10);
  const days = Math.max(Number.isNaN(rawDays) ? 30 : rawDays, 1);
  const cutoffDate = new Date();
  cutoffDate.setDate(cutoffDate.getDate() - days);

  db.run(
    'DELETE FROM csp_violations WHERE created_at < ?',
    [cutoffDate.toISOString()],
    function(err) {
      // Classic function (not arrow) so sqlite3 can expose this.changes.
      if (err) {
        logger.error('Failed to delete old CSP violations:', err);
        return res.status(500).json({ error: 'Failed to delete violations' });
      }

      res.json({
        message: 'Old violations cleared',
        deleted: this.changes
      });
    }
  );
});
|
||||
|
||||
// Get the current CSP policy description (authenticated users).
// In production the policy is enforced; otherwise it is report-only.
router.get('/policy', authenticate, (req, res) => {
  const enforced = process.env.NODE_ENV === 'production';

  const directives = {
    defaultSrc: ["'self'"],
    scriptSrc: ["'self'", "'unsafe-inline'", "'unsafe-eval'", "https://www.gstatic.com"],
    styleSrc: ["'self'", "'unsafe-inline'", "https://fonts.googleapis.com"],
    fontSrc: ["'self'", "data:", "https://fonts.gstatic.com"],
    imgSrc: ["'self'", "data:", "blob:", "https:", "http:"],
    mediaSrc: ["'self'", "blob:", "data:", "mediastream:", "https:", "http:", "*"],
    connectSrc: ["'self'", "https:", "http:", "ws:", "wss:", "blob:", "*"],
    frameSrc: ["'self'", "https://www.youtube.com", "https://player.vimeo.com"],
    objectSrc: ["'none'"],
    baseUri: ["'self'"],
    formAction: ["'self'"],
    frameAncestors: ["'self'"],
    upgradeInsecureRequests: enforced
  };

  res.json({
    mode: enforced ? 'enforce' : 'report-only',
    policy: directives,
    reportUri: '/api/csp/report'
  });
});
|
||||
|
||||
module.exports = router;
|
||||
438
backend/routes/encryption-management.js
Normal file
438
backend/routes/encryption-management.js
Normal file
|
|
@ -0,0 +1,438 @@
|
|||
/**
|
||||
* Encryption Management API
|
||||
* CWE-311: Encrypt Sensitive Data
|
||||
*
|
||||
* Provides endpoints for:
|
||||
* - Checking encryption status
|
||||
* - Migrating plaintext data to encrypted format
|
||||
* - Key rotation
|
||||
* - Encryption health monitoring
|
||||
*/
|
||||
|
||||
const express = require('express');
|
||||
const router = express.Router();
|
||||
const { authenticate, requireAdmin } = require('../middleware/auth');
|
||||
const { readLimiter, modifyLimiter } = require('../middleware/rateLimiter');
|
||||
const { db } = require('../database/db');
|
||||
const encryption = require('../utils/encryption');
|
||||
const logger = require('../utils/logger');
|
||||
const SecurityAuditLogger = require('../utils/securityAudit');
|
||||
|
||||
/**
 * GET /api/encryption/status
 * Report the encryption configuration (from the encryption util) merged
 * with database-wide encryption statistics. Admin only.
 */
router.get('/status', authenticate, requireAdmin, readLimiter, async (req, res) => {
  try {
    const configStatus = encryption.getEncryptionStatus();
    const statistics = await getEncryptionStats();

    const payload = { ...configStatus, statistics };
    res.json({ success: true, data: payload });
  } catch (error) {
    logger.error('Error getting encryption status:', error);
    res.status(500).json({ error: 'Failed to get encryption status' });
  }
});
|
||||
|
||||
/**
 * GET /api/encryption/scan
 * Scan the database for sensitive data that is still stored in
 * plaintext and summarize the findings. Admin only.
 */
router.get('/scan', authenticate, requireAdmin, readLimiter, async (req, res) => {
  try {
    const findings = await scanForUnencryptedData();

    // Sum per-finding counts into one headline number.
    let totalIssues = 0;
    for (const finding of findings) {
      totalIssues += finding.count;
    }

    const recommendation = findings.length > 0
      ? 'Run migration to encrypt sensitive data'
      : 'All sensitive data is encrypted';

    res.json({
      success: true,
      data: { findings, totalIssues, recommendation }
    });
  } catch (error) {
    logger.error('Error scanning for unencrypted data:', error);
    res.status(500).json({ error: 'Failed to scan database' });
  }
});
|
||||
|
||||
/**
 * POST /api/encryption/migrate
 * Migrate unencrypted sensitive data to encrypted format. Admin only.
 *
 * Runs three independent, best-effort stages (settings, VPN configs,
 * API tokens): a failure in one stage is recorded in results.errors and
 * the remaining stages still run. The admin action is written to the
 * security audit log before responding.
 */
router.post('/migrate', authenticate, requireAdmin, modifyLimiter, async (req, res) => {
  const ip = req.ip || req.headers['x-forwarded-for'] || req.connection.remoteAddress;
  const userAgent = req.headers['user-agent'];

  try {
    logger.info('Starting encryption migration...');

    // Per-stage migrated-row counters plus accumulated error messages.
    const results = {
      settings: 0,
      vpnConfigs: 0,
      apiTokens: 0,
      twoFactorSecrets: 0,
      errors: []
    };

    // Migrate settings (sensitive keys only)
    try {
      const sensitiveKeys = ['api_key', 'api_secret', 'vpn_password', 'smtp_password'];
      const settingsResult = await migrateSettings(sensitiveKeys);
      results.settings = settingsResult.migrated;
      if (settingsResult.error) results.errors.push(settingsResult.error);
    } catch (error) {
      results.errors.push(`Settings migration error: ${error.message}`);
    }

    // Migrate VPN configs
    try {
      const vpnResult = await migrateVPNConfigs();
      results.vpnConfigs = vpnResult.migrated;
      if (vpnResult.error) results.errors.push(vpnResult.error);
    } catch (error) {
      results.errors.push(`VPN migration error: ${error.message}`);
    }

    // Migrate API tokens (if any exist in plaintext)
    try {
      const tokenResult = await migrateAPITokens();
      results.apiTokens = tokenResult.migrated;
      if (tokenResult.error) results.errors.push(tokenResult.error);
    } catch (error) {
      results.errors.push(`API tokens migration error: ${error.message}`);
    }

    // Note: 2FA secrets are NOT migrated as they're already handled securely
    // Passwords are bcrypt hashed (not encrypted), which is correct

    const totalMigrated = results.settings + results.vpnConfigs + results.apiTokens;

    // Log admin activity to the security audit trail; awaited, so an
    // audit failure surfaces through the outer catch.
    await SecurityAuditLogger.logAdminActivity(req.user.userId, 'encryption_migration', {
      ip,
      userAgent,
      results: {
        totalMigrated,
        ...results
      }
    });

    logger.info('Encryption migration completed', results);

    res.json({
      success: true,
      message: 'Encryption migration completed',
      data: {
        totalMigrated,
        ...results
      }
    });
  } catch (error) {
    logger.error('Error during encryption migration:', error);
    res.status(500).json({ error: 'Encryption migration failed' });
  }
});
|
||||
|
||||
/**
 * POST /api/encryption/verify
 * Verify encrypted data integrity
 *
 * Admin-only: runs decryption spot-checks over stored encrypted values and
 * returns the per-category tested/valid/invalid counts.
 */
router.post('/verify', authenticate, requireAdmin, readLimiter, async (req, res) => {
  try {
    const data = await verifyEncryptedData();
    res.json({ success: true, data });
  } catch (error) {
    logger.error('Error verifying encrypted data:', error);
    res.status(500).json({ error: 'Verification failed' });
  }
});
|
||||
|
||||
/**
 * Helper: Get encryption statistics
 *
 * Counts total vs. encrypted rows for VPN configs, sensitive settings and
 * 2FA secrets. A value counts as "encrypted" when it matches the 4-part
 * colon-separated ciphertext format used elsewhere in this file.
 *
 * Fix: the previous implementation resolved after a fixed 100 ms setTimeout
 * and raced the nested sqlite callbacks, so counts could be returned before
 * the queries finished. Each query is now awaited explicitly.
 *
 * @returns {Promise<object>} stats keyed by data category
 */
async function getEncryptionStats() {
  // Promisified single-row query. Resolves to undefined on error so one
  // failed count degrades gracefully (matching the old err-swallowing style).
  const getRow = (sql, params = []) =>
    new Promise((resolve) => {
      db.get(sql, params, (err, row) => resolve(err ? undefined : row));
    });

  const stats = {
    vpnConfigs: { total: 0, encrypted: 0 },
    settings: { total: 0, encrypted: 0 },
    twoFactorSecrets: { total: 0, encrypted: 0 }
  };

  // Count VPN configs
  const vpnTotal = await getRow('SELECT COUNT(*) as total FROM vpn_configs');
  if (vpnTotal) {
    stats.vpnConfigs.total = vpnTotal.total;

    // Encrypted format has 4 colon-separated parts
    const vpnEncrypted = await getRow(
      `SELECT COUNT(*) as encrypted FROM vpn_configs
       WHERE config_data LIKE '%:%:%:%'`
    );
    if (vpnEncrypted) stats.vpnConfigs.encrypted = vpnEncrypted.encrypted;
  }

  // Count settings with sensitive data
  const settingsTotal = await getRow(
    `SELECT COUNT(*) as total FROM settings
     WHERE key IN ('api_key', 'api_secret', 'vpn_password', 'smtp_password')`
  );
  if (settingsTotal) {
    stats.settings.total = settingsTotal.total;

    // Count encrypted ones
    const settingsEncrypted = await getRow(
      `SELECT COUNT(*) as encrypted FROM settings
       WHERE key IN ('api_key', 'api_secret', 'vpn_password', 'smtp_password')
       AND value LIKE '%:%:%:%'`
    );
    if (settingsEncrypted) stats.settings.encrypted = settingsEncrypted.encrypted;
  }

  // Count 2FA secrets (these should always be protected, not necessarily encrypted)
  const twoFactor = await getRow(
    'SELECT COUNT(*) as total FROM users WHERE two_factor_secret IS NOT NULL'
  );
  if (twoFactor) {
    stats.twoFactorSecrets.total = twoFactor.total;
    stats.twoFactorSecrets.encrypted = twoFactor.total; // Consider all protected
  }

  return stats;
}
|
||||
|
||||
/**
 * Helper: Scan for unencrypted sensitive data
 *
 * Reports tables/fields that still hold plaintext sensitive values, i.e.
 * anything not matching the 4-part colon-separated encrypted format.
 *
 * Fix: the previous implementation resolved the findings array via a 100 ms
 * setTimeout, racing the sqlite callbacks; queries are now awaited so the
 * result is always complete and deterministic.
 *
 * @returns {Promise<Array<object>>} findings with table/field/count/severity
 */
async function scanForUnencryptedData() {
  // Promisified single-row query; errors resolve to undefined so a failed
  // check is simply omitted from findings (matching the old behavior).
  const getRow = (sql) =>
    new Promise((resolve) => {
      db.get(sql, [], (err, row) => resolve(err ? undefined : row));
    });

  const findings = [];

  // Check VPN configs
  const vpnRow = await getRow(
    `SELECT COUNT(*) as count FROM vpn_configs
     WHERE config_data NOT LIKE '%:%:%:%' OR config_data IS NULL`
  );
  if (vpnRow && vpnRow.count > 0) {
    findings.push({
      table: 'vpn_configs',
      field: 'config_data',
      count: vpnRow.count,
      severity: 'high',
      description: 'VPN configuration files contain credentials and private keys'
    });
  }

  // Check sensitive settings
  const settingsRow = await getRow(
    `SELECT COUNT(*) as count FROM settings
     WHERE key IN ('api_key', 'api_secret', 'vpn_password', 'smtp_password')
     AND (value NOT LIKE '%:%:%:%' OR value IS NULL)`
  );
  if (settingsRow && settingsRow.count > 0) {
    findings.push({
      table: 'settings',
      field: 'value',
      count: settingsRow.count,
      severity: 'medium',
      description: 'Settings may contain API keys, passwords, and secrets'
    });
  }

  return findings;
}
|
||||
|
||||
/**
 * Helper: Migrate settings to encrypted format
 *
 * Encrypts every plaintext value for the given sensitive setting keys.
 * Rows already in the 4-part encrypted format are excluded by the query.
 *
 * @param {string[]} sensitiveKeys - setting keys whose values must be encrypted
 * @returns {Promise<{migrated: number, error: ?string}>}
 */
async function migrateSettings(sensitiveKeys) {
  const placeholders = sensitiveKeys.map(() => '?').join(',');
  const selectSql =
    `SELECT id, user_id, key, value FROM settings
     WHERE key IN (${placeholders})
     AND value IS NOT NULL
     AND value NOT LIKE '%:%:%:%'`;

  return new Promise((resolve) => {
    db.all(selectSql, sensitiveKeys, async (err, rows) => {
      // A query failure or an empty result ends the migration immediately.
      if (err || !rows || rows.length === 0) {
        return resolve({ migrated: 0, error: err ? err.message : null });
      }

      let migratedCount = 0;
      for (const row of rows) {
        try {
          const ciphertext = encryption.encryptSetting(row.value, row.key);

          await new Promise((ok, fail) => {
            db.run(
              'UPDATE settings SET value = ? WHERE id = ?',
              [ciphertext, row.id],
              (updateErr) => (updateErr ? fail(updateErr) : ok())
            );
          });

          migratedCount += 1;
        } catch (error) {
          // Per-row failures are logged and skipped; migration continues.
          logger.error(`Failed to encrypt setting ${row.key}:`, error);
        }
      }

      resolve({ migrated: migratedCount, error: null });
    });
  });
}
|
||||
|
||||
/**
 * Helper: Migrate VPN configs to new encryption format
 *
 * Re-encrypts any VPN config still stored as plaintext JSON. Rows whose
 * value already has the 4-part colon-separated encrypted shape are skipped.
 *
 * @returns {Promise<{migrated: number, error: ?string}>}
 */
async function migrateVPNConfigs() {
  const selectSql =
    `SELECT id, user_id, config_data FROM vpn_configs
     WHERE config_data IS NOT NULL`;

  return new Promise((resolve) => {
    db.all(selectSql, [], async (err, rows) => {
      // A query failure or an empty result ends the migration immediately.
      if (err || !rows || rows.length === 0) {
        return resolve({ migrated: 0, error: err ? err.message : null });
      }

      let migratedCount = 0;
      for (const row of rows) {
        try {
          // Skip rows already in the new encrypted format (4 colon-separated parts)
          if (row.config_data.split(':').length === 4) {
            continue;
          }

          // Re-encrypt using the new encryption module
          const ciphertext = encryption.encryptVPN(JSON.parse(row.config_data));

          await new Promise((ok, fail) => {
            db.run(
              'UPDATE vpn_configs SET config_data = ? WHERE id = ?',
              [ciphertext, row.id],
              (updateErr) => (updateErr ? fail(updateErr) : ok())
            );
          });

          migratedCount += 1;
        } catch (error) {
          // Per-row failures (bad JSON, update error) are logged and skipped.
          logger.error(`Failed to encrypt VPN config ${row.id}:`, error);
        }
      }

      resolve({ migrated: migratedCount, error: null });
    });
  });
}
|
||||
|
||||
/**
 * Helper: Migrate API tokens (if storing plaintext tokens - usually they're hashed)
 *
 * Placeholder: tokens are already stored hashed/encrypted, so there is
 * nothing to migrate. Kept async so callers can await it uniformly.
 *
 * @returns {Promise<{migrated: number, error: null}>}
 */
async function migrateAPITokens() {
  // Only migrate if you're storing plaintext tokens
  return { migrated: 0, error: null };
}
|
||||
|
||||
/**
 * Helper: Verify encrypted data can be decrypted
 *
 * Spot-checks up to 10 encrypted VPN configs and 10 encrypted settings by
 * attempting decryption, tallying tested/valid/invalid per category.
 *
 * Fix: the previous implementation resolved via a 100 ms setTimeout that
 * raced the sqlite callbacks; both queries are now awaited so the counts
 * are complete before resolving.
 *
 * @returns {Promise<{vpnConfigs: object, settings: object}>}
 */
async function verifyEncryptedData() {
  // Promisified multi-row query; errors resolve to an empty list so a
  // failed query leaves that category's counters at zero.
  const getAll = (sql) =>
    new Promise((resolve) => {
      db.all(sql, [], (err, rows) => resolve(err ? [] : rows || []));
    });

  const results = {
    vpnConfigs: { tested: 0, valid: 0, invalid: 0 },
    settings: { tested: 0, valid: 0, invalid: 0 }
  };

  // Verify VPN configs
  const configs = await getAll(
    `SELECT id, config_data FROM vpn_configs
     WHERE config_data LIKE '%:%:%:%' LIMIT 10`
  );
  for (const config of configs) {
    results.vpnConfigs.tested++;
    try {
      encryption.decryptVPN(config.config_data);
      results.vpnConfigs.valid++;
    } catch {
      results.vpnConfigs.invalid++;
    }
  }

  // Verify settings
  const settings = await getAll(
    `SELECT id, key, value FROM settings
     WHERE value LIKE '%:%:%:%' LIMIT 10`
  );
  for (const setting of settings) {
    results.settings.tested++;
    try {
      encryption.decryptSetting(setting.value, setting.key);
      results.settings.valid++;
    } catch {
      results.settings.invalid++;
    }
  }

  return results;
}
|
||||
|
||||
// Export the encryption-management router for mounting by the app
module.exports = router;
|
||||
136
backend/routes/favorites.js
Normal file
136
backend/routes/favorites.js
Normal file
|
|
@ -0,0 +1,136 @@
|
|||
const express = require('express');
|
||||
const router = express.Router();
|
||||
const { db } = require('../database/db');
|
||||
const { authenticate } = require('../middleware/auth');
|
||||
const { modifyLimiter, readLimiter } = require('../middleware/rateLimiter');
|
||||
const logger = require('../utils/logger');
|
||||
const { validateChannelIdParam } = require('../middleware/inputValidation');
|
||||
|
||||
/**
 * Get user's favorite channels
 *
 * Returns active favorited channels (custom logo preferred over the default
 * one), newest favorites first. Optional ?isRadio=true|false filters by
 * radio vs. TV channels.
 *
 * Fix: use the shared logger instead of console.error, consistent with the
 * rest of the backend.
 */
router.get('/', authenticate, readLimiter, (req, res) => {
  const { isRadio } = req.query;
  const userId = req.user.userId;

  let query = `
    SELECT
      c.id, c.name, c.url, COALESCE(c.custom_logo, c.logo) as logo,
      c.group_name, c.is_radio, f.created_at as favorited_at
    FROM favorites f
    JOIN channels c ON f.channel_id = c.id
    WHERE f.user_id = ? AND c.is_active = 1
  `;

  const params = [userId];

  // Filter by radio or TV
  if (isRadio !== undefined) {
    query += ' AND c.is_radio = ?';
    params.push(isRadio === 'true' ? 1 : 0);
  }

  query += ' ORDER BY f.created_at DESC';

  db.all(query, params, (err, rows) => {
    if (err) {
      logger.error('Error fetching favorites:', err);
      return res.status(500).json({ error: 'Failed to fetch favorites' });
    }
    res.json(rows || []);
  });
});
|
||||
|
||||
/**
 * Add channel to favorites
 *
 * Verifies the channel exists and is active before inserting. INSERT OR
 * IGNORE makes the operation idempotent: a duplicate returns 200 with
 * alreadyExists, a fresh insert returns 201.
 *
 * Fixes: shared logger instead of console.error; parseInt with explicit
 * radix 10.
 */
router.post('/:channelId', authenticate, modifyLimiter, validateChannelIdParam, (req, res) => {
  const { channelId } = req.params;
  const userId = req.user.userId;

  // Check if channel exists and is active
  db.get(
    'SELECT id, is_radio FROM channels WHERE id = ? AND is_active = 1',
    [channelId],
    (err, channel) => {
      if (err) {
        logger.error('Error checking channel:', err);
        return res.status(500).json({ error: 'Database error' });
      }

      if (!channel) {
        return res.status(404).json({ error: 'Channel not found' });
      }

      // Add to favorites
      db.run(
        `INSERT OR IGNORE INTO favorites (user_id, channel_id) VALUES (?, ?)`,
        [userId, channelId],
        function(err) {
          if (err) {
            logger.error('Error adding favorite:', err);
            return res.status(500).json({ error: 'Failed to add favorite' });
          }

          // changes === 0 means the row already existed (IGNORE path)
          if (this.changes === 0) {
            return res.status(200).json({ message: 'Already in favorites', alreadyExists: true });
          }

          res.status(201).json({
            message: 'Added to favorites',
            channelId: parseInt(channelId, 10),
            isRadio: channel.is_radio === 1
          });
        }
      );
    }
  );
});
|
||||
|
||||
/**
 * Remove channel from favorites
 *
 * Deletes the (user, channel) favorite row; 404 when nothing was deleted.
 *
 * Fix: use the shared logger instead of console.error.
 */
router.delete('/:channelId', authenticate, modifyLimiter, validateChannelIdParam, (req, res) => {
  const { channelId } = req.params;
  const userId = req.user.userId;

  db.run(
    'DELETE FROM favorites WHERE user_id = ? AND channel_id = ?',
    [userId, channelId],
    function(err) {
      if (err) {
        logger.error('Error removing favorite:', err);
        return res.status(500).json({ error: 'Failed to remove favorite' });
      }

      // changes === 0 means there was no such favorite to delete
      if (this.changes === 0) {
        return res.status(404).json({ error: 'Favorite not found' });
      }

      res.json({ message: 'Removed from favorites' });
    }
  );
});
|
||||
|
||||
/**
 * Check if channel is favorited
 *
 * Fixes for consistency with the sibling favorites routes: added
 * readLimiter (rate limiting) and validateChannelIdParam (param
 * validation), and switched console.error to the shared logger. Both
 * middlewares are already imported at the top of this file.
 */
router.get('/check/:channelId', authenticate, readLimiter, validateChannelIdParam, (req, res) => {
  const { channelId } = req.params;
  const userId = req.user.userId;

  db.get(
    'SELECT id FROM favorites WHERE user_id = ? AND channel_id = ?',
    [userId, channelId],
    (err, row) => {
      if (err) {
        logger.error('Error checking favorite:', err);
        return res.status(500).json({ error: 'Database error' });
      }

      // Presence of a row means the channel is favorited
      res.json({ isFavorite: !!row });
    }
  );
});
|
||||
|
||||
// Export the favorites router for mounting by the app
module.exports = router;
|
||||
9
backend/routes/groups.js
Normal file
9
backend/routes/groups.js
Normal file
|
|
@ -0,0 +1,9 @@
|
|||
const express = require('express');
|
||||
const router = express.Router();
|
||||
const { authenticate } = require('../middleware/auth');
|
||||
|
||||
// Placeholder endpoint — returns a static payload for authenticated users.
router.get('/', authenticate, function handleGroupsIndex(req, res) {
  const payload = { message: 'Groups endpoint' };
  res.json(payload);
});
|
||||
|
||||
// Export the groups router for mounting by the app
module.exports = router;
|
||||
216
backend/routes/history.js
Normal file
216
backend/routes/history.js
Normal file
|
|
@ -0,0 +1,216 @@
|
|||
const express = require('express');
|
||||
const router = express.Router();
|
||||
const { authenticate } = require('../middleware/auth');
|
||||
const { readLimiter, modifyLimiter } = require('../middleware/rateLimiter');
|
||||
const { db } = require('../database/db');
|
||||
const logger = require('../utils/logger');
|
||||
|
||||
/**
 * Record watch history
 *
 * Inserts one watch event for the authenticated user, optionally scoped to
 * a profile. Only channel_id is mandatory; duration defaults to 0.
 */
router.post('/', modifyLimiter, authenticate, (req, res) => {
  const { channel_id, duration, profile_id } = req.body;
  const userId = req.user.userId;

  // channel_id is the only mandatory field
  if (!channel_id) {
    return res.status(400).json({ error: 'channel_id is required' });
  }

  const insertSql =
    `INSERT INTO watch_history (user_id, profile_id, channel_id, duration)
     VALUES (?, ?, ?, ?)`;
  const values = [userId, profile_id || null, channel_id, duration || 0];

  db.run(insertSql, values, function (err) {
    if (err) {
      logger.error('Error recording watch history:', err);
      return res.status(500).json({ error: 'Failed to record watch history' });
    }
    // this.lastID = rowid of the inserted history entry
    res.json({ message: 'Watch history recorded', id: this.lastID });
  });
});
|
||||
|
||||
/**
 * Get user watch history
 *
 * Supports pagination via ?limit (default 50) and ?offset (default 0), and
 * an optional ?profile_id filter. Newest entries first.
 *
 * Fix: parseInt with explicit radix 10 for limit/offset.
 */
router.get('/', readLimiter, authenticate, (req, res) => {
  const user_id = req.user.userId;
  const { limit = 50, offset = 0, profile_id } = req.query;

  let query = `
    SELECT
      wh.id,
      wh.watched_at,
      wh.duration,
      c.id as channel_id,
      c.name as channel_name,
      c.logo,
      c.custom_logo,
      c.group_name,
      c.is_radio
    FROM watch_history wh
    INNER JOIN channels c ON wh.channel_id = c.id
    WHERE wh.user_id = ?
  `;

  const params = [user_id];

  // Optional per-profile filter
  if (profile_id) {
    query += ' AND wh.profile_id = ?';
    params.push(profile_id);
  }

  query += ' ORDER BY wh.watched_at DESC LIMIT ? OFFSET ?';
  params.push(parseInt(limit, 10), parseInt(offset, 10));

  db.all(query, params, (err, rows) => {
    if (err) {
      logger.error('Error fetching watch history:', err);
      return res.status(500).json({ error: 'Failed to fetch watch history' });
    }
    res.json(rows);
  });
});
|
||||
|
||||
/**
 * Get most watched channels (recommendations)
 *
 * Aggregates watch counts and total duration for the authenticated user over
 * the last ?days days (default 30), optionally scoped to ?profile_id, and
 * returns the top ?limit channels (default 10).
 *
 * Fix: parseInt with explicit radix 10 for limit.
 */
router.get('/top-channels', readLimiter, authenticate, (req, res) => {
  const user_id = req.user.userId;
  const { limit = 10, profile_id, days = 30 } = req.query;

  let query = `
    SELECT
      c.id,
      c.name,
      c.logo,
      c.custom_logo,
      c.group_name,
      c.is_radio,
      c.url,
      COUNT(wh.id) as watch_count,
      SUM(wh.duration) as total_duration,
      MAX(wh.watched_at) as last_watched
    FROM watch_history wh
    INNER JOIN channels c ON wh.channel_id = c.id
    WHERE wh.user_id = ?
      AND wh.watched_at >= datetime('now', '-' || ? || ' days')
  `;

  const params = [user_id, days];

  // Optional per-profile filter
  if (profile_id) {
    query += ' AND wh.profile_id = ?';
    params.push(profile_id);
  }

  query += `
    GROUP BY c.id
    ORDER BY watch_count DESC, total_duration DESC
    LIMIT ?
  `;
  params.push(parseInt(limit, 10));

  db.all(query, params, (err, rows) => {
    if (err) {
      logger.error('Error fetching top channels:', err);
      return res.status(500).json({ error: 'Failed to fetch top channels' });
    }
    res.json(rows);
  });
});
|
||||
|
||||
/**
 * Get recommended channels based on viewing patterns
 *
 * Strategy: a CTE finds the user's top 5 most-watched groups over the last
 * 30 days (optionally profile-scoped), then random active, non-dead
 * channels from those groups are suggested — excluding anything the user
 * watched in the last 7 days.
 */
router.get('/recommendations', readLimiter, authenticate, (req, res) => {
  const user_id = req.user.userId;
  const { limit = 10, profile_id } = req.query;

  // Get channels from same groups as user's most watched channels.
  // The CTE is left open here so the optional profile filter can be
  // appended before its GROUP BY clause.
  let query = `
    WITH user_favorite_groups AS (
      SELECT DISTINCT c.group_name, COUNT(wh.id) as watch_count
      FROM watch_history wh
      INNER JOIN channels c ON wh.channel_id = c.id
      WHERE wh.user_id = ?
        AND wh.watched_at >= datetime('now', '-30 days')
        AND c.group_name IS NOT NULL
  `;

  const params = [user_id];

  if (profile_id) {
    query += ' AND wh.profile_id = ?';
    params.push(profile_id);
  }

  // Close the CTE, then pick unseen channels from those top groups
  query += `
      GROUP BY c.group_name
      ORDER BY watch_count DESC
      LIMIT 5
    )
    SELECT DISTINCT
      c.id,
      c.name,
      c.logo,
      c.custom_logo,
      c.group_name,
      c.is_radio,
      c.url,
      c.health_status
    FROM channels c
    INNER JOIN user_favorite_groups ufg ON c.group_name = ufg.group_name
    WHERE c.id NOT IN (
      SELECT channel_id
      FROM watch_history
      WHERE user_id = ?
      AND watched_at >= datetime('now', '-7 days')
    )
    AND c.is_active = 1
    AND c.health_status != 'dead'
    ORDER BY RANDOM()
    LIMIT ?
  `;

  // Second user_id binds the NOT IN subquery; limit caps the result set
  params.push(user_id, parseInt(limit));

  db.all(query, params, (err, rows) => {
    if (err) {
      logger.error('Error fetching recommendations:', err);
      return res.status(500).json({ error: 'Failed to fetch recommendations' });
    }
    res.json(rows);
  });
});
|
||||
|
||||
/**
 * Clear watch history
 *
 * Optional query filters: ?profile_id restricts deletion to one profile;
 * ?days keeps recent history and only deletes entries older than N days.
 *
 * Fix: SQL string literals now use single quotes. The previous
 * datetime("now", "-" || ? || " days") relied on SQLite's deprecated
 * double-quoted-string fallback, which fails when DQS is disabled.
 */
router.delete('/', modifyLimiter, authenticate, (req, res) => {
  const user_id = req.user.userId;
  const { profile_id, days } = req.query;

  let query = 'DELETE FROM watch_history WHERE user_id = ?';
  const params = [user_id];

  if (profile_id) {
    query += ' AND profile_id = ?';
    params.push(profile_id);
  }

  if (days) {
    query += " AND watched_at < datetime('now', '-' || ? || ' days')";
    params.push(days);
  }

  db.run(query, params, function(err) {
    if (err) {
      logger.error('Error clearing watch history:', err);
      return res.status(500).json({ error: 'Failed to clear watch history' });
    }
    // this.changes = number of rows deleted by this statement
    res.json({ message: 'Watch history cleared', deleted: this.changes });
  });
});
|
||||
|
||||
// Export the watch-history router for mounting by the app
module.exports = router;
|
||||
298
backend/routes/log-management.js
Normal file
298
backend/routes/log-management.js
Normal file
|
|
@ -0,0 +1,298 @@
|
|||
/**
|
||||
* Log Management API Routes (CWE-53 Compliance)
|
||||
* Admin-only endpoints for log retention, archival, and integrity
|
||||
*/
|
||||
|
||||
const express = require('express');
|
||||
const router = express.Router();
|
||||
const { authenticate } = require('../middleware/auth');
|
||||
const { requirePermission } = require('../middleware/rbac');
|
||||
const { readLimiter, modifyLimiter } = require('../middleware/rateLimiter');
|
||||
const logManagement = require('../jobs/logManagement');
|
||||
const SecurityAuditLogger = require('../utils/securityAudit');
|
||||
const logger = require('../utils/logger');
|
||||
const path = require('path');
|
||||
const fs = require('fs').promises;
|
||||
|
||||
/**
 * GET /api/log-management/statistics
 * Get log management statistics
 *
 * Requires the security.view_audit permission; the access itself is
 * recorded as sensitive-data access.
 */
router.get('/statistics',
  authenticate,
  requirePermission('security.view_audit'),
  readLimiter,
  async (req, res) => {
    try {
      const data = await logManagement.getStatistics();

      // Viewing log statistics is itself audited
      await SecurityAuditLogger.logSensitiveDataAccess(req.user.userId, 'log_statistics', {
        ip: req.ip,
        userAgent: req.headers['user-agent']
      });

      return res.json({ success: true, data });
    } catch (error) {
      logger.error('[LogManagement API] Error getting statistics:', error);
      return res.status(500).json({
        success: false,
        message: 'Failed to get log statistics'
      });
    }
  }
);
|
||||
|
||||
/**
 * GET /api/log-management/archives
 * List available log archives
 *
 * Requires the security.view_audit permission; the listing is audited,
 * including how many archive records were exposed.
 */
router.get('/archives',
  authenticate,
  requirePermission('security.view_audit'),
  readLimiter,
  async (req, res) => {
    try {
      const archives = await logManagement.listArchives();

      // Record the sensitive-data access together with the result size
      await SecurityAuditLogger.logSensitiveDataAccess(req.user.userId, 'log_archives_list', {
        ip: req.ip,
        userAgent: req.headers['user-agent'],
        recordCount: archives.length
      });

      return res.json({ success: true, data: archives });
    } catch (error) {
      logger.error('[LogManagement API] Error listing archives:', error);
      return res.status(500).json({
        success: false,
        message: 'Failed to list archives'
      });
    }
  }
);
|
||||
|
||||
/**
 * POST /api/log-management/cleanup
 * Manual trigger for log cleanup
 * Admin only
 *
 * Body: { retentionDays?: number } — log entries older than this are
 * deleted. Defaults to 90 days; values below 7 are rejected.
 *
 * Fix: parseInt now passes an explicit radix of 10 so numeric strings
 * cannot be misparsed.
 */
router.post('/cleanup',
  authenticate,
  requirePermission('security.manage'),
  modifyLimiter,
  async (req, res) => {
    try {
      const { retentionDays } = req.body;
      // NaN (missing/non-numeric input) falls back to the 90-day default
      const days = parseInt(retentionDays, 10) || 90;

      if (days < 7) {
        return res.status(400).json({
          success: false,
          message: 'Retention days must be at least 7'
        });
      }

      const result = await logManagement.manualCleanup(days);

      // Record the manual cleanup in the admin audit trail
      await SecurityAuditLogger.logAdminActivity(req.user.userId, 'log_cleanup_manual', {
        ip: req.ip,
        userAgent: req.headers['user-agent'],
        retentionDays: days,
        ...result
      });

      res.json({
        success: true,
        message: `Deleted ${result.auditDeleted + result.aggregatedDeleted} old log entries`,
        data: result
      });
    } catch (error) {
      logger.error('[LogManagement API] Error during manual cleanup:', error);
      res.status(500).json({
        success: false,
        message: 'Failed to perform log cleanup'
      });
    }
  }
);
|
||||
|
||||
/**
 * POST /api/log-management/verify-integrity
 * Manual trigger for integrity verification
 * Admin only
 *
 * Runs the log integrity check, audits the outcome, and flags the response
 * with alert=true when any tampered entries were detected.
 */
router.post('/verify-integrity',
  authenticate,
  requirePermission('security.view_audit'),
  modifyLimiter,
  async (req, res) => {
    try {
      const result = await logManagement.manualIntegrityCheck();

      // A falsy result means the check itself could not run
      if (!result) {
        return res.status(500).json({
          success: false,
          message: 'Integrity verification failed'
        });
      }

      await SecurityAuditLogger.logAdminActivity(req.user.userId, 'log_integrity_check', {
        ip: req.ip,
        userAgent: req.headers['user-agent'],
        verified: result.verified,
        tampered: result.tampered
      });

      const hasTampering = result.tampered > 0;
      const summary = hasTampering
        ? `⚠️ WARNING: ${result.tampered} tampered logs detected!`
        : `All ${result.verified} logs verified successfully`;

      return res.json({
        success: true,
        message: summary,
        data: result,
        alert: hasTampering
      });
    } catch (error) {
      logger.error('[LogManagement API] Error during integrity check:', error);
      return res.status(500).json({
        success: false,
        message: 'Failed to verify log integrity'
      });
    }
  }
);
|
||||
|
||||
/**
 * GET /api/log-management/archives/download/:filename
 * Download a log archive
 * Admin only
 *
 * The filename is validated twice: rejection of '..' and path separators
 * blocks directory traversal, and the '.json.gz' suffix check restricts
 * downloads to compressed archive files. Every download is audit-logged.
 */
router.get('/archives/download/:filename',
  authenticate,
  requirePermission('security.view_audit'),
  readLimiter,
  async (req, res) => {
    try {
      const { filename } = req.params;

      // Security: prevent path traversal
      if (filename.includes('..') || filename.includes('/') || filename.includes('\\')) {
        return res.status(400).json({
          success: false,
          message: 'Invalid filename'
        });
      }

      // Security: only allow .json.gz files
      if (!filename.endsWith('.json.gz')) {
        return res.status(400).json({
          success: false,
          message: 'Invalid file type'
        });
      }

      const archiveDir = path.join(__dirname, '../../data/log-archives');
      const filePath = path.join(archiveDir, filename);

      // Check if file exists (fs.access throws when it does not)
      try {
        await fs.access(filePath);
      } catch (error) {
        return res.status(404).json({
          success: false,
          message: 'Archive not found'
        });
      }

      // Log the access before streaming the file
      await SecurityAuditLogger.logSensitiveDataAccess(req.user.userId, 'log_archive_download', {
        ip: req.ip,
        userAgent: req.headers['user-agent'],
        filename,
        accessMethod: 'download'
      });

      // Send file
      res.download(filePath, filename);
    } catch (error) {
      logger.error('[LogManagement API] Error downloading archive:', error);
      res.status(500).json({
        success: false,
        message: 'Failed to download archive'
      });
    }
  }
);
|
||||
|
||||
/**
 * DELETE /api/log-management/archives/:filename
 * Delete a log archive
 * Admin only
 *
 * Same filename validation as the download route (traversal rejection plus
 * '.json.gz' suffix check). Deletion is recorded in the admin audit trail.
 */
router.delete('/archives/:filename',
  authenticate,
  requirePermission('security.manage'),
  modifyLimiter,
  async (req, res) => {
    try {
      const { filename } = req.params;

      // Security: prevent path traversal
      if (filename.includes('..') || filename.includes('/') || filename.includes('\\')) {
        return res.status(400).json({
          success: false,
          message: 'Invalid filename'
        });
      }

      // Security: only allow .json.gz files
      if (!filename.endsWith('.json.gz')) {
        return res.status(400).json({
          success: false,
          message: 'Invalid file type'
        });
      }

      const archiveDir = path.join(__dirname, '../../data/log-archives');
      const filePath = path.join(archiveDir, filename);

      // Check if file exists (fs.access throws when it does not)
      try {
        await fs.access(filePath);
      } catch (error) {
        return res.status(404).json({
          success: false,
          message: 'Archive not found'
        });
      }

      // Delete file
      await fs.unlink(filePath);

      // Log the deletion
      await SecurityAuditLogger.logAdminActivity(req.user.userId, 'log_archive_deleted', {
        ip: req.ip,
        userAgent: req.headers['user-agent'],
        filename
      });

      res.json({
        success: true,
        message: 'Archive deleted successfully'
      });
    } catch (error) {
      logger.error('[LogManagement API] Error deleting archive:', error);
      res.status(500).json({
        success: false,
        message: 'Failed to delete archive'
      });
    }
  }
);
|
||||
|
||||
// Export the log-management router for mounting by the app
module.exports = router;
|
||||
58
backend/routes/logo-cache.js
Normal file
58
backend/routes/logo-cache.js
Normal file
|
|
@ -0,0 +1,58 @@
|
|||
const express = require('express');
|
||||
const router = express.Router();
|
||||
const { authenticate } = require('../middleware/auth');
|
||||
const { modifyLimiter } = require('../middleware/rateLimiter');
|
||||
const { cacheAllLogos, cleanupOldLogos } = require('../jobs/logoCacher');
|
||||
const { db } = require('../database/db');
|
||||
|
||||
// Trigger logo caching (admin only).
// Fire-and-forget: the response returns immediately while cacheAllLogos()
// keeps running in the background.
router.post('/cache', authenticate, modifyLimiter, async (req, res) => {
  try {
    const backgroundJob = cacheAllLogos();
    // Attach a rejection handler so a background failure is logged, not unhandled.
    backgroundJob.catch((err) => {
      console.error('[LogoCache API] Background caching error:', err);
    });

    res.json({ message: 'Logo caching started in background' });
  } catch (error) {
    console.error('[LogoCache API] Error starting cache:', error);
    res.status(500).json({ error: 'Failed to start logo caching' });
  }
});
|
||||
|
||||
// Get cache status: number of cached logos vs. number of remote (http) logo
// URLs across all channels.
router.get('/status', authenticate, async (req, res) => {
  const statsQuery =
    `SELECT
       COUNT(*) as cached_count,
       (SELECT COUNT(*) FROM channels WHERE logo LIKE 'http%' OR custom_logo LIKE 'http%') as total_count
     FROM logo_cache`;

  try {
    const row = await new Promise((resolve, reject) => {
      db.get(statsQuery, [], (err, result) => (err ? reject(err) : resolve(result)));
    });

    res.json(row);
  } catch (error) {
    console.error('[LogoCache API] Error getting status:', error);
    res.status(500).json({ error: 'Failed to get cache status' });
  }
});
|
||||
|
||||
// Cleanup old cached logos
router.post('/cleanup', authenticate, modifyLimiter, async (req, res) => {
  try {
    await cleanupOldLogos();
  } catch (error) {
    console.error('[LogoCache API] Cleanup error:', error);
    return res.status(500).json({ error: 'Failed to cleanup logos' });
  }
  res.json({ message: 'Old logos cleaned up successfully' });
});
|
||||
|
||||
// Export the logo-cache router for mounting by the app
module.exports = router;
|
||||
122
backend/routes/logo-proxy.js
Normal file
122
backend/routes/logo-proxy.js
Normal file
|
|
@ -0,0 +1,122 @@
|
|||
const express = require('express');
|
||||
const axios = require('axios');
|
||||
const logger = require('../utils/logger');
|
||||
const { db } = require('../database/db');
|
||||
const path = require('path');
|
||||
const fs = require('fs').promises;
|
||||
|
||||
const router = express.Router();
|
||||
|
||||
// Middleware to fix CORS for public image serving.
// The global CORS middleware sets Access-Control-Allow-Credentials, which is
// incompatible with a wildcard origin, so it is removed here and replaced
// with permissive public-image headers.
const fixImageCORS = (req, res, next) => {
  res.removeHeader('Access-Control-Allow-Credentials');

  const publicImageHeaders = {
    'Access-Control-Allow-Origin': '*',
    'Access-Control-Allow-Methods': 'GET, OPTIONS',
    'Access-Control-Allow-Headers': 'Content-Type',
    'Cross-Origin-Resource-Policy': 'cross-origin'
  };
  res.set(publicImageHeaders);

  next();
};
|
||||
|
||||
// Handle OPTIONS preflight requests
// Responds 204 No Content; fixImageCORS has already attached the wildcard
// CORS headers the browser needs to accept the preflight.
router.options('/', fixImageCORS, (req, res) => {
  res.status(204).end();
});
|
||||
|
||||
// Proxy external logos to handle CORS issues.
// GET /?url=<logo-url> — serves from the local logo cache when available,
// otherwise fetches the remote image and relays it with permissive CORS.
router.get('/', fixImageCORS, async (req, res) => {
  const { url } = req.query;

  if (!url) {
    return res.status(400).json({ error: 'URL parameter is required' });
  }

  // Validate the URL up front and restrict the proxy to HTTP(S).
  // (Previously the parsed URL was discarded, so invalid input surfaced as
  // a generic 500 and non-HTTP schemes were accepted.)
  // NOTE(review): this endpoint still fetches arbitrary hosts on behalf of
  // the caller (SSRF surface) — consider an allow-list or blocking
  // private/loopback addresses.
  let logoUrl;
  try {
    logoUrl = new URL(url);
  } catch {
    return res.status(400).json({ error: 'Invalid URL' });
  }
  if (logoUrl.protocol !== 'http:' && logoUrl.protocol !== 'https:') {
    return res.status(400).json({ error: 'Only http/https URLs are supported' });
  }

  try {
    // Look for a previously cached copy of this logo.
    const cached = await new Promise((resolve, reject) => {
      db.get(
        'SELECT logo_url, local_path FROM logo_cache WHERE logo_url = ? LIMIT 1',
        [url],
        (err, row) => {
          if (err) reject(err);
          else resolve(row);
        }
      );
    });

    // If cached, serve the file straight from disk with a long cache TTL.
    if (cached && cached.local_path) {
      const cachedPath = cached.local_path;
      try {
        const fileData = await fs.readFile(cachedPath);
        const ext = path.extname(cachedPath).toLowerCase();
        const contentType = {
          '.png': 'image/png',
          '.jpg': 'image/jpeg',
          '.jpeg': 'image/jpeg',
          '.gif': 'image/gif',
          '.webp': 'image/webp',
          '.svg': 'image/svg+xml'
        }[ext] || 'image/png';

        res.set({
          'Content-Type': contentType,
          'Cache-Control': 'public, max-age=2592000' // Cache for 30 days
        });
        return res.send(fileData);
      } catch (err) {
        // Stale cache entry (file missing on disk) — fall through to a live fetch.
        logger.warn('Cached logo file not found, fetching fresh:', err.message);
      }
    }

    // Fetch the image from the remote host. Browser-like headers improve
    // success rates against CDNs that reject unknown user agents.
    const response = await axios.get(url, {
      responseType: 'arraybuffer',
      timeout: 10000,
      headers: {
        'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/91.0.4472.124 Safari/537.36',
        'Accept': 'image/webp,image/apng,image/*,*/*;q=0.8',
        'Accept-Language': 'en-US,en;q=0.9',
        'Cache-Control': 'no-cache',
        'Pragma': 'no-cache'
      },
      maxRedirects: 5
    });

    // Relay the image with the upstream content type and a 24h cache TTL.
    const contentType = response.headers['content-type'] || 'image/png';
    res.set({
      'Content-Type': contentType,
      'Cache-Control': 'public, max-age=86400' // Cache for 24 hours
    });
    res.send(response.data);
  } catch (error) {
    logger.error('Logo proxy error:', {
      url,
      error: error.message,
      status: error.response?.status
    });

    // Mirror the upstream failure status when we have one.
    res.status(error.response?.status || 500).json({
      error: 'Failed to fetch logo',
      message: error.message
    });
  }
});
|
||||
|
||||
module.exports = router;
|
||||
841
backend/routes/m3u-files.js
Normal file
841
backend/routes/m3u-files.js
Normal file
|
|
@ -0,0 +1,841 @@
|
|||
const express = require('express');
|
||||
const router = express.Router();
|
||||
const path = require('path');
|
||||
const fs = require('fs').promises;
|
||||
const fsSync = require('fs');
|
||||
const { authenticate } = require('../middleware/auth');
|
||||
const { db } = require('../database/db');
|
||||
const m3uParser = require('iptv-playlist-parser');
|
||||
const axios = require('axios');
|
||||
const crypto = require('crypto');
|
||||
|
||||
// Cache for IPTV-org logos database (in memory)
|
||||
let logoDbCache = null;
|
||||
let logoDbCacheTime = null;
|
||||
const CACHE_DURATION = 24 * 60 * 60 * 1000; // 24 hours
|
||||
|
||||
// Local logo cache directory
|
||||
const LOGO_CACHE_DIR = path.join('/app', 'data', 'logo-cache');
|
||||
|
||||
// Ensure the on-disk logo cache directory exists.
// Best-effort: a failure (e.g. permissions) is logged, not thrown, so the
// module can still load; downloads will simply fail later.
async function ensureLogoCacheDir() {
  try {
    await fs.mkdir(LOGO_CACHE_DIR, { recursive: true });
  } catch (mkdirError) {
    console.error('Failed to create logo cache directory:', mkdirError);
  }
}

// Kick off directory creation at module load time.
ensureLogoCacheDir();
|
||||
|
||||
// Download a logo and cache it locally.
// Returns the public path ("/logos/<md5-hash><ext>") on success, or null on
// any failure so callers keep their existing logo instead of overwriting it
// with a geo-blocked/inaccessible URL.
async function downloadAndCacheLogo(logoUrl) {
  try {
    // Derive a stable, collision-resistant-enough filename from the URL.
    const hash = crypto.createHash('md5').update(logoUrl).digest('hex');
    const ext = path.extname(new URL(logoUrl).pathname) || '.png';
    const filename = `${hash}${ext}`;
    const localPath = path.join(LOGO_CACHE_DIR, filename);
    // BUGFIX: was a literal "/logos/$(unknown)" — `$()` is not JS
    // interpolation, so every logo resolved to the same bogus path.
    const publicPath = `/logos/${filename}`;

    // Check if already cached on disk.
    try {
      await fs.access(localPath);
      return publicPath; // Already cached
    } catch {
      // Not cached, download it
    }

    console.log(`Downloading logo: ${logoUrl}`);

    const response = await axios({
      method: 'GET',
      url: logoUrl,
      responseType: 'arraybuffer',
      timeout: 10000,
      headers: {
        'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36'
      }
    });

    // Persist to the cache directory.
    await fs.writeFile(localPath, response.data);
    console.log(`Cached logo: ${publicPath}`);

    return publicPath;
  } catch (error) {
    console.error(`Failed to download logo ${logoUrl}:`, error.message);
    // Return null instead of the failed URL to avoid overwriting existing logos
    // with geo-blocked/inaccessible URLs
    return null;
  }
}
|
||||
|
||||
/**
 * Fetch and cache the channel-logo database used for logo matching.
 * Primary source: tv-logo/tv-logos GitHub repo (Romania folder), merged with
 * the IPTV-org logo list (tv-logos entries take priority on name clashes).
 * Results are memoized in module-level `logoDbCache` for CACHE_DURATION (24h).
 * Always resolves to an array (possibly the stale cache, or [] on total failure).
 */
async function getLogoDatabase() {
  const now = Date.now();

  // Return cached data if still valid
  if (logoDbCache && logoDbCacheTime && (now - logoDbCacheTime) < CACHE_DURATION) {
    return logoDbCache;
  }

  try {
    console.log('Fetching logos from tv-logo/tv-logos (Romania)...');

    // Fetch Romanian logos from tv-logos repository
    const response = await axios.get('https://api.github.com/repos/tv-logo/tv-logos/contents/countries/romania', {
      timeout: 10000,
      headers: {
        'Accept': 'application/vnd.github.v3+json',
        'User-Agent': 'StreamFlow-IPTV'
      }
    });

    // Transform GitHub API response to match our logo database format
    // ({ name, url, guides } entries; names lowercased, extension stripped).
    const logos = response.data
      .filter(item => item.type === 'file' && (item.name.endsWith('.png') || item.name.endsWith('.svg')))
      .map(item => ({
        name: item.name.replace(/\.(png|svg)$/i, '').toLowerCase(),
        url: item.download_url,
        guides: []
      }));

    logoDbCache = logos;
    logoDbCacheTime = now;
    console.log(`Loaded ${logoDbCache.length} logos from tv-logos (Romania)`);

    // Also try to fetch IPTV-org as fallback
    try {
      const iptvOrgResponse = await axios.get('https://iptv-org.github.io/api/logos.json', {
        timeout: 5000
      });
      // Merge both sources, tv-logos takes priority
      const combinedLogos = [...logoDbCache];
      const existingNames = new Set(logoDbCache.map(l => l.name));

      iptvOrgResponse.data.forEach(logo => {
        if (!existingNames.has(logo.name.toLowerCase())) {
          combinedLogos.push(logo);
        }
      });

      logoDbCache = combinedLogos;
      console.log(`Total logos after merging with IPTV-org: ${logoDbCache.length}`);
    } catch (iptvError) {
      // Merge is purely additive — missing fallback is not an error.
      console.log('IPTV-org fallback not available, using only tv-logos');
    }

    return logoDbCache;
  } catch (error) {
    console.error('Failed to fetch logos from tv-logos:', error.message);

    // Try IPTV-org as fallback
    try {
      console.log('Trying IPTV-org as fallback...');
      const response = await axios.get('https://iptv-org.github.io/api/logos.json', {
        timeout: 10000
      });
      logoDbCache = response.data;
      logoDbCacheTime = now;
      console.log(`Loaded ${logoDbCache.length} logos from IPTV-org (fallback)`);
      return logoDbCache;
    } catch (fallbackError) {
      console.error('Fallback to IPTV-org also failed:', fallbackError.message);
      // Serve whatever stale cache we still hold; [] when nothing was ever loaded.
      return logoDbCache || [];
    }
  }
}
|
||||
|
||||
/**
 * Find a logo for a channel by trying, in order: exact name match, partial
 * (substring) name match, then a match on the stream URL's domain. The first
 * hit is downloaded into the local cache; resolves to the cached public path
 * or null when nothing matches.
 */
async function findChannelLogo(channelName, channelUrl, logoDb) {
  if (!logoDb || logoDb.length === 0) return null;

  // Normalize: strip "(...)" and "[...]" qualifiers, collapse whitespace.
  const cleanName = channelName.toLowerCase()
    .replace(/\s*\([^)]*\)/g, '') // Remove parentheses content
    .replace(/\s*\[[^\]]*\]/g, '') // Remove brackets content
    .replace(/\s+/g, ' ')
    .trim();

  // Matching strategies, highest-confidence first.
  const exactMatch = (logo) => logo.name.toLowerCase() === cleanName;
  const partialMatch = (logo) => {
    const logoName = logo.name.toLowerCase();
    return cleanName.includes(logoName) || logoName.includes(cleanName);
  };

  const strategies = [exactMatch, partialMatch];

  // Domain-based strategy only applies when the stream URL parses.
  if (channelUrl) {
    try {
      const host = new URL(channelUrl).hostname.replace('www.', '');
      const domainKey = host.split('.')[0];
      strategies.push((logo) => logo.name.toLowerCase().includes(domainKey));
    } catch (e) {
      // Invalid URL — skip the domain strategy.
    }
  }

  for (const matches of strategies) {
    const hit = logoDb.find(matches);
    if (hit) return await downloadAndCacheLogo(hit.url);
  }

  return null;
}
|
||||
|
||||
/**
 * Heuristically classify a parsed playlist entry as TV or radio.
 * Priority: strong TV markers (url/name) > strong radio URL markers >
 * radio-style name/group patterns > group keywords > the user's selection.
 *
 * @param {object} item - parsed M3U entry ({ url, name, group.title })
 * @param {string} userSelectedType - 'tv' or 'radio'; used only as fallback
 * @returns {number} 1 for radio, 0 for TV
 */
function detectChannelType(item, userSelectedType) {
  const url = (item.url || '').toLowerCase();
  const name = (item.name || '').toLowerCase();
  const group = (item.group?.title || '').toLowerCase();

  // Any of these in the URL or name means the entry is definitely TV.
  const TV_MARKERS = [
    '.m3u8', // HLS video streams
    '/playlist.m3u8', // Video playlists
    'video',
    'tv',
    '1080p', '720p', '480p', '360p', '4k', 'hd', 'fhd', 'uhd', // Video quality markers
    'tvsat', 'livestream'
  ];

  // Any of these in the URL means the entry is definitely radio.
  const RADIO_URL_MARKERS = [
    ':8000/', ':8001/', ':8080/', ':8443/', // Common radio streaming ports
    '/radio',
    'radiostream',
    '.mp3', '.aac', '.ogg', // Audio file extensions
    'icecast', 'shoutcast' // Radio streaming platforms
  ];

  // Name/group shapes typical of radio stations.
  const RADIO_NAME_PATTERNS = [
    /^radio\s/i, // Starts with "Radio "
    /\sradio$/i, // Ends with " Radio"
    /\sradio\s/i, // Contains " Radio "
    /\sfm$/i, // Ends with " FM"
    /^fm\s/i, // Starts with "FM "
    /\d+\.?\d*\s?fm/i, // Frequency like "101.5 FM" or "101FM"
    /\sam\s/i, // Contains " AM "
  ];

  if (TV_MARKERS.some((marker) => url.includes(marker) || name.includes(marker))) {
    return 0; // Definitely TV
  }

  if (RADIO_URL_MARKERS.some((marker) => url.includes(marker))) {
    return 1; // Definitely radio
  }

  if (RADIO_NAME_PATTERNS.some((pattern) => pattern.test(name) || pattern.test(group))) {
    return 1; // Likely radio based on name
  }

  if (group.includes('radio') || group.includes('fm') || group.includes('am')) {
    return 1;
  }

  // No strong signal either way — trust the user's selection.
  return userSelectedType === 'radio' ? 1 : 0;
}
|
||||
|
||||
// Directory where uploaded M3U playlist files are stored on disk.
const M3U_UPLOAD_DIR = path.join('/app', 'data', 'm3u-files');

// Create it at module load; failure is logged but not fatal.
try {
  if (!fsSync.existsSync(M3U_UPLOAD_DIR)) {
    fsSync.mkdirSync(M3U_UPLOAD_DIR, { recursive: true });
  }
} catch (mkdirError) {
  console.error('Failed to create M3U upload directory:', mkdirError);
}
|
||||
|
||||
// List the current user's M3U files, newest first.
// Only metadata columns are returned — never file_path.
router.get('/', authenticate, (req, res) => {
  const listSql = `SELECT id, user_id, name, original_filename, size, created_at, updated_at
     FROM m3u_files
     WHERE user_id = ?
     ORDER BY created_at DESC`;

  db.all(listSql, [req.user.userId], (dbErr, rows) => {
    if (dbErr) {
      console.error('Error fetching M3U files:', dbErr);
      return res.status(500).json({ error: 'Failed to fetch M3U files' });
    }
    res.json(rows);
  });
});
|
||||
|
||||
// Download M3U file
// GET /:id/download — streams the stored playlist back to its owner.
// Ownership is enforced by the `user_id = ?` clause; unknown or foreign ids
// both surface as 404.
router.get('/:id/download', authenticate, async (req, res) => {
  const { id } = req.params;

  db.get(
    'SELECT * FROM m3u_files WHERE id = ? AND user_id = ?',
    [id, req.user.userId],
    async (err, file) => {
      if (err) {
        console.error('Error fetching M3U file:', err);
        return res.status(500).json({ error: 'Failed to fetch M3U file' });
      }

      if (!file) {
        return res.status(404).json({ error: 'M3U file not found' });
      }

      try {
        const filePath = file.file_path;
        // Sanitize filename to prevent path traversal attacks
        // (also prevents header injection via quotes in Content-Disposition).
        const fileName = (file.original_filename || `${file.name}.m3u`).replace(/[^a-zA-Z0-9._-]/g, '_');

        // Check if file exists
        // Throws if missing, which lands in the catch below as a 500.
        await fs.access(filePath);

        // Set headers for download
        res.setHeader('Content-Type', 'audio/x-mpegurl');
        res.setHeader('Content-Disposition', `attachment; filename="${fileName}"`);

        // Stream the file
        // NOTE(review): stream errors after piping begins are not handled —
        // consider fileStream.on('error', ...).
        const fileStream = fsSync.createReadStream(filePath);
        fileStream.pipe(res);

      } catch (error) {
        console.error('Error downloading M3U file:', error);
        res.status(500).json({ error: 'Failed to download M3U file' });
      }
    }
  );
});
|
||||
|
||||
// Upload M3U file
// POST /upload — multipart upload (field "m3u") plus a display name in the
// body. Assumes an express-fileupload-style middleware populates req.files
// with a .mv() method — TODO confirm against app setup.
// Responds 201 with the stored file's metadata row.
router.post('/upload', authenticate, async (req, res) => {
  try {
    if (!req.files || !req.files.m3u) {
      return res.status(400).json({ error: 'No file uploaded' });
    }

    const uploadedFile = req.files.m3u;
    const { name } = req.body;

    if (!name || !name.trim()) {
      return res.status(400).json({ error: 'File name is required' });
    }

    // Validate file extension
    const ext = path.extname(uploadedFile.name).toLowerCase();
    if (ext !== '.m3u' && ext !== '.m3u8') {
      return res.status(400).json({ error: 'Only .m3u and .m3u8 files are allowed' });
    }

    // Generate unique filename
    // (timestamp + random suffix; the client-supplied name never touches disk)
    const uniqueSuffix = Date.now() + '-' + Math.round(Math.random() * 1E9);
    const filename = uniqueSuffix + ext;
    const filePath = path.join(M3U_UPLOAD_DIR, filename);

    // Move file from temp location to M3U directory
    await uploadedFile.mv(filePath);

    db.run(
      `INSERT INTO m3u_files (user_id, name, original_filename, file_path, size)
       VALUES (?, ?, ?, ?, ?)`,
      [req.user.userId, name.trim(), uploadedFile.name, filePath, uploadedFile.size],
      function(err) {
        if (err) {
          console.error('Error uploading M3U file:', err);
          // DB insert failed — best-effort removal of the orphaned file on disk.
          fs.unlink(filePath).catch(console.error);
          return res.status(500).json({ error: 'Failed to upload M3U file' });
        }

        // Re-read the inserted row so the response includes DB-generated
        // columns (id, created_at, updated_at).
        db.get(
          `SELECT id, user_id, name, original_filename, size, created_at, updated_at
           FROM m3u_files WHERE id = ?`,
          [this.lastID],
          (err, file) => {
            if (err) {
              return res.status(500).json({ error: 'Failed to fetch uploaded file' });
            }
            res.status(201).json(file);
          }
        );
      }
    );
  } catch (error) {
    console.error('Error uploading M3U file:', error);
    res.status(500).json({ error: 'Failed to upload M3U file' });
  }
});
|
||||
|
||||
// Rename M3U file
// PATCH /:id — updates only the display name (the file on disk is untouched).
// Ownership is checked first; responds with the refreshed metadata row.
router.patch('/:id', authenticate, (req, res) => {
  const { id } = req.params;
  const { name } = req.body;

  if (!name || !name.trim()) {
    return res.status(400).json({ error: 'File name is required' });
  }

  // Verify ownership
  db.get(
    'SELECT * FROM m3u_files WHERE id = ? AND user_id = ?',
    [id, req.user.userId],
    (err, file) => {
      if (err) {
        console.error('Error fetching M3U file:', err);
        return res.status(500).json({ error: 'Failed to fetch M3U file' });
      }
      if (!file) {
        return res.status(404).json({ error: 'M3U file not found' });
      }

      db.run(
        'UPDATE m3u_files SET name = ?, updated_at = CURRENT_TIMESTAMP WHERE id = ?',
        [name.trim(), id],
        (err) => {
          if (err) {
            console.error('Error renaming M3U file:', err);
            return res.status(500).json({ error: 'Failed to rename M3U file' });
          }

          // Re-read so the response carries the new updated_at value.
          db.get(
            `SELECT id, user_id, name, original_filename, size, created_at, updated_at
             FROM m3u_files WHERE id = ?`,
            [id],
            (err, updatedFile) => {
              if (err) {
                return res.status(500).json({ error: 'Failed to fetch updated file' });
              }
              res.json(updatedFile);
            }
          );
        }
      );
    }
  );
});
|
||||
|
||||
// Delete an M3U file: the DB row is removed first, then the file on disk is
// deleted best-effort (a missing physical file is logged, not fatal).
router.delete('/:id', authenticate, async (req, res) => {
  const { id } = req.params;

  // Fetch the record scoped to the requesting user (ownership check).
  db.get(
    'SELECT * FROM m3u_files WHERE id = ? AND user_id = ?',
    [id, req.user.userId],
    async (lookupErr, record) => {
      if (lookupErr) {
        console.error('Error fetching M3U file:', lookupErr);
        return res.status(500).json({ error: 'Failed to fetch M3U file' });
      }
      if (!record) {
        return res.status(404).json({ error: 'M3U file not found' });
      }

      db.run('DELETE FROM m3u_files WHERE id = ?', [id], async (deleteErr) => {
        if (deleteErr) {
          console.error('Error deleting M3U file:', deleteErr);
          return res.status(500).json({ error: 'Failed to delete M3U file' });
        }

        // Row is gone; now remove the physical file (best effort).
        try {
          await fs.unlink(record.file_path);
        } catch (fsError) {
          console.error('Error deleting physical file:', fsError);
        }

        res.json({ message: 'M3U file deleted successfully' });
      });
    }
  );
});
|
||||
|
||||
// Import M3U file to playlist
// POST /:id/import — parses the stored M3U file, creates (or reuses by name)
// a playlist for the user, and inserts every entry as a channel. Each
// channel's radio/TV type is re-detected heuristically, and logos are
// resolved/cached as a side effect. Channels are processed sequentially
// because logo lookups are async and rate-sensitive.
router.post('/:id/import', authenticate, async (req, res) => {
  try {
    const { id } = req.params;
    const { type } = req.body; // 'tv' or 'radio'

    if (!type || !['tv', 'radio'].includes(type)) {
      return res.status(400).json({ error: 'Invalid type. Must be "tv" or "radio"' });
    }

    // Fetch logo database in parallel with file processing
    const logoDbPromise = getLogoDatabase();

    // Get file info and verify ownership
    db.get(
      'SELECT * FROM m3u_files WHERE id = ? AND user_id = ?',
      [id, req.user.userId],
      async (err, file) => {
        if (err) {
          console.error('Error fetching M3U file:', err);
          return res.status(500).json({ error: 'Failed to fetch M3U file' });
        }
        if (!file) {
          return res.status(404).json({ error: 'M3U file not found' });
        }

        try {
          // Read and parse M3U file
          console.log('Reading M3U file:', file.file_path);
          const m3uContent = await fs.readFile(file.file_path, 'utf-8');
          console.log('M3U content length:', m3uContent.length);
          const parsed = m3uParser.parse(m3uContent);
          console.log('Parsed items count:', parsed.items.length);

          // Wait for logo database
          const logoDb = await logoDbPromise;
          console.log(`Logo database ready with ${logoDb.length} entries`);

          // Create or get playlist
          // (playlists are keyed by user + name; re-importing the same file
          // merges into the existing playlist)
          db.get(
            `SELECT * FROM playlists WHERE user_id = ? AND name = ?`,
            [req.user.userId, file.name],
            (err, playlist) => {
              if (err) {
                console.error('Error checking playlist:', err);
                return res.status(500).json({ error: 'Failed to check playlist' });
              }

              // Shared import body, invoked with either the newly created or
              // the pre-existing playlist id. Sends the HTTP response itself.
              const createOrUsePlaylist = async (playlistId) => {
                // Insert channels
                let channelsAdded = 0;
                let channelsProcessed = 0;
                const totalChannels = parsed.items.length;

                if (totalChannels === 0) {
                  return res.json({
                    message: 'No channels found in M3U file',
                    playlist_id: playlistId,
                    channels_added: 0,
                    type,
                  });
                }

                // Process channels sequentially to handle async logo downloads
                for (const item of parsed.items) {
                  try {
                    // Detect actual channel type using heuristics
                    const isRadio = detectChannelType(item, type);

                    // Try to find logo from IPTV-org if not provided, or cache existing logo
                    let logo = item.tvg?.logo;
                    if (!logo || logo.trim() === '') {
                      // No logo in M3U, try to find one from IPTV-org
                      const foundLogo = await findChannelLogo(item.name, item.url, logoDb);
                      if (foundLogo) {
                        logo = foundLogo;
                      }
                    } else {
                      // Logo exists in M3U, try to cache it
                      const cachedLogo = await downloadAndCacheLogo(logo);
                      // Only use cached logo if download succeeded
                      if (cachedLogo) {
                        logo = cachedLogo;
                      }
                      // Otherwise keep original logo URL from M3U
                    }

                    // INSERT OR IGNORE dedupes on the table's unique
                    // constraint; this.changes distinguishes inserted vs
                    // ignored rows. Insert errors are logged but never fail
                    // the whole import (resolve() in both paths).
                    await new Promise((resolve, reject) => {
                      db.run(
                        `INSERT OR IGNORE INTO channels (
                          playlist_id, name, url, logo, group_name,
                          tvg_id, tvg_name, language, country, is_radio
                        ) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?)`,
                        [
                          playlistId,
                          item.name || 'Unknown Channel',
                          item.url,
                          logo,
                          item.group?.title || null,
                          item.tvg?.id || null,
                          item.tvg?.name || null,
                          item.tvg?.language || null,
                          item.tvg?.country || null,
                          isRadio,
                        ],
                        function(insertErr) {
                          if (!insertErr && this.changes > 0) {
                            channelsAdded++;
                          }
                          if (insertErr) {
                            console.error('Error inserting channel:', insertErr);
                          }
                          resolve();
                        }
                      );
                    });

                    channelsProcessed++;
                  } catch (error) {
                    console.error('Error processing channel:', error);
                    channelsProcessed++;
                  }
                }

                // Update complete
                console.log(`Import complete: ${channelsAdded} channels added out of ${totalChannels}`);

                // Update channel count - count all channels in this playlist regardless of type
                db.run(
                  'UPDATE playlists SET channel_count = (SELECT COUNT(*) FROM channels WHERE playlist_id = ?) WHERE id = ?',
                  [playlistId, playlistId],
                  (updateErr) => {
                    if (updateErr) {
                      console.error('Error updating playlist count:', updateErr);
                    }

                    // Get actual counts of radio vs TV
                    db.get(
                      'SELECT SUM(is_radio = 1) as radio_count, SUM(is_radio = 0) as tv_count FROM channels WHERE playlist_id = ?',
                      [playlistId],
                      (err, counts) => {
                        const radioCount = counts?.radio_count || 0;
                        const tvCount = counts?.tv_count || 0;

                        res.json({
                          message: 'M3U file imported successfully',
                          playlist_id: playlistId,
                          channels_added: channelsAdded,
                          radio_channels: radioCount,
                          tv_channels: tvCount,
                          type,
                        });
                      }
                    );
                  }
                );
              };

              if (!playlist) {
                // Create new playlist
                db.run(
                  `INSERT INTO playlists (user_id, name, type, url)
                   VALUES (?, ?, ?, ?)`,
                  [req.user.userId, file.name, type, file.file_path],
                  function(insertErr) {
                    if (insertErr) {
                      console.error('Error creating playlist:', insertErr);
                      return res.status(500).json({ error: 'Failed to create playlist' });
                    }
                    createOrUsePlaylist(this.lastID);
                  }
                );
              } else {
                createOrUsePlaylist(playlist.id);
              }
            }
          );
        } catch (parseError) {
          console.error('Error parsing/importing M3U file:', parseError);
          console.error('Error stack:', parseError.stack);
          res.status(500).json({ error: 'Failed to parse M3U file: ' + parseError.message });
        }
      }
    );
  } catch (error) {
    console.error('Error importing M3U file:', error);
    res.status(500).json({ error: 'Failed to import M3U file' });
  }
});
|
||||
|
||||
// Fix channel types for existing channels (utility endpoint)
// POST /fix-channel-types — two phases: (1) global dedupe of channels sharing
// the same (playlist_id, url), then (2) re-run the radio/TV heuristic over
// the current user's channels and persist any changed is_radio flags.
// The response is sent from whichever callback observes processed === total.
router.post('/fix-channel-types', authenticate, async (req, res) => {
  try {
    console.log('Starting channel type fix and deduplication...');

    // First, remove duplicates (same URL in same playlist)
    // NOTE(review): this DELETE is not scoped to the requesting user —
    // it dedupes every user's channels; confirm that is intended.
    db.run(
      `DELETE FROM channels WHERE id NOT IN (
        SELECT MIN(id) FROM channels
        GROUP BY playlist_id, url
      )`,
      (delErr, delResult) => {
        if (delErr) {
          // Dedupe failure is non-fatal; type fixing proceeds regardless.
          console.error('Error removing duplicates:', delErr);
        } else {
          console.log('Duplicates removed');
        }

        // Now fix channel types
        db.all(
          `SELECT c.id, c.name, c.url, c.group_name, c.is_radio
           FROM channels c
           JOIN playlists p ON c.playlist_id = p.id
           WHERE p.user_id = ?`,
          [req.user.userId],
          (err, channels) => {
            if (err) {
              console.error('Error fetching channels:', err);
              return res.status(500).json({ error: 'Failed to fetch channels' });
            }

            // processed counts every channel examined; updated only those
            // whose is_radio flag actually changed in the DB.
            let updated = 0;
            let processed = 0;
            const total = channels.length;

            console.log(`Found ${total} channels to analyze`);

            if (total === 0) {
              return res.json({
                message: 'No channels found',
                total_channels: 0,
                updated_channels: 0
              });
            }

            channels.forEach(channel => {
              // Detect correct type
              const item = {
                name: channel.name,
                url: channel.url,
                group: { title: channel.group_name }
              };

              // Determine if it should be radio or TV based on current type
              const currentType = channel.is_radio === 1 ? 'radio' : 'tv';
              const detectedIsRadio = detectChannelType(item, currentType);

              // Only update if detection differs from current value
              if (detectedIsRadio !== channel.is_radio) {
                db.run(
                  'UPDATE channels SET is_radio = ? WHERE id = ?',
                  [detectedIsRadio, channel.id],
                  function(updateErr) {
                    processed++;
                    if (!updateErr && this.changes > 0) {
                      updated++;
                      console.log(`Updated channel ${channel.id} (${channel.name}): ${channel.is_radio} -> ${detectedIsRadio}`);
                    }

                    // Last callback to finish sends the response.
                    if (processed === total) {
                      console.log(`Fix complete: ${updated} channels updated out of ${total}`);
                      res.json({
                        message: 'Channel types fixed and duplicates removed',
                        total_channels: total,
                        updated_channels: updated
                      });
                    }
                  }
                );
              } else {
                // No change needed — still counts toward completion.
                processed++;
                if (processed === total) {
                  console.log(`Fix complete: ${updated} channels updated out of ${total}`);
                  res.json({
                    message: 'Channel types fixed and duplicates removed',
                    total_channels: total,
                    updated_channels: updated
                  });
                }
              }
            });
          }
        );
      }
    );
  } catch (error) {
    console.error('Error fixing channel types:', error);
    res.status(500).json({ error: 'Failed to fix channel types' });
  }
});
|
||||
|
||||
// Update missing logos from IPTV-org database
// POST /update-logos — for every channel of the current user with a NULL or
// empty logo, attempts a lookup against the cached logo database and writes
// the first match back. Channels are processed one at a time because each
// hit triggers an async logo download.
router.post('/update-logos', authenticate, async (req, res) => {
  try {
    console.log('Fetching logo database...');
    const logoDb = await getLogoDatabase();

    if (!logoDb || logoDb.length === 0) {
      return res.status(503).json({ error: 'Logo database unavailable' });
    }

    console.log('Fetching channels with missing logos...');

    // Get channels without logos
    db.all(
      `SELECT c.id, c.name, c.url, c.logo
       FROM channels c
       JOIN playlists p ON c.playlist_id = p.id
       WHERE p.user_id = ? AND (c.logo IS NULL OR c.logo = '')`,
      [req.user.userId],
      async (err, channels) => {
        if (err) {
          console.error('Error fetching channels:', err);
          return res.status(500).json({ error: 'Failed to fetch channels' });
        }

        const total = channels.length;
        console.log(`Found ${total} channels without logos`);

        if (total === 0) {
          return res.json({
            message: 'All channels already have logos',
            total_channels: 0,
            updated_channels: 0
          });
        }

        let updated = 0;

        // Process sequentially to handle async logo downloads
        for (const channel of channels) {
          try {
            const foundLogo = await findChannelLogo(channel.name, channel.url, logoDb);

            if (foundLogo) {
              // Wrap db.run so the loop waits for each write; update errors
              // are tolerated (resolve either way) so one bad row cannot
              // abort the batch.
              await new Promise((resolve) => {
                db.run(
                  'UPDATE channels SET logo = ? WHERE id = ?',
                  [foundLogo, channel.id],
                  function(updateErr) {
                    if (!updateErr && this.changes > 0) {
                      updated++;
                      console.log(`Updated logo for ${channel.name}: ${foundLogo}`);
                    }
                    resolve();
                  }
                );
              });
            }
          } catch (error) {
            console.error(`Error updating logo for ${channel.name}:`, error);
          }
        }

        console.log(`Logo update complete: ${updated} logos added`);
        res.json({
          message: 'Channel logos updated',
          total_channels: total,
          updated_channels: updated
        });
      }
    );
  } catch (error) {
    console.error('Error updating logos:', error);
    res.status(500).json({ error: 'Failed to update logos' });
  }
});
|
||||
|
||||
module.exports = router;
|
||||
218
backend/routes/metadata.js
Normal file
218
backend/routes/metadata.js
Normal file
|
|
@ -0,0 +1,218 @@
|
|||
const express = require('express');
|
||||
const router = express.Router();
|
||||
const axios = require('axios');
|
||||
const { authenticate } = require('../middleware/auth');
|
||||
const { readLimiter } = require('../middleware/rateLimiter');
|
||||
const logger = require('../utils/logger');
|
||||
const { getRadioStationMetadata } = require('../utils/radioMetadata');
|
||||
|
||||
/**
 * GET /radio/:channelId
 *
 * Fetch "now playing" metadata for a radio channel.
 *
 * Strategy (in order):
 *   1. Open the stream with `Icy-MetaData: 1` and read Shoutcast/ICY headers
 *      (station name, genre, bitrate, homepage).
 *   2. If the server advertises `icy-metaint`, read enough of the stream to
 *      reach the first in-band metadata block and parse `StreamTitle='...'`.
 *   3. If ICY yields nothing, fall back to external lookups via
 *      getRadioStationMetadata(); the same fallback is used when the stream
 *      request itself fails.
 *
 * Responds 400 on a non-positive/non-numeric id, 404 when the channel does
 * not exist or is not flagged `is_radio`, and otherwise always 200 with a
 * metadata object (fields are null when unknown).
 */
router.get('/radio/:channelId', readLimiter, authenticate, async (req, res) => {
  const { channelId } = req.params;
  const { db } = require('../database/db');

  try {
    // Validate channel ID
    const id = parseInt(channelId, 10);
    if (isNaN(id) || id < 1) {
      return res.status(400).json({ error: 'Invalid channel ID' });
    }

    // Get channel URL from database (radio channels only)
    db.get(
      'SELECT url, name FROM channels WHERE id = ? AND is_radio = 1',
      [id],
      async (err, channel) => {
        // DB errors and missing rows are collapsed into a single 404 here.
        if (err || !channel) {
          return res.status(404).json({ error: 'Channel not found' });
        }

        try {
          // Request stream with ICY metadata headers; 5s cap so a stalled
          // stream cannot hold the request open indefinitely.
          const response = await axios.get(channel.url, {
            headers: {
              'Icy-MetaData': '1',
              'User-Agent': 'StreamFlow/1.0'
            },
            responseType: 'stream',
            timeout: 5000,
            maxRedirects: 5
          });

          // Response skeleton; fields stay null when a source has no data.
          let metadata = {
            channelId: parseInt(channelId),
            channelName: channel.name,
            title: null,
            artist: null,
            song: null,
            streamTitle: null,
            bitrate: null,
            genre: null,
            url: null
          };

          // Extract ICY headers
          const icyName = response.headers['icy-name'];
          const icyGenre = response.headers['icy-genre'];
          const icyBr = response.headers['icy-br'];
          const icyUrl = response.headers['icy-url'];
          // NOTE(review): icy-description is read but never used below.
          const icyDescription = response.headers['icy-description'];

          if (icyName) {
            // Filter out stream quality info (e.g., "europafm_aacp_48k")
            const cleanName = icyName.replace(/_aacp?_\d+k?/gi, '').replace(/_mp3_\d+k?/gi, '').replace(/_\d+k/gi, '');
            if (cleanName && !cleanName.match(/^\w+_\w+$/)) {
              metadata.streamTitle = cleanName;
            } else {
              metadata.streamTitle = icyName;
            }
          }
          if (icyGenre) metadata.genre = icyGenre;
          if (icyBr) metadata.bitrate = icyBr + ' kbps';
          if (icyUrl) metadata.url = icyUrl;

          // Try to get current track from ICY-MetaInt (bytes of audio between
          // in-band metadata blocks).
          const metaInt = parseInt(response.headers['icy-metaint']);

          if (metaInt && metaInt > 0) {
            // Read metadata from stream: buffer chunks until we have passed
            // the first metaInt bytes plus the maximum metadata block
            // (255 * 16 bytes is the ICY max; 255 is used as a safe margin —
            // TODO confirm this is always enough for the first block).
            const chunks = [];
            let bytesRead = 0;
            let metadataFound = false;

            response.data.on('data', (chunk) => {
              if (metadataFound) return;

              chunks.push(chunk);
              bytesRead += chunk.length;

              // Once we have enough data, parse metadata
              if (bytesRead >= metaInt + 255) {
                const buffer = Buffer.concat(chunks);
                // The byte at offset metaInt is the metadata length in
                // 16-byte units (ICY framing).
                const metadataLength = buffer[metaInt] * 16;

                if (metadataLength > 0) {
                  const metadataBuffer = buffer.slice(metaInt + 1, metaInt + 1 + metadataLength);
                  // Metadata is NUL-padded to the 16-byte boundary.
                  const metadataString = metadataBuffer.toString('utf8').replace(/\0/g, '');

                  logger.info(`[Metadata] Raw metadata string for ${channel.name}: ${metadataString}`);

                  // Parse StreamTitle='Artist - Song'
                  const titleMatch = metadataString.match(/StreamTitle='([^']*)'/);
                  if (titleMatch && titleMatch[1]) {
                    const rawTitle = titleMatch[1];
                    logger.info(`[Metadata] Raw title: ${rawTitle}`);

                    // Skip if it's just stream quality info
                    if (!rawTitle.match(/^\w+_aacp?_\d+k?$/i) && !rawTitle.match(/^\w+_mp3_\d+k?$/i)) {
                      metadata.song = rawTitle;

                      // Try to split into artist and title (various separators)
                      let parts = rawTitle.split(' - ');
                      if (parts.length === 1) {
                        parts = rawTitle.split(' – '); // em dash
                      }
                      if (parts.length === 1) {
                        parts = rawTitle.split(' | ');
                      }

                      if (parts.length >= 2) {
                        metadata.artist = parts[0].trim();
                        metadata.title = parts.slice(1).join(' - ').trim();
                      } else if (parts.length === 1 && rawTitle.length > 0) {
                        // If no separator, use the whole thing as title
                        metadata.title = rawTitle.trim();
                      }
                    }
                  }
                }

                metadataFound = true;
                response.data.destroy();
              }
            });

            // Wait a bit for metadata. NOTE(review): this always waits the
            // full 2s even when metadata arrives earlier — the 'data'
            // handler mutates `metadata` but nothing resolves this promise
            // sooner.
            await new Promise(resolve => setTimeout(resolve, 2000));
          }

          // Close the stream (it may still be open if metaInt was absent or
          // the threshold was never reached).
          if (response.data && !response.data.destroyed) {
            response.data.destroy();
          }

          // If no metadata found from ICY, try external APIs
          if (!metadata.title && !metadata.artist && !metadata.song) {
            logger.info(`[Metadata] No ICY metadata found, trying external sources for ${channel.name}`);

            try {
              const externalMetadata = await getRadioStationMetadata(channel.name, channel.url);
              if (externalMetadata) {
                metadata.title = externalMetadata.title;
                metadata.artist = externalMetadata.artist;
                if (externalMetadata.title && externalMetadata.artist) {
                  metadata.song = `${externalMetadata.artist} - ${externalMetadata.title}`;
                } else if (externalMetadata.title) {
                  metadata.song = externalMetadata.title;
                }
                logger.info(`[Metadata] External metadata found: ${metadata.song}`);
              }
            } catch (externalError) {
              // Best-effort: external lookup failure is logged, not fatal.
              logger.error(`[Metadata] External metadata fetch failed: ${externalError.message}`);
            }
          }

          res.json(metadata);

        } catch (streamError) {
          logger.error('Error fetching stream metadata:', streamError.message);

          // Try external metadata as fallback when the stream itself failed
          try {
            const externalMetadata = await getRadioStationMetadata(channel.name, channel.url);
            if (externalMetadata) {
              return res.json({
                channelId: parseInt(channelId),
                channelName: channel.name,
                title: externalMetadata.title,
                artist: externalMetadata.artist,
                song: externalMetadata.artist && externalMetadata.title
                  ? `${externalMetadata.artist} - ${externalMetadata.title}`
                  : externalMetadata.title,
                streamTitle: channel.name,
                bitrate: null,
                genre: null,
                url: null,
                source: externalMetadata.source
              });
            }
          } catch (externalError) {
            logger.error(`[Metadata] External metadata fallback failed: ${externalError.message}`);
          }

          // Return basic info if everything fails — still a 200 so the
          // player UI can show the channel name.
          res.json({
            channelId: parseInt(channelId),
            channelName: channel.name,
            title: null,
            artist: null,
            song: null,
            streamTitle: channel.name,
            bitrate: null,
            genre: null,
            url: null,
            error: 'Unable to fetch stream metadata'
          });
        }
      }
    );
  } catch (error) {
    logger.error('Metadata fetch error:', error);
    res.status(500).json({ error: 'Failed to fetch metadata' });
  }
});
|
||||
|
||||
module.exports = router;
|
||||
233
backend/routes/playlists.js
Normal file
233
backend/routes/playlists.js
Normal file
|
|
@ -0,0 +1,233 @@
|
|||
const express = require('express');
|
||||
const router = express.Router();
|
||||
const multer = require('multer');
|
||||
const path = require('path');
|
||||
const fs = require('fs');
|
||||
const { authenticate } = require('../middleware/auth');
|
||||
const { modifyLimiter, heavyLimiter, readLimiter } = require('../middleware/rateLimiter');
|
||||
const { db } = require('../database/db');
|
||||
const logger = require('../utils/logger');
|
||||
const { parseM3U } = require('../utils/m3uParser');
|
||||
const {
|
||||
validatePlaylist,
|
||||
validateIdParam,
|
||||
validateBulkDelete,
|
||||
createValidationMiddleware
|
||||
} = require('../middleware/inputValidation');
|
||||
const { validatePlaylistName } = require('../utils/inputValidator');
|
||||
|
||||
// Multer disk-storage backend for playlist uploads. Files land in
// data/uploads (created on demand) under a collision-resistant name built
// from the current timestamp plus a random suffix, keeping the original
// file extension.
const storage = multer.diskStorage({
  destination(req, file, cb) {
    const uploadDir = path.join(__dirname, '../../data/uploads');
    if (!fs.existsSync(uploadDir)) {
      fs.mkdirSync(uploadDir, { recursive: true, mode: 0o755 });
    }
    cb(null, uploadDir);
  },
  filename(req, file, cb) {
    const stamp = Date.now();
    const random = Math.round(Math.random() * 1E9);
    const extension = path.extname(file.originalname);
    cb(null, `playlist-${stamp}-${random}${extension}`);
  }
});
|
||||
|
||||
// Multer instance for M3U uploads: 50MB cap, and only files that either
// carry the M3U MIME type or end in .m3u/.m3u8 are accepted.
const upload = multer({
  storage,
  limits: { fileSize: 50 * 1024 * 1024 }, // 50MB
  fileFilter(req, file, cb) {
    const { mimetype, originalname } = file;
    const looksLikeM3u =
      mimetype === 'audio/x-mpegurl' ||
      originalname.endsWith('.m3u') ||
      originalname.endsWith('.m3u8');

    if (!looksLikeM3u) {
      cb(new Error('Only M3U files are allowed'));
      return;
    }
    cb(null, true);
  }
});
|
||||
|
||||
// List every playlist owned by the authenticated user, newest first.
router.get('/', authenticate, readLimiter, (req, res) => {
  const sql = 'SELECT * FROM playlists WHERE user_id = ? ORDER BY created_at DESC';
  db.all(sql, [req.user.userId], (dbErr, rows) => {
    if (dbErr) {
      logger.error('Error fetching playlists:', dbErr);
      res.status(500).json({ error: 'Failed to fetch playlists' });
      return;
    }
    res.json(rows);
  });
});
|
||||
|
||||
// Add playlist from URL.
//
// If separate credentials are supplied, they are injected as real query
// parameters via the WHATWG URL API. The previous string-replace approach
// (url.replace('username=', `username=${user}`)) PREPENDED the new value to
// any existing one — e.g. "?username=old" became "?username=newold" — and
// silently did nothing when the parameters were absent. searchParams.set()
// both overwrites an existing value and adds a missing one.
//
// Body: { name, url, username?, password?, category?, type? }
// Responds 201 with the new playlist id, or 500 when the insert or the
// M3U parse fails.
router.post('/url', authenticate, modifyLimiter, validatePlaylist, async (req, res) => {
  const { name, url, username, password, category, type } = req.body;

  let playlistUrl = url;
  if (username && password) {
    try {
      const parsed = new URL(url);
      parsed.searchParams.set('username', username);
      parsed.searchParams.set('password', password);
      playlistUrl = parsed.toString();
    } catch (parseError) {
      // Not a parseable absolute URL: keep the raw value and let the
      // downstream fetch surface the failure.
      logger.error('Could not parse playlist URL to inject credentials:', parseError.message);
    }
  }

  db.run(
    'INSERT INTO playlists (user_id, name, url, type, category) VALUES (?, ?, ?, ?, ?)',
    [req.user.userId, name, playlistUrl, type || 'live', category],
    async function(err) {
      if (err) {
        logger.error('Error adding playlist:', err);
        return res.status(500).json({ error: 'Failed to add playlist' });
      }

      // sqlite3 exposes the inserted row id on `this` (function callback).
      const playlistId = this.lastID;

      try {
        await parseM3U(playlistUrl, playlistId);
        res.status(201).json({
          message: 'Playlist added successfully',
          id: playlistId
        });
      } catch (error) {
        logger.error('Error parsing M3U:', error);
        res.status(500).json({ error: 'Failed to parse playlist' });
      }
    }
  );
});
|
||||
|
||||
// Upload an M3U file (multipart field "file").
// Optional body fields: name (validated if present), category, type.
// The uploaded file is removed again when the name fails validation; on a
// successful insert the file is parsed into channels via parseM3U.
router.post('/upload', authenticate, heavyLimiter, upload.single('file'), async (req, res) => {
  if (!req.file) {
    return res.status(400).json({ error: 'No file uploaded' });
  }

  const { name, category, type } = req.body;
  const { filename, path: filePath, originalname } = req.file;

  // Reject a bad custom name early, cleaning up the file we already stored.
  if (name) {
    const validation = validatePlaylistName(name);
    if (!validation.valid) {
      if (fs.existsSync(filePath)) {
        fs.unlinkSync(filePath);
      }
      return res.status(400).json({ error: validation.errors.join(', ') });
    }
  }

  db.run(
    'INSERT INTO playlists (user_id, name, filename, type, category) VALUES (?, ?, ?, ?, ?)',
    [req.user.userId, name || originalname, filename, type || 'live', category],
    async function(insertErr) {
      if (insertErr) {
        logger.error('Error saving playlist:', insertErr);
        return res.status(500).json({ error: 'Failed to save playlist' });
      }

      const playlistId = this.lastID;

      try {
        await parseM3U(filePath, playlistId, true);
        res.status(201).json({
          message: 'Playlist uploaded successfully',
          id: playlistId,
          filename
        });
      } catch (parseError) {
        logger.error('Error parsing uploaded M3U:', parseError);
        res.status(500).json({ error: 'Failed to parse playlist' });
      }
    }
  );
});
|
||||
|
||||
// Delete a playlist owned by the caller, removing its uploaded file (if any)
// before deleting the database row. Ownership is checked by the SELECT, so
// the subsequent DELETE can go by id alone.
router.delete('/:id', authenticate, modifyLimiter, validateIdParam, (req, res) => {
  const playlistId = req.params.id;
  const ownerQuery = 'SELECT * FROM playlists WHERE id = ? AND user_id = ?';

  db.get(ownerQuery, [playlistId, req.user.userId], (lookupErr, playlist) => {
    if (lookupErr) {
      logger.error('Error fetching playlist:', lookupErr);
      return res.status(500).json({ error: 'Failed to delete playlist' });
    }
    if (!playlist) {
      return res.status(404).json({ error: 'Playlist not found' });
    }

    // Remove the on-disk file for uploaded playlists.
    if (playlist.filename) {
      const uploadedFile = path.join(__dirname, '../../data/uploads', playlist.filename);
      if (fs.existsSync(uploadedFile)) {
        fs.unlinkSync(uploadedFile);
      }
    }

    db.run('DELETE FROM playlists WHERE id = ?', [playlistId], (deleteErr) => {
      if (deleteErr) {
        logger.error('Error deleting playlist:', deleteErr);
        return res.status(500).json({ error: 'Failed to delete playlist' });
      }
      res.json({ message: 'Playlist deleted successfully' });
    });
  });
});
|
||||
|
||||
// Bulk-delete playlists by id. The user_id predicate in the DELETE means
// ids belonging to other users are silently skipped; `deleted` reports how
// many rows were actually removed.
router.post('/bulk-delete', authenticate, modifyLimiter, validateBulkDelete, (req, res) => {
  const { ids } = req.body;

  const placeholders = new Array(ids.length).fill('?').join(',');
  const sql = `DELETE FROM playlists WHERE id IN (${placeholders}) AND user_id = ?`;
  const bindings = ids.concat(req.user.userId);

  db.run(sql, bindings, function(deleteErr) {
    if (deleteErr) {
      logger.error('Error bulk deleting playlists:', deleteErr);
      return res.status(500).json({ error: 'Failed to delete playlists' });
    }
    res.json({ message: 'Playlists deleted successfully', deleted: this.changes });
  });
});
|
||||
|
||||
// Validation middleware for the rename endpoint: the :id param must be a
// positive integer (sanitized to a number), and the body name goes through
// the shared playlist-name validator.
const validatePlaylistRename = createValidationMiddleware({
  params: {
    id(value) {
      const parsed = parseInt(value, 10);
      const ok = !isNaN(parsed) && parsed >= 1;
      return ok
        ? { valid: true, errors: [], sanitized: parsed }
        : { valid: false, errors: ['Invalid playlist ID'], sanitized: null };
    }
  },
  body: {
    name: validatePlaylistName
  }
});
|
||||
|
||||
// Rename a playlist. The UPDATE's user_id predicate enforces ownership, so
// zero changed rows means "not found or not yours".
router.patch('/:id', authenticate, modifyLimiter, validatePlaylistRename, (req, res) => {
  const { name } = req.body;
  const playlistId = req.params.id;
  const sql = 'UPDATE playlists SET name = ?, updated_at = CURRENT_TIMESTAMP WHERE id = ? AND user_id = ?';

  db.run(sql, [name, playlistId, req.user.userId], function(updateErr) {
    if (updateErr) {
      logger.error('Error renaming playlist:', updateErr);
      return res.status(500).json({ error: 'Failed to rename playlist' });
    }
    if (this.changes === 0) {
      return res.status(404).json({ error: 'Playlist not found' });
    }
    res.json({ message: 'Playlist renamed successfully' });
  });
});
|
||||
|
||||
module.exports = router;
|
||||
9
backend/routes/profiles.js
Normal file
9
backend/routes/profiles.js
Normal file
|
|
@ -0,0 +1,9 @@
|
|||
const express = require('express');
|
||||
const router = express.Router();
|
||||
const { authenticate } = require('../middleware/auth');
|
||||
|
||||
// Placeholder root endpoint for the profiles router.
router.get('/', authenticate, function (req, res) {
  res.json({ message: 'Profiles endpoint' });
});
|
||||
|
||||
module.exports = router;
|
||||
9
backend/routes/radio.js
Normal file
9
backend/routes/radio.js
Normal file
|
|
@ -0,0 +1,9 @@
|
|||
const express = require('express');
|
||||
const router = express.Router();
|
||||
const { authenticate } = require('../middleware/auth');
|
||||
|
||||
// Placeholder root endpoint for the radio router.
router.get('/', authenticate, function (req, res) {
  res.json({ message: 'Radio endpoint' });
});
|
||||
|
||||
module.exports = router;
|
||||
620
backend/routes/rbac.js
Normal file
620
backend/routes/rbac.js
Normal file
|
|
@ -0,0 +1,620 @@
|
|||
const express = require('express');
|
||||
const router = express.Router();
|
||||
const { body, validationResult } = require('express-validator');
|
||||
const { authenticate, requireAdmin } = require('../middleware/auth');
|
||||
const { requirePermission, requireAllPermissions, PERMISSIONS, DEFAULT_ROLES, clearAllPermissionCache, clearUserPermissionCache, logPermissionAction, getUserPermissions } = require('../middleware/rbac');
|
||||
const { modifyLimiter, readLimiter } = require('../middleware/rateLimiter');
|
||||
const { db } = require('../database/db');
|
||||
const logger = require('../utils/logger');
|
||||
const SecurityAuditLogger = require('../utils/securityAudit');
|
||||
|
||||
/**
 * Get all available permissions.
 * Returns the full permission catalog as {key, description} pairs plus a
 * category map grouping permission keys by their dotted prefix.
 */
router.get('/permissions', authenticate, requirePermission('users.manage_roles'), readLimiter, (req, res) => {
  const allKeys = Object.keys(PERMISSIONS);
  // Collect every permission key starting with any of the given prefixes.
  const withPrefix = (...prefixes) =>
    allKeys.filter((key) => prefixes.some((prefix) => key.startsWith(prefix)));

  res.json({
    permissions: Object.entries(PERMISSIONS).map(([key, description]) => ({
      key,
      description
    })),
    categories: {
      'User Management': withPrefix('users.'),
      'Session Management': withPrefix('sessions.'),
      'Content Management': withPrefix('playlists.', 'channels.', 'favorites.', 'history.'),
      'System & Settings': withPrefix('settings.', 'stats.', 'backup.'),
      'Security Management': withPrefix('security.'),
      'Search & Discovery': withPrefix('search.'),
      'VPN & Network': withPrefix('vpn.')
    }
  });
});
|
||||
|
||||
/**
 * Get all roles, system roles first, with the stored permissions JSON
 * decoded into an array and is_system_role normalized to a boolean.
 */
router.get('/roles', authenticate, requirePermission('users.view'), readLimiter, (req, res) => {
  const sql = `SELECT id, role_key, name, description, permissions, is_system_role, created_at, updated_at
     FROM roles
     ORDER BY is_system_role DESC, name ASC`;

  db.all(sql, [], (dbErr, rows) => {
    if (dbErr) {
      logger.error('Error fetching roles:', dbErr);
      return res.status(500).json({ error: 'Failed to fetch roles' });
    }

    // Decode the permissions JSON column for each row.
    res.json(rows.map((row) => ({
      ...row,
      permissions: JSON.parse(row.permissions || '[]'),
      is_system_role: Boolean(row.is_system_role)
    })));
  });
});
|
||||
|
||||
/**
 * Get a single role by its role_key, with permissions decoded from JSON
 * and is_system_role normalized to a boolean. 404 when no such role.
 */
router.get('/roles/:roleKey', authenticate, requirePermission('users.view'), readLimiter, (req, res) => {
  const { roleKey } = req.params;
  const sql = `SELECT id, role_key, name, description, permissions, is_system_role, created_at, updated_at
     FROM roles WHERE role_key = ?`;

  db.get(sql, [roleKey], (dbErr, row) => {
    if (dbErr) {
      logger.error('Error fetching role:', dbErr);
      return res.status(500).json({ error: 'Failed to fetch role' });
    }
    if (!row) {
      return res.status(404).json({ error: 'Role not found' });
    }

    res.json({
      ...row,
      permissions: JSON.parse(row.permissions || '[]'),
      is_system_role: Boolean(row.is_system_role)
    });
  });
});
|
||||
|
||||
/**
 * Create a custom role.
 * Requires BOTH users.manage_roles and users.create permissions.
 *
 * Body: role_key (lowercase+underscores), name, description?, permissions
 * (array of keys from the PERMISSIONS catalog).
 *
 * Flow: validate input -> reject duplicate role_key (409) -> insert with
 * is_system_role = 0 -> write an audit entry -> re-fetch and return the
 * created row (201).
 */
router.post('/roles',
  authenticate,
  requireAllPermissions(['users.manage_roles', 'users.create']),
  modifyLimiter,
  [
    body('role_key').trim().isLength({ min: 2, max: 50 }).matches(/^[a-z_]+$/).withMessage('Role key must be lowercase with underscores only'),
    body('name').trim().isLength({ min: 2, max: 100 }),
    body('description').optional().trim().isLength({ max: 500 }),
    body('permissions').isArray().withMessage('Permissions must be an array'),
    body('permissions.*').isString().isIn(Object.keys(PERMISSIONS)).withMessage('Invalid permission')
  ],
  async (req, res) => {
    const errors = validationResult(req);
    if (!errors.isEmpty()) {
      return res.status(400).json({ errors: errors.array() });
    }

    const { role_key, name, description, permissions } = req.body;

    try {
      // Check if role key already exists.
      // NOTE(review): check-then-insert is not atomic; a concurrent insert
      // could still race — presumably a UNIQUE constraint backs this up.
      db.get('SELECT id FROM roles WHERE role_key = ?', [role_key], (err, existing) => {
        if (err) {
          logger.error('Error checking role existence:', err);
          return res.status(500).json({ error: 'Failed to create role' });
        }

        if (existing) {
          return res.status(409).json({ error: 'Role key already exists' });
        }

        // Create new role (never a system role from this endpoint).
        db.run(
          `INSERT INTO roles (role_key, name, description, permissions, is_system_role)
           VALUES (?, ?, ?, ?, 0)`,
          [role_key, name, description || '', JSON.stringify(permissions)],
          function(err) {
            if (err) {
              logger.error('Error creating role:', err);
              return res.status(500).json({ error: 'Failed to create role' });
            }

            // Log action to the permission audit trail (old value = null).
            logPermissionAction(
              req.user.userId,
              'role_created',
              'role',
              this.lastID,
              null,
              { role_key, name, permissions },
              req
            );

            logger.info(`Role created: ${role_key} by user ${req.user.userId}`);

            // Fetch and return the created role so the client gets
            // server-generated fields (id, created_at).
            db.get(
              'SELECT id, role_key, name, description, permissions, is_system_role, created_at FROM roles WHERE id = ?',
              [this.lastID],
              (err, role) => {
                if (err) {
                  return res.status(500).json({ error: 'Role created but failed to fetch details' });
                }
                res.status(201).json({
                  ...role,
                  permissions: JSON.parse(role.permissions),
                  is_system_role: Boolean(role.is_system_role)
                });
              }
            );
          }
        );
      });
    } catch (error) {
      logger.error('Role creation error:', error);
      res.status(500).json({ error: 'Failed to create role' });
    }
  }
);
|
||||
|
||||
/**
 * Update a custom role's name, description and/or permission list.
 * System roles are immutable (403). On success the WHOLE permission cache
 * is cleared, because every user holding the role may gain or lose
 * permissions. Responds with the updated role row.
 */
router.patch('/roles/:roleKey',
  authenticate,
  requirePermission('users.manage_roles'),
  modifyLimiter,
  [
    body('name').optional().trim().isLength({ min: 2, max: 100 }),
    body('description').optional().trim().isLength({ max: 500 }),
    body('permissions').optional().isArray(),
    body('permissions.*').optional().isString().isIn(Object.keys(PERMISSIONS))
  ],
  async (req, res) => {
    const errors = validationResult(req);
    if (!errors.isEmpty()) {
      return res.status(400).json({ errors: errors.array() });
    }

    const { roleKey } = req.params;
    const { name, description, permissions } = req.body;

    try {
      // Check if role exists and is not a system role
      db.get('SELECT * FROM roles WHERE role_key = ?', [roleKey], (err, role) => {
        if (err) {
          logger.error('Error fetching role:', err);
          return res.status(500).json({ error: 'Failed to update role' });
        }

        if (!role) {
          return res.status(404).json({ error: 'Role not found' });
        }

        if (role.is_system_role) {
          return res.status(403).json({ error: 'Cannot modify system roles' });
        }

        // Build update query from only the fields that were provided
        // (undefined means "leave unchanged").
        const updates = [];
        const params = [];

        if (name !== undefined) {
          updates.push('name = ?');
          params.push(name);
        }
        if (description !== undefined) {
          updates.push('description = ?');
          params.push(description);
        }
        if (permissions !== undefined) {
          updates.push('permissions = ?');
          params.push(JSON.stringify(permissions));
        }

        if (updates.length === 0) {
          return res.status(400).json({ error: 'No fields to update' });
        }

        updates.push('updated_at = CURRENT_TIMESTAMP');
        params.push(roleKey);

        // Update role. The interpolated fragment is assembled only from the
        // fixed strings pushed above, so it is not an injection vector;
        // all user data goes through `params`.
        db.run(
          `UPDATE roles SET ${updates.join(', ')} WHERE role_key = ?`,
          params,
          function(err) {
            if (err) {
              logger.error('Error updating role:', err);
              return res.status(500).json({ error: 'Failed to update role' });
            }

            if (this.changes === 0) {
              return res.status(404).json({ error: 'Role not found' });
            }

            // Log action with the pre-update values as "old".
            logPermissionAction(
              req.user.userId,
              'role_updated',
              'role',
              role.id,
              { name: role.name, description: role.description, permissions: JSON.parse(role.permissions) },
              { name, description, permissions },
              req
            );

            // Clear permission cache as role permissions changed
            clearAllPermissionCache();

            logger.info(`Role updated: ${roleKey} by user ${req.user.userId}`);

            // Fetch and return updated role
            db.get(
              'SELECT id, role_key, name, description, permissions, is_system_role, updated_at FROM roles WHERE role_key = ?',
              [roleKey],
              (err, updatedRole) => {
                if (err) {
                  return res.status(500).json({ error: 'Role updated but failed to fetch details' });
                }
                res.json({
                  ...updatedRole,
                  permissions: JSON.parse(updatedRole.permissions),
                  is_system_role: Boolean(updatedRole.is_system_role)
                });
              }
            );
          }
        );
      });
    } catch (error) {
      logger.error('Role update error:', error);
      res.status(500).json({ error: 'Failed to update role' });
    }
  }
);
|
||||
|
||||
/**
 * Delete a custom role.
 * Refuses to delete system roles (403) and roles still assigned to any
 * user (409, with the affected user count). Writes an audit entry on
 * success.
 */
router.delete('/roles/:roleKey',
  authenticate,
  requirePermission('users.manage_roles'),
  modifyLimiter,
  async (req, res) => {
    const { roleKey } = req.params;

    try {
      // Check if role exists
      db.get('SELECT * FROM roles WHERE role_key = ?', [roleKey], (err, role) => {
        if (err) {
          logger.error('Error fetching role:', err);
          return res.status(500).json({ error: 'Failed to delete role' });
        }

        if (!role) {
          return res.status(404).json({ error: 'Role not found' });
        }

        if (role.is_system_role) {
          return res.status(403).json({ error: 'Cannot delete system roles' });
        }

        // Check if role is assigned to any users — deleting it would leave
        // them with a dangling role key.
        db.get('SELECT COUNT(*) as count FROM users WHERE role = ?', [roleKey], (err, result) => {
          if (err) {
            logger.error('Error checking role usage:', err);
            return res.status(500).json({ error: 'Failed to delete role' });
          }

          if (result.count > 0) {
            return res.status(409).json({
              error: 'Cannot delete role that is assigned to users',
              users_count: result.count
            });
          }

          // Delete role
          db.run('DELETE FROM roles WHERE role_key = ?', [roleKey], function(err) {
            if (err) {
              logger.error('Error deleting role:', err);
              return res.status(500).json({ error: 'Failed to delete role' });
            }

            // Log action (new value = null: the role is gone).
            logPermissionAction(
              req.user.userId,
              'role_deleted',
              'role',
              role.id,
              { role_key: roleKey, name: role.name },
              null,
              req
            );

            logger.info(`Role deleted: ${roleKey} by user ${req.user.userId}`);

            res.json({ message: 'Role deleted successfully' });
          });
        });
      });
    } catch (error) {
      logger.error('Role deletion error:', error);
      res.status(500).json({ error: 'Failed to delete role' });
    }
  }
);
|
||||
|
||||
/**
 * Get the calling user's effective permissions.
 * Combines the resolved permission list (from the RBAC cache/helpers) with
 * role metadata from a users↔roles join, and echoes the human-readable
 * description for each permission key.
 */
router.get('/my-permissions', authenticate, readLimiter, async (req, res) => {
  try {
    const permissions = await getUserPermissions(req.user.userId);
    const roleSql =
      'SELECT u.role, r.name as role_name, r.description as role_description FROM users u LEFT JOIN roles r ON u.role = r.role_key WHERE u.id = ?';

    db.get(roleSql, [req.user.userId], (dbErr, roleInfo) => {
      if (dbErr) {
        logger.error('Error fetching role info:', dbErr);
        return res.status(500).json({ error: 'Failed to fetch permissions' });
      }

      const describe = (key) => ({
        key,
        description: PERMISSIONS[key] || 'Unknown permission'
      });

      res.json({
        role: roleInfo?.role || 'unknown',
        role_name: roleInfo?.role_name || 'Unknown',
        role_description: roleInfo?.role_description || '',
        permissions,
        permission_details: permissions.map(describe)
      });
    });
  } catch (error) {
    logger.error('Error fetching user permissions:', error);
    res.status(500).json({ error: 'Failed to fetch permissions' });
  }
});
|
||||
|
||||
/**
 * Assign a role to a user.
 * Requires users.manage_roles. Verifies the role and user both exist,
 * forbids changing your own role, then updates the user, clears their
 * cached permissions, and records the change in BOTH the security audit
 * log and the permission audit trail.
 */
router.post('/users/:userId/role',
  authenticate,
  requirePermission('users.manage_roles'),
  modifyLimiter,
  [
    body('role').trim().notEmpty().withMessage('Role is required')
  ],
  async (req, res) => {
    const errors = validationResult(req);
    if (!errors.isEmpty()) {
      return res.status(400).json({ errors: errors.array() });
    }

    const { userId } = req.params;
    const { role } = req.body;

    try {
      // Check if role exists
      db.get('SELECT role_key FROM roles WHERE role_key = ?', [role], (err, roleExists) => {
        if (err) {
          logger.error('Error checking role:', err);
          return res.status(500).json({ error: 'Failed to assign role' });
        }

        if (!roleExists) {
          return res.status(404).json({ error: 'Role not found' });
        }

        // Check if user exists
        db.get('SELECT id, username, role FROM users WHERE id = ?', [userId], (err, user) => {
          if (err) {
            logger.error('Error fetching user:', err);
            return res.status(500).json({ error: 'Failed to assign role' });
          }

          if (!user) {
            return res.status(404).json({ error: 'User not found' });
          }

          // Prevent modifying own role (stops admins locking themselves out
          // or escalating their own privileges).
          if (parseInt(userId) === req.user.userId) {
            return res.status(403).json({ error: 'Cannot modify your own role' });
          }

          const oldRole = user.role;

          // Update user role
          db.run(
            'UPDATE users SET role = ?, updated_at = CURRENT_TIMESTAMP WHERE id = ?',
            [role, userId],
            async function(err) {
              if (err) {
                logger.error('Error updating user role:', err);
                return res.status(500).json({ error: 'Failed to assign role' });
              }

              // Clear user's permission cache so the new role takes effect
              // on their next request.
              clearUserPermissionCache(parseInt(userId));

              const ip = req.ip || req.headers['x-forwarded-for'] || req.connection.remoteAddress;
              const userAgent = req.headers['user-agent'];

              // CWE-778: Log comprehensive privilege change
              await SecurityAuditLogger.logPrivilegeChange(parseInt(userId), 'role_change', {
                ip,
                userAgent,
                previousRole: oldRole,
                newRole: role,
                changedBy: req.user.userId,
                changedByUsername: req.user.username || 'system',
                targetUsername: user.username
              });

              // Log action in the permission audit trail as well
              logPermissionAction(
                req.user.userId,
                'role_assigned',
                'user',
                parseInt(userId),
                { role: oldRole },
                { role },
                req
              );

              logger.info(`Role assigned: ${role} to user ${userId} by ${req.user.userId}`);

              res.json({
                message: 'Role assigned successfully',
                user_id: userId,
                old_role: oldRole,
                new_role: role
              });
            }
          );
        });
      });
    } catch (error) {
      logger.error('Role assignment error:', error);
      res.status(500).json({ error: 'Failed to assign role' });
    }
  }
);
|
||||
|
||||
/**
 * Get the permission audit log (security.view_audit required).
 *
 * Query params: limit (default 100, clamped to 1..1000), offset (default 0,
 * clamped to >= 0), and optional filters userId, action, targetType — each
 * appended as a parameterized AND clause.
 *
 * Fixes over the previous version:
 *  - `parseInt(limit)` had no radix and no NaN/bounds check, so
 *    `?limit=abc` pushed NaN into the SQL LIMIT parameter and any value
 *    (e.g. 1e9) was accepted verbatim.
 *  - `JSON.parse` on old_value/new_value was unguarded; one corrupt row
 *    made the whole request throw inside the db callback.
 */
router.get('/audit-log',
  authenticate,
  requirePermission('security.view_audit'),
  readLimiter,
  async (req, res) => {
    const { limit = 100, offset = 0, userId, action, targetType } = req.query;

    // Sanitize pagination: query values arrive as strings.
    const safeLimit = Math.min(Math.max(parseInt(limit, 10) || 100, 1), 1000);
    const safeOffset = Math.max(parseInt(offset, 10) || 0, 0);

    try {
      let query = `
        SELECT pal.*, u.username
        FROM permission_audit_log pal
        JOIN users u ON pal.user_id = u.id
        WHERE 1=1
      `;
      const params = [];

      if (userId) {
        query += ' AND pal.user_id = ?';
        params.push(userId);
      }

      if (action) {
        query += ' AND pal.action = ?';
        params.push(action);
      }

      if (targetType) {
        query += ' AND pal.target_type = ?';
        params.push(targetType);
      }

      query += ' ORDER BY pal.created_at DESC LIMIT ? OFFSET ?';
      params.push(safeLimit, safeOffset);

      db.all(query, params, (err, logs) => {
        if (err) {
          logger.error('Error fetching audit log:', err);
          return res.status(500).json({ error: 'Failed to fetch audit log' });
        }

        // Decode JSON columns defensively: a corrupt row yields null for
        // that field instead of failing the whole request.
        const parseOrNull = (text) => {
          if (!text) return null;
          try {
            return JSON.parse(text);
          } catch (parseErr) {
            logger.error('Corrupt JSON in audit log row:', parseErr);
            return null;
          }
        };

        const parsedLogs = logs.map(log => ({
          ...log,
          old_value: parseOrNull(log.old_value),
          new_value: parseOrNull(log.new_value)
        }));

        res.json({ logs: parsedLogs, limit: safeLimit, offset: safeOffset });
      });
    } catch (error) {
      logger.error('Audit log fetch error:', error);
      res.status(500).json({ error: 'Failed to fetch audit log' });
    }
  }
);
|
||||
|
||||
/**
 * Get permission statistics
 * Shows which permissions are most used
 */
router.get('/stats',
  authenticate,
  requirePermission('security.view_audit'),
  readLimiter,
  async (req, res) => {
    try {
      // First query: how many users currently hold each role.
      db.all(
        `SELECT r.name, r.role_key, COUNT(u.id) as user_count
         FROM roles r
         LEFT JOIN users u ON r.role_key = u.role
         GROUP BY r.role_key
         ORDER BY user_count DESC`,
        [],
        (roleErr, roleRows) => {
          if (roleErr) {
            logger.error('Error fetching role stats:', roleErr);
            return res.status(500).json({ error: 'Failed to fetch statistics' });
          }

          // Second query: audit actions recorded over the last 30 days,
          // most frequent first.
          db.all(
            `SELECT action, COUNT(*) as count
             FROM permission_audit_log
             WHERE created_at >= datetime('now', '-30 days')
             GROUP BY action
             ORDER BY count DESC`,
            [],
            (actionErr, actionRows) => {
              if (actionErr) {
                logger.error('Error fetching action stats:', actionErr);
                return res.status(500).json({ error: 'Failed to fetch statistics' });
              }

              res.json({
                role_distribution: roleRows,
                recent_actions: actionRows,
                total_permissions: Object.keys(PERMISSIONS).length,
                total_roles: roleRows.length
              });
            }
          );
        }
      );
    } catch (error) {
      logger.error('Stats fetch error:', error);
      res.status(500).json({ error: 'Failed to fetch statistics' });
    }
  }
);
|
||||
|
||||
module.exports = router;
|
||||
9
backend/routes/recordings.js
Normal file
9
backend/routes/recordings.js
Normal file
|
|
@ -0,0 +1,9 @@
|
|||
const express = require('express');
const { authenticate } = require('../middleware/auth');

const router = express.Router();

/**
 * GET /
 * Placeholder endpoint for the recordings feature.
 * Requires an authenticated user; returns a static payload.
 */
router.get('/', authenticate, (req, res) => {
  res.json({ message: 'Recordings endpoint' });
});

module.exports = router;
|
||||
139
backend/routes/search.js
Normal file
139
backend/routes/search.js
Normal file
|
|
@ -0,0 +1,139 @@
|
|||
const express = require('express');
const router = express.Router();
const { db } = require('../database/db');
const { authenticate, requireAdmin } = require('../middleware/auth');
const { readLimiter } = require('../middleware/rateLimiter');
const { sanitizeString } = require('../utils/inputValidator');
const logger = require('../utils/logger');

/**
 * Promise wrapper around db.all so the handler can await queries and run
 * independent searches in parallel. Resolves to [] when the driver returns
 * no rows.
 */
const queryAll = (sql, params) =>
  new Promise((resolve, reject) => {
    db.all(sql, params, (err, rows) => {
      if (err) reject(err);
      else resolve(rows || []);
    });
  });

// Static settings/pages entries, matched client-side against the raw query.
// Hoisted to module scope so the array is not rebuilt on every request.
const SETTINGS_OPTIONS = [
  { id: 'settings', name: 'Settings', path: '/settings', icon: 'settings' },
  { id: 'user-management', name: 'User Management', path: '/settings?tab=users', icon: 'people' },
  { id: 'vpn-settings', name: 'VPN Settings', path: '/settings?tab=vpn', icon: 'vpn_lock' },
  { id: '2fa', name: 'Two-Factor Authentication', path: '/settings?tab=2fa', icon: 'security' },
  { id: 'live-tv', name: 'Live TV', path: '/live', icon: 'tv' },
  { id: 'radio', name: 'Radio', path: '/radio', icon: 'radio' },
  { id: 'movies', name: 'Movies', path: '/movies', icon: 'movie' },
  { id: 'series', name: 'Series', path: '/series', icon: 'subscriptions' },
  { id: 'favorites', name: 'Favorites', path: '/favorites', icon: 'favorite' },
];

/**
 * Global search endpoint
 * Searches across channels, radio stations, users, settings, etc.
 *
 * Channel/radio/group results are restricted to the requesting user's own
 * playlists; user search is admin-only. Returns empty result sets for
 * queries shorter than 2 characters; rejects queries longer than 100.
 */
router.get('/', authenticate, readLimiter, async (req, res) => {
  try {
    const { q } = req.query;
    const isAdmin = req.user.role === 'admin';

    if (!q || q.trim().length < 2) {
      return res.json({
        channels: [],
        radio: [],
        users: [],
        settings: [],
        groups: []
      });
    }

    // Validate and sanitize search query before using it in LIKE patterns.
    const sanitized = sanitizeString(q.trim());
    if (sanitized.length > 100) {
      return res.status(400).json({ error: 'Search query too long' });
    }

    const searchTerm = `%${sanitized}%`;

    const tvSql = `SELECT DISTINCT c.id, c.name, c.url, COALESCE(c.custom_logo, c.logo) as logo, c.group_name, c.is_radio
       FROM channels c
       JOIN playlists p ON c.playlist_id = p.id
       WHERE p.user_id = ? AND c.is_radio = 0 AND c.is_active = 1
       AND (c.name LIKE ? OR c.group_name LIKE ?)
       ORDER BY c.name
       LIMIT 20`;

    const radioSql = `SELECT DISTINCT c.id, c.name, c.url, COALESCE(c.custom_logo, c.logo) as logo, c.group_name, c.is_radio
       FROM channels c
       JOIN playlists p ON c.playlist_id = p.id
       WHERE p.user_id = ? AND c.is_radio = 1 AND c.is_active = 1
       AND (c.name LIKE ? OR c.group_name LIKE ?)
       ORDER BY c.name
       LIMIT 20`;

    const groupSql = `SELECT DISTINCT c.group_name as name, c.is_radio
       FROM channels c
       JOIN playlists p ON c.playlist_id = p.id
       WHERE p.user_id = ? AND c.is_active = 1
       AND c.group_name LIKE ?
       ORDER BY c.group_name
       LIMIT 10`;

    const userSql = `SELECT id, username, email, role, created_at
       FROM users
       WHERE username LIKE ? OR email LIKE ?
       ORDER BY username
       LIMIT 10`;

    // The four searches are independent — run them in parallel instead of
    // awaiting each sequentially. User search only runs for admins.
    const [channels, radio, groups, users] = await Promise.all([
      queryAll(tvSql, [req.user.userId, searchTerm, searchTerm]),
      queryAll(radioSql, [req.user.userId, searchTerm, searchTerm]),
      queryAll(groupSql, [req.user.userId, searchTerm]),
      isAdmin
        ? queryAll(userSql, [searchTerm, searchTerm])
        : Promise.resolve([])
    ]);

    // Settings/pages are matched against the raw query (original behavior).
    const settings = SETTINGS_OPTIONS.filter(option =>
      option.name.toLowerCase().includes(q.toLowerCase())
    );

    res.json({ channels, radio, users, settings, groups });
  } catch (error) {
    // Fix: route previously used console.error even though the shared
    // logger is imported; use the logger so failures land in the app log.
    logger.error('Search error:', error);
    res.status(500).json({ error: 'Search failed' });
  }
});

module.exports = router;
|
||||
733
backend/routes/security-config.js
Normal file
733
backend/routes/security-config.js
Normal file
|
|
@ -0,0 +1,733 @@
|
|||
/**
|
||||
* Security Configuration API Routes
|
||||
* Manage thresholds, risk signatures, and response protocols
|
||||
* Admin-only endpoints for security configuration
|
||||
*/
|
||||
|
||||
const express = require('express');
|
||||
const router = express.Router();
|
||||
const { authenticate } = require('../middleware/auth');
|
||||
const { requirePermission } = require('../middleware/rbac');
|
||||
const logger = require('../utils/logger');
|
||||
const thresholdManager = require('../utils/thresholdManager');
|
||||
const riskSignatureManager = require('../utils/riskSignatureManager');
|
||||
const responseProtocolManager = require('../utils/responseProtocolManager');
|
||||
|
||||
// Validation middleware
/**
 * Clamp the optional `limit` query parameter to the range [1, 1000].
 * Non-numeric or missing values fall back to 100. Mutates req.query.limit
 * in place so downstream handlers always see a safe integer.
 *
 * Fix: parse with an explicit radix of 10 — the bare parseInt would parse
 * hex-looking input such as "0x20" as hexadecimal.
 */
const validatePagination = (req, res, next) => {
  const limit = Number.parseInt(req.query.limit, 10) || 100; // NaN/0 -> default
  req.query.limit = Math.min(Math.max(limit, 1), 1000);
  next();
};
|
||||
|
||||
/**
 * Reject requests whose `:id` route parameter is missing, empty, or not a
 * plain string; otherwise hand off to the next middleware.
 */
const validateIdParam = (req, res, next) => {
  const { id } = req.params;
  if (typeof id !== 'string' || id.length === 0) {
    return res.status(400).json({
      success: false,
      message: 'Invalid ID parameter'
    });
  }
  next();
};
|
||||
|
||||
// ===========================
// THRESHOLD MANAGEMENT ROUTES
// ===========================
// Thin REST glue over thresholdManager; all routes require authentication
// plus the 'security.manage' permission.

/**
 * GET /api/security-config/thresholds
 * Get all configured thresholds
 * Optional query filters: pattern_type, enabled ('true'/'false'), limit
 * (clamped by validatePagination).
 */
router.get('/thresholds',
  authenticate,
  requirePermission('security.manage'),
  validatePagination,
  async (req, res) => {
    try {
      const filters = {
        patternType: req.query.pattern_type,
        // Tri-state: undefined means "do not filter on enabled".
        enabled: req.query.enabled !== undefined ? req.query.enabled === 'true' : undefined,
        limit: req.query.limit
      };

      // NOTE(review): these two awaits are independent and could run via
      // Promise.all — confirm thresholdManager tolerates concurrent calls.
      const thresholds = await thresholdManager.getThresholds(filters);
      const stats = await thresholdManager.getStatistics();

      res.json({
        success: true,
        data: thresholds,
        statistics: stats,
        count: thresholds.length
      });
    } catch (error) {
      logger.error('[SecurityConfig API] Error getting thresholds:', error);
      res.status(500).json({
        success: false,
        message: 'Failed to get thresholds',
        error: error.message
      });
    }
  }
);

/**
 * GET /api/security-config/thresholds/:id
 * Get threshold by ID
 */
router.get('/thresholds/:id',
  authenticate,
  requirePermission('security.manage'),
  validateIdParam,
  async (req, res) => {
    try {
      const threshold = await thresholdManager.getThresholdById(req.params.id);

      if (!threshold) {
        return res.status(404).json({
          success: false,
          message: 'Threshold not found'
        });
      }

      res.json({
        success: true,
        data: threshold
      });
    } catch (error) {
      logger.error('[SecurityConfig API] Error getting threshold:', error);
      res.status(500).json({
        success: false,
        message: 'Failed to get threshold',
        error: error.message
      });
    }
  }
);

/**
 * POST /api/security-config/thresholds
 * Create new threshold
 * Required body fields: name, pattern_type, metric_name, operator,
 * threshold_value, severity. Optional: description, time_window_minutes
 * (default 30), enabled.
 */
router.post('/thresholds',
  authenticate,
  requirePermission('security.manage'),
  async (req, res) => {
    try {
      const { name, description, pattern_type, metric_name, operator, threshold_value, time_window_minutes, severity, enabled } = req.body;

      // Validation
      if (!name || !pattern_type || !metric_name || !operator || threshold_value === undefined || !severity) {
        return res.status(400).json({
          success: false,
          message: 'Missing required fields: name, pattern_type, metric_name, operator, threshold_value, severity'
        });
      }

      const validOperators = ['>=', '>', '<=', '<', '==', '!='];
      if (!validOperators.includes(operator)) {
        return res.status(400).json({
          success: false,
          message: 'Invalid operator. Must be one of: ' + validOperators.join(', ')
        });
      }

      const validSeverities = ['low', 'medium', 'high', 'critical'];
      if (!validSeverities.includes(severity)) {
        return res.status(400).json({
          success: false,
          message: 'Invalid severity. Must be one of: ' + validSeverities.join(', ')
        });
      }

      // NOTE(review): parseInt lacks an explicit radix and truncates
      // fractional values — confirm integer thresholds are intended.
      // NOTE(review): other routes in this app read req.user.userId;
      // verify which property the auth middleware actually sets.
      const result = await thresholdManager.createThreshold({
        name,
        description,
        pattern_type,
        metric_name,
        operator,
        threshold_value: parseInt(threshold_value),
        time_window_minutes: time_window_minutes ? parseInt(time_window_minutes) : 30,
        severity,
        enabled
      }, req.user.id);

      res.status(201).json({
        success: true,
        message: 'Threshold created successfully',
        data: result
      });
    } catch (error) {
      logger.error('[SecurityConfig API] Error creating threshold:', error);
      res.status(500).json({
        success: false,
        message: 'Failed to create threshold',
        error: error.message
      });
    }
  }
);

/**
 * PUT /api/security-config/thresholds/:id
 * Update threshold
 * Accepts a partial body; only whitelisted fields are applied
 * (mass-assignment guard).
 */
router.put('/thresholds/:id',
  authenticate,
  requirePermission('security.manage'),
  validateIdParam,
  async (req, res) => {
    try {
      const updates = {};
      const allowedFields = ['name', 'description', 'operator', 'threshold_value', 'time_window_minutes', 'severity', 'enabled'];

      // Copy only whitelisted fields that are actually present.
      for (const field of allowedFields) {
        if (req.body[field] !== undefined) {
          updates[field] = req.body[field];
        }
      }

      if (Object.keys(updates).length === 0) {
        return res.status(400).json({
          success: false,
          message: 'No valid fields to update'
        });
      }

      await thresholdManager.updateThreshold(req.params.id, updates, req.user.id);

      res.json({
        success: true,
        message: 'Threshold updated successfully'
      });
    } catch (error) {
      logger.error('[SecurityConfig API] Error updating threshold:', error);
      res.status(500).json({
        success: false,
        message: 'Failed to update threshold',
        error: error.message
      });
    }
  }
);

/**
 * DELETE /api/security-config/thresholds/:id
 * Delete threshold
 */
router.delete('/thresholds/:id',
  authenticate,
  requirePermission('security.manage'),
  validateIdParam,
  async (req, res) => {
    try {
      await thresholdManager.deleteThreshold(req.params.id, req.user.id);

      res.json({
        success: true,
        message: 'Threshold deleted successfully'
      });
    } catch (error) {
      logger.error('[SecurityConfig API] Error deleting threshold:', error);
      res.status(500).json({
        success: false,
        message: 'Failed to delete threshold',
        error: error.message
      });
    }
  }
);
|
||||
|
||||
// ===========================
// RISK SIGNATURE ROUTES
// ===========================
// Thin REST glue over riskSignatureManager; all routes require
// authentication plus the 'security.manage' permission.

/**
 * GET /api/security-config/signatures
 * Get all risk signatures
 * Optional query filters: signature_type, threat_level,
 * enabled ('true'/'false'), limit (clamped by validatePagination).
 */
router.get('/signatures',
  authenticate,
  requirePermission('security.manage'),
  validatePagination,
  async (req, res) => {
    try {
      const filters = {
        signatureType: req.query.signature_type,
        threatLevel: req.query.threat_level,
        // Tri-state: undefined means "do not filter on enabled".
        enabled: req.query.enabled !== undefined ? req.query.enabled === 'true' : undefined,
        limit: req.query.limit
      };

      // NOTE(review): independent awaits — candidates for Promise.all.
      const signatures = await riskSignatureManager.getSignatures(filters);
      const stats = await riskSignatureManager.getStatistics();

      res.json({
        success: true,
        data: signatures,
        statistics: stats,
        count: signatures.length
      });
    } catch (error) {
      logger.error('[SecurityConfig API] Error getting signatures:', error);
      res.status(500).json({
        success: false,
        message: 'Failed to get signatures',
        error: error.message
      });
    }
  }
);

/**
 * GET /api/security-config/signatures/:id
 * Get signature by ID
 */
router.get('/signatures/:id',
  authenticate,
  requirePermission('security.manage'),
  validateIdParam,
  async (req, res) => {
    try {
      const signature = await riskSignatureManager.getSignatureById(req.params.id);

      if (!signature) {
        return res.status(404).json({
          success: false,
          message: 'Signature not found'
        });
      }

      res.json({
        success: true,
        data: signature
      });
    } catch (error) {
      logger.error('[SecurityConfig API] Error getting signature:', error);
      res.status(500).json({
        success: false,
        message: 'Failed to get signature',
        error: error.message
      });
    }
  }
);

/**
 * POST /api/security-config/signatures
 * Create new risk signature
 * Required body fields: name, signature_type, pattern, match_type,
 * threat_level. Optional: description, confidence (default 0.8),
 * enabled, auto_block.
 */
router.post('/signatures',
  authenticate,
  requirePermission('security.manage'),
  async (req, res) => {
    try {
      const { name, description, signature_type, pattern, match_type, threat_level, confidence, enabled, auto_block } = req.body;

      // Validation
      if (!name || !signature_type || !pattern || !match_type || !threat_level) {
        return res.status(400).json({
          success: false,
          message: 'Missing required fields: name, signature_type, pattern, match_type, threat_level'
        });
      }

      const validMatchTypes = ['regex', 'regex_case_insensitive', 'exact', 'contains', 'custom'];
      if (!validMatchTypes.includes(match_type)) {
        return res.status(400).json({
          success: false,
          message: 'Invalid match_type. Must be one of: ' + validMatchTypes.join(', ')
        });
      }

      const validThreatLevels = ['low', 'medium', 'high', 'critical'];
      if (!validThreatLevels.includes(threat_level)) {
        return res.status(400).json({
          success: false,
          message: 'Invalid threat_level. Must be one of: ' + validThreatLevels.join(', ')
        });
      }

      // NOTE(review): regex patterns are stored without being compiled or
      // complexity-checked here — confirm riskSignatureManager validates
      // them (catastrophic backtracking / ReDoS risk otherwise).
      const result = await riskSignatureManager.createSignature({
        name,
        description,
        signature_type,
        pattern,
        match_type,
        threat_level,
        confidence: confidence !== undefined ? parseFloat(confidence) : 0.8,
        enabled,
        auto_block
      }, req.user.id);

      res.status(201).json({
        success: true,
        message: 'Signature created successfully',
        data: result
      });
    } catch (error) {
      logger.error('[SecurityConfig API] Error creating signature:', error);
      res.status(500).json({
        success: false,
        message: 'Failed to create signature',
        error: error.message
      });
    }
  }
);

/**
 * PUT /api/security-config/signatures/:id
 * Update risk signature
 * Accepts a partial body; only whitelisted fields are applied
 * (mass-assignment guard).
 */
router.put('/signatures/:id',
  authenticate,
  requirePermission('security.manage'),
  validateIdParam,
  async (req, res) => {
    try {
      const updates = {};
      const allowedFields = ['name', 'description', 'pattern', 'match_type', 'threat_level', 'confidence', 'enabled', 'auto_block'];

      // Copy only whitelisted fields that are actually present.
      for (const field of allowedFields) {
        if (req.body[field] !== undefined) {
          updates[field] = req.body[field];
        }
      }

      if (Object.keys(updates).length === 0) {
        return res.status(400).json({
          success: false,
          message: 'No valid fields to update'
        });
      }

      await riskSignatureManager.updateSignature(req.params.id, updates, req.user.id);

      res.json({
        success: true,
        message: 'Signature updated successfully'
      });
    } catch (error) {
      logger.error('[SecurityConfig API] Error updating signature:', error);
      res.status(500).json({
        success: false,
        message: 'Failed to update signature',
        error: error.message
      });
    }
  }
);

/**
 * DELETE /api/security-config/signatures/:id
 * Delete risk signature
 */
router.delete('/signatures/:id',
  authenticate,
  requirePermission('security.manage'),
  validateIdParam,
  async (req, res) => {
    try {
      await riskSignatureManager.deleteSignature(req.params.id, req.user.id);

      res.json({
        success: true,
        message: 'Signature deleted successfully'
      });
    } catch (error) {
      logger.error('[SecurityConfig API] Error deleting signature:', error);
      res.status(500).json({
        success: false,
        message: 'Failed to delete signature',
        error: error.message
      });
    }
  }
);
|
||||
|
||||
// ===========================
// RESPONSE PROTOCOL ROUTES
// ===========================
// Thin REST glue over responseProtocolManager; all routes require
// authentication plus the 'security.manage' permission.

/**
 * GET /api/security-config/protocols
 * Get all response protocols
 * Optional query filters: trigger_type, severity, enabled ('true'/'false'),
 * limit (clamped by validatePagination).
 */
router.get('/protocols',
  authenticate,
  requirePermission('security.manage'),
  validatePagination,
  async (req, res) => {
    try {
      const filters = {
        triggerType: req.query.trigger_type,
        severity: req.query.severity,
        // Tri-state: undefined means "do not filter on enabled".
        enabled: req.query.enabled !== undefined ? req.query.enabled === 'true' : undefined,
        limit: req.query.limit
      };

      // NOTE(review): independent awaits — candidates for Promise.all.
      const protocols = await responseProtocolManager.getProtocols(filters);
      const stats = await responseProtocolManager.getStatistics();

      res.json({
        success: true,
        data: protocols,
        statistics: stats,
        count: protocols.length
      });
    } catch (error) {
      logger.error('[SecurityConfig API] Error getting protocols:', error);
      res.status(500).json({
        success: false,
        message: 'Failed to get protocols',
        error: error.message
      });
    }
  }
);

/**
 * GET /api/security-config/protocols/:id
 * Get protocol by ID
 */
router.get('/protocols/:id',
  authenticate,
  requirePermission('security.manage'),
  validateIdParam,
  async (req, res) => {
    try {
      const protocol = await responseProtocolManager.getProtocolById(req.params.id);

      if (!protocol) {
        return res.status(404).json({
          success: false,
          message: 'Protocol not found'
        });
      }

      res.json({
        success: true,
        data: protocol
      });
    } catch (error) {
      logger.error('[SecurityConfig API] Error getting protocol:', error);
      res.status(500).json({
        success: false,
        message: 'Failed to get protocol',
        error: error.message
      });
    }
  }
);

/**
 * POST /api/security-config/protocols
 * Create new response protocol
 * Required body fields: name, trigger_type, trigger_condition, actions
 * (non-empty array), severity. Optional: description, enabled,
 * auto_execute, cooldown_minutes (default 60).
 */
router.post('/protocols',
  authenticate,
  requirePermission('security.manage'),
  async (req, res) => {
    try {
      const { name, description, trigger_type, trigger_condition, actions, severity, enabled, auto_execute, cooldown_minutes } = req.body;

      // Validation
      if (!name || !trigger_type || !trigger_condition || !actions || !severity) {
        return res.status(400).json({
          success: false,
          message: 'Missing required fields: name, trigger_type, trigger_condition, actions, severity'
        });
      }

      const validTriggerTypes = ['anomaly', 'threshold', 'signature'];
      if (!validTriggerTypes.includes(trigger_type)) {
        return res.status(400).json({
          success: false,
          message: 'Invalid trigger_type. Must be one of: ' + validTriggerTypes.join(', ')
        });
      }

      const validSeverities = ['low', 'medium', 'high', 'critical'];
      if (!validSeverities.includes(severity)) {
        return res.status(400).json({
          success: false,
          message: 'Invalid severity. Must be one of: ' + validSeverities.join(', ')
        });
      }

      if (!Array.isArray(actions) || actions.length === 0) {
        return res.status(400).json({
          success: false,
          message: 'actions must be a non-empty array'
        });
      }

      // NOTE(review): parseInt lacks an explicit radix; also verify
      // req.user.id vs req.user.userId against the auth middleware.
      const result = await responseProtocolManager.createProtocol({
        name,
        description,
        trigger_type,
        trigger_condition,
        actions,
        severity,
        enabled,
        auto_execute,
        cooldown_minutes: cooldown_minutes ? parseInt(cooldown_minutes) : 60
      }, req.user.id);

      res.status(201).json({
        success: true,
        message: 'Protocol created successfully',
        data: result
      });
    } catch (error) {
      logger.error('[SecurityConfig API] Error creating protocol:', error);
      res.status(500).json({
        success: false,
        message: 'Failed to create protocol',
        error: error.message
      });
    }
  }
);

/**
 * PUT /api/security-config/protocols/:id
 * Update response protocol
 * Accepts a partial body; only whitelisted fields are applied
 * (mass-assignment guard).
 */
router.put('/protocols/:id',
  authenticate,
  requirePermission('security.manage'),
  validateIdParam,
  async (req, res) => {
    try {
      const updates = {};
      const allowedFields = ['name', 'description', 'trigger_condition', 'actions', 'severity', 'enabled', 'auto_execute', 'cooldown_minutes'];

      // Copy only whitelisted fields that are actually present.
      for (const field of allowedFields) {
        if (req.body[field] !== undefined) {
          updates[field] = req.body[field];
        }
      }

      if (Object.keys(updates).length === 0) {
        return res.status(400).json({
          success: false,
          message: 'No valid fields to update'
        });
      }

      await responseProtocolManager.updateProtocol(req.params.id, updates, req.user.id);

      res.json({
        success: true,
        message: 'Protocol updated successfully'
      });
    } catch (error) {
      logger.error('[SecurityConfig API] Error updating protocol:', error);
      res.status(500).json({
        success: false,
        message: 'Failed to update protocol',
        error: error.message
      });
    }
  }
);

/**
 * DELETE /api/security-config/protocols/:id
 * Delete response protocol
 */
router.delete('/protocols/:id',
  authenticate,
  requirePermission('security.manage'),
  validateIdParam,
  async (req, res) => {
    try {
      await responseProtocolManager.deleteProtocol(req.params.id, req.user.id);

      res.json({
        success: true,
        message: 'Protocol deleted successfully'
      });
    } catch (error) {
      logger.error('[SecurityConfig API] Error deleting protocol:', error);
      res.status(500).json({
        success: false,
        message: 'Failed to delete protocol',
        error: error.message
      });
    }
  }
);
|
||||
|
||||
/**
 * GET /api/security-config/protocols/:id/history
 * Get execution history for protocol
 * Supports the shared limit clamp via validatePagination.
 */
router.get('/protocols/:id/history',
  authenticate,
  requirePermission('security.manage'),
  validateIdParam,
  validatePagination,
  async (req, res) => {
    try {
      const history = await responseProtocolManager.getExecutionHistory({
        protocolId: req.params.id,
        limit: req.query.limit
      });

      res.json({
        success: true,
        data: history,
        count: history.length
      });
    } catch (error) {
      logger.error('[SecurityConfig API] Error getting protocol history:', error);
      res.status(500).json({
        success: false,
        message: 'Failed to get protocol history',
        error: error.message
      });
    }
  }
);

// ===========================
// DASHBOARD/OVERVIEW ROUTES
// ===========================

/**
 * GET /api/security-config/dashboard
 * Get security configuration dashboard overview
 * Aggregates statistics from all three managers in parallel.
 */
router.get('/dashboard',
  authenticate,
  requirePermission('security.manage'),
  async (req, res) => {
    try {
      // The three statistics sources are independent, so fetch concurrently.
      const [thresholdStats, signatureStats, protocolStats] = await Promise.all([
        thresholdManager.getStatistics(),
        riskSignatureManager.getStatistics(),
        responseProtocolManager.getStatistics()
      ]);

      res.json({
        success: true,
        data: {
          thresholds: thresholdStats,
          signatures: signatureStats,
          protocols: protocolStats
        }
      });
    } catch (error) {
      logger.error('[SecurityConfig API] Error getting dashboard:', error);
      res.status(500).json({
        success: false,
        message: 'Failed to get dashboard data',
        error: error.message
      });
    }
  }
);
|
||||
|
||||
module.exports = router;
|
||||
679
backend/routes/security-headers.js
Normal file
679
backend/routes/security-headers.js
Normal file
|
|
@ -0,0 +1,679 @@
|
|||
const express = require('express');
|
||||
const router = express.Router();
|
||||
const { authenticate } = require('../middleware/auth');
|
||||
const { requirePermission } = require('../middleware/rbac');
|
||||
const { modifyLimiter, readLimiter } = require('../middleware/rateLimiter');
|
||||
const { db } = require('../database/db');
|
||||
const logger = require('../utils/logger');
|
||||
const fs = require('fs').promises;
|
||||
const path = require('path');
|
||||
|
||||
/**
 * Security Headers Configuration Management
 * Allows admins to view and configure HTTP security headers
 */

// Create security_headers_config table.
// Holds named header profiles: CSP directives stored per-source-type as
// text columns, HSTS settings, referrer policy, and the classic X-* headers.
// Exactly one row is expected to have is_active = 1 at a time (enforced by
// application logic, not by the schema).
// NOTE(review): db.run here executes at module load with no error callback —
// failures (e.g. a locked database) would be silent; confirm this is
// acceptable for startup DDL.
db.run(`
  CREATE TABLE IF NOT EXISTS security_headers_config (
    id INTEGER PRIMARY KEY AUTOINCREMENT,
    config_name TEXT NOT NULL UNIQUE,
    csp_default_src TEXT,
    csp_script_src TEXT,
    csp_style_src TEXT,
    csp_img_src TEXT,
    csp_media_src TEXT,
    csp_connect_src TEXT,
    csp_font_src TEXT,
    csp_frame_src TEXT,
    csp_object_src TEXT,
    csp_base_uri TEXT,
    csp_form_action TEXT,
    csp_frame_ancestors TEXT,
    hsts_enabled INTEGER DEFAULT 1,
    hsts_max_age INTEGER DEFAULT 31536000,
    hsts_include_subdomains INTEGER DEFAULT 1,
    hsts_preload INTEGER DEFAULT 1,
    referrer_policy TEXT DEFAULT 'strict-origin-when-cross-origin',
    x_content_type_options INTEGER DEFAULT 1,
    x_frame_options TEXT DEFAULT 'SAMEORIGIN',
    x_xss_protection INTEGER DEFAULT 1,
    is_active INTEGER DEFAULT 0,
    created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
    updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
    created_by INTEGER,
    FOREIGN KEY (created_by) REFERENCES users(id)
  )
`);

// Create security_headers_history table for audit trail.
// Records each change to a config row: the action taken, serialized
// before/after snapshots, who made the change, and when.
db.run(`
  CREATE TABLE IF NOT EXISTS security_headers_history (
    id INTEGER PRIMARY KEY AUTOINCREMENT,
    config_id INTEGER,
    action TEXT NOT NULL,
    previous_config TEXT,
    new_config TEXT,
    changed_by INTEGER,
    timestamp TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
    FOREIGN KEY (config_id) REFERENCES security_headers_config(id),
    FOREIGN KEY (changed_by) REFERENCES users(id)
  )
`);
|
||||
|
||||
// Security presets
// Three ready-made header configurations exposed to the admin UI via
// GET /current. NOTE(review): each csp_* value is the *string* representation
// of an array (e.g. "['self']"), not an actual array — consumers presumably
// parse these strings; confirm against the frontend before changing format.
const SECURITY_PRESETS = {
  // Locks CSP down to same-origin only; strongest XSS/framing protection.
  strict: {
    name: 'Strict Security',
    description: 'Maximum security - blocks most external resources',
    config: {
      csp_default_src: "['self']",
      csp_script_src: "['self']",
      csp_style_src: "['self']",
      csp_img_src: "['self', 'data:', 'https:']",
      csp_media_src: "['self']",
      csp_connect_src: "['self']",
      csp_font_src: "['self', 'data:']",
      csp_frame_src: "['self']",
      csp_object_src: "['none']",
      csp_base_uri: "['self']",
      csp_form_action: "['self']",
      csp_frame_ancestors: "['self']",
      hsts_enabled: 1,
      hsts_max_age: 31536000,
      hsts_include_subdomains: 1,
      hsts_preload: 1,
      referrer_policy: 'no-referrer',
      x_content_type_options: 1,
      x_frame_options: 'DENY',
      x_xss_protection: 1
    }
  },
  // Adds common CDNs (Google fonts/static, jsDelivr) and embed hosts.
  balanced: {
    name: 'Balanced',
    description: 'Good security with common CDN support',
    config: {
      csp_default_src: "['self']",
      csp_script_src: "['self', 'https://www.gstatic.com', 'https://cdn.jsdelivr.net']",
      csp_style_src: "['self', 'https://fonts.googleapis.com', \"'unsafe-inline'\"]",
      csp_img_src: "['self', 'data:', 'blob:', 'https:', 'http:']",
      csp_media_src: "['self', 'blob:', 'data:', 'https:', 'http:']",
      csp_connect_src: "['self', 'https:', 'http:', 'ws:', 'wss:']",
      csp_font_src: "['self', 'data:', 'https://fonts.gstatic.com']",
      csp_frame_src: "['self', 'https://www.youtube.com', 'https://player.vimeo.com']",
      csp_object_src: "['none']",
      csp_base_uri: "['self']",
      csp_form_action: "['self']",
      csp_frame_ancestors: "['self']",
      hsts_enabled: 1,
      hsts_max_age: 31536000,
      hsts_include_subdomains: 1,
      hsts_preload: 0,
      referrer_policy: 'strict-origin-when-cross-origin',
      x_content_type_options: 1,
      x_frame_options: 'SAMEORIGIN',
      x_xss_protection: 1
    }
  },
  // Widest allowances (wildcards, unsafe-inline/eval) for external IPTV
  // streams and third-party APIs; matches the app's current defaults.
  permissive: {
    name: 'Permissive (IPTV Streaming)',
    description: 'Allows external streams and APIs - current default',
    config: {
      csp_default_src: "['self']",
      csp_script_src: "['self', \"'unsafe-inline'\", \"'unsafe-eval'\", 'https://www.gstatic.com', 'https://cdn.jsdelivr.net', 'blob:']",
      csp_style_src: "['self', \"'unsafe-inline'\", 'https://fonts.googleapis.com']",
      csp_img_src: "['self', 'data:', 'blob:', 'https:', 'http:']",
      csp_media_src: "['self', 'blob:', 'data:', 'mediastream:', 'https:', 'http:', '*']",
      csp_connect_src: "['self', 'https:', 'http:', 'ws:', 'wss:', 'blob:', '*']",
      csp_font_src: "['self', 'data:', 'https://fonts.gstatic.com']",
      csp_frame_src: "['self', 'https://www.youtube.com', 'https://player.vimeo.com']",
      csp_object_src: "['none']",
      csp_base_uri: "['self']",
      csp_form_action: "['self']",
      csp_frame_ancestors: "['self']",
      hsts_enabled: 1,
      hsts_max_age: 31536000,
      hsts_include_subdomains: 1,
      hsts_preload: 1,
      referrer_policy: 'strict-origin-when-cross-origin',
      x_content_type_options: 1,
      x_frame_options: 'SAMEORIGIN',
      x_xss_protection: 1
    }
  }
};
|
||||
|
||||
// Get current active security headers configuration
// Returns the headers the server is running with right now (derived from
// server.js and NODE_ENV), plus all saved configurations and the built-in
// presets. Requires security.view_audit permission.
router.get('/current', authenticate, requirePermission('security.view_audit'), readLimiter, async (req, res) => {
  try {
    // Read current configuration from server.js
    const serverPath = path.join(__dirname, '../server.js');
    const serverContent = await fs.readFile(serverPath, 'utf8');

    // Parse current CSP configuration
    // NOTE: extractCSPFromCode currently returns a static snapshot and does
    // not actually parse serverContent (see the helper below).
    const currentConfig = {
      environment: process.env.NODE_ENV || 'development',
      csp: {
        // CSP is enforced only in production; report-only elsewhere.
        mode: process.env.NODE_ENV === 'production' ? 'enforcing' : 'report-only',
        directives: extractCSPFromCode(serverContent)
      },
      hsts: {
        // HSTS is only sent over HTTPS in production.
        enabled: process.env.NODE_ENV === 'production',
        maxAge: 31536000,
        includeSubDomains: true,
        preload: true
      },
      referrerPolicy: 'strict-origin-when-cross-origin',
      xContentTypeOptions: true,
      xFrameOptions: 'SAMEORIGIN',
      xssProtection: true
    };

    // Get saved configurations
    // Active config sorts first, then most recently updated.
    db.all(
      'SELECT * FROM security_headers_config ORDER BY is_active DESC, updated_at DESC',
      [],
      (err, configs) => {
        if (err) {
          logger.error('Error fetching security headers configs:', err);
          return res.status(500).json({ error: 'Failed to fetch configurations' });
        }

        res.json({
          current: currentConfig,
          savedConfigs: configs || [],
          presets: SECURITY_PRESETS
        });
      }
    );
  } catch (error) {
    logger.error('Error reading current security headers:', error);
    res.status(500).json({ error: 'Failed to read current configuration' });
  }
});
|
||||
|
||||
// Get security header recommendations
// Computes a score/grade plus actionable recommendations (see the
// generateSecurityRecommendations helper) and returns them as JSON.
router.get('/recommendations', authenticate, requirePermission('security.view_audit'), readLimiter, async (req, res) => {
  try {
    const report = await generateSecurityRecommendations();
    res.json(report);
  } catch (error) {
    logger.error('Error generating recommendations:', error);
    res.status(500).json({ error: 'Failed to generate recommendations' });
  }
});
|
||||
|
||||
// Test security headers
// Dry-run validation: scores a proposed configuration from the request body
// without persisting or applying anything.
router.post('/test', authenticate, requirePermission('security.manage'), modifyLimiter, async (req, res) => {
  try {
    const { config } = req.body;

    // A configuration payload is mandatory.
    if (!config) {
      return res.status(400).json({ error: 'Configuration required' });
    }

    res.json(await testSecurityConfiguration(config));
  } catch (error) {
    logger.error('Error testing security headers:', error);
    res.status(500).json({ error: 'Failed to test configuration' });
  }
});
|
||||
|
||||
// Save security headers configuration
// Persists a named configuration; when setActive is truthy, all other rows
// are deactivated first and the new row becomes active. Records a 'created'
// entry in security_headers_history.
// NOTE(review): the deactivate + insert pair is not wrapped in a transaction,
// so a failure between the two steps can leave no active config — confirm
// whether that is acceptable.
router.post('/save', authenticate, requirePermission('security.manage'), modifyLimiter, async (req, res) => {
  try {
    const { configName, config, setActive } = req.body;

    if (!configName || !config) {
      return res.status(400).json({ error: 'Configuration name and config required' });
    }

    // If setting as active, deactivate all others first
    if (setActive) {
      await new Promise((resolve, reject) => {
        db.run('UPDATE security_headers_config SET is_active = 0', [], (err) => {
          if (err) reject(err);
          else resolve();
        });
      });
    }

    // Insert new configuration
    const stmt = db.prepare(`
      INSERT INTO security_headers_config (
        config_name, csp_default_src, csp_script_src, csp_style_src,
        csp_img_src, csp_media_src, csp_connect_src, csp_font_src,
        csp_frame_src, csp_object_src, csp_base_uri, csp_form_action,
        csp_frame_ancestors, hsts_enabled, hsts_max_age,
        hsts_include_subdomains, hsts_preload, referrer_policy,
        x_content_type_options, x_frame_options, x_xss_protection,
        is_active, created_by
      ) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
    `);

    stmt.run(
      configName,
      config.csp_default_src,
      config.csp_script_src,
      config.csp_style_src,
      config.csp_img_src,
      config.csp_media_src,
      config.csp_connect_src,
      config.csp_font_src,
      config.csp_frame_src,
      config.csp_object_src,
      config.csp_base_uri,
      config.csp_form_action,
      config.csp_frame_ancestors,
      // Boolean flags are coerced to the INTEGER 0/1 columns.
      config.hsts_enabled ? 1 : 0,
      config.hsts_max_age,
      config.hsts_include_subdomains ? 1 : 0,
      config.hsts_preload ? 1 : 0,
      config.referrer_policy,
      config.x_content_type_options ? 1 : 0,
      config.x_frame_options,
      config.x_xss_protection ? 1 : 0,
      setActive ? 1 : 0,
      req.user.id,
      // Must be a regular function (not arrow): sqlite3 binds `this` so the
      // callback can read this.lastID of the inserted row.
      function(err) {
        if (err) {
          logger.error('Error saving security headers config:', err);
          return res.status(500).json({ error: 'Failed to save configuration' });
        }

        // Log to history (best-effort; its own errors are not surfaced).
        db.run(
          `INSERT INTO security_headers_history (config_id, action, new_config, changed_by)
           VALUES (?, ?, ?, ?)`,
          [this.lastID, 'created', JSON.stringify(config), req.user.id]
        );

        logger.info(`Security headers configuration '${configName}' saved by user ${req.user.id}`);
        res.json({
          success: true,
          configId: this.lastID,
          message: 'Configuration saved successfully'
        });
      }
    );
  } catch (error) {
    logger.error('Error saving security headers:', error);
    res.status(500).json({ error: 'Failed to save configuration' });
  }
});
|
||||
|
||||
// Apply security headers configuration (updates server.js)
// Marks the given saved configuration as active and logs an 'applied' history
// entry. Despite the name, it does NOT rewrite server.js — it only backs it
// up and flips the is_active flag; the response warns that a server restart
// is required for the headers to take effect.
router.post('/apply/:configId', authenticate, requirePermission('security.manage'), modifyLimiter, async (req, res) => {
  try {
    const { configId } = req.params;

    // Get configuration
    db.get(
      'SELECT * FROM security_headers_config WHERE id = ?',
      [configId],
      async (err, config) => {
        // Query errors and missing rows are both reported as 404 here.
        if (err || !config) {
          return res.status(404).json({ error: 'Configuration not found' });
        }

        try {
          // Backup current server.js
          // Safety net for a future version that edits server.js in place.
          const serverPath = path.join(__dirname, '../server.js');
          const backupPath = path.join(__dirname, '../server.js.backup');
          await fs.copyFile(serverPath, backupPath);

          // Note: Applying configuration requires server restart
          // This endpoint saves the config as active but warns user to restart

          // Deactivate every configuration, then activate the requested one.
          await new Promise((resolve, reject) => {
            db.run('UPDATE security_headers_config SET is_active = 0', [], (err) => {
              if (err) reject(err);
              else resolve();
            });
          });

          await new Promise((resolve, reject) => {
            db.run(
              'UPDATE security_headers_config SET is_active = 1, updated_at = CURRENT_TIMESTAMP WHERE id = ?',
              [configId],
              (err) => {
                if (err) reject(err);
                else resolve();
              }
            );
          });

          // Log to history (best-effort; its own errors are not surfaced).
          db.run(
            `INSERT INTO security_headers_history (config_id, action, new_config, changed_by)
             VALUES (?, ?, ?, ?)`,
            [configId, 'applied', JSON.stringify(config), req.user.id]
          );

          logger.info(`Security headers configuration ${configId} marked as active by user ${req.user.id}`);

          res.json({
            success: true,
            warning: 'Configuration saved. Server restart required to apply changes.',
            requiresRestart: true
          });
        } catch (error) {
          logger.error('Error applying security headers:', error);
          res.status(500).json({ error: 'Failed to apply configuration' });
        }
      }
    );
  } catch (error) {
    logger.error('Error in apply endpoint:', error);
    res.status(500).json({ error: 'Failed to apply configuration' });
  }
});
|
||||
|
||||
// Get configuration history
// Returns the 50 most recent history entries, newest first, joined with the
// acting user's name and the configuration's name.
router.get('/history', authenticate, requirePermission('security.view_audit'), readLimiter, async (req, res) => {
  const historySql = `SELECT h.*, u.username, c.config_name
       FROM security_headers_history h
       LEFT JOIN users u ON h.changed_by = u.id
       LEFT JOIN security_headers_config c ON h.config_id = c.id
       ORDER BY h.timestamp DESC
       LIMIT 50`;

  try {
    db.all(historySql, [], (err, rows) => {
      if (err) {
        logger.error('Error fetching security headers history:', err);
        return res.status(500).json({ error: 'Failed to fetch history' });
      }
      res.json(rows || []);
    });
  } catch (error) {
    logger.error('Error fetching history:', error);
    res.status(500).json({ error: 'Failed to fetch history' });
  }
});
|
||||
|
||||
// Delete saved configuration
// Removes a saved configuration by id. The currently active configuration is
// protected and cannot be deleted (400); unknown ids return 404.
router.delete('/:configId', authenticate, requirePermission('security.manage'), modifyLimiter, async (req, res) => {
  try {
    const { configId } = req.params;

    // Look up the row first so we can distinguish "missing" from "active".
    db.get('SELECT is_active FROM security_headers_config WHERE id = ?', [configId], (err, config) => {
      if (err) {
        return res.status(500).json({ error: 'Failed to check configuration' });
      }

      if (!config) {
        return res.status(404).json({ error: 'Configuration not found' });
      }

      if (config.is_active) {
        return res.status(400).json({ error: 'Cannot delete active configuration' });
      }

      db.run('DELETE FROM security_headers_config WHERE id = ?', [configId], (err) => {
        if (err) {
          logger.error('Error deleting security headers config:', err);
          return res.status(500).json({ error: 'Failed to delete configuration' });
        }

        logger.info(`Security headers configuration ${configId} deleted by user ${req.user.id}`);
        res.json({ success: true, message: 'Configuration deleted' });
      });
    });
  } catch (error) {
    logger.error('Error deleting configuration:', error);
    res.status(500).json({ error: 'Failed to delete configuration' });
  }
});
|
||||
|
||||
// Helper functions

/**
 * Return the CSP directive map that server.js configures.
 *
 * This is a simplified extraction: the serverCode argument is accepted for a
 * future real parser but is not inspected yet — the directives below are a
 * static snapshot mirroring the helmet setup in server.js.
 *
 * @param {string} serverCode - raw contents of server.js (currently unused)
 * @returns {Object} camelCased CSP directive names mapped to source lists
 */
function extractCSPFromCode(serverCode) {
  const self = "'self'";

  return {
    defaultSrc: [self],
    scriptSrc: [self, "'unsafe-inline'", "'unsafe-eval'"],
    styleSrc: [self, "'unsafe-inline'"],
    imgSrc: [self, 'data:', 'blob:', 'https:', 'http:'],
    mediaSrc: [self, 'blob:', 'data:', 'mediastream:', 'https:', 'http:', '*'],
    connectSrc: [self, 'https:', 'http:', 'ws:', 'wss:', 'blob:', '*'],
    fontSrc: [self, 'data:'],
    frameSrc: [self],
    objectSrc: ["'none'"],
    baseUri: [self],
    formAction: [self],
    frameAncestors: [self]
  };
}
|
||||
|
||||
/**
 * Build security recommendations and an overall score from the runtime
 * environment and the CSP violations recorded over the last 7 days.
 *
 * FIX: the original deducted 10 points TWICE for >10 violations — once when
 * pushing the recommendation and again in the final "Security score
 * calculation" section — so >10 violations cost 20 points and >50 cost 35.
 * Deductions are now applied exactly once (>10: -10, >50: additional -15).
 *
 * @returns {Promise<{score: number, grade: string, recommendations: Array, summary: Object}>}
 */
async function generateSecurityRecommendations() {
  const recommendations = [];
  let score = 100;

  // Check current environment
  const isProduction = process.env.NODE_ENV === 'production';

  if (!isProduction) {
    recommendations.push({
      severity: 'info',
      category: 'Environment',
      title: 'Development Mode Active',
      description: 'CSP is in report-only mode. Some security headers are disabled.',
      action: 'Deploy to production to enable full security',
      impact: 'low'
    });
  }

  // Check for CSP violations (count falls back to 0 on query error).
  const violationCount = await new Promise((resolve) => {
    db.get(
      'SELECT COUNT(*) as count FROM csp_violations WHERE created_at > datetime("now", "-7 days")',
      [],
      (err, row) => resolve(row?.count || 0)
    );
  });

  // Score deductions for violations — applied once, here only.
  if (violationCount > 10) score -= 10;
  if (violationCount > 50) score -= 15;

  if (violationCount > 10) {
    recommendations.push({
      severity: 'warning',
      category: 'CSP',
      title: `${violationCount} CSP Violations in Last 7 Days`,
      description: `Your Content Security Policy is being violated ${violationCount} times, indicating resources are being blocked or attempted to load from unauthorized sources. Common causes: (1) External scripts/styles not whitelisted in CSP, (2) Inline event handlers (onclick, onload, etc.), (3) Third-party widgets or ads, (4) Browser extensions injecting content, (5) Misconfigured CDN URLs. This could indicate attempted attacks or legitimate resources being blocked.`,
      action: 'Visit /security/csp dashboard to analyze violations by: (1) Violated Directive - identify which CSP rule is being broken, (2) Blocked URI - see what resources are blocked, (3) Source File - find where the violation originates. Then either: (a) Add legitimate sources to your CSP whitelist, (b) Remove inline scripts/handlers, (c) Block malicious sources, (d) Update third-party library configurations.',
      impact: 'medium',
      details: {
        threshold: 'More than 10 violations may indicate policy misconfiguration',
        monitoring: 'Check CSP Dashboard for patterns and trends',
        action: 'Use Statistics tab to group violations and identify root causes'
      }
    });
  }

  // Check for unsafe CSP directives (always reported; no score penalty).
  recommendations.push({
    severity: 'warning',
    category: 'CSP',
    title: 'Unsafe CSP Directives Detected',
    description: "Your CSP includes 'unsafe-inline' and 'unsafe-eval' in script-src, which weakens XSS (Cross-Site Scripting) protection. These directives allow inline JavaScript and dynamic code evaluation, making it easier for attackers to inject malicious scripts if they find a vulnerability. However, for IPTV streaming apps using React/Vite, these are often necessary for: (1) React's inline scripts and hot module replacement, (2) MUI's dynamic styling, (3) Third-party streaming libraries, (4) External IPTV APIs. Your server already generates cryptographic nonces for better security.",
    action: 'Current configuration is acceptable for an IPTV app. To improve: (1) Monitor CSP violations regularly in the CSP Dashboard, (2) Keep input validation strict (already implemented), (3) Update dependencies frequently, (4) For future major refactoring, explore migrating to nonce-only scripts by configuring Vite to inject nonces and removing unsafe-inline. Note: Your nonce generation is already in place at server.js - you have the foundation for future improvement.',
    impact: 'medium',
    details: {
      currentSetup: 'Multiple defense layers active: input validation, parameterized queries, authentication, rate limiting',
      tradeoff: 'Security vs Functionality: Current score 85-90 is excellent for feature-rich apps',
      futureWork: 'Nonce-based CSP requires: Vite config changes, React hydration updates, third-party library compatibility'
    }
  });

  // Check HSTS (informational only outside production).
  if (!isProduction) {
    recommendations.push({
      severity: 'info',
      category: 'HSTS',
      title: 'HSTS Disabled in Development',
      description: 'HTTP Strict Transport Security (HSTS) forces browsers to only connect via HTTPS, preventing man-in-the-middle attacks and SSL stripping. Currently disabled in development mode to allow HTTP testing. In production, HSTS will be enabled with: max-age=31536000 (1 year), includeSubDomains, and preload flags.',
      action: 'For production deployment: (1) Ensure valid SSL/TLS certificate is installed, (2) Configure reverse proxy (nginx/Apache) for HTTPS, (3) Set NODE_ENV=production to enable HSTS, (4) Test HTTPS functionality before enabling, (5) Consider HSTS preload list submission at hstspreload.org for maximum security (permanent, cannot be undone easily).',
      impact: 'low',
      details: {
        currentMode: 'Development - HSTS off to allow HTTP testing',
        productionMode: 'HSTS enabled automatically with secure settings',
        preloadWarning: 'HSTS preload is permanent - only enable after thorough HTTPS testing'
      }
    });
  }

  // Add positive recommendation if security is good
  if (recommendations.length === 0 || (recommendations.length === 1 && recommendations[0].severity === 'info')) {
    recommendations.push({
      severity: 'info',
      category: 'Security',
      title: 'Excellent Security Configuration',
      description: 'Your security headers are well-configured with minimal issues detected. All critical protections are active: CSP for XSS prevention, HSTS for HTTPS enforcement (in production), X-Content-Type-Options for MIME sniffing protection, X-Frame-Options for clickjacking prevention, and proper referrer policy.',
      action: 'Maintain current configuration and continue monitoring: (1) Review CSP violations weekly, (2) Keep dependencies updated with npm audit, (3) Monitor security audit logs for suspicious activity, (4) Backup security configurations before changes.',
      impact: 'low',
      details: {
        score: 'Grade A security for IPTV streaming application',
        maintenance: 'Regular monitoring and updates recommended',
        compliance: 'Meets OWASP security standards'
      }
    });
  }

  return {
    score: Math.max(0, score),
    grade: score >= 90 ? 'A' : score >= 80 ? 'B' : score >= 70 ? 'C' : score >= 60 ? 'D' : 'F',
    recommendations: recommendations,
    summary: {
      total: recommendations.length,
      critical: recommendations.filter(r => r.severity === 'error').length,
      warnings: recommendations.filter(r => r.severity === 'warning').length,
      info: recommendations.filter(r => r.severity === 'info').length
    }
  };
}
|
||||
|
||||
/**
 * Evaluate a proposed security-header configuration without applying it.
 *
 * Runs a fixed battery of checks (CSP script-src safety, object-src plugin
 * blocking, HSTS duration/subdomains, X-Frame-Options, X-Content-Type-Options)
 * and deducts points per finding: warnings cost 5-10, errors 10-15.
 *
 * @param {Object} config - configuration row shape (csp_*, hsts_*, x_* fields)
 * @returns {Promise<{score, grade, passed, warnings, errors, summary}>}
 */
async function testSecurityConfiguration(config) {
  const passed = [];
  const warnings = [];
  const errors = [];
  let score = 100;

  // Small recorders so each check reads as a one-liner.
  const pass = (test, message) => { passed.push({ test, message }); };
  const warn = (test, message, penalty) => { warnings.push({ test, message }); score -= penalty; };
  const fail = (test, message, penalty) => { errors.push({ test, message }); score -= penalty; };

  const scriptSrc = config.csp_script_src || '';
  const objectSrc = config.csp_object_src || '';

  // Test CSP strictness: dynamic code evaluation.
  if (scriptSrc.includes("'unsafe-eval'")) {
    warn('CSP Script Evaluation', "'unsafe-eval' allows dynamic code execution, reducing XSS protection", 5);
  } else {
    pass('CSP Script Evaluation', 'No unsafe-eval in script-src');
  }

  // Inline scripts.
  if (scriptSrc.includes("'unsafe-inline'")) {
    warn('CSP Inline Scripts', "'unsafe-inline' allows inline scripts, reducing XSS protection", 5);
  } else {
    pass('CSP Inline Scripts', 'No unsafe-inline in script-src');
  }

  // Test object-src: plugins should be blocked outright.
  if (objectSrc.includes("'none'")) {
    pass('Plugin Blocking', 'object-src is none - plugins blocked');
  } else {
    warn('Plugin Blocking', 'Consider setting object-src to none to block plugins', 5);
  }

  // Test HSTS: must be enabled, with a max-age of at least one year.
  if (config.hsts_enabled) {
    if (config.hsts_max_age >= 31536000) {
      pass('HSTS Duration', 'HSTS max-age is 1 year or more');
    } else {
      warn('HSTS Duration', 'HSTS max-age should be at least 1 year (31536000 seconds)', 5);
    }

    if (config.hsts_include_subdomains) {
      pass('HSTS Subdomains', 'HSTS includeSubDomains is enabled');
    }
  } else {
    fail('HSTS Enabled', 'HSTS is disabled - HTTPS enforcement is not active', 15);
  }

  // Test X-Frame-Options (clickjacking).
  if (config.x_frame_options === 'DENY' || config.x_frame_options === 'SAMEORIGIN') {
    pass('Clickjacking Protection', `X-Frame-Options is set to ${config.x_frame_options}`);
  } else {
    warn('Clickjacking Protection', 'X-Frame-Options should be DENY or SAMEORIGIN', 10);
  }

  // Test X-Content-Type-Options (MIME sniffing).
  if (config.x_content_type_options) {
    pass('MIME Sniffing Protection', 'X-Content-Type-Options: nosniff is enabled');
  } else {
    fail('MIME Sniffing Protection', 'X-Content-Type-Options should be enabled', 10);
  }

  return {
    score: Math.max(0, score),
    grade: score >= 90 ? 'A' : score >= 80 ? 'B' : score >= 70 ? 'C' : score >= 60 ? 'D' : 'F',
    passed,
    warnings,
    errors,
    summary: `${passed.length} passed, ${warnings.length} warnings, ${errors.length} errors`
  };
}
|
||||
|
||||
module.exports = router;
|
||||
479
backend/routes/security-monitor.js
Normal file
479
backend/routes/security-monitor.js
Normal file
|
|
@ -0,0 +1,479 @@
|
|||
const express = require('express');
|
||||
const router = express.Router();
|
||||
const { authenticate } = require('../middleware/auth');
|
||||
const { requirePermission } = require('../middleware/rbac');
|
||||
const { readLimiter } = require('../middleware/rateLimiter');
|
||||
const { db } = require('../database/db');
|
||||
const logger = require('../utils/logger');
|
||||
const fs = require('fs').promises;
|
||||
const path = require('path');
|
||||
const { exec } = require('child_process');
|
||||
const { promisify } = require('util');
|
||||
|
||||
const execPromise = promisify(exec);
|
||||
|
||||
/**
|
||||
* Security Monitoring & Dependency Management
|
||||
* Provides comprehensive security status and vulnerability tracking
|
||||
*/
|
||||
|
||||
// Get comprehensive security status
// Aggregates dependency counts, npm-audit vulnerability totals, active header
// middleware, a 7-day audit summary, and live system counters into one JSON
// payload. The five checks are independent of one another, so they now run
// in parallel via Promise.all instead of sequentially (the original awaited
// each one in turn).
router.get('/status', authenticate, requirePermission('security.view_audit'), readLimiter, async (req, res) => {
  try {
    const [dependencies, vulnerabilities, securityHeaders, auditSummary, systemHealth] =
      await Promise.all([
        checkDependencies(),
        checkVulnerabilities(),
        checkSecurityHeaders(),
        getAuditSummary(),
        getSystemHealth()
      ]);

    res.json({
      timestamp: new Date().toISOString(),
      dependencies,
      vulnerabilities,
      securityHeaders,
      auditSummary,
      systemHealth
    });
  } catch (error) {
    logger.error('Error fetching security status:', error);
    res.status(500).json({ error: 'Failed to fetch security status' });
  }
});
|
||||
|
||||
// Check dependencies for updates
/**
 * Count declared dependencies/devDependencies for the backend and frontend
 * package.json files. The two file reads are independent, so they run in
 * parallel (the original awaited them sequentially). Returns { error } on
 * failure instead of throwing so /status degrades gracefully.
 *
 * @returns {Promise<Object>} per-project dependency counts, or { error }
 */
async function checkDependencies() {
  try {
    const readPackage = async (relative) =>
      JSON.parse(await fs.readFile(path.join(__dirname, relative), 'utf8'));

    const [backendPackage, frontendPackage] = await Promise.all([
      readPackage('../package.json'),
      readPackage('../../frontend/package.json')
    ]);

    // Summarise one parsed package.json into the response shape.
    const summarize = (pkg) => ({
      dependencies: Object.keys(pkg.dependencies || {}).length,
      devDependencies: Object.keys(pkg.devDependencies || {}).length,
      lastChecked: new Date().toISOString()
    });

    return {
      backend: summarize(backendPackage),
      frontend: summarize(frontendPackage)
    };
  } catch (error) {
    logger.error('Error checking dependencies:', error);
    return { error: 'Unable to check dependencies' };
  }
}
|
||||
|
||||
// Check for known vulnerabilities
/**
 * Run `npm audit` for both the backend and the frontend and return the
 * per-severity totals for each. The two audits are independent, so they run
 * in parallel (the original ran them sequentially, doubling scan latency).
 * Returns { error } on failure instead of throwing.
 *
 * @returns {Promise<Object>} { backend, frontend, lastScanned } or { error }
 */
async function checkVulnerabilities() {
  try {
    const [backendAudit, frontendAudit] = await Promise.all([
      runNpmAudit('backend'),
      runNpmAudit('frontend')
    ]);

    return {
      backend: backendAudit,
      frontend: frontendAudit,
      lastScanned: new Date().toISOString()
    };
  } catch (error) {
    logger.error('Error checking vulnerabilities:', error);
    return { error: 'Unable to scan for vulnerabilities' };
  }
}
|
||||
|
||||
// Reduce a parsed `npm audit --json` payload to per-severity counts.
function summarizeAuditMetadata(auditData) {
  const v = auditData.metadata?.vulnerabilities || {};
  return {
    total: v.total || 0,
    critical: v.critical || 0,
    high: v.high || 0,
    moderate: v.moderate || 0,
    low: v.low || 0,
    info: v.info || 0
  };
}

/**
 * Run `npm audit --json` for the given project and return severity counts.
 *
 * FIX: the original shelled out with `cd ${projectPath} && npm audit`, which
 * breaks if the install path contains spaces or shell metacharacters; the
 * working directory is now passed via the exec `cwd` option instead. The
 * duplicated metadata parsing (success path vs non-zero-exit path) is shared
 * through summarizeAuditMetadata.
 *
 * @param {string} project - 'backend' or anything else (treated as frontend)
 * @returns {Promise<Object>} severity counts, or { error } on failure
 */
async function runNpmAudit(project) {
  const projectPath = project === 'backend'
    ? path.join(__dirname, '..')
    : path.join(__dirname, '../../frontend');

  try {
    const { stdout } = await execPromise('npm audit --json', {
      cwd: projectPath,
      timeout: 30000
    });

    return summarizeAuditMetadata(JSON.parse(stdout));
  } catch (error) {
    // npm audit returns a non-zero exit code when vulnerabilities are found;
    // the JSON report is still on stdout in that case.
    if (error.stdout) {
      try {
        return summarizeAuditMetadata(JSON.parse(error.stdout));
      } catch {
        return { error: 'Unable to parse audit results' };
      }
    }
    return { error: error.message };
  }
}
|
||||
|
||||
// Check security headers configuration
/**
 * Report which security-header middleware features are active.
 * All values are static except the CSP mode, which follows NODE_ENV:
 * 'enforcing' in production, 'report-only' otherwise.
 *
 * @returns {Promise<Object>} helmet/csp/cors status summary
 */
async function checkSecurityHeaders() {
  const helmetFeatures = [
    'Content-Security-Policy',
    'X-Content-Type-Options',
    'X-Frame-Options',
    'X-XSS-Protection',
    'Strict-Transport-Security',
    'Referrer-Policy'
  ];

  const cspMode = process.env.NODE_ENV === 'production' ? 'enforcing' : 'report-only';

  return {
    helmet: { enabled: true, features: helmetFeatures },
    csp: { enabled: true, mode: cspMode },
    cors: { enabled: true }
  };
}
|
||||
|
||||
// Get security audit summary
// Groups the last 7 days of security_audit_log rows by action/result pair and
// resolves with the top 20 by count. Never rejects: on a query error it logs
// and resolves with an { error } object so callers can degrade gracefully.
async function getAuditSummary() {
  const summarySql = `SELECT
        action,
        result,
        COUNT(*) as count,
        MAX(timestamp) as last_occurrence
      FROM security_audit_log
      WHERE timestamp > datetime('now', '-7 days')
      GROUP BY action, result
      ORDER BY count DESC
      LIMIT 20`;

  return new Promise((resolve) => {
    db.all(summarySql, [], (err, rows) => {
      if (err) {
        logger.error('Error fetching audit summary:', err);
        resolve({ error: 'Unable to fetch audit summary' });
        return;
      }
      resolve(rows || []);
    });
  });
}
|
||||
|
||||
// Get system health metrics
/**
 * Collect live security counters: active sessions, failed logins in the last
 * hour, currently locked accounts, and total users.
 *
 * Improvements over the original: removes the explicit-Promise-construction
 * anti-pattern (an outer `new Promise` wrapping `Promise.all`) in favour of
 * plain async/await, and factors the four copy-pasted db.get wrappers into
 * one helper. Behaviour is preserved: each counter falls back to 0 when its
 * query errors or returns no row.
 *
 * @returns {Promise<Object>} counters plus an ISO timestamp
 */
async function getSystemHealth() {
  // Promisified COUNT(*) query; resolves 0 on error/missing row (best-effort).
  const countQuery = (sql, params = []) =>
    new Promise((resolve) => {
      db.get(sql, params, (err, row) => resolve(row?.count || 0));
    });

  const nowIso = new Date().toISOString();

  const [activeSessions, failedLogins, lockedAccounts, totalUsers] = await Promise.all([
    // Active sessions count
    countQuery('SELECT COUNT(*) as count FROM sessions WHERE expires_at > ?', [nowIso]),
    // Failed login attempts in last hour
    countQuery(
      `SELECT COUNT(*) as count FROM security_audit_log
       WHERE action = 'login' AND result = 'failed'
       AND timestamp > datetime('now', '-1 hour')`
    ),
    // Locked accounts
    countQuery('SELECT COUNT(*) as count FROM users WHERE locked_until > ?', [nowIso]),
    // Total users
    countQuery('SELECT COUNT(*) as count FROM users')
  ]);

  return {
    activeSessions,
    failedLogins,
    lockedAccounts,
    totalUsers,
    timestamp: new Date().toISOString()
  };
}
|
||||
|
||||
// Get detailed vulnerability report
// Runs `npm audit --json` for both the backend and frontend trees and
// returns their raw vulnerability maps plus metadata.
router.get('/vulnerabilities/detailed', authenticate, requirePermission('security.view_audit'), readLimiter, async (req, res) => {
  try {
    const backendPath = path.join(__dirname, '..');
    const frontendPath = path.join(__dirname, '../../frontend');

    // Use the child_process `cwd` option instead of interpolating the path
    // into a `cd … && …` shell string — avoids quoting problems when the
    // install path contains spaces or shell metacharacters.
    // npm audit exits non-zero when vulnerabilities exist, so fall back to
    // the stdout captured on rejection.
    const runAudit = (cwd) =>
      execPromise('npm audit --json', { cwd, timeout: 30000 })
        .catch(e => ({ stdout: e.stdout }));

    // The two audits are independent; run them in parallel.
    const [backendAudit, frontendAudit] = await Promise.all([
      runAudit(backendPath),
      runAudit(frontendPath)
    ]);

    // Guard against empty or garbled audit output.
    const parseAudit = (out) => {
      try {
        return JSON.parse(out || '{}');
      } catch {
        return {};
      }
    };

    const backendData = parseAudit(backendAudit.stdout);
    const frontendData = parseAudit(frontendAudit.stdout);

    res.json({
      backend: {
        vulnerabilities: backendData.vulnerabilities || {},
        metadata: backendData.metadata || {}
      },
      frontend: {
        vulnerabilities: frontendData.vulnerabilities || {},
        metadata: frontendData.metadata || {}
      },
      timestamp: new Date().toISOString()
    });
  } catch (error) {
    logger.error('Error fetching detailed vulnerabilities:', error);
    res.status(500).json({ error: 'Failed to fetch vulnerability details' });
  }
});
|
||||
|
||||
// Get security audit log with filtering
// GET /audit-log?action=&result=&userId=&startDate=&endDate=&limit=&offset=
// Returns a page of matching rows plus a total count for pagination.
router.get('/audit-log', authenticate, requirePermission('security.view_audit'), readLimiter, async (req, res) => {
  try {
    const { action, result, userId, startDate, endDate, limit = 100, offset = 0 } = req.query;

    // Build the WHERE clause once so the page query and the COUNT query
    // share identical filters and parameters. (Previously the count query
    // dropped the WHERE conditions but was still handed the filter params
    // — a placeholder/parameter mismatch that broke pagination totals.)
    let where = '';
    const filterParams = [];

    if (action) {
      where += ' AND action = ?';
      filterParams.push(action);
    }

    if (result) {
      where += ' AND result = ?';
      filterParams.push(result);
    }

    if (userId) {
      where += ' AND user_id = ?';
      filterParams.push(userId);
    }

    if (startDate) {
      where += ' AND timestamp >= ?';
      filterParams.push(startDate);
    }

    if (endDate) {
      where += ' AND timestamp <= ?';
      filterParams.push(endDate);
    }

    const pageQuery = `SELECT * FROM security_audit_log WHERE 1=1${where} ORDER BY timestamp DESC LIMIT ? OFFSET ?`;
    const pageParams = [...filterParams, parseInt(limit, 10), parseInt(offset, 10)];

    db.all(pageQuery, pageParams, (err, rows) => {
      if (err) {
        logger.error('Error fetching audit log:', err);
        return res.status(500).json({ error: 'Failed to fetch audit log' });
      }

      // Total count for pagination, using the same filters as the page.
      const countQuery = `SELECT COUNT(*) as total FROM security_audit_log WHERE 1=1${where}`;

      db.get(countQuery, filterParams, (err, countRow) => {
        if (err) {
          // Degrade gracefully: return the page without a usable total.
          logger.error('Error counting audit log:', err);
          return res.json({ logs: rows || [], total: 0 });
        }

        res.json({
          logs: rows || [],
          total: countRow?.total || 0,
          limit: parseInt(limit, 10),
          offset: parseInt(offset, 10)
        });
      });
    });
  } catch (error) {
    logger.error('Error fetching audit log:', error);
    res.status(500).json({ error: 'Failed to fetch audit log' });
  }
});
|
||||
|
||||
// Export audit log
// GET /audit-log/export?format=json|csv&startDate=&endDate=
// Streams the full (optionally date-filtered) security audit log as a
// downloadable attachment. Requires the 'security.view_audit' permission.
router.get('/audit-log/export', authenticate, requirePermission('security.view_audit'), readLimiter, async (req, res) => {
  try {
    const { format = 'json', startDate, endDate } = req.query;

    // Parameterized date filters; no user input is concatenated into SQL.
    let query = 'SELECT * FROM security_audit_log WHERE 1=1';
    const params = [];

    if (startDate) {
      query += ' AND timestamp >= ?';
      params.push(startDate);
    }

    if (endDate) {
      query += ' AND timestamp <= ?';
      params.push(endDate);
    }

    query += ' ORDER BY timestamp DESC';

    db.all(query, params, (err, rows) => {
      if (err) {
        logger.error('Error exporting audit log:', err);
        return res.status(500).json({ error: 'Failed to export audit log' });
      }

      // Any format other than 'csv' falls through to JSON.
      if (format === 'csv') {
        const csv = convertToCSV(rows);
        res.setHeader('Content-Type', 'text/csv');
        res.setHeader('Content-Disposition', `attachment; filename=security-audit-${Date.now()}.csv`);
        res.send(csv);
      } else {
        res.setHeader('Content-Type', 'application/json');
        res.setHeader('Content-Disposition', `attachment; filename=security-audit-${Date.now()}.json`);
        res.json(rows);
      }
    });
  } catch (error) {
    logger.error('Error exporting audit log:', error);
    res.status(500).json({ error: 'Failed to export audit log' });
  }
});
|
||||
|
||||
// Convert an array of row objects into CSV text.
// Column headers come from the first row's keys. Per RFC 4180, fields
// containing commas, double quotes, or newlines are quoted and embedded
// quotes are doubled. null/undefined become empty fields.
// Fix: the original only quoted fields containing commas and never
// escaped embedded double quotes or newlines, producing corrupt CSV.
function convertToCSV(data) {
  if (!data || data.length === 0) return '';

  const escapeField = (value) => {
    if (value === null || value === undefined) return '';
    const str = String(value);
    // Quote only when necessary; double any embedded quotes.
    return /[",\n\r]/.test(str) ? `"${str.replace(/"/g, '""')}"` : str;
  };

  const headers = Object.keys(data[0]);
  const csvRows = [headers.map(escapeField).join(',')];

  for (const row of data) {
    csvRows.push(headers.map(header => escapeField(row[header])).join(','));
  }

  return csvRows.join('\n');
}
|
||||
|
||||
// Get security recommendations
// Builds actionable recommendations from current DB state: locked
// accounts, stale passwords, recent failed logins, and users without
// 2FA. The four source queries are independent, so they run in
// parallel (the original awaited them one after another). Query errors
// degrade to "no finding" rather than failing the endpoint.
router.get('/recommendations', authenticate, requirePermission('security.view_audit'), readLimiter, async (req, res) => {
  try {
    const recommendations = [];
    const nowIso = new Date().toISOString();

    // Single-row COUNT(*) helper; resolves 0 on error.
    const getCount = (sql, params) =>
      new Promise((resolve) => {
        db.get(sql, params, (err, row) => resolve(row?.count || 0));
      });

    const [lockedAccounts, oldPasswords, recentFailures, no2FA] = await Promise.all([
      // Accounts currently locked out after failed logins
      getCount('SELECT COUNT(*) as count FROM users WHERE locked_until > ?', [nowIso]),
      // Users whose password is older than 90 days
      new Promise((resolve) => {
        db.all(
          `SELECT username, password_changed_at FROM users
          WHERE password_changed_at < datetime('now', '-90 days')`,
          [],
          (err, rows) => resolve(rows || [])
        );
      }),
      // Failed logins within the last hour
      getCount(
        `SELECT COUNT(*) as count FROM security_audit_log
        WHERE action = 'login' AND result = 'failed'
        AND timestamp > datetime('now', '-1 hour')`,
        []
      ),
      // Users with no 2FA secret enrolled
      getCount('SELECT COUNT(*) as count FROM users WHERE two_factor_secret IS NULL', [])
    ]);

    if (lockedAccounts > 0) {
      recommendations.push({
        severity: 'warning',
        category: 'account_security',
        title: 'Locked Accounts',
        description: `${lockedAccounts} account(s) are currently locked due to failed login attempts`,
        action: 'Review locked accounts and consider unlocking legitimate users'
      });
    }

    if (oldPasswords.length > 0) {
      recommendations.push({
        severity: 'info',
        category: 'password_policy',
        title: 'Old Passwords',
        description: `${oldPasswords.length} user(s) haven't changed their password in over 90 days`,
        action: 'Encourage users to update their passwords regularly'
      });
    }

    // More than 10 failures/hour is treated as a possible brute force.
    if (recentFailures > 10) {
      recommendations.push({
        severity: 'high',
        category: 'threat_detection',
        title: 'High Failed Login Rate',
        description: `${recentFailures} failed login attempts in the last hour`,
        action: 'Investigate potential brute-force attack'
      });
    }

    if (no2FA > 0) {
      recommendations.push({
        severity: 'warning',
        category: 'authentication',
        title: 'Two-Factor Authentication',
        description: `${no2FA} user(s) don't have 2FA enabled`,
        action: 'Encourage users to enable two-factor authentication'
      });
    }

    res.json({
      recommendations,
      timestamp: new Date().toISOString()
    });
  } catch (error) {
    logger.error('Error generating recommendations:', error);
    res.status(500).json({ error: 'Failed to generate recommendations' });
  }
});
|
||||
|
||||
module.exports = router;
|
||||
1002
backend/routes/security-testing.js
Normal file
1002
backend/routes/security-testing.js
Normal file
File diff suppressed because it is too large
Load diff
326
backend/routes/sessions.js
Normal file
326
backend/routes/sessions.js
Normal file
|
|
@ -0,0 +1,326 @@
|
|||
/**
|
||||
* Session Management Routes
|
||||
* Handles active session viewing, management, and termination
|
||||
*/
|
||||
|
||||
const express = require('express');
|
||||
const router = express.Router();
|
||||
const { authenticate, requireAdmin } = require('../middleware/auth');
|
||||
const { readLimiter, modifyLimiter } = require('../middleware/rateLimiter');
|
||||
const { db } = require('../database/db');
|
||||
const logger = require('../utils/logger');
|
||||
const SecurityAuditLogger = require('../utils/securityAudit');
|
||||
|
||||
/**
 * Get all active sessions for current user
 * GET /my-sessions
 * Returns the caller's unexpired sessions, newest activity first. The
 * session matching this request's bearer token is flagged isCurrent,
 * and raw session tokens are stripped before the response is sent.
 */
router.get('/my-sessions', authenticate, readLimiter, async (req, res) => {
  try {
    const userId = req.user.userId;
    // Bearer token of the session making this request ("Bearer <token>").
    const currentToken = req.headers.authorization?.split(' ')[1];

    db.all(
      `SELECT
        id,
        session_token,
        ip_address,
        user_agent,
        created_at,
        last_activity,
        expires_at
      FROM active_sessions
      WHERE user_id = ? AND expires_at > ?
      ORDER BY last_activity DESC`,
      [userId, new Date().toISOString()],
      (err, sessions) => {
        if (err) {
          logger.error('Error fetching sessions:', err);
          return res.status(500).json({ error: 'Failed to fetch sessions' });
        }

        // Mark current session
        const sessionsWithCurrent = sessions.map(session => ({
          ...session,
          isCurrent: session.session_token === currentToken,
          // Don't expose full token to client
          session_token: undefined
        }));

        res.json(sessionsWithCurrent);
      }
    );
  } catch (error) {
    logger.error('Session fetch error:', error);
    res.status(500).json({ error: 'Failed to fetch sessions' });
  }
});
||||
|
||||
/**
 * Get all active sessions (admin only)
 * GET /all
 * Joins every unexpired session to its owning user and returns them
 * newest-activity first.
 */
router.get('/all', authenticate, requireAdmin, readLimiter, async (req, res) => {
  try {
    const sessionListSql = `SELECT
        s.id,
        s.user_id,
        s.ip_address,
        s.user_agent,
        s.created_at,
        s.last_activity,
        s.expires_at,
        u.username,
        u.email
      FROM active_sessions s
      JOIN users u ON s.user_id = u.id
      WHERE s.expires_at > ?
      ORDER BY s.last_activity DESC`;

    db.all(sessionListSql, [new Date().toISOString()], (err, sessions) => {
      if (err) {
        logger.error('Error fetching all sessions:', err);
        return res.status(500).json({ error: 'Failed to fetch sessions' });
      }

      res.json(sessions);
    });
  } catch (error) {
    logger.error('Session fetch error:', error);
    res.status(500).json({ error: 'Failed to fetch sessions' });
  }
});
|
||||
|
||||
/**
 * Terminate a specific session
 * DELETE /:sessionId
 * Users may terminate their own sessions; admins may terminate any.
 * The session authenticating this request cannot be terminated here —
 * clients must use the logout endpoint for that.
 */
router.delete('/:sessionId', authenticate, modifyLimiter, async (req, res) => {
  try {
    const { sessionId } = req.params;
    const userId = req.user.userId;
    // Bearer token of the caller's own session, used below to protect
    // the current session from self-termination.
    const currentToken = req.headers.authorization?.split(' ')[1];

    // Validate session ID
    if (isNaN(parseInt(sessionId))) {
      return res.status(400).json({ error: 'Invalid session ID' });
    }

    // Get session details first (needed for the ownership check and the
    // audit-log metadata recorded after deletion).
    db.get(
      'SELECT * FROM active_sessions WHERE id = ?',
      [sessionId],
      async (err, session) => {
        if (err) {
          logger.error('Error fetching session:', err);
          return res.status(500).json({ error: 'Failed to terminate session' });
        }

        if (!session) {
          return res.status(404).json({ error: 'Session not found' });
        }

        // Only allow users to terminate their own sessions (or admins to terminate any)
        if (session.user_id !== userId && req.user.role !== 'admin') {
          return res.status(403).json({ error: 'Permission denied' });
        }

        // Prevent terminating current session
        if (session.session_token === currentToken) {
          return res.status(400).json({ error: 'Cannot terminate current session. Use logout instead.' });
        }

        // Terminate session
        db.run(
          'DELETE FROM active_sessions WHERE id = ?',
          [sessionId],
          async (err) => {
            if (err) {
              logger.error('Error terminating session:', err);
              return res.status(500).json({ error: 'Failed to terminate session' });
            }

            // Log the event (audit trail includes the terminated session's IP)
            await SecurityAuditLogger.logSessionEvent('SESSION_TERMINATED', userId, {
              ip: req.ip || req.headers['x-forwarded-for'],
              userAgent: req.headers['user-agent'],
              sessionId: sessionId,
              terminatedSessionIp: session.ip_address
            });

            logger.info(`Session ${sessionId} terminated by user ${userId}`);
            res.json({ message: 'Session terminated successfully' });
          }
        );
      }
    );
  } catch (error) {
    logger.error('Session termination error:', error);
    res.status(500).json({ error: 'Failed to terminate session' });
  }
});
|
||||
|
||||
/**
 * Terminate all other sessions (keep current)
 * POST /terminate-all-others
 * Deletes every session row belonging to the caller except the one
 * matching this request's bearer token, then reports how many were
 * removed.
 */
router.post('/terminate-all-others', authenticate, modifyLimiter, async (req, res) => {
  try {
    const userId = req.user.userId;
    // Bearer token of this request's session — excluded from deletion.
    const currentToken = req.headers.authorization?.split(' ')[1];

    db.run(
      'DELETE FROM active_sessions WHERE user_id = ? AND session_token != ?',
      [userId, currentToken],
      // Plain function (not arrow) so sqlite3 can bind `this.changes`.
      async function(err) {
        if (err) {
          logger.error('Error terminating sessions:', err);
          return res.status(500).json({ error: 'Failed to terminate sessions' });
        }

        const terminatedCount = this.changes;

        // Log the event
        await SecurityAuditLogger.logSessionEvent('SESSIONS_TERMINATED_BULK', userId, {
          ip: req.ip || req.headers['x-forwarded-for'],
          userAgent: req.headers['user-agent'],
          count: terminatedCount
        });

        logger.info(`User ${userId} terminated ${terminatedCount} other sessions`);
        res.json({
          message: `${terminatedCount} session(s) terminated successfully`,
          count: terminatedCount
        });
      }
    );
  } catch (error) {
    logger.error('Bulk session termination error:', error);
    res.status(500).json({ error: 'Failed to terminate sessions' });
  }
});
|
||||
|
||||
/**
 * Force logout user (admin only) - terminates all sessions
 * POST /force-logout/:userId
 * Deletes every active session for the target user and records an
 * audit event naming the acting admin.
 */
router.post('/force-logout/:userId', authenticate, requireAdmin, modifyLimiter, async (req, res) => {
  try {
    const { userId } = req.params;

    // Validate user ID
    if (isNaN(parseInt(userId, 10))) {
      return res.status(400).json({ error: 'Invalid user ID' });
    }

    // Look up the target user (username is used in the response and log).
    db.get(
      'SELECT username FROM users WHERE id = ?',
      [userId],
      async (err, user) => {
        // Fix: distinguish a database failure (500) from a missing user
        // (404); the original `if (err || !user)` returned 404 for both,
        // hiding DB errors from the admin.
        if (err) {
          logger.error('Error looking up user for force logout:', err);
          return res.status(500).json({ error: 'Failed to force logout' });
        }

        if (!user) {
          return res.status(404).json({ error: 'User not found' });
        }

        // Terminate all sessions for this user
        db.run(
          'DELETE FROM active_sessions WHERE user_id = ?',
          [userId],
          // Plain function so sqlite3 binds `this.changes`.
          async function(err) {
            if (err) {
              logger.error('Error force logging out user:', err);
              return res.status(500).json({ error: 'Failed to force logout' });
            }

            const terminatedCount = this.changes;

            // Audit: which admin forced the logout and how many sessions dropped.
            await SecurityAuditLogger.logSessionEvent('FORCE_LOGOUT', userId, {
              ip: req.ip || req.headers['x-forwarded-for'],
              userAgent: req.headers['user-agent'],
              adminId: req.user.userId,
              count: terminatedCount
            });

            logger.warn(`Admin ${req.user.userId} force logged out user ${userId} (${user.username}), terminated ${terminatedCount} sessions`);
            res.json({
              message: `User ${user.username} has been logged out`,
              count: terminatedCount
            });
          }
        );
      }
    );
  } catch (error) {
    logger.error('Force logout error:', error);
    res.status(500).json({ error: 'Failed to force logout' });
  }
});
|
||||
|
||||
/**
 * Get session statistics (admin only)
 * GET /stats
 * Returns the total active session count, the top-10 users by live
 * session count, and the 20 most recently created sessions. The three
 * queries are nested (sequential) because the final response needs all
 * of them.
 */
router.get('/stats', authenticate, requireAdmin, readLimiter, async (req, res) => {
  try {
    const now = new Date().toISOString();

    // Total active sessions
    db.get(
      'SELECT COUNT(*) as total FROM active_sessions WHERE expires_at > ?',
      [now],
      (err, totalResult) => {
        if (err) {
          logger.error('Error fetching session stats:', err);
          return res.status(500).json({ error: 'Failed to fetch statistics' });
        }

        // Sessions by user — LEFT JOIN so users with zero live sessions
        // still appear with session_count = 0 (filtered out below).
        db.all(
          `SELECT u.username, u.email, COUNT(s.id) as session_count
          FROM users u
          LEFT JOIN active_sessions s ON u.id = s.user_id AND s.expires_at > ?
          GROUP BY u.id
          ORDER BY session_count DESC
          LIMIT 10`,
          [now],
          (err, userSessions) => {
            if (err) {
              logger.error('Error fetching user sessions:', err);
              return res.status(500).json({ error: 'Failed to fetch statistics' });
            }

            // Recent sessions
            db.all(
              `SELECT s.*, u.username
              FROM active_sessions s
              JOIN users u ON s.user_id = u.id
              WHERE s.expires_at > ?
              ORDER BY s.created_at DESC
              LIMIT 20`,
              [now],
              (err, recentSessions) => {
                if (err) {
                  logger.error('Error fetching recent sessions:', err);
                  return res.status(500).json({ error: 'Failed to fetch statistics' });
                }

                res.json({
                  totalActiveSessions: totalResult.total,
                  // Drop zero-session users from the "top users" list.
                  topUsers: userSessions.filter(u => u.session_count > 0),
                  // Expose only non-sensitive session fields to the client
                  // (no tokens, no raw row dump).
                  recentSessions: recentSessions.map(s => ({
                    username: s.username,
                    ip: s.ip_address,
                    created: s.created_at,
                    lastActive: s.last_activity
                  }))
                });
              }
            );
          }
        );
      }
    );
  } catch (error) {
    logger.error('Session stats error:', error);
    res.status(500).json({ error: 'Failed to fetch statistics' });
  }
});
|
||||
|
||||
module.exports = router;
|
||||
134
backend/routes/settings.js
Normal file
134
backend/routes/settings.js
Normal file
|
|
@ -0,0 +1,134 @@
|
|||
const express = require('express');
|
||||
const router = express.Router();
|
||||
const { authenticate } = require('../middleware/auth');
|
||||
const { modifyLimiter, readLimiter } = require('../middleware/rateLimiter');
|
||||
const { db } = require('../database/db');
|
||||
const { validateSettings } = require('../middleware/inputValidation');
|
||||
const logger = require('../utils/logger');
|
||||
|
||||
const SecurityAuditLogger = require('../utils/securityAudit');
|
||||
|
||||
// Get user settings
// Returns all of the caller's settings as one flat object
// ({ key: parsedValue }). Values stored as JSON text are parsed; a
// value that fails to parse is returned as the raw stored string.
// Each read is written to the security audit log (CWE-778).
router.get('/', authenticate, readLimiter, async (req, res) => {
  const ip = req.ip || req.headers['x-forwarded-for'] || req.connection.remoteAddress;
  const userAgent = req.headers['user-agent'];

  db.all(
    'SELECT key, value FROM settings WHERE user_id = ?',
    [req.user.userId],
    async (err, settings) => {
      if (err) {
        return res.status(500).json({ error: 'Failed to fetch settings' });
      }

      // CWE-778: Log sensitive data access
      await SecurityAuditLogger.logSensitiveDataAccess(req.user.userId, 'settings', {
        ip,
        userAgent,
        recordCount: settings.length,
        scope: 'own',
        accessMethod: 'view'
      });

      // Convert array to object; JSON values are parsed, raw strings pass through.
      const settingsObj = {};
      settings.forEach(s => {
        try {
          settingsObj[s.key] = JSON.parse(s.value);
        } catch {
          settingsObj[s.key] = s.value;
        }
      });

      res.json(settingsObj);
    }
  );
});
|
||||
|
||||
// Update setting
// PUT /:key — upserts one per-user setting. The value is persisted as
// JSON text (strings are stored verbatim) and echoed back unchanged.
router.put('/:key', authenticate, modifyLimiter, validateSettings, (req, res) => {
  const { key } = req.params;
  const { value } = req.body;

  // Strings go in verbatim; everything else is serialized to JSON.
  const jsonValue = typeof value === 'string' ? value : JSON.stringify(value);

  const upsertSql = `INSERT INTO settings (user_id, key, value, updated_at)
     VALUES (?, ?, ?, CURRENT_TIMESTAMP)
     ON CONFLICT(user_id, key)
     DO UPDATE SET value = ?, updated_at = CURRENT_TIMESTAMP`;

  db.run(upsertSql, [req.user.userId, key, jsonValue, jsonValue], function(err) {
    if (err) {
      return res.status(500).json({ error: 'Failed to update setting' });
    }

    res.json({ key, value });
  });
});
|
||||
|
||||
// Get specific setting
// GET /:key — returns the caller's stored value for `key`, JSON-parsed
// when possible. `stream_settings` is special-cased: if the user has
// never saved one, built-in defaults are returned instead of a 404.
// (The original duplicated the entire db.get flow for that special
// case; this version shares one query path.)
router.get('/:key', authenticate, readLimiter, (req, res) => {
  const { key } = req.params;

  // Validate key format
  if (!key || !/^[a-zA-Z0-9_.-]+$/.test(key)) {
    return res.status(400).json({ error: 'Invalid setting key' });
  }

  // Defaults served for stream_settings when nothing has been saved.
  const STREAM_SETTINGS_DEFAULTS = {
    hwaccel: 'auto',
    hwaccel_device: '/dev/dri/renderD128',
    codec: 'h264',
    preset: 'veryfast',
    buffer_size: '2M',
    max_bitrate: '8M'
  };

  db.get(
    'SELECT value FROM settings WHERE user_id = ? AND key = ?',
    [req.user.userId, key],
    (err, setting) => {
      if (err) {
        return res.status(500).json({ error: 'Failed to fetch setting' });
      }

      if (!setting) {
        // stream_settings falls back to defaults; any other missing key is a 404.
        if (key === 'stream_settings') {
          return res.json({ value: STREAM_SETTINGS_DEFAULTS });
        }
        return res.status(404).json({ error: 'Setting not found' });
      }

      // Stored values are JSON when possible, raw strings otherwise.
      try {
        res.json({ value: JSON.parse(setting.value) });
      } catch {
        res.json({ value: setting.value });
      }
    }
  );
});
|
||||
|
||||
module.exports = router;
|
||||
425
backend/routes/siem.js
Normal file
425
backend/routes/siem.js
Normal file
|
|
@ -0,0 +1,425 @@
|
|||
/**
|
||||
* SIEM (Security Information and Event Management) API Routes
|
||||
* Provides access to centralized log aggregation, security intelligence,
|
||||
* anomaly detection, and real-time alerts
|
||||
*/
|
||||
|
||||
const express = require('express');
|
||||
const router = express.Router();
|
||||
const { authenticate } = require('../middleware/auth');
|
||||
const { requirePermission } = require('../middleware/rbac');
|
||||
const { validatePagination, validateIdParam } = require('../middleware/inputValidation');
|
||||
const logAggregator = require('../utils/logAggregator');
|
||||
const securityIntelligence = require('../utils/securityIntelligence');
|
||||
const alertSystem = require('../utils/alertSystem');
|
||||
const logger = require('../utils/logger');
|
||||
|
||||
/**
 * GET /api/siem/logs
 * Query aggregated logs with filtering
 * Supported filters: source, level, category, userId, startDate,
 * endDate, plus limit/offset pagination (defaults 100/0). Every query
 * is itself recorded as a SIEM access event for auditability.
 */
router.get('/logs',
  authenticate,
  requirePermission('security.view_audit'),
  validatePagination,
  async (req, res) => {
    try {
      // Numeric query params arrive as strings; normalize them here.
      const filters = {
        source: req.query.source,
        level: req.query.level,
        category: req.query.category,
        userId: req.query.userId ? parseInt(req.query.userId) : undefined,
        startDate: req.query.startDate,
        endDate: req.query.endDate,
        limit: req.query.limit ? parseInt(req.query.limit) : 100,
        offset: req.query.offset ? parseInt(req.query.offset) : 0
      };

      const logs = await logAggregator.query(filters);

      // Self-audit: record who queried the SIEM and with what filters.
      logAggregator.aggregate('siem', 'info', 'access', 'SIEM logs queried', {
        userId: req.user.id,
        filters
      });

      res.json({
        success: true,
        data: logs,
        filters,
        count: logs.length
      });
    } catch (error) {
      logger.error('[SIEM API] Error querying logs:', error);
      res.status(500).json({
        success: false,
        message: 'Failed to query logs',
        error: error.message
      });
    }
  }
);
|
||||
|
||||
/**
 * POST /api/siem/logs/verify
 * Verify log integrity (check for tampering).
 * Body may carry a logIds array to scope the check; an empty or absent
 * array means "verify everything". The sweep itself is logged.
 */
router.post('/logs/verify',
  authenticate,
  requirePermission('security.manage'),
  async (req, res) => {
    try {
      const requestedIds = req.body.logIds || [];
      const idScope = requestedIds.length > 0 ? requestedIds : null;

      const verification = await logAggregator.verifyIntegrity(idScope);

      // Record that a verification sweep happened and what it found.
      logAggregator.aggregate('siem', 'info', 'security', 'Log integrity verification performed', {
        userId: req.user.id,
        checkedCount: verification.total,
        tamperedCount: verification.tampered
      });

      res.json({ success: true, data: verification });
    } catch (error) {
      logger.error('[SIEM API] Error verifying logs:', error);
      res.status(500).json({
        success: false,
        message: 'Failed to verify log integrity',
        error: error.message
      });
    }
  }
);
|
||||
|
||||
/**
 * GET /api/siem/statistics
 * Get log statistics over a trailing time window.
 * ?timeRange= is in hours; defaults to the last 24.
 */
router.get('/statistics',
  authenticate,
  requirePermission('security.view_audit'),
  async (req, res) => {
    try {
      const hours = req.query.timeRange ? parseInt(req.query.timeRange) : 24;

      const statistics = await logAggregator.getStatistics(hours);

      res.json({
        success: true,
        data: statistics,
        timeRange: `${hours} hours`
      });
    } catch (error) {
      logger.error('[SIEM API] Error getting statistics:', error);
      res.status(500).json({
        success: false,
        message: 'Failed to get statistics',
        error: error.message
      });
    }
  }
);
|
||||
|
||||
/**
 * GET /api/siem/export
 * Export logs in JSON or CSV format (?format=csv, default json) with
 * optional source/level/date filters. Requires 'security.manage'; the
 * export itself is recorded in the aggregated log for traceability.
 */
router.get('/export',
  authenticate,
  requirePermission('security.manage'),
  async (req, res) => {
    try {
      const format = req.query.format || 'json';
      const filters = {
        source: req.query.source,
        level: req.query.level,
        startDate: req.query.startDate,
        endDate: req.query.endDate
      };

      const exportData = await logAggregator.export(filters, format);

      logAggregator.aggregate('siem', 'info', 'security', 'Logs exported', {
        userId: req.user.id,
        format,
        filters
      });

      const contentType = format === 'csv' ? 'text/csv' : 'application/json';
      const filename = `siem_export_${Date.now()}.${format}`;

      res.setHeader('Content-Type', contentType);
      // Fix: interpolate the generated filename — the header previously
      // contained a garbled literal instead of `${filename}`, leaving the
      // computed `filename` variable unused and downloads misnamed.
      res.setHeader('Content-Disposition', `attachment; filename="${filename}"`);
      res.send(exportData);
    } catch (error) {
      logger.error('[SIEM API] Error exporting logs:', error);
      res.status(500).json({
        success: false,
        message: 'Failed to export logs',
        error: error.message
      });
    }
  }
);
|
||||
|
||||
/**
 * GET /api/siem/anomalies
 * Get detected anomalies
 * Filters: status (default 'open'), severity, type; limit/offset
 * pagination (defaults 100/0).
 */
router.get('/anomalies',
  authenticate,
  requirePermission('security.view_audit'),
  validatePagination,
  async (req, res) => {
    try {
      // Pagination params arrive as strings; normalize to numbers here.
      const filters = {
        status: req.query.status || 'open',
        severity: req.query.severity,
        type: req.query.type,
        limit: req.query.limit ? parseInt(req.query.limit) : 100,
        offset: req.query.offset ? parseInt(req.query.offset) : 0
      };

      const anomalies = await securityIntelligence.getAnomalies(filters);

      res.json({
        success: true,
        data: anomalies,
        count: anomalies.length
      });
    } catch (error) {
      logger.error('[SIEM API] Error getting anomalies:', error);
      res.status(500).json({
        success: false,
        message: 'Failed to get anomalies',
        error: error.message
      });
    }
  }
);
|
||||
|
||||
/**
 * POST /api/siem/anomalies/:id/resolve
 * Resolve an anomaly
 * Marks the anomaly resolved by the current user with optional notes
 * from the request body, and records the resolution in the aggregated
 * log for the audit trail.
 */
router.post('/anomalies/:id/resolve',
  authenticate,
  requirePermission('security.manage'),
  validateIdParam,
  async (req, res) => {
    try {
      const anomalyId = req.params.id;
      const notes = req.body.notes || '';

      await securityIntelligence.resolveAnomaly(anomalyId, req.user.id, notes);

      // Audit trail: who resolved which anomaly, and why.
      logAggregator.aggregate('siem', 'info', 'security', 'Anomaly resolved', {
        userId: req.user.id,
        anomalyId,
        notes
      });

      res.json({
        success: true,
        message: 'Anomaly resolved successfully'
      });
    } catch (error) {
      logger.error('[SIEM API] Error resolving anomaly:', error);
      res.status(500).json({
        success: false,
        message: 'Failed to resolve anomaly',
        error: error.message
      });
    }
  }
);
|
||||
|
||||
/**
 * GET /api/siem/threats
 * Get threat intelligence data, optionally filtered by level and type.
 * ?limit= caps the result set (default 100).
 */
router.get('/threats',
  authenticate,
  requirePermission('security.view_audit'),
  validatePagination,
  async (req, res) => {
    try {
      const { level, type } = req.query;
      const limit = req.query.limit ? parseInt(req.query.limit) : 100;

      const threats = await securityIntelligence.getThreatIntelligence({ level, type, limit });

      res.json({
        success: true,
        data: threats,
        count: threats.length
      });
    } catch (error) {
      logger.error('[SIEM API] Error getting threats:', error);
      res.status(500).json({
        success: false,
        message: 'Failed to get threat intelligence',
        error: error.message
      });
    }
  }
);
|
||||
|
||||
/**
 * GET /api/siem/alerts
 * List security alerts. Defaults to `status=active`; supports severity
 * filtering plus limit/offset pagination. Requires `security.view_audit`.
 */
router.get('/alerts',
  authenticate,
  requirePermission('security.view_audit'),
  validatePagination,
  async (req, res) => {
    try {
      const { status, severity, limit, offset } = req.query;
      const filters = {
        status: status || 'active',
        severity,
        limit: limit ? parseInt(limit) : 100,
        offset: offset ? parseInt(offset) : 0
      };

      const alerts = await alertSystem.getAlerts(filters);

      res.json({ success: true, data: alerts, count: alerts.length });
    } catch (error) {
      logger.error('[SIEM API] Error getting alerts:', error);
      res.status(500).json({
        success: false,
        message: 'Failed to get alerts',
        error: error.message
      });
    }
  }
);
|
||||
|
||||
/**
 * POST /api/siem/alerts/:id/acknowledge
 * Acknowledge an alert (it stays open but is marked as seen by this user).
 * Requires the `security.view_audit` permission.
 */
router.post('/alerts/:id/acknowledge',
  authenticate,
  requirePermission('security.view_audit'),
  validateIdParam,
  async (req, res) => {
    try {
      const { id: alertId } = req.params;
      const notes = req.body.notes || '';

      await alertSystem.acknowledgeAlert(alertId, req.user.id, notes);

      res.json({ success: true, message: 'Alert acknowledged successfully' });
    } catch (error) {
      logger.error('[SIEM API] Error acknowledging alert:', error);
      res.status(500).json({
        success: false,
        message: 'Failed to acknowledge alert',
        error: error.message
      });
    }
  }
);
|
||||
|
||||
/**
 * POST /api/siem/alerts/:id/resolve
 * Close out an alert. Unlike acknowledge, this requires the stronger
 * `security.manage` permission. `notes` is passed through as-is
 * (may be undefined — the alert system decides how to handle that).
 */
router.post('/alerts/:id/resolve',
  authenticate,
  requirePermission('security.manage'),
  validateIdParam,
  async (req, res) => {
    try {
      const { id: alertId } = req.params;
      const { notes } = req.body;

      await alertSystem.resolveAlert(alertId, req.user.id, notes);

      res.json({ success: true, message: 'Alert resolved successfully' });
    } catch (error) {
      logger.error('[SIEM API] Error resolving alert:', error);
      res.status(500).json({
        success: false,
        message: 'Failed to resolve alert',
        error: error.message
      });
    }
  }
);
|
||||
|
||||
/**
 * GET /api/siem/dashboard
 * Return the combined security-intelligence dashboard payload:
 * the intelligence summary merged with alert statistics.
 * Requires the `security.view_audit` permission.
 */
router.get('/dashboard',
  authenticate,
  requirePermission('security.view_audit'),
  async (req, res) => {
    try {
      // Both data sources are independent — fetch them concurrently
      const results = await Promise.all([
        securityIntelligence.getDashboardData(),
        alertSystem.getStatistics()
      ]);
      const [dashboardData, alertStats] = results;

      res.json({
        success: true,
        data: { ...dashboardData, alertStats }
      });
    } catch (error) {
      logger.error('[SIEM API] Error getting dashboard data:', error);
      res.status(500).json({
        success: false,
        message: 'Failed to get dashboard data',
        error: error.message
      });
    }
  }
);
|
||||
|
||||
/**
 * GET /api/siem/alert-rules
 * Return the currently configured alert rules.
 * Requires the `security.manage` permission.
 */
router.get('/alert-rules',
  authenticate,
  requirePermission('security.manage'),
  async (req, res) => {
    try {
      // alertRules is a Map keyed by rule id; expose its values as an array
      const rules = [...alertSystem.alertRules.values()];

      res.json({ success: true, data: rules, count: rules.length });
    } catch (error) {
      logger.error('[SIEM API] Error getting alert rules:', error);
      res.status(500).json({
        success: false,
        message: 'Failed to get alert rules',
        error: error.message
      });
    }
  }
);
|
||||
|
||||
module.exports = router;
|
||||
243
backend/routes/stats.js
Normal file
243
backend/routes/stats.js
Normal file
|
|
@ -0,0 +1,243 @@
|
|||
const express = require('express');
|
||||
const router = express.Router();
|
||||
const { authenticate, requireAdmin } = require('../middleware/auth');
|
||||
const { readLimiter } = require('../middleware/rateLimiter');
|
||||
const { db } = require('../database/db');
|
||||
const os = require('os');
|
||||
const logger = require('../utils/logger');
|
||||
|
||||
/**
 * Get overall statistics (admin only).
 *
 * Builds a single `stats` payload from a chain of sequential queries.
 * Note: each query's `err` is deliberately ignored and the count falls
 * back to 0 (`result?.count || 0`) — a failed sub-query degrades the
 * numbers rather than failing the whole request.
 */
router.get('/overview', readLimiter, authenticate, requireAdmin, (req, res) => {
  const stats = {};

  // Get total counts — callbacks are nested so each assignment completes
  // before the next query runs (sqlite3 callback API, no promises here).
  db.get('SELECT COUNT(*) as count FROM users', [], (err, result) => {
    stats.totalUsers = result?.count || 0;

    db.get('SELECT COUNT(*) as count FROM channels WHERE is_active = 1', [], (err, result) => {
      stats.totalChannels = result?.count || 0;

      db.get('SELECT COUNT(*) as count FROM channels WHERE is_radio = 1 AND is_active = 1', [], (err, result) => {
        stats.totalRadioChannels = result?.count || 0;
        // TV count is derived, not queried, so the two always sum to totalChannels
        stats.totalTvChannels = stats.totalChannels - stats.totalRadioChannels;

        db.get('SELECT COUNT(*) as count FROM playlists', [], (err, result) => {
          stats.totalPlaylists = result?.count || 0;

          db.get('SELECT COUNT(*) as count FROM watch_history', [], (err, result) => {
            stats.totalWatchHistory = result?.count || 0;

            // Channel health stats, grouped by the stored health_status value
            db.all(`
              SELECT health_status, COUNT(*) as count
              FROM channels
              WHERE is_active = 1
              GROUP BY health_status
            `, [], (err, rows) => {
              stats.channelHealth = {
                healthy: 0,
                degraded: 0,
                dead: 0,
                unknown: 0
              };

              // NOTE(review): unlike the db.get calls above, `rows` is used
              // without a null guard — if this query errors, `rows` is
              // undefined and .forEach throws. Confirm whether a guard is wanted.
              rows.forEach(row => {
                if (row.health_status) {
                  stats.channelHealth[row.health_status] = row.count;
                }
              });

              // System resource usage snapshot (process + host level)
              stats.system = {
                cpuUsage: process.cpuUsage(),
                memoryUsage: process.memoryUsage(),
                uptime: process.uptime(),
                platform: os.platform(),
                totalMemory: os.totalmem(),
                freeMemory: os.freemem(),
                cpuCount: os.cpus().length,
                loadAverage: os.loadavg()
              };

              res.json(stats);
            });
          });
        });
      });
    });
  });
});
|
||||
|
||||
/**
 * Get most watched channels across all users (admin only).
 *
 * Query params:
 *   limit - maximum rows to return (default 20)
 *   days  - look-back window in days (default 30)
 */
router.get('/top-channels', readLimiter, authenticate, requireAdmin, (req, res) => {
  const { limit = 20, days = 30 } = req.query;
  // Coerce query params explicitly (radix 10) so a malformed value can't
  // bind NaN into the SQL; fall back to the documented defaults instead.
  const rowLimit = Number.parseInt(limit, 10) || 20;
  const windowDays = Number.parseInt(days, 10) || 30;

  db.all(`
    SELECT
      c.id,
      c.name,
      c.logo,
      c.custom_logo,
      c.group_name,
      c.is_radio,
      COUNT(wh.id) as watch_count,
      COUNT(DISTINCT wh.user_id) as unique_users,
      SUM(wh.duration) as total_duration,
      AVG(wh.duration) as avg_duration,
      MAX(wh.watched_at) as last_watched
    FROM watch_history wh
    INNER JOIN channels c ON wh.channel_id = c.id
    WHERE wh.watched_at >= datetime('now', '-' || ? || ' days')
    GROUP BY c.id
    ORDER BY watch_count DESC
    LIMIT ?
  `, [windowDays, rowLimit], (err, rows) => {
    if (err) {
      logger.error('Error fetching top channels:', err);
      return res.status(500).json({ error: 'Failed to fetch top channels' });
    }
    res.json(rows);
  });
});
|
||||
|
||||
/**
 * Get usage patterns by hour of day (admin only).
 * ?days=N restricts the look-back window (default 7).
 */
router.get('/usage-by-hour', readLimiter, authenticate, requireAdmin, (req, res) => {
  const { days = 7 } = req.query;

  const sql = `
    SELECT
      CAST(strftime('%H', watched_at) AS INTEGER) as hour,
      COUNT(*) as view_count,
      COUNT(DISTINCT user_id) as unique_users
    FROM watch_history
    WHERE watched_at >= datetime('now', '-' || ? || ' days')
    GROUP BY hour
    ORDER BY hour
  `;

  db.all(sql, [days], (err, rows) => {
    if (err) {
      logger.error('Error fetching usage by hour:', err);
      return res.status(500).json({ error: 'Failed to fetch usage by hour' });
    }
    res.json(rows);
  });
});
|
||||
|
||||
/**
 * Get daily usage trends (admin only).
 * ?days=N restricts the look-back window (default 30); newest day first.
 */
router.get('/trends', readLimiter, authenticate, requireAdmin, (req, res) => {
  const { days = 30 } = req.query;

  const sql = `
    SELECT
      DATE(watched_at) as date,
      COUNT(*) as view_count,
      COUNT(DISTINCT user_id) as unique_users,
      COUNT(DISTINCT channel_id) as unique_channels,
      SUM(duration) as total_duration
    FROM watch_history
    WHERE watched_at >= datetime('now', '-' || ? || ' days')
    GROUP BY date
    ORDER BY date DESC
  `;

  db.all(sql, [days], (err, rows) => {
    if (err) {
      logger.error('Error fetching trends:', err);
      return res.status(500).json({ error: 'Failed to fetch trends' });
    }
    res.json(rows);
  });
});
|
||||
|
||||
/**
 * Get per-user activity stats (admin only).
 * LEFT JOIN keeps users with zero activity in the window (?days=N, default 30).
 */
router.get('/user-activity', readLimiter, authenticate, requireAdmin, (req, res) => {
  const { days = 30 } = req.query;

  const sql = `
    SELECT
      u.id,
      u.username,
      u.email,
      COUNT(wh.id) as watch_count,
      SUM(wh.duration) as total_duration,
      MAX(wh.watched_at) as last_active
    FROM users u
    LEFT JOIN watch_history wh ON u.id = wh.user_id
      AND wh.watched_at >= datetime('now', '-' || ? || ' days')
    WHERE u.is_active = 1
    GROUP BY u.id
    ORDER BY watch_count DESC
  `;

  db.all(sql, [days], (err, rows) => {
    if (err) {
      logger.error('Error fetching user activity:', err);
      return res.status(500).json({ error: 'Failed to fetch user activity' });
    }
    res.json(rows);
  });
});
|
||||
|
||||
/**
 * Get the current user's personal viewing stats.
 *
 * Response shape: { overview, favoriteGroup, byDayOfWeek }.
 * Only the first query returns 500 on error; the two inner queries
 * deliberately ignore `err` and fall back to null / [] so partial data
 * is still returned.
 */
router.get('/my-stats', readLimiter, authenticate, (req, res) => {
  const user_id = req.user.userId;
  const { days = 30 } = req.query;

  const stats = {};

  // Total watch count / duration / distinct channels in the window
  db.get(`
    SELECT
      COUNT(*) as watch_count,
      SUM(duration) as total_duration,
      COUNT(DISTINCT channel_id) as unique_channels
    FROM watch_history
    WHERE user_id = ?
      AND watched_at >= datetime('now', '-' || ? || ' days')
  `, [user_id, days], (err, result) => {
    if (err) {
      return res.status(500).json({ error: 'Failed to fetch stats' });
    }

    stats.overview = result;

    // Most watched channel group (err ignored; falls back to null)
    db.get(`
      SELECT c.group_name, COUNT(*) as count
      FROM watch_history wh
      INNER JOIN channels c ON wh.channel_id = c.id
      WHERE wh.user_id = ?
        AND wh.watched_at >= datetime('now', '-' || ? || ' days')
        AND c.group_name IS NOT NULL
      GROUP BY c.group_name
      ORDER BY count DESC
      LIMIT 1
    `, [user_id, days], (err, favGroup) => {
      stats.favoriteGroup = favGroup?.group_name || null;

      // Watch counts by day of week (0 = Sunday per strftime('%w'));
      // err ignored, falls back to []
      db.all(`
        SELECT
          CAST(strftime('%w', watched_at) AS INTEGER) as day_of_week,
          COUNT(*) as count
        FROM watch_history
        WHERE user_id = ?
          AND watched_at >= datetime('now', '-' || ? || ' days')
        GROUP BY day_of_week
        ORDER BY day_of_week
      `, [user_id, days], (err, weekData) => {
        stats.byDayOfWeek = weekData || [];
        res.json(stats);
      });
    });
  });
});
|
||||
|
||||
module.exports = router;
|
||||
418
backend/routes/stream.js
Normal file
418
backend/routes/stream.js
Normal file
|
|
@ -0,0 +1,418 @@
|
|||
const express = require('express');
|
||||
const router = express.Router();
|
||||
const { spawn, exec } = require('child_process');
|
||||
const axios = require('axios');
|
||||
const https = require('https');
|
||||
const { SocksProxyAgent } = require('socks-proxy-agent');
|
||||
const { HttpsProxyAgent } = require('https-proxy-agent');
|
||||
const { authenticate } = require('../middleware/auth');
|
||||
const { heavyLimiter, readLimiter } = require('../middleware/rateLimiter');
|
||||
const { db } = require('../database/db');
|
||||
const logger = require('../utils/logger');
|
||||
|
||||
// HTTPS agent to bypass SSL certificate verification for IPTV streams.
// NOTE(review): rejectUnauthorized:false disables TLS certificate
// validation for every upstream this agent touches, which permits
// man-in-the-middle interception of stream traffic — confirm this
// trade-off is intended (many IPTV providers use self-signed certs).
const httpsAgent = new https.Agent({
  rejectUnauthorized: false
});
|
||||
|
||||
// Resolve the HTTPS agent to use for a user's stream requests.
// Both branches currently return the same shared agent: when a VPN row is
// connected, OS-level routing carries the traffic through the VPN
// interface, so no per-request agent swap is needed. Never rejects —
// a DB error simply falls back to the standard agent.
const getVPNAgent = (userId) =>
  new Promise((resolve) => {
    const sql = 'SELECT connected FROM vpn_settings WHERE user_id = ? AND connected = 1';
    db.get(sql, [userId], (err, row) => {
      const vpnActive = !err && Boolean(row);
      if (vpnActive) {
        logger.info('VPN active for user, traffic will route through VPN');
      }
      resolve(httpsAgent);
    });
  });
|
||||
|
||||
// Probe the host for hardware video-acceleration backends.
// Detection is heuristic: VAAPI/Quick Sync via the presence of the DRI
// render node, NVENC via a successful `nvidia-smi` invocation.
// videotoolbox is declared but never probed here.
const checkHardwareAcceleration = () => {
  const fs = require('fs');
  const { execSync } = require('child_process');

  const capabilities = {
    quicksync: false,
    nvenc: false,
    vaapi: false,
    videotoolbox: false
  };

  // Intel Quick Sync / VAAPI: render node typically at /dev/dri/renderD128
  try {
    if (fs.existsSync('/dev/dri/renderD128')) {
      capabilities.quicksync = true;
      capabilities.vaapi = true;
    }
  } catch (err) {
    logger.debug('Quick Sync not available');
  }

  // NVIDIA NVENC: nvidia-smi exiting cleanly implies a usable driver
  try {
    execSync('nvidia-smi', { stdio: 'ignore' });
    capabilities.nvenc = true;
  } catch (err) {
    logger.debug('NVENC not available');
  }

  return capabilities;
};
|
||||
|
||||
// Report which hardware-acceleration backends are available on this host.
router.get('/capabilities', authenticate, readLimiter, (req, res) => {
  res.json(checkHardwareAcceleration());
});
|
||||
|
||||
// Default transcode settings, used when the user has none saved or the
// stored JSON is corrupt. Frozen so the shared template can't be mutated;
// callers receive a fresh copy each time.
const DEFAULT_STREAM_SETTINGS = Object.freeze({
  hwaccel: 'auto',
  hwaccel_device: '/dev/dri/renderD128',
  codec: 'h264',
  preset: 'veryfast',
  buffer_size: '2M',
  max_bitrate: '8M'
});

/**
 * Load a user's stream settings (callback-style, matching the sqlite3 API).
 *
 * @param {number|string} userId - id of the user whose settings to load
 * @param {Function} callback - invoked with a settings object; never with
 *   an error — DB failure, missing row, or unparsable JSON all fall back
 *   to DEFAULT_STREAM_SETTINGS.
 */
const getStreamSettings = (userId, callback) => {
  db.get(
    'SELECT value FROM settings WHERE user_id = ? AND key = ?',
    [userId, 'stream_settings'],
    (err, result) => {
      if (err || !result) {
        return callback({ ...DEFAULT_STREAM_SETTINGS });
      }

      try {
        callback(JSON.parse(result.value));
      } catch {
        // Stored value is corrupt — fall back rather than fail the stream
        callback({ ...DEFAULT_STREAM_SETTINGS });
      }
    }
  );
};
|
||||
|
||||
// Universal proxy for all streams with geo-blocking bypass.
//
// Flow: look up the channel -> fetch upstream with spoofed Origin/Referer
// and a browser User-Agent -> either rewrite an HLS manifest so every
// segment URL points back at /api/stream/hls-segment, or pipe the binary
// stream straight through to the client.
// TODO(review): console.log/console.error here duplicates `logger` usage
// elsewhere in this file — consider standardizing on `logger`.
router.get('/proxy/:channelId', authenticate, heavyLimiter, async (req, res) => {
  const { channelId } = req.params;
  console.log(`[STREAM] Proxy request for channel ${channelId}`);

  try {
    // Promisified channel lookup; rejects on DB error or unknown id
    const channel = await new Promise((resolve, reject) => {
      db.get('SELECT url, name, is_radio FROM channels WHERE id = ?', [channelId], (err, row) => {
        if (err) {
          console.error('[STREAM] Database error:', err);
          reject(err);
        } else if (!row) {
          console.error('[STREAM] Channel not found:', channelId);
          reject(new Error('Channel not found'));
        } else {
          console.log('[STREAM] Found channel:', row.name);
          resolve(row);
        }
      });
    });

    if (!channel.url) {
      return res.status(400).json({ error: 'Channel has no URL' });
    }

    logger.info(`Proxying ${channel.is_radio ? 'radio' : 'video'} stream: ${channel.name} - ${channel.url}`);

    // Extract origin from URL for proper spoofing (makes the upstream
    // believe the request came from its own site)
    const urlObj = new URL(channel.url);
    const origin = `${urlObj.protocol}//${urlObj.hostname}`;
    const referer = origin;

    // Get VPN-aware agent for this user
    const agent = await getVPNAgent(req.user.userId);

    const requestConfig = {
      method: 'GET',
      // Manifests are fetched as text so they can be rewritten; everything
      // else is streamed
      url: channel.url,
      responseType: channel.url.includes('.m3u8') ? 'text' : 'stream',
      timeout: 30000,
      // 4xx responses are passed through below; only 5xx throws
      validateStatus: (status) => status < 500,
      httpsAgent: agent,
      headers: {
        'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/120.0.0.0 Safari/537.36',
        'Accept': '*/*',
        'Accept-Language': 'en-US,en;q=0.9',
        'Accept-Encoding': 'identity',
        'Origin': origin,
        'Referer': referer,
        'Connection': 'keep-alive',
        'Sec-Fetch-Dest': 'empty',
        'Sec-Fetch-Mode': 'cors',
        'Sec-Fetch-Site': 'cross-site'
      }
    };

    // Proxy with origin spoofing to bypass geo-blocking
    const response = await axios(requestConfig);

    if (response.status >= 400) {
      logger.error(`Stream returned status ${response.status}`);
      return res.status(response.status).json({ error: 'Stream unavailable' });
    }

    // Handle HLS manifests - rewrite URLs to go through our proxy
    if (channel.url.includes('.m3u8')) {
      const m3u8Content = response.data;
      const baseUrl = channel.url.substring(0, channel.url.lastIndexOf('/') + 1);

      // Rewrite each non-comment line to an absolute URL routed through
      // our /hls-segment endpoint.
      // NOTE(review): the auth token is embedded in the rewritten query
      // string, so it can end up in logs/history — confirm acceptable.
      const rewrittenContent = m3u8Content.split('\n').map(line => {
        if (line.startsWith('#') || line.trim() === '') {
          return line;
        }
        // Convert relative to absolute URL
        let absoluteUrl = line;
        if (!line.startsWith('http')) {
          absoluteUrl = baseUrl + line;
        }
        // Proxy the URL through our server
        const proxyUrl = `/api/stream/hls-segment?url=${encodeURIComponent(absoluteUrl)}&token=${req.query.token}`;
        return proxyUrl;
      }).join('\n');

      res.setHeader('Content-Type', 'application/vnd.apple.mpegurl');
      res.setHeader('Access-Control-Allow-Origin', '*');
      res.setHeader('Access-Control-Allow-Headers', '*');
      res.setHeader('Cache-Control', 'no-cache');
      return res.send(rewrittenContent);
    }

    // For binary streams, pipe directly
    res.setHeader('Content-Type', response.headers['content-type'] || 'application/octet-stream');
    res.setHeader('Cache-Control', 'no-cache');
    res.setHeader('Access-Control-Allow-Origin', '*');
    res.setHeader('Access-Control-Allow-Headers', '*');
    res.setHeader('Access-Control-Allow-Methods', 'GET, HEAD, OPTIONS');

    if (response.headers['content-length']) {
      res.setHeader('Content-Length', response.headers['content-length']);
    }
    if (response.headers['content-range']) {
      res.setHeader('Content-Range', response.headers['content-range']);
      res.setHeader('Accept-Ranges', 'bytes');
    }

    response.data.pipe(res);

    response.data.on('error', (error) => {
      logger.error('Stream error:', error.message);
      if (!res.headersSent) {
        res.status(500).json({ error: 'Stream failed' });
      }
    });

    // Tear down the upstream connection when the client goes away so we
    // don't keep pulling the stream for nobody
    req.on('close', () => {
      logger.info('Client disconnected from stream');
      if (response.data && !response.data.destroyed) {
        response.data.destroy();
      }
    });

  } catch (error) {
    console.error('[STREAM] Proxy error:', error);
    logger.error('Proxy error:', error.message);
    if (!res.headersSent) {
      res.status(500).json({ error: 'Failed to proxy stream', details: error.message });
    }
  }
});
|
||||
|
||||
// Proxy HLS segments (playlists and .ts chunks).
//
// SECURITY NOTE(review): `url` is taken verbatim from the query string,
// so any authenticated user can make this server fetch an arbitrary URL
// (SSRF — including internal addresses). Consider validating the target
// host against the channel table or an allow-list.
router.get('/hls-segment', authenticate, heavyLimiter, async (req, res) => {
  const { url } = req.query;

  if (!url) {
    return res.status(400).json({ error: 'URL parameter required' });
  }

  try {
    // Derive the origin so the upstream sees a same-site-looking request
    const urlObj = new URL(url);
    const origin = `${urlObj.protocol}//${urlObj.hostname}`;

    logger.info(`Proxying HLS segment: ${url}`);

    const response = await axios({
      method: 'GET',
      url: url,
      // Nested playlists come back as text for rewriting; media segments stream
      responseType: url.includes('.m3u8') ? 'text' : 'stream',
      timeout: 15000,
      httpsAgent: httpsAgent,
      validateStatus: (status) => status < 500,
      headers: {
        'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36',
        'Accept': '*/*',
        'Origin': origin,
        'Referer': origin,
        'Connection': 'keep-alive'
      }
    });

    if (response.status >= 400) {
      return res.status(response.status).json({ error: 'Segment unavailable' });
    }

    // Handle nested m3u8 playlists: rewrite every segment/variant URI so it
    // loops back through this same endpoint (token carried in the query)
    if (url.includes('.m3u8')) {
      const m3u8Content = response.data;
      const baseUrl = url.substring(0, url.lastIndexOf('/') + 1);

      const rewrittenContent = m3u8Content.split('\n').map(line => {
        if (line.startsWith('#') || line.trim() === '') {
          return line;
        }
        let absoluteUrl = line;
        if (!line.startsWith('http')) {
          absoluteUrl = baseUrl + line;
        }
        return `/api/stream/hls-segment?url=${encodeURIComponent(absoluteUrl)}&token=${req.query.token}`;
      }).join('\n');

      res.setHeader('Content-Type', 'application/vnd.apple.mpegurl');
      res.setHeader('Access-Control-Allow-Origin', '*');
      res.setHeader('Cache-Control', 'no-cache');
      return res.send(rewrittenContent);
    }

    // Stream binary segments; short client-side cache since .ts chunks
    // are immutable once published
    res.setHeader('Content-Type', response.headers['content-type'] || 'video/mp2t');
    res.setHeader('Access-Control-Allow-Origin', '*');
    res.setHeader('Cache-Control', 'public, max-age=300');

    if (response.headers['content-length']) {
      res.setHeader('Content-Length', response.headers['content-length']);
    }

    response.data.pipe(res);

    response.data.on('error', (error) => {
      logger.error('HLS segment error:', error.message);
      if (!res.headersSent) {
        res.status(500).end();
      }
    });

  } catch (error) {
    logger.error('HLS segment proxy error:', error.message);
    if (!res.headersSent) {
      res.status(500).json({ error: 'Failed to proxy segment' });
    }
  }
});
|
||||
|
||||
// Stream proxy with hardware acceleration (for transcoding if needed).
//
// Remuxes the channel's source into MPEG-TS via a spawned ffmpeg process
// (codecs are copied, not re-encoded) and pipes stdout to the response.
// Hardware decode flags are prepended according to the user's saved
// settings and what checkHardwareAcceleration() detected on the host.
router.get('/proxy-ffmpeg/:channelId', authenticate, heavyLimiter, (req, res) => {
  const { channelId } = req.params;

  db.get(
    'SELECT url FROM channels WHERE id = ?',
    [channelId],
    (err, channel) => {
      if (err || !channel) {
        return res.status(404).json({ error: 'Channel not found' });
      }

      getStreamSettings(req.user.userId, (settings) => {
        const capabilities = checkHardwareAcceleration();

        // Base FFmpeg command: read at native rate (-re), copy both
        // streams, emit MPEG-TS on stdout
        const ffmpegArgs = [
          '-re',
          '-i', channel.url,
          '-c:v', 'copy',
          '-c:a', 'copy',
          '-f', 'mpegts',
          'pipe:1'
        ];

        // Prepend hwaccel input flags if enabled and available.
        // NOTE(review): with -c:v copy no decode/encode happens, so these
        // flags are presumably for a future transcode path — confirm.
        if (settings.hwaccel !== 'none') {
          if (settings.hwaccel === 'quicksync' && capabilities.quicksync) {
            ffmpegArgs.unshift(
              '-hwaccel', 'qsv',
              '-hwaccel_device', settings.hwaccel_device || '/dev/dri/renderD128',
              '-hwaccel_output_format', 'qsv'
            );
          } else if (settings.hwaccel === 'vaapi' && capabilities.vaapi) {
            ffmpegArgs.unshift(
              '-hwaccel', 'vaapi',
              '-hwaccel_device', settings.hwaccel_device || '/dev/dri/renderD128',
              '-hwaccel_output_format', 'vaapi'
            );
          } else if (settings.hwaccel === 'nvenc' && capabilities.nvenc) {
            ffmpegArgs.unshift(
              '-hwaccel', 'cuda',
              '-hwaccel_output_format', 'cuda'
            );
          } else if (settings.hwaccel === 'auto') {
            // Auto-detect best available: Quick Sync preferred over NVENC
            if (capabilities.quicksync) {
              ffmpegArgs.unshift(
                '-hwaccel', 'qsv',
                '-hwaccel_device', '/dev/dri/renderD128',
                '-hwaccel_output_format', 'qsv'
              );
            } else if (capabilities.nvenc) {
              ffmpegArgs.unshift(
                '-hwaccel', 'cuda',
                '-hwaccel_output_format', 'cuda'
              );
            }
          }
        }

        logger.info(`Streaming channel ${channelId} with args:`, ffmpegArgs);

        const ffmpeg = spawn('ffmpeg', ffmpegArgs);

        res.setHeader('Content-Type', 'video/mp2t');
        res.setHeader('Cache-Control', 'no-cache');

        ffmpeg.stdout.pipe(res);

        // ffmpeg writes progress/diagnostics to stderr; keep at debug level
        ffmpeg.stderr.on('data', (data) => {
          logger.debug(`FFmpeg: ${data}`);
        });

        // 'error' fires if ffmpeg can't be spawned at all (e.g. not installed)
        ffmpeg.on('error', (error) => {
          logger.error('FFmpeg error:', error);
          if (!res.headersSent) {
            res.status(500).json({ error: 'Stream failed' });
          }
        });

        ffmpeg.on('close', (code) => {
          logger.info(`FFmpeg closed with code ${code}`);
        });

        // Kill the transcoder when the client disconnects so it doesn't
        // keep pulling the source stream
        req.on('close', () => {
          logger.info('Client disconnected, stopping stream');
          ffmpeg.kill('SIGTERM');
        });
      });
    }
  );
});
|
||||
|
||||
module.exports = router;
|
||||
331
backend/routes/twoFactor.js
Normal file
331
backend/routes/twoFactor.js
Normal file
|
|
@ -0,0 +1,331 @@
|
|||
const express = require('express');
|
||||
const router = express.Router();
|
||||
const speakeasy = require('speakeasy');
|
||||
const QRCode = require('qrcode');
|
||||
const crypto = require('crypto');
|
||||
const { authenticate } = require('../middleware/auth');
|
||||
const { modifyLimiter, authLimiter, readLimiter } = require('../middleware/rateLimiter');
|
||||
const { db } = require('../database/db');
|
||||
const logger = require('../utils/logger');
|
||||
const { promisify } = require('util');
|
||||
|
||||
const dbRun = promisify(db.run.bind(db));
|
||||
const dbGet = promisify(db.get.bind(db));
|
||||
const dbAll = promisify(db.all.bind(db));
|
||||
|
||||
// Generate a fresh TOTP secret for the caller and return it with a QR code.
// The secret is persisted immediately, but 2FA stays OFF until /enable
// verifies a first code against it.
router.post('/setup', authenticate, modifyLimiter, async (req, res) => {
  try {
    const userId = req.user.userId;

    // Refuse to regenerate a secret while 2FA is active
    const user = await dbGet('SELECT two_factor_enabled FROM users WHERE id = ?', [userId]);
    if (user.two_factor_enabled) {
      return res.status(400).json({ error: '2FA is already enabled' });
    }

    // Secret labelled with StreamFlow branding for authenticator apps
    const secret = speakeasy.generateSecret({
      name: `StreamFlow IPTV:${req.user.username || 'User'}`,
      issuer: 'StreamFlow'
    });

    // High error-correction QR code so it scans reliably on small screens
    const qrCode = await QRCode.toDataURL(secret.otpauth_url, {
      width: 300,
      margin: 2,
      color: {
        dark: '#000000',
        light: '#FFFFFF'
      },
      errorCorrectionLevel: 'H'
    });

    // Stash the secret; two_factor_enabled remains 0 until /enable succeeds
    await dbRun('UPDATE users SET two_factor_secret = ? WHERE id = ?', [secret.base32, userId]);

    res.json({
      secret: secret.base32,
      qrCode,
      manualEntryKey: secret.base32
    });
  } catch (error) {
    logger.error('2FA setup error:', error);
    res.status(500).json({ error: 'Failed to setup 2FA' });
  }
});
|
||||
|
||||
// Verify the user's first TOTP code against the stored secret, then switch
// 2FA on and mint ten single-use backup codes.
router.post('/enable', authenticate, authLimiter, async (req, res) => {
  try {
    const { token } = req.body;
    const userId = req.user.userId;

    if (!token) {
      return res.status(400).json({ error: 'Verification token required' });
    }

    // A secret must have been staged by /setup first
    const user = await dbGet(
      'SELECT two_factor_secret FROM users WHERE id = ?',
      [userId]
    );
    if (!user.two_factor_secret) {
      return res.status(400).json({ error: '2FA not set up. Call /setup first' });
    }

    // window: 2 tolerates two 30-second steps of clock drift either way
    const verified = speakeasy.totp.verify({
      secret: user.two_factor_secret,
      encoding: 'base32',
      token,
      window: 2
    });
    if (!verified) {
      return res.status(400).json({ error: 'Invalid verification code' });
    }

    // Mint ten backup codes (8 hex chars each) and persist each one
    const backupCodes = [];
    while (backupCodes.length < 10) {
      const code = crypto.randomBytes(4).toString('hex').toUpperCase();
      backupCodes.push(code);
      await dbRun(
        'INSERT INTO two_factor_backup_codes (user_id, code) VALUES (?, ?)',
        [userId, code]
      );
    }

    // Flip the flag only after codes are stored
    await dbRun('UPDATE users SET two_factor_enabled = 1 WHERE id = ?', [userId]);

    logger.info(`2FA enabled for user ${userId}`);

    res.json({
      success: true,
      message: '2FA enabled successfully',
      backupCodes
    });
  } catch (error) {
    logger.error('2FA enable error:', error);
    res.status(500).json({ error: 'Failed to enable 2FA' });
  }
});
|
||||
|
||||
// Disable 2FA. Requires the account password AND a current TOTP code.
router.post('/disable', authenticate, authLimiter, async (req, res) => {
  try {
    const { password, token } = req.body;
    const userId = req.user.userId;

    if (!password) {
      return res.status(400).json({ error: 'Password required to disable 2FA' });
    }

    // Use bcryptjs for consistency with the rest of the codebase — the
    // auth routes hash passwords with bcryptjs, and the native 'bcrypt'
    // package may not be a declared dependency at all.
    const bcrypt = require('bcryptjs');
    const user = await dbGet('SELECT password, two_factor_secret FROM users WHERE id = ?', [userId]);

    const validPassword = await bcrypt.compare(password, user.password);
    if (!validPassword) {
      return res.status(401).json({ error: 'Invalid password' });
    }

    // Guard: without a stored secret there is nothing to verify against
    // (and speakeasy would be handed a null secret).
    if (!user.two_factor_secret) {
      return res.status(400).json({ error: '2FA is not enabled' });
    }

    // Verify 2FA token (window: 2 allows +/- two 30s steps of drift)
    const verified = speakeasy.totp.verify({
      secret: user.two_factor_secret,
      encoding: 'base32',
      token: token,
      window: 2
    });

    if (!verified) {
      return res.status(400).json({ error: 'Invalid 2FA code' });
    }

    // Disable 2FA and remove the secret
    await dbRun(
      'UPDATE users SET two_factor_enabled = 0, two_factor_secret = NULL WHERE id = ?',
      [userId]
    );

    // Delete all backup codes so stale ones can't be replayed later
    await dbRun('DELETE FROM two_factor_backup_codes WHERE user_id = ?', [userId]);

    logger.info(`2FA disabled for user ${userId}`);

    res.json({ success: true, message: '2FA disabled successfully' });
  } catch (error) {
    logger.error('2FA disable error:', error);
    res.status(500).json({ error: 'Failed to disable 2FA' });
  }
});
|
||||
|
||||
// Verify 2FA token (for login)
//
// Unauthenticated by design: called mid-login after the password step has
// already succeeded. Accepts either a TOTP code or a single-use backup code.
router.post('/verify', authLimiter, async (req, res) => {
  try {
    const { userId, token } = req.body;

    if (!userId || !token) {
      return res.status(400).json({ error: 'User ID and token required' });
    }

    // Fix: JSON clients often send the 6-digit code as a number; calling
    // `.toUpperCase()` on a number threw a TypeError (500). Normalize once.
    const code = String(token);

    // Get user's secret
    const user = await dbGet(
      'SELECT two_factor_secret, two_factor_enabled FROM users WHERE id = ?',
      [userId]
    );

    if (!user || !user.two_factor_enabled) {
      return res.status(400).json({ error: '2FA not enabled for this user' });
    }

    // Check if it's a backup code (stored uppercase, single-use)
    const backupCode = await dbGet(
      'SELECT id FROM two_factor_backup_codes WHERE user_id = ? AND code = ? AND used = 0',
      [userId, code.toUpperCase()]
    );

    if (backupCode) {
      // Mark backup code as used
      await dbRun(
        'UPDATE two_factor_backup_codes SET used = 1, used_at = CURRENT_TIMESTAMP WHERE id = ?',
        [backupCode.id]
      );

      logger.info(`Backup code used for user ${userId}`);
      return res.json({ valid: true, method: 'backup_code' });
    }

    // Verify TOTP token (window: 2 tolerates clock drift)
    const verified = speakeasy.totp.verify({
      secret: user.two_factor_secret,
      encoding: 'base32',
      token: code,
      window: 2
    });

    if (verified) {
      return res.json({ valid: true, method: 'totp' });
    } else {
      return res.status(400).json({ error: 'Invalid 2FA code' });
    }
  } catch (error) {
    logger.error('2FA verify error:', error);
    res.status(500).json({ error: 'Failed to verify 2FA code' });
  }
});
|
||||
|
||||
// Get backup codes
//
// Returns every backup code belonging to the caller — including spent ones —
// so the UI can show which codes remain usable.
router.get('/backup-codes', authenticate, readLimiter, async (req, res) => {
  const sql =
    'SELECT code, used, used_at, created_at FROM two_factor_backup_codes WHERE user_id = ? ORDER BY created_at DESC';
  try {
    const rows = await dbAll(sql, [req.user.userId]);
    res.json(rows);
  } catch (err) {
    logger.error('Get backup codes error:', err);
    res.status(500).json({ error: 'Failed to retrieve backup codes' });
  }
});
|
||||
|
||||
// Regenerate backup codes
//
// Re-authenticates with password + current TOTP code, then replaces the
// caller's entire backup-code set with 10 fresh single-use codes.
router.post('/backup-codes/regenerate', authenticate, modifyLimiter, async (req, res) => {
  try {
    const userId = req.user.userId;
    const { password, token } = req.body;

    if (!password || !token) {
      return res.status(400).json({ error: 'Password and 2FA token required' });
    }

    // Verify password
    const bcrypt = require('bcrypt');
    const user = await dbGet('SELECT password, two_factor_secret FROM users WHERE id = ?', [userId]);
    const passwordOk = await bcrypt.compare(password, user.password);
    if (!passwordOk) {
      return res.status(401).json({ error: 'Invalid password' });
    }

    // Verify 2FA token
    const tokenOk = speakeasy.totp.verify({
      secret: user.two_factor_secret,
      encoding: 'base32',
      token: token,
      window: 2
    });
    if (!tokenOk) {
      return res.status(400).json({ error: 'Invalid 2FA code' });
    }

    // Delete old backup codes
    await dbRun('DELETE FROM two_factor_backup_codes WHERE user_id = ?', [userId]);

    // Generate new backup codes (8 uppercase hex chars each)
    const backupCodes = [];
    while (backupCodes.length < 10) {
      const code = crypto.randomBytes(4).toString('hex').toUpperCase();
      await dbRun(
        'INSERT INTO two_factor_backup_codes (user_id, code) VALUES (?, ?)',
        [userId, code]
      );
      backupCodes.push(code);
    }

    logger.info(`Backup codes regenerated for user ${userId}`);

    res.json({
      success: true,
      message: 'Backup codes regenerated',
      backupCodes: backupCodes
    });
  } catch (error) {
    logger.error('Backup codes regenerate error:', error);
    res.status(500).json({ error: 'Failed to regenerate backup codes' });
  }
});
|
||||
|
||||
// Check 2FA status
//
// Reports whether 2FA is enabled for the caller plus total/unused backup
// code counts.
router.get('/status', authenticate, readLimiter, async (req, res) => {
  try {
    const userId = req.user.userId;

    const user = await dbGet(
      'SELECT two_factor_enabled FROM users WHERE id = ?',
      [userId]
    );

    // Fix: if the user row vanished after the JWT was issued, the old code
    // crashed on `user.two_factor_enabled` and returned a misleading 500.
    if (!user) {
      return res.status(404).json({ error: 'User not found' });
    }

    // COUNT(*) always yields one row; SUM is NULL when there are no rows,
    // hence the `|| 0` fallbacks below.
    const backupCodesCount = await dbGet(
      'SELECT COUNT(*) as total, SUM(CASE WHEN used = 0 THEN 1 ELSE 0 END) as unused FROM two_factor_backup_codes WHERE user_id = ?',
      [userId]
    );

    res.json({
      enabled: !!user.two_factor_enabled,
      backupCodesTotal: backupCodesCount.total || 0,
      backupCodesUnused: backupCodesCount.unused || 0
    });
  } catch (error) {
    logger.error('2FA status error:', error);
    res.status(500).json({ error: 'Failed to get 2FA status' });
  }
});
|
||||
|
||||
module.exports = router;
|
||||
434
backend/routes/users.js
Normal file
434
backend/routes/users.js
Normal file
|
|
@ -0,0 +1,434 @@
|
|||
const express = require('express');
|
||||
const router = express.Router();
|
||||
const bcrypt = require('bcryptjs');
|
||||
const { body, validationResult } = require('express-validator');
|
||||
const { authenticate, requireAdmin } = require('../middleware/auth');
|
||||
const { modifyLimiter, readLimiter } = require('../middleware/rateLimiter');
|
||||
const { db } = require('../database/db');
|
||||
const logger = require('../utils/logger');
|
||||
const SecurityAuditLogger = require('../utils/securityAudit');
|
||||
|
||||
// Get all users (admin only)
//
// Returns every user row (password hashes excluded by the column list) and
// records the access in the security audit log (CWE-778).
router.get('/', readLimiter, authenticate, requireAdmin, async (req, res) => {
  const ip = req.ip || req.headers['x-forwarded-for'] || req.connection.remoteAddress;
  const userAgent = req.headers['user-agent'];

  const listSql = `SELECT id, username, email, role, is_active, created_at, updated_at, created_by,
            failed_login_attempts, last_failed_login, locked_until, last_login_at, last_login_ip,
            password_changed_at, password_expires_at
     FROM users
     ORDER BY created_at DESC`;

  db.all(listSql, [], async (err, users) => {
    if (err) {
      logger.error('Error fetching users:', err);
      return res.status(500).json({ error: 'Failed to fetch users' });
    }

    // CWE-778: Log sensitive data access
    await SecurityAuditLogger.logSensitiveDataAccess(req.user.userId, 'user_list', {
      ip,
      userAgent,
      recordCount: users.length,
      scope: 'all',
      accessMethod: 'view'
    });

    res.json(users);
  });
});
|
||||
|
||||
// Get single user (admin only)
//
// Fetches one user by id (password hash excluded) and audit-logs the access.
router.get('/:id', readLimiter, authenticate, requireAdmin, async (req, res) => {
  const ip = req.ip || req.headers['x-forwarded-for'] || req.connection.remoteAddress;
  const userAgent = req.headers['user-agent'];
  const targetId = req.params.id;

  const sql = `SELECT id, username, email, role, is_active, created_at, updated_at, created_by
     FROM users WHERE id = ?`;

  db.get(sql, [targetId], async (err, user) => {
    if (err) {
      logger.error('Error fetching user:', err);
      return res.status(500).json({ error: 'Failed to fetch user' });
    }
    if (!user) {
      return res.status(404).json({ error: 'User not found' });
    }

    // CWE-778: Log sensitive data access
    await SecurityAuditLogger.logSensitiveDataAccess(req.user.userId, 'user_details', {
      ip,
      userAgent,
      recordCount: 1,
      scope: 'specific',
      accessMethod: 'view',
      filters: { userId: targetId }
    });

    res.json(user);
  });
});
|
||||
|
||||
// Create user (admin only)
//
// New accounts are created with must_change_password = 1 so the initial
// admin-chosen password must be rotated on first login. The creation is
// audit-logged (CWE-778) and the freshly inserted row is returned.
router.post('/',
  modifyLimiter,
  authenticate,
  requireAdmin,
  [
    body('username').trim().isLength({ min: 3, max: 50 }).isAlphanumeric(),
    body('email').isEmail().normalizeEmail(),
    body('password').isLength({ min: 8 }),
    body('role').isIn(['user', 'admin'])
  ],
  async (req, res) => {
    const errors = validationResult(req);
    if (!errors.isEmpty()) {
      return res.status(400).json({ errors: errors.array() });
    }

    const { username, email, password, role } = req.body;

    try {
      const ip = req.ip || req.headers['x-forwarded-for'] || req.connection.remoteAddress;
      const userAgent = req.headers['user-agent'];
      const passwordHash = await bcrypt.hash(password, 10);

      const insertSql = `INSERT INTO users (username, email, password, role, must_change_password, created_by)
         VALUES (?, ?, ?, ?, ?, ?)`;

      // Plain function (not arrow): sqlite3 exposes this.lastID on it.
      db.run(insertSql, [username, email, passwordHash, role, 1, req.user.userId], async function(err) {
        if (err) {
          if (err.message.includes('UNIQUE')) {
            return res.status(400).json({ error: 'Username or email already exists' });
          }
          logger.error('User creation error:', err);
          return res.status(500).json({ error: 'Failed to create user' });
        }

        const newUserId = this.lastID;

        // CWE-778: Log admin activity
        await SecurityAuditLogger.logAdminActivity(req.user.userId, 'user_created', {
          ip,
          userAgent,
          targetUserId: newUserId,
          targetUsername: username,
          adminUsername: req.user.username || 'admin',
          changes: { username, email, role }
        });

        // Echo back the created row (without the password hash).
        db.get(
          `SELECT id, username, email, role, is_active, created_at, created_by
           FROM users WHERE id = ?`,
          [newUserId],
          (fetchErr, user) => {
            if (fetchErr) {
              return res.status(500).json({ error: 'User created but failed to fetch details' });
            }
            res.status(201).json(user);
          }
        );
      });
    } catch (error) {
      logger.error('User creation error:', error);
      res.status(500).json({ error: 'Failed to create user' });
    }
  }
);
|
||||
|
||||
// Update user (admin only)
//
// PATCH /:id — partial update of username/email/role/is_active. Only
// whitelisted fields are ever appended to the UPDATE statement and all
// values are bound as parameters, so the dynamically built SQL is not
// injectable. Role and status changes are additionally audit-logged.
router.patch('/:id',
  modifyLimiter,
  authenticate,
  requireAdmin,
  [
    body('username').optional().trim().isLength({ min: 3, max: 50 }).isAlphanumeric(),
    body('email').optional().isEmail().normalizeEmail(),
    body('role').optional().isIn(['user', 'admin']),
    body('is_active').optional().isBoolean()
  ],
  async (req, res) => {
    const errors = validationResult(req);
    if (!errors.isEmpty()) {
      return res.status(400).json({ errors: errors.array() });
    }

    const { id } = req.params;
    const updates = [];  // SQL fragments, e.g. 'username = ?'
    const values = [];   // bound parameters, kept parallel to `updates`

    // Build dynamic update query
    if (req.body.username !== undefined) {
      updates.push('username = ?');
      values.push(req.body.username);
    }
    if (req.body.email !== undefined) {
      updates.push('email = ?');
      values.push(req.body.email);
    }
    // Check if role or is_active is being changed (for audit logging)
    const isRoleChange = req.body.role !== undefined;
    const isStatusChange = req.body.is_active !== undefined;

    if (req.body.role !== undefined) {
      updates.push('role = ?');
      values.push(req.body.role);
    }
    if (req.body.is_active !== undefined) {
      updates.push('is_active = ?');
      // Booleans are stored as 0/1 integers in the users table.
      values.push(req.body.is_active ? 1 : 0);
    }

    if (updates.length === 0) {
      return res.status(400).json({ error: 'No valid fields to update' });
    }

    // Get current user data for audit logging
    db.get('SELECT role, is_active, username FROM users WHERE id = ?', [id], async (err, existingUser) => {
      if (err || !existingUser) {
        return res.status(404).json({ error: 'User not found' });
      }

      updates.push('updated_at = CURRENT_TIMESTAMP');
      // Final bound value fills the trailing `WHERE id = ?` placeholder.
      values.push(id);

      db.run(
        `UPDATE users SET ${updates.join(', ')} WHERE id = ?`,
        values,
        // Plain function (not arrow): the sqlite3 driver binds this.changes on it.
        async function(err) {
          if (err) {
            if (err.message.includes('UNIQUE')) {
              return res.status(400).json({ error: 'Username or email already exists' });
            }
            logger.error('User update error:', err);
            return res.status(500).json({ error: 'Failed to update user' });
          }

          if (this.changes === 0) {
            return res.status(404).json({ error: 'User not found' });
          }

          const ip = req.ip || req.headers['x-forwarded-for'] || req.connection.remoteAddress;
          const userAgent = req.headers['user-agent'];

          // CWE-778: Log privilege changes if role changed
          // (only when the new role actually differs from the stored one).
          if (isRoleChange && req.body.role !== existingUser.role) {
            await SecurityAuditLogger.logPrivilegeChange(parseInt(id), 'role_change', {
              ip,
              userAgent,
              previousRole: existingUser.role,
              newRole: req.body.role,
              changedBy: req.user.userId,
              changedByUsername: req.user.username || 'system',
              targetUsername: existingUser.username
            });
          }

          // CWE-778: Log account status changes
          // (compares the boolean request value against the stored 0/1 flag).
          if (isStatusChange && req.body.is_active !== (existingUser.is_active === 1)) {
            const newStatus = req.body.is_active ? 'active' : 'inactive';
            await SecurityAuditLogger.logAccountStatusChange(parseInt(id), newStatus, {
              ip,
              userAgent,
              previousStatus: existingUser.is_active === 1 ? 'active' : 'inactive',
              changedBy: req.user.userId,
              changedByUsername: req.user.username || 'system',
              targetUsername: existingUser.username,
              reason: 'admin_action'
            });
          }

          // Return the updated row (password hash excluded by the column list).
          db.get(
            `SELECT id, username, email, role, is_active, created_at, updated_at, created_by
             FROM users WHERE id = ?`,
            [id],
            (err, user) => {
              if (err) {
                return res.status(500).json({ error: 'User updated but failed to fetch details' });
              }
              res.json(user);
            }
          );
        }
      );
    });
  }
);
|
||||
|
||||
// Reset user password (admin only)
//
// Sets a new password hash and forces the user to change it on next login
// (must_change_password = 1). The reset is audit-logged (CWE-778).
router.post('/:id/reset-password',
  modifyLimiter,
  authenticate,
  requireAdmin,
  [
    body('newPassword').isLength({ min: 8 })
  ],
  async (req, res) => {
    const errors = validationResult(req);
    if (!errors.isEmpty()) {
      return res.status(400).json({ errors: errors.array() });
    }

    const { id } = req.params;
    const { newPassword } = req.body;

    try {
      const ip = req.ip || req.headers['x-forwarded-for'] || req.connection.remoteAddress;
      const userAgent = req.headers['user-agent'];

      // Get user info first (username is needed for the audit entry)
      db.get('SELECT username FROM users WHERE id = ?', [id], async (lookupErr, user) => {
        if (lookupErr || !user) {
          return res.status(404).json({ error: 'User not found' });
        }

        const passwordHash = await bcrypt.hash(newPassword, 10);

        db.run(
          'UPDATE users SET password = ?, must_change_password = 1, updated_at = CURRENT_TIMESTAMP WHERE id = ?',
          [passwordHash, id],
          // Plain function (not arrow): sqlite3 exposes this.changes on it.
          async function(err) {
            if (err) {
              logger.error('Password reset error:', err);
              return res.status(500).json({ error: 'Failed to reset password' });
            }
            if (this.changes === 0) {
              return res.status(404).json({ error: 'User not found' });
            }

            // CWE-778: Log admin activity
            await SecurityAuditLogger.logAdminActivity(req.user.userId, 'password_reset', {
              ip,
              userAgent,
              targetUserId: id,
              targetUsername: user.username,
              adminUsername: req.user.username || 'admin',
              reason: 'admin_initiated'
            });

            res.json({ message: 'Password reset successfully. User must change password on next login.' });
          }
        );
      });
    } catch (error) {
      logger.error('Password reset error:', error);
      res.status(500).json({ error: 'Failed to reset password' });
    }
  }
);
|
||||
|
||||
// Unlock account (admin only)
//
// Clears the lockout timestamp and failed-attempt counter, audit-logging
// the previous lock state alongside the action (CWE-778).
router.post('/:id/unlock', modifyLimiter, authenticate, requireAdmin, async (req, res) => {
  const { id } = req.params;
  const ip = req.ip || req.headers['x-forwarded-for'] || req.connection.remoteAddress;
  const userAgent = req.headers['user-agent'];

  try {
    // Get user info first (username + prior lock state for the audit entry)
    db.get('SELECT username, locked_until FROM users WHERE id = ?', [id], async (lookupErr, user) => {
      if (lookupErr || !user) {
        return res.status(404).json({ error: 'User not found' });
      }

      db.run(
        'UPDATE users SET locked_until = NULL, failed_login_attempts = 0 WHERE id = ?',
        [id],
        // Plain function (not arrow): sqlite3 exposes this.changes on it.
        async function(err) {
          if (err) {
            logger.error('Account unlock error:', err);
            return res.status(500).json({ error: 'Failed to unlock account' });
          }
          if (this.changes === 0) {
            return res.status(404).json({ error: 'User not found' });
          }

          // CWE-778: Log admin activity
          await SecurityAuditLogger.logAdminActivity(req.user.userId, 'account_unlocked', {
            ip,
            userAgent,
            targetUserId: id,
            targetUsername: user.username,
            adminUsername: req.user.username || 'admin',
            changes: { locked_until: user.locked_until, failed_login_attempts: 0 },
            reason: 'admin_unlock'
          });

          logger.info(`Admin ${req.user.userId} unlocked account ${id}`);
          res.json({ message: 'Account unlocked successfully' });
        }
      );
    });
  } catch (error) {
    logger.error('Account unlock error:', error);
    res.status(500).json({ error: 'Failed to unlock account' });
  }
});
|
||||
|
||||
// Delete user (admin only)
//
// Guard rails: an admin cannot delete their own account, and the last
// active admin account cannot be deleted. Deletions are audit-logged
// with the removed row's identifying fields (CWE-778).
router.delete('/:id', modifyLimiter, authenticate, requireAdmin, async (req, res) => {
  const { id } = req.params;
  const ip = req.ip || req.headers['x-forwarded-for'] || req.connection.remoteAddress;
  const userAgent = req.headers['user-agent'];

  // Prevent deleting yourself. Fix: parseInt now gets an explicit radix —
  // route params are strings and the base should never be inferred.
  if (parseInt(id, 10) === req.user.userId) {
    return res.status(400).json({ error: 'Cannot delete your own account' });
  }

  // Check if this is the last admin
  db.get(
    "SELECT COUNT(*) as count FROM users WHERE role = 'admin' AND is_active = 1",
    [],
    (err, result) => {
      if (err) {
        logger.error('Error checking admin count:', err);
        return res.status(500).json({ error: 'Failed to delete user' });
      }

      db.get('SELECT username, email, role FROM users WHERE id = ?', [id], async (err, user) => {
        if (err || !user) {
          return res.status(404).json({ error: 'User not found' });
        }

        // NOTE(review): the count covers *active* admins only, so deleting an
        // already-inactive admin is also blocked while a single active admin
        // remains — conservative behavior, preserved as-is.
        if (user.role === 'admin' && result.count <= 1) {
          return res.status(400).json({ error: 'Cannot delete the last admin account' });
        }

        // Plain function (not arrow): sqlite3 exposes this.changes on it.
        db.run('DELETE FROM users WHERE id = ?', [id], async function(err) {
          if (err) {
            logger.error('User deletion error:', err);
            return res.status(500).json({ error: 'Failed to delete user' });
          }

          if (this.changes === 0) {
            return res.status(404).json({ error: 'User not found' });
          }

          // CWE-778: Log admin activity - user deletion
          await SecurityAuditLogger.logAdminActivity(req.user.userId, 'user_deleted', {
            ip,
            userAgent,
            targetUserId: id,
            targetUsername: user.username,
            adminUsername: req.user.username || 'admin',
            changes: { deleted: { username: user.username, email: user.email, role: user.role } },
            reason: 'admin_deletion'
          });

          res.json({ message: 'User deleted successfully' });
        });
      });
    }
  );
});
|
||||
|
||||
module.exports = router;
|
||||
738
backend/routes/vpn-configs.js
Normal file
738
backend/routes/vpn-configs.js
Normal file
|
|
@ -0,0 +1,738 @@
|
|||
const express = require('express');
|
||||
const router = express.Router();
|
||||
const { authenticate } = require('../middleware/auth');
|
||||
const { db } = require('../database/db');
|
||||
const { spawn } = require('child_process');
|
||||
const fs = require('fs').promises;
|
||||
const path = require('path');
|
||||
const { modifyLimiter, readLimiter } = require('../middleware/rateLimiter');
|
||||
const encryption = require('../utils/encryption');
|
||||
|
||||
// Use centralized encryption module for CWE-311 compliance
// Thin aliases so the rest of this file reads encrypt()/decrypt() while the
// actual cipher implementation lives in ../utils/encryption.
const encrypt = (data) => encryption.encryptVPN(data);
const decrypt = (data) => encryption.decryptVPN(data);
|
||||
|
||||
// Parse WireGuard config
//
// Splits the .conf text into [Interface]/[Peer] key-value maps, enforces
// size/injection guards, validates the minimum required fields, and derives
// display metadata (country, server name, endpoint). Throws on invalid input.
function parseWireGuardConfig(content) {
  // Security: Limit content size
  if (content.length > 100000) {
    throw new Error('Configuration file too large');
  }

  // Security: Check for script injection attempts
  const injectionMarkers = ['<script', '${', 'eval('];
  if (injectionMarkers.some((marker) => content.includes(marker))) {
    throw new Error('Invalid configuration content detected');
  }

  const parsed = {
    type: 'wireguard',
    interface: {},
    peer: {}
  };
  let section = null;

  for (const rawLine of content.split('\n')) {
    const line = rawLine.trim();

    if (line === '[Interface]') {
      section = 'interface';
      continue;
    }
    if (line === '[Peer]') {
      section = 'peer';
      continue;
    }
    // Skip blanks, comments, and anything before the first section header.
    if (!line || line.startsWith('#') || !section) {
      continue;
    }

    const eqIndex = line.indexOf('=');
    if (eqIndex <= 0) {
      continue; // no '=' or empty key — ignore the line
    }
    const key = line.slice(0, eqIndex).trim();
    const value = line.slice(eqIndex + 1).trim();
    // Security: Sanitize values, no shell commands
    if (value.includes('$(') || value.includes('`') || value.includes(';')) {
      throw new Error('Invalid configuration value detected');
    }
    parsed[section][key] = value;
  }

  // Validate required fields
  console.log('[VPN-CONFIG] Parsed interface:', Object.keys(parsed.interface));
  console.log('[VPN-CONFIG] Parsed peer:', Object.keys(parsed.peer));

  if (!parsed.interface.PrivateKey || !parsed.peer.PublicKey || !parsed.peer.Endpoint) {
    console.log('[VPN-CONFIG] Missing fields - PrivateKey:', !!parsed.interface.PrivateKey, 'PublicKey:', !!parsed.peer.PublicKey, 'Endpoint:', !!parsed.peer.Endpoint);
    throw new Error('Missing required WireGuard configuration fields');
  }

  // Extract metadata
  const endpoint = parsed.peer.Endpoint || '';
  return {
    type: 'wireguard',
    country: extractCountryFromEndpoint(endpoint),
    serverName: endpoint.split(':')[0],
    endpoint,
    data: parsed
  };
}
|
||||
|
||||
// Parse OpenVPN config
//
// Extracts the `remote host [port] [proto]` directive (last one wins when
// several are present) and keeps the raw config text as the stored payload.
// Enforces the same size/injection guards as the WireGuard parser plus a
// blocklist of shell-executing directives.
function parseOpenVPNConfig(content) {
  // Security: Limit content size
  if (content.length > 100000) {
    throw new Error('Configuration file too large');
  }

  // Security: Check for script injection attempts
  const injectionMarkers = ['<script', '${', 'eval('];
  if (injectionMarkers.some((marker) => content.includes(marker))) {
    throw new Error('Invalid configuration content detected');
  }

  // Security: Check for dangerous directives
  const dangerousDirectives = ['script-security 3', 'up /bin/sh', 'down /bin/sh', 'route-up /bin/sh'];
  if (dangerousDirectives.some((directive) => content.includes(directive))) {
    throw new Error('Configuration contains potentially dangerous directives');
  }

  const parsed = {
    type: 'openvpn',
    remote: null,
    port: null,
    proto: null
  };

  for (const rawLine of content.split('\n')) {
    const line = rawLine.trim();
    if (!line.startsWith('remote ')) {
      continue;
    }
    const [, host, port, proto] = line.split(/\s+/);
    parsed.remote = host;
    parsed.port = port || '1194';   // OpenVPN default port
    parsed.proto = proto || 'udp';  // OpenVPN default protocol
  }

  // Validate required fields
  if (!parsed.remote) {
    throw new Error('Missing required OpenVPN remote server');
  }

  // Extract metadata
  const country = extractCountryFromEndpoint(parsed.remote);
  const serverName = parsed.remote || 'Unknown';

  return {
    type: 'openvpn',
    country,
    serverName,
    endpoint: `${parsed.remote}:${parsed.port}`,
    proto: parsed.proto,
    data: { config: content }
  };
}
|
||||
|
||||
// Extract country code from endpoint/hostname
//
// Returns an uppercase ISO-style two-letter code or null. First tries a
// hostname pattern (e.g. "us-01.server.com", "node-us-01"), then a small
// table of known IP prefixes.
function extractCountryFromEndpoint(endpoint) {
  if (!endpoint) return null;

  // Common patterns: us-01.server.com, node-us-01, 185.107.57.98 (Romania)
  // Fix: the original pattern required a leading '-'/'_' delimiter, so a
  // country code at the start of the host ("us-01.server.com") was missed.
  // The (?:^|[-_]) alternative covers both placements.
  const countryMatch = endpoint.match(/(?:^|[-_])([a-z]{2})[-_\d]/i);
  if (countryMatch) {
    return countryMatch[1].toUpperCase();
  }

  // Known IP ranges (basic prefix lookup)
  const ipRanges = {
    '185.107.57': 'RO',
    '185.163.110': 'RO',
    '169.150': 'US',
    '103.107': 'JP'
  };

  for (const [range, country] of Object.entries(ipRanges)) {
    if (endpoint.startsWith(range)) {
      return country;
    }
  }

  return null;
}
|
||||
|
||||
// Get all VPN configs for user
//
// Also reconciles stale "active" flags: for each config marked active, the
// real connection state is checked and the DB row is flipped back to
// inactive when the tunnel is no longer up. The loop awaits sequentially,
// matching the original behavior.
router.get('/configs', authenticate, readLimiter, async (req, res) => {
  try {
    const configs = await new Promise((resolve, reject) => {
      db.all(
        `SELECT id, name, config_type, country, server_name, endpoint, is_active, created_at
         FROM vpn_configs
         WHERE user_id = ?
         ORDER BY is_active DESC, created_at DESC`,
        [req.user.userId],
        (err, rows) => (err ? reject(err) : resolve(rows || []))
      );
    });

    // Verify actual VPN connection status for active configs
    for (const config of configs) {
      if (!config.is_active) {
        continue;
      }
      const actuallyConnected = await checkVPNConnection(config.config_type, req.user.userId);
      if (actuallyConnected) {
        continue;
      }

      // Update database to reflect actual state
      await new Promise((resolve, reject) => {
        db.run(
          'UPDATE vpn_configs SET is_active = 0 WHERE id = ?',
          [config.id],
          (err) => (err ? reject(err) : resolve())
        );
      });
      config.is_active = 0;
      console.log(`[VPN-CONFIG] Reset stale active state for config ${config.id}`);
    }

    res.json({ configs });
  } catch (err) {
    console.error('Error fetching VPN configs:', err);
    res.status(500).json({ error: 'Failed to fetch VPN configurations' });
  }
});
|
||||
|
||||
// Get specific config
//
// Returns one config owned by the caller, with the stored payload decrypted
// and re-serialized as a JSON string in `config_data`.
router.get('/configs/:id', authenticate, readLimiter, (req, res) => {
  const sql = `SELECT id, name, config_type, config_data, country, server_name, endpoint, is_active, created_at
     FROM vpn_configs
     WHERE id = ? AND user_id = ?`;

  db.get(sql, [req.params.id, req.user.userId], (err, config) => {
    if (err) {
      console.error('Error fetching VPN config:', err);
      return res.status(500).json({ error: 'Failed to fetch configuration' });
    }
    if (!config) {
      return res.status(404).json({ error: 'Configuration not found' });
    }

    // Decrypt config data
    try {
      config.config_data = JSON.stringify(decrypt(config.config_data));
    } catch (decryptErr) {
      console.error('Error decrypting config:', decryptErr);
      return res.status(500).json({ error: 'Failed to decrypt configuration' });
    }

    res.json({ config });
  });
});
|
||||
|
||||
// Upload VPN config file
//
// Accepts a .conf (WireGuard) or .ovpn (OpenVPN) file, validates size/name/
// extension, parses it, encrypts the payload (CWE-311), and stores it
// inactive for the calling user.
router.post('/configs/upload', authenticate, modifyLimiter, async (req, res) => {
  try {
    console.log('[VPN-CONFIG] Upload request received');
    console.log('[VPN-CONFIG] Files:', req.files ? Object.keys(req.files) : 'none');
    // CWE-532: Do not log request body - may contain sensitive VPN credentials

    if (!req.files || !req.files.config) {
      return res.status(400).json({ error: 'No file uploaded' });
    }

    const uploadedFile = req.files.config;

    // Validate file size (1MB max)
    if (uploadedFile.size > 1024 * 1024) {
      return res.status(400).json({ error: 'File too large (max 1MB)' });
    }

    // Validate file extension
    const ext = path.extname(uploadedFile.name).toLowerCase();
    if (ext !== '.conf' && ext !== '.ovpn') {
      return res.status(400).json({ error: 'Only .conf and .ovpn files are allowed' });
    }

    const { name } = req.body;
    if (!name || name.trim().length === 0) {
      return res.status(400).json({ error: 'Configuration name is required' });
    }

    // Validate name
    if (!/^[a-zA-Z0-9\s\-_.()]+$/.test(name)) {
      return res.status(400).json({ error: 'Invalid configuration name. Use only letters, numbers, spaces, and common punctuation.' });
    }

    if (name.length > 100) {
      return res.status(400).json({ error: 'Configuration name too long (max 100 characters)' });
    }

    // Read file content from temp file
    const content = await fs.readFile(uploadedFile.tempFilePath, 'utf8');
    console.log('[VPN-CONFIG] File extension:', ext);
    console.log('[VPN-CONFIG] Content length:', content.length);
    // CWE-532 fix: removed the preview log of the first 200 chars — for
    // WireGuard .conf files that window includes the cleartext [Interface]
    // PrivateKey. Never log config content.

    // Parse based on file type
    let parsed;
    if (ext === '.conf') {
      parsed = parseWireGuardConfig(content);
    } else if (ext === '.ovpn') {
      parsed = parseOpenVPNConfig(content);
    } else {
      return res.status(400).json({ error: 'Unsupported file format' });
    }

    // Encrypt config data using centralized encryption (CWE-311)
    const encryptedData = encrypt(parsed.data);

    // Save to database (plain function: sqlite3 exposes this.lastID on it)
    db.run(
      `INSERT INTO vpn_configs (user_id, name, config_type, config_data, country, server_name, endpoint, is_active)
       VALUES (?, ?, ?, ?, ?, ?, ?, 0)`,
      [req.user.userId, name.trim(), parsed.type, encryptedData, parsed.country, parsed.serverName, parsed.endpoint],
      function(err) {
        if (err) {
          console.error('Error saving VPN config:', err);
          return res.status(500).json({ error: 'Failed to save configuration' });
        }

        res.json({
          message: 'Configuration uploaded successfully',
          config: {
            id: this.lastID,
            name: name.trim(),
            type: parsed.type,
            country: parsed.country,
            serverName: parsed.serverName,
            endpoint: parsed.endpoint
          }
        });
      }
    );
  } catch (error) {
    console.error('[VPN-CONFIG] Error processing config upload:', error);
    console.error('[VPN-CONFIG] Error stack:', error.stack);
    res.status(500).json({ error: error.message || 'Failed to process configuration file' });
  }
});
|
||||
|
||||
// Delete config
//
// Ownership-scoped delete; refuses to remove a config that is currently
// marked active (the user must disconnect first).
router.delete('/configs/:id', authenticate, modifyLimiter, (req, res) => {
  const ownerKey = [req.params.id, req.user.userId];

  // First check if config is active
  db.get(
    'SELECT is_active FROM vpn_configs WHERE id = ? AND user_id = ?',
    ownerKey,
    (err, config) => {
      if (err) {
        console.error('Error checking config:', err);
        return res.status(500).json({ error: 'Failed to delete configuration' });
      }
      if (!config) {
        return res.status(404).json({ error: 'Configuration not found' });
      }
      if (config.is_active) {
        return res.status(400).json({ error: 'Cannot delete active configuration. Disconnect first.' });
      }

      // Delete the config
      db.run(
        'DELETE FROM vpn_configs WHERE id = ? AND user_id = ?',
        ownerKey,
        function(deleteErr) {
          if (deleteErr) {
            console.error('Error deleting config:', deleteErr);
            return res.status(500).json({ error: 'Failed to delete configuration' });
          }
          res.json({ message: 'Configuration deleted successfully' });
        }
      );
    }
  );
});
|
||||
|
||||
// Set active config
// Marks one of the caller's configs as the single active one.
// The target config is verified BEFORE the others are deactivated: the
// previous version deactivated every config first and then 404'd on a bad
// id, leaving the user with no active configuration as a side effect of a
// failed request.
router.post('/configs/:id/activate', authenticate, modifyLimiter, (req, res) => {
  db.serialize(() => {
    // 1) Confirm the config exists and belongs to the caller.
    db.get(
      'SELECT id FROM vpn_configs WHERE id = ? AND user_id = ?',
      [req.params.id, req.user.userId],
      (err, row) => {
        if (err) {
          console.error('Error activating config:', err);
          return res.status(500).json({ error: 'Failed to activate configuration' });
        }

        if (!row) {
          return res.status(404).json({ error: 'Configuration not found' });
        }

        // 2) Deactivate all configs for user
        db.run(
          'UPDATE vpn_configs SET is_active = 0 WHERE user_id = ?',
          [req.user.userId],
          (deactivateErr) => {
            if (deactivateErr) {
              console.error('Error deactivating configs:', deactivateErr);
              return res.status(500).json({ error: 'Failed to activate configuration' });
            }

            // 3) Activate the selected config
            db.run(
              'UPDATE vpn_configs SET is_active = 1 WHERE id = ? AND user_id = ?',
              [req.params.id, req.user.userId],
              (activateErr) => {
                if (activateErr) {
                  console.error('Error activating config:', activateErr);
                  return res.status(500).json({ error: 'Failed to activate configuration' });
                }

                res.json({ message: 'Configuration activated successfully' });
              }
            );
          }
        );
      }
    );
  });
});
|
||||
|
||||
// Connect using config
// Brings up a VPN tunnel from one of the caller's saved configurations,
// then records it as the single active config for the user.
router.post('/configs/:id/connect', authenticate, modifyLimiter, async (req, res) => {
  try {
    // Fetch the (encrypted) config, scoped to the authenticated user.
    const config = await new Promise((resolve, reject) => {
      db.get(
        'SELECT config_type, config_data FROM vpn_configs WHERE id = ? AND user_id = ?',
        [req.params.id, req.user.userId],
        (err, row) => {
          if (err) reject(err);
          else resolve(row);
        }
      );
    });

    if (!config) {
      return res.status(404).json({ error: 'Configuration not found' });
    }

    // Decrypt config
    const decryptedData = decrypt(config.config_data);

    console.log(`[VPN-CONFIG] Connecting config ${req.params.id} for user ${req.user.userId}`);

    // Connect based on type. Unknown types are rejected explicitly —
    // previously they fell through silently and the config was still
    // marked active below without any tunnel being established.
    if (config.config_type === 'wireguard') {
      await connectWireGuard(decryptedData, req.user.userId);
    } else if (config.config_type === 'openvpn') {
      await connectOpenVPN(decryptedData, req.user.userId);
    } else {
      return res.status(400).json({ error: 'Unsupported configuration type' });
    }

    console.log(`[VPN-CONFIG] Successfully connected, updating database`);

    // Deactivate all other configs for this user first
    await new Promise((resolve, reject) => {
      db.run(
        'UPDATE vpn_configs SET is_active = 0 WHERE user_id = ?',
        [req.user.userId],
        (err) => (err ? reject(err) : resolve())
      );
    });

    // Mark this config as active
    await new Promise((resolve, reject) => {
      db.run(
        'UPDATE vpn_configs SET is_active = 1 WHERE id = ? AND user_id = ?',
        [req.params.id, req.user.userId],
        (err) => (err ? reject(err) : resolve())
      );
    });

    console.log(`[VPN-CONFIG] Configuration marked as active for user ${req.user.userId}`);
    res.json({ message: 'Connected to VPN successfully', success: true });
  } catch (error) {
    console.error('Error connecting to VPN:', error);
    res.status(500).json({ error: error.message || 'Failed to connect to VPN' });
  }
});
|
||||
|
||||
// Disconnect VPN
// Tears down the active tunnel for the given config and marks it inactive.
// If no active config matches (stale state), a best-effort cleanup of BOTH
// tunnel types is attempted — the previous version only cleaned up
// WireGuard here, so a stale OpenVPN daemon was left running.
router.post('/configs/:id/disconnect', authenticate, modifyLimiter, async (req, res) => {
  // Marks the requested config inactive for this user.
  const markInactive = () =>
    new Promise((resolve, reject) => {
      db.run(
        'UPDATE vpn_configs SET is_active = 0 WHERE id = ? AND user_id = ?',
        [req.params.id, req.user.userId],
        (err) => (err ? reject(err) : resolve())
      );
    });

  try {
    // Get config
    const config = await new Promise((resolve, reject) => {
      db.get(
        'SELECT config_type FROM vpn_configs WHERE id = ? AND user_id = ? AND is_active = 1',
        [req.params.id, req.user.userId],
        (err, row) => {
          if (err) reject(err);
          else resolve(row);
        }
      );
    });

    if (!config) {
      // No active config found, but still try to clean up any interfaces
      // or daemons of either type.
      console.log('[VPN] No active config found, attempting cleanup anyway');
      try {
        await disconnectWireGuard(req.user.userId);
      } catch (e) {
        // Ignore errors during cleanup
      }
      try {
        await disconnectOpenVPN(req.user.userId);
      } catch (e) {
        // Ignore errors during cleanup
      }
      // Mark as inactive regardless
      await markInactive();
      return res.json({ message: 'VPN state cleaned up' });
    }

    // Disconnect based on type
    if (config.config_type === 'wireguard') {
      await disconnectWireGuard(req.user.userId);
    } else if (config.config_type === 'openvpn') {
      await disconnectOpenVPN(req.user.userId);
    }

    // Mark as inactive
    await markInactive();

    res.json({ message: 'Disconnected from VPN successfully' });
  } catch (error) {
    console.error('Error disconnecting from VPN:', error);
    res.status(500).json({ error: error.message || 'Failed to disconnect from VPN' });
  }
});
|
||||
|
||||
// Helper: Connect WireGuard
// Writes a wg-quick config for the per-user interface `wg<userId>` and
// brings it up. The generated config implements split tunnelling:
//  - traffic to the Docker network (172.20.0.0/16) and 192.168.0.0/16
//    stays on eth0 (policy rules at priority 50/51),
//  - everything else is fwmark-routed through the tunnel (table 51820).
// DNS is applied by rewriting /etc/resolv.conf in PostUp (and restored in
// PreDown) instead of wg-quick's DNS= key, to avoid conflicts with
// Docker's embedded DNS.
//
// `config` is the parsed structure from parseWireGuardConfig:
//   { interface: { PrivateKey, Address, DNS? },
//     peer: { PublicKey, AllowedIPs?, Endpoint, PersistentKeepalive? } }
async function connectWireGuard(config, userId) {
  const interfaceName = `wg${userId}`;
  const confPath = `/etc/wireguard/${interfaceName}.conf`;

  // Extract the VPN endpoint IP to exclude from the tunnel (prevents a
  // routing loop where packets to the VPN server are routed into the
  // tunnel itself). This value is interpolated into shell-executed
  // PostUp/PreDown lines, so accept only a plain IPv4 literal — anything
  // else is discarded (command-injection hardening: the endpoint comes
  // from a user-uploaded config file).
  const rawEndpointIP = config.peer.Endpoint ? config.peer.Endpoint.split(':')[0] : null;
  const vpnEndpointIP =
    rawEndpointIP && /^\d{1,3}(\.\d{1,3}){3}$/.test(rawEndpointIP) ? rawEndpointIP : null;

  // Use DNS from config (VPN provider's DNS) with public DNS fallbacks
  const dnsServers = config.interface.DNS || '1.1.1.1';
  const primaryDNS = dnsServers.split(',')[0].trim();

  // NOTE: a previous revision carried a `PreDown = ip rule del to
  // 10.0.0.0/8 ... priority 52` line with no matching PostUp `add`; the
  // rule never exists, so that PreDown always failed and could abort
  // `wg-quick down`. It has been removed to keep PostUp/PreDown symmetric.
  const wgConfig = `[Interface]
PrivateKey = ${config.interface.PrivateKey}
Address = ${config.interface.Address}
Table = off
FwMark = 0xca6c
PostUp = ip route add default dev %i table 51820${vpnEndpointIP ? `
PostUp = ip route add ${vpnEndpointIP}/32 via 172.20.0.1 dev eth0` : ''}
PostUp = ip rule add to 172.20.0.0/16 table main priority 50
PostUp = ip rule add to 192.168.0.0/16 table main priority 51
PostUp = ip rule add not fwmark 0xca6c table 51820 priority 100
PostUp = ip route replace default via 172.20.0.1 dev eth0 metric 200
PostUp = ip route add default dev %i metric 50
PostUp = cp /etc/resolv.conf /etc/resolv.conf.vpn-backup
PostUp = echo 'nameserver ${primaryDNS}' > /etc/resolv.conf
PostUp = echo 'nameserver 1.1.1.1' >> /etc/resolv.conf
PostUp = echo 'nameserver 8.8.8.8' >> /etc/resolv.conf
PreDown = ip route del default dev %i metric 50
PreDown = ip route replace default via 172.20.0.1 dev eth0 metric 0
PreDown = ip rule del to 172.20.0.0/16 table main priority 50
PreDown = ip rule del to 192.168.0.0/16 table main priority 51
PreDown = ip rule del not fwmark 0xca6c table 51820 priority 100
PreDown = ip route del default dev %i table 51820${vpnEndpointIP ? `
PreDown = ip route del ${vpnEndpointIP}/32 via 172.20.0.1 dev eth0` : ''}
PreDown = mv /etc/resolv.conf.vpn-backup /etc/resolv.conf 2>/dev/null || true

[Peer]
PublicKey = ${config.peer.PublicKey}
AllowedIPs = ${config.peer.AllowedIPs || '0.0.0.0/0, ::/0'}
Endpoint = ${config.peer.Endpoint}
${config.peer.PersistentKeepalive ? `PersistentKeepalive = ${config.peer.PersistentKeepalive}` : ''}
`;

  console.log('[WireGuard] Creating config file at', confPath);
  console.log('[WireGuard] Current user:', process.getuid ? process.getuid() : 'unknown');

  // Write config file (this will work as root or with proper permissions).
  // Mode 0600: the file contains the private key.
  try {
    await fs.writeFile(confPath, wgConfig, { mode: 0o600 });
    console.log('[WireGuard] Config file created successfully');
  } catch (err) {
    console.error('[WireGuard] Failed to create config file:', err.message);
    throw new Error(`Failed to create WireGuard config: ${err.message}`);
  }

  return new Promise((resolve, reject) => {
    // Use shell to ensure root context
    const wg = spawn('wg-quick', ['up', interfaceName], {
      uid: 0, // Run as root
      gid: 0
    });

    let output = '';
    wg.stdout.on('data', (data) => {
      output += data;
      console.log('[WireGuard]', data.toString().trim());
    });
    wg.stderr.on('data', (data) => {
      output += data;
      console.log('[WireGuard]', data.toString().trim());
    });

    wg.on('close', (code) => {
      if (code === 0) {
        console.log('[WireGuard] Connected successfully to', config.peer.Endpoint);
        resolve();
      } else {
        console.error('[WireGuard] Connection failed (code', code, '):', output);
        // Check for Docker networking limitation
        if (output.includes('Nexthop has invalid gateway') || output.includes('Error: Nexthop')) {
          reject(new Error('VPN connection requires host network mode. Docker containers have limited network access. Please use the desktop app for VPN connections.'));
        } else {
          reject(new Error('WireGuard connection failed'));
        }
      }
    });

    wg.on('error', (err) => {
      console.error('[WireGuard] Spawn error:', err.message);
      reject(new Error(`Failed to start wg-quick: ${err.message}`));
    });
  });
}
|
||||
|
||||
// Helper: Connect OpenVPN
// Writes the raw .ovpn contents to a per-user temp file and launches the
// openvpn daemon against it. Resolves on a clean (exit code 0)
// daemonization, rejects otherwise.
async function connectOpenVPN(config, userId) {
  const confPath = `/tmp/ovpn_${userId}.conf`;

  await fs.writeFile(confPath, config.config);

  return new Promise((resolve, reject) => {
    const ovpn = spawn('openvpn', [
      '--config', confPath,
      '--daemon',
      '--log', `/tmp/ovpn_${userId}.log`
    ]);

    ovpn.on('close', (code) => {
      if (code === 0) {
        console.log('[OpenVPN] Connected successfully');
        resolve();
      } else {
        reject(new Error('OpenVPN connection failed. VPN requires host network mode. Please use the desktop app for VPN connections.'));
      }
    });

    // Without this handler, a missing/unspawnable openvpn binary emits an
    // unhandled 'error' event and crashes the process. All the other spawn
    // helpers in this file already guard this case; this one did not.
    ovpn.on('error', (err) => {
      console.error('[OpenVPN] Spawn error:', err.message);
      reject(new Error(`Failed to start OpenVPN: ${err.message}`));
    });
  });
}
|
||||
|
||||
// Helper: Disconnect WireGuard
// Tears down the per-user interface via `wg-quick down`. An already
// missing interface or config file is treated as success, so disconnect
// is effectively idempotent.
async function disconnectWireGuard(userId) {
  const interfaceName = `wg${userId}`;

  console.log('[WireGuard] Disconnecting interface:', interfaceName);

  return new Promise((resolve, reject) => {
    const wg = spawn('wg-quick', ['down', interfaceName], { uid: 0, gid: 0 });

    // Collect combined stdout/stderr for the "already gone" check below.
    let output = '';
    const capture = (data) => {
      output += data;
      console.log('[WireGuard]', data.toString().trim());
    };
    wg.stdout.on('data', capture);
    wg.stderr.on('data', capture);

    wg.on('close', (code) => {
      if (code === 0) {
        console.log('[WireGuard] Disconnected successfully');
        return resolve();
      }

      const alreadyGone =
        output.includes('is not a WireGuard interface') ||
        output.includes('does not exist');
      if (alreadyGone) {
        // Interface doesn't exist - already disconnected
        console.log('[WireGuard] Interface already disconnected or config file missing');
        return resolve();
      }

      console.error('[WireGuard] Disconnect failed (code', code, '):', output);
      reject(new Error('WireGuard disconnect failed'));
    });

    wg.on('error', (err) => {
      console.error('[WireGuard] Spawn error:', err.message);
      reject(new Error(`Failed to stop wg-quick: ${err.message}`));
    });
  });
}
|
||||
|
||||
// Helper: Disconnect OpenVPN
// Terminates any openvpn process started from this user's config file.
// pkill exit code 1 means "no matching process", which is treated as
// success so the operation is idempotent.
async function disconnectOpenVPN(userId) {
  console.log('[OpenVPN] Disconnecting...');

  return new Promise((resolve, reject) => {
    // Kill OpenVPN process
    const pkill = spawn('pkill', ['-f', `openvpn.*ovpn_${userId}`]);

    pkill.on('close', (code) => {
      // pkill returns 0 if processes were killed, 1 if none were found
      const killedOrAbsent = code === 0 || code === 1;
      if (!killedOrAbsent) {
        console.error('[OpenVPN] Disconnect failed with code:', code);
        return reject(new Error('OpenVPN disconnect failed'));
      }
      console.log('[OpenVPN] Disconnected successfully');
      resolve();
    });

    pkill.on('error', (err) => {
      console.error('[OpenVPN] Spawn error:', err.message);
      reject(new Error(`Failed to stop OpenVPN: ${err.message}`));
    });
  });
}
|
||||
|
||||
// Helper: Check if VPN is actually connected
// Probes the OS for a live tunnel: `wg show <iface>` output for
// WireGuard, a matching `pgrep` hit for OpenVPN. Resolves to a boolean
// and never throws — any probe failure is reported as "not connected".
async function checkVPNConnection(configType, userId) {
  // Runs `cmd args...` and resolves true iff it exits 0 AND printed
  // something on stdout.
  const probe = (cmd, args) =>
    new Promise((resolve) => {
      const child = spawn(cmd, args);
      let sawOutput = false;

      child.stdout.on('data', () => { sawOutput = true; });
      child.on('close', (code) => {
        resolve(code === 0 && sawOutput);
      });
      child.on('error', () => resolve(false));
    });

  try {
    if (configType === 'wireguard') {
      return probe('wg', ['show', `wg${userId}`]);
    }
    if (configType === 'openvpn') {
      return probe('pgrep', ['-f', `openvpn.*ovpn_${userId}`]);
    }
    return false;
  } catch (error) {
    return false;
  }
}
|
||||
|
||||
// Export the configured router for mounting by the Express app.
module.exports = router;
|
||||
Loading…
Add table
Add a link
Reference in a new issue