Initial commit: StreamFlow IPTV platform
This commit is contained in:
commit
73a8ae9ffd
1240 changed files with 278451 additions and 0 deletions
471
backend/routes/backup.js
Normal file
471
backend/routes/backup.js
Normal file
|
|
@ -0,0 +1,471 @@
|
|||
const express = require('express');
|
||||
const router = express.Router();
|
||||
const { db } = require('../database/db');
|
||||
const { authenticate } = require('../middleware/auth');
|
||||
const { backupLimiter, heavyLimiter, readLimiter } = require('../middleware/rateLimiter');
|
||||
const fs = require('fs').promises;
|
||||
const path = require('path');
|
||||
const archiver = require('archiver');
|
||||
const unzipper = require('unzipper');
|
||||
const { promisify } = require('util');
|
||||
|
||||
// Promise-based wrappers around the sqlite3 callback API.
// NOTE(review): promisify(db.run) resolves with the callback's first argument,
// so `this.lastID` / `this.changes` are not visible to callers — confirm no
// caller relies on them (none in this file do; all inserts are INSERT OR IGNORE).
const dbAll = promisify(db.all.bind(db));
const dbRun = promisify(db.run.bind(db));

// Directory where per-user backup archives are stored on disk.
const BACKUP_DIR = path.join(__dirname, '../../data/backups');
|
||||
|
||||
/**
 * Make sure BACKUP_DIR exists, creating intermediate directories as needed.
 * Failures are logged and swallowed so callers can proceed best-effort.
 */
async function ensureBackupDir() {
  try {
    await fs.mkdir(BACKUP_DIR, { recursive: true });
  } catch (error) {
    console.error('Failed to create backup directory:', error);
  }
}
|
||||
|
||||
/**
 * GET /api/backup/list
 * List all backup archives owned by the current user, newest first.
 *
 * Responds with [{ filename, size, created, timestamp }], where `created`
 * is an ISO-8601 string derived from the file's mtime.
 */
router.get('/list', authenticate, readLimiter, async (req, res) => {
  try {
    await ensureBackupDir();
    const userId = req.user.id;
    const username = req.user.username;

    // Accept both the current naming scheme (StreamFlow_Backup_{username}_{ts}.zip)
    // and the legacy one (backup_user{id}_{ts}.zip).
    const files = await fs.readdir(BACKUP_DIR);
    const userBackups = files.filter(f =>
      (f.startsWith(`StreamFlow_Backup_${username}_`) || f.startsWith(`backup_user${userId}_`)) && f.endsWith('.zip')
    );

    const backupList = await Promise.all(
      userBackups.map(async (filename) => {
        const filePath = path.join(BACKUP_DIR, filename);
        const stats = await fs.stat(filePath);

        // BUGFIX (dead code removed): the previous version also parsed a
        // timestamp out of the filename but never used it — the file's mtime
        // is always used, since it exists for both naming schemes and
        // survives uploaded archives.
        const actualTimestamp = stats.mtimeMs;

        return {
          filename,
          size: stats.size,
          created: new Date(actualTimestamp).toISOString(),
          timestamp: actualTimestamp
        };
      })
    );

    // Sort by timestamp descending (newest first).
    backupList.sort((a, b) => b.timestamp - a.timestamp);

    res.json(backupList);
  } catch (error) {
    console.error('Failed to list backups:', error);
    res.status(500).json({ error: 'Failed to list backups' });
  }
});
|
||||
|
||||
/**
 * POST /api/backup/create
 * Create a zip backup of the current user's data: profile (sans secrets),
 * playlists, channels, favorites, custom channel logos, and uploaded M3U
 * files. Responds once the archive has been fully flushed to disk.
 */
router.post('/create', authenticate, backupLimiter, async (req, res) => {
  // Reply with a 500 exactly once, from whichever error path fires first.
  const fail = (err) => {
    console.error('Failed to create backup:', err);
    if (!res.headersSent) {
      res.status(500).json({ error: 'Failed to create backup' });
    }
  };

  try {
    await ensureBackupDir();
    const userId = req.user.id;
    const username = req.user.username;
    const timestamp = Date.now();
    const filename = `StreamFlow_Backup_${username}_${timestamp}.zip`;
    const backupPath = path.join(BACKUP_DIR, filename);

    // Stream the zip straight to disk.
    const output = require('fs').createWriteStream(backupPath);
    const archive = archiver('zip', { zlib: { level: 9 } });

    output.on('close', () => {
      if (!res.headersSent) {
        res.json({
          success: true,
          filename,
          size: archive.pointer(),
          created: new Date(timestamp).toISOString()
        });
      }
    });

    // BUGFIX: the previous handler did `throw err` inside the event
    // callback — the outer try/catch cannot catch that, so it became an
    // uncaught exception and the client never got a response. Report the
    // failure through the response instead. The write stream gets the same
    // treatment (it previously had no error handler at all).
    archive.on('error', fail);
    output.on('error', fail);

    archive.pipe(output);

    // Export user data - CWE-532: exclude password and other sensitive fields.
    const userData = await dbAll(
      `SELECT id, username, email, role, two_factor_enabled, is_active,
              created_at, updated_at, last_login_at, last_login_ip,
              password_changed_at, password_expires_at
       FROM users WHERE id = ?`,
      [userId]
    );
    archive.append(JSON.stringify(userData, null, 2), { name: 'user.json' });

    // Export playlists
    const playlists = await dbAll('SELECT * FROM playlists WHERE user_id = ?', [userId]);
    archive.append(JSON.stringify(playlists, null, 2), { name: 'playlists.json' });

    // Export channels
    const channels = await dbAll(
      `SELECT DISTINCT c.* FROM channels c
       JOIN playlists p ON c.playlist_id = p.id
       WHERE p.user_id = ?`,
      [userId]
    );
    archive.append(JSON.stringify(channels, null, 2), { name: 'channels.json' });

    // Export favorites
    const favorites = await dbAll('SELECT * FROM favorites WHERE user_id = ?', [userId]);
    archive.append(JSON.stringify(favorites, null, 2), { name: 'favorites.json' });

    // Export custom channel logos
    const customLogos = await dbAll(
      `SELECT c.id, c.name, c.custom_logo FROM channels c
       JOIN playlists p ON c.playlist_id = p.id
       WHERE p.user_id = ? AND c.custom_logo IS NOT NULL`,
      [userId]
    );

    // Add custom logo files (skip any that are missing on disk).
    for (const channel of customLogos) {
      if (channel.custom_logo) {
        const logoPath = path.join(__dirname, '../../data/logos', channel.custom_logo);
        try {
          const logoExists = await fs.access(logoPath).then(() => true).catch(() => false);
          if (logoExists) {
            archive.file(logoPath, { name: `logos/${channel.custom_logo}` });
          }
        } catch (err) {
          console.error(`Failed to add logo ${channel.custom_logo}:`, err);
        }
      }
    }

    // Export m3u files
    const m3uFiles = await dbAll('SELECT * FROM m3u_files WHERE user_id = ?', [userId]);
    archive.append(JSON.stringify(m3uFiles, null, 2), { name: 'm3u_files.json' });

    // Add actual m3u files (skip any that are missing on disk).
    for (const m3uFile of m3uFiles) {
      if (m3uFile.file_path) {
        const m3uPath = path.join(__dirname, '../../data/playlists', m3uFile.file_path);
        try {
          const m3uExists = await fs.access(m3uPath).then(() => true).catch(() => false);
          if (m3uExists) {
            archive.file(m3uPath, { name: `m3u_files/${m3uFile.file_path}` });
          }
        } catch (err) {
          console.error(`Failed to add m3u file ${m3uFile.file_path}:`, err);
        }
      }
    }

    // Backup metadata / manifest.
    const settings = {
      created: new Date(timestamp).toISOString(),
      version: '1.0',
      userId: userId,
      username: req.user.username
    };
    archive.append(JSON.stringify(settings, null, 2), { name: 'backup_info.json' });

    // Finalize the archive; the response is sent from output's 'close' event.
    await archive.finalize();

  } catch (error) {
    // BUGFIX: guard against double-send if the stream already responded.
    fail(error);
  }
});
|
||||
|
||||
/**
 * GET /api/backup/download/:filename
 * Download one of the current user's backup archives.
 */
router.get('/download/:filename', authenticate, heavyLimiter, async (req, res) => {
  try {
    const userId = req.user.id;
    const username = req.user.username;
    const { filename } = req.params;

    // SECURITY: `filename` is joined onto BACKUP_DIR below — reject any
    // value carrying path components (e.g. an encoded "../" that survives
    // routing) before the ownership check.
    if (filename !== path.basename(filename) || filename.includes('..')) {
      return res.status(403).json({ error: 'Unauthorized' });
    }

    // Validate filename belongs to user (support both old and new format).
    const isOldFormat = filename.startsWith(`backup_user${userId}_`);
    const isNewFormat = filename.startsWith(`StreamFlow_Backup_${username}_`);
    if ((!isOldFormat && !isNewFormat) || !filename.endsWith('.zip')) {
      return res.status(403).json({ error: 'Unauthorized' });
    }

    const filePath = path.join(BACKUP_DIR, filename);

    // Check if file exists
    try {
      await fs.access(filePath);
    } catch (err) {
      return res.status(404).json({ error: 'Backup not found' });
    }

    res.download(filePath, filename);
  } catch (error) {
    console.error('Failed to download backup:', error);
    res.status(500).json({ error: 'Failed to download backup' });
  }
});
|
||||
|
||||
/**
 * DELETE /api/backup/:filename
 * Delete one of the current user's backup archives.
 */
router.delete('/:filename', authenticate, readLimiter, async (req, res) => {
  try {
    const userId = req.user.id;
    const username = req.user.username;
    const { filename } = req.params;

    // SECURITY: `filename` is joined onto BACKUP_DIR below — reject any
    // value carrying path components before the ownership check.
    if (filename !== path.basename(filename) || filename.includes('..')) {
      return res.status(403).json({ error: 'Unauthorized' });
    }

    // Validate filename belongs to user (support both old and new format).
    const isOldFormat = filename.startsWith(`backup_user${userId}_`);
    const isNewFormat = filename.startsWith(`StreamFlow_Backup_${username}_`);
    if ((!isOldFormat && !isNewFormat) || !filename.endsWith('.zip')) {
      return res.status(403).json({ error: 'Unauthorized' });
    }

    const filePath = path.join(BACKUP_DIR, filename);

    // BUGFIX: a missing file previously surfaced as a generic 500; map
    // ENOENT to 404 and rethrow anything else.
    try {
      await fs.unlink(filePath);
    } catch (err) {
      if (err.code === 'ENOENT') {
        return res.status(404).json({ error: 'Backup not found' });
      }
      throw err;
    }

    res.json({ success: true, message: 'Backup deleted' });
  } catch (error) {
    console.error('Failed to delete backup:', error);
    res.status(500).json({ error: 'Failed to delete backup' });
  }
});
|
||||
|
||||
/**
 * POST /api/backup/upload
 * Upload a backup archive for later restoration. The file is stored under
 * the current naming scheme regardless of its original name.
 *
 * Expects a multipart upload with field name "backup"
 * (express-fileupload-style `req.files` — assumed from usage; confirm the
 * app registers that middleware).
 */
router.post('/upload', authenticate, heavyLimiter, async (req, res) => {
  try {
    await ensureBackupDir();

    if (!req.files || !req.files.backup) {
      return res.status(400).json({ error: 'No backup file provided' });
    }

    const userId = req.user.id;
    const username = req.user.username;
    const backupFile = req.files.backup;
    const timestamp = Date.now();
    const filename = `StreamFlow_Backup_${username}_${timestamp}.zip`;
    const uploadPath = path.join(BACKUP_DIR, filename);

    // Robustness: when the upload buffer is available, require the zip
    // local-file-header magic ("PK") so arbitrary junk fails here instead
    // of blowing up later during restore. (Skipped when the middleware is
    // in temp-file mode and `data` is empty.)
    if (backupFile.data && backupFile.data.length >= 2 &&
        backupFile.data.toString('latin1', 0, 2) !== 'PK') {
      return res.status(400).json({ error: 'Invalid backup file' });
    }

    await backupFile.mv(uploadPath);

    const stats = await fs.stat(uploadPath);

    res.json({
      success: true,
      filename,
      size: stats.size,
      created: new Date(timestamp).toISOString()
    });
  } catch (error) {
    console.error('Failed to upload backup:', error);
    res.status(500).json({ error: 'Failed to upload backup' });
  }
});
|
||||
|
||||
/**
 * POST /api/backup/restore/:filename
 * Restore playlists, channels, favorites, custom logos and M3U files from
 * one of the current user's backup archives. Per-row failures are logged
 * and skipped; inserts use INSERT OR IGNORE so re-running is idempotent.
 *
 * Responds with { success, message, stats: { playlists, channels, favorites } }.
 */
router.post('/restore/:filename', authenticate, backupLimiter, async (req, res) => {
  let extractPath = null;
  try {
    const userId = req.user.id;
    const username = req.user.username;
    const { filename } = req.params;

    // SECURITY: `filename` is joined onto BACKUP_DIR below — reject any
    // value carrying path components before the ownership check.
    if (filename !== path.basename(filename) || filename.includes('..')) {
      return res.status(403).json({ error: 'Unauthorized' });
    }

    // Validate filename belongs to user (support both old and new format).
    const isOldFormat = filename.startsWith(`backup_user${userId}_`);
    const isNewFormat = filename.startsWith(`StreamFlow_Backup_${username}_`);
    if ((!isOldFormat && !isNewFormat) || !filename.endsWith('.zip')) {
      return res.status(403).json({ error: 'Unauthorized' });
    }

    const backupPath = path.join(BACKUP_DIR, filename);
    extractPath = path.join(BACKUP_DIR, `extract_${userId}_${Date.now()}`);

    // Check if backup exists
    try {
      await fs.access(backupPath);
    } catch (err) {
      return res.status(404).json({ error: 'Backup not found' });
    }

    // Extract the archive into a scratch directory.
    await fs.mkdir(extractPath, { recursive: true });

    await require('fs')
      .createReadStream(backupPath)
      .pipe(unzipper.Extract({ path: extractPath }))
      .promise();

    // Read backup data
    const playlistsData = JSON.parse(await fs.readFile(path.join(extractPath, 'playlists.json'), 'utf8'));
    const channelsData = JSON.parse(await fs.readFile(path.join(extractPath, 'channels.json'), 'utf8'));
    const favoritesData = JSON.parse(await fs.readFile(path.join(extractPath, 'favorites.json'), 'utf8'));

    let restoredPlaylists = 0;
    let restoredChannels = 0;
    let restoredFavorites = 0;

    // Restore playlists under the current user, regardless of the user id
    // recorded in the backup.
    for (const playlist of playlistsData) {
      try {
        await dbRun(
          `INSERT OR IGNORE INTO playlists (name, url, username, password, user_id, created_at)
           VALUES (?, ?, ?, ?, ?, ?)`,
          [playlist.name, playlist.url, playlist.username, playlist.password, userId, playlist.created_at]
        );
        restoredPlaylists++;
      } catch (err) {
        console.error('Failed to restore playlist:', err);
      }
    }

    // Map old playlist ids -> new ids by matching on name.
    const newPlaylists = await dbAll('SELECT id, name FROM playlists WHERE user_id = ?', [userId]);
    const playlistMap = {};
    playlistsData.forEach((oldP) => {
      const newP = newPlaylists.find(p => p.name === oldP.name);
      if (newP) playlistMap[oldP.id] = newP.id;
    });

    // Restore channels under their remapped playlists; channels whose
    // playlist failed to restore are skipped.
    for (const channel of channelsData) {
      const newPlaylistId = playlistMap[channel.playlist_id];
      if (!newPlaylistId) continue;

      try {
        await dbRun(
          `INSERT OR IGNORE INTO channels
           (name, url, logo, group_name, playlist_id, custom_logo, is_radio, tvg_id, tvg_name)
           VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?)`,
          [
            channel.name, channel.url, channel.logo, channel.group_name,
            newPlaylistId, channel.custom_logo, channel.is_radio,
            channel.tvg_id, channel.tvg_name
          ]
        );
        restoredChannels++;
      } catch (err) {
        console.error('Failed to restore channel:', err);
      }
    }

    // Restore custom logos (the directory may be absent in older backups).
    const logosDir = path.join(extractPath, 'logos');
    try {
      const logoFiles = await fs.readdir(logosDir);
      const targetLogosDir = path.join(__dirname, '../../data/logos');
      await fs.mkdir(targetLogosDir, { recursive: true });

      for (const logoFile of logoFiles) {
        // SECURITY: copy by basename only — never honor path components
        // that came out of the archive.
        const safeName = path.basename(logoFile);
        await fs.copyFile(path.join(logosDir, safeName), path.join(targetLogosDir, safeName));
      }
    } catch (err) {
      // Logos directory might not exist in older backups
      console.log('No custom logos to restore');
    }

    // Restore favorites, remapping old channel ids via (name, url).
    const newChannels = await dbAll(
      `SELECT c.id, c.name, c.url FROM channels c
       JOIN playlists p ON c.playlist_id = p.id
       WHERE p.user_id = ?`,
      [userId]
    );

    for (const fav of favoritesData) {
      const oldChannel = channelsData.find(c => c.id === fav.channel_id);
      if (!oldChannel) continue;

      const newChannel = newChannels.find(c => c.name === oldChannel.name && c.url === oldChannel.url);
      if (!newChannel) continue;

      try {
        await dbRun(
          `INSERT OR IGNORE INTO favorites (user_id, channel_id, custom_logo, is_radio)
           VALUES (?, ?, ?, ?)`,
          [userId, newChannel.id, fav.custom_logo, fav.is_radio]
        );
        restoredFavorites++;
      } catch (err) {
        console.error('Failed to restore favorite:', err);
      }
    }

    // Restore M3U files (optional section; older backups may lack it).
    try {
      const m3uFilesData = JSON.parse(await fs.readFile(path.join(extractPath, 'm3u_files.json'), 'utf8'));
      const m3uFilesDir = path.join(extractPath, 'm3u_files');
      const targetM3uDir = path.join(__dirname, '../../data/playlists');
      await fs.mkdir(targetM3uDir, { recursive: true });

      for (const m3uFile of m3uFilesData) {
        try {
          if (m3uFile.file_path) {
            // SECURITY (zip-slip): file_path comes from attacker-controllable
            // JSON inside the archive and was previously joined onto the
            // target directory unchecked. Skip any entry that is not a plain
            // file name.
            const safeName = path.basename(m3uFile.file_path);
            if (safeName !== m3uFile.file_path || m3uFile.file_path.includes('..')) {
              console.error('Failed to restore m3u file:', new Error(`unsafe path: ${m3uFile.file_path}`));
              continue;
            }
            await fs.copyFile(path.join(m3uFilesDir, safeName), path.join(targetM3uDir, safeName));
          }

          // Insert database record
          await dbRun(
            `INSERT OR IGNORE INTO m3u_files (user_id, name, file_path, uploaded_at)
             VALUES (?, ?, ?, ?)`,
            [userId, m3uFile.name, m3uFile.file_path, m3uFile.uploaded_at]
          );
        } catch (err) {
          console.error('Failed to restore m3u file:', err);
        }
      }
    } catch (err) {
      console.log('No M3U files to restore');
    }

    res.json({
      success: true,
      message: 'Backup restored successfully',
      stats: {
        playlists: restoredPlaylists,
        channels: restoredChannels,
        favorites: restoredFavorites
      }
    });

  } catch (error) {
    console.error('Failed to restore backup:', error);
    res.status(500).json({ error: 'Failed to restore backup' });
  } finally {
    // BUGFIX: the scratch extraction directory was only removed on the
    // success path and leaked on every error; clean it up unconditionally.
    if (extractPath) {
      await fs.rm(extractPath, { recursive: true, force: true }).catch(() => {});
    }
  }
});

module.exports = router;
|
||||
Loading…
Add table
Add a link
Reference in a new issue