// logoCacher.js — downloads channel logos over HTTP and caches them on disk,
// recording each cached file in the logo_cache database table.
const { db } = require('../database/db');   // shared sqlite3 connection
const axios = require('axios');             // HTTP client used for logo downloads
const fs = require('fs').promises;          // promise-based filesystem API
const path = require('path');
const crypto = require('crypto');           // md5 hashing for cache filenames
const logger = require('../utils/logger');

// On-disk directory where downloaded logo files are stored.
const LOGO_CACHE_DIR = path.join(__dirname, '../../data/logo-cache');
// How long a cached logo stays fresh before it is re-downloaded.
const CACHE_REFRESH_INTERVAL = 7 * 24 * 60 * 60 * 1000; // 7 days
// Download throttling: small concurrent batches with a pause between them.
const BATCH_SIZE = 5; // Process 5 logos at a time
const BATCH_DELAY = 2000; // 2 seconds between batches
/**
 * Ensure the logo cache directory exists on disk (no-op when already present).
 * Failures are logged rather than thrown, so callers never see a rejection.
 */
async function ensureCacheDir() {
  await fs.mkdir(LOGO_CACHE_DIR, { recursive: true }).catch((err) => {
    logger.error('Failed to create logo cache directory:', err);
  });
}
/**
 * Download and cache a single channel logo.
 *
 * @param {string} channelName - Display name of the channel (used for logging and the DB row).
 * @param {string} logoUrl - Remote logo URL; must start with "http" or the call is a no-op.
 * @returns {Promise<string|null>} Container-visible cache path
 *   ('/app/data/logo-cache/<md5><ext>') on success, or null on any failure.
 *
 * A fresh cache hit (DB row younger than CACHE_REFRESH_INTERVAL and the file
 * still on disk) short-circuits without re-downloading.
 */
async function cacheLogo(channelName, logoUrl) {
  if (!logoUrl || !logoUrl.startsWith('http')) {
    return null;
  }

  try {
    // Derive a stable filename from the URL so the same logo is stored once.
    const hash = crypto.createHash('md5').update(logoUrl).digest('hex');
    const ext = path.extname(new URL(logoUrl).pathname) || '.png';
    const filename = `${hash}${ext}`;
    const localPath = path.join(LOGO_CACHE_DIR, filename);
    // Path as seen from inside the container; this is what gets stored in the
    // DB and served back to clients. NOTE(review): assumes the app runs with
    // its data dir mounted at /app/data — confirm against deployment config.
    const relativeLocalPath = path.join('/app/data/logo-cache', filename);
    console.log(`[LogoCacher] Path for ${channelName}: ${relativeLocalPath}`);

    // Check if already cached in database and file exists.
    // FIX: the previous version discarded db.get errors silently; they are now
    // logged. A lookup failure is treated as a cache miss, not a fatal error.
    const existingEntry = await new Promise((resolve) => {
      db.get(
        'SELECT local_path, last_updated FROM logo_cache WHERE logo_url = ? LIMIT 1',
        [logoUrl],
        (err, row) => {
          if (err) {
            logger.error(`Logo cache DB lookup failed for ${channelName}:`, err);
          }
          resolve(row);
        }
      );
    });

    if (existingEntry) {
      try {
        await fs.stat(localPath); // throws if the cached file was removed
        const age = Date.now() - new Date(existingEntry.last_updated).getTime();
        if (age < CACHE_REFRESH_INTERVAL) {
          logger.debug(`Logo already cached for ${channelName}`);
          return relativeLocalPath;
        }
      } catch (err) {
        // File doesn't exist anymore, re-download
      }
    }

    // Download logo
    const response = await axios.get(logoUrl, {
      responseType: 'arraybuffer',
      timeout: 45000, // Increased for VPN connection
      headers: {
        'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36',
        'Accept': 'image/*',
      },
      maxRedirects: 5
    });

    if (!response.data || response.data.length === 0) {
      console.error(`[LogoCacher] Empty response for ${channelName}`);
      return null;
    }

    // Save to disk
    await fs.writeFile(localPath, response.data);
    console.log(`[LogoCacher] Cached logo for ${channelName} (${response.data.length} bytes)`);
    logger.info(`Cached logo for ${channelName}: ${logoUrl}`);

    // Update database.
    // FIX: must be `return await`, not bare `return` — otherwise a rejected
    // insert promise escapes this try/catch entirely, and the caller (e.g.
    // Promise.all in cacheAllLogos) sees a rejection instead of null.
    return await new Promise((resolve, reject) => {
      console.log(`[LogoCacher] Inserting DB: name="${channelName}", url="${logoUrl}", path="${relativeLocalPath}"`);
      db.run(
        `INSERT OR REPLACE INTO logo_cache (channel_name, logo_url, local_path, last_updated)
         VALUES (?, ?, ?, CURRENT_TIMESTAMP)`,
        [channelName, logoUrl, relativeLocalPath],
        function(err) {
          if (err) {
            console.error(`[LogoCacher] DB INSERT FAILED for ${channelName}:`, err);
            logger.error(`Failed to update logo cache DB for ${channelName}:`, err);
            reject(err);
          } else {
            console.log(`[LogoCacher] DB INSERT SUCCESS for ${channelName}, rowID: ${this.lastID}`);
            resolve(relativeLocalPath);
          }
        }
      );
    });
  } catch (error) {
    logger.error(`Failed to cache logo for ${channelName}:`, error.message);
    return null;
  }
}
/**
 * Fetch every distinct channel that has an http(s) logo URL, preferring a
 * custom logo over the default one.
 *
 * @returns {Promise<Array<{name: string, logo: string}>>} resolves to [] when
 *   there are no matching channels; rejects on a database error.
 */
function getChannelsNeedingCache() {
  const sql = `SELECT DISTINCT c.name, COALESCE(c.custom_logo, c.logo) as logo
     FROM channels c
     WHERE (c.logo IS NOT NULL AND c.logo LIKE 'http%')
        OR (c.custom_logo IS NOT NULL AND c.custom_logo LIKE 'http%')
     ORDER BY c.name`;

  return new Promise((resolve, reject) => {
    db.all(sql, [], (err, rows) => {
      if (err) {
        logger.error('Error fetching channels for logo caching:', err);
        reject(err);
        return;
      }
      resolve(rows || []);
    });
  });
}
/**
 * Cache logos for every channel that has an http(s) logo URL, working in
 * small concurrent batches (BATCH_SIZE) with a delay (BATCH_DELAY) between
 * batches so remote logo servers are not hammered.
 *
 * Never throws: all errors are logged and the job simply ends.
 */
async function cacheAllLogos() {
  try {
    console.log('[LogoCacher] Starting logo caching...');
    await ensureCacheDir();

    const channels = await getChannelsNeedingCache();
    console.log(`[LogoCacher] Found ${channels.length} channels with logos`);

    if (channels.length === 0) {
      logger.info('All channel logos are already cached');
      return;
    }

    logger.info(`Caching logos for ${channels.length} channels...`);
    let cached = 0;

    // Process in batches
    for (let i = 0; i < channels.length; i += BATCH_SIZE) {
      const batch = channels.slice(i, i + BATCH_SIZE);
      console.log(`[LogoCacher] Processing batch ${Math.floor(i/BATCH_SIZE) + 1}/${Math.ceil(channels.length/BATCH_SIZE)}`);

      // FIX: Promise.allSettled instead of Promise.all — a single rejected
      // cacheLogo promise (e.g. a DB insert failure) used to abort the whole
      // job mid-run; now one failure just doesn't count toward `cached`.
      const results = await Promise.allSettled(
        batch.map(channel => cacheLogo(channel.name, channel.logo))
      );

      cached += results.filter(
        (r) => r.status === 'fulfilled' && r.value !== null
      ).length;

      // Wait between batches to avoid overwhelming servers
      if (i + BATCH_SIZE < channels.length) {
        await new Promise(resolve => setTimeout(resolve, BATCH_DELAY));
      }
    }

    logger.info(`Logo caching complete. Cached ${cached}/${channels.length} logos.`);
    console.log(`[LogoCacher] Completed: ${cached}/${channels.length} logos cached`);
  } catch (error) {
    logger.error('Error in logo caching job:', error);
    console.error('[LogoCacher] Error:', error.message);
  }
}
/**
 * Reset the logo cache. Despite the name, this clears EVERY row from the
 * logo_cache table (so all logos are re-validated on the next caching run),
 * then removes cached files older than 30 days from disk.
 *
 * Never throws: errors are logged; per-file stat/unlink failures are ignored.
 */
async function cleanupOldLogos() {
  const MAX_FILE_AGE_MS = 30 * 24 * 60 * 60 * 1000; // 30 days

  try {
    // Step 1: wipe all database entries.
    await new Promise((resolve, reject) => {
      db.run('DELETE FROM logo_cache', (err) => (err ? reject(err) : resolve()));
    });
    console.log('[LogoCacher] Cleared all logo cache database entries');

    // Step 2: remove files past the age cutoff.
    const entries = await fs.readdir(LOGO_CACHE_DIR);
    const cutoffTime = Date.now() - MAX_FILE_AGE_MS;

    let deleted = 0;
    for (const name of entries) {
      const fullPath = path.join(LOGO_CACHE_DIR, name);
      try {
        const info = await fs.stat(fullPath);
        if (info.mtime.getTime() < cutoffTime) {
          await fs.unlink(fullPath);
          deleted += 1;
        }
      } catch (err) {
        // Best-effort: skip files that vanish or cannot be stat'ed/removed.
      }
    }

    if (deleted > 0) {
      logger.info(`Cleaned up ${deleted} old cached logos`);
    }
  } catch (error) {
    logger.error('Error cleaning up old logos:', error);
  }
}
// Initialize cache directory on startup but don't auto-cache
// (VPN must be connected first for external logo downloads).
// ensureCacheDir never rejects, so this floating async call cannot leak an
// unhandled rejection.
(async () => {
  await ensureCacheDir();
  logger.info('Logo caching job initialized (manual trigger required)');
})();

module.exports = { cacheAllLogos, cacheLogo, cleanupOldLogos };
|