"""
|
||
Lycostorrent - Système de cache pour Latest et Discover
|
||
Permet de pré-charger les données en arrière-plan
|
||
"""
|
||
|
||
import copy
import json
import logging
import os
import threading
import time
from datetime import datetime, timedelta
from typing import Dict, Any, Optional, List

from apscheduler.schedulers.background import BackgroundScheduler
from apscheduler.triggers.interval import IntervalTrigger
|
||
|
||
logger = logging.getLogger(__name__)

# File locations (container paths)
CACHE_DIR = '/app/config/cache'
CONFIG_FILE = '/app/config/cache_config.json'
META_FILE = os.path.join(CACHE_DIR, 'cache_meta.json')

# Global scheduler state.
# _cache_lock guards _is_refreshing, the "a refresh is running" flag.
_scheduler: Optional[BackgroundScheduler] = None
_cache_lock = threading.Lock()
_is_refreshing = False

# Default configuration, merged with the on-disk config file.
DEFAULT_CONFIG = {
    'enabled': False,
    'interval_minutes': 60,  # refresh every hour by default
    'latest': {
        'enabled': True,
        'categories': ['movies', 'tv'],
        'trackers': [],  # empty list means "all trackers"
        'limit': 50
    },
    'discover': {
        'enabled': True,
        'limit': 30
    }
}
|
||
|
||
# ============================================================
|
||
# GESTION DE LA CONFIGURATION
|
||
# ============================================================
|
||
|
||
def get_cache_config() -> Dict[str, Any]:
    """Return the cache configuration, merged over the defaults.

    Reads CONFIG_FILE if it exists. Top-level scalar keys from the file
    override the defaults; nested dict sections ('latest', 'discover') are
    merged key by key, so a partial section in the file no longer drops the
    default sub-keys (the previous shallow ``{**DEFAULT_CONFIG, **config}``
    merge lost them). The result is always a deep copy, so callers may
    mutate it without corrupting DEFAULT_CONFIG.

    Returns:
        A dict containing every key of DEFAULT_CONFIG.
    """
    config = copy.deepcopy(DEFAULT_CONFIG)
    try:
        if os.path.exists(CONFIG_FILE):
            with open(CONFIG_FILE, 'r') as f:
                user_config = json.load(f)
            for key, value in user_config.items():
                # Deep-merge nested dict sections so missing sub-keys
                # keep their default values.
                if isinstance(value, dict) and isinstance(config.get(key), dict):
                    config[key].update(value)
                else:
                    config[key] = value
    except Exception as e:
        logger.error(f"Erreur lecture config cache: {e}")
    return config
|
||
|
||
|
||
def save_cache_config(config: Dict[str, Any]) -> bool:
    """Persist the cache configuration to CONFIG_FILE as indented JSON.

    Creates the parent directory when it does not exist yet.

    Returns:
        True when the file was written, False on any error.
    """
    try:
        parent_dir = os.path.dirname(CONFIG_FILE)
        os.makedirs(parent_dir, exist_ok=True)
        with open(CONFIG_FILE, 'w') as handle:
            json.dump(config, handle, indent=2)
    except Exception as e:
        logger.error(f"Erreur sauvegarde config cache: {e}")
        return False
    return True
|
||
|
||
|
||
# ============================================================
|
||
# GESTION DES MÉTADONNÉES
|
||
# ============================================================
|
||
|
||
def get_cache_meta() -> Dict[str, Any]:
    """Load cache metadata (timestamps, status) from META_FILE.

    Returns:
        The parsed metadata dict, or a default "never refreshed"
        structure when the file is missing or unreadable.
    """
    try:
        if os.path.exists(META_FILE):
            with open(META_FILE, 'r') as handle:
                return json.load(handle)
    except Exception as e:
        logger.error(f"Erreur lecture meta cache: {e}")

    # Fallback: pristine metadata for a cache that was never refreshed.
    return {
        'last_refresh': None,
        'next_refresh': None,
        'status': 'never',
        'latest': {},
        'discover': {},
    }
|
||
|
||
|
||
def save_cache_meta(meta: Dict[str, Any]) -> bool:
    """Write cache metadata to META_FILE as indented JSON.

    Ensures CACHE_DIR exists first.

    Returns:
        True on success, False if the write failed.
    """
    try:
        os.makedirs(CACHE_DIR, exist_ok=True)
        with open(META_FILE, 'w') as handle:
            json.dump(meta, handle, indent=2)
    except Exception as e:
        logger.error(f"Erreur sauvegarde meta cache: {e}")
        return False
    return True
|
||
|
||
|
||
def get_cache_status() -> Dict[str, Any]:
    """Build a status summary of the cache for display in the UI.

    Combines the saved configuration and metadata with the on-disk cache
    size and a human-readable "last refreshed X ago" string (French,
    matching the UI language). Never raises: size/ago computations are
    best-effort.

    Returns:
        Dict with enabled/interval/refresh timestamps/status flags,
        cache size (bytes and MB), configured categories and the
        discover-enabled flag.
    """
    global _is_refreshing

    config = get_cache_config()
    meta = get_cache_meta()

    # Total size of the files directly under CACHE_DIR (best effort).
    cache_size = 0
    try:
        if os.path.exists(CACHE_DIR):
            for name in os.listdir(CACHE_DIR):
                filepath = os.path.join(CACHE_DIR, name)
                if os.path.isfile(filepath):
                    cache_size += os.path.getsize(filepath)
    except OSError:
        # Was a bare except; narrowed to filesystem errors only.
        pass

    # Human-readable "il y a X" string from the last refresh timestamp.
    last_refresh_ago = None
    if meta.get('last_refresh'):
        try:
            last_dt = datetime.fromisoformat(meta['last_refresh'])
            delta = datetime.now() - last_dt
            minutes = int(delta.total_seconds() / 60)
            if minutes < 1:
                last_refresh_ago = "à l'instant"
            elif minutes < 60:
                last_refresh_ago = f"il y a {minutes} min"
            else:
                hours = minutes // 60
                last_refresh_ago = f"il y a {hours}h"
        except (ValueError, TypeError):
            # Malformed timestamp: just omit the "ago" string.
            pass

    return {
        'enabled': config.get('enabled', False),
        'interval_minutes': config.get('interval_minutes', 60),
        'last_refresh': meta.get('last_refresh'),
        'last_refresh_ago': last_refresh_ago,
        'next_refresh': meta.get('next_refresh'),
        'status': meta.get('status', 'never'),
        'is_refreshing': _is_refreshing,
        'cache_size_bytes': cache_size,
        'cache_size_mb': round(cache_size / (1024 * 1024), 2),
        'latest_categories': config.get('latest', {}).get('categories', []),
        'discover_enabled': config.get('discover', {}).get('enabled', True)
    }
|
||
|
||
|
||
# ============================================================
|
||
# LECTURE/ÉCRITURE DU CACHE
|
||
# ============================================================
|
||
|
||
def get_cached_data(cache_type: str, category: str = None) -> Optional[Dict[str, Any]]:
    """Read a cached payload from disk.

    Args:
        cache_type: 'latest' or 'discover'.
        category: For latest: 'movies', 'tv', 'anime', 'music'.
                  For discover: 'movies', 'tv'.
                  When omitted the file is "<cache_type>.json".

    Returns:
        The parsed JSON payload, or None when the file is missing
        or unreadable.
    """
    try:
        name_parts = [cache_type]
        if category:
            name_parts.append(category)
        filepath = os.path.join(CACHE_DIR, "_".join(name_parts) + ".json")

        if os.path.exists(filepath):
            with open(filepath, 'r') as handle:
                return json.load(handle)
    except Exception as e:
        logger.error(f"Erreur lecture cache {cache_type}/{category}: {e}")

    return None
|
||
|
||
|
||
def save_cached_data(cache_type: str, category: str, data: Any) -> bool:
    """Write a payload to the cache, wrapped in a timestamped envelope.

    The file is "<cache_type>_<category>.json" under CACHE_DIR.

    Returns:
        True on success, False on any error.
    """
    try:
        os.makedirs(CACHE_DIR, exist_ok=True)
        filepath = os.path.join(CACHE_DIR, f"{cache_type}_{category}.json")

        # Envelope records when and for what the data was cached.
        envelope = {
            'timestamp': datetime.now().isoformat(),
            'type': cache_type,
            'category': category,
            'data': data,
        }

        with open(filepath, 'w') as handle:
            json.dump(envelope, handle, ensure_ascii=False)
    except Exception as e:
        logger.error(f"Erreur sauvegarde cache {cache_type}/{category}: {e}")
        return False
    return True
|
||
|
||
|
||
def clear_cache() -> bool:
    """Delete every .json file directly under CACHE_DIR.

    Returns:
        True on success (including when the directory does not exist),
        False if a deletion failed.
    """
    try:
        if not os.path.exists(CACHE_DIR):
            return True
        for name in os.listdir(CACHE_DIR):
            path = os.path.join(CACHE_DIR, name)
            if name.endswith('.json') and os.path.isfile(path):
                os.remove(path)
        return True
    except Exception as e:
        logger.error(f"Erreur suppression cache: {e}")
        return False
|
||
|
||
|
||
# ============================================================
|
||
# JOB DE REFRESH DU CACHE
|
||
# ============================================================
|
||
|
||
def refresh_cache(app=None):
    """Refresh the whole cache (Latest + Discover sections).

    Called by the scheduler or manually. Only one refresh may run at a
    time; concurrent calls are logged and ignored.

    Args:
        app: Optional application object, forwarded to the section
             refreshers.
    """
    global _is_refreshing

    # Atomic test-and-set under the lock. Previously the flag was tested
    # BEFORE acquiring the lock, so two threads could both pass the check
    # and start overlapping refreshes (TOCTOU race).
    with _cache_lock:
        if _is_refreshing:
            logger.info("⏳ Refresh déjà en cours, ignoré")
            return
        _is_refreshing = True

    try:
        logger.info("🔄 Début du refresh du cache...")
        config = get_cache_config()
        meta = get_cache_meta()

        # Mark the refresh as started so the UI can show progress.
        meta['status'] = 'refreshing'
        meta['last_refresh_start'] = datetime.now().isoformat()
        save_cache_meta(meta)

        # Refresh the Latest section if enabled.
        if config.get('latest', {}).get('enabled', True):
            refresh_latest_cache(config, app)

        # Refresh the Discover section if enabled.
        if config.get('discover', {}).get('enabled', True):
            refresh_discover_cache(config, app)

        # Record success and when the next scheduled refresh is due.
        now = datetime.now()
        interval = config.get('interval_minutes', 60)
        next_refresh = now + timedelta(minutes=interval)

        meta['last_refresh'] = now.isoformat()
        meta['next_refresh'] = next_refresh.isoformat()
        meta['status'] = 'success'
        save_cache_meta(meta)

        logger.info(f"✅ Cache rafraîchi avec succès. Prochain refresh: {next_refresh.strftime('%H:%M')}")

    except Exception as e:
        logger.error(f"❌ Erreur refresh cache: {e}")
        # Re-read the metadata so partial in-memory state is not persisted.
        meta = get_cache_meta()
        meta['status'] = 'error'
        meta['last_error'] = str(e)
        save_cache_meta(meta)

    finally:
        with _cache_lock:
            _is_refreshing = False
|
||
|
||
|
||
def refresh_latest_cache(config: Dict, app=None):
    """Refresh the 'latest releases' cache, one file per category.

    Args:
        config: Full cache configuration (uses its 'latest' section).
        app: Unused here; kept for a uniform refresher signature.
    """
    # Imported lazily to avoid a circular import with main.
    from main import fetch_latest_releases_internal

    latest_cfg = config.get('latest', {})
    categories = latest_cfg.get('categories', ['movies', 'tv'])
    trackers = latest_cfg.get('trackers', [])
    limit = latest_cfg.get('limit', 50)

    logger.info(f"📥 Refresh Latest: catégories={categories}, limit={limit}")

    for category in categories:
        try:
            logger.info(f" → Chargement {category}...")

            # Delegate the actual fetch to the internal helper.
            results = fetch_latest_releases_internal(
                trackers_list=trackers or None,
                category=category,
                limit=limit,
            )

            if not results:
                logger.warning(f" ⚠️ {category}: aucun résultat")
                continue

            save_cached_data('latest', category, results)
            logger.info(f" ✅ {category}: {len(results)} résultats cachés")

        except Exception as e:
            logger.error(f" ❌ Erreur {category}: {e}", exc_info=True)
|
||
|
||
|
||
def refresh_discover_cache(config: Dict, app=None):
    """Refresh the Discover cache (TMDb + torrents) for movies and tv.

    Args:
        config: Full cache configuration (uses its 'discover' section).
        app: Unused here; kept for a uniform refresher signature.
    """
    # Imported lazily to avoid a circular import with main.
    from main import fetch_discover_internal

    discover_cfg = config.get('discover', {})
    limit = discover_cfg.get('limit', 30)

    logger.info(f"📥 Refresh Discover: limit={limit}")

    for media_type in ('movies', 'tv'):
        try:
            logger.info(f" → Chargement {media_type}...")

            # Delegate the actual fetch to the internal helper.
            results = fetch_discover_internal(
                media_type=media_type,
                limit=limit,
            )

            if not results:
                logger.warning(f" ⚠️ {media_type}: aucun résultat")
                continue

            save_cached_data('discover', media_type, results)

            # results may be a dict keyed by category; count accordingly.
            if isinstance(results, dict):
                total = sum(len(v) for v in results.values())
            else:
                total = len(results)
            logger.info(f" ✅ {media_type}: {total} résultats cachés")

        except Exception as e:
            logger.error(f" ❌ Erreur {media_type}: {e}", exc_info=True)
|
||
|
||
|
||
# ============================================================
|
||
# SCHEDULER
|
||
# ============================================================
|
||
|
||
def init_scheduler(app=None):
    """Start the background cache scheduler if caching is enabled.

    No-op when caching is disabled in the configuration or when a
    scheduler already exists. May additionally spawn an immediate
    refresh in a daemon thread when the cache is empty or stale.

    Args:
        app: Optional application object forwarded to refresh_cache.
    """
    global _scheduler

    config = get_cache_config()

    # Guard clauses: nothing to do when disabled or already running.
    if not config.get('enabled', False):
        logger.info("ℹ️ Cache désactivé, scheduler non démarré")
        return
    if _scheduler is not None:
        logger.info("⚠️ Scheduler déjà initialisé")
        return

    try:
        _scheduler = BackgroundScheduler()
        interval_minutes = config.get('interval_minutes', 60)

        # Single recurring job that refreshes the whole cache.
        _scheduler.add_job(
            func=lambda: refresh_cache(app),
            trigger=IntervalTrigger(minutes=interval_minutes),
            id='cache_refresh',
            name='Refresh du cache Latest/Discover',
            replace_existing=True,
        )
        _scheduler.start()
        logger.info(f"✅ Scheduler démarré (intervalle: {interval_minutes} min)")

        # Record when the next automatic refresh is due.
        meta = get_cache_meta()
        due = datetime.now() + timedelta(minutes=interval_minutes)
        meta['next_refresh'] = due.isoformat()
        save_cache_meta(meta)

        # Kick off an initial refresh in the background when the cache
        # has never been filled or is older than the interval.
        if should_refresh_now(config):
            logger.info("🔄 Lancement du refresh initial...")
            threading.Thread(target=lambda: refresh_cache(app), daemon=True).start()

    except Exception as e:
        logger.error(f"❌ Erreur démarrage scheduler: {e}")
|
||
|
||
|
||
def should_refresh_now(config: Dict) -> bool:
    """Decide whether an immediate cache refresh is needed.

    True when the cache has never been refreshed, when the last refresh
    is older than the configured interval, or when the stored timestamp
    is unusable.

    Args:
        config: Cache configuration (uses 'interval_minutes').

    Returns:
        True if a refresh should be started now.
    """
    meta = get_cache_meta()

    # Never refreshed yet.
    if not meta.get('last_refresh'):
        return True

    try:
        # Stale if the last refresh is older than the interval.
        last_refresh = datetime.fromisoformat(meta['last_refresh'])
        interval_minutes = config.get('interval_minutes', 60)
        return datetime.now() - last_refresh > timedelta(minutes=interval_minutes)
    except (ValueError, TypeError):
        # Was a bare except; a malformed timestamp or interval still
        # means "refresh now", but KeyboardInterrupt etc. propagate.
        return True
|
||
|
||
|
||
def stop_scheduler():
    """Shut down the background scheduler, if one is running."""
    global _scheduler

    if _scheduler is None:
        return
    # Don't wait for in-flight jobs; the refresh flag guards re-entry.
    _scheduler.shutdown(wait=False)
    _scheduler = None
    logger.info("🛑 Scheduler arrêté")
|
||
|
||
|
||
def restart_scheduler(app=None):
    """Apply a new configuration by stopping then re-initialising the scheduler.

    Args:
        app: Optional application object forwarded to init_scheduler.
    """
    stop_scheduler()
    init_scheduler(app)
|
||
|
||
|
||
def is_scheduler_running() -> bool:
    """Report whether the background cache scheduler is currently active."""
    scheduler = _scheduler
    if scheduler is None:
        return False
    return scheduler.running
|