2946 lines
100 KiB
Python
2946 lines
100 KiB
Python
from flask import Flask, render_template, request, jsonify, session, redirect, url_for
|
||
from functools import wraps
|
||
import logging
|
||
import os
|
||
import re
|
||
import json
|
||
import difflib
|
||
import secrets
|
||
from datetime import datetime, timedelta
|
||
from pathlib import Path
|
||
|
||
# Application version: hard-coded default, overridden by a VERSION file
# when one is found (Docker images ship it at /app/VERSION or /VERSION).
APP_VERSION = "2.0.0"
try:
    # Look for a VERSION file in the likely locations; first match wins.
    possible_paths = [
        Path(__file__).parent / 'VERSION',          # /app/VERSION
        Path(__file__).parent.parent / 'VERSION',   # /VERSION
        Path('/app/VERSION'),
        Path('/VERSION'),
    ]
    for version_file in possible_paths:
        if version_file.exists():
            APP_VERSION = version_file.read_text().strip()
            break
except OSError:
    # Best-effort: an unreadable VERSION file keeps the hard-coded default.
    # (Was a bare `except:`, which also swallowed SystemExit/KeyboardInterrupt.)
    pass
|
||
|
||
from config import Config
|
||
from indexer_manager import IndexerManager
|
||
from torrent_parser import TorrentParser
|
||
from tmdb_api import TMDbAPI
|
||
from lastfm_api import LastFmAPI
|
||
from rss_source import RSSManager
|
||
|
||
# Module de sécurité
|
||
import security
|
||
from security import (
|
||
hash_password, verify_password, is_password_hashed, migrate_password_if_needed,
|
||
rate_limiter, generate_csrf_token, validate_csrf_token,
|
||
get_security_headers, sanitize_input, get_client_ip,
|
||
load_security_config, save_security_config, log_security_event
|
||
)
|
||
|
||
# Logging configuration: level comes from the LOG_LEVEL env var (default
# INFO) and output is mirrored to a log file and to the console.
log_level = os.getenv('LOG_LEVEL', 'INFO')
logging.basicConfig(
    level=getattr(logging, log_level),
    format='%(asctime)s - %(name)s - %(levelname)s - %(message)s',
    handlers=[
        # NOTE(review): assumes /app/logs exists — FileHandler raises otherwise.
        logging.FileHandler('/app/logs/lycostorrent.log'),
        logging.StreamHandler()
    ]
)
logger = logging.getLogger(__name__)
|
||
|
||
# Flask initialisation
app = Flask(__name__)
app.config['JSON_AS_ASCII'] = False  # keep accented characters readable in JSON responses

# Security constants (request-validation limits)
MAX_QUERY_LENGTH = 200
MAX_TRACKERS = 50
MAX_LIMIT = 100

# Security and authentication configuration.
# NOTE(review): without SECRET_KEY set, a random key is generated at every
# start-up, which invalidates all existing sessions on restart.
app.secret_key = os.getenv('SECRET_KEY', secrets.token_hex(32))
app.config['PERMANENT_SESSION_LIFETIME'] = int(os.getenv('SESSION_LIFETIME', 86400 * 7))  # default: 7 days
app.config['SESSION_COOKIE_SECURE'] = os.getenv('SESSION_COOKIE_SECURE', 'false').lower() == 'true'
app.config['SESSION_COOKIE_HTTPONLY'] = True
app.config['SESSION_COOKIE_SAMESITE'] = 'Lax'

# Credentials (supplied through environment variables)
AUTH_USERNAME = os.getenv('AUTH_USERNAME', 'admin')
AUTH_PASSWORD_ENV = os.getenv('AUTH_PASSWORD', '')  # password taken from the environment

# Load or migrate the hashed password
_security_config = load_security_config()
if AUTH_PASSWORD_ENV:
    # If the env password is not hashed yet, hash it and persist the hash
    if not is_password_hashed(AUTH_PASSWORD_ENV):
        _hashed, _migrated = migrate_password_if_needed(AUTH_PASSWORD_ENV)
        if _migrated:
            _security_config['password_hash'] = _hashed
            _security_config['password_migrated'] = True
            _security_config['last_password_change'] = datetime.now().isoformat()
            save_security_config(_security_config)
            logger.info("🔐 Mot de passe migré vers format sécurisé (hashé)")
        AUTH_PASSWORD_HASH = _hashed
    else:
        AUTH_PASSWORD_HASH = AUTH_PASSWORD_ENV
else:
    # No env password: fall back to the previously persisted hash. An empty
    # hash disables authentication entirely (see login_required).
    AUTH_PASSWORD_HASH = _security_config.get('password_hash', '')

# Legacy aliases for compatibility (never use directly)
AUTH_PASSWORD = AUTH_PASSWORD_HASH
ADMIN_PASSWORD = AUTH_PASSWORD_HASH
|
||
|
||
|
||
# ============================================================
|
||
# SÉCURITÉ & AUTHENTIFICATION
|
||
# ============================================================
|
||
|
||
def login_required(f):
    """Decorator protecting a route: requires an authenticated session.

    When no password hash is configured, authentication is disabled and
    the wrapped view is served directly. JSON clients receive a 401
    payload on rejection; browser clients are redirected to the login
    page. An expired session is cleared before rejection.
    """
    @wraps(f)
    def wrapper(*args, **kwargs):
        # Auth disabled entirely when no password is configured.
        if not AUTH_PASSWORD_HASH:
            return f(*args, **kwargs)

        def reject(message):
            # JSON clients get a 401 payload; browsers go to the login form.
            if request.is_json:
                return jsonify({'success': False, 'error': message}), 401
            return redirect(url_for('login'))

        # Must be logged in.
        if not session.get('authenticated'):
            return reject('Authentification requise')

        # Enforce session expiry.
        expires_at = session.get('expires_at')
        if expires_at and datetime.fromisoformat(expires_at) < datetime.now():
            session.clear()
            return reject('Session expirée')

        return f(*args, **kwargs)

    return wrapper
|
||
|
||
|
||
def admin_required(f):
    """Guard for admin routes; currently just an alias of login_required."""
    return login_required(f)
|
||
|
||
|
||
@app.before_request
def check_rate_limit():
    """Enforce the global per-IP rate limit before every request."""
    # Static assets are exempt from rate limiting.
    if request.path.startswith('/static/'):
        return None

    client_ip = get_client_ip(request)

    # Allow the request through when the IP is under the limit.
    if not rate_limiter.is_rate_limited(client_ip):
        return None

    log_security_event('RATE_LIMITED', client_ip, f"Path: {request.path}")
    return jsonify({'success': False, 'error': 'Trop de requêtes. Réessayez plus tard.'}), 429
|
||
|
||
|
||
@app.after_request
def add_security_headers(response):
    """Attach the standard HTTP security headers to every response."""
    for name, value in get_security_headers().items():
        # Respect any header the view already set explicitly.
        if name not in response.headers:
            response.headers[name] = value
    return response
|
||
|
||
|
||
@app.route('/login', methods=['GET', 'POST'])
def login():
    """Secure login page.

    GET renders the form (with a CSRF token). POST validates the CSRF
    token and credentials, enforcing a per-IP lockout via the rate
    limiter, then populates the session on success.
    """
    # With no password configured, authentication is disabled: go home.
    if not AUTH_PASSWORD_HASH:
        return redirect(url_for('index'))

    # Already logged in: go straight to the home page.
    if session.get('authenticated'):
        return redirect(url_for('index'))

    ip = get_client_ip(request)
    error = None
    locked_message = None

    # Refuse the form entirely while this IP is locked out.
    is_locked, remaining = rate_limiter.is_locked_out(ip)
    if is_locked:
        locked_message = f"Trop de tentatives. Réessayez dans {remaining} secondes."
        log_security_event('LOCKOUT_ACTIVE', ip)
        return render_template('login.html', error=None, locked_message=locked_message)

    if request.method == 'POST':
        username = sanitize_input(request.form.get('username', ''), 50)
        password = request.form.get('password', '')
        csrf_token = request.form.get('csrf_token', '')

        # Validate the CSRF token before touching the credentials.
        if not validate_csrf_token(session.get('csrf_token'), csrf_token):
            log_security_event('CSRF_INVALID', ip, f"User: {username}")
            error = "Token de sécurité invalide. Rechargez la page."
            # Issue a fresh token for the retry.
            session['csrf_token'] = generate_csrf_token()
            return render_template('login.html', error=error, csrf_token=session['csrf_token'])

        # Check the credentials against the stored hash.
        if username == AUTH_USERNAME and verify_password(password, AUTH_PASSWORD_HASH):
            # Successful login: populate the session.
            session['authenticated'] = True
            session['username'] = username
            session['login_time'] = datetime.now().isoformat()
            session['expires_at'] = (datetime.now() + timedelta(seconds=app.config['PERMANENT_SESSION_LIFETIME'])).isoformat()
            session['ip'] = ip
            session.permanent = True

            # Reset the failed-attempt counter for this IP.
            rate_limiter.record_successful_login(ip)

            log_security_event('LOGIN_SUCCESS', ip, f"User: {username}")
            logger.info(f"🔐 Connexion réussie: {username} depuis {ip}")

            # Redirect to the requested page, or home by default.
            next_page = request.args.get('next', url_for('index'))
            # Security: never redirect to an external URL (open redirect).
            if not next_page.startswith('/'):
                next_page = url_for('index')
            return redirect(next_page)
        else:
            # Failed login: record the attempt.
            rate_limiter.record_failed_attempt(ip, username)
            log_security_event('LOGIN_FAILED', ip, f"User: {username}")

            # The failed attempt may have just triggered a lockout.
            is_locked, remaining = rate_limiter.is_locked_out(ip)
            if is_locked:
                locked_message = f"Trop de tentatives. Réessayez dans {remaining} secondes."
                return render_template('login.html', error=None, locked_message=locked_message)

            error = "Identifiants incorrects"

    # Make sure a CSRF token exists for the form.
    if 'csrf_token' not in session:
        session['csrf_token'] = generate_csrf_token()

    return render_template('login.html', error=error, csrf_token=session.get('csrf_token'), locked_message=locked_message)
|
||
|
||
|
||
@app.route('/logout')
def logout():
    """Terminate the current session and return to the login page."""
    client_ip = get_client_ip(request)
    username = session.get('username', 'unknown')
    session.clear()
    log_security_event('LOGOUT', client_ip, f"User: {username}")
    logger.info(f"🔓 Déconnexion: {username}")
    return redirect(url_for('login'))
|
||
|
||
|
||
# Legacy routes kept for backward compatibility
@app.route('/admin/login', methods=['GET', 'POST'])
def admin_login():
    """Legacy endpoint: forward to the current login page."""
    return redirect(url_for('login'))
|
||
|
||
|
||
@app.route('/admin/logout')
def admin_logout():
    """Legacy endpoint: forward to the current logout handler."""
    return redirect(url_for('logout'))
|
||
|
||
# Service initialisation
config = Config()
indexer_manager = IndexerManager(config)
parser = TorrentParser()
tmdb = TMDbAPI(config.tmdb_api_key)
lastfm = LastFmAPI(config.lastfm_api_key)
rss_manager = RSSManager()

# Alias kept for compatibility with pre-existing code
jackett = indexer_manager

# Jackett category mapping; id None means "no category filter".
CATEGORIES = {
    'all': {'name': 'Tout', 'id': None},
    'console': {'name': 'Console', 'id': '1000'},
    'movies': {'name': 'Films', 'id': '2000'},
    'audio': {'name': 'Audio', 'id': '3000'},
    'pc': {'name': 'PC', 'id': '4000'},
    'tv': {'name': 'Séries TV', 'id': '5000'},
    'books': {'name': 'Livres', 'id': '7000'},
    'other': {'name': 'Autre', 'id': '8000'},
}
|
||
|
||
|
||
@app.route('/')
@login_required
def index():
    """Render the main search page."""
    return render_template('index.html')
|
||
|
||
|
||
@app.route('/discover')
@login_required
def discover():
    """Discover page (TMDb releases); redirects home when the module is off."""
    enabled = load_modules_config().get('discover', False)
    if not enabled:
        return redirect(url_for('index'))
    return render_template('discover.html')
|
||
|
||
|
||
@app.route('/api/trackers', methods=['GET'])
@login_required
def get_trackers():
    """List the available trackers (Jackett + Prowlarr), optionally with RSS feeds."""
    try:
        trackers = indexer_manager.get_indexers()

        # RSS feeds are only appended on explicit request (default: off for search).
        include_rss = request.args.get('include_rss', 'false').lower() == 'true'

        if include_rss:
            rss_feeds = rss_manager.get_feeds()
            for feed in rss_feeds:
                if not feed.get('enabled', True):
                    continue
                trackers.append({
                    'id': f"rss:{feed['id']}",
                    'name': feed['name'],
                    'sources': ['rss'],
                    'category': feed.get('category', 'all'),
                    'type': 'rss'
                })
            logger.info(f"✅ {len(trackers)} trackers récupérés (dont {len(rss_feeds)} RSS)")
        else:
            logger.info(f"✅ {len(trackers)} trackers récupérés (sans RSS)")

        return jsonify({
            'success': True,
            'trackers': trackers,
            'sources': indexer_manager.sources_status
        })
    except Exception as exc:
        logger.error(f"❌ Erreur récupération trackers: {exc}")
        return jsonify({
            'success': False,
            'error': str(exc)
        }), 500
|
||
|
||
|
||
@app.route('/api/status', methods=['GET'])
@login_required
def get_status():
    """Report the health of the configured indexer sources."""
    payload = {
        'success': True,
        'sources': indexer_manager.sources_status,
        'has_any_source': indexer_manager.has_any_source
    }
    return jsonify(payload)
|
||
|
||
|
||
@app.route('/api/modules', methods=['GET'])
def get_modules():
    """Return which optional modules are enabled.

    NOTE(review): this endpoint is not behind login_required — presumably
    intentional so the UI can query it pre-login; confirm.
    """
    try:
        return jsonify({
            'success': True,
            'modules': load_modules_config()
        })
    except Exception as exc:
        return jsonify({
            'success': False,
            'error': str(exc)
        })
|
||
|
||
|
||
@app.route('/api/admin/modules', methods=['GET'])
@login_required
def get_admin_modules():
    """Admin endpoint: return the module-activation flags."""
    try:
        current = load_modules_config()
        return jsonify({'success': True, 'modules': current})
    except Exception as exc:
        return jsonify({'success': False, 'error': str(exc)})
|
||
|
||
|
||
@app.route('/api/admin/modules', methods=['POST'])
@login_required
def save_admin_modules():
    """Persist the module-activation flags sent by the admin UI."""
    try:
        modules = request.json.get('modules', {})
        save_modules_config(modules)
        logger.info(f"✅ Modules sauvegardés: {modules}")
        return jsonify({'success': True, 'message': 'Modules sauvegardés'})
    except Exception as exc:
        return jsonify({'success': False, 'error': str(exc)})
|
||
|
||
|
||
def load_modules_config():
    """Load the module-activation configuration.

    Reads /app/config/modules.json when present; falls back to the
    defaults ('search' and 'latest' on, 'discover' off) when the file is
    missing, unreadable or corrupt.

    Returns:
        dict: mapping of module name -> enabled flag.
    """
    config_path = '/app/config/modules.json'

    default_modules = {
        'search': True,
        'latest': True,
        'discover': False
    }

    # Best-effort read: a broken config falls back to defaults, but the
    # failure is logged instead of being silently swallowed (was bare
    # `except: pass`, and re-imported json needlessly).
    try:
        if os.path.exists(config_path):
            with open(config_path, 'r') as f:
                return json.load(f)
    except (OSError, json.JSONDecodeError) as e:
        logger.warning(f"Lecture de {config_path} impossible: {e}")

    return default_modules
|
||
|
||
|
||
def save_modules_config(modules):
    """Persist the module-activation configuration as JSON.

    Args:
        modules: dict mapping module name -> enabled flag.

    Raises:
        OSError: if the config directory or file cannot be written.
    """
    # Fix: dropped the redundant local `import json` that shadowed the
    # module-level import.
    config_path = '/app/config/modules.json'

    # Ensure the config directory exists before writing.
    os.makedirs(os.path.dirname(config_path), exist_ok=True)

    with open(config_path, 'w') as f:
        json.dump(modules, f, indent=2)
|
||
|
||
|
||
@app.route('/api/admin/discover-trackers', methods=['GET'])
@login_required
def get_discover_trackers():
    """Return the tracker selection configured for the Discover page."""
    try:
        selection = load_discover_trackers_config()
        return jsonify({'success': True, 'trackers': selection})
    except Exception as exc:
        return jsonify({'success': False, 'error': str(exc)})
|
||
|
||
|
||
@app.route('/api/admin/discover-trackers', methods=['POST'])
@login_required
def save_discover_trackers():
    """Persist the tracker selection used by the Discover page."""
    try:
        trackers = request.json.get('trackers', [])
        save_discover_trackers_config(trackers)
        logger.info(f"✅ Trackers Discover sauvegardés: {len(trackers)} trackers")
        return jsonify({'success': True, 'message': 'Trackers sauvegardés'})
    except Exception as exc:
        return jsonify({'success': False, 'error': str(exc)})
|
||
|
||
|
||
def load_discover_trackers_config():
    """Load the tracker selection for the Discover page.

    Returns:
        list | None: the configured tracker ids, or None when no config
        exists (meaning: use all trackers). A missing, unreadable or
        corrupt file also yields None.
    """
    config_path = '/app/config/discover_trackers.json'

    # Best-effort read (was a bare `except:`; narrowed to the errors the
    # read can actually produce).
    try:
        if os.path.exists(config_path):
            with open(config_path, 'r') as f:
                return json.load(f)
    except (OSError, json.JSONDecodeError) as e:
        logger.warning(f"Lecture de {config_path} impossible: {e}")

    return None  # None = all trackers
|
||
|
||
|
||
def save_discover_trackers_config(trackers):
    """Write the Discover tracker selection to its JSON config file."""
    config_path = '/app/config/discover_trackers.json'

    # Create the parent directory on first use.
    os.makedirs(os.path.dirname(config_path), exist_ok=True)

    with open(config_path, 'w') as handle:
        json.dump(trackers, handle, indent=2)
|
||
|
||
|
||
@app.route('/api/categories', methods=['GET'])
@login_required
def get_categories():
    """Return the static category mapping used by the search UI."""
    payload = {'success': True, 'categories': CATEGORIES}
    return jsonify(payload)
|
||
|
||
|
||
# ============================================================
|
||
# API DISCOVER - TMDb
|
||
# ============================================================
|
||
|
||
@app.route('/api/discover/<category>', methods=['GET'])
@login_required
def discover_category(category):
    """Return recent movies or popular TV series from TMDb.

    Args:
        category: 'movies' (theatrical + streaming releases < 3 months)
            or 'tv' (currently airing / popular series).

    Returns:
        JSON with 'results', 'media_type' and 'total' on success, or an
        error payload when the key is missing / the category is invalid.
    """
    # Fix: removed dead code (unused `import requests`, unused datetime
    # import and unused `base_url`) — the fetch helpers handle all of it.
    try:
        if not config.tmdb_api_key:
            return jsonify({
                'success': False,
                'error': 'Clé API TMDb non configurée'
            })

        # Dispatch to the dedicated fetch helper for each category.
        if category == 'movies':
            results = _fetch_recent_movies()
            media_type = 'movie'
        elif category == 'tv':
            results = _fetch_popular_tv()
            media_type = 'tv'
        else:
            return jsonify({
                'success': False,
                'error': 'Catégorie invalide. Utilisez "movies" ou "tv"'
            })

        return jsonify({
            'success': True,
            'results': results,
            'media_type': media_type,
            'total': len(results)
        })

    except Exception as e:
        logger.error(f"Erreur Discover TMDb: {e}")
        return jsonify({
            'success': False,
            'error': str(e)
        })
|
||
|
||
|
||
def _fetch_recent_movies():
    """Fetch recent movies from TMDb: theatrical plus streaming releases < 3 months old.

    Merges /movie/now_playing with a /discover/movie query filtered on
    digital/physical/TV release types, dedupes by TMDb id, tags each
    entry with a 'source' field ('cinema' or 'streaming') and returns
    the 30 most popular.
    """
    import requests
    from datetime import datetime, timedelta

    base_url = 'https://api.themoviedb.org/3'
    params = {
        'api_key': config.tmdb_api_key,
        'language': 'fr-FR',
        'region': 'FR'
    }

    all_movies = []
    seen_ids = set()  # dedupe by TMDb id across the two endpoints

    # Cutoff: 3 months back from today.
    three_months_ago = (datetime.now() - timedelta(days=90)).strftime('%Y-%m-%d')
    today = datetime.now().strftime('%Y-%m-%d')

    # 1. Movies currently in theatres (now_playing).
    try:
        response = requests.get(f'{base_url}/movie/now_playing', params=params, timeout=10)
        if response.status_code == 200:
            data = response.json()
            for movie in data.get('results', [])[:20]:
                if movie['id'] not in seen_ids:
                    movie['source'] = 'cinema'
                    all_movies.append(movie)
                    seen_ids.add(movie['id'])
    except Exception as e:
        logger.warning(f"Erreur now_playing: {e}")

    # 2. Recent streaming releases (discover endpoint with filters).
    try:
        discover_params = {
            **params,
            'sort_by': 'popularity.desc',
            'with_release_type': '4|5|6',  # 4=Digital, 5=Physical, 6=TV
            'release_date.gte': three_months_ago,
            'release_date.lte': today,
            'vote_count.gte': 10  # require a few votes to filter out obscure titles
        }
        response = requests.get(f'{base_url}/discover/movie', params=discover_params, timeout=10)
        if response.status_code == 200:
            data = response.json()
            for movie in data.get('results', [])[:30]:
                if movie['id'] not in seen_ids:
                    movie['source'] = 'streaming'
                    all_movies.append(movie)
                    seen_ids.add(movie['id'])
    except Exception as e:
        logger.warning(f"Erreur discover streaming: {e}")

    # Sort by popularity, most popular first.
    all_movies.sort(key=lambda x: x.get('popularity', 0), reverse=True)

    logger.info(f"📽️ Films récents: {len(all_movies)} (cinéma + streaming < 3 mois)")

    return all_movies[:30]
|
||
|
||
|
||
def _fetch_popular_tv():
    """Fetch popular TV series currently airing (TMDb, fr-FR).

    Merges /tv/on_the_air with /tv/popular, dedupes by TMDb id, tags each
    entry with a 'source' field ('on_air' or 'popular') and returns the
    30 most popular.
    """
    import requests

    base_url = 'https://api.themoviedb.org/3'
    params = {
        'api_key': config.tmdb_api_key,
        'language': 'fr-FR'
    }

    all_series = []
    seen_ids = set()  # dedupe by TMDb id across the two endpoints

    # 1. Series currently airing (on_the_air).
    try:
        response = requests.get(f'{base_url}/tv/on_the_air', params=params, timeout=10)
        if response.status_code == 200:
            data = response.json()
            for serie in data.get('results', [])[:20]:
                if serie['id'] not in seen_ids:
                    serie['source'] = 'on_air'
                    all_series.append(serie)
                    seen_ids.add(serie['id'])
    except Exception as e:
        logger.warning(f"Erreur on_the_air: {e}")

    # 2. Popular series (to fill out the list).
    try:
        response = requests.get(f'{base_url}/tv/popular', params=params, timeout=10)
        if response.status_code == 200:
            data = response.json()
            for serie in data.get('results', [])[:20]:
                if serie['id'] not in seen_ids:
                    serie['source'] = 'popular'
                    all_series.append(serie)
                    seen_ids.add(serie['id'])
    except Exception as e:
        logger.warning(f"Erreur tv/popular: {e}")

    # Sort by popularity, most popular first.
    all_series.sort(key=lambda x: x.get('popularity', 0), reverse=True)

    logger.info(f"📺 Séries en cours: {len(all_series)}")

    return all_series[:30]
|
||
|
||
|
||
@app.route('/api/discover/detail/<media_type>/<int:tmdb_id>', methods=['GET'])
@login_required
def discover_detail(media_type, tmdb_id):
    """Fetch TMDb details for a movie/TV show, including a trailer URL.

    Looks for a YouTube trailer in the fr-FR metadata first, then falls
    back to the en-US video list. The resulting embed URL (or None) is
    attached to the payload as 'trailer_url'.
    """
    # Fixes: the duplicated trailer-selection loops are factored into
    # _pick_youtube_trailer, and the bare `except:` on the EN fallback is
    # narrowed so it no longer swallows SystemExit/KeyboardInterrupt.
    try:
        if not config.tmdb_api_key:
            return jsonify({
                'success': False,
                'error': 'Clé API TMDb non configurée'
            })

        import requests

        if media_type not in ['movie', 'tv']:
            return jsonify({
                'success': False,
                'error': 'Type de média invalide'
            })

        # Fetch the details with the videos appended in a single call.
        url = f'https://api.themoviedb.org/3/{media_type}/{tmdb_id}'
        params = {
            'api_key': config.tmdb_api_key,
            'language': 'fr-FR',
            'append_to_response': 'videos'
        }

        response = requests.get(url, params=params, timeout=10)
        response.raise_for_status()
        detail = response.json()

        # Pick a YouTube trailer (Trailer preferred over Teaser).
        trailer_url = _pick_youtube_trailer(detail.get('videos', {}).get('results', []))

        # No FR video: fall back to the English video list.
        if not trailer_url:
            url_en = f'https://api.themoviedb.org/3/{media_type}/{tmdb_id}/videos'
            params_en = {
                'api_key': config.tmdb_api_key,
                'language': 'en-US'
            }
            try:
                response_en = requests.get(url_en, params=params_en, timeout=5)
                if response_en.ok:
                    trailer_url = _pick_youtube_trailer(response_en.json().get('results', []))
            except (requests.RequestException, ValueError):
                # Best-effort fallback: a missing trailer is not an error.
                pass

        detail['trailer_url'] = trailer_url

        return jsonify({
            'success': True,
            'detail': detail
        })

    except Exception as e:
        logger.error(f"Erreur détail TMDb: {e}")
        return jsonify({
            'success': False,
            'error': str(e)
        })


def _pick_youtube_trailer(videos):
    """Return the YouTube embed URL of the first Trailer (or Teaser) in *videos*."""
    for video_type in ['Trailer', 'Teaser']:
        for video in videos:
            if video.get('site') == 'YouTube' and video.get('type') == video_type:
                return f"https://www.youtube.com/embed/{video.get('key')}"
    return None
|
||
|
||
|
||
@app.route('/api/discover/search-torrents', methods=['POST'])
@login_required
def discover_search_torrents():
    """Search the configured trackers for torrents matching a TMDb title.

    Expects JSON with 'title', 'original_title', 'year' and 'media_type'
    ('movie' or 'tv'). Every candidate query is attempted, results are
    deduplicated by torrent title, filtered for relevance and the 30
    best results (by seeders) are returned.
    """
    # Fixes: `except TimeoutError` previously used the BUILTIN TimeoutError,
    # which before Python 3.11 is a different class from
    # concurrent.futures.TimeoutError raised by as_completed(), so the
    # global timeout was never caught. The worker's bare `except:` is also
    # narrowed, and the unused `tmdb_id` local is dropped.
    try:
        data = request.json
        title = data.get('title', '')
        original_title = data.get('original_title', '')
        year = data.get('year', '')
        media_type = data.get('media_type', 'movie')

        if not title and not original_title:
            return jsonify({
                'success': False,
                'error': 'Requête vide'
            })

        # Jackett category: movies vs TV series.
        cat_id = '2000' if media_type == 'movie' else '5000'

        # Build every search query to try — ALL of them are attempted to
        # maximise results.
        search_queries = []

        # Original title (usually English).
        if original_title and _is_latin_text(original_title):
            if year:
                search_queries.append(f"{original_title} {year}")
            search_queries.append(original_title)

        # Localised (French) title.
        if title and _is_latin_text(title) and title != original_title:
            if year:
                search_queries.append(f"{title} {year}")
            search_queries.append(title)

        # No latin title at all: try the raw titles anyway.
        if not search_queries:
            if original_title:
                search_queries.append(original_title)
            if title and title != original_title:
                search_queries.append(title)

        logger.info(f"🔍 Discover search: queries={search_queries} (type: {media_type})")

        # Fan the search out over the trackers in parallel.
        from concurrent.futures import ThreadPoolExecutor, as_completed
        from concurrent.futures import TimeoutError as FuturesTimeoutError

        all_trackers = indexer_manager.get_indexers()

        # Restrict to the trackers configured for Discover (None = all).
        configured_trackers = load_discover_trackers_config()
        if configured_trackers is not None:
            all_trackers = [t for t in all_trackers if t.get('id') in configured_trackers]
            logger.info(f"🎯 Discover: {len(all_trackers)} trackers configurés")

        all_results = []
        seen_titles = set()  # dedupe by lowercased torrent title

        def search_tracker_with_query(tracker, query):
            # One tracker, one query; any failure simply yields no results.
            try:
                tracker_id = tracker.get('id', '')
                return jackett.search(query, indexers=[tracker_id], category=cat_id, max_results=30)
            except Exception:
                return []

        # Try ALL the queries to maximise results.
        for search_query in search_queries:
            logger.info(f"🔍 Essai avec: '{search_query}'")

            with ThreadPoolExecutor(max_workers=10) as executor:
                futures = {executor.submit(search_tracker_with_query, t, search_query): t for t in all_trackers}

                try:
                    for future in as_completed(futures, timeout=20):
                        try:
                            results = future.result(timeout=1)
                            for r in results:
                                # Skip duplicates across trackers/queries.
                                title_key = r.get('Title', '').lower()
                                if title_key not in seen_titles:
                                    seen_titles.add(title_key)
                                    all_results.append(r)
                        except Exception:
                            pass
                except FuturesTimeoutError:
                    # Global timeout: keep whatever was collected so far.
                    logger.warning(f"⏱️ Timeout recherche Discover, {len(all_results)} résultats récupérés")

            # Stop early once enough results have been gathered.
            if len(all_results) >= 30:
                logger.info(f"✅ Assez de résultats ({len(all_results)}), arrêt des recherches")
                break

        # Parse and enrich the raw results with metadata.
        for torrent in all_results:
            parser.enrich_torrent(torrent)

        # Keep only torrents that actually match the searched title.
        filtered_results = _filter_relevant_torrents(all_results, title, original_title, year)

        # Best-seeded first.
        filtered_results.sort(key=lambda x: x.get('Seeders', 0) or 0, reverse=True)

        logger.info(f"✅ Discover: {len(filtered_results)}/{len(all_results)} torrents pertinents")

        return jsonify({
            'success': True,
            'results': filtered_results[:30],  # cap at 30
            'total': len(filtered_results)
        })

    except Exception as e:
        logger.error(f"Erreur recherche torrents Discover: {e}")
        return jsonify({
            'success': False,
            'error': str(e)
        })
|
||
|
||
|
||
def _filter_relevant_torrents(torrents, title, original_title, year):
    """Filter torrents down to those that plausibly match the searched title.

    Heuristics: significant-word overlap with the normalized reference
    titles, sequel-number matching (arabic and roman spellings), and a
    +/- 1 year tolerance on the release year found in the torrent name.
    Returns the input unchanged when no usable reference title exists.
    """
    if not torrents:
        return []

    # Normalized reference titles (localized + original).
    ref_titles = []
    if title:
        ref_titles.append(_normalize_for_matching(title))
    if original_title and original_title != title:
        ref_titles.append(_normalize_for_matching(original_title))

    if not ref_titles:
        return torrents

    # Significant words: 4+ characters, excluding common stop words.
    stop_words = {'the', 'les', 'der', 'das', 'die', 'and', 'for', 'with', 'dans', 'pour', 'avec',
                  'from', 'that', 'this', 'une', 'des', 'aux', 'sur', 'par', 'tout', 'tous'}

    significant_words = set()
    for ref in ref_titles:
        words = ref.split()
        for w in words:
            if len(w) >= 4 and w not in stop_words:
                significant_words.add(w)

    # Detect a sequel number (2, 3, II, III, ...) in the reference titles.
    sequel_number = None
    for ref in ref_titles:
        # Look for a digit or roman numeral appearing as its own word.
        match = re.search(r'\b([2-9]|ii|iii|iv|v|vi|vii|viii|ix|x)\b', ref, re.IGNORECASE)
        if match:
            sequel_number = match.group(1).lower()
            break

    # No significant word: fall back to the first 3+ character word.
    if not significant_words:
        for ref in ref_titles:
            words = [w for w in ref.split() if len(w) >= 3 and w not in stop_words]
            if words:
                significant_words.add(words[0])
                break

    # Minimum word-match count, scaled to how many keywords we have.
    num_significant = len(significant_words)
    if num_significant <= 1:
        min_matches = 1
    elif num_significant <= 3:
        min_matches = 2
    else:
        min_matches = max(2, num_significant // 2)

    logger.info(f"🔍 Filtrage: titres={ref_titles}, mots_clés={significant_words}, min_requis={min_matches}, année={year}, suite={sequel_number}")

    if not significant_words:
        return torrents

    relevant = []

    for torrent in torrents:
        torrent_title = torrent.get('Title', '')
        if not torrent_title:
            continue

        torrent_normalized = _normalize_for_matching(torrent_title)

        # Count how many significant words appear in the torrent title.
        matches = 0
        for word in significant_words:
            if word in torrent_normalized:
                matches += 1

        # Enforce the minimum match count.
        if matches < min_matches:
            continue

        # For sequels (2, 3, ...), require the number to be present too.
        if sequel_number:
            # Compare both arabic and roman spellings of the number.
            roman_to_arabic = {'ii': '2', 'iii': '3', 'iv': '4', 'v': '5', 'vi': '6', 'vii': '7', 'viii': '8', 'ix': '9', 'x': '10'}
            sequel_variants = [sequel_number]
            if sequel_number in roman_to_arabic:
                sequel_variants.append(roman_to_arabic[sequel_number])
            elif sequel_number.isdigit():
                # Add the roman-numeral spelling.
                arabic_to_roman = {'2': 'ii', '3': 'iii', '4': 'iv', '5': 'v', '6': 'vi', '7': 'vii', '8': 'viii', '9': 'ix', '10': 'x'}
                if sequel_number in arabic_to_roman:
                    sequel_variants.append(arabic_to_roman[sequel_number])

            has_sequel_number = False
            for variant in sequel_variants:
                if re.search(rf'\b{variant}\b', torrent_normalized, re.IGNORECASE):
                    has_sequel_number = True
                    break

            if not has_sequel_number:
                continue

        # Check the release year when available (1-year tolerance).
        if year:
            try:
                search_year = int(year)
                # Extract a 4-digit year from the raw torrent title.
                year_match = re.search(r'\b(19\d{2}|20\d{2})\b', torrent_title)
                if year_match:
                    torrent_year = int(year_match.group(1))
                    # Exclude when the years differ by more than one.
                    if abs(torrent_year - search_year) > 1:
                        continue
            except (ValueError, TypeError):
                pass

        relevant.append(torrent)

    if not relevant:
        logger.info(f"ℹ️ Aucun torrent pertinent trouvé pour: {ref_titles}")
    else:
        logger.info(f"✅ Filtrage: {len(relevant)}/{len(torrents)} torrents pertinents")

    return relevant
|
||
|
||
|
||
def _normalize_for_matching(text):
|
||
"""Normalise un texte pour la comparaison"""
|
||
if not text:
|
||
return ''
|
||
# Minuscules, supprimer accents et caractères spéciaux
|
||
import unicodedata
|
||
text = unicodedata.normalize('NFD', text)
|
||
text = ''.join(c for c in text if unicodedata.category(c) != 'Mn')
|
||
text = text.lower()
|
||
text = re.sub(r'[^a-z0-9\s]', ' ', text)
|
||
text = re.sub(r'\s+', ' ', text).strip()
|
||
return text
|
||
|
||
|
||
def _is_latin_text(text):
|
||
"""Vérifie si le texte utilise principalement des caractères latins"""
|
||
if not text:
|
||
return False
|
||
latin_count = sum(1 for c in text if c.isascii() or c in 'àâäéèêëïîôùûüÿœæçÀÂÄÉÈÊËÏÎÔÙÛÜŸŒÆÇ')
|
||
return latin_count / len(text) > 0.5
|
||
|
||
|
||
@app.route('/api/search', methods=['POST'])
@login_required
def search():
    """
    Search Jackett and return the results with parsed metadata.

    Runs one query per selected tracker in parallel threads, enriches
    each torrent with parsed metadata, deduplicates by normalized title
    (keeping the entry with the most seeders) and returns the results
    sorted by decreasing seeders. Filtering happens on the frontend.

    Fix: a global `as_completed` timeout previously escaped to the outer
    exception handler, discarding all partial results and answering 500;
    it is now caught so the results gathered in time are still returned.
    """
    try:
        data = request.json
        if not data:
            return jsonify({
                'success': False,
                'error': 'Données JSON invalides'
            }), 400

        query = data.get('query', '').strip()
        trackers = data.get('trackers', [])
        category = data.get('category', 'all')

        # Hardened input validation
        if not query:
            return jsonify({
                'success': False,
                'error': 'Requête de recherche vide'
            }), 400

        if len(query) > MAX_QUERY_LENGTH:
            return jsonify({
                'success': False,
                'error': f'Requête trop longue (max {MAX_QUERY_LENGTH} caractères)'
            }), 400

        if not trackers:
            return jsonify({
                'success': False,
                'error': 'Aucun tracker sélectionné'
            }), 400

        if not isinstance(trackers, list) or len(trackers) > MAX_TRACKERS:
            return jsonify({
                'success': False,
                'error': f'Nombre de trackers invalide (max {MAX_TRACKERS})'
            }), 400

        # Unknown categories silently fall back to 'all'
        if category not in CATEGORIES:
            category = 'all'

        logger.info(f"🔍 Recherche: '{query}' | Catégorie: {category} | Trackers: {len(trackers)}")

        # Jackett category id for this UI category (None when 'all')
        cat_id = CATEGORIES.get(category, {}).get('id')

        # ============================================================
        # PARALLEL QUERIES, ONE PER TRACKER
        # ============================================================
        from concurrent.futures import ThreadPoolExecutor, as_completed, TimeoutError as FuturesTimeout
        import time

        start_time = time.time()
        all_results = []

        def search_tracker(tracker):
            """Search a single tracker; any failure yields an empty list."""
            try:
                results = jackett.search(query, indexers=[tracker], category=cat_id, max_results=500)
                logger.info(f"✅ {tracker}: {len(results)} résultats")
                return results
            except Exception as e:
                logger.warning(f"⚠️ Erreur {tracker}: {e}")
                return []

        # Cap the pool so a long tracker list doesn't spawn unbounded threads
        max_workers = min(10, len(trackers))

        with ThreadPoolExecutor(max_workers=max_workers) as executor:
            futures = {executor.submit(search_tracker, t): t for t in trackers}

            try:
                for future in as_completed(futures, timeout=120):
                    tracker = futures[future]
                    try:
                        results = future.result()
                        all_results.extend(results)
                    except Exception as e:
                        logger.warning(f"⚠️ Erreur {tracker}: {e}")
            except FuturesTimeout:
                # Keep whatever finished in time instead of failing the request
                logger.warning("⚠️ Timeout global de la recherche, résultats partiels conservés")

        elapsed = time.time() - start_time
        logger.info(f"📦 Recherche parallèle: {len(all_results)} résultats bruts (en {elapsed:.2f}s)")

        # Parse each torrent title to extract metadata
        for torrent in all_results:
            parser.enrich_torrent(torrent)

        # Deduplication: keep the torrent with the most seeders per title
        seen = {}
        for torrent in all_results:
            # Normalized key so "Foo.Bar" and "foo bar" collapse together
            key = _normalize_title(torrent.get('Title', ''))
            current_seeders = torrent.get('Seeders', 0) or 0

            if key not in seen:
                seen[key] = torrent
            else:
                existing_seeders = seen[key].get('Seeders', 0) or 0
                if current_seeders > existing_seeders:
                    seen[key] = torrent

        unique_results = list(seen.values())

        # Sort by decreasing seeders
        unique_results.sort(key=lambda x: x.get('Seeders', 0) or 0, reverse=True)

        logger.info(f"✅ Recherche terminée: {len(unique_results)} résultats uniques")

        return jsonify({
            'success': True,
            'results': unique_results,
            'total': len(unique_results),
            'query': query
        })

    except Exception as e:
        logger.error(f"❌ Erreur recherche: {e}", exc_info=True)
        return jsonify({
            'success': False,
            'error': str(e)
        }), 500
|
||
|
||
|
||
def _normalize_title(title):
|
||
"""Normalise un titre pour la comparaison/déduplication"""
|
||
import re
|
||
# Minuscules
|
||
title = title.lower()
|
||
# Remplacer les séparateurs par des espaces
|
||
title = re.sub(r'[.\-_]', ' ', title)
|
||
# Supprimer les espaces multiples
|
||
title = re.sub(r'\s+', ' ', title)
|
||
return title.strip()
|
||
|
||
|
||
@app.route('/health', methods=['GET'])
def health():
    """Health-check endpoint for Docker (intentionally unauthenticated)."""
    return jsonify({
        'status': 'healthy',
        'version': APP_VERSION,
        'timestamp': datetime.now().isoformat()
    })
|
||
|
||
|
||
@app.route('/api/version', methods=['GET'])
def get_version():
    """Return the application name and version (unauthenticated)."""
    return jsonify({
        'success': True,
        'version': APP_VERSION,
        'name': 'Lycostorrent'
    })
|
||
|
||
|
||
@app.route('/latest')
@login_required
def latest():
    """Render the "latest releases" page."""
    return render_template('latest.html')
|
||
|
||
|
||
@app.route('/api/indexers', methods=['GET'])
@login_required
def get_indexers():
    """Alias for /api/trackers — delegates to get_trackers()."""
    return get_trackers()
|
||
|
||
|
||
@app.route('/api/latest', methods=['POST'])
@login_required
def get_latest():
    """Return the latest releases, enriched with TMDb/Last.fm metadata.

    Queries the selected Jackett/Prowlarr trackers and RSS feeds in
    parallel, optionally filters TV noise, sorts by publish date, groups
    similar torrents, then enriches the top `limit` groups with TMDb
    (video categories) or Last.fm (music) data.

    Fix: a global `as_completed` timeout previously escaped to the outer
    exception handler and turned already-collected partial results into
    a 500 error; it is now caught so partial results are still returned.
    """
    try:
        data = request.json
        if not data:
            return jsonify({
                'success': False,
                'error': 'Données JSON invalides'
            }), 400

        trackers = data.get('trackers', [])
        category = data.get('category', 'video')
        limit = data.get('limit', 20)

        # Hardened input validation
        if not isinstance(trackers, list) or len(trackers) > MAX_TRACKERS:
            return jsonify({
                'success': False,
                'error': f'Nombre de trackers invalide (max {MAX_TRACKERS})'
            }), 400

        # Clamp the result count to [1, MAX_LIMIT]; bad input falls back to 20
        try:
            limit = min(max(1, int(limit)), MAX_LIMIT)
        except (ValueError, TypeError):
            limit = 20

        # Unknown categories fall back to 'video'
        valid_categories = ['video', 'movies', 'tv', 'anime', 'music']
        if category not in valid_categories:
            category = 'video'

        logger.info(f"📥 Nouveautés: catégorie={category}, limite={limit}, trackers={len(trackers)}")

        # Split Jackett/Prowlarr trackers from RSS feeds ('rss:' prefix)
        indexer_trackers = [t for t in trackers if not t.startswith('rss:')]
        rss_trackers = [t.replace('rss:', '') for t in trackers if t.startswith('rss:')]

        # Admin-defined per-tracker category mapping
        latest_config = _load_latest_config()

        # Default Torznab category ids per UI category
        default_categories = {
            'video': '2000,5000',
            'movies': '2000',
            'tv': '5000',
            'anime': '5070',
            'music': '3000'
        }

        all_results = []

        # ============================================================
        # PARALLEL QUERIES
        # ============================================================
        from concurrent.futures import ThreadPoolExecutor, as_completed, TimeoutError as FuturesTimeout
        import time

        start_time = time.time()

        def fetch_tracker(tracker):
            """Fetch the latest items from one tracker; errors yield []."""
            tracker_config = latest_config.get(tracker, {})
            cat_id = tracker_config.get(category) or default_categories.get(category, '2000,5000')

            logger.info(f"🔍 {tracker}: catégorie {category} → IDs {cat_id}")

            try:
                # An empty query means "latest releases" for the indexer
                results = jackett.search('', indexers=[tracker], category=cat_id, max_results=limit * 2)
                logger.info(f"✅ {tracker}: {len(results)} résultats")
                return results
            except Exception as e:
                logger.warning(f"⚠️ Erreur {tracker}: {e}")
                return []

        def fetch_rss(rss_id):
            """Fetch items from one configured RSS feed; errors yield []."""
            try:
                for feed in rss_manager.feeds:
                    if feed.get('id') == rss_id:
                        logger.info(f"📡 RSS {feed['name']}: récupération...")
                        rss_results = rss_manager.rss_source.fetch_feed(feed, max_results=limit * 2)
                        if rss_results:
                            logger.info(f"✅ RSS {feed['name']}: {len(rss_results)} résultats")
                            return rss_results
                return []
            except Exception as e:
                logger.warning(f"⚠️ Erreur RSS {rss_id}: {e}")
                return []

        # Run all requests in parallel (max 10 threads)
        max_workers = min(10, len(indexer_trackers) + len(rss_trackers))

        if max_workers > 0:
            with ThreadPoolExecutor(max_workers=max_workers) as executor:
                futures = {}

                # Submit tracker requests
                for tracker in indexer_trackers:
                    future = executor.submit(fetch_tracker, tracker)
                    futures[future] = f"tracker:{tracker}"

                # Submit RSS requests
                for rss_id in rss_trackers:
                    future = executor.submit(fetch_rss, rss_id)
                    futures[future] = f"rss:{rss_id}"

                # Collect results as they complete
                try:
                    for future in as_completed(futures, timeout=60):
                        source = futures[future]
                        try:
                            results = future.result()
                            if results:
                                all_results.extend(results)
                        except Exception as e:
                            logger.warning(f"⚠️ Erreur {source}: {e}")
                except FuturesTimeout:
                    # Keep whatever finished in time rather than failing the request
                    logger.warning("⚠️ Timeout global des nouveautés, résultats partiels conservés")

        elapsed = time.time() - start_time
        logger.info(f"📦 Total: {len(all_results)} résultats bruts (en {elapsed:.2f}s)")

        if not all_results:
            return jsonify({
                'success': True,
                'results': [],
                'total': 0
            })

        # Drop anime/documentary noise when browsing plain TV
        if category == 'tv':
            all_results = _filter_tv_results(all_results)

        # Sort newest first
        all_results.sort(key=lambda x: x.get('PublishDateRaw', ''), reverse=True)

        # Group similar torrents (same release, different quality/source)
        grouped = _group_similar_torrents(all_results)

        # Enrich with TMDb or Last.fm
        enriched = []
        for group in grouped[:limit]:
            main_torrent = group['torrents'][0]

            if category == 'music':
                music_data = lastfm.enrich_torrent(main_torrent['Title'])
                if music_data:
                    group['music'] = music_data
                    group['is_music'] = True
            else:
                cat = 'movie' if category == 'movies' else ('tv' if category in ['tv', 'anime'] else None)
                tmdb_data = tmdb.enrich_torrent(main_torrent['Title'], cat)
                if tmdb_data:
                    group['tmdb'] = tmdb_data
                    if category == 'anime':
                        group['is_anime'] = True

            enriched.append(group)

        logger.info(f"✅ {len(enriched)} nouveautés enrichies")

        return jsonify({
            'success': True,
            'results': enriched,
            'total': len(enriched)
        })

    except Exception as e:
        logger.error(f"❌ Erreur nouveautés: {e}", exc_info=True)
        return jsonify({
            'success': False,
            'error': str(e)
        }), 500
|
||
|
||
|
||
def _filter_tv_results(results):
    """Filter TV results, dropping anime and documentary/sport entries.

    An entry is discarded when its title matches an anime or documentary
    pattern, or when its category string mentions 'anime'.
    """
    anime_patterns = [
        r'\bintégrale\b',
        r'\bcomplete\b|\bintegral\b',
        r'(?:subfrench|vostfr).*(?:1080p|720p)',
        r'(?:adn|crunchyroll)',
    ]

    doc_patterns = [
        r'\bdocument',
        r'\bsport',
        r'\brugby\b|\bfootball\b|\bfoot\b',
        r'\bligue\b|\bcoupe\b',
    ]

    def _keep(entry):
        # Keep only entries that match neither anime nor documentary markers
        title_lower = entry.get('Title', '').lower()
        if 'anime' in str(entry.get('Category', [])).lower():
            return False
        if any(re.search(p, title_lower) for p in anime_patterns):
            return False
        return not any(re.search(p, title_lower) for p in doc_patterns)

    filtered = [entry for entry in results if _keep(entry)]

    logger.info(f"📺 Filtrage TV: {len(filtered)}/{len(results)} conservés")
    return filtered
|
||
|
||
|
||
def _group_similar_torrents(torrents):
    """Group similar torrents by title.

    Greedy O(n²) pass: each not-yet-grouped torrent seeds a group, and
    every later torrent whose base title is more than 85% similar (and
    whose release year does not conflict) is absorbed into it. Within a
    group, torrents are sorted by seeders; groups are returned sorted by
    publish date, newest first.
    """
    groups = []
    used_indices = set()  # indices already absorbed into some group

    for i, torrent in enumerate(torrents):
        if i in used_indices:
            continue

        group = {
            'torrents': [torrent],
            'title': torrent['Title']
        }
        used_indices.add(i)

        base_title_1, year_1 = _extract_base_title_and_year(torrent['Title'])

        for j, other in enumerate(torrents[i+1:], start=i+1):
            if j in used_indices:
                continue

            base_title_2, year_2 = _extract_base_title_and_year(other['Title'])

            # When both titles carry a year and they differ, never group them
            if year_1 and year_2 and year_1 != year_2:
                continue

            # Compare the base titles
            base_similarity = difflib.SequenceMatcher(None, base_title_1, base_title_2).ratio()

            # Strict threshold to avoid false positives
            if base_similarity > 0.85:
                group['torrents'].append(other)
                used_indices.add(j)

        # Sort the group's torrents by seeders, best first
        group['torrents'].sort(key=lambda x: x.get('Seeders', 0) or 0, reverse=True)
        group['title'] = group['torrents'][0]['Title']
        groups.append(group)

    # Sort the groups by publish date, newest first
    groups.sort(key=lambda g: g['torrents'][0].get('PublishDateRaw', ''), reverse=True)

    logger.info(f"📦 Regroupement: {len(torrents)} → {len(groups)} groupes")
    return groups
|
||
|
||
|
||
def _extract_base_title_and_year(title):
|
||
"""Extrait le titre de base et l'année"""
|
||
# Supprimer l'extension
|
||
title = re.sub(r'\.(mkv|avi|mp4|torrent)$', '', title, flags=re.IGNORECASE)
|
||
|
||
# Chercher l'année (1900-2099) - avec ou sans parenthèses
|
||
year_match = re.search(r'[\.\s\-_\(](19\d{2}|20\d{2})[\.\s\-_\)]', title)
|
||
year = year_match.group(1) if year_match else None
|
||
|
||
# Chercher où commencent les métadonnées (après l'année ou qualité)
|
||
# Pattern pour année avec ou sans parenthèses
|
||
match = re.search(r'[\.\s\-_\(]+(19|20)\d{2}[\.\s\-_\)]', title, flags=re.IGNORECASE)
|
||
|
||
if match:
|
||
base_title = title[:match.start()]
|
||
else:
|
||
# Sinon chercher la qualité
|
||
match = re.search(r'[\.\s\-_]+(720p|1080p|2160p|4K|HDTV|WEB|BluRay|BDRip|DVDRip|FRENCH|MULTi|VFi|VOSTFR)', title, flags=re.IGNORECASE)
|
||
if match:
|
||
base_title = title[:match.start()]
|
||
else:
|
||
base_title = title
|
||
|
||
# Nettoyer le titre
|
||
base_title = base_title.replace('.', ' ').replace('_', ' ').replace('-', ' ')
|
||
base_title = re.sub(r'\s+', ' ', base_title).strip().lower()
|
||
|
||
return base_title, year
|
||
|
||
|
||
def _extract_base_title(title):
    """Return only the base title (metadata stripped), discarding the year."""
    return _extract_base_title_and_year(title)[0]
|
||
|
||
|
||
def _clean_title_for_comparison(title):
|
||
"""Nettoie un titre pour la comparaison"""
|
||
title = re.sub(r'[Ss]\d{2}[Ee]\d{2}.*', '', title)
|
||
title = re.sub(r'[Ss]\d{2}.*', '', title)
|
||
title = re.sub(r'\b(19|20)\d{2}\b', '', title)
|
||
|
||
patterns = [
|
||
r'\b1080p\b', r'\b720p\b', r'\b2160p\b', r'\b480p\b',
|
||
r'\b4K\b', r'\bUHD\b', r'\bHEVC\b', r'\bx264\b', r'\bx265\b',
|
||
r'\bBluRay\b', r'\bWEB\b', r'\bFRENCH\b', r'\bMULTI\b'
|
||
]
|
||
|
||
for pattern in patterns:
|
||
title = re.sub(pattern, '', title, flags=re.IGNORECASE)
|
||
|
||
title = re.sub(r'-[A-Z0-9]+$', '', title, flags=re.IGNORECASE)
|
||
title = title.replace('.', ' ').replace('_', ' ').replace('-', ' ')
|
||
title = re.sub(r'\s+', ' ', title).strip().lower()
|
||
|
||
return title
|
||
|
||
|
||
# ============================================================
|
||
# ADMINISTRATION - Catégories Latest
|
||
# ============================================================
|
||
|
||
# Admin-managed per-tracker category mapping used by the /api/latest endpoint
LATEST_CONFIG_PATH = '/app/config/latest_categories.json'
# Path for the configurable title-parsing tag list
# NOTE(review): not referenced elsewhere in this view — parsing tags appear
# to be read/written through tmdb_api helpers instead; confirm before removal
PARSING_TAGS_PATH = '/app/config/parsing_tags.json'
|
||
|
||
|
||
def _load_latest_config():
    """Load the per-tracker category configuration for the latest feed.

    Returns {} when the file is absent or unreadable; read errors are
    logged but never raised.
    """
    try:
        if os.path.exists(LATEST_CONFIG_PATH):
            with open(LATEST_CONFIG_PATH, 'r', encoding='utf-8') as fh:
                return json.load(fh)
    except Exception as e:
        logger.error(f"Erreur chargement config latest: {e}")
    return {}
|
||
|
||
|
||
def _save_latest_config(config_data):
    """Persist the latest-feed category configuration.

    Creates the parent directory when missing. Returns True on success,
    False on any write error (logged, never raised).
    """
    try:
        os.makedirs(os.path.dirname(LATEST_CONFIG_PATH), exist_ok=True)
        with open(LATEST_CONFIG_PATH, 'w', encoding='utf-8') as fh:
            json.dump(config_data, fh, indent=2, ensure_ascii=False)
    except Exception as e:
        logger.error(f"Erreur sauvegarde config latest: {e}")
        return False
    return True
|
||
|
||
|
||
@app.route('/admin')
@admin_required
def admin():
    """Render the unified administration page."""
    return render_template('admin.html')
|
||
|
||
|
||
@app.route('/admin/latest')
@admin_required
def admin_latest():
    """Render the admin page for latest-feed category mappings."""
    return render_template('admin_latest.html')
|
||
|
||
|
||
@app.route('/api/admin/latest-config', methods=['GET'])
@admin_required
def get_latest_config():
    """Return the stored latest-feed category configuration."""
    try:
        config_data = _load_latest_config()
        return jsonify({'success': True, 'config': config_data})
    except Exception as e:
        logger.error(f"Erreur récupération config: {e}")
        return jsonify({'success': False, 'error': str(e)}), 500
|
||
|
||
|
||
@app.route('/api/admin/latest-config', methods=['POST'])
@admin_required
def save_latest_config():
    """Save the latest-feed category configuration for one tracker.

    Expects {'config': {'tracker': <id>, 'categories': {...}}}; merges
    the mapping into the existing file so other trackers are preserved.
    """
    try:
        data = request.json
        # Fix: request.json can be None (missing/invalid JSON body); the
        # old code then crashed with AttributeError and answered 500
        # instead of 400 — the other POST handlers already guard this.
        if not data:
            return jsonify({'success': False, 'error': 'Données JSON invalides'}), 400

        config_data = data.get('config', {})
        tracker_id = config_data.get('tracker')
        categories = config_data.get('categories', {})

        if not tracker_id:
            return jsonify({'success': False, 'error': 'Tracker ID requis'}), 400

        # Merge into the existing configuration so other trackers keep theirs
        existing_config = _load_latest_config()
        existing_config[tracker_id] = categories

        if _save_latest_config(existing_config):
            logger.info(f"✅ Configuration sauvegardée pour {tracker_id}")
            return jsonify({'success': True, 'message': 'Configuration sauvegardée'})
        return jsonify({'success': False, 'error': 'Erreur lors de la sauvegarde'}), 500
    except Exception as e:
        logger.error(f"Erreur sauvegarde config: {e}")
        return jsonify({'success': False, 'error': str(e)}), 500
|
||
|
||
|
||
@app.route('/api/admin/tracker-categories/<path:tracker_id>', methods=['GET'])
@admin_required
def get_tracker_categories(tracker_id):
    """Return the categories a tracker exposes (Jackett or Prowlarr)."""
    try:
        categories = indexer_manager.get_indexer_categories(tracker_id)
        logger.info(f"✅ {len(categories)} catégories trouvées pour {tracker_id}")
        return jsonify({'success': True, 'categories': categories})
    except Exception as e:
        logger.error(f"Erreur récupération catégories {tracker_id}: {e}")
        return jsonify({'success': False, 'error': str(e), 'categories': []}), 500
|
||
|
||
|
||
@app.route('/api/admin/tracker-categories', methods=['GET'])
@admin_required
def get_tracker_categories_query():
    """Query-parameter variant of get_tracker_categories (?tracker=<id>)."""
    tracker_id = request.args.get('tracker', '')
    if tracker_id:
        return get_tracker_categories(tracker_id)
    return jsonify({'success': False, 'error': 'Tracker requis', 'categories': []}), 400
|
||
|
||
|
||
# ============================================================
|
||
# ADMINISTRATION - Tags de parsing
|
||
# ============================================================
|
||
|
||
@app.route('/admin/parsing')
@admin_required
def admin_parsing():
    """Render the admin page for parsing tags."""
    return render_template('admin_parsing.html')
|
||
|
||
|
||
@app.route('/api/admin/parsing-tags', methods=['GET'])
@admin_required
def get_parsing_tags():
    """Return the currently configured title-parsing tags."""
    try:
        from tmdb_api import _load_parsing_tags
        return jsonify({'success': True, 'tags': _load_parsing_tags()})
    except Exception as e:
        logger.error(f"Erreur récupération tags: {e}")
        return jsonify({'success': False, 'error': str(e)}), 500
|
||
|
||
|
||
@app.route('/api/admin/parsing-tags', methods=['POST'])
@admin_required
def save_parsing_tags():
    """Save the title-parsing tag list (blank entries are dropped)."""
    try:
        from tmdb_api import _save_parsing_tags
        data = request.json
        # Fix: request.json may be None on a missing/invalid body; the old
        # code crashed with AttributeError (HTTP 500) instead of 400.
        if not data:
            return jsonify({'success': False, 'error': 'Données JSON invalides'}), 400
        tags = data.get('tags', [])

        # Clean the tags (drop empties, trim surrounding whitespace)
        tags = [tag.strip() for tag in tags if tag.strip()]

        if _save_parsing_tags(tags):
            logger.info(f"✅ {len(tags)} tags de parsing sauvegardés")
            return jsonify({'success': True, 'message': f'{len(tags)} tags sauvegardés'})
        return jsonify({'success': False, 'error': 'Erreur lors de la sauvegarde'}), 500
    except Exception as e:
        logger.error(f"Erreur sauvegarde tags: {e}")
        return jsonify({'success': False, 'error': str(e)}), 500
|
||
|
||
|
||
@app.route('/api/admin/parsing-tags/reset', methods=['POST'])
@admin_required
def reset_parsing_tags():
    """Reset the parsing tags to their default values."""
    try:
        from tmdb_api import _save_parsing_tags, DEFAULT_PARSING_TAGS
        if not _save_parsing_tags(DEFAULT_PARSING_TAGS):
            return jsonify({'success': False, 'error': 'Erreur lors de la réinitialisation'}), 500
        logger.info("✅ Tags de parsing réinitialisés")
        return jsonify({'success': True, 'tags': DEFAULT_PARSING_TAGS})
    except Exception as e:
        logger.error(f"Erreur reset tags: {e}")
        return jsonify({'success': False, 'error': str(e)}), 500
|
||
|
||
|
||
@app.route('/api/admin/test-parsing', methods=['POST'])
@admin_required
def test_parsing():
    """Dry-run the TMDb title-cleaning logic on a single title."""
    try:
        data = request.json
        # Fix: request.json may be None on a missing/invalid body; the old
        # code crashed with AttributeError (HTTP 500) instead of 400.
        title = data.get('title', '') if data else ''

        if not title:
            return jsonify({'success': False, 'error': 'Titre requis'}), 400

        # Use TMDb's own cleaning routine so admins see exactly what the
        # enrichment step will search for
        cleaned = tmdb._clean_title(title)

        return jsonify({'success': True, 'original': title, 'cleaned': cleaned})
    except Exception as e:
        logger.error(f"Erreur test parsing: {e}")
        return jsonify({'success': False, 'error': str(e)}), 500
|
||
|
||
|
||
# ============================================================
|
||
# ADMIN FILTRES
|
||
# ============================================================
|
||
|
||
@app.route('/api/filters', methods=['GET'])
@login_required
def get_filters_public():
    """Expose the filter configuration to the search UI (any logged-in user)."""
    try:
        from torrent_parser import load_filters_config
        return jsonify({'success': True, 'filters': load_filters_config()})
    except Exception as e:
        logger.error(f"Erreur récupération filtres: {e}")
        return jsonify({'success': False, 'error': str(e)}), 500
|
||
|
||
|
||
@app.route('/api/admin/filters', methods=['GET'])
@admin_required
def get_filters_config():
    """Return the filter configuration (admin-only view)."""
    try:
        from torrent_parser import load_filters_config
        return jsonify({'success': True, 'filters': load_filters_config()})
    except Exception as e:
        logger.error(f"Erreur récupération filtres: {e}")
        return jsonify({'success': False, 'error': str(e)}), 500
|
||
|
||
|
||
@app.route('/api/admin/filters', methods=['POST'])
@admin_required
def save_filters_config_route():
    """Save the filter configuration and hot-reload the parser."""
    try:
        from torrent_parser import save_filters_config, reload_parser
        data = request.json
        # Fix: request.json may be None on a missing/invalid body; the old
        # code crashed with AttributeError (HTTP 500) instead of 400.
        if not data:
            return jsonify({'success': False, 'error': 'Données JSON invalides'}), 400
        filters = data.get('filters', {})

        if save_filters_config(filters):
            # Reload the parser so the new config applies immediately
            reload_parser()
            logger.info("✅ Configuration des filtres sauvegardée")
            return jsonify({'success': True, 'message': 'Filtres sauvegardés'})
        return jsonify({'success': False, 'error': 'Erreur lors de la sauvegarde'}), 500
    except Exception as e:
        logger.error(f"Erreur sauvegarde filtres: {e}")
        return jsonify({'success': False, 'error': str(e)}), 500
|
||
|
||
|
||
@app.route('/api/admin/filters/reset', methods=['POST'])
@admin_required
def reset_filters_config():
    """Reset the filters to their defaults and hot-reload the parser."""
    try:
        from torrent_parser import save_filters_config, get_default_filters, reload_parser
        default_filters = get_default_filters()

        if not save_filters_config(default_filters):
            return jsonify({'success': False, 'error': 'Erreur lors de la réinitialisation'}), 500

        reload_parser()
        logger.info("✅ Filtres réinitialisés")
        return jsonify({'success': True, 'filters': default_filters})
    except Exception as e:
        logger.error(f"Erreur reset filtres: {e}")
        return jsonify({'success': False, 'error': str(e)}), 500
|
||
|
||
|
||
@app.route('/api/admin/filters/test', methods=['POST'])
@admin_required
def test_filters_parsing():
    """Dry-run the torrent title parser with the current filters."""
    try:
        data = request.json
        # Fix: request.json may be None on a missing/invalid body; the old
        # code crashed with AttributeError (HTTP 500) instead of 400.
        title = data.get('title', '') if data else ''

        if not title:
            return jsonify({'success': False, 'error': 'Titre requis'}), 400

        # Parse the title with the active parser configuration
        parsed = parser.parse(title)

        return jsonify({'success': True, 'title': title, 'parsed': parsed})
    except Exception as e:
        logger.error(f"Erreur test filtres: {e}")
        return jsonify({'success': False, 'error': str(e)}), 500
|
||
|
||
|
||
# ============================================================
|
||
# ADMIN RSS
|
||
# ============================================================
|
||
|
||
@app.route('/admin/rss')
@admin_required
def admin_rss():
    """Render the admin page for RSS feeds."""
    return render_template('admin_rss.html')
|
||
|
||
|
||
@app.route('/api/admin/rss', methods=['GET'])
@admin_required
def get_rss_feeds():
    """List the configured RSS feeds with secrets redacted."""
    try:
        safe_feeds = []
        for feed in rss_manager.get_feeds():
            entry = feed.copy()
            # Never leak the passkey back to the browser
            if entry.get('passkey'):
                entry['passkey'] = '***'
            # Report whether cookies exist without exposing their value
            entry['has_cookies'] = bool(entry.get('cookies'))
            entry.pop('cookies', None)
            safe_feeds.append(entry)

        return jsonify({'success': True, 'feeds': safe_feeds})
    except Exception as e:
        logger.error(f"Erreur récupération flux RSS: {e}")
        return jsonify({'success': False, 'error': str(e)}), 500
|
||
|
||
|
||
@app.route('/api/admin/rss', methods=['POST'])
@admin_required
def add_rss_feed():
    """Create a new RSS feed entry from the posted JSON data."""
    try:
        data = request.get_json()
        # Fix: get_json() can return None; the old code then crashed with
        # AttributeError and answered 500 instead of a clean 400.
        if not data or not data.get('name') or not data.get('url') or not data.get('category'):
            return jsonify({'success': False, 'error': 'Nom, URL et catégorie requis'}), 400

        feed = {
            'name': data['name'],
            'url': data['url'],
            'category': data['category'],
            'passkey': data.get('passkey', ''),
            'use_flaresolverr': data.get('use_flaresolverr', False),
            'cookies': data.get('cookies', ''),
            'enabled': True  # new feeds start enabled
        }

        result = rss_manager.add_feed(feed)
        logger.info(f"✅ Flux RSS ajouté: {feed['name']}")
        return jsonify({'success': True, 'feed': result})
    except Exception as e:
        logger.error(f"Erreur ajout flux RSS: {e}")
        return jsonify({'success': False, 'error': str(e)}), 500
|
||
|
||
|
||
@app.route('/api/admin/rss/<feed_id>', methods=['DELETE'])
@admin_required
def delete_rss_feed(feed_id):
    """Remove an RSS feed by its id."""
    try:
        rss_manager.delete_feed(feed_id)
        logger.info(f"🗑️ Flux RSS supprimé: {feed_id}")
        return jsonify({'success': True})
    except Exception as e:
        logger.error(f"Erreur suppression flux RSS: {e}")
        return jsonify({'success': False, 'error': str(e)}), 500
|
||
|
||
|
||
@app.route('/api/admin/rss/<feed_id>/toggle', methods=['POST'])
@admin_required
def toggle_rss_feed(feed_id):
    """Flip the enabled flag of an RSS feed and persist the change."""
    try:
        target = next((f for f in rss_manager.feeds if f.get('id') == feed_id), None)
        if target is None:
            return jsonify({'success': False, 'error': 'Flux non trouvé'}), 404

        target['enabled'] = not target.get('enabled', True)
        rss_manager.save_config()
        logger.info(f"🔄 Flux RSS {feed_id} {'activé' if target['enabled'] else 'désactivé'}")
        return jsonify({'success': True, 'enabled': target['enabled']})
    except Exception as e:
        logger.error(f"Erreur toggle flux RSS: {e}")
        return jsonify({'success': False, 'error': str(e)}), 500
|
||
|
||
|
||
@app.route('/api/admin/rss/test', methods=['POST'])
@admin_required
def test_rss_feed():
    """Test an RSS feed URL before saving it."""
    try:
        data = request.get_json()
        # Fix: get_json() can return None; guard before attribute access
        # (previously AttributeError → HTTP 500 instead of 400).
        if not data:
            return jsonify({'success': False, 'error': 'URL requise'}), 400

        url = data.get('url', '')
        passkey = data.get('passkey', '')
        use_flaresolverr = data.get('use_flaresolverr', False)
        cookies = data.get('cookies', '')

        if not url:
            return jsonify({'success': False, 'error': 'URL requise'}), 400

        result = rss_manager.test_feed(url, passkey, use_flaresolverr, cookies)
        return jsonify(result)
    except Exception as e:
        logger.error(f"Erreur test flux RSS: {e}")
        return jsonify({'success': False, 'error': str(e)}), 500
|
||
|
||
|
||
@app.route('/api/admin/rss/<feed_id>/test', methods=['POST'])
@admin_required
def test_existing_rss_feed(feed_id):
    """Test an already-configured RSS feed by its id."""
    try:
        for feed in rss_manager.feeds:
            if feed.get('id') != feed_id:
                continue
            # NOTE(review): unlike the ad-hoc /rss/test route, this does not
            # forward use_flaresolverr/cookies — confirm whether intentional
            return jsonify(rss_manager.test_feed(feed['url'], feed.get('passkey', '')))

        return jsonify({'success': False, 'error': 'Flux non trouvé'}), 404
    except Exception as e:
        logger.error(f"Erreur test flux RSS: {e}")
        return jsonify({'success': False, 'error': str(e)}), 500
|
||
|
||
|
||
# ============================================================
|
||
# CLIENT TORRENT (Plugins)
|
||
# ============================================================
|
||
|
||
@app.route('/api/admin/torrent-client/plugins', methods=['GET'])
@admin_required
def get_torrent_client_plugins():
    """List the torrent-client plugins available for configuration."""
    try:
        from plugins.torrent_clients import get_available_plugins

        return jsonify({
            'success': True,
            'plugins': get_available_plugins()
        })
    except Exception as exc:
        logger.error(f"Erreur récupération plugins: {exc}")
        return jsonify({'success': False, 'error': str(exc)}), 500
|
||
|
||
|
||
@app.route('/api/admin/torrent-client/config', methods=['GET'])
@admin_required
def get_torrent_client_config():
    """Return the torrent-client configuration with the password masked."""
    try:
        from plugins.torrent_clients import get_active_config, get_active_client

        cfg = get_active_config() or {}
        active = get_active_client()

        # Never send the stored password back to the browser.
        masked = dict(cfg) if cfg else {}
        if masked.get('password'):
            masked['password'] = '********'

        return jsonify({
            'success': True,
            'config': masked,
            'connected': active.is_connected() if active else False
        })
    except Exception as exc:
        logger.error(f"Erreur récupération config client: {exc}")
        return jsonify({'success': False, 'error': str(exc)}), 500
|
||
|
||
|
||
@app.route('/api/admin/torrent-client/config', methods=['POST'])
@admin_required
def save_torrent_client_config():
    """Persist the torrent-client configuration and (re)activate the client."""
    try:
        from plugins.torrent_clients import create_client, set_active_client, save_client_config

        data = request.json

        # An empty or non-numeric port falls back to 0.
        raw_port = str(data.get('port', '')).strip()
        port_value = int(raw_port) if raw_port.isdigit() else 0

        config = {
            'enabled': data.get('enabled', False),
            'plugin': data.get('plugin', ''),
            'host': data.get('host', 'localhost'),
            'port': port_value,
            'username': data.get('username', ''),
            'password': data.get('password', ''),
            'use_ssl': data.get('use_ssl', False),
            'path': data.get('path', '')  # optional URL path (e.g. /qbittorrent)
        }

        # Persist first; activation only happens on a saved config.
        if not save_client_config(config):
            return jsonify({
                'success': False,
                'error': 'Erreur de sauvegarde'
            }), 500

        if not (config['enabled'] and config['plugin']):
            # Disabled (or no plugin selected): deactivate any running client.
            set_active_client(None, config)
            return jsonify({
                'success': True,
                'message': 'Client torrent désactivé'
            })

        client = create_client(config['plugin'], config)
        if client and client.connect():
            set_active_client(client, config)
            logger.info(f"✅ Client torrent configuré: {config['plugin']}")
            return jsonify({
                'success': True,
                'message': 'Configuration sauvegardée et client connecté'
            })

        # Saved, but the connection attempt failed — still report success so
        # the settings are not lost, with a warning for the UI.
        return jsonify({
            'success': True,
            'message': 'Configuration sauvegardée mais connexion échouée',
            'warning': 'Vérifiez les paramètres de connexion'
        })

    except Exception as exc:
        logger.error(f"Erreur sauvegarde config client: {exc}")
        return jsonify({
            'success': False,
            'error': str(exc)
        }), 500
|
||
|
||
|
||
@app.route('/api/admin/torrent-client/test', methods=['POST'])
@admin_required
def test_torrent_client():
    """Try connecting to a torrent client using the settings in the request."""
    try:
        from plugins.torrent_clients import create_client

        data = request.json

        # An empty or non-numeric port falls back to 0.
        raw_port = str(data.get('port', '')).strip()
        port_value = int(raw_port) if raw_port.isdigit() else 0

        settings = {
            'host': data.get('host', 'localhost'),
            'port': port_value,
            'username': data.get('username', ''),
            'password': data.get('password', ''),
            'use_ssl': data.get('use_ssl', False),
            'path': data.get('path', '')
        }
        client = create_client(data.get('plugin', ''), settings)

        if not client:
            return jsonify({
                'success': False,
                'error': 'Plugin non trouvé'
            }), 400

        return jsonify(client.test_connection())

    except Exception as exc:
        logger.error(f"Erreur test client: {exc}")
        return jsonify({
            'success': False,
            'error': str(exc)
        }), 500
|
||
|
||
|
||
@app.route('/api/torrent-client/add', methods=['POST'])
@login_required
def add_torrent_to_client():
    """Send a torrent (magnet or .torrent URL) to the configured client."""
    try:
        from plugins.torrent_clients import get_active_client

        client = get_active_client()
        if not client:
            return jsonify({
                'success': False,
                'error': 'Aucun client torrent configuré'
            }), 400

        # Reconnect lazily when the previous session has dropped.
        if not client.is_connected() and not client.connect():
            return jsonify({
                'success': False,
                'error': 'Impossible de se connecter au client torrent'
            }), 500

        data = request.json
        url = data.get('url', '')  # magnet link or .torrent URL
        category = data.get('category', None)
        save_path = data.get('save_path', None)
        paused = data.get('paused', False)

        if not url:
            return jsonify({
                'success': False,
                'error': 'URL requise'
            }), 400

        added = client.add_torrent_url(url, save_path=save_path, category=category, paused=paused)
        if added:
            logger.info(f"✅ Torrent envoyé au client: {url[:50]}...")
            return jsonify({
                'success': True,
                'message': 'Torrent ajouté avec succès'
            })

        return jsonify({
            'success': False,
            'error': 'Échec de l\'ajout du torrent'
        }), 500

    except Exception as exc:
        logger.error(f"Erreur ajout torrent: {exc}")
        return jsonify({
            'success': False,
            'error': str(exc)
        }), 500
|
||
|
||
|
||
@app.route('/api/torrent-client/categories', methods=['GET'])
@login_required
def get_torrent_client_categories():
    """Return the merged list of custom and client-reported torrent categories."""
    try:
        from plugins.torrent_clients import get_active_client

        # Custom (admin-defined) categories always take precedence.
        custom_categories = load_custom_categories()

        client = get_active_client()
        client_categories = []
        if client and client.is_connected():
            client_categories = client.get_categories()

        merged = list(custom_categories.keys())
        for name in client_categories:
            if name not in merged:
                merged.append(name)

        return jsonify({
            'success': True,
            'categories': merged,
            'custom_categories': custom_categories,
            'client_categories': client_categories
        })

    except Exception as exc:
        logger.error(f"Erreur récupération catégories: {exc}")
        # Graceful degradation: the UI can still render with empty lists.
        return jsonify({
            'success': False,
            'categories': [],
            'custom_categories': {},
            'client_categories': []
        })
|
||
|
||
|
||
@app.route('/api/admin/torrent-client/categories', methods=['GET'])
@admin_required
def get_admin_custom_categories():
    """Return the custom category → save-path mapping for the admin UI."""
    # NOTE(review): switched @login_required → @admin_required — this is an
    # /api/admin/ route and every other admin endpoint in this file uses
    # @admin_required. The error path now also returns HTTP 500 instead of 200.
    try:
        custom_categories = load_custom_categories()
        return jsonify({
            'success': True,
            'categories': custom_categories
        })
    except Exception as e:
        return jsonify({
            'success': False,
            'error': str(e)
        }), 500
|
||
|
||
|
||
@app.route('/api/admin/torrent-client/categories', methods=['POST'])
@admin_required
def save_admin_custom_categories():
    """Persist the custom category → save-path mapping.

    Expects a JSON body: {"categories": {name: save_path, ...}}.
    """
    # NOTE(review): switched @login_required → @admin_required — this write
    # endpoint lives under /api/admin/ and should match the other admin routes.
    try:
        data = request.json or {}
        categories = data.get('categories', {})

        # Reject a non-mapping payload instead of silently persisting garbage.
        if not isinstance(categories, dict):
            return jsonify({
                'success': False,
                'error': 'Format invalide: "categories" doit être un objet'
            }), 400

        save_custom_categories(categories)

        return jsonify({
            'success': True,
            'message': 'Catégories sauvegardées'
        })
    except Exception as e:
        return jsonify({
            'success': False,
            'error': str(e)
        }), 500
|
||
|
||
|
||
@app.route('/api/admin/torrent-client/sync-categories', methods=['POST'])
@admin_required
def sync_categories_with_client():
    """Push custom categories to the torrent client, creating missing ones.

    Only clients that expose create_category() (e.g. qBittorrent) support
    remote category creation; other clients get a descriptive error.
    """
    # NOTE(review): switched @login_required → @admin_required — this is an
    # /api/admin/ route and should match the other admin endpoints.
    try:
        from plugins.torrent_clients import get_active_client

        client = get_active_client()
        if not client:
            return jsonify({
                'success': False,
                'error': 'Aucun client torrent configuré'
            })

        if not client.is_connected():
            return jsonify({
                'success': False,
                'error': 'Client torrent non connecté'
            })

        custom_categories = load_custom_categories()

        # Only some clients (qBittorrent) can create categories remotely.
        if hasattr(client, 'create_category'):
            created = []
            for name, path in custom_categories.items():
                if client.create_category(name, path):
                    created.append(name)

            return jsonify({
                'success': True,
                'message': f'{len(created)} catégories créées',
                'created': created
            })
        else:
            return jsonify({
                'success': False,
                'error': 'Ce client ne supporte pas la création de catégories'
            })

    except Exception as e:
        return jsonify({
            'success': False,
            'error': str(e)
        })
|
||
|
||
|
||
def load_custom_categories():
    """Load the custom category → save-path mapping from disk.

    Returns:
        dict: mapping of category name to save path; {} when the file is
        missing, unreadable, or contains invalid JSON.
    """
    # json/os are already imported at module level; the previous
    # function-local `import json` was redundant.
    config_path = '/app/config/torrent_categories.json'

    try:
        if os.path.exists(config_path):
            with open(config_path, 'r') as f:
                return json.load(f)
    except (OSError, json.JSONDecodeError) as e:
        # Best-effort: a corrupt or unreadable file degrades to "no custom
        # categories", but is logged instead of swallowed by a bare except.
        logger.warning(f"Impossible de charger {config_path}: {e}")

    return {}
|
||
|
||
|
||
def save_custom_categories(categories):
    """Write the custom category → save-path mapping to disk as JSON."""
    import json
    config_path = '/app/config/torrent_categories.json'

    # Make sure the config directory exists before writing.
    os.makedirs(os.path.dirname(config_path), exist_ok=True)

    with open(config_path, 'w') as handle:
        json.dump(categories, handle, indent=2)
|
||
|
||
|
||
@app.route('/api/torrent-client/status', methods=['GET'])
@login_required
def get_torrent_client_status():
    """Report whether a torrent client is configured, enabled and reachable."""
    try:
        from plugins.torrent_clients import get_active_client, get_active_config

        client = get_active_client()
        config = get_active_config()

        if not (client and config and config.get('enabled')):
            return jsonify({
                'success': True,
                'enabled': False,
                'connected': False
            })

        return jsonify({
            'success': True,
            'enabled': True,
            'connected': client.is_connected(),
            'plugin': config.get('plugin', ''),
            # Defaults to True when the plugin doesn't declare the attribute.
            'supportsTorrentFiles': getattr(client, 'SUPPORTS_TORRENT_FILES', True)
        })

    except Exception as exc:
        return jsonify({
            'success': False,
            'error': str(exc)
        }), 500
|
||
|
||
|
||
# ============================================================
|
||
# API CACHE
|
||
# ============================================================
|
||
|
||
@app.route('/api/cache/status', methods=['GET'])
@login_required
def api_cache_status():
    """Expose the current cache status to the UI."""
    try:
        import cache_manager

        return jsonify({'success': True, **cache_manager.get_cache_status()})
    except Exception as exc:
        logger.error(f"Erreur statut cache: {exc}")
        return jsonify({'success': False, 'error': str(exc)}), 500
|
||
|
||
|
||
@app.route('/api/cache/config', methods=['GET'])
@admin_required
def api_get_cache_config():
    """Return the cache configuration."""
    try:
        import cache_manager

        return jsonify({'success': True, 'config': cache_manager.get_cache_config()})
    except Exception as exc:
        return jsonify({'success': False, 'error': str(exc)}), 500
|
||
|
||
|
||
@app.route('/api/cache/config', methods=['POST'])
@admin_required
def api_save_cache_config():
    """Persist the cache configuration and restart the refresh scheduler."""
    try:
        import cache_manager

        payload = request.json
        config = cache_manager.get_cache_config()

        # Global settings
        config['enabled'] = payload.get('enabled', False)
        config['interval_minutes'] = int(payload.get('interval_minutes', 60))

        # "Latest releases" section
        config['latest'] = {
            'enabled': payload.get('latest_enabled', True),
            'categories': payload.get('latest_categories', ['movies', 'tv']),
            'trackers': payload.get('latest_trackers', []),
            'limit': int(payload.get('latest_limit', 50))
        }

        # "Discover" section
        config['discover'] = {
            'enabled': payload.get('discover_enabled', True),
            'limit': int(payload.get('discover_limit', 30))
        }

        if not cache_manager.save_cache_config(config):
            return jsonify({'success': False, 'error': 'Erreur de sauvegarde'}), 500

        # Apply the new interval immediately.
        cache_manager.restart_scheduler(app)
        return jsonify({'success': True, 'message': 'Configuration sauvegardée'})

    except Exception as exc:
        logger.error(f"Erreur sauvegarde config cache: {exc}")
        return jsonify({'success': False, 'error': str(exc)}), 500
|
||
|
||
|
||
@app.route('/api/cache/refresh', methods=['POST'])
@admin_required
def api_refresh_cache():
    """Kick off a cache refresh without blocking the request."""
    try:
        import cache_manager
        import threading

        # Daemon thread: a refresh must never keep the process alive on exit.
        worker = threading.Thread(
            target=lambda: cache_manager.refresh_cache(app),
            daemon=True,
        )
        worker.start()

        return jsonify({'success': True, 'message': 'Refresh lancé en arrière-plan'})
    except Exception as exc:
        return jsonify({'success': False, 'error': str(exc)}), 500
|
||
|
||
|
||
@app.route('/api/cache/clear', methods=['POST'])
@admin_required
def api_clear_cache():
    """Delete all cached data."""
    try:
        import cache_manager

        if not cache_manager.clear_cache():
            return jsonify({'success': False, 'error': 'Erreur'}), 500
        return jsonify({'success': True, 'message': 'Cache vidé'})
    except Exception as exc:
        return jsonify({'success': False, 'error': str(exc)}), 500
|
||
|
||
|
||
@app.route('/api/cache/data/<cache_type>/<category>', methods=['GET'])
@login_required
def api_get_cached_data(cache_type, category):
    """Return cached data for the given cache type and category, if present."""
    try:
        import cache_manager

        entry = cache_manager.get_cached_data(cache_type, category)

        if not entry:
            # Cache miss — the caller is expected to fall back to a live fetch.
            return jsonify({
                'success': True,
                'cached': False,
                'data': []
            })

        return jsonify({
            'success': True,
            'cached': True,
            'timestamp': entry.get('timestamp'),
            'data': entry.get('data', [])
        })
    except Exception as exc:
        return jsonify({'success': False, 'error': str(exc)}), 500
|
||
|
||
|
||
# ============================================================
# INTERNAL HELPERS FOR THE CACHE
# ============================================================
|
||
|
||
def fetch_latest_releases_internal(trackers_list=None, category='movies', limit=50):
    """Fetch the latest releases across trackers and RSS feeds (cache path).

    Used by the cache manager; mirrors the logic of the get_latest() route.

    Args:
        trackers_list: tracker ids to query; RSS feeds are prefixed with
            'rss:'. When falsy, all configured Jackett indexers are used.
        category: one of 'video', 'movies', 'tv', 'anime', 'music'.
        limit: maximum number of grouped results returned.

    Returns:
        list: grouped and TMDb/Last.fm-enriched result dicts; [] on any error.
    """
    try:
        from concurrent.futures import ThreadPoolExecutor, as_completed
        import time

        logger.info(f"📦 Cache: fetch_latest_releases_internal category={category}, limit={limit}")

        # No explicit trackers: fall back to every configured indexer.
        if not trackers_list:
            all_trackers = jackett.get_indexers()
            trackers_list = [t.get('id') for t in all_trackers if t.get('configured', False)]

        if not trackers_list:
            logger.warning("Cache: Aucun tracker disponible")
            return []

        # Split Jackett/Prowlarr indexers from RSS feeds ('rss:<id>' entries).
        indexer_trackers = [t for t in trackers_list if not str(t).startswith('rss:')]
        rss_trackers = [t.replace('rss:', '') for t in trackers_list if str(t).startswith('rss:')]

        # Admin-configured per-tracker category overrides.
        latest_config = _load_latest_config()

        # Default Torznab category ids per logical category.
        default_categories = {
            'video': '2000,5000',
            'movies': '2000',
            'tv': '5000',
            'anime': '5070',
            'music': '3000'
        }

        all_results = []
        start_time = time.time()

        def fetch_tracker(tracker):
            """Fetch results from one Jackett/Prowlarr tracker (empty query = latest)."""
            tracker_config = latest_config.get(tracker, {})
            cat_id = tracker_config.get(category) or default_categories.get(category, '2000,5000')

            try:
                # limit * 2: over-fetch to survive the filtering/grouping below.
                results = jackett.search('', indexers=[tracker], category=cat_id, max_results=limit * 2)
                return results
            except Exception as e:
                logger.warning(f"⚠️ Cache erreur {tracker}: {e}")
                return []

        def fetch_rss(rss_id):
            """Fetch results from one RSS feed by id.

            NOTE(review): when no feed matches rss_id, this falls through and
            implicitly returns None; the consumer's `if results:` tolerates it.
            """
            try:
                for feed in rss_manager.feeds:
                    if feed.get('id') == rss_id:
                        rss_results = rss_manager.rss_source.fetch_feed(feed, max_results=limit * 2)
                        return rss_results if rss_results else []
            except Exception as e:
                logger.warning(f"⚠️ Cache erreur RSS {rss_id}: {e}")
                return []

        # Run all tracker/RSS fetches in parallel (at most 10 workers).
        max_workers = min(10, len(indexer_trackers) + len(rss_trackers))

        if max_workers > 0:
            with ThreadPoolExecutor(max_workers=max_workers) as executor:
                futures = {}

                for tracker in indexer_trackers:
                    future = executor.submit(fetch_tracker, tracker)
                    futures[future] = f"tracker:{tracker}"

                for rss_id in rss_trackers:
                    future = executor.submit(fetch_rss, rss_id)
                    futures[future] = f"rss:{rss_id}"

                # NOTE(review): if the 60s timeout fires, as_completed raises
                # concurrent.futures.TimeoutError, which escapes this loop and
                # is only caught by the outer `except Exception` → returns [].
                for future in as_completed(futures, timeout=60):
                    try:
                        results = future.result()
                        if results:
                            all_results.extend(results)
                    except Exception as e:
                        pass  # a single failed source must not abort the batch

        elapsed = time.time() - start_time
        logger.info(f"📦 Cache: {len(all_results)} résultats bruts (en {elapsed:.2f}s)")

        if not all_results:
            return []

        # TV category: filter out anime/documentary noise.
        if category == 'tv':
            all_results = _filter_tv_results(all_results)

        # Newest first (PublishDateRaw is sortable as a string — presumably
        # ISO-8601; verify against the parser).
        all_results.sort(key=lambda x: x.get('PublishDateRaw', ''), reverse=True)

        # Collapse near-duplicate torrents into groups.
        grouped = _group_similar_torrents(all_results)

        # Enrich each group with TMDb (video) or Last.fm (music) metadata,
        # keyed off the group's first torrent title.
        enriched = []
        for group in grouped[:limit]:
            main_torrent = group['torrents'][0]

            if category == 'music':
                music_data = lastfm.enrich_torrent(main_torrent['Title'])
                if music_data:
                    group['music'] = music_data
                    group['is_music'] = True
            else:
                cat = 'movie' if category == 'movies' else ('tv' if category in ['tv', 'anime'] else None)
                tmdb_data = tmdb.enrich_torrent(main_torrent['Title'], cat)
                if tmdb_data:
                    group['tmdb'] = tmdb_data
                    if category == 'anime':
                        group['is_anime'] = True

            enriched.append(group)

        logger.info(f"📦 Cache: {len(enriched)} résultats enrichis pour {category}")
        return enriched

    except Exception as e:
        logger.error(f"❌ Cache fetch_latest_releases_internal: {e}", exc_info=True)
        return []
|
||
|
||
|
||
def fetch_discover_internal(media_type='movies', limit=30):
    """Build the Discover cache: TMDb items with details AND pre-fetched torrents.

    Used by the cache manager.

    Args:
        media_type: 'movies' or anything else (treated as TV).
        limit: maximum number of TMDb items processed.

    Returns:
        list: TMDb item dicts augmented with 'genres', 'overview',
        'trailer_url', 'torrents', 'torrent_count' and 'details_cached';
        [] on any error or when no TMDb API key is configured.
    """
    try:
        from concurrent.futures import ThreadPoolExecutor, as_completed
        import requests
        import time

        logger.info(f"📦 Cache Discover: fetch {media_type}, limit={limit}")

        if not config.tmdb_api_key:
            logger.warning("Cache: Clé API TMDb non configurée")
            return []

        # 1. Fetch candidate movies/series from TMDb.
        if media_type == 'movies':
            items = _fetch_recent_movies()
        else:
            items = _fetch_popular_tv()

        items = items[:limit]
        logger.info(f"📦 Cache Discover: {len(items)} {media_type} récupérés depuis TMDb")

        if not items:
            return []

        # 2. Pre-load TMDb details AND torrent search results for each item.
        start_time = time.time()

        def fetch_details_and_torrents(item):
            """Fetch TMDb details for one item, then search its torrents.

            Mutates `item` in place and returns (item, top_torrents).
            """
            try:
                tmdb_id = item.get('id')
                title = item.get('title') or item.get('name', '')
                original_title = item.get('original_title') or item.get('original_name', '')
                year = ''
                if item.get('release_date'):
                    year = item['release_date'][:4]
                elif item.get('first_air_date'):
                    year = item['first_air_date'][:4]

                item_media_type = 'movie' if media_type == 'movies' else 'tv'

                # ========================================
                # A. TMDb details (genres, synopsis, trailer)
                # ========================================
                try:
                    url = f'https://api.themoviedb.org/3/{item_media_type}/{tmdb_id}'
                    params = {
                        'api_key': config.tmdb_api_key,
                        'language': 'fr-FR',
                        'append_to_response': 'videos'
                    }

                    response = requests.get(url, params=params, timeout=10)
                    if response.status_code == 200:
                        detail = response.json()

                        # Genres
                        item['genres'] = detail.get('genres', [])

                        # Full synopsis (keep the list-endpoint one otherwise)
                        if detail.get('overview'):
                            item['overview'] = detail['overview']

                        # Runtime (movies) / season & episode counts (TV)
                        if item_media_type == 'movie':
                            item['runtime'] = detail.get('runtime')
                        else:
                            item['number_of_seasons'] = detail.get('number_of_seasons')
                            item['number_of_episodes'] = detail.get('number_of_episodes')

                        # Pick a YouTube trailer, preferring 'Trailer' over 'Teaser'.
                        trailer_url = None
                        videos = detail.get('videos', {}).get('results', [])

                        for video_type in ['Trailer', 'Teaser']:
                            for video in videos:
                                if video.get('site') == 'YouTube' and video.get('type') == video_type:
                                    trailer_url = f"https://www.youtube.com/embed/{video.get('key')}"
                                    break
                            if trailer_url:
                                break

                        # No FR video found: retry the videos endpoint in English.
                        if not trailer_url:
                            try:
                                url_en = f'https://api.themoviedb.org/3/{item_media_type}/{tmdb_id}/videos'
                                params_en = {'api_key': config.tmdb_api_key, 'language': 'en-US'}
                                response_en = requests.get(url_en, params=params_en, timeout=5)
                                if response_en.ok:
                                    videos_en = response_en.json().get('results', [])
                                    for video_type in ['Trailer', 'Teaser']:
                                        for video in videos_en:
                                            if video.get('site') == 'YouTube' and video.get('type') == video_type:
                                                trailer_url = f"https://www.youtube.com/embed/{video.get('key')}"
                                                break
                                        if trailer_url:
                                            break
                            except:
                                pass  # trailer is optional — best effort

                        item['trailer_url'] = trailer_url

                except Exception as e:
                    logger.warning(f"Erreur détails TMDb pour {title}: {e}")

                # ========================================
                # B. Torrent search
                # ========================================
                cat_id = '2000' if item_media_type == 'movie' else '5000'

                # Build queries from latin-alphabet titles only (trackers don't
                # index non-latin titles); original title first.
                search_queries = []
                if original_title and _is_latin_text(original_title):
                    search_queries.append(f"{original_title} {year}" if year else original_title)
                if title and _is_latin_text(title) and title != original_title:
                    search_queries.append(f"{title} {year}" if year else title)

                if not search_queries:
                    return item, []

                all_torrents = []
                seen_titles = set()

                configured_trackers = load_discover_trackers_config()
                all_trackers = indexer_manager.get_indexers()
                if configured_trackers:
                    all_trackers = [t for t in all_trackers if t.get('id') in configured_trackers]

                # Cap at 3 trackers to keep the cache refresh fast.
                all_trackers = all_trackers[:3]

                for query in search_queries[:2]:
                    for tracker in all_trackers:
                        try:
                            tracker_id = tracker.get('id', '')
                            results = jackett.search(query, indexers=[tracker_id], category=cat_id, max_results=10)
                            for r in results:
                                # De-dupe by lowercased title across trackers.
                                title_key = r.get('Title', '').lower()
                                if title_key not in seen_titles:
                                    seen_titles.add(title_key)
                                    all_torrents.append(r)
                        except:
                            pass  # a failing tracker must not abort the item

                    # Enough candidates: skip the secondary query.
                    if len(all_torrents) >= 10:
                        break

                # Parse release names (quality, codec, …) in place.
                for torrent in all_torrents:
                    parser.enrich_torrent(torrent)

                # Keep only results that actually match this title/year,
                # best-seeded first.
                filtered = _filter_relevant_torrents(all_torrents, title, original_title, year)
                filtered.sort(key=lambda x: x.get('Seeders', 0) or 0, reverse=True)

                return item, filtered[:10]

            except Exception as e:
                logger.warning(f"Erreur pour {item.get('title', '?')}: {e}")
                return item, []

        # Process 5 items concurrently.
        results_with_data = []

        with ThreadPoolExecutor(max_workers=5) as executor:
            futures = {executor.submit(fetch_details_and_torrents, item): item for item in items}

            try:
                for future in as_completed(futures, timeout=180):  # 3 min max
                    try:
                        item, torrents = future.result(timeout=10)
                        item['torrents'] = torrents
                        item['torrent_count'] = len(torrents)
                        item['details_cached'] = True  # details are now in cache
                        results_with_data.append(item)
                        logger.info(f" ✅ {item.get('title') or item.get('name')}: {len(torrents)} torrents, détails OK")
                    except Exception as e:
                        # Failed item: keep it, but with empty torrents.
                        item = futures[future]
                        item['torrents'] = []
                        item['torrent_count'] = 0
                        item['details_cached'] = False
                        results_with_data.append(item)
            except TimeoutError:
                # NOTE(review): as_completed raises concurrent.futures.TimeoutError;
                # it is only an alias of builtin TimeoutError on Python ≥ 3.11 —
                # on older interpreters this handler would not match. Confirm the
                # deployment's Python version.
                logger.warning(f"⏱️ Timeout cache Discover, {len(results_with_data)}/{len(items)} traités")
                for future in futures:
                    if not future.done():
                        item = futures[future]
                        if item not in [r for r in results_with_data]:
                            item['torrents'] = []
                            item['torrent_count'] = 0
                            item['details_cached'] = False
                            results_with_data.append(item)

        elapsed = time.time() - start_time
        total_torrents = sum(item.get('torrent_count', 0) for item in results_with_data)
        logger.info(f"📦 Cache Discover: {len(results_with_data)} {media_type}, {total_torrents} torrents (en {elapsed:.1f}s)")

        return results_with_data

    except Exception as e:
        logger.error(f"❌ Cache fetch_discover_internal: {e}", exc_info=True)
        return []
|
||
|
||
|
||
if __name__ == '__main__':
    # Load the configured torrent client (if any) at startup; the app still
    # works without one, so failures are only warnings.
    try:
        from plugins.torrent_clients import load_client_from_config
        load_client_from_config()
    except Exception as e:
        logger.warning(f"⚠️ Client torrent non chargé: {e}")

    # Start the periodic cache-refresh scheduler (best effort as well).
    try:
        import cache_manager
        cache_manager.init_scheduler(app)
    except Exception as e:
        logger.warning(f"⚠️ Cache scheduler non démarré: {e}")

    logger.info("🚀 Démarrage de Lycostorrent...")
    logger.info(f"📡 Jackett URL: {config.jackett_url}")
    # Bind on all interfaces (container deployment); debug stays off since
    # this entry point is also used in production.
    app.run(host='0.0.0.0', port=5097, debug=False)
|