initial commit

This commit is contained in:
2026-03-23 22:24:24 +01:00
commit 082506d00c
13 changed files with 3362 additions and 0 deletions

59
plugins/__init__.py Normal file
View File

@@ -0,0 +1,59 @@
"""
Registre de plugins SeedMover.
Tout plugin placé dans ce dossier et héritant de AbstractFS
est automatiquement découvert et enregistré.
"""
import importlib
import os
from .base import AbstractFS
_registry: dict[str, type] = {}
def _discover():
    """Scan the plugins/ directory and register every AbstractFS subclass found."""
    plugins_dir = os.path.dirname(__file__)
    for fname in os.listdir(plugins_dir):
        # Skip private files, non-Python files, and the abstract base module.
        if fname.startswith('_') or not fname.endswith('.py'):
            continue
        module_name = fname[:-3]
        if module_name in ('base',):
            continue
        try:
            mod = importlib.import_module(f'.{module_name}', package='plugins')
            for attr_name in dir(mod):
                candidate = getattr(mod, attr_name)
                looks_like_plugin = (
                    isinstance(candidate, type)
                    and issubclass(candidate, AbstractFS)
                    and candidate is not AbstractFS
                    and candidate.PLUGIN_NAME
                )
                if looks_like_plugin:
                    _registry[candidate.PLUGIN_NAME] = candidate
        except Exception as e:
            print(f"[plugins] Impossible de charger {fname}: {e}")
def get_plugin(name: str) -> type:
    """Return the plugin class registered under *name*.

    NOTE(review): despite the ``-> type`` annotation, ``dict.get`` returns
    None for unknown names — callers should check before instantiating.
    """
    if not _registry:
        _discover()
    return _registry.get(name)
def list_plugins() -> list:
    """Return every available plugin as a dict of name, label and config fields."""
    if not _registry:
        _discover()
    available = []
    for cls in _registry.values():
        available.append({
            'name': cls.PLUGIN_NAME,
            'label': cls.PLUGIN_LABEL,
            'fields': cls.get_config_fields()
        })
    return available
# Eagerly populate the registry when this package is imported.
_discover()

140
plugins/base.py Normal file
View File

@@ -0,0 +1,140 @@
from abc import ABC, abstractmethod
class AbstractFS(ABC):
    """
    Abstract interface for all filesystem plugins.

    To create a new plugin:
    1. Create plugins/myplugin.py
    2. Inherit from AbstractFS
    3. Implement every abstract method
    4. Define PLUGIN_NAME and PLUGIN_LABEL

    The plugin is discovered automatically at startup.
    """
    PLUGIN_NAME = None   # internal identifier, e.g. "sftp"
    PLUGIN_LABEL = None  # display label, e.g. "SFTP"

    # ─── Lifecycle ───────────────────────────────────────────────
    @abstractmethod
    def connect(self, config: dict):
        """Open the connection using the supplied config."""
        pass

    @abstractmethod
    def disconnect(self):
        """Cleanly close the connection."""
        pass

    @abstractmethod
    def is_connected(self) -> bool:
        """Return True if the connection is alive."""
        pass

    # ─── Navigation ──────────────────────────────────────────────
    @abstractmethod
    def list(self, path: str) -> list:
        """
        List the contents of a directory.

        Returns a list of dicts:
            { name, path, is_dir, size, mtime }
        Sorted: directories first, then files, alphabetically.
        """
        pass

    @abstractmethod
    def isdir(self, path: str) -> bool:
        """Return True if *path* is a directory."""
        pass

    @abstractmethod
    def exists(self, path: str) -> bool:
        """Return True if *path* exists."""
        pass

    @abstractmethod
    def getsize(self, path: str) -> int:
        """Return the size of *path* in bytes."""
        pass

    @abstractmethod
    def join(self, *parts) -> str:
        """os.path.join equivalent for this FS."""
        pass

    @abstractmethod
    def basename(self, path: str) -> str:
        """Return the final component of *path*."""
        pass

    @abstractmethod
    def dirname(self, path: str) -> str:
        """Return the parent directory of *path*."""
        pass

    @abstractmethod
    def relpath(self, path: str, base: str) -> str:
        """Return *path* expressed relative to *base*."""
        pass

    # ─── Operations ──────────────────────────────────────────────
    @abstractmethod
    def mkdir(self, path: str):
        """Create a directory (and missing parents if needed)."""
        pass

    @abstractmethod
    def rename(self, old_path: str, new_path: str):
        """Rename/move *old_path* to *new_path*."""
        pass

    @abstractmethod
    def remove(self, path: str):
        """Delete a file or a directory."""
        pass

    @abstractmethod
    def walk(self, path: str):
        """
        Generator identical to os.walk:
            yield (root, dirs, files)
        """
        pass

    # ─── Transfer ────────────────────────────────────────────────
    @abstractmethod
    def read_chunks(self, path: str, chunk_size: int = 4 * 1024 * 1024):
        """
        Generator that yields bytes chunk by chunk.
        Used by the copy engine.
        """
        pass

    @abstractmethod
    def write_chunks(self, path: str, chunks):
        """
        Write a file from a generator of bytes chunks.
        Used by the copy engine.
        """
        pass

    def get_total_size(self, path: str) -> int:
        """Total size of a single file, or of a directory tree recursively."""
        if not self.isdir(path):
            return self.getsize(path)
        total = 0
        for root, dirs, files in self.walk(path):
            for f in files:
                try:
                    total += self.getsize(self.join(root, f))
                except Exception:
                    # Best-effort: skip entries that vanish or can't be stat'ed.
                    pass
        return total

    # ─── Plugin metadata ──────────────────────────────────────────
    @classmethod
    def get_config_fields(cls) -> list:
        """
        Return the list of config fields this plugin needs.
        Each field: { name, label, type, required, default }
        type: "text" | "password" | "number" | "file"
        Override in each plugin.
        """
        return []

111
plugins/local.py Normal file
View File

@@ -0,0 +1,111 @@
import os
from .base import AbstractFS
class LocalFS(AbstractFS):
    """Filesystem plugin backed by the local machine's filesystem."""

    PLUGIN_NAME = "local"
    PLUGIN_LABEL = "Local"

    def __init__(self):
        self._root = "/"            # browsing root, set by connect()
        self._connected = False

    def connect(self, config: dict):
        """'Connect' by remembering the configured root path (no real session)."""
        self._root = config.get("root_path", "/")
        self._connected = True

    def disconnect(self):
        """Mark the plugin as disconnected; nothing to tear down locally."""
        self._connected = False

    def is_connected(self) -> bool:
        return self._connected

    # ─── Navigation ──────────────────────────────────────────────
    def list(self, path: str) -> list:
        """List a directory as dicts { name, path, is_dir, size, mtime }.

        Directories come first, then files, each group sorted
        case-insensitively. Entries whose metadata cannot be read
        (permission denied, deleted mid-scan) are skipped.
        """
        # Use scandir as a context manager so the directory handle is
        # closed promptly instead of waiting for garbage collection.
        with os.scandir(path) as it:
            entries = sorted(it, key=lambda e: (not e.is_dir(), e.name.lower()))
        items = []
        for entry in entries:
            try:
                st = entry.stat()
            except OSError:
                # Covers PermissionError and files removed between
                # scandir() and stat().
                continue
            items.append({
                'name': entry.name,
                'path': entry.path,
                'is_dir': entry.is_dir(),
                'size': st.st_size if not entry.is_dir() else 0,
                'mtime': st.st_mtime,
            })
        return items

    def isdir(self, path: str) -> bool:
        return os.path.isdir(path)

    def exists(self, path: str) -> bool:
        return os.path.exists(path)

    def getsize(self, path: str) -> int:
        return os.path.getsize(path)

    def join(self, *parts) -> str:
        return os.path.join(*parts)

    def basename(self, path: str) -> str:
        return os.path.basename(path)

    def dirname(self, path: str) -> str:
        return os.path.dirname(path)

    def relpath(self, path: str, base: str) -> str:
        return os.path.relpath(path, base)

    # ─── Operations ──────────────────────────────────────────────
    def mkdir(self, path: str):
        """Create *path* and any missing parents; no error if it exists."""
        os.makedirs(path, exist_ok=True)

    def rename(self, old_path: str, new_path: str):
        os.rename(old_path, new_path)

    def remove(self, path: str):
        """Delete a file, or a directory tree recursively."""
        if os.path.isdir(path):
            import shutil
            shutil.rmtree(path)
        else:
            os.remove(path)

    def walk(self, path: str):
        """Generator with os.walk semantics: yields (root, dirs, files)."""
        yield from os.walk(path)

    # ─── Transfer ────────────────────────────────────────────────
    def read_chunks(self, path: str, chunk_size: int = 4 * 1024 * 1024):
        """Yield the file at *path* as bytes chunks of at most *chunk_size*."""
        with open(path, 'rb') as f:
            while True:
                buf = f.read(chunk_size)
                if not buf:
                    break
                yield buf

    def write_chunks(self, path: str, chunks):
        """Write *path* from an iterable of bytes chunks, creating parents."""
        parent = os.path.dirname(path)
        if parent:
            # A bare filename has no parent directory to create;
            # os.makedirs('') would raise FileNotFoundError.
            os.makedirs(parent, exist_ok=True)
        with open(path, 'wb') as f:
            for chunk in chunks:
                f.write(chunk)

    # ─── Config ──────────────────────────────────────────────────
    @classmethod
    def get_config_fields(cls) -> list:
        """Describe the single config field (root_path) this plugin needs."""
        return [
            {
                'name': 'root_path',
                'label': 'Chemin racine',
                'type': 'text',
                'required': True,
                'default': '/',
                'placeholder': '/mnt/nas'
            }
        ]

202
plugins/sftp.py Normal file
View File

@@ -0,0 +1,202 @@
import os
import stat
import posixpath
import paramiko
from .base import AbstractFS
class SFTPfs(AbstractFS):
    """SFTP filesystem plugin backed by paramiko."""

    PLUGIN_NAME = "sftp"
    PLUGIN_LABEL = "SFTP"

    def __init__(self):
        self._client = None      # paramiko SSHClient
        self._sftp = None        # paramiko SFTPClient
        self._root = "/"
        self._connected = False

    def connect(self, config: dict):
        """Open the SSH session and SFTP channel described by *config*.

        Required keys: host, username. Optional: port (default 22),
        password, key_path, root_path.
        """
        host = config['host']
        port = int(config.get('port', 22))
        username = config['username']
        password = config.get('password') or None
        key_path = config.get('key_path') or None
        self._root = config.get('root_path', '/')
        self._client = paramiko.SSHClient()
        # NOTE(review): AutoAddPolicy trusts any host key on first contact —
        # convenient for a LAN tool, but exposed to MITM on open networks.
        self._client.set_missing_host_key_policy(paramiko.AutoAddPolicy())
        connect_kwargs = dict(hostname=host, port=port, username=username, timeout=10)
        if key_path:
            connect_kwargs['key_filename'] = key_path
        if password:
            connect_kwargs['password'] = password
        self._client.connect(**connect_kwargs)
        self._sftp = self._client.open_sftp()
        self._connected = True

    def disconnect(self):
        """Close the SFTP channel and SSH session, ignoring errors."""
        try:
            if self._sftp:
                self._sftp.close()
            if self._client:
                self._client.close()
        except Exception:
            pass
        self._sftp = None
        self._client = None
        self._connected = False

    def is_connected(self) -> bool:
        """Return True if the session responds to a stat probe."""
        if not self._connected or not self._sftp:
            return False
        try:
            self._sftp.stat('.')
            return True
        except Exception:
            self._connected = False
            return False

    def _reconnect_if_needed(self, config):
        """Re-establish the session with *config* if the probe fails."""
        if not self.is_connected():
            self.connect(config)

    # ─── Navigation ──────────────────────────────────────────────
    def list(self, path: str) -> list:
        """List a remote directory: dicts { name, path, is_dir, size, mtime },
        directories first, then files, sorted case-insensitively."""
        items = []
        for attr in self._sftp.listdir_attr(path):
            is_dir = stat.S_ISDIR(attr.st_mode)
            items.append({
                'name': attr.filename,
                'path': posixpath.join(path, attr.filename),
                'is_dir': is_dir,
                'size': attr.st_size if not is_dir else 0,
                'mtime': attr.st_mtime,
            })
        items.sort(key=lambda e: (not e['is_dir'], e['name'].lower()))
        return items

    def isdir(self, path: str) -> bool:
        try:
            return stat.S_ISDIR(self._sftp.stat(path).st_mode)
        except Exception:
            return False

    def exists(self, path: str) -> bool:
        try:
            self._sftp.stat(path)
            return True
        except Exception:
            return False

    def getsize(self, path: str) -> int:
        return self._sftp.stat(path).st_size

    def join(self, *parts) -> str:
        return posixpath.join(*parts)

    def basename(self, path: str) -> str:
        return posixpath.basename(path)

    def dirname(self, path: str) -> str:
        return posixpath.dirname(path)

    def relpath(self, path: str, base: str) -> str:
        """Strip *base* from *path* when path lies under it.

        Deliberately not posixpath.relpath: paths outside *base* are
        returned unchanged rather than rewritten with "../" segments.
        """
        base_norm = base.rstrip('/')
        if not base_norm:
            # base is "/" (or empty): everything is under it.
            return path.lstrip('/')
        if path == base_norm or path == base:
            return ''
        # Match only on a full path-component boundary, so base="/foo"
        # does not wrongly claim "/foobar".
        if path.startswith(base_norm + '/'):
            return path[len(base_norm) + 1:]
        return path

    # ─── Operations ──────────────────────────────────────────────
    def mkdir(self, path: str):
        """Create the directory and every missing parent."""
        parts = path.split('/')
        current = ''
        for part in parts:
            if not part:
                current = '/'
                continue
            current = posixpath.join(current, part)
            try:
                self._sftp.stat(current)
            except IOError:
                # stat failure is treated as "does not exist yet".
                self._sftp.mkdir(current)

    def rename(self, old_path: str, new_path: str):
        self._sftp.rename(old_path, new_path)

    def remove(self, path: str):
        """Delete a file, or a directory tree recursively."""
        if self.isdir(path):
            for attr in self._sftp.listdir_attr(path):
                child = posixpath.join(path, attr.filename)
                if stat.S_ISDIR(attr.st_mode):
                    self.remove(child)
                else:
                    self._sftp.remove(child)
            self._sftp.rmdir(path)
        else:
            self._sftp.remove(path)

    def walk(self, path: str):
        """os.walk equivalent for SFTP: yields (root, dirs, files)."""
        dirs = []
        files = []
        for attr in self._sftp.listdir_attr(path):
            if stat.S_ISDIR(attr.st_mode):
                dirs.append(attr.filename)
            else:
                files.append(attr.filename)
        yield path, dirs, files
        for d in dirs:
            yield from self.walk(posixpath.join(path, d))

    # ─── Transfer ────────────────────────────────────────────────
    def read_chunks(self, path: str, chunk_size: int = 4 * 1024 * 1024):
        """Yield the remote file as bytes chunks of roughly *chunk_size*.

        Caps paramiko's read pipelining while reading to keep memory
        bounded, and restores the original limit afterwards.
        """
        import gc
        # NOTE(review): this patches a class-level attribute, so it affects
        # every SFTPFile in the process while a read is in flight.
        old_max = paramiko.sftp_file.SFTPFile.MAX_REQUEST_SIZE
        paramiko.sftp_file.SFTPFile.MAX_REQUEST_SIZE = 32768  # 32KB native SFTP block
        SFTP_BLOCK = 32768
        try:
            with self._sftp.open(path, 'rb') as f:
                accumulated = bytearray()
                while True:
                    block = f.read(SFTP_BLOCK)
                    if not block:
                        break
                    accumulated += block
                    if len(accumulated) >= chunk_size:
                        data = bytes(accumulated)
                        accumulated = bytearray()
                        gc.collect()  # force Python to return memory to the OS
                        yield data
                if accumulated:
                    yield bytes(accumulated)
        finally:
            paramiko.sftp_file.SFTPFile.MAX_REQUEST_SIZE = old_max

    def write_chunks(self, path: str, chunks):
        """Write the remote file from an iterable of bytes chunks,
        creating parent directories first."""
        self.mkdir(posixpath.dirname(path))
        with self._sftp.open(path, 'wb') as f:
            for chunk in chunks:
                f.write(chunk)

    # ─── Config ──────────────────────────────────────────────────
    @classmethod
    def get_config_fields(cls) -> list:
        """Describe the connection fields needed by the SFTP plugin."""
        return [
            {'name': 'host', 'label': 'Hôte', 'type': 'text', 'required': True, 'placeholder': 'sftp.exemple.com'},
            {'name': 'port', 'label': 'Port', 'type': 'number', 'required': False, 'default': 22},
            {'name': 'username', 'label': 'Utilisateur', 'type': 'text', 'required': True, 'placeholder': 'user'},
            {'name': 'password', 'label': 'Mot de passe', 'type': 'password', 'required': False, 'placeholder': 'Laisser vide si clé SSH'},
            {'name': 'key_path', 'label': 'Clé SSH (chemin)', 'type': 'text', 'required': False, 'placeholder': '/root/.ssh/id_rsa'},
            {'name': 'root_path', 'label': 'Dossier racine', 'type': 'text', 'required': False, 'default': '/', 'placeholder': '/home/user'},
        ]