diff --git a/Pipfile b/Pipfile index 362b2b5..76da48d 100644 --- a/Pipfile +++ b/Pipfile @@ -6,6 +6,7 @@ name = "pypi" [packages] bottle = "*" pillow = "*" +tifffile = "*" [dev-packages] diff --git a/Pipfile.lock b/Pipfile.lock index 906853c..4da2fd0 100644 --- a/Pipfile.lock +++ b/Pipfile.lock @@ -1,7 +1,7 @@ { "_meta": { "hash": { - "sha256": "32de86725c93602d28c892b89df1895b4d6e587b1c4dc93365217e5146a0812a" + "sha256": "3b903e30841d47458bcc4b52b597eb6a669b5a2e24c886664c4181923959f1e6" }, "pipfile-spec": 6, "requires": { @@ -24,6 +24,57 @@ "index": "pypi", "version": "==0.13.4" }, + "numpy": { + "hashes": [ + "sha256:0123ffdaa88fa4ab64835dcbde75dcdf89c453c922f18dced6e27c90d1d0ec5a", + "sha256:11a76c372d1d37437857280aa142086476136a8c0f373b2e648ab2c8f18fb195", + "sha256:13e689d772146140a252c3a28501da66dfecd77490b498b168b501835041f951", + "sha256:1e795a8be3ddbac43274f18588329c72939870a16cae810c2b73461c40718ab1", + "sha256:26df23238872200f63518dd2aa984cfca675d82469535dc7162dc2ee52d9dd5c", + "sha256:286cd40ce2b7d652a6f22efdfc6d1edf879440e53e76a75955bc0c826c7e64dc", + "sha256:2b2955fa6f11907cf7a70dab0d0755159bca87755e831e47932367fc8f2f2d0b", + "sha256:2da5960c3cf0df7eafefd806d4e612c5e19358de82cb3c343631188991566ccd", + "sha256:312950fdd060354350ed123c0e25a71327d3711584beaef30cdaa93320c392d4", + "sha256:423e89b23490805d2a5a96fe40ec507407b8ee786d66f7328be214f9679df6dd", + "sha256:496f71341824ed9f3d2fd36cf3ac57ae2e0165c143b55c3a035ee219413f3318", + "sha256:49ca4decb342d66018b01932139c0961a8f9ddc7589611158cb3c27cbcf76448", + "sha256:51129a29dbe56f9ca83438b706e2e69a39892b5eda6cedcb6b0c9fdc9b0d3ece", + "sha256:5fec9451a7789926bcf7c2b8d187292c9f93ea30284802a0ab3f5be8ab36865d", + "sha256:671bec6496f83202ed2d3c8fdc486a8fc86942f2e69ff0e986140339a63bcbe5", + "sha256:7f0a0c6f12e07fa94133c8a67404322845220c06a9e80e85999afe727f7438b8", + "sha256:807ec44583fd708a21d4a11d94aedf2f4f3c3719035c76a2bbe1fe8e217bdc57", + 
"sha256:883c987dee1880e2a864ab0dc9892292582510604156762362d9326444636e78", + "sha256:8c5713284ce4e282544c68d1c3b2c7161d38c256d2eefc93c1d683cf47683e66", + "sha256:8cafab480740e22f8d833acefed5cc87ce276f4ece12fdaa2e8903db2f82897a", + "sha256:8df823f570d9adf0978347d1f926b2a867d5608f434a7cff7f7908c6570dcf5e", + "sha256:9059e10581ce4093f735ed23f3b9d283b9d517ff46009ddd485f1747eb22653c", + "sha256:905d16e0c60200656500c95b6b8dca5d109e23cb24abc701d41c02d74c6b3afa", + "sha256:9189427407d88ff25ecf8f12469d4d39d35bee1db5d39fc5c168c6f088a6956d", + "sha256:96a55f64139912d61de9137f11bf39a55ec8faec288c75a54f93dfd39f7eb40c", + "sha256:97032a27bd9d8988b9a97a8c4d2c9f2c15a81f61e2f21404d7e8ef00cb5be729", + "sha256:984d96121c9f9616cd33fbd0618b7f08e0cfc9600a7ee1d6fd9b239186d19d97", + "sha256:9a92ae5c14811e390f3767053ff54eaee3bf84576d99a2456391401323f4ec2c", + "sha256:9ea91dfb7c3d1c56a0e55657c0afb38cf1eeae4544c208dc465c3c9f3a7c09f9", + "sha256:a15f476a45e6e5a3a79d8a14e62161d27ad897381fecfa4a09ed5322f2085669", + "sha256:a392a68bd329eafac5817e5aefeb39038c48b671afd242710b451e76090e81f4", + "sha256:a3f4ab0caa7f053f6797fcd4e1e25caee367db3112ef2b6ef82d749530768c73", + "sha256:a46288ec55ebbd58947d31d72be2c63cbf839f0a63b49cb755022310792a3385", + "sha256:a61ec659f68ae254e4d237816e33171497e978140353c0c2038d46e63282d0c8", + "sha256:a842d573724391493a97a62ebbb8e731f8a5dcc5d285dfc99141ca15a3302d0c", + "sha256:becfae3ddd30736fe1889a37f1f580e245ba79a5855bff5f2a29cb3ccc22dd7b", + "sha256:c05e238064fc0610c840d1cf6a13bf63d7e391717d247f1bf0318172e759e692", + "sha256:c1c9307701fec8f3f7a1e6711f9089c06e6284b3afbbcd259f7791282d660a15", + "sha256:c7b0be4ef08607dd04da4092faee0b86607f111d5ae68036f16cc787e250a131", + "sha256:cfd41e13fdc257aa5778496b8caa5e856dc4896d4ccf01841daee1d96465467a", + "sha256:d731a1c6116ba289c1e9ee714b08a8ff882944d4ad631fd411106a30f083c326", + "sha256:df55d490dea7934f330006d0f81e8551ba6010a5bf035a249ef61a94f21c500b", + "sha256:ec9852fb39354b5a45a80bdab5ac02dd02b15f44b3804e9f00c556bf24b4bded", 
+ "sha256:f15975dfec0cf2239224d80e32c3170b1d168335eaedee69da84fbe9f1f9cd04", + "sha256:f26b258c385842546006213344c50655ff1555a9338e2e5e02a0756dc3e803dd" + ], + "markers": "python_version >= '3.9'", + "version": "==2.0.2" + }, "pillow": { "hashes": [ "sha256:023f6d2d11784a465f09fd09a34b150ea4672e85fb3d05931d89f373ab14abb2", @@ -136,6 +187,15 @@ "index": "pypi", "markers": "python_version >= '3.9'", "version": "==11.3.0" + }, + "tifffile": { + "hashes": [ + "sha256:2c9508fe768962e30f87def61819183fb07692c258cb175b3c114828368485a4", + "sha256:8bc59a8f02a2665cd50a910ec64961c5373bee0b8850ec89d3b7b485bf7be7ad" + ], + "index": "pypi", + "markers": "python_version >= '3.9'", + "version": "==2024.8.30" } }, "develop": {} diff --git a/app.py b/app.py index 4932a4e..38eb006 100644 --- a/app.py +++ b/app.py @@ -1,58 +1,149 @@ -from bottle import Bottle, response, request, run, static_file, template, BaseRequest +from bottle import Bottle, request, run, static_file, template, BaseRequest, response from PIL import Image import os import tempfile import zipfile +import json from concurrent.futures import ProcessPoolExecutor from functools import partial +from threading import Lock +import shutil +import tifffile +import tifffile +import subprocess +import numpy as np -BaseRequest.MEMFILE_MAX = 100 * 1024 * 1024 # Supporte jusqu'à 100 Mo + + +def clear_temp(): + try: + shutil.rmtree(UPLOAD_DIR) + print("🧹 Cache temporaire supprimé") + except Exception as e: + print(f"⚠️ Erreur suppression cache : {e}") + + + +# ⚙️ Configuration mémoire upload +BaseRequest.MEMFILE_MAX = 5 * 1024 * 1024 * 1024 app = Bottle() -UPLOAD_DIR = tempfile.mkdtemp() +def fresh_upload_dir(): + path = tempfile.mkdtemp() + os.makedirs(os.path.join(path, 'resized'), exist_ok=True) + return path + +UPLOAD_DIR = fresh_upload_dir() OUTPUT_DIR = os.path.join(UPLOAD_DIR, 'resized') ZIP_PATH = os.path.join(UPLOAD_DIR, 'resized_images.zip') + +OUTPUT_DIR = os.path.join(UPLOAD_DIR, 'resized') +ZIP_PATH = 
os.path.join(UPLOAD_DIR, 'resized_images.zip') os.makedirs(OUTPUT_DIR, exist_ok=True) -def resize_image(filepath, output_dir, ratio): - from PIL import Image # Re-importé dans chaque processus - import os + +def indexed_resize(args): + idx, file_path, output_dir, ratio, total = args + return resize_image(file_path, output_dir, ratio, index=idx + 1, total=total) + + +# 📊 Progression partagée +progress_data = {"total": 0, "current": 0} +progress_lock = Lock() + +# 📦 Route pour obtenir la progression en JSON +@app.route('/progress') +def progress(): + response.content_type = 'application/json' + with progress_lock: + return json.dumps(progress_data) + +# 🖼️ Vérifie et traite une image +def resize_image(filepath, output_dir, ratio, index=None, total=None): + try: + name, ext = os.path.splitext(os.path.basename(filepath)) + ext = ext.lower() + + # 🧠 Image valide + with Image.open(filepath) as test_img: + test_img.verify() + + except Exception: + print(f"⛔️ Ignoré (non image ou corrompu) : {filepath}") + return None try: - with Image.open(filepath) as img: - exif_data = img.info.get('exif') + output_path = None + + if ext in ['.tif', '.tiff']: + # ✅ Lecture via tifffile + original_array = tifffile.imread(filepath) + img = Image.fromarray(original_array) + + # Redimensionner width, height = img.size new_size = (int(width / ratio), int(height / ratio)) - try: img = img.resize(new_size, Image.LANCZOS) except ValueError: img = Image.open(filepath).point(lambda x: x / 256).convert("L") img = img.resize(new_size, Image.LANCZOS) - name, ext = os.path.splitext(os.path.basename(filepath)) - output_path = os.path.join(output_dir, f"{name}_resized{ext}") - img.save(output_path, quality=85, optimize=True, exif=exif_data or []) - print(f"✅ Image enregistrée : {output_path} === ({width, height}) => ({new_size})") # DEBUG - return output_path + # Convertir en array NumPy et écrire + resized_array = np.array(img) + output_path = os.path.join(output_dir, f"{name}_resized.tif") + 
tifffile.imwrite(output_path, resized_array) + + # ✅ Copier les EXIF depuis original + subprocess.run([ + 'exiftool', + '-overwrite_original', + '-tagsFromFile', filepath, # exiftool takes the source file as a separate argument; '-tagsFromFile=SRC' is parsed as a tag write and fails silently (stderr is discarded) + output_path + ], stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL) + + else: + # ✅ JPEG, PNG, etc. + with Image.open(filepath) as img: + exif_data = img.info.get('exif') + img = img.convert('RGB') + width, height = img.size + new_size = (int(width / ratio), int(height / ratio)) + try: + img = img.resize(new_size, Image.LANCZOS) + except ValueError: + img = Image.open(filepath).point(lambda x: x / 256).convert("L") + img = img.resize(new_size, Image.LANCZOS) + + output_path = os.path.join(output_dir, f"{name}_resized.jpg") + img.save(output_path, quality=85, optimize=True, exif=exif_data or b"") + + # ✅ Affichage progression + if index and total: + print(f"✅ Traitement {index}/{total} : {os.path.basename(output_path)}") + + with progress_lock: # FIXME(review): this runs in a ProcessPoolExecutor worker process — the web process's progress_data is never updated, so /progress always reports 0 + progress_data["current"] += 1 + + return output_path + except Exception as e: print(f"❌ Erreur pour {filepath}: {e}") return None + + + @app.route('/') def index(): return template('index.tpl') - @app.post('/upload') def upload(): files = request.files.getall('files') ratio = float(request.forms.get('ratio', 2)) - if not files: - return "Aucun fichier reçu."
- saved_paths = [] for file in files: filename = os.path.basename(file.filename) @@ -60,19 +151,24 @@ def upload(): file.save(save_path, overwrite=True) saved_paths.append(save_path) - # 🧠 Traitement en parallèle - resize_func = partial(resize_image, output_dir=OUTPUT_DIR, ratio=ratio) - with ProcessPoolExecutor() as executor: - resized_files = list(executor.map(resize_func, saved_paths)) + with progress_lock: + progress_data["total"] = len(saved_paths) + progress_data["current"] = 0 + + args_list = [ + (idx, path, OUTPUT_DIR, ratio, len(saved_paths)) + for idx, path in enumerate(saved_paths) + ] + + with ProcessPoolExecutor() as executor: + resized_files = list(executor.map(indexed_resize, args_list)) - # 📦 Création du ZIP with zipfile.ZipFile(ZIP_PATH, 'w') as zipf: for path in resized_files: - if path: # Skip si erreur + if path: zipf.write(path, arcname=os.path.basename(path)) - return template( - 'result.tpl', + return template('result.tpl', count=len([p for p in resized_files if p]), ratio=ratio, images=[os.path.basename(p) for p in resized_files if p] @@ -83,7 +179,15 @@ def upload(): def download(): if not os.path.exists(ZIP_PATH): return "ZIP non généré ❌" - return static_file('resized_images.zip', root=UPLOAD_DIR, download='images_reduites.zip') + + # ⚠️ Sauvegarder ZIP dans une variable avant suppression + zip_file = static_file('resized_images.zip', root=UPLOAD_DIR, download='images_reduites.zip') + + # 🔥 Nettoyage asynchrone (on ne bloque pas la réponse) + import threading + threading.Timer(2.0, clear_temp).start() # FIXME(review): may fire before the client finishes streaming the ZIP, and clear_temp permanently deletes UPLOAD_DIR + return zip_file + @app.route('/resized/<filename>') @@ -93,7 +197,5 @@ def serve_resized(filename): response.headers['Expires'] = '0' return static_file(filename, root=OUTPUT_DIR) - - if __name__ == "__main__": - run(app, host='localhost', port=8080, debug=True) + run(app, host='0.0.0.0', port=4000, debug=True, reloader=True) diff --git a/dockerfile b/dockerfile index 06012dd..e8a7484 100644 --- a/dockerfile +++ 
b/dockerfile @@ -3,7 +3,7 @@ FROM python:3.11-slim # 📦 Installe les dépendances système pour Pillow RUN apt-get update && apt-get install -y \ - libjpeg-dev zlib1g-dev libpng-dev \ + libjpeg-dev zlib1g-dev libpng-dev exiftool libimage-exiftool-perl \ && rm -rf /var/lib/apt/lists/* # 📁 Dossier de travail dans le conteneur diff --git a/views/index.tpl b/views/index.tpl index a95beaf..6ac3f90 100644 --- a/views/index.tpl +++ b/views/index.tpl @@ -1,13 +1,12 @@ - - Réduction d'images + Réduction d’images @@ -88,28 +83,47 @@
-

Traitement des images en cours... Patientez ⏳

-
+

Traitement des images en cours...

+
+
+
diff --git a/views/result.tpl b/views/result.tpl index e4093ab..f5a81d8 100644 --- a/views/result.tpl +++ b/views/result.tpl @@ -1,13 +1,9 @@ - - Téléchargement prêt - - + Vos images sont prêtes -

📦 Vos images sont prêtes !

-
-

{{ count }} images ont été redimensionnées

-

Ratio appliqué : {{ ratio }}

+

{{ count }} images redimensionnées

+

Ratio utilisé : {{ ratio }}

- ⬇️ Télécharger les images réduites + ⬇️ Télécharger toutes les images
-