diff --git a/app.py b/app.py
index e9b253c..2178f57 100644
--- a/app.py
+++ b/app.py
@@ -20,11 +20,13 @@ def create_app():
     # Initialize extensions
     db.init_app(app)
     migrate.init_app(app, db)
+
+    # Initialize storage backend
+    app.storage = StorageBackend(app.config['STORAGE_URL'])
 
     return app
 
 app = create_app()
-storage = StorageBackend(app.config['STORAGE_URL'])
 
 def generate_unique_filename(original_filename):
     # Get the file extension
@@ -78,7 +80,7 @@ def add_asset():
             )
 
             # Save featured image with unique filename using storage backend
-            storage.save(processed_file, unique_featured_filename)
+            app.storage.save(processed_file, unique_featured_filename)
 
             # Create asset with unique filename
             asset = Asset(
@@ -96,7 +98,7 @@
             if file and allowed_file(file.filename):
                 original_filename = secure_filename(file.filename)
                 unique_filename = generate_unique_filename(original_filename)
-                storage.save(file, unique_filename)
+                app.storage.save(file, unique_filename)
                 asset_file = AssetFile(
                     filename=unique_filename,
                     original_filename=original_filename,
@@ -113,6 +115,7 @@
 
     except Exception as e:
         db.session.rollback()
+        app.logger.error(f"Error adding asset: {str(e)}", exc_info=True)
        return jsonify({
             'success': False,
             'error': str(e)
@@ -147,7 +150,7 @@ def edit_asset(id):
 
             # Delete old featured image
             if asset.featured_image:
-                storage.delete(asset.featured_image)
+                app.storage.delete(asset.featured_image)
 
             # Process and convert featured image to WebP
             processed_image, ext = ImageProcessor.process_featured_image(featured_image)
@@ -164,7 +167,7 @@
             )
 
             # Save the processed image
-            storage.save(processed_file, unique_featured_filename)
+            app.storage.save(processed_file, unique_featured_filename)
             asset.featured_image = unique_featured_filename
             asset.original_featured_image = original_featured_filename
 
@@ -174,7 +177,7 @@
             if file and allowed_file(file.filename):
                 original_filename = secure_filename(file.filename)
                 unique_filename = generate_unique_filename(original_filename)
-                storage.save(file, unique_filename)
+                app.storage.save(file, unique_filename)
                 asset_file = AssetFile(
                     filename=unique_filename,
                     original_filename=original_filename,
@@ -206,12 +209,12 @@ def delete_asset(id):
 
         # Delete featured image
         if asset.featured_image:
-            if not storage.delete(asset.featured_image):
+            if not app.storage.delete(asset.featured_image):
                 deletion_errors.append(f"Failed to delete featured image: {asset.featured_image}")
 
         # Delete additional files
         for file in asset.files:
-            if not storage.delete(file.filename):
+            if not app.storage.delete(file.filename):
                 deletion_errors.append(f"Failed to delete file: {file.filename}")
             db.session.delete(file)
 
@@ -241,7 +244,7 @@ def delete_asset_file(id):
     display_name = asset_file.original_filename or asset_file.filename
 
     # Delete the file using storage backend
-    if not storage.delete(filename):
+    if not app.storage.delete(filename):
         error_msg = f'Failed to delete file {display_name} from storage'
         app.logger.error(error_msg)
         flash(error_msg, 'error')
diff --git a/storage.py b/storage.py
index 75e72bd..1792d39 100644
--- a/storage.py
+++ b/storage.py
@@ -1,9 +1,10 @@
 import os
 import fsspec
+import logging
 import asyncio
 from typing import BinaryIO, Optional, Union
 from urllib.parse import urlparse
-from flask import current_app
+from flask import current_app, url_for
 from werkzeug.datastructures import FileStorage
 
 class StorageBackend:
@@ -18,6 +19,15 @@ class StorageBackend:
         self.parsed_url = urlparse(storage_url)
         self.protocol = self.parsed_url.scheme or 'file'
 
+        # Set up logging - use Flask logger if in app context, otherwise use Python logging
+        try:
+            current_app.name  # Check if we're in app context
+            self.logger = current_app.logger
+        except RuntimeError:
+            self.logger = logging.getLogger(__name__)
+
+        self.logger.info(f"Initializing StorageBackend with URL: {storage_url}, protocol: {self.protocol}")
+
         # Configure filesystem
         if self.protocol == 's3':
             self.fs = fsspec.filesystem(
@@ -31,28 +41,70 @@
             )
             self.bucket = self.parsed_url.netloc
             self.base_path = self.parsed_url.path.lstrip('/')
+            self.logger.debug(f"Configured S3 storage with bucket: {self.bucket}, base_path: {self.base_path}")
         else:
             self.fs = fsspec.filesystem('file')
             self.base_path = self.parsed_url.path or '/uploads'
+            self.logger.debug(f"Configured local storage with base_path: {self.base_path}")
 
     def _get_full_path(self, filename: str) -> str:
         """Get full path for a file"""
         if self.protocol == 's3':
-            return os.path.join(self.base_path, filename)
-        return os.path.join(current_app.root_path, self.base_path, filename)
+            full_path = os.path.join(self.base_path, filename)
+            self.logger.debug(f"Generated S3 full path: {full_path}")
+            return full_path
+
+        full_path = os.path.join(current_app.root_path, self.base_path, filename)
+        self.logger.debug(f"Generated local full path: {full_path}")
+        return full_path
 
     def save(self, file_storage: FileStorage, filename: str) -> str:
         """Save a file to storage"""
-        full_path = self._get_full_path(filename)
-
-        if self.protocol == 's3':
-            with self.fs.open(f"{self.bucket}/{full_path}", 'wb') as f:
-                file_storage.save(f)
-            return f"s3://{self.bucket}/{full_path}"
-        else:
-            os.makedirs(os.path.dirname(full_path), exist_ok=True)
-            file_storage.save(full_path)
-            return f"file://{full_path}"
+        try:
+            full_path = self._get_full_path(filename)
+            self.logger.info(f"Attempting to save file {filename} to {full_path}")
+
+            if not isinstance(file_storage, FileStorage):
+                self.logger.error(f"Invalid file_storage object type: {type(file_storage)}")
+                raise ValueError("file_storage must be a FileStorage object")
+
+            if self.protocol == 's3':
+                s3_path = f"{self.bucket}/{full_path}"
+                self.logger.debug(f"Opening S3 file for writing: {s3_path}")
+                with self.fs.open(s3_path, 'wb') as f:
+                    self.logger.debug("Saving file content to S3...")
+                    file_storage.save(f)
+
+                # Verify the file was saved
+                if self.fs.exists(s3_path):
+                    self.logger.info(f"Successfully saved file to S3: {s3_path}")
+                else:
+                    self.logger.error(f"Failed to verify file existence in S3: {s3_path}")
+                    raise RuntimeError(f"Failed to verify file existence in S3: {s3_path}")
+
+                return f"s3://{self.bucket}/{full_path}"
+            else:
+                # Create directory structure if it doesn't exist
+                dir_path = os.path.dirname(full_path)
+                self.logger.debug(f"Creating local directory structure: {dir_path}")
+                os.makedirs(dir_path, exist_ok=True)
+
+                self.logger.debug(f"Saving file to local path: {full_path}")
+                file_storage.save(full_path)
+
+                # Verify the file was saved
+                if os.path.exists(full_path):
+                    self.logger.info(f"Successfully saved file locally: {full_path}")
+                    self.logger.debug(f"File size: {os.path.getsize(full_path)} bytes")
+                else:
+                    self.logger.error(f"Failed to verify file existence locally: {full_path}")
+                    raise RuntimeError(f"Failed to verify file existence locally: {full_path}")
+
+                return f"file://{full_path}"
+
+        except Exception as e:
+            self.logger.error(f"Error saving file {filename}: {str(e)}", exc_info=True)
+            raise
 
     def open(self, filename: str, mode: str = 'rb') -> BinaryIO:
         """Open a file from storage"""
@@ -70,33 +122,33 @@
             full_path = self._get_full_path(filename)
             if self.protocol == 's3':
                 path = f"{self.bucket}/{full_path}"
-                current_app.logger.debug(f"Attempting to delete S3 file: {path}")
+                self.logger.debug(f"Attempting to delete S3 file: {path}")
                 if self.fs.exists(path):
-                    current_app.logger.debug(f"File exists, deleting: {path}")
+                    self.logger.debug(f"File exists, deleting: {path}")
                     self.fs.delete(path)
                     deleted = not self.fs.exists(path)
                     if deleted:
-                        current_app.logger.debug(f"Successfully deleted file: {path}")
+                        self.logger.debug(f"Successfully deleted file: {path}")
                     else:
-                        current_app.logger.error(f"Failed to delete file: {path}")
+                        self.logger.error(f"Failed to delete file: {path}")
                     return deleted
-                current_app.logger.debug(f"File doesn't exist, skipping delete: {path}")
+                self.logger.debug(f"File doesn't exist, skipping delete: {path}")
                 return True  # File didn't exist
             else:
-                current_app.logger.debug(f"Attempting to delete local file: {full_path}")
+                self.logger.debug(f"Attempting to delete local file: {full_path}")
                 if self.fs.exists(full_path):
-                    current_app.logger.debug(f"File exists, deleting: {full_path}")
+                    self.logger.debug(f"File exists, deleting: {full_path}")
                     self.fs.delete(full_path)
                     deleted = not os.path.exists(full_path)
                     if deleted:
-                        current_app.logger.debug(f"Successfully deleted file: {full_path}")
+                        self.logger.debug(f"Successfully deleted file: {full_path}")
                     else:
-                        current_app.logger.error(f"Failed to delete file: {full_path}")
+                        self.logger.error(f"Failed to delete file: {full_path}")
                     return deleted
-                current_app.logger.debug(f"File doesn't exist, skipping delete: {full_path}")
+                self.logger.debug(f"File doesn't exist, skipping delete: {full_path}")
                 return True  # File didn't exist
         except Exception as e:
-            current_app.logger.error(f"Failed to delete file {filename}: {str(e)}", exc_info=True)
+            self.logger.error(f"Failed to delete file {filename}: {str(e)}", exc_info=True)
             return False
 
     def url_for(self, filename: str) -> str:
@@ -112,7 +164,8 @@
                 return f"{endpoint}/{self.bucket}/{full_path}"
             return f"s3://{self.bucket}/{full_path}"
         else:
-            return f"/uploads/{filename}"
+            # For local storage, use static/uploads path
+            return url_for('static', filename=f'uploads/{filename}')
 
     def exists(self, filename: str) -> bool:
         """Check if a file exists"""