diff --git a/.dockerignore b/.dockerignore
index 1fb3901..c628f4a 100644
--- a/.dockerignore
+++ b/.dockerignore
@@ -21,5 +21,3 @@ coverage.xml
 venv/
 ENV/
 instance/
-repo-images/
-.forgejo/
\ No newline at end of file
diff --git a/README.md b/README.md
index 5497001..8a8c287 100644
--- a/README.md
+++ b/README.md
@@ -10,26 +10,6 @@ A digital asset management system built with Flask and S3-compatible storage.
 - License key management
 - Docker container support
 
-## Screenshots
-[removed screenshot markup: Home Page (repo-images/list-of-assets.webp), Asset View (repo-images/asset-view.webp), Edit Page (repo-images/edit-view.webp)]
 ## Container Registry
 
 This project includes automated container builds using Forgejo CI/CD. The container images are published to the project's container registry.
diff --git a/app.py b/app.py
index d223557..e9b253c 100644
--- a/app.py
+++ b/app.py
@@ -1,7 +1,6 @@
 import os
 import uuid
-import mimetypes
-from flask import Flask, render_template, request, redirect, url_for, flash, jsonify, send_from_directory, send_file, Response, stream_with_context
+from flask import Flask, render_template, request, redirect, url_for, flash, jsonify
 from werkzeug.utils import secure_filename
 from config import Config
 from flask_migrate import Migrate
@@ -21,13 +20,11 @@ def create_app():
     # Initialize extensions
     db.init_app(app)
     migrate.init_app(app, db)
-
-    # Initialize storage backend
-    app.storage = StorageBackend(app.config['STORAGE_URL'])
 
     return app
 
 app = create_app()
+storage = StorageBackend(app.config['STORAGE_URL'])
 
 def generate_unique_filename(original_filename):
     # Get the file extension
@@ -81,7 +78,7 @@ def add_asset():
             )
 
             # Save featured image with unique filename using storage backend
-            app.storage.save(processed_file, unique_featured_filename)
+            storage.save(processed_file, unique_featured_filename)
 
         # Create asset with unique filename
         asset = Asset(
@@ -99,7 +96,7 @@ def add_asset():
             if file and allowed_file(file.filename):
                 original_filename = secure_filename(file.filename)
                 unique_filename = generate_unique_filename(original_filename)
-                app.storage.save(file, unique_filename)
+                storage.save(file, unique_filename)
                 asset_file = AssetFile(
                     filename=unique_filename,
                     original_filename=original_filename,
@@ -116,7 +113,6 @@ def add_asset():
 
     except Exception as e:
         db.session.rollback()
-        app.logger.error(f"Error adding asset: {str(e)}", exc_info=True)
        return jsonify({
            'success': False,
            'error': str(e)
@@ -151,7 +147,7 @@ def edit_asset(id):
 
                 # Delete old featured image
                 if asset.featured_image:
-                    app.storage.delete(asset.featured_image)
+                    storage.delete(asset.featured_image)
 
                 # Process and convert featured image to WebP
                 processed_image, ext = ImageProcessor.process_featured_image(featured_image)
@@ -168,7 +164,7 @@ def edit_asset(id):
                 )
 
                 # Save the processed image
-                app.storage.save(processed_file, unique_featured_filename)
+                storage.save(processed_file, unique_featured_filename)
 
                 asset.featured_image = unique_featured_filename
                 asset.original_featured_image = original_featured_filename
@@ -178,7 +174,7 @@ def edit_asset(id):
             if file and allowed_file(file.filename):
                 original_filename = secure_filename(file.filename)
                 unique_filename = generate_unique_filename(original_filename)
-                app.storage.save(file, unique_filename)
+                storage.save(file, unique_filename)
                 asset_file = AssetFile(
                     filename=unique_filename,
                     original_filename=original_filename,
@@ -210,12 +206,12 @@ def delete_asset(id):
 
         # Delete featured image
         if asset.featured_image:
-            if not app.storage.delete(asset.featured_image):
+            if not storage.delete(asset.featured_image):
                 deletion_errors.append(f"Failed to delete featured image: {asset.featured_image}")
 
         # Delete additional files
         for file in asset.files:
-            if not app.storage.delete(file.filename):
+            if not storage.delete(file.filename):
                 deletion_errors.append(f"Failed to delete file: {file.filename}")
             db.session.delete(file)
 
@@ -245,7 +241,7 @@ def delete_asset_file(id):
         display_name = asset_file.original_filename or asset_file.filename
 
         # Delete the file using storage backend
-        if not app.storage.delete(filename):
+        if not storage.delete(filename):
             error_msg = f'Failed to delete file {display_name} from storage'
             app.logger.error(error_msg)
             flash(error_msg, 'error')
@@ -264,50 +260,5 @@ def delete_asset_file(id):
         flash('Failed to delete file: ' + str(e), 'error')
         return redirect(url_for('asset_detail', id=asset_id))
 
-@app.route('/download/<int:file_id>')
-def download_file(file_id):
-    """Download a file with its original filename"""
-    try:
-        asset_file = AssetFile.query.get_or_404(file_id)
-        filename = asset_file.filename
-        download_name = asset_file.original_filename or filename
-
-        # Guess the mime type
-        mime_type, _ = mimetypes.guess_type(download_name)
-        if mime_type is None:
-            mime_type = 'application/octet-stream'
-
-        app.logger.debug(f"Starting download of {filename} as {download_name} with type {mime_type}")
-
-        try:
-            file_stream = app.storage.get_file_stream(filename)
-
-            def generate():
-                try:
-                    while True:
-                        chunk = file_stream.read(8192)  # Read in 8KB chunks
-                        if not chunk:
-                            break
-                        yield chunk
-                finally:
-                    file_stream.close()
-
-            response = Response(
-                stream_with_context(generate()),
-                mimetype=mime_type
-            )
-            response.headers['Content-Disposition'] = f'attachment; filename="{download_name}"'
-            return response
-
-        except Exception as e:
-            app.logger.error(f"Error streaming file {filename}: {str(e)}", exc_info=True)
-            flash('Error downloading file. Please try again.', 'error')
-            return redirect(url_for('asset_detail', id=asset_file.asset_id))
-
-    except Exception as e:
-        app.logger.error(f"Error in download_file: {str(e)}", exc_info=True)
-        flash('File not found or error occurred.', 'error')
-        return redirect(url_for('index'))
-
 if __name__ == '__main__':
     app.run(host='0.0.0.0', port=5432, debug=True)
diff --git a/repo-images/asset-view.webp b/repo-images/asset-view.webp
deleted file mode 100644
index c946ff5..0000000
Binary files a/repo-images/asset-view.webp and /dev/null differ
diff --git a/repo-images/edit-view.webp b/repo-images/edit-view.webp
deleted file mode 100644
index ffe86c9..0000000
Binary files a/repo-images/edit-view.webp and /dev/null differ
diff --git a/repo-images/list-of-assets.webp b/repo-images/list-of-assets.webp
deleted file mode 100644
index 9121740..0000000
Binary files a/repo-images/list-of-assets.webp and /dev/null differ
diff --git a/storage.py b/storage.py
index 6886564..75e72bd 100644
--- a/storage.py
+++ b/storage.py
@@ -1,10 +1,9 @@
 import os
 import fsspec
-import logging
 import asyncio
 from typing import BinaryIO, Optional, Union
 from urllib.parse import urlparse
-from flask import current_app, url_for
+from flask import current_app
 from werkzeug.datastructures import FileStorage
 
 class StorageBackend:
@@ -19,15 +18,6 @@
         self.parsed_url = urlparse(storage_url)
         self.protocol = self.parsed_url.scheme or 'file'
 
-        # Set up logging - use Flask logger if in app context, otherwise use Python logging
-        try:
-            current_app.name  # Check if we're in app context
-            self.logger = current_app.logger
-        except RuntimeError:
-            self.logger = logging.getLogger(__name__)
-
-        self.logger.info(f"Initializing StorageBackend with URL: {storage_url}, protocol: {self.protocol}")
-
         # Configure filesystem
         if self.protocol == 's3':
             self.fs = fsspec.filesystem(
@@ -41,70 +31,28 @@
             )
             self.bucket = self.parsed_url.netloc
             self.base_path = self.parsed_url.path.lstrip('/')
-            self.logger.debug(f"Configured S3 storage with bucket: {self.bucket}, base_path: {self.base_path}")
         else:
             self.fs = fsspec.filesystem('file')
             self.base_path = self.parsed_url.path or '/uploads'
-            self.logger.debug(f"Configured local storage with base_path: {self.base_path}")
 
     def _get_full_path(self, filename: str) -> str:
         """Get full path for a file"""
         if self.protocol == 's3':
-            full_path = os.path.join(self.base_path, filename)
-            self.logger.debug(f"Generated S3 full path: {full_path}")
-            return full_path
-
-        full_path = os.path.join(current_app.root_path, self.base_path, filename)
-        self.logger.debug(f"Generated local full path: {full_path}")
-        return full_path
+            return os.path.join(self.base_path, filename)
+        return os.path.join(current_app.root_path, self.base_path, filename)
 
     def save(self, file_storage: FileStorage, filename: str) -> str:
         """Save a file to storage"""
-        try:
-            full_path = self._get_full_path(filename)
-            self.logger.info(f"Attempting to save file {filename} to {full_path}")
-
-            if not isinstance(file_storage, FileStorage):
-                self.logger.error(f"Invalid file_storage object type: {type(file_storage)}")
-                raise ValueError("file_storage must be a FileStorage object")
-
-            if self.protocol == 's3':
-                s3_path = f"{self.bucket}/{full_path}"
-                self.logger.debug(f"Opening S3 file for writing: {s3_path}")
-                with self.fs.open(s3_path, 'wb') as f:
-                    self.logger.debug("Saving file content to S3...")
-                    file_storage.save(f)
-
-                # Verify the file was saved
-                if self.fs.exists(s3_path):
-                    self.logger.info(f"Successfully saved file to S3: {s3_path}")
-                else:
-                    self.logger.error(f"Failed to verify file existence in S3: {s3_path}")
-                    raise RuntimeError(f"Failed to verify file existence in S3: {s3_path}")
-
-                return f"s3://{self.bucket}/{full_path}"
-            else:
-                # Create directory structure if it doesn't exist
-                dir_path = os.path.dirname(full_path)
-                self.logger.debug(f"Creating local directory structure: {dir_path}")
-                os.makedirs(dir_path, exist_ok=True)
-
-                self.logger.debug(f"Saving file to local path: {full_path}")
-                file_storage.save(full_path)
-
-                # Verify the file was saved
-                if os.path.exists(full_path):
-                    self.logger.info(f"Successfully saved file locally: {full_path}")
-                    self.logger.debug(f"File size: {os.path.getsize(full_path)} bytes")
-                else:
-                    self.logger.error(f"Failed to verify file existence locally: {full_path}")
-                    raise RuntimeError(f"Failed to verify file existence locally: {full_path}")
-
-                return f"file://{full_path}"
-
-        except Exception as e:
-            self.logger.error(f"Error saving file {filename}: {str(e)}", exc_info=True)
-            raise
+        full_path = self._get_full_path(filename)
+
+        if self.protocol == 's3':
+            with self.fs.open(f"{self.bucket}/{full_path}", 'wb') as f:
+                file_storage.save(f)
+            return f"s3://{self.bucket}/{full_path}"
+        else:
+            os.makedirs(os.path.dirname(full_path), exist_ok=True)
+            file_storage.save(full_path)
+            return f"file://{full_path}"
 
     def open(self, filename: str, mode: str = 'rb') -> BinaryIO:
         """Open a file from storage"""
@@ -122,33 +70,33 @@ class StorageBackend:
             full_path = self._get_full_path(filename)
             if self.protocol == 's3':
                 path = f"{self.bucket}/{full_path}"
-                self.logger.debug(f"Attempting to delete S3 file: {path}")
+                current_app.logger.debug(f"Attempting to delete S3 file: {path}")
                 if self.fs.exists(path):
-                    self.logger.debug(f"File exists, deleting: {path}")
+                    current_app.logger.debug(f"File exists, deleting: {path}")
                     self.fs.delete(path)
                     deleted = not self.fs.exists(path)
                     if deleted:
-                        self.logger.debug(f"Successfully deleted file: {path}")
+                        current_app.logger.debug(f"Successfully deleted file: {path}")
                     else:
-                        self.logger.error(f"Failed to delete file: {path}")
+                        current_app.logger.error(f"Failed to delete file: {path}")
                     return deleted
-                self.logger.debug(f"File doesn't exist, skipping delete: {path}")
+                current_app.logger.debug(f"File doesn't exist, skipping delete: {path}")
                 return True  # File didn't exist
             else:
-                self.logger.debug(f"Attempting to delete local file: {full_path}")
+                current_app.logger.debug(f"Attempting to delete local file: {full_path}")
                 if self.fs.exists(full_path):
-                    self.logger.debug(f"File exists, deleting: {full_path}")
+                    current_app.logger.debug(f"File exists, deleting: {full_path}")
                     self.fs.delete(full_path)
                     deleted = not os.path.exists(full_path)
                     if deleted:
-                        self.logger.debug(f"Successfully deleted file: {full_path}")
+                        current_app.logger.debug(f"Successfully deleted file: {full_path}")
                     else:
-                        self.logger.error(f"Failed to delete file: {full_path}")
+                        current_app.logger.error(f"Failed to delete file: {full_path}")
                     return deleted
-                self.logger.debug(f"File doesn't exist, skipping delete: {full_path}")
+                current_app.logger.debug(f"File doesn't exist, skipping delete: {full_path}")
                 return True  # File didn't exist
         except Exception as e:
-            self.logger.error(f"Failed to delete file {filename}: {str(e)}", exc_info=True)
+            current_app.logger.error(f"Failed to delete file {filename}: {str(e)}", exc_info=True)
             return False
 
     def url_for(self, filename: str) -> str:
@@ -164,27 +112,11 @@
                 return f"{endpoint}/{self.bucket}/{full_path}"
             return f"s3://{self.bucket}/{full_path}"
         else:
-            # For local storage, use static/uploads path
-            return url_for('static', filename=f'uploads/{filename}')
+            return f"/uploads/{filename}"
 
     def exists(self, filename: str) -> bool:
         """Check if a file exists"""
         full_path = self._get_full_path(filename)
         if self.protocol == 's3':
             return self.fs.exists(f"{self.bucket}/{full_path}")
-        return self.fs.exists(full_path)
-
-    def get_file_stream(self, filename: str):
-        """Get a file stream from storage"""
-        try:
-            if self.protocol == 's3':
-                s3_path = f"{self.bucket}/{self._get_full_path(filename)}"
-                self.logger.debug(f"Opening S3 file stream: {s3_path}")
-                return self.fs.open(s3_path, 'rb')
-            else:
-                full_path = self._get_full_path(filename)
-                self.logger.debug(f"Opening local file stream: {full_path}")
-                return open(full_path, 'rb')
-        except Exception as e:
-            self.logger.error(f"Failed to get file stream for {filename}: {str(e)}", exc_info=True)
-            raise
\ No newline at end of file
+        return self.fs.exists(full_path)
\ No newline at end of file
diff --git a/templates/asset_detail.html b/templates/asset_detail.html
index 95d1011..6d32fe7 100644
--- a/templates/asset_detail.html
+++ b/templates/asset_detail.html
@@ -64,14 +64,15 @@
             {% endif %}
[remaining hunk: markup around the "Files" heading restructured; HTML not recoverable]