diff --git a/.dockerignore b/.dockerignore index c628f4a..1fb3901 100644 --- a/.dockerignore +++ b/.dockerignore @@ -21,3 +21,5 @@ coverage.xml venv/ ENV/ instance/ +repo-images/ +.forgejo/ \ No newline at end of file diff --git a/.env.example b/.env.example index 11e4098..8497b77 100644 --- a/.env.example +++ b/.env.example @@ -1,3 +1,6 @@ +## Debugging +LOGGING_LEVEL=DEBUG + ## Database Configuration DATABASE_URL=sqlite:///instance/app.db diff --git a/README.md b/README.md index 8a8c287..5497001 100644 --- a/README.md +++ b/README.md @@ -10,6 +10,26 @@ A digital asset management system built with Flask and S3-compatible storage. - License key management - Docker container support +## Screenshots +
+![Home Page](repo-images/list-of-assets.webp)
+
+![Asset View](repo-images/asset-view.webp)
+
+![Edit Page](repo-images/edit-view.webp)
+ ## Container Registry This project includes automated container builds using Forgejo CI/CD. The container images are published to the project's container registry. diff --git a/app.py b/app.py index 7936bb6..d223557 100644 --- a/app.py +++ b/app.py @@ -1,6 +1,7 @@ import os import uuid -from flask import Flask, render_template, request, redirect, url_for, flash +import mimetypes +from flask import Flask, render_template, request, redirect, url_for, flash, jsonify, send_from_directory, send_file, Response, stream_with_context from werkzeug.utils import secure_filename from config import Config from flask_migrate import Migrate @@ -20,11 +21,13 @@ def create_app(): # Initialize extensions db.init_app(app) migrate.init_app(app, db) + + # Initialize storage backend + app.storage = StorageBackend(app.config['STORAGE_URL']) return app app = create_app() -storage = StorageBackend(app.config['STORAGE_URL']) def generate_unique_filename(original_filename): # Get the file extension @@ -47,13 +50,22 @@ def index(): @app.route('/asset/add', methods=['GET', 'POST']) def add_asset(): if request.method == 'POST': - title = request.form.get('title') - description = request.form.get('description') - license_key = request.form.get('license_key') - featured_image = request.files.get('featured_image') - additional_files = request.files.getlist('additional_files') + try: + title = request.form.get('title') + description = request.form.get('description') + license_key = request.form.get('license_key') + featured_image = request.files.get('featured_image') + additional_files = request.files.getlist('additional_files') + + if not title: + return jsonify({'success': False, 'error': 'Title is required'}) + + if not featured_image: + return jsonify({'success': False, 'error': 'Featured image is required'}) + + if not allowed_file(featured_image.filename, is_featured_image=True): + return jsonify({'success': False, 'error': 'Invalid featured image format'}) - if title and featured_image and allowed_file(featured_image.filename, is_featured_image=True): # Process and convert featured image to WebP processed_image, ext = ImageProcessor.process_featured_image(featured_image) @@ -69,7 +81,7 @@ def add_asset(): ) # Save featured image with unique filename using storage backend - storage.save(processed_file, unique_featured_filename) + app.storage.save(processed_file, unique_featured_filename) # Create asset with unique filename asset = Asset( @@ -87,7 +99,7 @@ def add_asset(): if file and allowed_file(file.filename): original_filename = secure_filename(file.filename) unique_filename = generate_unique_filename(original_filename) - storage.save(file, unique_filename) + app.storage.save(file, unique_filename) asset_file = AssetFile( filename=unique_filename, original_filename=original_filename, @@ -96,8 +108,19 @@ def add_asset(): db.session.add(asset_file) db.session.commit() - flash('Asset added successfully!', 'success') - return redirect(url_for('index')) + return jsonify({ + 'success': True, + 'message': 'Asset added successfully!', + 'redirect': url_for('index') + }) + + except Exception as e: + db.session.rollback() + app.logger.error(f"Error adding asset: {str(e)}", exc_info=True) + return jsonify({ + 'success': False, + 'error': str(e) + }) return render_template('add_asset.html') @@ -111,90 +134,180 @@ def edit_asset(id): asset = Asset.query.get_or_404(id) if request.method == 'POST': - asset.title = request.form.get('title') - asset.set_description(request.form.get('description')) - license_key = 
request.form.get('license_key') - asset.license_key = license_key.strip() if license_key else None + try: + asset.title = request.form.get('title') + if not asset.title: + return jsonify({'success': False, 'error': 'Title is required'}) - # Handle featured image update - featured_image = request.files.get('featured_image') - if featured_image and featured_image.filename and allowed_file(featured_image.filename, is_featured_image=True): - # Delete old featured image - if asset.featured_image: - storage.delete(asset.featured_image) + asset.set_description(request.form.get('description')) + license_key = request.form.get('license_key') + asset.license_key = license_key.strip() if license_key else None - # Process and convert featured image to WebP - processed_image, ext = ImageProcessor.process_featured_image(featured_image) - - # Generate unique filename - original_featured_filename = secure_filename(featured_image.filename) - unique_featured_filename = f"{uuid.uuid4().hex}{ext}" + # Handle featured image update + featured_image = request.files.get('featured_image') + if featured_image and featured_image.filename: + if not allowed_file(featured_image.filename, is_featured_image=True): + return jsonify({'success': False, 'error': 'Invalid featured image format'}) - # Create a FileStorage object from the processed image - processed_file = FileStorage( - stream=processed_image, - filename=unique_featured_filename, - content_type='image/webp' - ) + # Delete old featured image + if asset.featured_image: + app.storage.delete(asset.featured_image) - # Save the processed image - storage.save(processed_file, unique_featured_filename) - asset.featured_image = unique_featured_filename - asset.original_featured_image = original_featured_filename + # Process and convert featured image to WebP + processed_image, ext = ImageProcessor.process_featured_image(featured_image) + + # Generate unique filename + original_featured_filename = secure_filename(featured_image.filename) + unique_featured_filename = f"{uuid.uuid4().hex}{ext}" - # Handle additional files - additional_files = request.files.getlist('additional_files') - for file in additional_files: - if file and allowed_file(file.filename): - original_filename = secure_filename(file.filename) - unique_filename = generate_unique_filename(original_filename) - storage.save(file, unique_filename) - asset_file = AssetFile( - filename=unique_filename, - original_filename=original_filename, - asset_id=asset.id + # Create a FileStorage object from the processed image + processed_file = FileStorage( + stream=processed_image, + filename=unique_featured_filename, + content_type='image/webp' ) - db.session.add(asset_file) - db.session.commit() - flash('Asset updated successfully!', 'success') - return redirect(url_for('asset_detail', id=asset.id)) + # Save the processed image + app.storage.save(processed_file, unique_featured_filename) + asset.featured_image = unique_featured_filename + asset.original_featured_image = original_featured_filename + + # Handle additional files + additional_files = request.files.getlist('additional_files') + for file in additional_files: + if file and allowed_file(file.filename): + original_filename = secure_filename(file.filename) + unique_filename = generate_unique_filename(original_filename) + app.storage.save(file, unique_filename) + asset_file = AssetFile( + filename=unique_filename, + original_filename=original_filename, + asset_id=asset.id + ) + db.session.add(asset_file) + + db.session.commit() + return jsonify({ + 'success': 
True, + 'message': 'Asset updated successfully!', + 'redirect': url_for('asset_detail', id=asset.id) + }) + + except Exception as e: + db.session.rollback() + return jsonify({ + 'success': False, + 'error': str(e) + }) return render_template('edit_asset.html', asset=asset) @app.route('/asset//delete', methods=['POST']) def delete_asset(id): - asset = Asset.query.get_or_404(id) + try: + asset = Asset.query.get_or_404(id) + deletion_errors = [] - # Delete featured image - if asset.featured_image: - storage.delete(asset.featured_image) + # Delete featured image + if asset.featured_image: + if not app.storage.delete(asset.featured_image): + deletion_errors.append(f"Failed to delete featured image: {asset.featured_image}") - # Delete additional files - for file in asset.files: - storage.delete(file.filename) - db.session.delete(file) + # Delete additional files + for file in asset.files: + if not app.storage.delete(file.filename): + deletion_errors.append(f"Failed to delete file: {file.filename}") + db.session.delete(file) - db.session.delete(asset) - db.session.commit() + db.session.delete(asset) + db.session.commit() - flash('Asset deleted successfully!', 'success') - return redirect(url_for('index')) + if deletion_errors: + app.logger.error("Asset deletion had errors: %s", deletion_errors) + flash('Asset deleted from database, but some files could not be deleted: ' + '; '.join(deletion_errors), 'warning') + else: + flash('Asset deleted successfully!', 'success') + + return redirect(url_for('index')) + + except Exception as e: + db.session.rollback() + app.logger.error("Failed to delete asset: %s", str(e)) + flash('Failed to delete asset: ' + str(e), 'error') + return redirect(url_for('asset_detail', id=id)) @app.route('/asset/file//delete', methods=['POST']) def delete_asset_file(id): - asset_file = AssetFile.query.get_or_404(id) - asset_id = asset_file.asset_id + try: + asset_file = AssetFile.query.get_or_404(id) + asset_id = asset_file.asset_id + filename = asset_file.filename + display_name = asset_file.original_filename or asset_file.filename - # Delete the file using storage backend - storage.delete(asset_file.filename) + # Delete the file using storage backend + if not app.storage.delete(filename): + error_msg = f'Failed to delete file {display_name} from storage' + app.logger.error(error_msg) + flash(error_msg, 'error') + return redirect(url_for('asset_detail', id=asset_id)) - # Remove from database - db.session.delete(asset_file) - db.session.commit() + # Only remove from database if storage deletion was successful + db.session.delete(asset_file) + db.session.commit() - flash('File deleted successfully!', 'success') - return redirect(url_for('asset_detail', id=asset_id)) + flash('File deleted successfully!', 'success') + return redirect(url_for('asset_detail', id=asset_id)) + + except Exception as e: + db.session.rollback() + app.logger.error("Failed to delete asset file: %s", str(e)) + flash('Failed to delete file: ' + str(e), 'error') + return redirect(url_for('asset_detail', id=asset_id)) + +@app.route('/download/') +def download_file(file_id): + """Download a file with its original filename""" + try: + asset_file = AssetFile.query.get_or_404(file_id) + filename = asset_file.filename + download_name = asset_file.original_filename or filename + + # Guess the mime type + mime_type, _ = mimetypes.guess_type(download_name) + if mime_type is None: + mime_type = 'application/octet-stream' + + app.logger.debug(f"Starting download of {filename} as {download_name} with type {mime_type}") + 
+ try: + file_stream = app.storage.get_file_stream(filename) + + def generate(): + try: + while True: + chunk = file_stream.read(8192) # Read in 8KB chunks + if not chunk: + break + yield chunk + finally: + file_stream.close() + + response = Response( + stream_with_context(generate()), + mimetype=mime_type + ) + response.headers['Content-Disposition'] = f'attachment; filename="{download_name}"' + return response + + except Exception as e: + app.logger.error(f"Error streaming file {filename}: {str(e)}", exc_info=True) + flash('Error downloading file. Please try again.', 'error') + return redirect(url_for('asset_detail', id=asset_file.asset_id)) + + except Exception as e: + app.logger.error(f"Error in download_file: {str(e)}", exc_info=True) + flash('File not found or error occurred.', 'error') + return redirect(url_for('index')) if __name__ == '__main__': app.run(host='0.0.0.0', port=5432, debug=True) diff --git a/config.py b/config.py index 2b158fc..106e041 100644 --- a/config.py +++ b/config.py @@ -22,6 +22,9 @@ class Config: S3_ENDPOINT_URL = os.environ.get('S3_ENDPOINT_URL') S3_PUBLIC_URL = os.environ.get('S3_PUBLIC_URL') + # Logging configuration + LOGGING_LEVEL = os.environ.get('LOGGING_LEVEL', 'DEBUG' if os.environ.get('FLASK_ENV') != 'production' else 'INFO') + @staticmethod def init_app(app): # Create necessary directories @@ -31,3 +34,7 @@ class Config: if app.config['STORAGE_URL'].startswith('file://'): storage_path = app.config['STORAGE_URL'].replace('file://', '') os.makedirs(storage_path, exist_ok=True) + + # Configure logging + import logging + logging.basicConfig(level=getattr(logging, app.config['LOGGING_LEVEL'])) diff --git a/repo-images/asset-view.webp b/repo-images/asset-view.webp new file mode 100644 index 0000000..c946ff5 Binary files /dev/null and b/repo-images/asset-view.webp differ diff --git a/repo-images/edit-view.webp b/repo-images/edit-view.webp new file mode 100644 index 0000000..ffe86c9 Binary files /dev/null and b/repo-images/edit-view.webp differ diff --git a/repo-images/list-of-assets.webp b/repo-images/list-of-assets.webp new file mode 100644 index 0000000..9121740 Binary files /dev/null and b/repo-images/list-of-assets.webp differ diff --git a/storage.py b/storage.py index 46852df..6886564 100644 --- a/storage.py +++ b/storage.py @@ -1,9 +1,10 @@ import os import fsspec +import logging import asyncio from typing import BinaryIO, Optional, Union from urllib.parse import urlparse -from flask import current_app +from flask import current_app, url_for from werkzeug.datastructures import FileStorage class StorageBackend: @@ -18,6 +19,15 @@ class StorageBackend: self.parsed_url = urlparse(storage_url) self.protocol = self.parsed_url.scheme or 'file' + # Set up logging - use Flask logger if in app context, otherwise use Python logging + try: + current_app.name # Check if we're in app context + self.logger = current_app.logger + except RuntimeError: + self.logger = logging.getLogger(__name__) + + self.logger.info(f"Initializing StorageBackend with URL: {storage_url}, protocol: {self.protocol}") + # Configure filesystem if self.protocol == 's3': self.fs = fsspec.filesystem( @@ -31,28 +41,70 @@ class StorageBackend: ) self.bucket = self.parsed_url.netloc self.base_path = self.parsed_url.path.lstrip('/') + self.logger.debug(f"Configured S3 storage with bucket: {self.bucket}, base_path: {self.base_path}") else: self.fs = fsspec.filesystem('file') self.base_path = self.parsed_url.path or '/uploads' + self.logger.debug(f"Configured local storage with base_path: 
{self.base_path}") def _get_full_path(self, filename: str) -> str: """Get full path for a file""" if self.protocol == 's3': - return os.path.join(self.base_path, filename) - return os.path.join(current_app.root_path, self.base_path, filename) + full_path = os.path.join(self.base_path, filename) + self.logger.debug(f"Generated S3 full path: {full_path}") + return full_path + + full_path = os.path.join(current_app.root_path, self.base_path, filename) + self.logger.debug(f"Generated local full path: {full_path}") + return full_path def save(self, file_storage: FileStorage, filename: str) -> str: """Save a file to storage""" - full_path = self._get_full_path(filename) - - if self.protocol == 's3': - with self.fs.open(f"{self.bucket}/{full_path}", 'wb') as f: - file_storage.save(f) - return f"s3://{self.bucket}/{full_path}" - else: - os.makedirs(os.path.dirname(full_path), exist_ok=True) - file_storage.save(full_path) - return f"file://{full_path}" + try: + full_path = self._get_full_path(filename) + self.logger.info(f"Attempting to save file {filename} to {full_path}") + + if not isinstance(file_storage, FileStorage): + self.logger.error(f"Invalid file_storage object type: {type(file_storage)}") + raise ValueError("file_storage must be a FileStorage object") + + if self.protocol == 's3': + s3_path = f"{self.bucket}/{full_path}" + self.logger.debug(f"Opening S3 file for writing: {s3_path}") + with self.fs.open(s3_path, 'wb') as f: + self.logger.debug("Saving file content to S3...") + file_storage.save(f) + + # Verify the file was saved + if self.fs.exists(s3_path): + self.logger.info(f"Successfully saved file to S3: {s3_path}") + else: + self.logger.error(f"Failed to verify file existence in S3: {s3_path}") + raise RuntimeError(f"Failed to verify file existence in S3: {s3_path}") + + return f"s3://{self.bucket}/{full_path}" + else: + # Create directory structure if it doesn't exist + dir_path = os.path.dirname(full_path) + self.logger.debug(f"Creating local directory structure: {dir_path}") + os.makedirs(dir_path, exist_ok=True) + + self.logger.debug(f"Saving file to local path: {full_path}") + file_storage.save(full_path) + + # Verify the file was saved + if os.path.exists(full_path): + self.logger.info(f"Successfully saved file locally: {full_path}") + self.logger.debug(f"File size: {os.path.getsize(full_path)} bytes") + else: + self.logger.error(f"Failed to verify file existence locally: {full_path}") + raise RuntimeError(f"Failed to verify file existence locally: {full_path}") + + return f"file://{full_path}" + + except Exception as e: + self.logger.error(f"Error saving file {filename}: {str(e)}", exc_info=True) + raise def open(self, filename: str, mode: str = 'rb') -> BinaryIO: """Open a file from storage""" @@ -61,13 +113,43 @@ class StorageBackend: return self.fs.open(f"{self.bucket}/{full_path}", mode) return self.fs.open(full_path, mode) - def delete(self, filename: str) -> None: - """Delete a file from storage""" - full_path = self._get_full_path(filename) - if self.protocol == 's3': - self.fs.delete(f"{self.bucket}/{full_path}") - else: - self.fs.delete(full_path) + def delete(self, filename: str) -> bool: + """ + Delete a file from storage + Returns True if file was deleted or didn't exist, False if deletion failed + """ + try: + full_path = self._get_full_path(filename) + if self.protocol == 's3': + path = f"{self.bucket}/{full_path}" + self.logger.debug(f"Attempting to delete S3 file: {path}") + if self.fs.exists(path): + self.logger.debug(f"File exists, deleting: {path}") + 
self.fs.delete(path) + deleted = not self.fs.exists(path) + if deleted: + self.logger.debug(f"Successfully deleted file: {path}") + else: + self.logger.error(f"Failed to delete file: {path}") + return deleted + self.logger.debug(f"File doesn't exist, skipping delete: {path}") + return True # File didn't exist + else: + self.logger.debug(f"Attempting to delete local file: {full_path}") + if self.fs.exists(full_path): + self.logger.debug(f"File exists, deleting: {full_path}") + self.fs.delete(full_path) + deleted = not os.path.exists(full_path) + if deleted: + self.logger.debug(f"Successfully deleted file: {full_path}") + else: + self.logger.error(f"Failed to delete file: {full_path}") + return deleted + self.logger.debug(f"File doesn't exist, skipping delete: {full_path}") + return True # File didn't exist + except Exception as e: + self.logger.error(f"Failed to delete file {filename}: {str(e)}", exc_info=True) + return False def url_for(self, filename: str) -> str: """Get URL for a file""" @@ -82,11 +164,27 @@ class StorageBackend: return f"{endpoint}/{self.bucket}/{full_path}" return f"s3://{self.bucket}/{full_path}" else: - return f"/uploads/{filename}" + # For local storage, use static/uploads path + return url_for('static', filename=f'uploads/{filename}') def exists(self, filename: str) -> bool: """Check if a file exists""" full_path = self._get_full_path(filename) if self.protocol == 's3': return self.fs.exists(f"{self.bucket}/{full_path}") - return self.fs.exists(full_path) \ No newline at end of file + return self.fs.exists(full_path) + + def get_file_stream(self, filename: str): + """Get a file stream from storage""" + try: + if self.protocol == 's3': + s3_path = f"{self.bucket}/{self._get_full_path(filename)}" + self.logger.debug(f"Opening S3 file stream: {s3_path}") + return self.fs.open(s3_path, 'rb') + else: + full_path = self._get_full_path(filename) + self.logger.debug(f"Opening local file stream: {full_path}") + return open(full_path, 'rb') + except Exception as e: + self.logger.error(f"Failed to get file stream for {filename}: {str(e)}", exc_info=True) + raise \ No newline at end of file diff --git a/templates/add_asset.html b/templates/add_asset.html index 1840eeb..421aa94 100644 --- a/templates/add_asset.html +++ b/templates/add_asset.html @@ -4,7 +4,7 @@
-
+
- @@ -115,8 +115,11 @@ "https://cdnjs.cloudflare.com/ajax/libs/Trumbowyg/2.27.3/ui/icons.svg", }); - // File input preview handling document.addEventListener("DOMContentLoaded", function () { + const form = document.getElementById("assetForm"); + const loadingOverlay = document.querySelector(".loading-overlay"); + const loadingText = document.querySelector(".loading-text"); + // Featured image preview const featuredInput = document.getElementById("featured_image"); const featuredPreview = document.querySelector(".file-input-preview"); @@ -152,6 +155,45 @@ `; }); }); + + form.addEventListener("submit", function (e) { + e.preventDefault(); + + const formData = new FormData(form); + loadingOverlay.style.display = "flex"; + loadingText.textContent = "Processing..."; + + const xhr = new XMLHttpRequest(); + xhr.open("POST", "{{ url_for('add_asset') }}", true); + + xhr.onload = function() { + if (xhr.status === 200) { + const response = JSON.parse(xhr.responseText); + if (response.success) { + window.location.href = response.redirect; + } else { + loadingText.textContent = "Failed: " + response.error; + setTimeout(() => { + loadingOverlay.style.display = "none"; + }, 2000); + } + } else { + loadingText.textContent = "Upload failed! Please try again."; + setTimeout(() => { + loadingOverlay.style.display = "none"; + }, 2000); + } + }; + + xhr.onerror = function() { + loadingText.textContent = "Network error! Please try again."; + setTimeout(() => { + loadingOverlay.style.display = "none"; + }, 2000); + }; + + xhr.send(formData); + }); }); {% endblock %} diff --git a/templates/asset_detail.html b/templates/asset_detail.html index 1c6325c..95d1011 100644 --- a/templates/asset_detail.html +++ b/templates/asset_detail.html @@ -64,33 +64,19 @@
 {% endif %}
-          Attached Files
+          Files
 {% if asset.files %}
diff --git a/templates/base.html b/templates/base.html index 562a66f..9695792 100644 --- a/templates/base.html +++ b/templates/base.html @@ -27,11 +27,47 @@ crossorigin="anonymous" referrerpolicy="no-referrer" /> + {% block head %}{% endblock %} +
+    <div class="loading-overlay" style="display: none">
+      <div class="loading-text">Processing...</div>
+    </div>
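
As a quick way to exercise the reworked endpoints by hand, a sketch like the one below could be run against a local instance. The base URL follows the `app.run(host='0.0.0.0', port=5432)` call above; the `test.png` fixture and the file id `1` are assumptions for illustration only, not part of this change.

```python
# Manual smoke test for the JSON add-asset flow and the streaming download route.
# Assumptions: the dev server is running locally on port 5432 (see app.run above),
# a test.png sits next to this script, and an AssetFile with id=1 exists.
import requests

BASE = "http://localhost:5432"

# /asset/add now answers with JSON ({'success': ..., 'error' or 'redirect': ...})
# instead of flashing and redirecting.
with open("test.png", "rb") as img:
    resp = requests.post(
        f"{BASE}/asset/add",
        data={"title": "Test asset", "description": "Smoke test"},
        files={"featured_image": ("test.png", img, "image/png")},
    )
print(resp.json())

# /download/<file_id> streams the stored file back in 8 KB chunks.
with requests.get(f"{BASE}/download/1", stream=True) as r:
    r.raise_for_status()
    with open("downloaded.bin", "wb") as out:
        for chunk in r.iter_content(chunk_size=8192):
            out.write(chunk)
```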