Compare commits


No commits in common. "main" and "v0.0.2" have entirely different histories.
main ... v0.0.2

9 changed files with 43 additions and 180 deletions


@@ -21,5 +21,3 @@ coverage.xml
 venv/
 ENV/
 instance/
-repo-images/
-.forgejo/


@@ -10,26 +10,6 @@ A digital asset management system built with Flask and S3-compatible storage.
 - License key management
 - Docker container support

-## Screenshots
-<details>
-<summary>Home Page</summary>
-<p>
-<img src="https://git.hack13.dev/hack13/Personal-Digital-Asset-Manager/raw/branch/main/repo-images/list-of-assets.webp" />
-</p>
-</details>
-<details>
-<summary>Asset View</summary>
-<p>
-<img src="https://git.hack13.dev/hack13/Personal-Digital-Asset-Manager/raw/branch/main/repo-images/asset-view.webp" />
-</p>
-</details>
-<details>
-<summary>Edit Page</summary>
-<p>
-<img src="https://git.hack13.dev/hack13/Personal-Digital-Asset-Manager/raw/branch/main/repo-images/edit-view.webp" />
-</p>
-</details>
-
 ## Container Registry

 This project includes automated container builds using Forgejo CI/CD. The container images are published to the project's container registry.
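Images published this way can be pulled straight from that registry. A hedged sketch using the Python Docker SDK (`docker` package); the image path and tag below are hypothetical, since the actual registry path is not shown in this compare view:

```python
import docker  # requires the docker SDK for Python and a running Docker daemon

client = docker.from_env()

# Hypothetical image path and tag -- substitute the path listed in the project's package registry.
image = client.images.pull("git.hack13.dev/hack13/personal-digital-asset-manager", tag="v0.0.2")
print(image.tags)
```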

app.py

@@ -1,7 +1,6 @@
 import os
 import uuid
-import mimetypes
-from flask import Flask, render_template, request, redirect, url_for, flash, jsonify, send_from_directory, send_file, Response, stream_with_context
+from flask import Flask, render_template, request, redirect, url_for, flash, jsonify
 from werkzeug.utils import secure_filename
 from config import Config
 from flask_migrate import Migrate
@@ -21,13 +20,11 @@ def create_app():
     # Initialize extensions
     db.init_app(app)
     migrate.init_app(app, db)
-
-    # Initialize storage backend
-    app.storage = StorageBackend(app.config['STORAGE_URL'])
     return app

 app = create_app()
+storage = StorageBackend(app.config['STORAGE_URL'])

 def generate_unique_filename(original_filename):
     # Get the file extension
@@ -81,7 +78,7 @@ def add_asset():
             )

             # Save featured image with unique filename using storage backend
-            app.storage.save(processed_file, unique_featured_filename)
+            storage.save(processed_file, unique_featured_filename)

             # Create asset with unique filename
             asset = Asset(
@@ -99,7 +96,7 @@ def add_asset():
             if file and allowed_file(file.filename):
                 original_filename = secure_filename(file.filename)
                 unique_filename = generate_unique_filename(original_filename)
-                app.storage.save(file, unique_filename)
+                storage.save(file, unique_filename)
                 asset_file = AssetFile(
                     filename=unique_filename,
                     original_filename=original_filename,
@@ -116,7 +113,6 @@ def add_asset():
     except Exception as e:
         db.session.rollback()
-        app.logger.error(f"Error adding asset: {str(e)}", exc_info=True)
         return jsonify({
             'success': False,
             'error': str(e)
@@ -151,7 +147,7 @@ def edit_asset(id):
                 # Delete old featured image
                 if asset.featured_image:
-                    app.storage.delete(asset.featured_image)
+                    storage.delete(asset.featured_image)

                 # Process and convert featured image to WebP
                 processed_image, ext = ImageProcessor.process_featured_image(featured_image)
@@ -168,7 +164,7 @@ def edit_asset(id):
                 )

                 # Save the processed image
-                app.storage.save(processed_file, unique_featured_filename)
+                storage.save(processed_file, unique_featured_filename)

                 asset.featured_image = unique_featured_filename
                 asset.original_featured_image = original_featured_filename
@@ -178,7 +174,7 @@ def edit_asset(id):
             if file and allowed_file(file.filename):
                 original_filename = secure_filename(file.filename)
                 unique_filename = generate_unique_filename(original_filename)
-                app.storage.save(file, unique_filename)
+                storage.save(file, unique_filename)
                 asset_file = AssetFile(
                     filename=unique_filename,
                     original_filename=original_filename,
@@ -210,12 +206,12 @@ def delete_asset(id):
         # Delete featured image
         if asset.featured_image:
-            if not app.storage.delete(asset.featured_image):
+            if not storage.delete(asset.featured_image):
                 deletion_errors.append(f"Failed to delete featured image: {asset.featured_image}")

         # Delete additional files
         for file in asset.files:
-            if not app.storage.delete(file.filename):
+            if not storage.delete(file.filename):
                 deletion_errors.append(f"Failed to delete file: {file.filename}")
             db.session.delete(file)
@@ -245,7 +241,7 @@ def delete_asset_file(id):
         display_name = asset_file.original_filename or asset_file.filename

         # Delete the file using storage backend
-        if not app.storage.delete(filename):
+        if not storage.delete(filename):
             error_msg = f'Failed to delete file {display_name} from storage'
             app.logger.error(error_msg)
             flash(error_msg, 'error')
@@ -264,50 +260,5 @@ def delete_asset_file(id):
         flash('Failed to delete file: ' + str(e), 'error')
         return redirect(url_for('asset_detail', id=asset_id))

-@app.route('/download/<int:file_id>')
-def download_file(file_id):
-    """Download a file with its original filename"""
-    try:
-        asset_file = AssetFile.query.get_or_404(file_id)
-        filename = asset_file.filename
-        download_name = asset_file.original_filename or filename
-
-        # Guess the mime type
-        mime_type, _ = mimetypes.guess_type(download_name)
-        if mime_type is None:
-            mime_type = 'application/octet-stream'
-
-        app.logger.debug(f"Starting download of {filename} as {download_name} with type {mime_type}")
-
-        try:
-            file_stream = app.storage.get_file_stream(filename)
-
-            def generate():
-                try:
-                    while True:
-                        chunk = file_stream.read(8192)  # Read in 8KB chunks
-                        if not chunk:
-                            break
-                        yield chunk
-                finally:
-                    file_stream.close()
-
-            response = Response(
-                stream_with_context(generate()),
-                mimetype=mime_type
-            )
-            response.headers['Content-Disposition'] = f'attachment; filename="{download_name}"'
-            return response
-        except Exception as e:
-            app.logger.error(f"Error streaming file {filename}: {str(e)}", exc_info=True)
-            flash('Error downloading file. Please try again.', 'error')
-            return redirect(url_for('asset_detail', id=asset_file.asset_id))
-    except Exception as e:
-        app.logger.error(f"Error in download_file: {str(e)}", exc_info=True)
-        flash('File not found or error occurred.', 'error')
-        return redirect(url_for('index'))
-
 if __name__ == '__main__':
     app.run(host='0.0.0.0', port=5432, debug=True)
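Both versions build the backend from a single `STORAGE_URL` value; the URL scheme selects S3 or local storage. A minimal sketch of how such a URL is taken apart, mirroring the `StorageBackend.__init__` logic shown further down in this compare; the example URLs are hypothetical, since the real value lives in `config.py`, which this diff does not touch:

```python
from urllib.parse import urlparse

# Hypothetical STORAGE_URL values; the real one comes from config.py, not shown in this diff.
for storage_url in ("s3://asset-bucket/assets", "file:///uploads"):
    parsed = urlparse(storage_url)
    protocol = parsed.scheme or 'file'         # 's3' or 'file', as in StorageBackend.__init__
    if protocol == 's3':
        bucket = parsed.netloc                 # 'asset-bucket'
        base_path = parsed.path.lstrip('/')    # 'assets'
        print(protocol, bucket, base_path)
    else:
        base_path = parsed.path or '/uploads'  # '/uploads'
        print(protocol, base_path)
```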

Binary files not shown: three images deleted (previously 129 KiB, 94 KiB, and 256 KiB).


@@ -1,10 +1,9 @@
 import os
 import fsspec
-import logging
 import asyncio
 from typing import BinaryIO, Optional, Union
 from urllib.parse import urlparse
-from flask import current_app, url_for
+from flask import current_app
 from werkzeug.datastructures import FileStorage

 class StorageBackend:
@@ -19,15 +18,6 @@ class StorageBackend:
         self.parsed_url = urlparse(storage_url)
         self.protocol = self.parsed_url.scheme or 'file'

-        # Set up logging - use Flask logger if in app context, otherwise use Python logging
-        try:
-            current_app.name  # Check if we're in app context
-            self.logger = current_app.logger
-        except RuntimeError:
-            self.logger = logging.getLogger(__name__)
-
-        self.logger.info(f"Initializing StorageBackend with URL: {storage_url}, protocol: {self.protocol}")
-
         # Configure filesystem
         if self.protocol == 's3':
             self.fs = fsspec.filesystem(
@@ -41,70 +31,28 @@ class StorageBackend:
             )
             self.bucket = self.parsed_url.netloc
             self.base_path = self.parsed_url.path.lstrip('/')
-            self.logger.debug(f"Configured S3 storage with bucket: {self.bucket}, base_path: {self.base_path}")
         else:
             self.fs = fsspec.filesystem('file')
             self.base_path = self.parsed_url.path or '/uploads'
-            self.logger.debug(f"Configured local storage with base_path: {self.base_path}")

     def _get_full_path(self, filename: str) -> str:
         """Get full path for a file"""
         if self.protocol == 's3':
-            full_path = os.path.join(self.base_path, filename)
-            self.logger.debug(f"Generated S3 full path: {full_path}")
-            return full_path
-        full_path = os.path.join(current_app.root_path, self.base_path, filename)
-        self.logger.debug(f"Generated local full path: {full_path}")
-        return full_path
+            return os.path.join(self.base_path, filename)
+        return os.path.join(current_app.root_path, self.base_path, filename)

     def save(self, file_storage: FileStorage, filename: str) -> str:
         """Save a file to storage"""
-        try:
-            full_path = self._get_full_path(filename)
-            self.logger.info(f"Attempting to save file {filename} to {full_path}")
-
-            if not isinstance(file_storage, FileStorage):
-                self.logger.error(f"Invalid file_storage object type: {type(file_storage)}")
-                raise ValueError("file_storage must be a FileStorage object")
-
-            if self.protocol == 's3':
-                s3_path = f"{self.bucket}/{full_path}"
-                self.logger.debug(f"Opening S3 file for writing: {s3_path}")
-                with self.fs.open(s3_path, 'wb') as f:
-                    self.logger.debug("Saving file content to S3...")
-                    file_storage.save(f)
-
-                # Verify the file was saved
-                if self.fs.exists(s3_path):
-                    self.logger.info(f"Successfully saved file to S3: {s3_path}")
-                else:
-                    self.logger.error(f"Failed to verify file existence in S3: {s3_path}")
-                    raise RuntimeError(f"Failed to verify file existence in S3: {s3_path}")
-
-                return f"s3://{self.bucket}/{full_path}"
-            else:
-                # Create directory structure if it doesn't exist
-                dir_path = os.path.dirname(full_path)
-                self.logger.debug(f"Creating local directory structure: {dir_path}")
-                os.makedirs(dir_path, exist_ok=True)
-
-                self.logger.debug(f"Saving file to local path: {full_path}")
-                file_storage.save(full_path)
-
-                # Verify the file was saved
-                if os.path.exists(full_path):
-                    self.logger.info(f"Successfully saved file locally: {full_path}")
-                    self.logger.debug(f"File size: {os.path.getsize(full_path)} bytes")
-                else:
-                    self.logger.error(f"Failed to verify file existence locally: {full_path}")
-                    raise RuntimeError(f"Failed to verify file existence locally: {full_path}")
-
-                return f"file://{full_path}"
-        except Exception as e:
-            self.logger.error(f"Error saving file {filename}: {str(e)}", exc_info=True)
-            raise
+        full_path = self._get_full_path(filename)
+        if self.protocol == 's3':
+            with self.fs.open(f"{self.bucket}/{full_path}", 'wb') as f:
+                file_storage.save(f)
+            return f"s3://{self.bucket}/{full_path}"
+        else:
+            os.makedirs(os.path.dirname(full_path), exist_ok=True)
+            file_storage.save(full_path)
+            return f"file://{full_path}"

     def open(self, filename: str, mode: str = 'rb') -> BinaryIO:
         """Open a file from storage"""
@@ -122,33 +70,33 @@
             full_path = self._get_full_path(filename)
             if self.protocol == 's3':
                 path = f"{self.bucket}/{full_path}"
-                self.logger.debug(f"Attempting to delete S3 file: {path}")
+                current_app.logger.debug(f"Attempting to delete S3 file: {path}")
                 if self.fs.exists(path):
-                    self.logger.debug(f"File exists, deleting: {path}")
+                    current_app.logger.debug(f"File exists, deleting: {path}")
                     self.fs.delete(path)
                     deleted = not self.fs.exists(path)
                     if deleted:
-                        self.logger.debug(f"Successfully deleted file: {path}")
+                        current_app.logger.debug(f"Successfully deleted file: {path}")
                     else:
-                        self.logger.error(f"Failed to delete file: {path}")
+                        current_app.logger.error(f"Failed to delete file: {path}")
                     return deleted
-                self.logger.debug(f"File doesn't exist, skipping delete: {path}")
+                current_app.logger.debug(f"File doesn't exist, skipping delete: {path}")
                 return True  # File didn't exist
             else:
-                self.logger.debug(f"Attempting to delete local file: {full_path}")
+                current_app.logger.debug(f"Attempting to delete local file: {full_path}")
                 if self.fs.exists(full_path):
-                    self.logger.debug(f"File exists, deleting: {full_path}")
+                    current_app.logger.debug(f"File exists, deleting: {full_path}")
                     self.fs.delete(full_path)
                     deleted = not os.path.exists(full_path)
                     if deleted:
-                        self.logger.debug(f"Successfully deleted file: {full_path}")
+                        current_app.logger.debug(f"Successfully deleted file: {full_path}")
                     else:
-                        self.logger.error(f"Failed to delete file: {full_path}")
+                        current_app.logger.error(f"Failed to delete file: {full_path}")
                     return deleted
-                self.logger.debug(f"File doesn't exist, skipping delete: {full_path}")
+                current_app.logger.debug(f"File doesn't exist, skipping delete: {full_path}")
                 return True  # File didn't exist
         except Exception as e:
-            self.logger.error(f"Failed to delete file {filename}: {str(e)}", exc_info=True)
+            current_app.logger.error(f"Failed to delete file {filename}: {str(e)}", exc_info=True)
             return False

     def url_for(self, filename: str) -> str:
@@ -164,27 +112,11 @@
                 return f"{endpoint}/{self.bucket}/{full_path}"
             return f"s3://{self.bucket}/{full_path}"
         else:
-            # For local storage, use static/uploads path
-            return url_for('static', filename=f'uploads/{filename}')
+            return f"/uploads/{filename}"

     def exists(self, filename: str) -> bool:
         """Check if a file exists"""
         full_path = self._get_full_path(filename)
         if self.protocol == 's3':
             return self.fs.exists(f"{self.bucket}/{full_path}")
         return self.fs.exists(full_path)
-
-    def get_file_stream(self, filename: str):
-        """Get a file stream from storage"""
-        try:
-            if self.protocol == 's3':
-                s3_path = f"{self.bucket}/{self._get_full_path(filename)}"
-                self.logger.debug(f"Opening S3 file stream: {s3_path}")
-                return self.fs.open(s3_path, 'rb')
-            else:
-                full_path = self._get_full_path(filename)
-                self.logger.debug(f"Opening local file stream: {full_path}")
-                return open(full_path, 'rb')
-        except Exception as e:
-            self.logger.error(f"Failed to get file stream for {filename}: {str(e)}", exc_info=True)
-            raise
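After these removals, the storage class exposes roughly `save`, `open`, `delete`, `url_for`, and `exists`. A minimal usage sketch against the v0.0.2 signatures shown above; the module name `storage` and the sample filenames are assumptions, and local paths resolve through `current_app`, so the calls run inside an app context:

```python
from flask import Flask
from werkzeug.datastructures import FileStorage
from storage import StorageBackend  # assumed module name; the filename is not shown in this view

app = Flask(__name__)

with app.app_context():  # local paths resolve via current_app.root_path
    backend = StorageBackend("file:///uploads")            # or "s3://asset-bucket/assets"

    with open("example.webp", "rb") as fh:
        upload = FileStorage(fh, filename="example.webp")   # what a Flask upload field provides
        stored_uri = backend.save(upload, "abc123.webp")     # returns a file:// or s3:// URI

    print(backend.exists("abc123.webp"))   # True once the write lands
    print(backend.url_for("abc123.webp"))  # "/uploads/abc123.webp" for local storage in v0.0.2
    backend.delete("abc123.webp")          # returns False on failure instead of raising
```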


@@ -64,14 +64,15 @@
         </div>
         {% endif %}

-        <div class="files-section">
-            <h2>Files</h2>
+        <div class="attached-files content-box">
+            <h2>Attached Files</h2>
             {% if asset.files %}
                 <ul class="files-list">
                     {% for file in asset.files %}
                         <li class="file-item">
                             <a
-                                href="{{ url_for('download_file', file_id=file.id) }}"
+                                href="{{ file.file_url }}"
+                                target="_blank"
                                 class="file-link"
                             >
                                 <i class="fas fa-file"></i>


@@ -28,7 +28,7 @@
             <label class="form-label">Current Featured Image</label>
             <div class="current-image">
                 <img
-                    src="{{ asset.featured_image_url }}"
+                    src="{{ url_for('static', filename='uploads/' + asset.featured_image) }}"
                     alt="{{ asset.title }}"
                 />
             </div>
@@ -109,7 +109,8 @@
                 {% for file in asset.files %}
                     <li class="file-item">
                         <a
-                            href="{{ url_for('download_file', file_id=file.id) }}"
+                            href="{{ url_for('static', filename='uploads/' + file.filename) }}"
+                            target="_blank"
                             class="file-link"
                         >
                             <i class="fas fa-file"></i>