broke
@@ -1,4 +1,4 @@
-# plugins/media/models.py
+# File: plugins/media/models.py

 from datetime import datetime
 from flask import url_for
@@ -8,17 +8,22 @@ class Media(db.Model):
     __tablename__ = "media"
     __table_args__ = {"extend_existing": True}

-    id = db.Column(db.Integer, primary_key=True)
-    plugin = db.Column(db.String(50), nullable=False)
-    related_id = db.Column(db.Integer, nullable=False)
-    filename = db.Column(db.String(256), nullable=False)
-    uploaded_at = db.Column(db.DateTime, default=datetime.utcnow, nullable=False)
-    uploader_id = db.Column(db.Integer, db.ForeignKey("users.id"), nullable=False)
-    caption = db.Column(db.String(255), nullable=True)
-    plant_id = db.Column(db.Integer, db.ForeignKey("plant.id"), nullable=True)
-    growlog_id = db.Column(db.Integer, db.ForeignKey("grow_logs.id"), nullable=True)
-    created_at = db.Column(db.DateTime, default=datetime.utcnow, nullable=False)
-    file_url = db.Column(db.String(512), nullable=False)
+    id = db.Column(db.Integer, primary_key=True)
+    plugin = db.Column(db.String(50), nullable=False)
+    related_id = db.Column(db.Integer, nullable=False)
+    filename = db.Column(db.String(256), nullable=False)
+    uploaded_at = db.Column(db.DateTime, default=datetime.utcnow, nullable=False)
+    uploader_id = db.Column(db.Integer, db.ForeignKey("users.id"), nullable=False)
+    caption = db.Column(db.String(255), nullable=True)
+    plant_id = db.Column(db.Integer, db.ForeignKey("plant.id"), nullable=True)
+    growlog_id = db.Column(db.Integer, db.ForeignKey("grow_logs.id"), nullable=True)
+    created_at = db.Column(db.DateTime, default=datetime.utcnow, nullable=False)
+    file_url = db.Column(db.String(512), nullable=False)
+    original_file_url = db.Column(db.Text, nullable=True)
+
+    # ←─ NEW ── track orphaned state
+    status = db.Column(db.String(20), nullable=False, default='active')
+    orphaned_at = db.Column(db.DateTime, nullable=True)

     hearts = db.relationship(
         "ImageHeart",
@@ -33,7 +38,7 @@ class Media(db.Model):
         cascade="all, delete-orphan",
     )

-    # ↔ Media items attached to a Plant
+    # ↔ attached Plant
     plant = db.relationship(
         "Plant",
         back_populates="media_items",
@@ -41,7 +46,7 @@ class Media(db.Model):
         lazy="joined",
     )

-    # ↔ Media items attached to a GrowLog
+    # ↔ attached GrowLog
     growlog = db.relationship(
         "GrowLog",
         back_populates="media_items",
@@ -75,6 +80,15 @@ class Media(db.Model):
             for fe in self.featured_entries
         )

+    def mark_orphaned(self, new_url):
+        """
+        Move to orphaned state, recording original URL and timestamp.
+        """
+        self.original_file_url = self.file_url
+        self.file_url = new_url
+        self.status = 'orphaned'
+        self.orphaned_at = datetime.utcnow()
+

 class ZipJob(db.Model):
     __tablename__ = 'zip_jobs'
@@ -93,7 +107,7 @@ class ImageHeart(db.Model):
     id = db.Column(db.Integer, primary_key=True)
     user_id = db.Column(db.Integer, db.ForeignKey("users.id"), nullable=False)
     media_id = db.Column(db.Integer, db.ForeignKey("media.id"), nullable=False)
-    created_at = db.Column(db.DateTime, default=datetime.utcnow, nullable=False)
+    created_at = db.Column(db.DateTime, default=datetime.utcnow, nullable=False)


 class FeaturedImage(db.Model):
@@ -106,4 +120,4 @@ class FeaturedImage(db.Model):
     context_id = db.Column(db.Integer, nullable=False)
     override_text = db.Column(db.String(255), nullable=True)
     is_featured = db.Column(db.Boolean, default=True, nullable=False)
-    created_at = db.Column(db.DateTime, default=datetime.utcnow, nullable=False)
+    created_at = db.Column(db.DateTime, default=datetime.utcnow, nullable=False)
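Note: the new status, orphaned_at, and original_file_url columns above are only written through Media.mark_orphaned(). A minimal usage sketch, illustrative only (the helper name and call site are not part of this commit; it assumes an app context and the shared db session):

# Hypothetical caller, not in this diff.
from app import db
from plugins.media.models import Media

def orphan_one(media_id, new_url):
    media = Media.query.get(media_id)   # load the record
    media.mark_orphaned(new_url)        # keeps the old URL in original_file_url, points file_url
                                        # at new_url, sets status='orphaned' and orphaned_at
    db.session.commit()                 # persist the state change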
@@ -2,7 +2,7 @@
   "name": "Media",
   "version": "0.1.0",
   "author": "Bryson Shepard <bryson@natureinpots.com>",
-  "description": "Manages image uploads, storage, and URL generation.",
+  "description": "Upload, serve, and process images & other media.",
   "module": "plugins.media",
   "routes": {
     "module": "plugins.media.routes",
@@ -18,6 +18,15 @@
       "callable": "plugins.media.routes.generate_image_url"
     }
   ],
+  "tasks": [
+    "plugins.media.tasks"
+  ],
+  "tasks_init": [
+    {
+      "module": "plugins.media.tasks",
+      "callable": "init_media_tasks"
+    }
+  ],
   "license": "Proprietary",
   "repository": "https://github.com/your-org/your-app"
-}
+}
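Note: the added "tasks" and "tasks_init" entries assume the JSON-driven plugin loader imports each listed module and calls every module/callable pair with the Celery app, which is what makes init_media_tasks in tasks.py necessary. The loader itself is outside this diff; a rough sketch of that assumed contract, with illustrative names:

# Illustrative only — not the project's real loader.
import importlib
import json

def run_tasks_init(manifest_path, celery_app):
    with open(manifest_path) as fh:
        manifest = json.load(fh)
    for entry in manifest.get("tasks_init", []):
        module = importlib.import_module(entry["module"])  # e.g. plugins.media.tasks
        getattr(module, entry["callable"])(celery_app)     # e.g. init_media_tasks(celery_app)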
@@ -1,35 +1,47 @@
 # File: plugins/media/tasks.py

 import os
 import shutil
 import zipfile
 from werkzeug.utils import secure_filename
 from PIL import Image, UnidentifiedImageError
+from celery.schedules import crontab
+from flask import current_app
 from app import db
-from plugins.media.models import ZipJob
+from app.celery_app import celery
+from plugins.media.models import Media, ZipJob

-# Re‐import your create_app and utility plugin to get Celery
-from plugins.utility.celery import celery_app
-
-# Constants
-IMAGE_EXTS = {'.jpg','.jpeg','.png','.gif'}
-DOC_EXTS = {'.pdf','.txt','.csv'}
+# ─── Constants ────────────────────────────────────────────────────────────────
+IMAGE_EXTS = {'.jpg', '.jpeg', '.png', '.gif', '.webp'}
+DOC_EXTS = {'.pdf', '.txt', '.csv'}
 MAX_ZIP_FILES = 1000
 MAX_PIXELS = 8000 * 8000


 def validate_image(path):
     try:
         with Image.open(path) as img:
             img.verify()
         w, h = Image.open(path).size
-        return (w*h) <= MAX_PIXELS
+        return (w * h) <= MAX_PIXELS
     except (UnidentifiedImageError, IOError):
         return False

-@celery_app.task(bind=True)
+
+@celery.task(
+    bind=True,
+    name='plugins.media.tasks.process_zip',
+    queue='media'
+)
 def process_zip(self, job_id, zip_path):
     """
     Unpack and validate a user‐uploaded ZIP batch.
     """
     job = ZipJob.query.get(job_id)
     job.status = 'processing'
     db.session.commit()

-    extract_dir = zip_path + '_contents'
+    extract_dir = f"{zip_path}_contents"
     try:
         with zipfile.ZipFile(zip_path) as zf:
             names = zf.namelist()
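Note: validate_image() above returns True only for files Pillow can identify whose pixel count stays within MAX_PIXELS (8000 × 8000); unreadable or missing files fall into the UnidentifiedImageError/IOError branch and return False. A quick illustration of that contract (the paths are hypothetical):

# Illustrative only; these paths do not exist in the repo.
from plugins.media.tasks import validate_image

validate_image("uploads/photo.webp")     # True for a readable image within the pixel budget
validate_image("uploads/billboard.png")  # False if width * height exceeds MAX_PIXELS
validate_image("uploads/notes.txt")      # False: Pillow raises UnidentifiedImageError
validate_image("uploads/missing.jpg")    # False: the IOError/OSError path is caught

Also note that process_zip is now registered under an explicit name and declared with queue='media', so a worker consuming that queue is expected to exist.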
@@ -50,20 +62,84 @@ def process_zip(self, job_id, zip_path):
                 with zf.open(member) as src, open(target, 'wb') as dst:
                     dst.write(src.read())

-                if ext in IMAGE_EXTS and not validate_image(target):
-                    raise ValueError(f'Bad image: {member}')
+                if ext in IMAGE_EXTS:
+                    if not validate_image(target):
+                        raise ValueError(f'Bad image: {member}')
                 elif ext == '.pdf':
-                    if open(target,'rb').read(5)!=b'%PDF-':
+                    with open(target, 'rb') as f:
+                        header = f.read(5)
+                        if header != b'%PDF-':
                             raise ValueError(f'Bad PDF: {member}')
                 else:
                     # txt/csv → simple UTF-8 check
-                    open(target,'rb').read(1024).decode('utf-8')
+                    with open(target, 'rb') as f:
+                        f.read(1024).decode('utf-8')

         job.status = 'done'

     except Exception as e:
         job.status = 'failed'
-        job.error = str(e)
-
+        job.error = str(e)
     finally:
         db.session.commit()
+        if os.path.isdir(extract_dir):
+            shutil.rmtree(extract_dir)
+
+
+@celery.on_after_configure.connect
+def setup_periodic_tasks(sender, **kwargs):
+    """
+    Schedule periodic media prune job every day at 2am.
+    """
+    sender.add_periodic_task(
+        crontab(hour=2, minute=0),
+        prune_orphans.s(),
+        name='media_prune',
+        queue='media'
+    )
+
+
+@celery.task(
+    name='plugins.media.tasks.prune_orphans',
+    queue='media'
+)
+def prune_orphans():
+    """
+    Mark orphaned Media records, move their files to /static/orphaned/,
+    and log the change in the DB.
+    """
+    orphan_dir = os.path.join(current_app.root_path, 'static', 'orphaned')
+    os.makedirs(orphan_dir, exist_ok=True)
+
+    candidates = Media.query.filter(
+        Media.status == 'active',
+        Media.plant_id.is_(None),
+        Media.growlog_id.is_(None),
+        Media.related_id.is_(None)
+    ).all()
+
+    for m in candidates:
+        src_rel = m.file_url.lstrip('/')
+        src_abs = os.path.join(current_app.root_path, src_rel)
+        if not os.path.isfile(src_abs):
+            current_app.logger.warning(f"Orphan prune: file not found {src_abs}")
+            continue
+
+        filename = os.path.basename(src_abs)
+        dest_abs = os.path.join(orphan_dir, filename)
+        shutil.move(src_abs, dest_abs)
+
+        new_url = f"/static/orphaned/{filename}"
+        m.mark_orphaned(new_url)
+
+        current_app.logger.info(
+            f"Orphaned media #{m.id}: moved {src_rel} → {new_url}"
+        )
+
+    db.session.commit()
+
+
+def init_media_tasks(celery_app):
+    """
+    Called by the JSON‐driven loader so tasks_init no longer errors.
+    Celery scheduling is handled via on_after_configure.
+    """
+    celery_app.logger.info("[Media] init_media_tasks called (no‐op)")
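Note: nothing in this diff actually enqueues process_zip. A hedged sketch of what a producer might look like (the function name and the ZipJob constructor arguments are assumptions; only status and error appear in this diff):

# Illustrative producer, not part of this commit.
from app import db
from plugins.media.models import ZipJob
from plugins.media.tasks import process_zip

def enqueue_zip(zip_path):
    job = ZipJob(status='pending')       # assumed: ZipJob accepts a status kwarg
    db.session.add(job)
    db.session.commit()                  # commit first so job.id is assigned
    process_zip.delay(job.id, zip_path)  # intended for the 'media' queue declared above
    return job.id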