A ton of fun happened; refactored a lot.
@@ -2,8 +2,9 @@
USE_REMOTE_MYSQL=0
ENABLE_DB_SEEDING=1
DOCKER_ENV=development
UPLOAD_FOLDER=app/static/uploads
SECRET_KEY=supersecretplantappkey
FLASK_ENV=development
UPLOAD_FOLDER=static/uploads
SECRET_KEY=37f765030a6986ce47922ea1248d1e8dc24c1bc0638e4cd0d09382d1634a8e2a

# MySQL configuration
MYSQL_HOST=db
@@ -13,9 +14,10 @@ MYSQL_USER=plant_user
MYSQL_PASSWORD=plant_pass
MYSQL_ROOT_PASSWORD=supersecret


# Neo4j Settings
NEO4J_URI=bolt://neo4j:7687
NEO4J_USER=neo4j
NEO4J_PASSWORD=your_secure_password

# Media Settings
STANDARD_IMG_SIZE=300x200
1  .gitignore  vendored
@@ -11,7 +11,6 @@ __pycache__/
instance/
mysql_data/
.env
#.env.*

# VS Code
.vscode/
28  Dockerfile
@@ -1,16 +1,32 @@
FROM python:3.11-slim

# Install build deps and netcat for the DB-wait
RUN apt-get update && apt-get install -y \
    gcc \
    default-libmysqlclient-dev \
    pkg-config \
    netcat-openbsd \
    curl \
    && rm -rf /var/lib/apt/lists/*

# Upgrade pip and install Python requirements
WORKDIR /app
COPY requirements.txt .
RUN pip install --upgrade pip \
    && pip install -r requirements.txt

# Copy the app code
COPY . .

# Required for mysqlclient + netcat wait
RUN apt-get update && apt-get install -y gcc default-libmysqlclient-dev pkg-config netcat-openbsd curl && rm -rf /var/lib/apt/lists/*
# Create a non-root user and give it ownership of /app
RUN useradd -ms /bin/bash appuser \
    && chown -R appuser:appuser /app

RUN pip install --upgrade pip
RUN pip install -r requirements.txt
# Switch to appuser for all subsequent commands
USER appuser

# Add entrypoint script
COPY entrypoint.sh /entrypoint.sh
# Make the entrypoint script executable
COPY --chown=appuser:appuser entrypoint.sh /entrypoint.sh
RUN chmod +x /entrypoint.sh

ENTRYPOINT ["/entrypoint.sh"]
195  app/__init__.py
@@ -1,120 +1,145 @@
# app/__init__.py
# File: app/__init__.py

import os
import json
import glob
import importlib
import importlib.util
import time

from flask import Flask,request
from flask import Flask, request
from flask_sqlalchemy import SQLAlchemy
from flask_migrate import Migrate
from flask_login import LoginManager
from flask_wtf.csrf import CSRFProtect
from dotenv import load_dotenv
from datetime import datetime
from dotenv import load_dotenv, find_dotenv

# ─── Load .env ──────────────────────────
dotenv_path = find_dotenv()
if dotenv_path:
    load_dotenv(dotenv_path, override=True)

# Load environment variables from .env or system
load_dotenv()

# ─── Initialize core extensions ──────────────────────────
# ─── Core extensions ──────────────────────────
db = SQLAlchemy()
migrate = Migrate()
login_manager = LoginManager()
csrf = CSRFProtect()

from plugins.media.routes import generate_image_url  # Import it here

# ─── Template helper (still in core) ──────────────────────────
from plugins.media.routes import generate_image_url  # noqa: E402

def create_app():
    app = Flask(__name__)
    # ─── Configure Flask ──────────────────────────
    project_root = os.path.abspath(os.path.join(os.path.dirname(__file__), '..'))
    app = Flask(
        __name__,
        static_folder=os.path.join(project_root, 'static'),
        static_url_path='/static'
    )
    app.config.from_object('app.config.Config')

    # ─── Initialize extensions with the app ──────────────────────────
    # ─── Init extensions ──────────────────────────
    csrf.init_app(app)
    db.init_app(app)
    migrate.init_app(app, db)
    login_manager.init_app(app)
    login_manager.login_view = 'auth.login'

    # ─── Register user_loader for Flask-Login ──────────────────────────
    from plugins.auth.models import User

    @login_manager.user_loader
    def load_user(user_id):
        try:
            return User.query.get(int(user_id))
        except Exception:
            return None

    # ─── Register error handlers ──────────────────────────
    from .errors import bp as errors_bp
    # ─── Core routes & errors ──────────────────────────
    from .errors import bp as errors_bp  # noqa: E402
    app.register_blueprint(errors_bp)
    from .routes import init_app as register_core_routes  # noqa: E402
    register_core_routes(app)
    app.logger.info("✔️ Registered core routes")

    # ─── 1) Auto‐import plugin models by their package names ──────────────────────────
    # This ensures that every plugins/<plugin>/models.py is imported exactly once
    plugin_model_paths = glob.glob(
        os.path.join(os.path.dirname(__file__), '..', 'plugins', '*', 'models.py')
    )
    for path in plugin_model_paths:
        # path looks like ".../plugins/plant/models.py"
        rel = path.split(os.sep)[-2]   # e.g. "plant"
        pkg = f"plugins.{rel}.models"  # e.g. "plugins.plant.models"
        try:
            importlib.import_module(pkg)
            print(f"✅ (Startup) Loaded: {pkg}")
        except Exception as e:
            print(f"❌ (Startup) Failed to load {pkg}: {e}")

    # ─── 2) Auto‐discover & register plugin routes, CLI, entry‐points ──────────────────────────
    plugin_path = os.path.abspath(os.path.join(os.path.dirname(__file__), '..', 'plugins'))
    for plugin in os.listdir(plugin_path):
        if plugin.endswith('.noload'):
            print(f"[⏭] Skipping plugin '{plugin}' (marked as .noload)")
    # ─── JSON‐driven plugin loader ──────────────────────────
    plugins_dir = os.path.join(project_root, 'plugins')
    for name in sorted(os.listdir(plugins_dir)):
        plugin_path = os.path.join(plugins_dir, name)
        manifest = os.path.join(plugin_path, 'plugin.json')
        if not os.path.isfile(manifest):
            continue

        plugin_dir = os.path.join(plugin_path, plugin)
        if not os.path.isdir(plugin_dir):
        errors = []
        try:
            meta = json.load(open(manifest))
        except Exception as e:
            print(f"Plugin '{name}' 🛑 manifest load failed: {e}")
            continue

        # (a) Register routes.py
        route_file = os.path.join(plugin_dir, 'routes.py')
        if os.path.isfile(route_file):
        # 1) Import models
        for model_path in meta.get('models', []):
            try:
                spec = importlib.util.spec_from_file_location(f"plugins.{plugin}.routes", route_file)
                mod = importlib.util.module_from_spec(spec)
                spec.loader.exec_module(mod)
                if hasattr(mod, 'bp'):
                    app.register_blueprint(mod.bp, strict_slashes=False)
                    print(f"✔️ Registered routes for plugin '{plugin}'")
                importlib.import_module(model_path)
            except Exception as e:
                print(f"❌ Failed to load routes from plugin '{plugin}': {e}")
                errors.append(f"model import ({model_path}): {e}")

        # (b) Register CLI and entry‐point
        init_file = os.path.join(plugin_dir, '__init__.py')
        plugin_json = os.path.join(plugin_dir, 'plugin.json')
        if os.path.isfile(init_file):
        # 1.b) user_loader hook
        ul = meta.get('user_loader')
        if ul:
            try:
                cli_module = importlib.import_module(f"plugins.{plugin}")
                if hasattr(cli_module, 'register_cli'):
                    cli_module.register_cli(app)
                    print(f"✔️ Registered CLI for plugin '{plugin}'")
                if os.path.isfile(plugin_json):
                    with open(plugin_json, 'r') as f:
                        meta = json.load(f)
                    entry = meta.get('entry_point')
                    if entry and hasattr(cli_module, entry):
                        getattr(cli_module, entry)(app)
                        print(f"✔️ Ran entry point '{entry}' for plugin '{plugin}'")
                mod = importlib.import_module(ul['module'])
                fn = getattr(mod, ul['callable'])
                fn(app)
            except Exception as e:
                print(f"❌ Failed to load CLI for plugin '{plugin}': {e}")
                errors.append(f"user_loader ({ul['module']}:{ul['callable']}): {e}")

        # ─── Inject current year into templates ──────────────────────────
        # 2) Register routes
        routes_cfg = meta.get('routes')
        if routes_cfg:
            try:
                mod = importlib.import_module(routes_cfg['module'])
                bp_obj = getattr(mod, routes_cfg['blueprint'])
                prefix = routes_cfg.get('url_prefix')
                app.register_blueprint(bp_obj, url_prefix=prefix, strict_slashes=False)
            except Exception as e:
                errors.append(f"routes ({routes_cfg['module']}): {e}")

        # 3) Register CLI commands
        cli_cfg = meta.get('cli')
        if cli_cfg:
            try:
                mod = importlib.import_module(cli_cfg['module'])
                fn = getattr(mod, cli_cfg['callable'])
                app.cli.add_command(fn)
            except Exception as e:
                errors.append(f"cli ({cli_cfg['module']}:{cli_cfg['callable']}): {e}")

        # 4) Template globals
        for tg in meta.get('template_globals', []):
            try:
                mod_name, fn_name = tg['callable'].rsplit('.', 1)
                mod = importlib.import_module(mod_name)
                fn = getattr(mod, fn_name)
                app.jinja_env.globals[tg['name']] = fn
            except Exception as e:
                errors.append(f"template_global ({tg}): {e}")

        # 5) Subplugins (models + routes)
        for sp in meta.get('subplugins', []):
            for mp in sp.get('models', []):
                try:
                    importlib.import_module(mp)
                except Exception as e:
                    errors.append(f"subplugin model ({mp}): {e}")
            sp_rt = sp.get('routes')
            if sp_rt:
                try:
                    mod = importlib.import_module(sp_rt['module'])
                    bp_obj = getattr(mod, sp_rt['blueprint'])
                    prefix = sp_rt.get('url_prefix')
                    app.register_blueprint(bp_obj, url_prefix=prefix, strict_slashes=False)
                except Exception as e:
                    errors.append(f"subplugin routes ({sp_rt['module']}): {e}")

        # Final status
        if errors:
            print(f"Plugin '{name}' 🛑 failed to load: {'; '.join(errors)}")
        else:
            print(f"Plugin '{name}' ✔️ Loaded Successfully.")

    # ─── Context processors, analytics, teardown ──────────────────────────
    @app.context_processor
    def inject_current_year():
        from datetime import datetime
        return {'current_year': datetime.now().year}

    @app.context_processor
@@ -127,17 +152,16 @@ def create_app():

    @app.after_request
    def log_analytics(response):
        # import here to avoid circular at module‐load time
        from plugins.admin.models import AnalyticsEvent
        from plugins.admin.models import AnalyticsEvent  # noqa: E402
        try:
            duration = time.time() - getattr(request, '_start_time', time.time())
            ev = AnalyticsEvent(
                method=request.method,
                path=request.path,
                status_code=response.status_code,
                response_time=duration,
                user_agent=request.headers.get('User-Agent'),
                referer=request.headers.get('Referer'),
                method = request.method,
                path = request.path,
                status_code = response.status_code,
                response_time = duration,
                user_agent = request.headers.get('User-Agent'),
                referer = request.headers.get('Referer'),
                accept_language=request.headers.get('Accept-Language'),
            )
            db.session.add(ev)
@@ -146,6 +170,11 @@ def create_app():
            db.session.rollback()
        return response

    @app.teardown_appcontext
    def shutdown_session(exception=None):
        db.session.remove()

    # ─── Keep the template helper exposed ──────────────────────────
    app.jinja_env.globals['generate_image_url'] = generate_image_url

    return app
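For reference, a minimal sketch of a plugin package that the JSON-driven loader above would accept. The plugins/example/ name and the route body are illustrative; only the contract (a plugin.json beside the package, and a blueprint attribute named in its routes entry) comes from the loader code:

# plugins/example/routes.py  (hypothetical plugin, shown only to illustrate the loader contract)
from flask import Blueprint

# plugin.json would point at this module/attribute:
#   "routes": {"module": "plugins.example.routes", "blueprint": "bp", "url_prefix": "/example"}
bp = Blueprint('example', __name__)

@bp.route('/')
def index():
    return 'example plugin home'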
@@ -1,45 +1,93 @@
import os
from dotenv import load_dotenv, find_dotenv

# CONFIG_DIR is your app package; go up one to the project root
# ─── Load .env from project root or any parent ──────────────────────────
dotenv_path = find_dotenv()
if dotenv_path:
    load_dotenv(dotenv_path, override=True)

# ─── Paths ──────────────────────────
CONFIG_DIR = os.path.dirname(os.path.abspath(__file__))
PROJECT_ROOT = os.path.dirname(CONFIG_DIR)

class Config:
    SECRET_KEY = os.environ['SECRET_KEY']
    MAX_CONTENT_LENGTH = int(
        os.environ.get('MAX_CONTENT_LENGTH', 20 * 1024 * 1024 * 1024)
    )
    ALLOWED_EXTENSIONS = {'png', 'jpg', 'jpeg', 'gif'}
    # ─── Environment ──────────────────────────
    ENV = (
        os.getenv('FLASK_ENV')
        or os.getenv('DOCKER_ENV')
        or 'production'
    ).lower()

    # ─── Secret Key ──────────────────────────
    if ENV == 'production':
        SECRET_KEY = os.getenv('SECRET_KEY')
        if not SECRET_KEY:
            raise RuntimeError(
                "SECRET_KEY environment variable not set! "
                "Generate one with `openssl rand -hex 32` and export it."
            )
    else:
        # dev/test: fall back to env or a random one
        SECRET_KEY = os.getenv('SECRET_KEY') or os.urandom(24).hex()

    # ─── Uploads ──────────────────────────
    # Default to PROJECT_ROOT/static/uploads; if UPLOAD_FOLDER env is set, resolve relative to PROJECT_ROOT
    _env_upload = os.getenv('UPLOAD_FOLDER', '')
    if _env_upload:
        # if absolute, use directly; otherwise join to project root
        UPLOAD_FOLDER = _env_upload if os.path.isabs(_env_upload) else os.path.join(PROJECT_ROOT, _env_upload)
    else:
        UPLOAD_FOLDER = os.path.join(PROJECT_ROOT, "static", "uploads")

    # MySQL connection parameters
    MYSQL_USER = os.environ['MYSQL_USER']
    MYSQL_PASSWORD = os.environ['MYSQL_PASSWORD']
    MYSQL_HOST = os.environ['MYSQL_HOST']
    MYSQL_PORT = int(os.environ.get('MYSQL_PORT', 3306))
    MYSQL_DATABASE = os.environ['MYSQL_DATABASE']
    MAX_CONTENT_LENGTH = int(os.getenv('MAX_CONTENT_LENGTH', 20 * 1024**3))
    ALLOWED_EXTENSIONS = {'png', 'jpg', 'jpeg', 'gif'}

    # ─── Celery ──────────────────────────
    CELERY_BROKER_URL = os.getenv('CELERY_BROKER_URL')
    if not CELERY_BROKER_URL:
        raise RuntimeError("CELERY_BROKER_URL environment variable not set!")
    CELERY_RESULT_BACKEND = os.getenv('CELERY_RESULT_BACKEND', CELERY_BROKER_URL)

    # ─── MySQL ──────────────────────────
    MYSQL_USER = os.getenv('MYSQL_USER')
    MYSQL_PASSWORD = os.getenv('MYSQL_PASSWORD')
    if not MYSQL_PASSWORD:
        raise RuntimeError("MYSQL_PASSWORD environment variable not set!")
    MYSQL_HOST = os.getenv('MYSQL_HOST', 'db')
    MYSQL_PORT = int(os.getenv('MYSQL_PORT', 3306))
    MYSQL_DATABASE = os.getenv('MYSQL_DATABASE')
    if not MYSQL_DATABASE:
        raise RuntimeError("MYSQL_DATABASE environment variable not set!")

    # Build the SQLAlchemy database URI
    SQLALCHEMY_DATABASE_URI = (
        f"mysql+pymysql://{MYSQL_USER}:{MYSQL_PASSWORD}"
        f"@{MYSQL_HOST}:{MYSQL_PORT}/{MYSQL_DATABASE}"
    )
    SQLALCHEMY_TRACK_MODIFICATIONS = False

    # Optional toggles
    ENABLE_DB_SEEDING = os.environ.get('ENABLE_DB_SEEDING', '0') == '1'
    DOCKER_ENV = os.environ.get('FLASK_ENV', 'production')
    # ─── Cookies / Session ──────────────────────────
    SESSION_COOKIE_SECURE = True
    SESSION_COOKIE_HTTPONLY = True
    SESSION_COOKIE_SAMESITE = 'Lax'
    REMEMBER_COOKIE_SECURE = True
    REMEMBER_COOKIE_HTTPONLY = True
    REMEMBER_COOKIE_SAMESITE = 'Lax'
    PREFERRED_URL_SCHEME = 'https'

    # Neo4j configuration
    # ─── Toggles ──────────────────────────
    ENABLE_DB_SEEDING = os.getenv('ENABLE_DB_SEEDING', '0') == '1'
    DOCKER_ENV = os.getenv('DOCKER_ENV', 'production')

    # ─── Neo4j ──────────────────────────
    NEO4J_URI = os.getenv('NEO4J_URI', 'bolt://neo4j:7687')
    NEO4J_USER = os.getenv('NEO4J_USER', 'neo4j')
    NEO4J_PASSWORD = os.getenv('NEO4J_PASSWORD', 'your_secure_password')
    NEO4J_PASSWORD = os.getenv('NEO4J_PASSWORD')
    if not NEO4J_PASSWORD:
        raise RuntimeError("NEO4J_PASSWORD environment variable not set!")

    # Standard image size (for placeholders, etc.)
    # ─── Misc ──────────────────────────
    STANDARD_IMG_SIZE = tuple(
        map(int, os.getenv('STANDARD_IMG_SIZE', '300x200').split('x'))
    )

    PLANT_CARDS_BASE_URL = "https://plant.cards"
    ALLOW_REGISTRATION = False
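A quick way to see what the Config class above resolves to; a sketch that assumes the variables the class requires (SECRET_KEY in production, CELERY_BROKER_URL, MYSQL_PASSWORD, MYSQL_DATABASE, NEO4J_PASSWORD) are already exported, for example via the .env shown at the top of this commit:

# illustrative check, run from the project root with the environment loaded
from app.config import Config

print(Config.SQLALCHEMY_DATABASE_URI)   # assembled from the MYSQL_* variables
print(Config.STANDARD_IMG_SIZE)         # (300, 200), parsed from "300x200"
print(Config.UPLOAD_FOLDER)             # absolute path resolved against the project root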
15  app/routes.py  Normal file
@@ -0,0 +1,15 @@
from flask import render_template

def init_app(app):
    """
    Register core application routes directly on the Flask app:
      - GET /        → home page
      - GET /health  → health check
    """
    @app.route('/')
    def home():
        return render_template('core/home.html')

    @app.route('/health')
    def health():
        return 'OK', 200
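A minimal smoke test for the two core routes registered above; it assumes the environment variables and services from docker-compose.yml are available, since create_app() loads app.config.Config:

from app import create_app

app = create_app()
client = app.test_client()

assert client.get('/health').status_code == 200   # health() returns 'OK', 200
assert client.get('/').status_code == 200         # home() renders core/home.html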
@@ -7,6 +7,6 @@
<body>
  <h1>400 – Bad Request</h1>
  <p>{{ error.description or "Sorry, we couldn’t understand that request." }}</p>
  <a href="{{ url_for('main.index') }}">Return home</a>
  <a href="{{ url_for('home') }}">Return home</a>
</body>
</html>
@@ -1,4 +1,3 @@
{# plugins/core_ui/templates/core_ui/_media_macros.html #}
{% macro render_media_list(media_list, thumb_width=150, current_user=None) -%}
  {% if media_list %}
    <div class="row">
@@ -22,7 +22,7 @@
<body>
  <nav class="navbar navbar-expand-lg navbar-light bg-white shadow-sm mb-4">
    <div class="container">
      <a class="navbar-brand fw-bold" href="{{ url_for('core_ui.home') }}">
      <a class="navbar-brand fw-bold" href="{{ url_for('home') }}">
        Nature In Pots
      </a>
      <button
@@ -38,7 +38,7 @@
      <!-- Left links -->
      <ul class="navbar-nav me-auto mb-2 mb-lg-0">
        <li class="nav-item me-2">
          <a class="nav-link" href="{{ url_for('core_ui.home') }}">Home</a>
          <a class="nav-link" href="{{ url_for('home') }}">Home</a>
        </li>
        {% if current_user.is_authenticated %}
        <li class="nav-item me-2">
@@ -1,4 +1,4 @@
{% extends 'core_ui/base.html' %}
{% extends 'core/base.html' %}
{% block title %}Home | Nature In Pots{% endblock %}

{% block content %}
BIN  beta-0.1.0.zip  Normal file (binary file not shown)
BIN  betas/beta-0.0.10.zip  Normal file (binary file not shown)
BIN  betas/beta-0.0.11.zip  Normal file (binary file not shown)
BIN  betas/beta-0.0.12.zip  Normal file (binary file not shown)
BIN  betas/beta-0.0.13.zip  Normal file (binary file not shown)
BIN  betas/beta-0.0.2.zip  Normal file (binary file not shown)
BIN  betas/beta-0.0.3.zip  Normal file (binary file not shown)
BIN  betas/beta-0.0.4.zip  Normal file (binary file not shown)
BIN  betas/beta-0.0.5.zip  Normal file (binary file not shown)
BIN  betas/beta-0.0.6.zip  Normal file (binary file not shown)
BIN  betas/beta-0.0.7.zip  Normal file (binary file not shown)
BIN  betas/beta-0.0.8.zip  Normal file (binary file not shown)
BIN  betas/beta-0.0.9.zip  Normal file (binary file not shown)
@@ -1,6 +1,11 @@
version: '3.8'

services:
  redis:
    image: redis:7-alpine
    networks:
      - appnet

  web:
    build: .
    ports:
@@ -8,17 +13,23 @@ services:
    volumes:
      - .:/app
      - ./static/uploads:/app/static/uploads
    env_file:
      - .env
    environment:
      - FLASK_APP=app
      - FLASK_ENV=development
      - FLASK_APP=app:create_app
      - FLASK_ENV=${FLASK_ENV}
      - SECRET_KEY=${SECRET_KEY}
      - USE_REMOTE_MYSQL=${USE_REMOTE_MYSQL}
      - ENABLE_DB_SEEDING=${ENABLE_DB_SEEDING}
      - MYSQL_HOST=${MYSQL_HOST}
      - MYSQL_PORT=${MYSQL_PORT}
      - MYSQL_DATABASE=${MYSQL_DATABASE}
      - MYSQL_USER=${MYSQL_USER}
      - MYSQL_PASSWORD=${MYSQL_PASSWORD}
      - MYSQL_HOST=${MYSQL_HOST}
      - MYSQL_PORT=${MYSQL_PORT}
      - CELERY_BROKER_URL=redis://redis:6379/0
      - CELERY_RESULT_BACKEND=redis://redis:6379/0
    depends_on:
      - redis
      - db
    healthcheck:
      test: ["CMD-SHELL", "curl -fs http://127.0.0.1:5000/health || exit 1"]
@@ -32,6 +43,8 @@ services:
  db:
    image: mysql:8
    restart: unless-stopped
    env_file:
      - .env
    environment:
      - MYSQL_ROOT_PASSWORD=${MYSQL_ROOT_PASSWORD}
      - MYSQL_DATABASE=${MYSQL_DATABASE}
@@ -73,12 +86,31 @@ services:
      - "7474:7474"
      - "7687:7687"
    environment:
      - NEO4J_AUTH=neo4j/your_secure_password
      # only the one var Neo4j actually needs
      - NEO4J_AUTH=neo4j/${NEO4J_PASSWORD}
    volumes:
      - neo4j_data:/data
    networks:
      - appnet

  worker:
    build: .
    env_file:
      - .env
    environment:
      - SECRET_KEY=${SECRET_KEY}
      - MYSQL_HOST=${MYSQL_HOST}
      - MYSQL_PORT=${MYSQL_PORT}
      - CELERY_BROKER_URL=redis://redis:6379/0
      - CELERY_RESULT_BACKEND=redis://redis:6379/0
    user: "appuser"
    command: celery -A plugins.utility.celery:celery_app worker --loglevel=info
    depends_on:
      - redis
      - db
    networks:
      - appnet

volumes:
  neo4j_data:
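The worker service points Celery at plugins.utility.celery:celery_app, a module that is not part of this diff. A rough sketch of what it could look like, reusing the broker and backend variables wired in above (module layout and app name are assumptions):

# plugins/utility/celery.py  (hypothetical sketch; the real module is not shown in this commit)
import os
from celery import Celery

celery_app = Celery(
    'nature_in_pots',
    broker=os.getenv('CELERY_BROKER_URL', 'redis://redis:6379/0'),
    backend=os.getenv('CELERY_RESULT_BACKEND', 'redis://redis:6379/0'),
)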
@@ -1,49 +1,46 @@
#!/usr/bin/env bash
set -e

UPLOAD_DIR="/app/static/uploads"
mkdir -p "$UPLOAD_DIR"
chown -R 1000:998 "$UPLOAD_DIR"
chmod -R 775 "$UPLOAD_DIR"
# Resolve DB host/port from vars or defaults
DB_HOST="${DB_HOST:-${MYSQL_HOST:-db}}"
DB_PORT="${DB_PORT:-${MYSQL_PORT:-3306}}"

DB_HOST=${DB_HOST:-db}
DB_PORT=${DB_PORT:-3306}
echo "[⏳] Waiting for database at $DB_HOST:$DB_PORT..."
until nc -z "$DB_HOST" "$DB_PORT"; do
  sleep 1
done
echo "[✔] Database is up"

# Initialize Alembic if not present
if [ ! -d "./migrations" ]; then
  echo "[🆕] No migrations directory found; initializing Alembic"
  flask db init
  echo "[🆕] Generating initial migration"
  flask db migrate -m "initial" || echo "[ℹ️] Nothing to migrate"
fi
# Only the "flask" entrypoint needs uploads + migrations
if [ "$1" = "flask" ]; then

  # Autogenerate new migration if needed
  echo "[🛠️] Checking for new schema changes"
  flask db migrate -m "auto-migrate" || echo "[ℹ️] No schema changes detected"
  # Prepare upload dir (web only)
  UPLOAD_DIR="/app/${UPLOAD_FOLDER:-static/uploads}"
  mkdir -p "$UPLOAD_DIR"
  chown -R 1000:998 "$UPLOAD_DIR"
  chmod -R 775 "$UPLOAD_DIR"

  # Apply migrations
  echo "[▶️] Applying database migrations"
  flask db upgrade
  # Run DB migrations
  echo "[🛠️] Applying database migrations"
  flask db upgrade

  # Create any missing tables (edge case fallback)
  echo "[🔧] Running db.create_all() to ensure full sync"
  python <<EOF
  # Ensure any missing tables
  echo "[🛠️] Ensuring tables exist"
  python <<EOF
from app import create_app, db
app = create_app()
with app.app_context():
    db.create_all()
EOF

  # Optional seeding
  if [ "$ENABLE_DB_SEEDING" = "true" ] || [ "$ENABLE_DB_SEEDING" = "1" ]; then
  # Optional seeding
  if [ "${ENABLE_DB_SEEDING,,}" = "true" ] || [ "${ENABLE_DB_SEEDING}" = "1" ]; then
    echo "[🌱] Seeding Data"
    flask preload-data
  fi

  echo "[🚀] Starting Flask"
fi

echo "[🚀] Starting Flask"
# Finally hand off to whatever service was requested (flask or celery)
exec "$@"
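The seeding step calls flask preload-data, a CLI command that is not part of this diff; a sketch of how such a command could be registered so the loader's cli hook (app.cli.add_command) picks it up, with the body left as an assumption:

# plugins/<plugin>/cli.py  (hypothetical sketch of the command the entrypoint invokes)
import click
from flask.cli import with_appcontext

@click.command('preload-data')
@with_appcontext
def preload_data():
    """Seed initial data; the real implementation lives in the plugin, not in this commit."""
    from app import db
    # ... add seed rows here ...
    db.session.commit()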
@@ -1,66 +1,108 @@
from __future__ import with_statement
# File: migrations/env.py

import os
import logging
import glob
import sys
import json
import importlib

from alembic import context
from sqlalchemy import engine_from_config, pool
from logging.config import fileConfig
from sqlalchemy import create_engine, pool
from alembic import context

from flask import current_app
from app import db
# ─── Ensure we can load .env and app code ──────────────────────────
project_root = os.path.abspath(os.path.join(os.path.dirname(__file__), ".."))
sys.path.insert(0, project_root)

# -----------------------------
# 🔍 Automatically import all plugin models under their real package name
# -----------------------------
plugin_model_paths = glob.glob(os.path.join("plugins", "*", "models.py"))

for path in plugin_model_paths:
    rel = path[len("plugins/") : -len("/models.py")]
    pkg = f"plugins.{rel}.models"
    try:
        importlib.import_module(pkg)
        print(f"✅ Loaded: {pkg}")
    except Exception as e:
        print(f"❌ Failed to load {pkg}: {e}")
# -----------------------------
# ─── Load .env (so MYSQL_* and other vars are available) ──────────────────────────
from dotenv import load_dotenv, find_dotenv
dotenv_path = find_dotenv()  # looks in project root or parents
if dotenv_path:
    load_dotenv(dotenv_path, override=True)

# ─── Alembic Config & Logging ──────────────────────────
config = context.config
fileConfig(config.config_file_name)
logger = logging.getLogger("alembic.env")
logger.setLevel(logging.WARN)  # optional: silence alembic spam

# ─── Import your app’s metadata for 'autogenerate' support ──────────────────────────
from app import db
target_metadata = db.metadata

# ─── Dynamically import all plugin models listed in plugin.json ──────────────────────────
plugins_dir = os.path.join(project_root, "plugins")
for plugin in sorted(os.listdir(plugins_dir)):
    manifest = os.path.join(plugins_dir, plugin, "plugin.json")
    if not os.path.isfile(manifest):
        continue
    try:
        meta = json.load(open(manifest))
    except Exception:
        continue

    for model_mod in meta.get("models", []):
        try:
            importlib.import_module(model_mod)
        except ImportError:
            pass
    for sp in meta.get("subplugins", []):
        for model_mod in sp.get("models", []):
            try:
                importlib.import_module(model_mod)
            except ImportError:
                pass

# ─── Build or retrieve the database URL ──────────────────────────
def get_database_url():
    # 1) alembic.ini setting
    url = config.get_main_option("sqlalchemy.url")
    if url:
        return url

    # 2) Generic DATABASE_URL env var
    url = os.environ.get("DATABASE_URL")
    if url:
        return url

    # 3) MySQL env vars (from .env or docker-compose)
    user = os.environ.get("MYSQL_USER")
    pwd = os.environ.get("MYSQL_PASSWORD")
    host = os.environ.get("MYSQL_HOST", "db")
    port = os.environ.get("MYSQL_PORT", "3306")
    dbn = os.environ.get("MYSQL_DATABASE")
    if user and pwd and dbn:
        return f"mysql+pymysql://{user}:{pwd}@{host}:{port}/{dbn}"

    raise RuntimeError(
        "Database URL not configured for Alembic migrations; "
        "set 'sqlalchemy.url' in alembic.ini, or DATABASE_URL, "
        "or MYSQL_USER/MYSQL_PASSWORD/MYSQL_DATABASE in the environment"
    )

# ─── Offline migration ──────────────────────────
def run_migrations_offline():
    url = get_database_url()
    context.configure(
        url=current_app.config.get("SQLALCHEMY_DATABASE_URI"),
        url=url,
        target_metadata=target_metadata,
        literal_binds=True,
        dialect_opts={"paramstyle": "named"},
        sort_tables=True,
        render_as_batch=True,  # ✅ important!
    )
    with context.begin_transaction():
        context.run_migrations()

# ─── Online migration ──────────────────────────
def run_migrations_online():
    connectable = db.engine
    url = get_database_url()
    connectable = create_engine(url, poolclass=pool.NullPool)
    with connectable.connect() as connection:
        context.configure(
            connection=connection,
            target_metadata=target_metadata,
            compare_type=True,
            sort_tables=True,
            render_as_batch=True,
            compare_server_default=True,
        )
        with context.begin_transaction():
            context.run_migrations()

print("🧠 Alembic sees these tables:")
print(sorted(db.metadata.tables.keys()))

if context.is_offline_mode():
    run_migrations_offline()
else:
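Because env.py imports every plugin model listed in plugin.json before handing db.metadata to Alembic, autogenerate can diff plugin tables too. The same table list env.py prints can be checked by hand, assuming the variables required by app/config.py are set:

from app import create_app, db

app = create_app()   # create_app() also imports plugin models, populating db.metadata
with app.app_context():
    print(sorted(db.metadata.tables.keys()))   # the list Alembic autogenerate will see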
28
migrations/versions/06234a515bde_auto_migrate.py
Normal file
28
migrations/versions/06234a515bde_auto_migrate.py
Normal file
@ -0,0 +1,28 @@
|
||||
"""auto-migrate
|
||||
|
||||
Revision ID: 06234a515bde
|
||||
Revises: 87c6df96bef3
|
||||
Create Date: 2025-06-30 09:44:06.865642
|
||||
|
||||
"""
|
||||
from alembic import op
|
||||
import sqlalchemy as sa
|
||||
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision = '06234a515bde'
|
||||
down_revision = '87c6df96bef3'
|
||||
branch_labels = None
|
||||
depends_on = None
|
||||
|
||||
|
||||
def upgrade():
|
||||
# ### commands auto generated by Alembic - please adjust! ###
|
||||
pass
|
||||
# ### end Alembic commands ###
|
||||
|
||||
|
||||
def downgrade():
|
||||
# ### commands auto generated by Alembic - please adjust! ###
|
||||
pass
|
||||
# ### end Alembic commands ###
|
28
migrations/versions/076bfc1a441b_auto_migrate.py
Normal file
28
migrations/versions/076bfc1a441b_auto_migrate.py
Normal file
@ -0,0 +1,28 @@
|
||||
"""auto-migrate
|
||||
|
||||
Revision ID: 076bfc1a441b
|
||||
Revises: 7229fe50de09
|
||||
Create Date: 2025-06-30 08:22:10.087506
|
||||
|
||||
"""
|
||||
from alembic import op
|
||||
import sqlalchemy as sa
|
||||
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision = '076bfc1a441b'
|
||||
down_revision = '7229fe50de09'
|
||||
branch_labels = None
|
||||
depends_on = None
|
||||
|
||||
|
||||
def upgrade():
|
||||
# ### commands auto generated by Alembic - please adjust! ###
|
||||
pass
|
||||
# ### end Alembic commands ###
|
||||
|
||||
|
||||
def downgrade():
|
||||
# ### commands auto generated by Alembic - please adjust! ###
|
||||
pass
|
||||
# ### end Alembic commands ###
|
28
migrations/versions/0964777a3294_auto_migrate.py
Normal file
28
migrations/versions/0964777a3294_auto_migrate.py
Normal file
@ -0,0 +1,28 @@
|
||||
"""auto-migrate
|
||||
|
||||
Revision ID: 0964777a3294
|
||||
Revises: 53d0e3d0cd47
|
||||
Create Date: 2025-06-30 09:37:40.005273
|
||||
|
||||
"""
|
||||
from alembic import op
|
||||
import sqlalchemy as sa
|
||||
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision = '0964777a3294'
|
||||
down_revision = '53d0e3d0cd47'
|
||||
branch_labels = None
|
||||
depends_on = None
|
||||
|
||||
|
||||
def upgrade():
|
||||
# ### commands auto generated by Alembic - please adjust! ###
|
||||
pass
|
||||
# ### end Alembic commands ###
|
||||
|
||||
|
||||
def downgrade():
|
||||
# ### commands auto generated by Alembic - please adjust! ###
|
||||
pass
|
||||
# ### end Alembic commands ###
|
28
migrations/versions/10e39b33d4e7_auto_migrate.py
Normal file
28
migrations/versions/10e39b33d4e7_auto_migrate.py
Normal file
@ -0,0 +1,28 @@
|
||||
"""auto-migrate
|
||||
|
||||
Revision ID: 10e39b33d4e7
|
||||
Revises: ee4be515bb55
|
||||
Create Date: 2025-06-30 10:06:13.159708
|
||||
|
||||
"""
|
||||
from alembic import op
|
||||
import sqlalchemy as sa
|
||||
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision = '10e39b33d4e7'
|
||||
down_revision = 'ee4be515bb55'
|
||||
branch_labels = None
|
||||
depends_on = None
|
||||
|
||||
|
||||
def upgrade():
|
||||
# ### commands auto generated by Alembic - please adjust! ###
|
||||
pass
|
||||
# ### end Alembic commands ###
|
||||
|
||||
|
||||
def downgrade():
|
||||
# ### commands auto generated by Alembic - please adjust! ###
|
||||
pass
|
||||
# ### end Alembic commands ###
|
28
migrations/versions/12cc29f97b11_auto_migrate.py
Normal file
28
migrations/versions/12cc29f97b11_auto_migrate.py
Normal file
@ -0,0 +1,28 @@
|
||||
"""auto-migrate
|
||||
|
||||
Revision ID: 12cc29f97b11
|
||||
Revises: dcc114909948
|
||||
Create Date: 2025-06-30 07:59:46.612023
|
||||
|
||||
"""
|
||||
from alembic import op
|
||||
import sqlalchemy as sa
|
||||
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision = '12cc29f97b11'
|
||||
down_revision = 'dcc114909948'
|
||||
branch_labels = None
|
||||
depends_on = None
|
||||
|
||||
|
||||
def upgrade():
|
||||
# ### commands auto generated by Alembic - please adjust! ###
|
||||
pass
|
||||
# ### end Alembic commands ###
|
||||
|
||||
|
||||
def downgrade():
|
||||
# ### commands auto generated by Alembic - please adjust! ###
|
||||
pass
|
||||
# ### end Alembic commands ###
|
28
migrations/versions/12ef820b5618_auto_migrate.py
Normal file
28
migrations/versions/12ef820b5618_auto_migrate.py
Normal file
@ -0,0 +1,28 @@
|
||||
"""auto-migrate
|
||||
|
||||
Revision ID: 12ef820b5618
|
||||
Revises: 228e71f1a33b
|
||||
Create Date: 2025-06-30 08:45:15.427549
|
||||
|
||||
"""
|
||||
from alembic import op
|
||||
import sqlalchemy as sa
|
||||
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision = '12ef820b5618'
|
||||
down_revision = '228e71f1a33b'
|
||||
branch_labels = None
|
||||
depends_on = None
|
||||
|
||||
|
||||
def upgrade():
|
||||
# ### commands auto generated by Alembic - please adjust! ###
|
||||
pass
|
||||
# ### end Alembic commands ###
|
||||
|
||||
|
||||
def downgrade():
|
||||
# ### commands auto generated by Alembic - please adjust! ###
|
||||
pass
|
||||
# ### end Alembic commands ###
|
28
migrations/versions/228e71f1a33b_auto_migrate.py
Normal file
28
migrations/versions/228e71f1a33b_auto_migrate.py
Normal file
@ -0,0 +1,28 @@
|
||||
"""auto-migrate
|
||||
|
||||
Revision ID: 228e71f1a33b
|
||||
Revises: 493fbb46e881
|
||||
Create Date: 2025-06-30 08:40:05.646744
|
||||
|
||||
"""
|
||||
from alembic import op
|
||||
import sqlalchemy as sa
|
||||
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision = '228e71f1a33b'
|
||||
down_revision = '493fbb46e881'
|
||||
branch_labels = None
|
||||
depends_on = None
|
||||
|
||||
|
||||
def upgrade():
|
||||
# ### commands auto generated by Alembic - please adjust! ###
|
||||
pass
|
||||
# ### end Alembic commands ###
|
||||
|
||||
|
||||
def downgrade():
|
||||
# ### commands auto generated by Alembic - please adjust! ###
|
||||
pass
|
||||
# ### end Alembic commands ###
|
36
migrations/versions/24de4aa78a43_auto_migrate.py
Normal file
36
migrations/versions/24de4aa78a43_auto_migrate.py
Normal file
@ -0,0 +1,36 @@
|
||||
"""auto-migrate
|
||||
|
||||
Revision ID: 24de4aa78a43
|
||||
Revises: 4082065b932b
|
||||
Create Date: 2025-06-28 23:24:05.909001
|
||||
|
||||
"""
|
||||
from alembic import op
|
||||
import sqlalchemy as sa
|
||||
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision = '24de4aa78a43'
|
||||
down_revision = '4082065b932b'
|
||||
branch_labels = None
|
||||
depends_on = None
|
||||
|
||||
|
||||
def upgrade():
|
||||
# ### commands auto generated by Alembic - please adjust! ###
|
||||
op.create_table('zip_jobs',
|
||||
sa.Column('id', sa.Integer(), nullable=False),
|
||||
sa.Column('user_id', sa.Integer(), nullable=False),
|
||||
sa.Column('filename', sa.String(length=255), nullable=False),
|
||||
sa.Column('created_at', sa.DateTime(), nullable=True),
|
||||
sa.Column('status', sa.String(length=20), nullable=True),
|
||||
sa.Column('error', sa.Text(), nullable=True),
|
||||
sa.PrimaryKeyConstraint('id')
|
||||
)
|
||||
# ### end Alembic commands ###
|
||||
|
||||
|
||||
def downgrade():
|
||||
# ### commands auto generated by Alembic - please adjust! ###
|
||||
op.drop_table('zip_jobs')
|
||||
# ### end Alembic commands ###
|
28
migrations/versions/27f1b3976f3f_auto_migrate.py
Normal file
28
migrations/versions/27f1b3976f3f_auto_migrate.py
Normal file
@ -0,0 +1,28 @@
|
||||
"""auto-migrate
|
||||
|
||||
Revision ID: 27f1b3976f3f
|
||||
Revises: 10e39b33d4e7
|
||||
Create Date: 2025-06-30 10:09:47.442196
|
||||
|
||||
"""
|
||||
from alembic import op
|
||||
import sqlalchemy as sa
|
||||
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision = '27f1b3976f3f'
|
||||
down_revision = '10e39b33d4e7'
|
||||
branch_labels = None
|
||||
depends_on = None
|
||||
|
||||
|
||||
def upgrade():
|
||||
# ### commands auto generated by Alembic - please adjust! ###
|
||||
pass
|
||||
# ### end Alembic commands ###
|
||||
|
||||
|
||||
def downgrade():
|
||||
# ### commands auto generated by Alembic - please adjust! ###
|
||||
pass
|
||||
# ### end Alembic commands ###
|
28
migrations/versions/2d11e31941d9_auto_migrate.py
Normal file
28
migrations/versions/2d11e31941d9_auto_migrate.py
Normal file
@ -0,0 +1,28 @@
|
||||
"""auto-migrate
|
||||
|
||||
Revision ID: 2d11e31941d9
|
||||
Revises: acd3093204e7
|
||||
Create Date: 2025-06-30 07:45:03.061969
|
||||
|
||||
"""
|
||||
from alembic import op
|
||||
import sqlalchemy as sa
|
||||
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision = '2d11e31941d9'
|
||||
down_revision = 'acd3093204e7'
|
||||
branch_labels = None
|
||||
depends_on = None
|
||||
|
||||
|
||||
def upgrade():
|
||||
# ### commands auto generated by Alembic - please adjust! ###
|
||||
pass
|
||||
# ### end Alembic commands ###
|
||||
|
||||
|
||||
def downgrade():
|
||||
# ### commands auto generated by Alembic - please adjust! ###
|
||||
pass
|
||||
# ### end Alembic commands ###
|
28
migrations/versions/310f500a3d2f_auto_migrate.py
Normal file
28
migrations/versions/310f500a3d2f_auto_migrate.py
Normal file
@ -0,0 +1,28 @@
|
||||
"""auto-migrate
|
||||
|
||||
Revision ID: 310f500a3d2f
|
||||
Revises: d49ee8d82364
|
||||
Create Date: 2025-06-30 10:13:54.468427
|
||||
|
||||
"""
|
||||
from alembic import op
|
||||
import sqlalchemy as sa
|
||||
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision = '310f500a3d2f'
|
||||
down_revision = 'd49ee8d82364'
|
||||
branch_labels = None
|
||||
depends_on = None
|
||||
|
||||
|
||||
def upgrade():
|
||||
# ### commands auto generated by Alembic - please adjust! ###
|
||||
pass
|
||||
# ### end Alembic commands ###
|
||||
|
||||
|
||||
def downgrade():
|
||||
# ### commands auto generated by Alembic - please adjust! ###
|
||||
pass
|
||||
# ### end Alembic commands ###
|
28
migrations/versions/493fbb46e881_auto_migrate.py
Normal file
28
migrations/versions/493fbb46e881_auto_migrate.py
Normal file
@ -0,0 +1,28 @@
|
||||
"""auto-migrate
|
||||
|
||||
Revision ID: 493fbb46e881
|
||||
Revises: faeca4f53b04
|
||||
Create Date: 2025-06-30 08:28:50.667633
|
||||
|
||||
"""
|
||||
from alembic import op
|
||||
import sqlalchemy as sa
|
||||
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision = '493fbb46e881'
|
||||
down_revision = 'faeca4f53b04'
|
||||
branch_labels = None
|
||||
depends_on = None
|
||||
|
||||
|
||||
def upgrade():
|
||||
# ### commands auto generated by Alembic - please adjust! ###
|
||||
pass
|
||||
# ### end Alembic commands ###
|
||||
|
||||
|
||||
def downgrade():
|
||||
# ### commands auto generated by Alembic - please adjust! ###
|
||||
pass
|
||||
# ### end Alembic commands ###
|
28
migrations/versions/53d0e3d0cd47_auto_migrate.py
Normal file
28
migrations/versions/53d0e3d0cd47_auto_migrate.py
Normal file
@ -0,0 +1,28 @@
|
||||
"""auto-migrate
|
||||
|
||||
Revision ID: 53d0e3d0cd47
|
||||
Revises: c6fad4522e3c
|
||||
Create Date: 2025-06-30 09:32:22.487970
|
||||
|
||||
"""
|
||||
from alembic import op
|
||||
import sqlalchemy as sa
|
||||
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision = '53d0e3d0cd47'
|
||||
down_revision = 'c6fad4522e3c'
|
||||
branch_labels = None
|
||||
depends_on = None
|
||||
|
||||
|
||||
def upgrade():
|
||||
# ### commands auto generated by Alembic - please adjust! ###
|
||||
pass
|
||||
# ### end Alembic commands ###
|
||||
|
||||
|
||||
def downgrade():
|
||||
# ### commands auto generated by Alembic - please adjust! ###
|
||||
pass
|
||||
# ### end Alembic commands ###
|
28
migrations/versions/7229fe50de09_auto_migrate.py
Normal file
28
migrations/versions/7229fe50de09_auto_migrate.py
Normal file
@ -0,0 +1,28 @@
|
||||
"""auto-migrate
|
||||
|
||||
Revision ID: 7229fe50de09
|
||||
Revises: 12cc29f97b11
|
||||
Create Date: 2025-06-30 08:20:50.414985
|
||||
|
||||
"""
|
||||
from alembic import op
|
||||
import sqlalchemy as sa
|
||||
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision = '7229fe50de09'
|
||||
down_revision = '12cc29f97b11'
|
||||
branch_labels = None
|
||||
depends_on = None
|
||||
|
||||
|
||||
def upgrade():
|
||||
# ### commands auto generated by Alembic - please adjust! ###
|
||||
pass
|
||||
# ### end Alembic commands ###
|
||||
|
||||
|
||||
def downgrade():
|
||||
# ### commands auto generated by Alembic - please adjust! ###
|
||||
pass
|
||||
# ### end Alembic commands ###
|
28
migrations/versions/87c6df96bef3_auto_migrate.py
Normal file
28
migrations/versions/87c6df96bef3_auto_migrate.py
Normal file
@ -0,0 +1,28 @@
|
||||
"""auto-migrate
|
||||
|
||||
Revision ID: 87c6df96bef3
|
||||
Revises: f34b5e058563
|
||||
Create Date: 2025-06-30 09:43:22.353321
|
||||
|
||||
"""
|
||||
from alembic import op
|
||||
import sqlalchemy as sa
|
||||
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision = '87c6df96bef3'
|
||||
down_revision = 'f34b5e058563'
|
||||
branch_labels = None
|
||||
depends_on = None
|
||||
|
||||
|
||||
def upgrade():
|
||||
# ### commands auto generated by Alembic - please adjust! ###
|
||||
pass
|
||||
# ### end Alembic commands ###
|
||||
|
||||
|
||||
def downgrade():
|
||||
# ### commands auto generated by Alembic - please adjust! ###
|
||||
pass
|
||||
# ### end Alembic commands ###
|
28
migrations/versions/a5cb08298ee4_auto_migrate.py
Normal file
28
migrations/versions/a5cb08298ee4_auto_migrate.py
Normal file
@ -0,0 +1,28 @@
|
||||
"""auto-migrate
|
||||
|
||||
Revision ID: a5cb08298ee4
|
||||
Revises: 0964777a3294
|
||||
Create Date: 2025-06-30 09:40:06.234651
|
||||
|
||||
"""
|
||||
from alembic import op
|
||||
import sqlalchemy as sa
|
||||
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision = 'a5cb08298ee4'
|
||||
down_revision = '0964777a3294'
|
||||
branch_labels = None
|
||||
depends_on = None
|
||||
|
||||
|
||||
def upgrade():
|
||||
# ### commands auto generated by Alembic - please adjust! ###
|
||||
pass
|
||||
# ### end Alembic commands ###
|
||||
|
||||
|
||||
def downgrade():
|
||||
# ### commands auto generated by Alembic - please adjust! ###
|
||||
pass
|
||||
# ### end Alembic commands ###
|
28
migrations/versions/acd3093204e7_auto_migrate.py
Normal file
28
migrations/versions/acd3093204e7_auto_migrate.py
Normal file
@ -0,0 +1,28 @@
|
||||
"""auto-migrate
|
||||
|
||||
Revision ID: acd3093204e7
|
||||
Revises: f741addef1a1
|
||||
Create Date: 2025-06-30 07:29:07.401797
|
||||
|
||||
"""
|
||||
from alembic import op
|
||||
import sqlalchemy as sa
|
||||
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision = 'acd3093204e7'
|
||||
down_revision = 'f741addef1a1'
|
||||
branch_labels = None
|
||||
depends_on = None
|
||||
|
||||
|
||||
def upgrade():
|
||||
# ### commands auto generated by Alembic - please adjust! ###
|
||||
pass
|
||||
# ### end Alembic commands ###
|
||||
|
||||
|
||||
def downgrade():
|
||||
# ### commands auto generated by Alembic - please adjust! ###
|
||||
pass
|
||||
# ### end Alembic commands ###
|
28
migrations/versions/b1e37dc718f2_auto_migrate.py
Normal file
28
migrations/versions/b1e37dc718f2_auto_migrate.py
Normal file
@ -0,0 +1,28 @@
|
||||
"""auto-migrate
|
||||
|
||||
Revision ID: b1e37dc718f2
|
||||
Revises: c92477263320
|
||||
Create Date: 2025-06-30 09:46:40.791979
|
||||
|
||||
"""
|
||||
from alembic import op
|
||||
import sqlalchemy as sa
|
||||
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision = 'b1e37dc718f2'
|
||||
down_revision = 'c92477263320'
|
||||
branch_labels = None
|
||||
depends_on = None
|
||||
|
||||
|
||||
def upgrade():
|
||||
# ### commands auto generated by Alembic - please adjust! ###
|
||||
pass
|
||||
# ### end Alembic commands ###
|
||||
|
||||
|
||||
def downgrade():
|
||||
# ### commands auto generated by Alembic - please adjust! ###
|
||||
pass
|
||||
# ### end Alembic commands ###
|
28
migrations/versions/b57c767ad0d6_auto_migrate.py
Normal file
28
migrations/versions/b57c767ad0d6_auto_migrate.py
Normal file
@ -0,0 +1,28 @@
|
||||
"""auto-migrate
|
||||
|
||||
Revision ID: b57c767ad0d6
|
||||
Revises: 310f500a3d2f
|
||||
Create Date: 2025-06-30 10:15:24.093788
|
||||
|
||||
"""
|
||||
from alembic import op
|
||||
import sqlalchemy as sa
|
||||
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision = 'b57c767ad0d6'
|
||||
down_revision = '310f500a3d2f'
|
||||
branch_labels = None
|
||||
depends_on = None
|
||||
|
||||
|
||||
def upgrade():
|
||||
# ### commands auto generated by Alembic - please adjust! ###
|
||||
pass
|
||||
# ### end Alembic commands ###
|
||||
|
||||
|
||||
def downgrade():
|
||||
# ### commands auto generated by Alembic - please adjust! ###
|
||||
pass
|
||||
# ### end Alembic commands ###
|
28
migrations/versions/b684611b27b1_auto_migrate.py
Normal file
28
migrations/versions/b684611b27b1_auto_migrate.py
Normal file
@ -0,0 +1,28 @@
|
||||
"""auto-migrate
|
||||
|
||||
Revision ID: b684611b27b1
|
||||
Revises: 12ef820b5618
|
||||
Create Date: 2025-06-30 08:51:21.461638
|
||||
|
||||
"""
|
||||
from alembic import op
|
||||
import sqlalchemy as sa
|
||||
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision = 'b684611b27b1'
|
||||
down_revision = '12ef820b5618'
|
||||
branch_labels = None
|
||||
depends_on = None
|
||||
|
||||
|
||||
def upgrade():
|
||||
# ### commands auto generated by Alembic - please adjust! ###
|
||||
pass
|
||||
# ### end Alembic commands ###
|
||||
|
||||
|
||||
def downgrade():
|
||||
# ### commands auto generated by Alembic - please adjust! ###
|
||||
pass
|
||||
# ### end Alembic commands ###
|
28
migrations/versions/c6fad4522e3c_auto_migrate.py
Normal file
28
migrations/versions/c6fad4522e3c_auto_migrate.py
Normal file
@ -0,0 +1,28 @@
|
||||
"""auto-migrate
|
||||
|
||||
Revision ID: c6fad4522e3c
|
||||
Revises: dd2492e0ede0
|
||||
Create Date: 2025-06-30 09:30:35.084623
|
||||
|
||||
"""
|
||||
from alembic import op
|
||||
import sqlalchemy as sa
|
||||
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision = 'c6fad4522e3c'
|
||||
down_revision = 'dd2492e0ede0'
|
||||
branch_labels = None
|
||||
depends_on = None
|
||||
|
||||
|
||||
def upgrade():
|
||||
# ### commands auto generated by Alembic - please adjust! ###
|
||||
pass
|
||||
# ### end Alembic commands ###
|
||||
|
||||
|
||||
def downgrade():
|
||||
# ### commands auto generated by Alembic - please adjust! ###
|
||||
pass
|
||||
# ### end Alembic commands ###
|
28
migrations/versions/c92477263320_auto_migrate.py
Normal file
28
migrations/versions/c92477263320_auto_migrate.py
Normal file
@ -0,0 +1,28 @@
|
||||
"""auto-migrate
|
||||
|
||||
Revision ID: c92477263320
|
||||
Revises: fa34eb3f6084
|
||||
Create Date: 2025-06-30 09:45:35.016682
|
||||
|
||||
"""
|
||||
from alembic import op
|
||||
import sqlalchemy as sa
|
||||
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision = 'c92477263320'
|
||||
down_revision = 'fa34eb3f6084'
|
||||
branch_labels = None
|
||||
depends_on = None
|
||||
|
||||
|
||||
def upgrade():
|
||||
# ### commands auto generated by Alembic - please adjust! ###
|
||||
pass
|
||||
# ### end Alembic commands ###
|
||||
|
||||
|
||||
def downgrade():
|
||||
# ### commands auto generated by Alembic - please adjust! ###
|
||||
pass
|
||||
# ### end Alembic commands ###
|
28
migrations/versions/d49ee8d82364_auto_migrate.py
Normal file
28
migrations/versions/d49ee8d82364_auto_migrate.py
Normal file
@ -0,0 +1,28 @@
|
||||
"""auto-migrate
|
||||
|
||||
Revision ID: d49ee8d82364
|
||||
Revises: 27f1b3976f3f
|
||||
Create Date: 2025-06-30 10:12:13.065540
|
||||
|
||||
"""
|
||||
from alembic import op
|
||||
import sqlalchemy as sa
|
||||
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision = 'd49ee8d82364'
|
||||
down_revision = '27f1b3976f3f'
|
||||
branch_labels = None
|
||||
depends_on = None
|
||||
|
||||
|
||||
def upgrade():
|
||||
# ### commands auto generated by Alembic - please adjust! ###
|
||||
pass
|
||||
# ### end Alembic commands ###
|
||||
|
||||
|
||||
def downgrade():
|
||||
# ### commands auto generated by Alembic - please adjust! ###
|
||||
pass
|
||||
# ### end Alembic commands ###
|
28
migrations/versions/d647dd4d3fbd_auto_migrate.py
Normal file
28
migrations/versions/d647dd4d3fbd_auto_migrate.py
Normal file
@ -0,0 +1,28 @@
|
||||
"""auto-migrate
|
||||
|
||||
Revision ID: d647dd4d3fbd
|
||||
Revises: b684611b27b1
|
||||
Create Date: 2025-06-30 08:54:56.276182
|
||||
|
||||
"""
|
||||
from alembic import op
|
||||
import sqlalchemy as sa
|
||||
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision = 'd647dd4d3fbd'
|
||||
down_revision = 'b684611b27b1'
|
||||
branch_labels = None
|
||||
depends_on = None
|
||||
|
||||
|
||||
def upgrade():
|
||||
# ### commands auto generated by Alembic - please adjust! ###
|
||||
pass
|
||||
# ### end Alembic commands ###
|
||||
|
||||
|
||||
def downgrade():
|
||||
# ### commands auto generated by Alembic - please adjust! ###
|
||||
pass
|
||||
# ### end Alembic commands ###
|
28
migrations/versions/dcc114909948_auto_migrate.py
Normal file
28
migrations/versions/dcc114909948_auto_migrate.py
Normal file
@ -0,0 +1,28 @@
|
||||
"""auto-migrate
|
||||
|
||||
Revision ID: dcc114909948
|
||||
Revises: 2d11e31941d9
|
||||
Create Date: 2025-06-30 07:49:55.919638
|
||||
|
||||
"""
|
||||
from alembic import op
|
||||
import sqlalchemy as sa
|
||||
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision = 'dcc114909948'
|
||||
down_revision = '2d11e31941d9'
|
||||
branch_labels = None
|
||||
depends_on = None
|
||||
|
||||
|
||||
def upgrade():
|
||||
# ### commands auto generated by Alembic - please adjust! ###
|
||||
pass
|
||||
# ### end Alembic commands ###
|
||||
|
||||
|
||||
def downgrade():
|
||||
# ### commands auto generated by Alembic - please adjust! ###
|
||||
pass
|
||||
# ### end Alembic commands ###
|
28
migrations/versions/dd2492e0ede0_auto_migrate.py
Normal file
28
migrations/versions/dd2492e0ede0_auto_migrate.py
Normal file
@ -0,0 +1,28 @@
|
||||
"""auto-migrate
|
||||
|
||||
Revision ID: dd2492e0ede0
|
||||
Revises: d647dd4d3fbd
|
||||
Create Date: 2025-06-30 09:18:20.337888
|
||||
|
||||
"""
|
||||
from alembic import op
|
||||
import sqlalchemy as sa
|
||||
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision = 'dd2492e0ede0'
|
||||
down_revision = 'd647dd4d3fbd'
|
||||
branch_labels = None
|
||||
depends_on = None
|
||||
|
||||
|
||||
def upgrade():
|
||||
# ### commands auto generated by Alembic - please adjust! ###
|
||||
pass
|
||||
# ### end Alembic commands ###
|
||||
|
||||
|
||||
def downgrade():
|
||||
# ### commands auto generated by Alembic - please adjust! ###
|
||||
pass
|
||||
# ### end Alembic commands ###
|
28
migrations/versions/ee4be515bb55_auto_migrate.py
Normal file
28
migrations/versions/ee4be515bb55_auto_migrate.py
Normal file
@ -0,0 +1,28 @@
|
||||
"""auto-migrate
|
||||
|
||||
Revision ID: ee4be515bb55
|
||||
Revises: b1e37dc718f2
|
||||
Create Date: 2025-06-30 09:57:22.706206
|
||||
|
||||
"""
|
||||
from alembic import op
|
||||
import sqlalchemy as sa
|
||||
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision = 'ee4be515bb55'
|
||||
down_revision = 'b1e37dc718f2'
|
||||
branch_labels = None
|
||||
depends_on = None
|
||||
|
||||
|
||||
def upgrade():
|
||||
# ### commands auto generated by Alembic - please adjust! ###
|
||||
pass
|
||||
# ### end Alembic commands ###
|
||||
|
||||
|
||||
def downgrade():
|
||||
# ### commands auto generated by Alembic - please adjust! ###
|
||||
pass
|
||||
# ### end Alembic commands ###
|
28
migrations/versions/f34b5e058563_auto_migrate.py
Normal file
28
migrations/versions/f34b5e058563_auto_migrate.py
Normal file
@ -0,0 +1,28 @@
|
||||
"""auto-migrate
|
||||
|
||||
Revision ID: f34b5e058563
|
||||
Revises: a5cb08298ee4
|
||||
Create Date: 2025-06-30 09:40:49.692944
|
||||
|
||||
"""
|
||||
from alembic import op
|
||||
import sqlalchemy as sa
|
||||
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision = 'f34b5e058563'
|
||||
down_revision = 'a5cb08298ee4'
|
||||
branch_labels = None
|
||||
depends_on = None
|
||||
|
||||
|
||||
def upgrade():
|
||||
# ### commands auto generated by Alembic - please adjust! ###
|
||||
pass
|
||||
# ### end Alembic commands ###
|
||||
|
||||
|
||||
def downgrade():
|
||||
# ### commands auto generated by Alembic - please adjust! ###
|
||||
pass
|
||||
# ### end Alembic commands ###
|
59
migrations/versions/f741addef1a1_auto_migrate.py
Normal file
59
migrations/versions/f741addef1a1_auto_migrate.py
Normal file
@ -0,0 +1,59 @@
|
||||
"""auto-migrate
|
||||
|
||||
Revision ID: f741addef1a1
|
||||
Revises: 24de4aa78a43
|
||||
Create Date: 2025-06-29 10:16:35.487343
|
||||
|
||||
"""
|
||||
from alembic import op
|
||||
import sqlalchemy as sa
|
||||
from sqlalchemy.dialects import mysql
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision = 'f741addef1a1'
|
||||
down_revision = '24de4aa78a43'
|
||||
branch_labels = None
|
||||
depends_on = None
|
||||
|
||||
|
||||
def upgrade():
|
||||
# ### commands auto generated by Alembic - please adjust! ###
|
||||
op.drop_table('update_images')
|
||||
op.drop_table('plant_updates')
|
||||
with op.batch_alter_table('grow_logs', schema=None) as batch_op:
|
||||
batch_op.add_column(sa.Column('media_id', sa.Integer(), nullable=True))
|
||||
batch_op.create_foreign_key(None, 'media', ['media_id'], ['id'])
|
||||
|
||||
# ### end Alembic commands ###
|
||||
|
||||
|
||||
def downgrade():
|
||||
# ### commands auto generated by Alembic - please adjust! ###
|
||||
with op.batch_alter_table('grow_logs', schema=None) as batch_op:
|
||||
batch_op.drop_constraint(None, type_='foreignkey')
|
||||
batch_op.drop_column('media_id')
|
||||
|
||||
op.create_table('plant_updates',
|
||||
sa.Column('id', mysql.INTEGER(), autoincrement=True, nullable=False),
|
||||
sa.Column('growlog_id', mysql.INTEGER(), autoincrement=False, nullable=False),
|
||||
sa.Column('description', mysql.TEXT(), nullable=True),
|
||||
sa.Column('created_at', mysql.DATETIME(), nullable=False),
|
||||
sa.ForeignKeyConstraint(['growlog_id'], ['grow_logs.id'], name=op.f('plant_updates_ibfk_1')),
|
||||
sa.PrimaryKeyConstraint('id'),
|
||||
mysql_collate='utf8mb4_0900_ai_ci',
|
||||
mysql_default_charset='utf8mb4',
|
||||
mysql_engine='InnoDB'
|
||||
)
|
||||
op.create_table('update_images',
|
||||
sa.Column('id', mysql.INTEGER(), autoincrement=True, nullable=False),
|
||||
sa.Column('update_id', mysql.INTEGER(), autoincrement=False, nullable=False),
|
||||
sa.Column('media_id', mysql.INTEGER(), autoincrement=False, nullable=False),
|
||||
sa.Column('created_at', mysql.DATETIME(), nullable=False),
|
||||
sa.ForeignKeyConstraint(['media_id'], ['media.id'], name=op.f('update_images_ibfk_2')),
|
||||
sa.ForeignKeyConstraint(['update_id'], ['plant_updates.id'], name=op.f('update_images_ibfk_1')),
|
||||
sa.PrimaryKeyConstraint('id'),
|
||||
mysql_collate='utf8mb4_0900_ai_ci',
|
||||
mysql_default_charset='utf8mb4',
|
||||
mysql_engine='InnoDB'
|
||||
)
|
||||
# ### end Alembic commands ###
|
28
migrations/versions/fa34eb3f6084_auto_migrate.py
Normal file
28
migrations/versions/fa34eb3f6084_auto_migrate.py
Normal file
@ -0,0 +1,28 @@
|
||||
"""auto-migrate
|
||||
|
||||
Revision ID: fa34eb3f6084
|
||||
Revises: 06234a515bde
|
||||
Create Date: 2025-06-30 09:44:53.445644
|
||||
|
||||
"""
|
||||
from alembic import op
|
||||
import sqlalchemy as sa
|
||||
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision = 'fa34eb3f6084'
|
||||
down_revision = '06234a515bde'
|
||||
branch_labels = None
|
||||
depends_on = None
|
||||
|
||||
|
||||
def upgrade():
|
||||
# ### commands auto generated by Alembic - please adjust! ###
|
||||
pass
|
||||
# ### end Alembic commands ###
|
||||
|
||||
|
||||
def downgrade():
|
||||
# ### commands auto generated by Alembic - please adjust! ###
|
||||
pass
|
||||
# ### end Alembic commands ###
|
28
migrations/versions/faeca4f53b04_auto_migrate.py
Normal file
28
migrations/versions/faeca4f53b04_auto_migrate.py
Normal file
@ -0,0 +1,28 @@
|
||||
"""auto-migrate
|
||||
|
||||
Revision ID: faeca4f53b04
|
||||
Revises: 076bfc1a441b
|
||||
Create Date: 2025-06-30 08:27:15.001657
|
||||
|
||||
"""
|
||||
from alembic import op
|
||||
import sqlalchemy as sa
|
||||
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision = 'faeca4f53b04'
|
||||
down_revision = '076bfc1a441b'
|
||||
branch_labels = None
|
||||
depends_on = None
|
||||
|
||||
|
||||
def upgrade():
|
||||
# ### commands auto generated by Alembic - please adjust! ###
|
||||
pass
|
||||
# ### end Alembic commands ###
|
||||
|
||||
|
||||
def downgrade():
|
||||
# ### commands auto generated by Alembic - please adjust! ###
|
||||
pass
|
||||
# ### end Alembic commands ###
|
@ -1,6 +1,17 @@
|
||||
{
|
||||
"name": "admin",
|
||||
"version": "0.1.1",
|
||||
"description": "Admin panel plugin for Nature In Pots",
|
||||
"entry_point": "register_cli"
|
||||
"name": "Admin",
|
||||
"version": "0.1.0",
|
||||
"author": "Bryson Shepard <bryson@natureinpots.com>",
|
||||
"description": "Provides the administrative UI and analytics hooks.",
|
||||
"module": "plugins.admin",
|
||||
"routes": {
|
||||
"module": "plugins.admin.routes",
|
||||
"blueprint": "bp",
|
||||
"url_prefix": "/admin"
|
||||
},
|
||||
"models": [
|
||||
"plugins.admin.models"
|
||||
],
|
||||
"license": "Proprietary",
|
||||
"repository": "https://github.com/your-org/your-app"
|
||||
}
|
||||
|
@ -11,7 +11,7 @@ from datetime import datetime, timedelta
|
||||
|
||||
from app import db
|
||||
from plugins.auth.models import User
|
||||
from plugins.growlog.models import GrowLog
|
||||
from plugins.plant.growlog.models import GrowLog
|
||||
from plugins.plant.models import Plant
|
||||
from plugins.admin.models import AnalyticsEvent
|
||||
from .forms import UserForm
|
||||
|
@ -1,5 +1,5 @@
|
||||
{# plugins/admin/templates/admin/dashboard.html #}
|
||||
{% extends 'core_ui/base.html' %}
|
||||
{% extends 'core/base.html' %}
|
||||
{% block title %}Admin Dashboard – Nature In Pots{% endblock %}
|
||||
|
||||
{% block styles %}
|
||||
|
@ -1,4 +1,4 @@
|
||||
{% extends 'core_ui/base.html' %}
|
||||
{% extends 'core/base.html' %}
|
||||
{% block title %}{{ action }} User – Admin – Nature In Pots{% endblock %}
|
||||
|
||||
{% block content %}
|
||||
|
@ -1,4 +1,4 @@
|
||||
{% extends 'core_ui/base.html' %}
|
||||
{% extends 'core/base.html' %}
|
||||
{% block title %}Users – Admin – Nature In Pots{% endblock %}
|
||||
{% block content %}
|
||||
<h1>Users</h1>
|
||||
|
15
plugins/auth/forms.py
Normal file
15
plugins/auth/forms.py
Normal file
@ -0,0 +1,15 @@
|
||||
from flask_wtf import FlaskForm
|
||||
from wtforms import StringField, PasswordField, SubmitField
|
||||
from wtforms.validators import DataRequired, Email, Length, EqualTo
|
||||
|
||||
class RegistrationForm(FlaskForm):
|
||||
username = StringField('Username', validators=[DataRequired(), Length(min=3, max=25)])
|
||||
email = StringField('Email', validators=[DataRequired(), Email()])
|
||||
password = PasswordField('Password', validators=[DataRequired(), Length(min=6)])
|
||||
confirm = PasswordField('Confirm Password', validators=[DataRequired(), EqualTo('password')])
|
||||
submit = SubmitField('Register')
|
||||
|
||||
class LoginForm(FlaskForm):
|
||||
email = StringField('Email', validators=[DataRequired(), Email()])
|
||||
password = PasswordField('Password', validators=[DataRequired()])
|
||||
submit = SubmitField('Login')
|
@ -1,7 +1,9 @@
|
||||
# File: plugins/auth/models.py
|
||||
|
||||
from werkzeug.security import generate_password_hash, check_password_hash
|
||||
from flask_login import UserMixin
|
||||
from datetime import datetime
|
||||
from app import db
|
||||
from app import db, login_manager
|
||||
|
||||
class User(db.Model, UserMixin):
|
||||
__tablename__ = 'users'
|
||||
@ -15,14 +17,10 @@ class User(db.Model, UserMixin):
|
||||
excluded_from_analytics = db.Column(db.Boolean, default=False)
|
||||
created_at = db.Column(db.DateTime, default=datetime.utcnow)
|
||||
|
||||
# Soft-delete flag
|
||||
is_deleted = db.Column(db.Boolean, nullable=False, default=False)
|
||||
# Permanent ban flag
|
||||
is_banned = db.Column(db.Boolean, nullable=False, default=False)
|
||||
# Temporary suspension until this UTC datetime
|
||||
suspended_until = db.Column(db.DateTime, nullable=True)
|
||||
|
||||
# Use back_populates, not backref
|
||||
submitted_submissions = db.relationship(
|
||||
"Submission",
|
||||
foreign_keys="Submission.user_id",
|
||||
@ -42,3 +40,20 @@ class User(db.Model, UserMixin):
|
||||
|
||||
def check_password(self, password):
|
||||
return check_password_hash(self.password_hash, password)
|
||||
|
||||
|
||||
# ─── Flask-Login integration ─────────────────────────────────────────────────
|
||||
|
||||
def _load_user(user_id):
|
||||
"""Return a User by ID, or None."""
|
||||
if not str(user_id).isdigit():
|
||||
return None
|
||||
return User.query.get(int(user_id))
|
||||
|
||||
|
||||
def register_user_loader(app):
|
||||
"""
|
||||
Hook into Flask-Login to register the user_loader.
|
||||
Called by our JSON-driven loader if declared in plugin.json.
|
||||
"""
|
||||
login_manager.user_loader(_load_user)
|
||||
|
@ -1,6 +1,27 @@
|
||||
{
|
||||
"name": "auth",
|
||||
"version": "1.0.0",
|
||||
"description": "User authentication and authorization plugin",
|
||||
"entry_point": null
|
||||
"name": "Auth",
|
||||
"version": "0.1.0",
|
||||
"author": "Bryson Shepard <bryson@natureinpots.com>",
|
||||
"description": "Handles user registration, login, logout, and invitation flows.",
|
||||
"module": "plugins.auth",
|
||||
"routes": {
|
||||
"module": "plugins.auth.routes",
|
||||
"blueprint": "bp",
|
||||
"url_prefix": "/auth"
|
||||
},
|
||||
"models": [
|
||||
"plugins.auth.models"
|
||||
],
|
||||
"template_globals": [
|
||||
{
|
||||
"name": "current_user",
|
||||
"callable": "flask_login.current_user"
|
||||
}
|
||||
],
|
||||
"user_loader": {
|
||||
"module": "plugins.auth.models",
|
||||
"callable": "register_user_loader"
|
||||
},
|
||||
"license": "Proprietary",
|
||||
"repository": "https://github.com/your-org/your-app"
|
||||
}
|
@ -1,51 +1,48 @@
|
||||
from flask import Blueprint, render_template, request, redirect, url_for, flash, current_app
|
||||
from flask_login import login_user, logout_user, login_required
|
||||
from werkzeug.security import check_password_hash
|
||||
from app import db
|
||||
from .models import User
|
||||
# File: plugins/auth/routes.py
|
||||
|
||||
from flask import Blueprint, render_template, redirect, flash, url_for, request
|
||||
from flask_login import login_user, logout_user, login_required
|
||||
from .models import User
|
||||
from .forms import LoginForm, RegistrationForm
|
||||
from app import db
|
||||
|
||||
bp = Blueprint(
|
||||
'auth',
|
||||
__name__,
|
||||
template_folder='templates/auth', # ← now points at plugins/auth/templates/auth/
|
||||
url_prefix='/auth'
|
||||
)
|
||||
|
||||
bp = Blueprint('auth', __name__, template_folder='templates')
|
||||
|
||||
@bp.route('/login', methods=['GET', 'POST'])
|
||||
def login():
|
||||
if request.method == 'POST':
|
||||
email = request.form['email']
|
||||
password = request.form['password']
|
||||
user = User.query.filter_by(email=email).first()
|
||||
if user and check_password_hash(user.password_hash, password):
|
||||
form = LoginForm()
|
||||
if form.validate_on_submit():
|
||||
user = User.query.filter_by(email=form.email.data).first()
|
||||
if user and user.check_password(form.password.data):
|
||||
login_user(user)
|
||||
flash('Logged in successfully.', 'success')
|
||||
return redirect(url_for('core_ui.home'))
|
||||
else:
|
||||
flash('Invalid credentials.', 'danger')
|
||||
return render_template('auth/login.html')
|
||||
return redirect(url_for('home'))
|
||||
flash('Invalid email or password.', 'danger')
|
||||
return render_template('login.html', form=form) # resolves to templates/auth/login.html
|
||||
|
||||
|
||||
@bp.route('/logout')
|
||||
@login_required
|
||||
def logout():
|
||||
logout_user()
|
||||
flash('Logged out.', 'info')
|
||||
return redirect(url_for('core_ui.home'))
|
||||
return redirect(url_for('home'))
|
||||
|
||||
|
||||
@bp.route('/register', methods=['GET', 'POST'])
|
||||
def register():
|
||||
if not current_app.config.get('ALLOW_REGISTRATION', True):
|
||||
flash('Registration is currently closed.', 'warning')
|
||||
return redirect(url_for('auth.login'))
|
||||
|
||||
if request.method == 'POST':
|
||||
email = request.form['email']
|
||||
password = request.form['password']
|
||||
|
||||
existing_user = User.query.filter_by(email=email).first()
|
||||
if existing_user:
|
||||
flash('Email already registered.', 'warning')
|
||||
else:
|
||||
user = User(email=email)
|
||||
user.set_password(password)
|
||||
form = RegistrationForm()
|
||||
if form.validate_on_submit():
|
||||
user = User(email=form.email.data)
|
||||
user.set_password(form.password.data)
|
||||
db.session.add(user)
|
||||
db.session.commit()
|
||||
flash('Account created. You can now log in.', 'success')
|
||||
flash('Account created! Please log in.', 'success')
|
||||
return redirect(url_for('auth.login'))
|
||||
|
||||
return render_template('auth/register.html')
|
||||
return render_template('register.html', form=form) # resolves to templates/auth/register.html
|
||||
|
@ -1,4 +1,4 @@
|
||||
{% extends 'core_ui/base.html' %}
|
||||
{% extends 'core/base.html' %}
|
||||
{% block content %}
|
||||
<h2>Login</h2>
|
||||
<form method="POST" action="{{ url_for('auth.login') }}">
|
||||
|
@ -1,4 +1,4 @@
|
||||
{% extends 'core_ui/base.html' %}
|
||||
{% extends 'core/base.html' %}
|
||||
{% block title %}Register{% endblock %}
|
||||
{% block content %}
|
||||
<h2>Register</h2>
|
||||
|
@ -1,6 +1,12 @@
|
||||
{
|
||||
"name": "cli",
|
||||
"version": "1.0.0",
|
||||
"description": "Command-line interface plugin",
|
||||
"entry_point": null
|
||||
"name": "CLI",
|
||||
"version": "0.1.0",
|
||||
"author": "Bryson Shepard <bryson@natureinpots.com>",
|
||||
"description": "Adds custom Flask CLI commands for seeding and maintenance.",
|
||||
"module": "plugins.cli",
|
||||
"cli": {
|
||||
"module": "plugins.cli.seed",
|
||||
"callable": "preload_data_cli"
|
||||
},
|
||||
"license": "Proprietary"
|
||||
}
|
@ -1,6 +0,0 @@
|
||||
{
|
||||
"name": "core_ui",
|
||||
"version": "1.1.0",
|
||||
"description": "Media rendering macros and styling helpers",
|
||||
"entry_point": null
|
||||
}
|
@ -1,12 +0,0 @@
|
||||
from flask import Blueprint, render_template
|
||||
from flask_login import login_required, current_user
|
||||
|
||||
bp = Blueprint('core_ui', __name__, template_folder='templates')
|
||||
|
||||
@bp.route('/')
|
||||
def home():
|
||||
return render_template('core_ui/home.html')
|
||||
|
||||
@bp.route('/health')
|
||||
def health():
|
||||
return 'OK', 200
|
@ -1,25 +0,0 @@
|
||||
# plugins/growlog/forms.py
|
||||
|
||||
from flask_wtf import FlaskForm
|
||||
from wtforms import SelectField, StringField, TextAreaField, BooleanField, SubmitField
|
||||
from wtforms.validators import DataRequired, Length
|
||||
|
||||
class GrowLogForm(FlaskForm):
|
||||
plant_uuid = SelectField(
|
||||
'Plant',
|
||||
choices=[], # injected in view
|
||||
validators=[DataRequired()]
|
||||
)
|
||||
|
||||
event_type = SelectField('Event Type', choices=[
|
||||
('water', 'Watered'),
|
||||
('fertilizer', 'Fertilized'),
|
||||
('repot', 'Repotted'),
|
||||
('note', 'Note'),
|
||||
('pest', 'Pest Observed')
|
||||
], validators=[DataRequired()])
|
||||
|
||||
title = StringField('Title', validators=[Length(max=255)])
|
||||
notes = TextAreaField('Notes', validators=[Length(max=1000)])
|
||||
is_public = BooleanField('Public?')
|
||||
submit = SubmitField('Add Log')
|
@ -1,91 +0,0 @@
|
||||
from datetime import datetime
|
||||
from app import db
|
||||
|
||||
class GrowLog(db.Model):
|
||||
__tablename__ = "grow_logs"
|
||||
__table_args__ = {"extend_existing": True}
|
||||
|
||||
id = db.Column(db.Integer, primary_key=True)
|
||||
plant_id = db.Column(db.Integer, db.ForeignKey("plant.id"), nullable=False)
|
||||
event_type = db.Column(db.String(50), nullable=False)
|
||||
title = db.Column(db.String(255), nullable=True)
|
||||
notes = db.Column(db.Text, nullable=True)
|
||||
is_public = db.Column(db.Boolean, default=False, nullable=False)
|
||||
created_at = db.Column(db.DateTime, default=datetime.utcnow, nullable=False)
|
||||
updated_at = db.Column(
|
||||
db.DateTime,
|
||||
default=datetime.utcnow,
|
||||
onupdate=datetime.utcnow,
|
||||
nullable=False
|
||||
)
|
||||
|
||||
# ↔ images uploaded directly to this GrowLog
|
||||
media_items = db.relationship(
|
||||
"plugins.media.models.Media",
|
||||
back_populates="growlog",
|
||||
foreign_keys="plugins.media.models.Media.growlog_id",
|
||||
lazy="dynamic",
|
||||
cascade="all, delete-orphan",
|
||||
)
|
||||
|
||||
# ↔ child updates
|
||||
updates = db.relationship(
|
||||
"plugins.growlog.models.PlantUpdate",
|
||||
back_populates="growlog",
|
||||
foreign_keys="plugins.growlog.models.PlantUpdate.growlog_id",
|
||||
lazy="dynamic",
|
||||
cascade="all, delete-orphan",
|
||||
)
|
||||
|
||||
|
||||
class PlantUpdate(db.Model):
|
||||
__tablename__ = "plant_updates"
|
||||
__table_args__ = {"extend_existing": True}
|
||||
|
||||
id = db.Column(db.Integer, primary_key=True)
|
||||
growlog_id = db.Column(db.Integer, db.ForeignKey("grow_logs.id"), nullable=False)
|
||||
description = db.Column(db.Text, nullable=True)
|
||||
created_at = db.Column(db.DateTime, default=datetime.utcnow, nullable=False)
|
||||
|
||||
# ↔ parent GrowLog.updates
|
||||
growlog = db.relationship(
|
||||
"plugins.growlog.models.GrowLog",
|
||||
back_populates="updates",
|
||||
foreign_keys=[growlog_id],
|
||||
lazy="joined",
|
||||
)
|
||||
|
||||
# ↔ images attached via UpdateImage join table
|
||||
media_items = db.relationship(
|
||||
"plugins.growlog.models.UpdateImage",
|
||||
back_populates="update",
|
||||
foreign_keys="plugins.growlog.models.UpdateImage.update_id",
|
||||
lazy="dynamic",
|
||||
cascade="all, delete-orphan",
|
||||
)
|
||||
|
||||
|
||||
class UpdateImage(db.Model):
|
||||
__tablename__ = "update_images"
|
||||
__table_args__ = {"extend_existing": True}
|
||||
|
||||
id = db.Column(db.Integer, primary_key=True)
|
||||
update_id = db.Column(db.Integer, db.ForeignKey("plant_updates.id"), nullable=False)
|
||||
media_id = db.Column(db.Integer, db.ForeignKey("media.id"), nullable=False)
|
||||
created_at = db.Column(db.DateTime, default=datetime.utcnow, nullable=False)
|
||||
|
||||
# ↔ PlantUpdate.media_items
|
||||
update = db.relationship(
|
||||
"plugins.growlog.models.PlantUpdate",
|
||||
back_populates="media_items",
|
||||
foreign_keys=[update_id],
|
||||
lazy="joined",
|
||||
)
|
||||
|
||||
# ↔ the actual Media record
|
||||
media = db.relationship(
|
||||
"plugins.media.models.Media",
|
||||
backref=db.backref("update_images", lazy="dynamic"),
|
||||
foreign_keys=[media_id],
|
||||
lazy="joined",
|
||||
)
|
@ -1,6 +0,0 @@
|
||||
{
|
||||
"name": "growlog",
|
||||
"version": "1.0.0",
|
||||
"description": "Tracks time-based plant care logs",
|
||||
"entry_point": null
|
||||
}
|
@ -1,4 +1,5 @@
|
||||
# plugins/media/models.py
|
||||
|
||||
from datetime import datetime
|
||||
from flask import url_for
|
||||
from app import db
|
||||
@ -17,45 +18,40 @@ class Media(db.Model):
|
||||
plant_id = db.Column(db.Integer, db.ForeignKey("plant.id"), nullable=True)
|
||||
growlog_id = db.Column(db.Integer, db.ForeignKey("grow_logs.id"), nullable=True)
|
||||
created_at = db.Column(db.DateTime, default=datetime.utcnow, nullable=False)
|
||||
|
||||
# You already have a file_url column in your DB
|
||||
file_url = db.Column(db.String(512), nullable=False)
|
||||
|
||||
hearts = db.relationship(
|
||||
"plugins.media.models.ImageHeart",
|
||||
"ImageHeart",
|
||||
backref="media",
|
||||
lazy="dynamic",
|
||||
cascade="all, delete-orphan",
|
||||
)
|
||||
featured_entries = db.relationship(
|
||||
"plugins.media.models.FeaturedImage",
|
||||
"FeaturedImage",
|
||||
backref="media",
|
||||
lazy="dynamic",
|
||||
cascade="all, delete-orphan",
|
||||
)
|
||||
|
||||
# ↔ Media items attached to a Plant
|
||||
plant = db.relationship(
|
||||
"plugins.plant.models.Plant",
|
||||
"Plant",
|
||||
back_populates="media_items",
|
||||
foreign_keys=[plant_id],
|
||||
lazy="joined",
|
||||
)
|
||||
|
||||
# ↔ Media items attached to a GrowLog
|
||||
growlog = db.relationship(
|
||||
"plugins.growlog.models.GrowLog",
|
||||
"GrowLog",
|
||||
back_populates="media_items",
|
||||
foreign_keys=[growlog_id],
|
||||
lazy="joined",
|
||||
)
|
||||
|
||||
def __init__(self, *args, **kwargs):
|
||||
"""
|
||||
Infer plugin & related_id from whichever FK is set,
|
||||
and build the file_url path immediately so that INSERT
|
||||
never tries to write plugin=None or related_id=None.
|
||||
"""
|
||||
super().__init__(*args, **kwargs)
|
||||
|
||||
# If they passed plant_id or growlog_id in kwargs, pick one:
|
||||
if self.plant_id:
|
||||
self.plugin = "plant"
|
||||
self.related_id = self.plant_id
|
||||
@ -63,7 +59,6 @@ class Media(db.Model):
|
||||
self.plugin = "growlog"
|
||||
self.related_id = self.growlog_id
|
||||
else:
|
||||
# fallback (you might choose to raise instead)
|
||||
self.plugin = kwargs.get("plugin", "")
|
||||
self.related_id = kwargs.get("related_id", 0)
|
||||
|
||||
@ -81,6 +76,16 @@ class Media(db.Model):
|
||||
)
|
||||
|
||||
|
||||
class ZipJob(db.Model):
|
||||
__tablename__ = 'zip_jobs'
|
||||
id = db.Column(db.Integer, primary_key=True)
|
||||
user_id = db.Column(db.Integer, nullable=False)
|
||||
filename = db.Column(db.String(255), nullable=False)
|
||||
created_at = db.Column(db.DateTime, default=datetime.utcnow)
|
||||
status = db.Column(db.String(20), default='queued') # queued|processing|done|failed
|
||||
error = db.Column(db.Text, nullable=True)
|
||||
|
||||
|
||||
class ImageHeart(db.Model):
|
||||
__tablename__ = "image_hearts"
|
||||
__table_args__ = {"extend_existing": True}
|
||||
|
@ -1,6 +1,23 @@
|
||||
{
|
||||
"name": "media",
|
||||
"version": "1.0.0",
|
||||
"description": "Upload and attach media to plants and grow logs",
|
||||
"entry_point": null
|
||||
"name": "Media",
|
||||
"version": "0.1.0",
|
||||
"author": "Bryson Shepard <bryson@natureinpots.com>",
|
||||
"description": "Manages image uploads, storage, and URL generation.",
|
||||
"module": "plugins.media",
|
||||
"routes": {
|
||||
"module": "plugins.media.routes",
|
||||
"blueprint": "bp",
|
||||
"url_prefix": "/media"
|
||||
},
|
||||
"models": [
|
||||
"plugins.media.models"
|
||||
],
|
||||
"template_globals": [
|
||||
{
|
||||
"name": "generate_image_url",
|
||||
"callable": "plugins.media.routes.generate_image_url"
|
||||
}
|
||||
],
|
||||
"license": "Proprietary",
|
||||
"repository": "https://github.com/your-org/your-app"
|
||||
}
|
@ -1,9 +1,6 @@
|
||||
import os
|
||||
import zipfile
|
||||
import uuid
|
||||
import io
|
||||
import traceback
|
||||
import tempfile
|
||||
import logging
|
||||
from datetime import datetime
|
||||
from werkzeug.utils import secure_filename
|
||||
from werkzeug.datastructures import FileStorage
|
||||
@ -13,10 +10,11 @@ from flask import (
|
||||
jsonify, abort
|
||||
)
|
||||
from flask_login import login_required, current_user
|
||||
from PIL import Image, ExifTags
|
||||
from PIL import Image, UnidentifiedImageError
|
||||
|
||||
from app import db
|
||||
from .models import Media, ImageHeart, FeaturedImage
|
||||
from .models import Media, ZipJob, ImageHeart, FeaturedImage
|
||||
from .tasks import process_zip
|
||||
|
||||
bp = Blueprint(
|
||||
"media",
|
||||
@ -25,43 +23,91 @@ bp = Blueprint(
|
||||
template_folder="templates"
|
||||
)
|
||||
|
||||
# ─── Constants ──────────────────────────────────────────────────────────────
|
||||
|
||||
IMAGE_EXTS = {".jpg", ".jpeg", ".png", ".gif", ".webp"}
|
||||
DOC_EXTS = {".pdf", ".txt", ".csv"}
|
||||
ZIP_EXT = ".zip"
|
||||
MAX_ZIP_FILES = 1000
|
||||
MAX_IMAGE_PIXELS = 8000 * 8000 # ~64M pixels
|
||||
|
||||
# ─── Context Processor ──────────────────────────────────────────────────────
|
||||
|
||||
# ─── Context Processor ─────────────────────────────────────────────────────────
|
||||
@bp.app_context_processor
|
||||
def utility_processor():
|
||||
def inject_helpers():
|
||||
"""Expose generate_image_url in all media templates."""
|
||||
return dict(generate_image_url=generate_image_url)
|
||||
|
||||
# ─── Helper Functions ───────────────────────────────────────────────────────
|
||||
|
||||
# ─── Helpers & Config ─────────────────────────────────────────────────────────
|
||||
def allowed_file(filename):
|
||||
ext = filename.rsplit(".", 1)[-1].lower() if "." in filename else ""
|
||||
return ext in current_app.config.get(
|
||||
"ALLOWED_EXTENSIONS",
|
||||
{"png", "jpg", "jpeg", "gif", "webp"}
|
||||
)
|
||||
|
||||
|
||||
def get_upload_path(plugin: str, related_id: int):
|
||||
def allowed_file(filename: str) -> bool:
|
||||
"""
|
||||
Return (absolute_dir, subdir) where uploads are stored:
|
||||
<UPLOAD_FOLDER>/<plugin>/<related_id>/
|
||||
Return True if the file extension is allowed.
|
||||
"""
|
||||
ext = os.path.splitext(filename)[1].lower()
|
||||
allowed = current_app.config.get(
|
||||
"ALLOWED_EXTENSIONS",
|
||||
IMAGE_EXTS | DOC_EXTS | {ZIP_EXT}
|
||||
)
|
||||
return ext in allowed
|
||||
|
||||
def get_upload_path(plugin: str, related_id: int) -> (str, str):
|
||||
"""
|
||||
Build and return (absolute_dir, relative_subdir) under UPLOAD_FOLDER.
|
||||
"""
|
||||
base = current_app.config["UPLOAD_FOLDER"]
|
||||
subdir = os.path.join(plugin, str(related_id))
|
||||
abs_dir = os.path.join(base, subdir)
|
||||
abs_dir = os.path.abspath(os.path.join(base, subdir))
|
||||
if not abs_dir.startswith(os.path.abspath(base) + os.sep):
|
||||
raise RuntimeError("Upload path escapes base directory")
|
||||
os.makedirs(abs_dir, exist_ok=True)
|
||||
return abs_dir, subdir
|
||||
|
||||
def validate_image(path: str) -> bool:
|
||||
"""
|
||||
Verify image integrity and enforce pixel-size limit.
|
||||
"""
|
||||
try:
|
||||
with Image.open(path) as img:
|
||||
img.verify()
|
||||
w, h = Image.open(path).size
|
||||
return w * h <= MAX_IMAGE_PIXELS
|
||||
except (UnidentifiedImageError, IOError):
|
||||
return False
|
||||
|
||||
def _strip_exif(image: Image.Image) -> Image.Image:
|
||||
def validate_pdf(path: str) -> bool:
|
||||
"""
|
||||
Quick header check for PDF files.
|
||||
"""
|
||||
try:
|
||||
with open(path, "rb") as f:
|
||||
return f.read(5) == b"%PDF-"
|
||||
except IOError:
|
||||
return False
|
||||
|
||||
def validate_text(path: str) -> bool:
|
||||
"""
|
||||
Ensure the file is valid UTF-8 text/CSV.
|
||||
"""
|
||||
try:
|
||||
with open(path, "rb") as f:
|
||||
f.read(1024).decode("utf-8")
|
||||
return True
|
||||
except Exception:
|
||||
return False
|
||||
|
||||
def strip_exif(image: Image.Image) -> Image.Image:
|
||||
"""
|
||||
Rotate per EXIF orientation and strip metadata.
|
||||
"""
|
||||
try:
|
||||
exif = image._getexif()
|
||||
orient_key = next(
|
||||
(k for k, v in ExifTags.TAGS.items() if v == "Orientation"),
|
||||
None
|
||||
if exif:
|
||||
orientation_key = next(
|
||||
(k for k, v in Image.ExifTags.TAGS.items()
|
||||
if v == "Orientation"), None
|
||||
)
|
||||
if exif and orient_key in exif:
|
||||
o = exif[orient_key]
|
||||
o = exif.get(orientation_key)
|
||||
if o == 3:
|
||||
image = image.rotate(180, expand=True)
|
||||
elif o == 6:
|
||||
@ -70,8 +116,259 @@ def _strip_exif(image: Image.Image) -> Image.Image:
|
||||
image = image.rotate(90, expand=True)
|
||||
except Exception:
|
||||
pass
|
||||
return image
|
||||
data = list(image.getdata())
|
||||
clean = Image.new(image.mode, image.size)
|
||||
clean.putdata(data)
|
||||
return clean
|
||||
|
||||
def generate_image_url(media: Media):
|
||||
"""
|
||||
Given a Media instance, return its public URL or a placeholder.
|
||||
"""
|
||||
if media and media.file_url:
|
||||
return url_for(
|
||||
"media.serve_context_media",
|
||||
context=media.plugin,
|
||||
context_id=media.related_id,
|
||||
filename=media.filename
|
||||
)
|
||||
# fallback placeholder
|
||||
w, h = current_app.config.get("STANDARD_IMG_SIZE", (300, 200))
|
||||
return f"https://placehold.co/{w}x{h}"
|
||||
|
||||
# ─── Core Media Routes ──────────────────────────────────────────────────────
|
||||
|
||||
@bp.route("/upload", methods=["POST"])
|
||||
@login_required
|
||||
def upload_media():
|
||||
"""
|
||||
Accept images, PDFs, text/CSV inline; enqueue ZIPs for async processing.
|
||||
"""
|
||||
uploaded: FileStorage = request.files.get("media")
|
||||
if not uploaded or uploaded.filename == "":
|
||||
flash("No file selected.", "warning")
|
||||
return redirect(request.referrer or url_for("home"))
|
||||
|
||||
filename = secure_filename(uploaded.filename)
|
||||
ext = os.path.splitext(filename)[1].lower()
|
||||
if not allowed_file(filename):
|
||||
flash("Unsupported file type.", "danger")
|
||||
return redirect(request.referrer)
|
||||
|
||||
# Determine plugin & ID
|
||||
plugin = request.form.get("plugin", "user")
|
||||
related_id = int(request.form.get("related_id", current_user.id))
|
||||
|
||||
# Save location
|
||||
abs_dir, subdir = get_upload_path(plugin, related_id)
|
||||
save_path = os.path.join(abs_dir, filename)
|
||||
uploaded.save(save_path)
|
||||
|
||||
# Validate & post-process
|
||||
if ext in IMAGE_EXTS:
|
||||
if not validate_image(save_path):
|
||||
os.remove(save_path)
|
||||
flash("Invalid or oversized image.", "danger")
|
||||
return redirect(request.referrer)
|
||||
with Image.open(save_path) as img:
|
||||
clean = strip_exif(img)
|
||||
clean.save(save_path)
|
||||
|
||||
elif ext == ".pdf":
|
||||
if not validate_pdf(save_path):
|
||||
os.remove(save_path)
|
||||
flash("Invalid PDF.", "danger")
|
||||
return redirect(request.referrer)
|
||||
|
||||
elif ext in {".txt", ".csv"}:
|
||||
if not validate_text(save_path):
|
||||
os.remove(save_path)
|
||||
flash("Invalid text/CSV.", "danger")
|
||||
return redirect(request.referrer)
|
||||
|
||||
elif ext == ZIP_EXT:
|
||||
# Create and enqueue a ZipJob
|
||||
job = ZipJob(user_id=current_user.id, filename=filename)
|
||||
db.session.add(job)
|
||||
db.session.commit()
|
||||
process_zip.delay(job.id, save_path)
|
||||
flash("ZIP received; processing in background.", "info")
|
||||
return redirect(url_for("media.upload_status", job_id=job.id))
|
||||
|
||||
# Record small-file upload in DB
|
||||
media = Media(
|
||||
plugin = plugin,
|
||||
related_id = related_id,
|
||||
filename = filename,
|
||||
file_url = f"{subdir}/{filename}",
|
||||
uploader_id = current_user.id,
|
||||
uploaded_at = datetime.utcnow()
|
||||
)
|
||||
db.session.add(media)
|
||||
db.session.commit()
|
||||
flash("File uploaded successfully.", "success")
|
||||
return redirect(request.referrer or url_for("home"))
|
||||
|
||||
@bp.route("/upload/<int:job_id>/status", methods=["GET"])
|
||||
@login_required
|
||||
def upload_status(job_id: int):
|
||||
"""
|
||||
Return JSON status for a background ZIP processing job.
|
||||
"""
|
||||
job = ZipJob.query.get_or_404(job_id)
|
||||
if job.user_id != current_user.id:
|
||||
abort(403)
|
||||
return jsonify({
|
||||
"job_id": job.id,
|
||||
"status": job.status,
|
||||
"error": job.error
|
||||
})
|
||||
|
||||
@bp.route("/<context>/<int:context_id>/<filename>")
|
||||
def serve_context_media(context: str, context_id: int, filename: str):
|
||||
"""
|
||||
Serve a file from UPLOAD_FOLDER/<plugin>/<id>/<filename>,
|
||||
with path‐traversal guard and DB check.
|
||||
"""
|
||||
# Normalize plugin name
|
||||
valid = {"user", "plant", "growlog", "vendor"}
|
||||
if context in valid:
|
||||
plugin_name = context
|
||||
elif context.endswith("s") and context[:-1] in valid:
|
||||
plugin_name = context[:-1]
|
||||
else:
|
||||
abort(404)
|
||||
|
||||
# Sanitize filename
|
||||
safe_filename = secure_filename(filename)
|
||||
if safe_filename != filename:
|
||||
abort(404)
|
||||
|
||||
# Build and verify path
|
||||
base_dir = current_app.config["UPLOAD_FOLDER"]
|
||||
dir_path = os.path.join(base_dir, plugin_name, str(context_id))
|
||||
full_path = os.path.abspath(os.path.join(dir_path, safe_filename))
|
||||
if not full_path.startswith(os.path.abspath(base_dir) + os.sep):
|
||||
abort(404)
|
||||
|
||||
# Confirm DB row
|
||||
Media.query.filter_by(
|
||||
plugin = plugin_name,
|
||||
related_id = context_id,
|
||||
filename = filename
|
||||
).first_or_404()
|
||||
|
||||
return send_from_directory(dir_path, filename)
|
||||
|
||||
# ─── Utility Routes ─────────────────────────────────────────────────────────
|
||||
|
||||
@bp.route("/heart/<int:media_id>", methods=["POST"])
|
||||
@login_required
|
||||
def toggle_heart(media_id: int):
|
||||
"""
|
||||
Toggle a “heart” (like) on an image for the current user.
|
||||
"""
|
||||
existing = ImageHeart.query.filter_by(
|
||||
user_id = current_user.id,
|
||||
media_id = media_id
|
||||
).first()
|
||||
if existing:
|
||||
db.session.delete(existing)
|
||||
db.session.commit()
|
||||
return jsonify(status="unhearted")
|
||||
heart = ImageHeart(user_id=current_user.id, media_id=media_id)
|
||||
db.session.add(heart)
|
||||
db.session.commit()
|
||||
return jsonify(status="hearted")
|
||||
|
||||
@bp.route("/featured/<context>/<int:context_id>/<int:media_id>", methods=["POST"])
|
||||
@login_required
|
||||
def set_featured_image(context: str, context_id: int, media_id: int):
|
||||
"""
|
||||
Mark a single image as featured for a given context.
|
||||
"""
|
||||
valid = {"plant", "growlog", "user", "vendor"}
|
||||
if context in valid:
|
||||
plugin_name = context
|
||||
elif context.endswith("s") and context[:-1] in valid:
|
||||
plugin_name = context[:-1]
|
||||
else:
|
||||
abort(404)
|
||||
|
||||
media = Media.query.filter_by(
|
||||
plugin = plugin_name,
|
||||
related_id = context_id,
|
||||
id = media_id
|
||||
).first_or_404()
|
||||
|
||||
if media.uploader_id != current_user.id and current_user.role != "admin":
|
||||
abort(403)
|
||||
|
||||
FeaturedImage.query.filter_by(
|
||||
context = plugin_name,
|
||||
context_id = context_id
|
||||
).delete()
|
||||
fi = FeaturedImage(
|
||||
media_id = media.id,
|
||||
context = plugin_name,
|
||||
context_id = context_id,
|
||||
is_featured = True
|
||||
)
|
||||
db.session.add(fi)
|
||||
db.session.commit()
|
||||
flash("Featured image updated.", "success")
|
||||
return redirect(request.referrer or url_for("home"))
|
||||
|
||||
@bp.route("/delete/<int:media_id>", methods=["POST"])
|
||||
@login_required
|
||||
def delete_media(media_id: int):
|
||||
"""
|
||||
Delete a media file and its DB record (soft‐delete by permission).
|
||||
"""
|
||||
media = Media.query.get_or_404(media_id)
|
||||
if media.uploader_id != current_user.id and current_user.role != "admin":
|
||||
flash("Not authorized to delete this media.", "danger")
|
||||
return redirect(request.referrer or url_for("home"))
|
||||
|
||||
# Remove file on disk
|
||||
base = current_app.config["UPLOAD_FOLDER"]
|
||||
full = os.path.abspath(os.path.join(base, media.file_url))
|
||||
try:
|
||||
os.remove(full)
|
||||
except OSError:
|
||||
current_app.logger.error(f"Failed to delete file {full}")
|
||||
|
||||
# Remove DB record
|
||||
db.session.delete(media)
|
||||
db.session.commit()
|
||||
flash("Media deleted.", "success")
|
||||
return redirect(request.referrer or url_for("home"))
|
||||
|
||||
@bp.route("/rotate/<int:media_id>", methods=["POST"])
|
||||
@login_required
|
||||
def rotate_media(media_id: int):
|
||||
"""
|
||||
Rotate an image −90° and strip its EXIF metadata.
|
||||
"""
|
||||
media = Media.query.get_or_404(media_id)
|
||||
if media.uploader_id != current_user.id and current_user.role != "admin":
|
||||
abort(403)
|
||||
|
||||
base = current_app.config["UPLOAD_FOLDER"]
|
||||
full = os.path.abspath(os.path.join(base, media.file_url))
|
||||
try:
|
||||
with Image.open(full) as img:
|
||||
rotated = img.rotate(-90, expand=True)
|
||||
clean = strip_exif(rotated)
|
||||
clean.save(full)
|
||||
flash("Image rotated successfully.", "success")
|
||||
except Exception as e:
|
||||
current_app.logger.error(f"Rotation failed for {full}: {e}")
|
||||
flash("Failed to rotate image.", "danger")
|
||||
|
||||
return redirect(request.referrer or url_for("home"))
|
||||
|
||||
# ─── Legacy Helpers for Other Plugins ───────────────────────────────────────
|
||||
|
||||
def _process_upload_file(
|
||||
file: FileStorage,
|
||||
@ -105,26 +402,29 @@ def _process_upload_file(
|
||||
# 5) Build the Media row
|
||||
now = datetime.utcnow()
|
||||
media = Media(
|
||||
plugin=plugin,
|
||||
related_id=related_id,
|
||||
filename=filename,
|
||||
uploaded_at=now,
|
||||
uploader_id=uploader_id,
|
||||
caption=caption,
|
||||
plant_id=plant_id,
|
||||
growlog_id=growlog_id,
|
||||
created_at=now,
|
||||
file_url=file_url
|
||||
plugin = plugin,
|
||||
related_id = related_id,
|
||||
filename = filename,
|
||||
uploaded_at = now,
|
||||
uploader_id = uploader_id,
|
||||
caption = caption,
|
||||
plant_id = plant_id,
|
||||
growlog_id = growlog_id,
|
||||
created_at = now,
|
||||
file_url = file_url
|
||||
)
|
||||
return media
|
||||
|
||||
|
||||
# ─── Exposed Utilities ─────────────────────────────────────────────────────────
|
||||
def save_media_file(file, user_id, **ctx):
|
||||
def save_media_file(file: FileStorage, user_id: int, **ctx) -> Media:
|
||||
"""
|
||||
Simple wrapper for other plugins to save an upload via the same logic.
|
||||
"""
|
||||
return _process_upload_file(file, user_id, **ctx)
|
||||
|
||||
|
||||
def delete_media_file(media: Media):
|
||||
"""
|
||||
Remove a Media record and its file from disk, commit immediately.
|
||||
"""
|
||||
base = current_app.config["UPLOAD_FOLDER"]
|
||||
full = os.path.normpath(os.path.join(base, media.file_url))
|
||||
if os.path.exists(full):
|
||||
@ -132,256 +432,12 @@ def delete_media_file(media: Media):
|
||||
db.session.delete(media)
|
||||
db.session.commit()
|
||||
|
||||
|
||||
def rotate_media_file(media: Media):
|
||||
"""
|
||||
Rotate a Media file −90° in place and commit metadata-only change.
|
||||
"""
|
||||
base = current_app.config["UPLOAD_FOLDER"]
|
||||
full = os.path.normpath(os.path.join(base, media.file_url))
|
||||
with Image.open(full) as img:
|
||||
img.rotate(-90, expand=True).save(full)
|
||||
db.session.commit()
|
||||
|
||||
|
||||
def generate_image_url(media: Media):
|
||||
"""
|
||||
Given a Media instance (or None), return its public URL
|
||||
under our new schema, or a placeholder if no media.
|
||||
"""
|
||||
if media and media.file_url:
|
||||
# use singular context
|
||||
return url_for(
|
||||
"media.serve_context_media",
|
||||
context=media.plugin,
|
||||
context_id=media.related_id,
|
||||
filename=media.filename
|
||||
)
|
||||
# fallback
|
||||
w, h = current_app.config.get("STANDARD_IMG_SIZE", (300, 200))
|
||||
return f"https://placehold.co/{w}x{h}"
|
||||
|
||||
|
||||
@bp.route("/<context>/<int:context_id>/<filename>")
|
||||
def serve_context_media(context, context_id, filename):
|
||||
"""
|
||||
Serve files saved under:
|
||||
<UPLOAD_FOLDER>/<plugin>/<context_id>/<filename>
|
||||
Accepts both singular and trailing-'s' contexts:
|
||||
/media/plant/1/foo.jpg OR /media/plants/1/foo.jpg
|
||||
"""
|
||||
|
||||
# — determine plugin name (always singular) —
|
||||
valid = {"user", "plant", "growlog", "vendor"}
|
||||
if context in valid:
|
||||
plugin = context
|
||||
elif context.endswith("s") and context[:-1] in valid:
|
||||
plugin = context[:-1]
|
||||
else:
|
||||
logging.debug(f"Invalid context '{context}' in URL")
|
||||
abort(404)
|
||||
|
||||
# — build filesystem path —
|
||||
base = current_app.config["UPLOAD_FOLDER"]
|
||||
directory = os.path.join(base, plugin, str(context_id))
|
||||
full_path = os.path.join(directory, filename)
|
||||
|
||||
# — Debug log what we’re about to do —
|
||||
logging.debug(f"[serve_context_media] plugin={plugin!r}, "
|
||||
f"context_id={context_id!r}, filename={filename!r}")
|
||||
logging.debug(f"[serve_context_media] checking DB for media row…")
|
||||
logging.debug(f"[serve_context_media] filesystem path = {full_path!r}, exists? {os.path.exists(full_path)}")
|
||||
|
||||
# — Check the DB row (but don’t abort if missing) —
|
||||
media = Media.query.filter_by(
|
||||
plugin=plugin,
|
||||
related_id=context_id,
|
||||
filename=filename
|
||||
).first()
|
||||
if not media:
|
||||
logging.warning(f"[serve_context_media] no Media DB row for "
|
||||
f"{plugin}/{context_id}/{filename!r}, "
|
||||
"will try serving from disk anyway")
|
||||
|
||||
# — If the file exists on disk, serve it — otherwise 404 —
|
||||
if os.path.exists(full_path):
|
||||
return send_from_directory(directory, filename)
|
||||
|
||||
logging.error(f"[serve_context_media] file not found on disk: {full_path!r}")
|
||||
abort(404)
|
||||
|
||||
|
||||
|
||||
# ─── Legacy / Other Routes (you can leave these for backward compatibility) ────
|
||||
@bp.route("/", methods=["GET"])
|
||||
def media_index():
|
||||
return redirect(url_for("core_ui.home"))
|
||||
|
||||
|
||||
@bp.route("/<plugin>/<filename>")
|
||||
def serve(plugin, filename):
|
||||
# optional legacy support
|
||||
m = Media.query.filter_by(file_url=f"{plugin}s/%/{filename}").first_or_404()
|
||||
date_path = m.uploaded_at.strftime("%Y/%m/%d")
|
||||
disk_dir = os.path.join(
|
||||
current_app.config["UPLOAD_FOLDER"],
|
||||
f"{plugin}s",
|
||||
str(m.plant_id or m.growlog_id),
|
||||
date_path
|
||||
)
|
||||
return send_from_directory(disk_dir, filename)
|
||||
|
||||
|
||||
@bp.route("/<filename>")
|
||||
def media_public(filename):
|
||||
base = current_app.config["UPLOAD_FOLDER"]
|
||||
m = Media.query.filter(Media.file_url.endswith(filename)).first_or_404()
|
||||
full = os.path.normpath(os.path.join(base, m.file_url))
|
||||
if not full.startswith(os.path.abspath(base)):
|
||||
abort(404)
|
||||
return send_from_directory(base, m.file_url)
|
||||
|
||||
|
||||
@bp.route("/heart/<int:media_id>", methods=["POST"])
|
||||
@login_required
|
||||
def toggle_heart(media_id):
|
||||
existing = ImageHeart.query.filter_by(
|
||||
user_id=current_user.id, media_id=media_id
|
||||
).first()
|
||||
if existing:
|
||||
db.session.delete(existing)
|
||||
db.session.commit()
|
||||
return jsonify({"status": "unhearted"})
|
||||
heart = ImageHeart(user_id=current_user.id, media_id=media_id)
|
||||
db.session.add(heart)
|
||||
db.session.commit()
|
||||
return jsonify({"status": "hearted"})
|
||||
|
||||
|
||||
@bp.route("/add/<string:plant_uuid>", methods=["POST"])
|
||||
@login_required
|
||||
def add_media(plant_uuid):
|
||||
plant = Plant.query.filter_by(uuid=plant_uuid).first_or_404()
|
||||
file = request.files.get("file")
|
||||
if not file or not allowed_file(file.filename):
|
||||
flash("Invalid or missing file.", "danger")
|
||||
return redirect(request.referrer or url_for("plant.edit", uuid_val=plant_uuid))
|
||||
|
||||
_process_upload_file(
|
||||
file=file,
|
||||
uploader_id=current_user.id,
|
||||
plugin="plant",
|
||||
related_id=plant.id
|
||||
)
|
||||
flash("Media uploaded successfully.", "success")
|
||||
return redirect(request.referrer or url_for("plant.edit", uuid_val=plant_uuid))
|
||||
|
||||
|
||||
@bp.route("/<context>/<int:context_id>/<filename>")
|
||||
def media_file(context, context_id, filename):
|
||||
# your existing serve_context_media logic here
|
||||
# (unchanged)
|
||||
from flask import current_app, send_from_directory
|
||||
import os
|
||||
valid = {"user", "plant", "growlog", "vendor"}
|
||||
if context in valid:
|
||||
plugin = context
|
||||
elif context.endswith("s") and context[:-1] in valid:
|
||||
plugin = context[:-1]
|
||||
else:
|
||||
abort(404)
|
||||
|
||||
media = Media.query.filter_by(
|
||||
plugin=plugin,
|
||||
related_id=context_id,
|
||||
filename=filename
|
||||
).first_or_404()
|
||||
|
||||
base = current_app.config["UPLOAD_FOLDER"]
|
||||
directory = os.path.join(base, plugin, str(context_id))
|
||||
return send_from_directory(directory, filename)
|
||||
|
||||
@bp.route('/featured/<context>/<int:context_id>/<int:media_id>', methods=['POST'])
|
||||
def set_featured_image(context, context_id, media_id):
|
||||
"""
|
||||
Single‐select “featured” toggle for any plugin (plants, grow_logs, etc).
|
||||
"""
|
||||
# normalize to singular plugin name (matches Media.plugin & FeaturedImage.context)
|
||||
valid = {'plant', 'growlog', 'user', 'vendor'}
|
||||
if context in valid:
|
||||
plugin_name = context
|
||||
elif context.endswith('s') and context[:-1] in valid:
|
||||
plugin_name = context[:-1]
|
||||
else:
|
||||
abort(404)
|
||||
|
||||
# must own that media row
|
||||
media = Media.query.filter_by(
|
||||
plugin=plugin_name,
|
||||
related_id=context_id,
|
||||
id=media_id
|
||||
).first_or_404()
|
||||
|
||||
# clear out any existing featured rows
|
||||
FeaturedImage.query.filter_by(
|
||||
context=plugin_name,
|
||||
context_id=context_id
|
||||
).delete()
|
||||
|
||||
# insert new featured row
|
||||
fi = FeaturedImage(
|
||||
media_id=media.id,
|
||||
context=plugin_name,
|
||||
context_id=context_id,
|
||||
is_featured=True
|
||||
)
|
||||
db.session.add(fi)
|
||||
db.session.commit()
|
||||
|
||||
# Redirect back with a flash instead of JSON
|
||||
flash("Featured image updated.", "success")
|
||||
return redirect(request.referrer or url_for("core_ui.home"))
|
||||
|
||||
|
||||
@bp.route("/delete/<int:media_id>", methods=["POST"])
|
||||
@login_required
|
||||
def delete_media(media_id):
|
||||
media = Media.query.get_or_404(media_id)
|
||||
if media.uploader_id != current_user.id and current_user.role != "admin":
|
||||
flash("Not authorized to delete this media.", "danger")
|
||||
return redirect(request.referrer or url_for("core_ui.home"))
|
||||
|
||||
delete_media_file(media)
|
||||
flash("Media deleted.", "success")
|
||||
return redirect(request.referrer or url_for("plant.edit", uuid_val=media.plant.uuid))
|
||||
|
||||
|
||||
@bp.route("/bulk_delete/<string:plant_uuid>", methods=["POST"])
|
||||
@login_required
|
||||
def bulk_delete_media(plant_uuid):
|
||||
plant = Plant.query.filter_by(uuid=plant_uuid).first_or_404()
|
||||
media_ids = request.form.getlist("delete_ids")
|
||||
deleted = 0
|
||||
|
||||
for mid in media_ids:
|
||||
m = Media.query.filter_by(id=mid, plant_id=plant.id).first()
|
||||
if m and (m.uploader_id == current_user.id or current_user.role == "admin"):
|
||||
delete_media_file(m)
|
||||
deleted += 1
|
||||
|
||||
flash(f"{deleted} image(s) deleted.", "success")
|
||||
return redirect(request.referrer or url_for("plant.edit", uuid_val=plant_uuid))
|
||||
|
||||
|
||||
@bp.route("/rotate/<int:media_id>", methods=["POST"])
|
||||
@login_required
|
||||
def rotate_media(media_id):
|
||||
media = Media.query.get_or_404(media_id)
|
||||
if media.uploader_id != current_user.id and current_user.role != "admin":
|
||||
flash("Not authorized to rotate this media.", "danger")
|
||||
return redirect(request.referrer or url_for("core_ui.home"))
|
||||
|
||||
try:
|
||||
rotate_media_file(media)
|
||||
flash("Image rotated successfully.", "success")
|
||||
except Exception as e:
|
||||
flash(f"Failed to rotate image: {e}", "danger")
|
||||
|
||||
return redirect(request.referrer or url_for("plant.edit", uuid_val=media.plant.uuid))
|
||||
|
69
plugins/media/tasks.py
Normal file
69
plugins/media/tasks.py
Normal file
@ -0,0 +1,69 @@
|
||||
import os
|
||||
import zipfile
|
||||
from werkzeug.utils import secure_filename
|
||||
from PIL import Image, UnidentifiedImageError
|
||||
from app import db
|
||||
from plugins.media.models import ZipJob
|
||||
|
||||
# Re‐import your create_app and utility plugin to get Celery
|
||||
from plugins.utility.celery import celery_app
|
||||
|
||||
# Constants
|
||||
IMAGE_EXTS = {'.jpg','.jpeg','.png','.gif'}
|
||||
DOC_EXTS = {'.pdf','.txt','.csv'}
|
||||
MAX_ZIP_FILES = 1000
|
||||
MAX_PIXELS = 8000 * 8000
|
||||
|
||||
def validate_image(path):
|
||||
try:
|
||||
with Image.open(path) as img:
|
||||
img.verify()
|
||||
w, h = Image.open(path).size
|
||||
return (w*h) <= MAX_PIXELS
|
||||
except (UnidentifiedImageError, IOError):
|
||||
return False
|
||||
|
||||
@celery_app.task(bind=True)
|
||||
def process_zip(self, job_id, zip_path):
|
||||
job = ZipJob.query.get(job_id)
|
||||
job.status = 'processing'
|
||||
db.session.commit()
|
||||
|
||||
extract_dir = zip_path + '_contents'
|
||||
try:
|
||||
with zipfile.ZipFile(zip_path) as zf:
|
||||
names = zf.namelist()
|
||||
if len(names) > MAX_ZIP_FILES:
|
||||
raise ValueError('ZIP contains too many files.')
|
||||
|
||||
os.makedirs(extract_dir, exist_ok=True)
|
||||
for member in names:
|
||||
safe = secure_filename(member)
|
||||
if safe != member:
|
||||
raise ValueError(f'Illegal filename {member}')
|
||||
|
||||
_, ext = os.path.splitext(safe.lower())
|
||||
if ext not in IMAGE_EXTS | DOC_EXTS:
|
||||
raise ValueError(f'Unsupported type {ext}')
|
||||
|
||||
target = os.path.join(extract_dir, safe)
|
||||
with zf.open(member) as src, open(target, 'wb') as dst:
|
||||
dst.write(src.read())
|
||||
|
||||
if ext in IMAGE_EXTS and not validate_image(target):
|
||||
raise ValueError(f'Bad image: {member}')
|
||||
elif ext == '.pdf':
|
||||
if open(target,'rb').read(5)!=b'%PDF-':
|
||||
raise ValueError(f'Bad PDF: {member}')
|
||||
else:
|
||||
# txt/csv → simple UTF-8 check
|
||||
open(target,'rb').read(1024).decode('utf-8')
|
||||
|
||||
job.status = 'done'
|
||||
|
||||
except Exception as e:
|
||||
job.status = 'failed'
|
||||
job.error = str(e)
|
||||
|
||||
finally:
|
||||
db.session.commit()
|
@ -1,5 +1,5 @@
|
||||
{# plugins/media/templates/media/list.html #}
|
||||
{% extends 'core_ui/base.html' %}
|
||||
{% extends 'core/base.html' %}
|
||||
|
||||
{% block content %}
|
||||
<h2>All Uploaded Media</h2>
|
||||
|
14
plugins/ownership/plugin.json
Normal file
14
plugins/ownership/plugin.json
Normal file
@ -0,0 +1,14 @@
|
||||
{
|
||||
"name": "Ownership",
|
||||
"version": "0.1.0",
|
||||
"author": "Bryson Shepard <bryson@natureinpots.com>",
|
||||
"description": "Tracks plant ownership transfers and history.",
|
||||
"module": "plugins.ownership",
|
||||
"routes": {
|
||||
"module": "plugins.ownership.routes",
|
||||
"blueprint": "bp",
|
||||
"url_prefix": "/ownership"
|
||||
},
|
||||
"license": "Proprietary",
|
||||
"repository": "https://github.com/your-org/your-app"
|
||||
}
|
29
plugins/plant/growlog/forms.py
Normal file
29
plugins/plant/growlog/forms.py
Normal file
@ -0,0 +1,29 @@
|
||||
# plugins/plant/growlog/forms.py
|
||||
|
||||
from flask_wtf import FlaskForm
|
||||
from wtforms import SelectField, StringField, TextAreaField, BooleanField, SubmitField
|
||||
from wtforms.validators import DataRequired, Length
|
||||
|
||||
class GrowLogForm(FlaskForm):
|
||||
plant_uuid = SelectField(
|
||||
'Plant',
|
||||
choices=[], # injected in view
|
||||
validators=[DataRequired()]
|
||||
)
|
||||
|
||||
event_type = SelectField(
|
||||
'Event Type',
|
||||
choices=[
|
||||
('water', 'Watered'),
|
||||
('fertilizer', 'Fertilized'),
|
||||
('repot', 'Repotted'),
|
||||
('note', 'Note'),
|
||||
('pest', 'Pest Observed'),
|
||||
],
|
||||
validators=[DataRequired()]
|
||||
)
|
||||
|
||||
title = StringField('Title', validators=[Length(max=255)])
|
||||
notes = TextAreaField('Notes', validators=[Length(max=1000)])
|
||||
is_public = BooleanField('Public?')
|
||||
submit = SubmitField('Save Log')
|
40
plugins/plant/growlog/models.py
Normal file
40
plugins/plant/growlog/models.py
Normal file
@ -0,0 +1,40 @@
|
||||
# plugins/plant/growlog/models.py
|
||||
|
||||
from datetime import datetime
|
||||
from app import db
|
||||
|
||||
class GrowLog(db.Model):
|
||||
__tablename__ = "grow_logs"
|
||||
__table_args__ = {"extend_existing": True}
|
||||
|
||||
id = db.Column(db.Integer, primary_key=True)
|
||||
plant_id = db.Column(db.Integer, db.ForeignKey("plant.id"), nullable=False)
|
||||
event_type = db.Column(db.String(50), nullable=False)
|
||||
title = db.Column(db.String(255), nullable=True)
|
||||
notes = db.Column(db.Text, nullable=True)
|
||||
is_public = db.Column(db.Boolean, default=False, nullable=False)
|
||||
created_at = db.Column(db.DateTime, default=datetime.utcnow, nullable=False)
|
||||
updated_at = db.Column(
|
||||
db.DateTime,
|
||||
default=datetime.utcnow,
|
||||
onupdate=datetime.utcnow,
|
||||
nullable=False
|
||||
)
|
||||
|
||||
# ─── Single “primary” media for this log ───────────────────────────────────
|
||||
media_id = db.Column(db.Integer, db.ForeignKey("media.id"), nullable=True)
|
||||
media = db.relationship(
|
||||
"plugins.media.models.Media",
|
||||
backref=db.backref("update_images", lazy="dynamic"),
|
||||
foreign_keys=[media_id],
|
||||
lazy="joined",
|
||||
)
|
||||
|
||||
# ─── All Media items whose growlog_id points here ─────────────────────────
|
||||
media_items = db.relationship(
|
||||
"plugins.media.models.Media",
|
||||
back_populates="growlog",
|
||||
foreign_keys="plugins.media.models.Media.growlog_id",
|
||||
lazy="dynamic",
|
||||
cascade="all, delete-orphan"
|
||||
)
|
@ -1,7 +1,9 @@
|
||||
# plugins/plant/growlog/routes.py
|
||||
|
||||
from uuid import UUID as _UUID
|
||||
from werkzeug.exceptions import NotFound
|
||||
from flask import (
|
||||
Blueprint, render_template, abort, redirect, url_for, request, flash
|
||||
Blueprint, render_template, abort, redirect,
|
||||
url_for, request, flash
|
||||
)
|
||||
from flask_login import login_required, current_user
|
||||
from app import db
|
||||
@ -9,7 +11,6 @@ from .models import GrowLog
|
||||
from .forms import GrowLogForm
|
||||
from plugins.plant.models import Plant, PlantCommonName
|
||||
|
||||
|
||||
bp = Blueprint(
|
||||
'growlog',
|
||||
__name__,
|
||||
@ -17,33 +18,33 @@ bp = Blueprint(
|
||||
template_folder='templates',
|
||||
)
|
||||
|
||||
|
||||
def _get_plant_by_uuid(uuid_val):
|
||||
"""
|
||||
uuid_val may already be a uuid.UUID (from a <uuid:> route converter)
|
||||
or a string (from form POST). Normalize & validate it, then lookup.
|
||||
Normalize & validate a UUID (may be a uuid.UUID or a string),
|
||||
then return the Plant owned by current_user or 404.
|
||||
"""
|
||||
# 1) If Flask route gave us a UUID instance, just stringify it
|
||||
# 1) If Flask gave us a real UUID, stringify it
|
||||
if isinstance(uuid_val, _UUID):
|
||||
val = str(uuid_val)
|
||||
else:
|
||||
# 2) Otherwise try to parse it as a hex string
|
||||
# 2) Otherwise try to parse it
|
||||
try:
|
||||
val = str(_UUID(uuid_val))
|
||||
except (ValueError, TypeError):
|
||||
# invalid format → 404
|
||||
abort(404)
|
||||
|
||||
# 3) Only return plants owned by current_user
|
||||
# 3) Only return plants owned by this user
|
||||
return (
|
||||
Plant.query
|
||||
.filter_by(uuid=val, owner_id=current_user.id)
|
||||
.first_or_404()
|
||||
)
|
||||
|
||||
|
||||
def _user_plant_choices():
|
||||
# join to the common‐name table and sort by its name
|
||||
"""
|
||||
Return [(uuid, "Common Name – uuid"), ...] for all plants
|
||||
owned by current_user, sorted by common name.
    """
    plants = (
        Plant.query
        .filter_by(owner_id=current_user.id)
@ -62,20 +63,19 @@ def _user_plant_choices():
@login_required
def add_log(plant_uuid=None):
    form = GrowLogForm()
    # 1) always populate the dropdown behind the scenes
    # always populate the select behind the scenes
    form.plant_uuid.choices = _user_plant_choices()

    plant = None
    hide_select = False

    # 2) if URL had a plant_uuid, load & pre-select it, hide dropdown
    # if URL gave us a plant_uuid, lock to that one
    if plant_uuid:
        plant = _get_plant_by_uuid(plant_uuid)
        form.plant_uuid.data = str(plant_uuid)
        hide_select = True

    if form.validate_on_submit():
        # 3) on POST, resolve via form.plant_uuid
        plant = _get_plant_by_uuid(form.plant_uuid.data)
        log = GrowLog(
            plant_id = plant.id,
@ -95,7 +95,7 @@ def add_log(plant_uuid=None):
        'growlog/log_form.html',
        form = form,
        plant = plant,
        hide_plant_select = hide_select
        hide_plant_select = hide_select,
    )


@ -103,15 +103,17 @@ def add_log(plant_uuid=None):
@bp.route('/<uuid:plant_uuid>')
@login_required
def list_logs(plant_uuid):
    # how many to show?
    from plugins.utility.celery import celery_app
    celery_app.send_task('plugins.utility.tasks.ping')

    limit = request.args.get('limit', default=10, type=int)

    if plant_uuid:
        # logs for a single plant
        # logs for one plant
        plant = _get_plant_by_uuid(plant_uuid)
        query = GrowLog.query.filter_by(plant_id=plant.id)
    else:
        # logs for all your plants
        # logs across all of this user’s plants
        plant = None
        query = (
            GrowLog.query
@ -128,20 +130,20 @@ def list_logs(plant_uuid):

    return render_template(
        'growlog/log_list.html',
        plant=plant,
        logs=logs,
        limit=limit
        plant = plant,
        logs = logs,
        limit = limit,
    )


@bp.route('/<uuid:plant_uuid>/edit/<int:log_id>', methods=['GET', 'POST'])
@bp.route('/<uuid:plant_uuid>/edit/<int:log_id>', methods=['GET','POST'])
@login_required
def edit_log(plant_uuid, log_id):
    plant = _get_plant_by_uuid(plant_uuid)
    log = GrowLog.query.filter_by(id=log_id, plant_id=plant.id).first_or_404()
    form = GrowLogForm(obj=log)

    # Lock the dropdown to this one plant
    # lock the dropdown to this plant
    form.plant_uuid.choices = [(plant.uuid, plant.common_name.name)]
    form.plant_uuid.data = plant.uuid

@ -151,16 +153,15 @@ def edit_log(plant_uuid, log_id):
        log.notes = form.notes.data
        log.is_public = form.is_public.data
        db.session.commit()

        flash('Grow log updated.', 'success')
        return redirect(url_for('growlog.list_logs', plant_uuid=plant_uuid))

    return render_template(
        'growlog/log_form.html',
        form=form,
        plant_uuid=plant_uuid,
        plant=plant,
        log=log
        form = form,
        plant_uuid = plant_uuid,
        plant = plant,
        log = log,
    )


@ -171,6 +172,5 @@ def delete_log(plant_uuid, log_id):
    log = GrowLog.query.filter_by(id=log_id, plant_id=plant.id).first_or_404()
    db.session.delete(log)
    db.session.commit()

    flash('Grow log deleted.', 'warning')
    return redirect(url_for('growlog.list_logs', plant_uuid=plant_uuid))
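Note on the growlog routes above: they lean on two module-level helpers, _get_plant_by_uuid() and _user_plant_choices(), which sit earlier in plugins/plant/growlog/routes.py and are only partially visible in this excerpt. A minimal sketch of the shape those helpers would need, built from the Plant fields this diff already uses (uuid, owner_id, common_name) — names and exact filtering are assumptions, not the committed code:

# Sketch only — assumed helper shapes, not the committed implementation.
from flask_login import current_user
from plugins.plant.models import Plant

def _get_plant_by_uuid(plant_uuid):
    # Resolve a plant by UUID, scoped to the logged-in user; 404 when missing or not owned.
    return (
        Plant.query
        .filter_by(uuid=str(plant_uuid), owner_id=current_user.id)
        .first_or_404()
    )

def _user_plant_choices():
    # (value, label) pairs that feed GrowLogForm.plant_uuid.choices.
    plants = (
        Plant.query
        .filter_by(owner_id=current_user.id)
        .all()
    )
    return [(str(p.uuid), p.common_name.name) for p in plants]

The celery_app.send_task('plugins.utility.tasks.ping') call inside list_logs() dispatches by task name only, so the worker just needs a task registered under that exact name; the task itself is not shown in this diff, and firing it on every page view reads like a temporary broker health check.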
@ -1,4 +1,4 @@
{% extends 'core_ui/base.html' %}
{% extends 'core/base.html' %}
{% block title %}Add Grow Log{% endblock %}

{% block content %}
@ -1,5 +1,5 @@
{# plugins/growlog/templates/growlog/log_list.html #}
{% extends 'core_ui/base.html' %}
{# plugins/plant/growlog/templates/growlog/log_list.html #}
{% extends 'core/base.html' %}

{% block title %}
  {% if plant %}
@ -18,9 +18,12 @@
    Recent Grow Logs
  {% endif %}
</h2>
{# “Add” button: carry plant_uuid when in single-plant view #}
<a
  href="{% if plant %}{{ url_for('growlog.add_log', plant_uuid=plant.uuid) }}{% else %}{{ url_for('growlog.add_log') }}{% endif %}"
  href="{% if plant %}
          {{ url_for('growlog.add_log', plant_uuid=plant.uuid) }}
        {% else %}
          {{ url_for('growlog.add_log') }}
        {% endif %}"
  class="btn btn-success">
  <i class="bi bi-plus-lg"></i> Add Log
</a>
@ -38,7 +41,6 @@
      </small>
    </div>
    {% if not plant %}
      {# Show which plant this log belongs to when listing across all plants #}
      <div class="ms-auto text-end">
        <small class="text-secondary">Plant:</small><br>
        <a href="{{ url_for('growlog.list_logs', plant_uuid=log.plant.uuid) }}">
@ -60,7 +62,7 @@
        <img
          src="{{ generate_image_url(media) }}"
          class="img-thumbnail"
          style="max-width:100px;"
          style="max-width: 100px;"
          alt="{{ media.caption or '' }}"
        >
      {% endfor %}
@ -96,7 +98,11 @@
{% else %}
  <p class="text-muted">
    No grow logs found{% if plant %} for {{ plant.common_name.name }}{% endif %}.
    <a href="{% if plant %}{{ url_for('growlog.add_log', plant_uuid=plant.uuid) }}{% else %}{{ url_for('growlog.add_log') }}{% endif %}">
    <a href="{% if plant %}
               {{ url_for('growlog.add_log', plant_uuid=plant.uuid) }}
             {% else %}
               {{ url_for('growlog.add_log') }}
             {% endif %}">
      Add one now
    </a>.
  </p>
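One side effect of splitting those href attributes across lines: the literal newlines and indentation now sit inside the attribute value. Browsers strip surrounding whitespace from URLs, so the links still resolve, but Jinja whitespace-control markers would keep the rendered attribute clean. An illustrative trimmed form (a sketch, not the committed markup):

<a href="{%- if plant -%}
           {{ url_for('growlog.add_log', plant_uuid=plant.uuid) }}
         {%- else -%}
           {{ url_for('growlog.add_log') }}
         {%- endif -%}"
   class="btn btn-success">

The {%- / -%} markers trim the whitespace on either side of each tag, so the generated HTML contains only the URL between the quotes.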
@ -1,8 +1,10 @@
# plugins/plant/models.py

from datetime import datetime
import uuid as uuid_lib

import string, random  # for generate_short_id
from app import db
from plugins.plant.growlog.models import GrowLog

# Association table for Plant ↔ Tag
plant_tags = db.Table(
@ -102,14 +104,14 @@ class Plant(db.Model):
    media_items = db.relationship(
        'plugins.media.models.Media',
        back_populates='plant',
        lazy='select',  # ← this is the fix
        lazy='select',
        cascade='all, delete-orphan',
        foreign_keys='plugins.media.models.Media.plant_id'
    )

    @property
    def media(self):
        return self.media_items  # already a list when lazy='select'
        return self.media_items

    # the one you see on the detail page
    featured_media = db.relationship(
@ -120,7 +122,7 @@ class Plant(db.Model):

    # ↔ GrowLog instances for this plant
    updates = db.relationship(
        'plugins.growlog.models.GrowLog',
        GrowLog,
        backref='plant',
        lazy=True,
        cascade='all, delete-orphan'
@ -162,6 +164,5 @@ class Plant(db.Model):
        alphabet = string.ascii_lowercase + string.digits
        while True:
            candidate = ''.join(random.choices(alphabet, k=length))
            # Check uniqueness
            if not cls.query.filter_by(short_id=candidate).first():
                return candidate
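For context, the short-ID hunk above shows only the loop body. Filled out on the Plant model (which already imports string and random per this diff), the method would read roughly as below; the decorator, signature, and default length are assumptions, only the indented body comes from the hunk:

# Sketch of the full method on the Plant model; everything outside the shown lines is a guess.
@classmethod
def generate_short_id(cls, length=8):  # signature and default length are assumptions
    alphabet = string.ascii_lowercase + string.digits
    while True:
        candidate = ''.join(random.choices(alphabet, k=length))
        # Check uniqueness before handing the ID out
        if not cls.query.filter_by(short_id=candidate).first():
            return candidate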
@ -1,6 +1,31 @@
{
  "name": "plant",
  "version": "1.0.0",
  "description": "Plant profile management plugin",
  "entry_point": null
  "name": "Plant",
  "version": "0.1.0",
  "author": "Bryson Shepard <bryson@natureinpots.com>",
  "description": "Core plant catalog and management.",
  "module": "plugins.plant",
  "routes": {
    "module": "plugins.plant.routes",
    "blueprint": "bp",
    "url_prefix": "/plant"
  },
  "models": [
    "plugins.plant.models"
  ],
  "subplugins": [
    {
      "name": "GrowLog",
      "module": "plugins.plant.growlog",
      "routes": {
        "module": "plugins.plant.growlog.routes",
        "blueprint": "bp",
        "url_prefix": "/plant/growlog"
      },
      "models": [
        "plugins.plant.growlog.models"
      ]
    }
  ],
  "license": "Proprietary",
  "repository": "https://github.com/your-org/your-app"
}
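The expanded manifest now describes the plant plugin's blueprint and models plus a nested GrowLog subplugin. The loader that consumes it lives in app/__init__.py and is not part of this excerpt, so purely as an illustration, this is one way such a manifest could be walked — the function name and error handling here are assumptions, not the app's actual loader:

# Hypothetical loader sketch; assumes only what the manifest above declares.
import json
import importlib

def register_plugin(app, manifest_path):
    """Read a plugin.json manifest and register its blueprint(s) on the Flask app."""
    with open(manifest_path) as fh:
        manifest = json.load(fh)

    # Top-level entry first, then any nested subplugins such as GrowLog.
    for entry in [manifest, *manifest.get("subplugins", [])]:
        routes = entry.get("routes")
        if not routes:
            continue
        module = importlib.import_module(routes["module"])
        blueprint = getattr(module, routes["blueprint"])
        app.register_blueprint(blueprint, url_prefix=routes.get("url_prefix"))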
@ -1,4 +1,4 @@
{% extends 'core_ui/base.html' %}
{% extends 'core/base.html' %}

{% block content %}
<div class="card mb-4">

@ -1,4 +1,4 @@
{% extends 'core_ui/base.html' %}
{% extends 'core/base.html' %}
{% block title %}Add New Plant – Nature In Pots{% endblock %}

{% block content %}

@ -1,4 +1,4 @@
{% extends 'core_ui/base.html' %}
{% extends 'core/base.html' %}
{% block title %}
  {{ plant.common_name.name if plant.common_name else "Unnamed Plant" }} – Nature In Pots
{% endblock %}

@ -1,4 +1,4 @@
{% extends 'core_ui/base.html' %}
{% extends 'core/base.html' %}
{% block title %}Edit Plant – Nature In Pots{% endblock %}

{% block content %}

@ -1,4 +1,4 @@
{% extends 'core_ui/base.html' %}
{% extends 'core/base.html' %}
{% block title %}View Entries – Nature In Pots{% endblock %}

{% block content %}
@ -1,15 +0,0 @@
from flask_wtf import FlaskForm
from wtforms import StringField, SelectMultipleField, SubmitField
from wtforms.validators import Optional, Length, Regexp

class SearchForm(FlaskForm):
    query = StringField(
        'Search',
        validators=[
            Optional(),
            Length(min=2, max=100, message="Search term must be between 2 and 100 characters."),
            Regexp(r'^[\w\s\-]+$', message="Search can only include letters, numbers, spaces, and dashes.")
        ]
    )
    tags = SelectMultipleField('Tags', coerce=int)
    submit = SubmitField('Search')

@ -1,17 +0,0 @@
from app import db
#from plugins.plant.models import Plant

# plant_tags = db.Table(
#     'plant_tags',
#     db.metadata,
#     db.Column('plant_id', db.Integer, db.ForeignKey('plant.id'), primary_key=True),
#     db.Column('tag_id', db.Integer, db.ForeignKey('tags.id'), primary_key=True),
#     extend_existing=True
# )

# class Tag(db.Model):
#     __tablename__ = 'tags'
#     __table_args__ = {'extend_existing': True}

#     id = db.Column(db.Integer, primary_key=True)
#     name = db.Column(db.String(100), unique=True, nullable=False)

@ -1,6 +0,0 @@
{
  "name": "search",
  "version": "1.1",
  "description": "Updated search plugin with live Plant model integration",
  "entry_point": null
}

@ -1,37 +0,0 @@
from flask import Blueprint, render_template, request, jsonify
from flask_login import login_required, current_user
from app import db
from .forms import SearchForm
from plugins.plant.models import Plant, Tag

bp = Blueprint('search', __name__, template_folder='templates')

@bp.route('/search', methods=['GET', 'POST'])
@login_required
def search():
    form = SearchForm()
    form.tags.choices = [(tag.id, tag.name) for tag in Tag.query.order_by(Tag.name).all()]
    results = []
    if form.validate_on_submit():
        query = db.session.query(Plant).join(PlantScientific).join(PlantCommon)
        if form.query.data:
            q = f"%{form.query.data}%"
            query = query.filter(
                db.or_(
                    PlantScientific.name.ilike(q),
                    PlantCommon.name.ilike(q),
                    Plant.current_status.ilike(q)
                )
            )
        if form.tags.data:
            query = query.filter(Plant.tags.any(Tag.id.in_(form.tags.data)))
        query = query.filter(Plant.owner_id == current_user.id)
        results = query.all()
    return render_template('search/search.html', form=form, results=results)

@bp.route('/search/tags')
@login_required
def search_tags():
    term = request.args.get('term', '')
    tags = Tag.query.filter(Tag.name.ilike(f"%{term}%")).limit(10).all()
    return jsonify([tag.name for tag in tags])
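Worth noting about the deleted search routes above: search() joined and filtered on PlantScientific and PlantCommon without importing them, so its POST branch would have raised NameError at runtime. If the plugin were ever revived, the import line would need to cover them, assuming both models live in plugins.plant.models (which this diff does not show):

# Assumed fix for the removed module; the PlantCommon/PlantScientific locations are not confirmed by this diff.
from plugins.plant.models import Plant, PlantCommon, PlantScientific, Tag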
Some files were not shown because too many files have changed in this diff.