diff --git a/.gitignore b/.gitignore index b393411..ea4691b 100644 --- a/.gitignore +++ b/.gitignore @@ -7,10 +7,10 @@ __pycache__/ *.db # Flask/Migrations -migrations/ +#migrations/ instance/ .env -.env.* +#.env.* # VS Code .vscode/ diff --git a/app/__init__.py b/app/__init__.py index 91452c8..b1b25db 100644 --- a/app/__init__.py +++ b/app/__init__.py @@ -1,8 +1,9 @@ +# app/__init__.py + import os import json -import importlib +import glob import importlib.util - from flask import Flask from flask_sqlalchemy import SQLAlchemy from flask_migrate import Migrate @@ -10,32 +11,52 @@ from flask_login import LoginManager from flask_wtf.csrf import CSRFProtect from dotenv import load_dotenv -# Load environment variables load_dotenv() -# Initialize extensions -db = SQLAlchemy() -migrate = Migrate() +# ---------------------------------------------------------------- +# 1) Initialize core extensions +# ---------------------------------------------------------------- +db = SQLAlchemy() +migrate = Migrate() login_manager = LoginManager() -csrf = CSRFProtect() +csrf = CSRFProtect() def create_app(): app = Flask(__name__) app.config.from_object('app.config.Config') - # Initialize core extensions + # Initialize extensions with app csrf.init_app(app) db.init_app(app) migrate.init_app(app, db) login_manager.init_app(app) login_manager.login_view = 'auth.login' - # Register error handlers + # ---------------------------------------------------------------- + # 2) Register error handlers + # ---------------------------------------------------------------- from .errors import bp as errors_bp app.register_blueprint(errors_bp) - # Auto-discover and register plugins + # ---------------------------------------------------------------- + # 3) Auto-load each plugin’s models.py so that SQLAlchemy metadata + # knows about every table (Plant, PlantOwnershipLog, PlantUpdate, etc.) + # ---------------------------------------------------------------- + plugin_model_paths = glob.glob(os.path.join(os.path.dirname(__file__), '..', 'plugins', '*', 'models.py')) + for path in plugin_model_paths: + module_name = path.replace("/", ".").replace(".py", "") + try: + spec = importlib.util.spec_from_file_location(module_name, path) + mod = importlib.util.module_from_spec(spec) + spec.loader.exec_module(mod) + print(f"✅ (Startup) Loaded: {module_name}") + except Exception as e: + print(f"❌ (Startup) Failed to load {module_name}: {e}") + + # ---------------------------------------------------------------- + # 4) Auto-discover & register each plugin’s routes.py and CLI + # ---------------------------------------------------------------- plugin_path = os.path.abspath(os.path.join(os.path.dirname(__file__), '..', 'plugins')) for plugin in os.listdir(plugin_path): if plugin.endswith('.noload'): @@ -46,47 +67,37 @@ def create_app(): if not os.path.isdir(plugin_dir): continue - # 1. 
Register routes + # --- (a) Register routes blueprint if present --- route_file = os.path.join(plugin_dir, 'routes.py') if os.path.isfile(route_file): try: spec = importlib.util.spec_from_file_location(f"plugins.{plugin}.routes", route_file) - mod = importlib.util.module_from_spec(spec) + mod = importlib.util.module_from_spec(spec) spec.loader.exec_module(mod) if hasattr(mod, 'bp'): app.register_blueprint(mod.bp, strict_slashes=False) + print(f"✔️ Registered routes for plugin '{plugin}'") except Exception as e: - print(f"[⚠️] Failed to load routes from plugin '{plugin}': {e}") + print(f"❌ Failed to load routes from plugin '{plugin}': {e}") - # Define paths - init_file = os.path.join(plugin_dir, '__init__.py') + # --- (b) Register CLI & entry point if present --- + init_file = os.path.join(plugin_dir, '__init__.py') plugin_json = os.path.join(plugin_dir, 'plugin.json') - model_file = os.path.join(plugin_dir, 'models.py') - - # 2. Register CLI commands and run entry point if os.path.isfile(init_file): try: cli_module = importlib.import_module(f"plugins.{plugin}") if hasattr(cli_module, 'register_cli'): cli_module.register_cli(app) - + print(f"✔️ Registered CLI for plugin '{plugin}'") if os.path.isfile(plugin_json): with open(plugin_json, 'r') as f: meta = json.load(f) entry = meta.get('entry_point') if entry and hasattr(cli_module, entry): getattr(cli_module, entry)(app) + print(f"✔️ Ran entry point '{entry}' for plugin '{plugin}'") except Exception as e: - print(f"[⚠️] Failed to load CLI for plugin '{plugin}': {e}") - - ## 3. Auto-load plugin models for migrations - #if os.path.isfile(model_file): - # try: - # spec = importlib.util.spec_from_file_location(f"plugins.{plugin}.models", model_file) - # mod = importlib.util.module_from_spec(spec) - # spec.loader.exec_module(mod) - # except Exception as e: - # print(f"[⚠️] Failed to load models from plugin '{plugin}': {e}") + print(f"❌ Failed to load CLI for plugin '{plugin}': {e}") @app.context_processor def inject_current_year(): @@ -94,9 +105,3 @@ def create_app(): return {'current_year': datetime.now().year} return app - - -@login_manager.user_loader -def load_user(user_id): - from plugins.auth.models import User - return User.query.get(int(user_id)) diff --git a/app/neo4j_utils.py b/app/neo4j_utils.py index defc7c8..81cbfe4 100644 --- a/app/neo4j_utils.py +++ b/app/neo4j_utils.py @@ -1,32 +1,103 @@ +# app/neo4j_utils.py from neo4j import GraphDatabase from flask import current_app class Neo4jHandler: - def __init__(self, uri, user, password): - self.driver = GraphDatabase.driver(uri, auth=(user, password)) + def __init__(self, uri=None, user=None, password=None): + # We read from current_app.config if nothing is passed in explicitly. + # If you already set NEO4J_URI / NEO4J_USER / NEO4J_PASSWORD in your config.py, + # these defaults will be overridden by those values automatically. + uri = uri or current_app.config.get("NEO4J_URI", "bolt://nip_neo4j:7687") + user = user or current_app.config.get("NEO4J_USER", "neo4j") + pw = password or current_app.config.get("NEO4J_PASSWORD", "your_password_here") + + self.driver = GraphDatabase.driver(uri, auth=(user, pw)) def close(self): self.driver.close() - def create_plant_node(self, uuid, name): - with self.driver.session() as session: - session.run( - "MERGE (p:Plant {uuid: $uuid}) " - "SET p.name = $name", - uuid=uuid, name=name - ) + def create_plant_node(self, uuid: str, name: str = "Unknown"): + """ + MERGE a Plant node by UUID. On create, set its name. 
+ We strip() and strip('"') in case the CSV had extra quotes or spaces around the UUID. + """ + if not uuid: + print("[⚠️] Skipped node creation: missing UUID") + return - def create_lineage(self, child_uuid, parent_uuid): - with self.driver.session() as session: - session.run( - "MATCH (child:Plant {uuid: $child_uuid}), (parent:Plant {uuid: $parent_uuid}) " - "MERGE (parent)-[:PARENT_OF]->(child)", - child_uuid=child_uuid, parent_uuid=parent_uuid - ) + # Remove surrounding quotes or whitespace + uuid_clean = uuid.strip().strip('"') + name_clean = (name or "Unknown").strip() -def get_neo4j_handler(): - uri = current_app.config['NEO4J_URI'] - user = current_app.config['NEO4J_USER'] - password = current_app.config['NEO4J_PASSWORD'] + print(f"[ℹ️] (Neo4j) MERGE Plant node → uuid='{uuid_clean}', name='{name_clean}'") + try: + with self.driver.session() as session: + session.run( + """ + MERGE (p:Plant {uuid: $uuid}) + ON CREATE SET p.name = $name + """, + uuid=uuid_clean, + name=name_clean + ) + except Exception as e: + print(f"[❌] Neo4j node creation failed for UUID={uuid_clean}: {e}") + + def create_lineage(self, child_uuid: str, parent_uuid: str): + """ + MATCH both child and parent by UUID, then MERGE a LINEAGE relationship. + Again, strip() any extraneous quotes or whitespace. + """ + if not child_uuid or not parent_uuid: + print(f"[⚠️] Skipped lineage creation: missing UUID(s) ({child_uuid!r} → {parent_uuid!r})") + return + + child_clean = child_uuid.strip().strip('"') + parent_clean = parent_uuid.strip().strip('"') + + print(f"[ℹ️] (Neo4j) Attempting to MERGE LINEAGE → child='{child_clean}', parent='{parent_clean}'") + try: + with self.driver.session() as session: + result = session.run( + """ + MATCH (c:Plant {uuid: $child_uuid}) + MATCH (p:Plant {uuid: $parent_uuid}) + MERGE (c)-[r:LINEAGE]->(p) + RETURN type(r) AS rel_type + """, + child_uuid=child_clean, + parent_uuid=parent_clean + ) + record = result.single() + if record and record.get("rel_type") == "LINEAGE": + print(f"[✅] (Neo4j) Created LINEAGE → {child_clean} → {parent_clean}") + else: + print(f"[⚠️] (Neo4j) No LINEAGE created (nodes may not match) → {child_clean} → {parent_clean}") + except Exception as e: + print(f"[❌] Neo4j lineage creation failed: {e}") + + def debug_check_node(self, uuid: str): + """ + Utility: check whether a Plant node with this UUID exists in Neo4j. + """ + uuid_clean = uuid.strip().strip('"') + with self.driver.session() as session: + result = session.run( + "MATCH (p:Plant {uuid: $uuid}) RETURN p", + uuid=uuid_clean + ) + record = result.single() + if record: + print(f"[✅] (Neo4j) Node '{uuid_clean}' exists.") + else: + print(f"[❌] (Neo4j) Node '{uuid_clean}' NOT found.") + +def get_neo4j_handler() -> Neo4jHandler: + """ + Factory: read NEO4J_URI / NEO4J_USER / NEO4J_PASSWORD from current_app.config. + """ + uri = current_app.config.get("NEO4J_URI", "bolt://nip_neo4j:7687") + user = current_app.config.get("NEO4J_USER", "neo4j") + password = current_app.config.get("NEO4J_PASSWORD", "your_password_here") return Neo4jHandler(uri, user, password) diff --git a/files.zip b/files.zip new file mode 100644 index 0000000..fdafedc Binary files /dev/null and b/files.zip differ diff --git a/main.zip b/main.zip deleted file mode 100644 index a2e894f..0000000 Binary files a/main.zip and /dev/null differ diff --git a/migrations/README b/migrations/README new file mode 100644 index 0000000..0e04844 --- /dev/null +++ b/migrations/README @@ -0,0 +1 @@ +Single-database configuration for Flask. 
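For reference, a minimal usage sketch of the reworked Neo4j helper above (not part of the diff itself). It assumes it runs inside a Flask application context, since get_neo4j_handler() reads NEO4J_URI / NEO4J_USER / NEO4J_PASSWORD from current_app.config, and it reuses the Baltic Blue parent/cutting UUID pair that appears in plant.csv later in this change.

    # Usage sketch (assumption: run inside an app context built by create_app()).
    from app import create_app
    from app.neo4j_utils import get_neo4j_handler

    app = create_app()
    with app.app_context():
        neo = get_neo4j_handler()
        # MERGE both Plant nodes first, then the child -> parent LINEAGE edge.
        neo.create_plant_node("2ee2e0e7-69de-4b8f-abfe-4ed973c3d760", "Baltic Blue")
        neo.create_plant_node("8b1059c8-8dd3-487a-af19-1eb548788e87", "Baltic Blue")
        neo.create_lineage(
            child_uuid="8b1059c8-8dd3-487a-af19-1eb548788e87",
            parent_uuid="2ee2e0e7-69de-4b8f-abfe-4ed973c3d760",
        )
        # Confirm the child node exists before closing the driver.
        neo.debug_check_node("8b1059c8-8dd3-487a-af19-1eb548788e87")
        neo.close()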
diff --git a/migrations/alembic.ini b/migrations/alembic.ini new file mode 100644 index 0000000..ec9d45c --- /dev/null +++ b/migrations/alembic.ini @@ -0,0 +1,50 @@ +# A generic, single database configuration. + +[alembic] +# template used to generate migration files +# file_template = %%(rev)s_%%(slug)s + +# set to 'true' to run the environment during +# the 'revision' command, regardless of autogenerate +# revision_environment = false + + +# Logging configuration +[loggers] +keys = root,sqlalchemy,alembic,flask_migrate + +[handlers] +keys = console + +[formatters] +keys = generic + +[logger_root] +level = WARN +handlers = console +qualname = + +[logger_sqlalchemy] +level = WARN +handlers = +qualname = sqlalchemy.engine + +[logger_alembic] +level = INFO +handlers = +qualname = alembic + +[logger_flask_migrate] +level = INFO +handlers = +qualname = flask_migrate + +[handler_console] +class = StreamHandler +args = (sys.stderr,) +level = NOTSET +formatter = generic + +[formatter_generic] +format = %(levelname)-5.5s [%(name)s] %(message)s +datefmt = %H:%M:%S diff --git a/migrations/env.py b/migrations/env.py new file mode 100644 index 0000000..cd0a6b3 --- /dev/null +++ b/migrations/env.py @@ -0,0 +1,63 @@ +from __future__ import with_statement +import os +import logging +import importlib.util +from alembic import context +from sqlalchemy import engine_from_config, pool +from logging.config import fileConfig +from flask import current_app +from app import db + +# ----------------------------- +# 🔍 Automatically import all plugin models +# ----------------------------- +import glob +import importlib.util + +plugin_model_paths = glob.glob(os.path.join("plugins", "*", "models.py")) + +for path in plugin_model_paths: + module_name = path.replace("/", ".").replace(".py", "") + try: + spec = importlib.util.spec_from_file_location(module_name, path) + module = importlib.util.module_from_spec(spec) + spec.loader.exec_module(module) + print(f"✅ Loaded: {module_name}") + except Exception as e: + print(f"❌ Failed to load {module_name}: {e}") +# ----------------------------- + +config = context.config +fileConfig(config.config_file_name) +logger = logging.getLogger('alembic.env') + +target_metadata = db.metadata + +def run_migrations_offline(): + context.configure( + url=current_app.config.get("SQLALCHEMY_DATABASE_URI"), + target_metadata=target_metadata, + literal_binds=True, + dialect_opts={"paramstyle": "named"}, + ) + with context.begin_transaction(): + context.run_migrations() + +print("🧠 Alembic sees these tables:") +print(sorted(db.metadata.tables.keys())) + +def run_migrations_online(): + connectable = db.engine + with connectable.connect() as connection: + context.configure( + connection=connection, + target_metadata=target_metadata, + compare_type=True, + ) + with context.begin_transaction(): + context.run_migrations() + +if context.is_offline_mode(): + run_migrations_offline() +else: + run_migrations_online() diff --git a/migrations/script.py.mako b/migrations/script.py.mako new file mode 100644 index 0000000..2c01563 --- /dev/null +++ b/migrations/script.py.mako @@ -0,0 +1,24 @@ +"""${message} + +Revision ID: ${up_revision} +Revises: ${down_revision | comma,n} +Create Date: ${create_date} + +""" +from alembic import op +import sqlalchemy as sa +${imports if imports else ""} + +# revision identifiers, used by Alembic. 
+revision = ${repr(up_revision)} +down_revision = ${repr(down_revision)} +branch_labels = ${repr(branch_labels)} +depends_on = ${repr(depends_on)} + + +def upgrade(): + ${upgrades if upgrades else "pass"} + + +def downgrade(): + ${downgrades if downgrades else "pass"} diff --git a/migrations/versions/0171b270afc1_auto.py b/migrations/versions/0171b270afc1_auto.py new file mode 100644 index 0000000..c6f44ba --- /dev/null +++ b/migrations/versions/0171b270afc1_auto.py @@ -0,0 +1,28 @@ +"""auto + +Revision ID: 0171b270afc1 +Revises: 4d9859ada63b +Create Date: 2025-06-04 06:20:47.463202 + +""" +from alembic import op +import sqlalchemy as sa + + +# revision identifiers, used by Alembic. +revision = '0171b270afc1' +down_revision = '4d9859ada63b' +branch_labels = None +depends_on = None + + +def upgrade(): + # ### commands auto generated by Alembic - please adjust! ### + pass + # ### end Alembic commands ### + + +def downgrade(): + # ### commands auto generated by Alembic - please adjust! ### + pass + # ### end Alembic commands ### diff --git a/migrations/versions/07d152ee2ac2_auto.py b/migrations/versions/07d152ee2ac2_auto.py new file mode 100644 index 0000000..c7884dd --- /dev/null +++ b/migrations/versions/07d152ee2ac2_auto.py @@ -0,0 +1,28 @@ +"""auto + +Revision ID: 07d152ee2ac2 +Revises: 0171b270afc1 +Create Date: 2025-06-04 06:24:51.986909 + +""" +from alembic import op +import sqlalchemy as sa + + +# revision identifiers, used by Alembic. +revision = '07d152ee2ac2' +down_revision = '0171b270afc1' +branch_labels = None +depends_on = None + + +def upgrade(): + # ### commands auto generated by Alembic - please adjust! ### + pass + # ### end Alembic commands ### + + +def downgrade(): + # ### commands auto generated by Alembic - please adjust! ### + pass + # ### end Alembic commands ### diff --git a/migrations/versions/1c7cef84b4ae_auto.py b/migrations/versions/1c7cef84b4ae_auto.py new file mode 100644 index 0000000..d9d463e --- /dev/null +++ b/migrations/versions/1c7cef84b4ae_auto.py @@ -0,0 +1,28 @@ +"""auto + +Revision ID: 1c7cef84b4ae +Revises: 26803929dc3e +Create Date: 2025-06-04 22:07:43.375613 + +""" +from alembic import op +import sqlalchemy as sa + + +# revision identifiers, used by Alembic. +revision = '1c7cef84b4ae' +down_revision = '26803929dc3e' +branch_labels = None +depends_on = None + + +def upgrade(): + # ### commands auto generated by Alembic - please adjust! ### + op.add_column('plant', sa.Column('is_verified', sa.Boolean(), nullable=False)) + # ### end Alembic commands ### + + +def downgrade(): + # ### commands auto generated by Alembic - please adjust! ### + op.drop_column('plant', 'is_verified') + # ### end Alembic commands ### diff --git a/migrations/versions/26803929dc3e_auto.py b/migrations/versions/26803929dc3e_auto.py new file mode 100644 index 0000000..d38bf09 --- /dev/null +++ b/migrations/versions/26803929dc3e_auto.py @@ -0,0 +1,28 @@ +"""auto + +Revision ID: 26803929dc3e +Revises: 07d152ee2ac2 +Create Date: 2025-06-04 06:38:27.377036 + +""" +from alembic import op +import sqlalchemy as sa + + +# revision identifiers, used by Alembic. +revision = '26803929dc3e' +down_revision = '07d152ee2ac2' +branch_labels = None +depends_on = None + + +def upgrade(): + # ### commands auto generated by Alembic - please adjust! ### + pass + # ### end Alembic commands ### + + +def downgrade(): + # ### commands auto generated by Alembic - please adjust! 
### + pass + # ### end Alembic commands ### diff --git a/migrations/versions/27a65a4e055c_auto.py b/migrations/versions/27a65a4e055c_auto.py new file mode 100644 index 0000000..3cbfdbd --- /dev/null +++ b/migrations/versions/27a65a4e055c_auto.py @@ -0,0 +1,28 @@ +"""auto + +Revision ID: 27a65a4e055c +Revises: 48d93714beaf +Create Date: 2025-06-05 04:23:44.796455 + +""" +from alembic import op +import sqlalchemy as sa + + +# revision identifiers, used by Alembic. +revision = '27a65a4e055c' +down_revision = '48d93714beaf' +branch_labels = None +depends_on = None + + +def upgrade(): + # ### commands auto generated by Alembic - please adjust! ### + pass + # ### end Alembic commands ### + + +def downgrade(): + # ### commands auto generated by Alembic - please adjust! ### + pass + # ### end Alembic commands ### diff --git a/migrations/versions/2a0b02a42543_auto.py b/migrations/versions/2a0b02a42543_auto.py new file mode 100644 index 0000000..58b985c --- /dev/null +++ b/migrations/versions/2a0b02a42543_auto.py @@ -0,0 +1,28 @@ +"""auto + +Revision ID: 2a0b02a42543 +Revises: 93b893e47742 +Create Date: 2025-06-05 02:41:56.741133 + +""" +from alembic import op +import sqlalchemy as sa + + +# revision identifiers, used by Alembic. +revision = '2a0b02a42543' +down_revision = '93b893e47742' +branch_labels = None +depends_on = None + + +def upgrade(): + # ### commands auto generated by Alembic - please adjust! ### + pass + # ### end Alembic commands ### + + +def downgrade(): + # ### commands auto generated by Alembic - please adjust! ### + pass + # ### end Alembic commands ### diff --git a/migrations/versions/2fa6feb17477_auto.py b/migrations/versions/2fa6feb17477_auto.py new file mode 100644 index 0000000..c28df43 --- /dev/null +++ b/migrations/versions/2fa6feb17477_auto.py @@ -0,0 +1,28 @@ +"""auto + +Revision ID: 2fa6feb17477 +Revises: 9cff183551e1 +Create Date: 2025-06-05 00:45:39.693560 + +""" +from alembic import op +import sqlalchemy as sa + + +# revision identifiers, used by Alembic. +revision = '2fa6feb17477' +down_revision = '9cff183551e1' +branch_labels = None +depends_on = None + + +def upgrade(): + # ### commands auto generated by Alembic - please adjust! ### + pass + # ### end Alembic commands ### + + +def downgrade(): + # ### commands auto generated by Alembic - please adjust! ### + pass + # ### end Alembic commands ### diff --git a/migrations/versions/48d93714beaf_auto.py b/migrations/versions/48d93714beaf_auto.py new file mode 100644 index 0000000..52956b9 --- /dev/null +++ b/migrations/versions/48d93714beaf_auto.py @@ -0,0 +1,28 @@ +"""auto + +Revision ID: 48d93714beaf +Revises: 761d0f8be3ff +Create Date: 2025-06-05 04:20:31.030479 + +""" +from alembic import op +import sqlalchemy as sa + + +# revision identifiers, used by Alembic. +revision = '48d93714beaf' +down_revision = '761d0f8be3ff' +branch_labels = None +depends_on = None + + +def upgrade(): + # ### commands auto generated by Alembic - please adjust! ### + pass + # ### end Alembic commands ### + + +def downgrade(): + # ### commands auto generated by Alembic - please adjust! 
### + pass + # ### end Alembic commands ### diff --git a/migrations/versions/48fee8a8a3be_auto.py b/migrations/versions/48fee8a8a3be_auto.py new file mode 100644 index 0000000..238b584 --- /dev/null +++ b/migrations/versions/48fee8a8a3be_auto.py @@ -0,0 +1,28 @@ +"""auto + +Revision ID: 48fee8a8a3be +Revises: af76c66c9075 +Create Date: 2025-06-05 00:25:55.439874 + +""" +from alembic import op +import sqlalchemy as sa + + +# revision identifiers, used by Alembic. +revision = '48fee8a8a3be' +down_revision = 'af76c66c9075' +branch_labels = None +depends_on = None + + +def upgrade(): + # ### commands auto generated by Alembic - please adjust! ### + pass + # ### end Alembic commands ### + + +def downgrade(): + # ### commands auto generated by Alembic - please adjust! ### + pass + # ### end Alembic commands ### diff --git a/migrations/versions/4d9859ada63b_auto.py b/migrations/versions/4d9859ada63b_auto.py new file mode 100644 index 0000000..53b72f6 --- /dev/null +++ b/migrations/versions/4d9859ada63b_auto.py @@ -0,0 +1,202 @@ +"""auto + +Revision ID: 4d9859ada63b +Revises: +Create Date: 2025-06-04 06:16:08.829142 + +""" +from alembic import op +import sqlalchemy as sa + + +# revision identifiers, used by Alembic. +revision = '4d9859ada63b' +down_revision = None +branch_labels = None +depends_on = None + + +def upgrade(): + # ### commands auto generated by Alembic - please adjust! ### + op.create_table('plant_common_name', + sa.Column('id', sa.Integer(), nullable=False), + sa.Column('name', sa.String(length=255), nullable=False), + sa.PrimaryKeyConstraint('id'), + sa.UniqueConstraint('name'), + sa.UniqueConstraint('name') + ) + op.create_table('tag', + sa.Column('id', sa.Integer(), nullable=False), + sa.Column('name', sa.String(length=255), nullable=False), + sa.PrimaryKeyConstraint('id'), + sa.UniqueConstraint('name'), + sa.UniqueConstraint('name') + ) + op.create_table('users', + sa.Column('id', sa.Integer(), nullable=False), + sa.Column('email', sa.String(length=120), nullable=False), + sa.Column('password_hash', sa.Text(), nullable=False), + sa.Column('role', sa.String(length=50), nullable=True), + sa.Column('is_verified', sa.Boolean(), nullable=True), + sa.Column('excluded_from_analytics', sa.Boolean(), nullable=True), + sa.Column('created_at', sa.DateTime(), nullable=True), + sa.PrimaryKeyConstraint('id'), + sa.UniqueConstraint('email'), + sa.UniqueConstraint('email') + ) + op.create_table('plant_scientific_name', + sa.Column('id', sa.Integer(), nullable=False), + sa.Column('name', sa.String(length=255), nullable=False), + sa.Column('common_id', sa.Integer(), nullable=False), + sa.ForeignKeyConstraint(['common_id'], ['plant_common_name.id'], ), + sa.PrimaryKeyConstraint('id'), + sa.UniqueConstraint('name'), + sa.UniqueConstraint('name') + ) + op.create_table('plant', + sa.Column('id', sa.Integer(), nullable=False), + sa.Column('uuid', sa.String(length=36), nullable=False), + sa.Column('custom_slug', sa.String(length=255), nullable=True), + sa.Column('owner_id', sa.Integer(), nullable=False), + sa.Column('common_id', sa.Integer(), nullable=False), + sa.Column('scientific_id', sa.Integer(), nullable=False), + sa.Column('plant_type', sa.String(length=50), nullable=False), + sa.Column('status', sa.String(length=50), nullable=False), + sa.Column('notes', sa.Text(), nullable=True), + sa.Column('created_at', sa.DateTime(), nullable=True), + sa.Column('transferred', sa.Boolean(), nullable=True), + sa.Column('graph_node_id', sa.String(length=255), nullable=True), + 
sa.ForeignKeyConstraint(['common_id'], ['plant_common_name.id'], ), + sa.ForeignKeyConstraint(['owner_id'], ['users.id'], ), + sa.ForeignKeyConstraint(['scientific_id'], ['plant_scientific_name.id'], ), + sa.PrimaryKeyConstraint('id'), + sa.UniqueConstraint('custom_slug'), + sa.UniqueConstraint('custom_slug'), + sa.UniqueConstraint('uuid'), + sa.UniqueConstraint('uuid') + ) + op.create_table('grow_logs', + sa.Column('id', sa.Integer(), nullable=False), + sa.Column('plant_id', sa.Integer(), nullable=False), + sa.Column('title', sa.String(length=255), nullable=False), + sa.Column('created_at', sa.DateTime(), nullable=True), + sa.ForeignKeyConstraint(['plant_id'], ['plant.id'], ), + sa.PrimaryKeyConstraint('id') + ) + op.create_table('plant_lineage', + sa.Column('id', sa.Integer(), nullable=False), + sa.Column('child_plant_id', sa.Integer(), nullable=False), + sa.Column('parent_plant_id', sa.Integer(), nullable=False), + sa.Column('type', sa.String(length=50), nullable=False), + sa.ForeignKeyConstraint(['child_plant_id'], ['plant.id'], ), + sa.ForeignKeyConstraint(['parent_plant_id'], ['plant.id'], ), + sa.PrimaryKeyConstraint('id') + ) + op.create_table('plant_ownership_log', + sa.Column('id', sa.Integer(), nullable=False), + sa.Column('plant_id', sa.Integer(), nullable=False), + sa.Column('user_id', sa.Integer(), nullable=False), + sa.Column('start_time', sa.DateTime(), nullable=False), + sa.Column('end_time', sa.DateTime(), nullable=True), + sa.Column('transfer_note', sa.Text(), nullable=True), + sa.ForeignKeyConstraint(['plant_id'], ['plant.id'], ), + sa.ForeignKeyConstraint(['user_id'], ['users.id'], ), + sa.PrimaryKeyConstraint('id') + ) + op.create_table('plant_tags', + sa.Column('plant_id', sa.Integer(), nullable=False), + sa.Column('tag_id', sa.Integer(), nullable=False), + sa.ForeignKeyConstraint(['plant_id'], ['plant.id'], ), + sa.ForeignKeyConstraint(['tag_id'], ['tag.id'], ), + sa.PrimaryKeyConstraint('plant_id', 'tag_id') + ) + op.create_table('submissions', + sa.Column('id', sa.Integer(), nullable=False), + sa.Column('user_id', sa.Integer(), nullable=False), + sa.Column('plant_id', sa.Integer(), nullable=True), + sa.Column('common_name', sa.String(length=120), nullable=False), + sa.Column('scientific_name', sa.String(length=120), nullable=True), + sa.Column('price', sa.Float(), nullable=False), + sa.Column('source', sa.String(length=120), nullable=True), + sa.Column('timestamp', sa.DateTime(), nullable=True), + sa.Column('height', sa.Float(), nullable=True), + sa.Column('width', sa.Float(), nullable=True), + sa.Column('leaf_count', sa.Integer(), nullable=True), + sa.Column('potting_mix', sa.String(length=255), nullable=True), + sa.Column('container_size', sa.String(length=120), nullable=True), + sa.Column('health_status', sa.String(length=50), nullable=True), + sa.Column('notes', sa.Text(), nullable=True), + sa.ForeignKeyConstraint(['plant_id'], ['plant.id'], ), + sa.ForeignKeyConstraint(['user_id'], ['users.id'], ), + sa.PrimaryKeyConstraint('id') + ) + op.create_table('plant_updates', + sa.Column('id', sa.Integer(), nullable=False), + sa.Column('plant_id', sa.Integer(), nullable=False), + sa.Column('growlog_id', sa.Integer(), nullable=True), + sa.Column('update_type', sa.String(length=50), nullable=False), + sa.Column('description', sa.Text(), nullable=True), + sa.Column('created_at', sa.DateTime(), nullable=True), + sa.ForeignKeyConstraint(['growlog_id'], ['grow_logs.id'], ), + sa.ForeignKeyConstraint(['plant_id'], ['plant.id'], ), + sa.PrimaryKeyConstraint('id') + ) + 
op.create_table('submission_images', + sa.Column('id', sa.Integer(), nullable=False), + sa.Column('submission_id', sa.Integer(), nullable=False), + sa.Column('file_path', sa.String(length=255), nullable=False), + sa.Column('is_visible', sa.Boolean(), nullable=True), + sa.ForeignKeyConstraint(['submission_id'], ['submissions.id'], ), + sa.PrimaryKeyConstraint('id') + ) + op.create_table('featured_images', + sa.Column('id', sa.Integer(), nullable=False), + sa.Column('submission_image_id', sa.Integer(), nullable=False), + sa.Column('override_text', sa.String(length=255), nullable=True), + sa.Column('is_featured', sa.Boolean(), nullable=True), + sa.ForeignKeyConstraint(['submission_image_id'], ['submission_images.id'], ), + sa.PrimaryKeyConstraint('id') + ) + op.create_table('image_hearts', + sa.Column('id', sa.Integer(), nullable=False), + sa.Column('user_id', sa.Integer(), nullable=False), + sa.Column('submission_image_id', sa.Integer(), nullable=False), + sa.Column('created_at', sa.DateTime(), nullable=True), + sa.ForeignKeyConstraint(['submission_image_id'], ['submission_images.id'], ), + sa.ForeignKeyConstraint(['user_id'], ['users.id'], ), + sa.PrimaryKeyConstraint('id') + ) + op.create_table('media', + sa.Column('id', sa.Integer(), nullable=False), + sa.Column('file_url', sa.String(length=256), nullable=False), + sa.Column('uploaded_at', sa.DateTime(), nullable=True), + sa.Column('plant_id', sa.Integer(), nullable=True), + sa.Column('growlog_id', sa.Integer(), nullable=True), + sa.Column('update_id', sa.Integer(), nullable=True), + sa.Column('caption', sa.String(length=255), nullable=True), + sa.ForeignKeyConstraint(['growlog_id'], ['grow_logs.id'], ), + sa.ForeignKeyConstraint(['plant_id'], ['plant.id'], ), + sa.ForeignKeyConstraint(['update_id'], ['plant_updates.id'], ), + sa.PrimaryKeyConstraint('id') + ) + # ### end Alembic commands ### + + +def downgrade(): + # ### commands auto generated by Alembic - please adjust! ### + op.drop_table('media') + op.drop_table('image_hearts') + op.drop_table('featured_images') + op.drop_table('submission_images') + op.drop_table('plant_updates') + op.drop_table('submissions') + op.drop_table('plant_tags') + op.drop_table('plant_ownership_log') + op.drop_table('plant_lineage') + op.drop_table('grow_logs') + op.drop_table('plant') + op.drop_table('plant_scientific_name') + op.drop_table('users') + op.drop_table('tag') + op.drop_table('plant_common_name') + # ### end Alembic commands ### diff --git a/migrations/versions/50d5ff358f96_auto.py b/migrations/versions/50d5ff358f96_auto.py new file mode 100644 index 0000000..9586e25 --- /dev/null +++ b/migrations/versions/50d5ff358f96_auto.py @@ -0,0 +1,28 @@ +"""auto + +Revision ID: 50d5ff358f96 +Revises: 1c7cef84b4ae +Create Date: 2025-06-04 22:14:54.902029 + +""" +from alembic import op +import sqlalchemy as sa + + +# revision identifiers, used by Alembic. +revision = '50d5ff358f96' +down_revision = '1c7cef84b4ae' +branch_labels = None +depends_on = None + + +def upgrade(): + # ### commands auto generated by Alembic - please adjust! ### + pass + # ### end Alembic commands ### + + +def downgrade(): + # ### commands auto generated by Alembic - please adjust! 
### + pass + # ### end Alembic commands ### diff --git a/migrations/versions/58022b5ab921_auto.py b/migrations/versions/58022b5ab921_auto.py new file mode 100644 index 0000000..784f2a0 --- /dev/null +++ b/migrations/versions/58022b5ab921_auto.py @@ -0,0 +1,28 @@ +"""auto + +Revision ID: 58022b5ab921 +Revises: 50d5ff358f96 +Create Date: 2025-06-04 22:32:06.203591 + +""" +from alembic import op +import sqlalchemy as sa + + +# revision identifiers, used by Alembic. +revision = '58022b5ab921' +down_revision = '50d5ff358f96' +branch_labels = None +depends_on = None + + +def upgrade(): + # ### commands auto generated by Alembic - please adjust! ### + pass + # ### end Alembic commands ### + + +def downgrade(): + # ### commands auto generated by Alembic - please adjust! ### + pass + # ### end Alembic commands ### diff --git a/migrations/versions/761d0f8be3ff_auto.py b/migrations/versions/761d0f8be3ff_auto.py new file mode 100644 index 0000000..ad16b26 --- /dev/null +++ b/migrations/versions/761d0f8be3ff_auto.py @@ -0,0 +1,93 @@ +"""auto + +Revision ID: 761d0f8be3ff +Revises: ad9ea9d31b58 +Create Date: 2025-06-05 04:18:09.403526 + +""" +from alembic import op +import sqlalchemy as sa +from sqlalchemy.dialects import mysql + +# revision identifiers, used by Alembic. +revision = '761d0f8be3ff' +down_revision = 'ad9ea9d31b58' +branch_labels = None +depends_on = None + + +def upgrade(): + # ### commands auto generated by Alembic - please adjust! ### + op.drop_table('plant_lineage') + op.add_column('plant', sa.Column('updated_at', sa.DateTime(), nullable=True)) + op.drop_column('plant', 'transferred') + op.drop_column('plant', 'status') + op.drop_column('plant', 'is_verified') + op.drop_column('plant', 'graph_node_id') + op.drop_column('plant', 'notes') + op.add_column('plant_common_name', sa.Column('created_at', sa.DateTime(), nullable=True)) + op.alter_column('plant_common_name', 'name', + existing_type=mysql.VARCHAR(length=255), + type_=sa.String(length=128), + existing_nullable=False) + op.add_column('plant_ownership_log', sa.Column('date_acquired', sa.DateTime(), nullable=True)) + op.add_column('plant_ownership_log', sa.Column('transferred', sa.Boolean(), nullable=False)) + op.add_column('plant_ownership_log', sa.Column('graph_node_id', sa.String(length=255), nullable=True)) + op.add_column('plant_ownership_log', sa.Column('is_verified', sa.Boolean(), nullable=False)) + op.drop_column('plant_ownership_log', 'start_time') + op.drop_column('plant_ownership_log', 'transfer_note') + op.drop_column('plant_ownership_log', 'end_time') + op.add_column('plant_scientific_name', sa.Column('created_at', sa.DateTime(), nullable=True)) + op.alter_column('plant_scientific_name', 'name', + existing_type=mysql.VARCHAR(length=255), + type_=sa.String(length=256), + existing_nullable=False) + op.alter_column('tag', 'name', + existing_type=mysql.VARCHAR(length=255), + type_=sa.String(length=128), + existing_nullable=False) + # ### end Alembic commands ### + + +def downgrade(): + # ### commands auto generated by Alembic - please adjust! 
### + op.alter_column('tag', 'name', + existing_type=sa.String(length=128), + type_=mysql.VARCHAR(length=255), + existing_nullable=False) + op.alter_column('plant_scientific_name', 'name', + existing_type=sa.String(length=256), + type_=mysql.VARCHAR(length=255), + existing_nullable=False) + op.drop_column('plant_scientific_name', 'created_at') + op.add_column('plant_ownership_log', sa.Column('end_time', mysql.DATETIME(), nullable=True)) + op.add_column('plant_ownership_log', sa.Column('transfer_note', mysql.TEXT(), nullable=True)) + op.add_column('plant_ownership_log', sa.Column('start_time', mysql.DATETIME(), nullable=False)) + op.drop_column('plant_ownership_log', 'is_verified') + op.drop_column('plant_ownership_log', 'graph_node_id') + op.drop_column('plant_ownership_log', 'transferred') + op.drop_column('plant_ownership_log', 'date_acquired') + op.alter_column('plant_common_name', 'name', + existing_type=sa.String(length=128), + type_=mysql.VARCHAR(length=255), + existing_nullable=False) + op.drop_column('plant_common_name', 'created_at') + op.add_column('plant', sa.Column('notes', mysql.TEXT(), nullable=True)) + op.add_column('plant', sa.Column('graph_node_id', mysql.VARCHAR(length=255), nullable=True)) + op.add_column('plant', sa.Column('is_verified', mysql.TINYINT(display_width=1), autoincrement=False, nullable=False)) + op.add_column('plant', sa.Column('status', mysql.VARCHAR(length=50), nullable=False)) + op.add_column('plant', sa.Column('transferred', mysql.TINYINT(display_width=1), autoincrement=False, nullable=True)) + op.drop_column('plant', 'updated_at') + op.create_table('plant_lineage', + sa.Column('id', mysql.INTEGER(), autoincrement=True, nullable=False), + sa.Column('child_plant_id', mysql.INTEGER(), autoincrement=False, nullable=False), + sa.Column('parent_plant_id', mysql.INTEGER(), autoincrement=False, nullable=False), + sa.Column('type', mysql.VARCHAR(length=50), nullable=False), + sa.ForeignKeyConstraint(['child_plant_id'], ['plant.id'], name=op.f('plant_lineage_ibfk_1')), + sa.ForeignKeyConstraint(['parent_plant_id'], ['plant.id'], name=op.f('plant_lineage_ibfk_2')), + sa.PrimaryKeyConstraint('id'), + mysql_collate='utf8mb4_0900_ai_ci', + mysql_default_charset='utf8mb4', + mysql_engine='InnoDB' + ) + # ### end Alembic commands ### diff --git a/migrations/versions/806e94a40aeb_auto.py b/migrations/versions/806e94a40aeb_auto.py new file mode 100644 index 0000000..e210a63 --- /dev/null +++ b/migrations/versions/806e94a40aeb_auto.py @@ -0,0 +1,28 @@ +"""auto + +Revision ID: 806e94a40aeb +Revises: e1cdc5f78f5e +Create Date: 2025-06-05 01:11:25.968741 + +""" +from alembic import op +import sqlalchemy as sa + + +# revision identifiers, used by Alembic. +revision = '806e94a40aeb' +down_revision = 'e1cdc5f78f5e' +branch_labels = None +depends_on = None + + +def upgrade(): + # ### commands auto generated by Alembic - please adjust! ### + pass + # ### end Alembic commands ### + + +def downgrade(): + # ### commands auto generated by Alembic - please adjust! ### + pass + # ### end Alembic commands ### diff --git a/migrations/versions/93b893e47742_auto.py b/migrations/versions/93b893e47742_auto.py new file mode 100644 index 0000000..85febb0 --- /dev/null +++ b/migrations/versions/93b893e47742_auto.py @@ -0,0 +1,28 @@ +"""auto + +Revision ID: 93b893e47742 +Revises: b783b3b43713 +Create Date: 2025-06-05 02:37:12.714926 + +""" +from alembic import op +import sqlalchemy as sa + + +# revision identifiers, used by Alembic. 
+revision = '93b893e47742' +down_revision = 'b783b3b43713' +branch_labels = None +depends_on = None + + +def upgrade(): + # ### commands auto generated by Alembic - please adjust! ### + pass + # ### end Alembic commands ### + + +def downgrade(): + # ### commands auto generated by Alembic - please adjust! ### + pass + # ### end Alembic commands ### diff --git a/migrations/versions/9b93a2dffe81_auto.py b/migrations/versions/9b93a2dffe81_auto.py new file mode 100644 index 0000000..460b19b --- /dev/null +++ b/migrations/versions/9b93a2dffe81_auto.py @@ -0,0 +1,28 @@ +"""auto + +Revision ID: 9b93a2dffe81 +Revises: c1a4158c8226 +Create Date: 2025-06-05 01:29:51.402975 + +""" +from alembic import op +import sqlalchemy as sa + + +# revision identifiers, used by Alembic. +revision = '9b93a2dffe81' +down_revision = 'c1a4158c8226' +branch_labels = None +depends_on = None + + +def upgrade(): + # ### commands auto generated by Alembic - please adjust! ### + pass + # ### end Alembic commands ### + + +def downgrade(): + # ### commands auto generated by Alembic - please adjust! ### + pass + # ### end Alembic commands ### diff --git a/migrations/versions/9cff183551e1_auto.py b/migrations/versions/9cff183551e1_auto.py new file mode 100644 index 0000000..18f75e2 --- /dev/null +++ b/migrations/versions/9cff183551e1_auto.py @@ -0,0 +1,28 @@ +"""auto + +Revision ID: 9cff183551e1 +Revises: 48fee8a8a3be +Create Date: 2025-06-05 00:32:07.995675 + +""" +from alembic import op +import sqlalchemy as sa + + +# revision identifiers, used by Alembic. +revision = '9cff183551e1' +down_revision = '48fee8a8a3be' +branch_labels = None +depends_on = None + + +def upgrade(): + # ### commands auto generated by Alembic - please adjust! ### + pass + # ### end Alembic commands ### + + +def downgrade(): + # ### commands auto generated by Alembic - please adjust! ### + pass + # ### end Alembic commands ### diff --git a/migrations/versions/ad9ea9d31b58_auto.py b/migrations/versions/ad9ea9d31b58_auto.py new file mode 100644 index 0000000..70aabbe --- /dev/null +++ b/migrations/versions/ad9ea9d31b58_auto.py @@ -0,0 +1,28 @@ +"""auto + +Revision ID: ad9ea9d31b58 +Revises: 2a0b02a42543 +Create Date: 2025-06-05 03:05:30.311725 + +""" +from alembic import op +import sqlalchemy as sa + + +# revision identifiers, used by Alembic. +revision = 'ad9ea9d31b58' +down_revision = '2a0b02a42543' +branch_labels = None +depends_on = None + + +def upgrade(): + # ### commands auto generated by Alembic - please adjust! ### + pass + # ### end Alembic commands ### + + +def downgrade(): + # ### commands auto generated by Alembic - please adjust! ### + pass + # ### end Alembic commands ### diff --git a/migrations/versions/af76c66c9075_auto.py b/migrations/versions/af76c66c9075_auto.py new file mode 100644 index 0000000..8f969e5 --- /dev/null +++ b/migrations/versions/af76c66c9075_auto.py @@ -0,0 +1,28 @@ +"""auto + +Revision ID: af76c66c9075 +Revises: 58022b5ab921 +Create Date: 2025-06-04 22:44:12.056714 + +""" +from alembic import op +import sqlalchemy as sa + + +# revision identifiers, used by Alembic. +revision = 'af76c66c9075' +down_revision = '58022b5ab921' +branch_labels = None +depends_on = None + + +def upgrade(): + # ### commands auto generated by Alembic - please adjust! ### + pass + # ### end Alembic commands ### + + +def downgrade(): + # ### commands auto generated by Alembic - please adjust! 
### + pass + # ### end Alembic commands ### diff --git a/migrations/versions/b783b3b43713_auto.py b/migrations/versions/b783b3b43713_auto.py new file mode 100644 index 0000000..bbb68d7 --- /dev/null +++ b/migrations/versions/b783b3b43713_auto.py @@ -0,0 +1,28 @@ +"""auto + +Revision ID: b783b3b43713 +Revises: bfc7a6bd8abc +Create Date: 2025-06-05 02:07:18.572162 + +""" +from alembic import op +import sqlalchemy as sa + + +# revision identifiers, used by Alembic. +revision = 'b783b3b43713' +down_revision = 'bfc7a6bd8abc' +branch_labels = None +depends_on = None + + +def upgrade(): + # ### commands auto generated by Alembic - please adjust! ### + pass + # ### end Alembic commands ### + + +def downgrade(): + # ### commands auto generated by Alembic - please adjust! ### + pass + # ### end Alembic commands ### diff --git a/migrations/versions/b9c03e1ae0bf_auto.py b/migrations/versions/b9c03e1ae0bf_auto.py new file mode 100644 index 0000000..ae22c11 --- /dev/null +++ b/migrations/versions/b9c03e1ae0bf_auto.py @@ -0,0 +1,28 @@ +"""auto + +Revision ID: b9c03e1ae0bf +Revises: 9b93a2dffe81 +Create Date: 2025-06-05 01:37:57.483736 + +""" +from alembic import op +import sqlalchemy as sa + + +# revision identifiers, used by Alembic. +revision = 'b9c03e1ae0bf' +down_revision = '9b93a2dffe81' +branch_labels = None +depends_on = None + + +def upgrade(): + # ### commands auto generated by Alembic - please adjust! ### + pass + # ### end Alembic commands ### + + +def downgrade(): + # ### commands auto generated by Alembic - please adjust! ### + pass + # ### end Alembic commands ### diff --git a/migrations/versions/bfc7a6bd8abc_auto.py b/migrations/versions/bfc7a6bd8abc_auto.py new file mode 100644 index 0000000..0fa3d30 --- /dev/null +++ b/migrations/versions/bfc7a6bd8abc_auto.py @@ -0,0 +1,28 @@ +"""auto + +Revision ID: bfc7a6bd8abc +Revises: cc35036a6f94 +Create Date: 2025-06-05 01:57:23.973531 + +""" +from alembic import op +import sqlalchemy as sa + + +# revision identifiers, used by Alembic. +revision = 'bfc7a6bd8abc' +down_revision = 'cc35036a6f94' +branch_labels = None +depends_on = None + + +def upgrade(): + # ### commands auto generated by Alembic - please adjust! ### + pass + # ### end Alembic commands ### + + +def downgrade(): + # ### commands auto generated by Alembic - please adjust! ### + pass + # ### end Alembic commands ### diff --git a/migrations/versions/c1a4158c8226_auto.py b/migrations/versions/c1a4158c8226_auto.py new file mode 100644 index 0000000..2dbb0d3 --- /dev/null +++ b/migrations/versions/c1a4158c8226_auto.py @@ -0,0 +1,28 @@ +"""auto + +Revision ID: c1a4158c8226 +Revises: 806e94a40aeb +Create Date: 2025-06-05 01:16:54.451574 + +""" +from alembic import op +import sqlalchemy as sa + + +# revision identifiers, used by Alembic. +revision = 'c1a4158c8226' +down_revision = '806e94a40aeb' +branch_labels = None +depends_on = None + + +def upgrade(): + # ### commands auto generated by Alembic - please adjust! ### + pass + # ### end Alembic commands ### + + +def downgrade(): + # ### commands auto generated by Alembic - please adjust! 
### + pass + # ### end Alembic commands ### diff --git a/migrations/versions/cc35036a6f94_auto.py b/migrations/versions/cc35036a6f94_auto.py new file mode 100644 index 0000000..9341d7f --- /dev/null +++ b/migrations/versions/cc35036a6f94_auto.py @@ -0,0 +1,28 @@ +"""auto + +Revision ID: cc35036a6f94 +Revises: b9c03e1ae0bf +Create Date: 2025-06-05 01:45:09.251040 + +""" +from alembic import op +import sqlalchemy as sa + + +# revision identifiers, used by Alembic. +revision = 'cc35036a6f94' +down_revision = 'b9c03e1ae0bf' +branch_labels = None +depends_on = None + + +def upgrade(): + # ### commands auto generated by Alembic - please adjust! ### + pass + # ### end Alembic commands ### + + +def downgrade(): + # ### commands auto generated by Alembic - please adjust! ### + pass + # ### end Alembic commands ### diff --git a/migrations/versions/e1cdc5f78f5e_auto.py b/migrations/versions/e1cdc5f78f5e_auto.py new file mode 100644 index 0000000..55e1cd7 --- /dev/null +++ b/migrations/versions/e1cdc5f78f5e_auto.py @@ -0,0 +1,28 @@ +"""auto + +Revision ID: e1cdc5f78f5e +Revises: 2fa6feb17477 +Create Date: 2025-06-05 00:57:10.914714 + +""" +from alembic import op +import sqlalchemy as sa + + +# revision identifiers, used by Alembic. +revision = 'e1cdc5f78f5e' +down_revision = '2fa6feb17477' +branch_labels = None +depends_on = None + + +def upgrade(): + # ### commands auto generated by Alembic - please adjust! ### + pass + # ### end Alembic commands ### + + +def downgrade(): + # ### commands auto generated by Alembic - please adjust! ### + pass + # ### end Alembic commands ### diff --git a/plant.csv b/plant.csv new file mode 100644 index 0000000..642ac60 --- /dev/null +++ b/plant.csv @@ -0,0 +1,61 @@ +uuid,plant_type,name,scientific_name,mother_uuid +a290790f-1a7a-44e4-8563-e009df73cfe1,plant,Monstera,Monstera deliciosa, +83ce3667-9a47-4d02-9c76-ae438b99696d,plant,Thai Constellation Monstera,Monstera deliciosa 'Thai Constellation', +2ee2e0e7-69de-4b8f-abfe-4ed973c3d760,plant,Baltic Blue,Epipremnum pinnatum 'Baltic Blue', +abd9dbf7-e487-4d2e-9753-5f96bd757d30,cutting,Yellow Flame Pothos,Epipremnum pinnatum 'Yellow Flame', +e788375e-a90c-4442-b9ba-dbb7d54e4ffa,cutting,Yellow Flame Pothos,Epipremnum pinnatum 'Yellow Flame', +9359048e-42d7-44c1-a145-c71c5f7dde99,cutting,Yellow Flame Pothos ,Epipremnum pinnatum 'Yellow Flame', +84f7c394-025a-4a7d-8ba6-809001f2e9b1,cutting,Yellow Flame Pothos ,Epipremnum pinnatum 'Yellow Flame', +55e834e5-42c5-4d1e-b672-a229cf9fb385,cutting,Yellow Flame Pothos ,Epipremnum pinnatum 'Yellow Flame', +2b0f313d-bc4b-4ae3-a1b7-7a43c33183c8,cutting,Teruno Himiko Pothos,Epipremnum aureum 'Teruno Himiko', +9e9c54e8-b4d7-483d-829b-809a5ad56965,cutting,Jessenia Pothos ,Epipremnum aureum 'Jessenia', +cfd7f26a-fbcb-4858-bb68-9057bc02ce11,cutting,Jessenia Pothos ,Epipremnum aureum 'Jessenia', +8015f878-ce4b-43a0-93a4-77d27c788f90,cutting,Lemon Top Pothos,Epipremnum aureum 'Lemon Top', +61a75de2-6f15-4881-b476-805817406068,cutting,Lemon Top Pothos ,Epipremnum aureum 'Lemon Top', +7593deb2-ee82-4609-844d-d968ceefe86e,cutting,Snow Queen Pothos,Epipremnum aureum 'Snow Queen', +d4518e5f-555b-42ca-b6ae-23ae7cdf242e,cutting,Skeleton Key Pothos ,Epipremnum pinnatum 'Skeleton Key', +a26e98c8-8d9a-454f-bb55-7f86bd3c85a1,cutting,Skeleton Key Pothos ,Epipremnum pinnatum 'Skeleton Key', +b6f2d9a7-b7a9-4ee9-bd99-32eb1086a50a,cutting,Lemon Meringue Pothos ,Epipremnum aureum 'Lemon Meringue', +604e0128-2243-4a86-8ed5-f3f0d1f42375,cutting,Lemon Meringue Pothos ,Epipremnum aureum 'Lemon Meringue', 
+6d7c6be6-ebf6-474a-8529-beb119f78885,cutting,Champs-Élysées Pothos,Epipremnum aureum 'Champs-Élysées', +44b3db46-3e76-4121-8684-15c6aab3c941,cutting,Champs-Élysées Pothos,Epipremnum aureum 'Champs-Élysées', +992c7664-cdb0-4272-b91e-a883e2c7583e,cutting,Champs-Élysées Pothos,Epipremnum Aureum 'Champs-Élysées', +bac3ed6a-fc57-40bd-951c-4dfbefdf97a0,cutting,Hoya Carnosa,Hoya carnosa, +63752b15-f8d6-4243-90b0-e03c91b5ab00,cutting,Aurea Variegated Pothos,Epipremnum amplissimum 'aurea', +3c75a03c-85c8-4556-99bc-f9374649ac91,cutting,Aurea Variegated Pothos,Epipremnum amplissimum 'aurea', +7338f556-2667-4bb5-8e4d-93258b3e7629,cutting,Aurea Variegated Pothos,Epipremnum amplissimum 'aurea', +65fe9d39-4901-475f-b281-288470ce8bfc,tissue_culture,Philodendron Joepii,Philodendron × joepii, +0d3eb3b4-f91f-4223-aeae-8d07e376fb0f,tissue_culture,Philodendron Joepii,Philodendron × joepii, +0054998b-69e9-4b54-939d-28af973a5072,tissue_culture,Philodendron Joepii,Philodendron × joepii, +72765979-76c9-4cd5-9f32-c1447bd442c6,tissue_culture,Philodendron Joepii,Philodendron × joepii, +41f61e96-7aa0-4266-981a-bdc07dc1d73d,tissue_culture,Philodendron Joepii,Philodendron × joepii, +114701f1-6b7e-447e-902b-ac8eb7a77b1e,tissue_culture,Anthurium Warocqueanum Queen,Anthurium warocqueanum, +2bf6cf5b-02ef-4fd7-b2aa-4116d5a82210,tissue_culture,Anthurium Warocqueanum Queen,Anthurium warocqueanum, +053da33c-0491-40d0-ad45-0f4dfd3899cd,tissue_culture,Philodendron V. White Princess,Philodendron erubescens 'White Princess', +f14a61b1-3e79-4fd5-94b4-ca65fd6dabf4,tissue_culture,Philodendron V. White Princess,Philodendron erubescens 'White Princess', +1944c316-1a69-4708-95a2-e78ee310cfcf,tissue_culture,Philodendron V. White Princess,Philodendron erubescens 'White Princess', +18b16b54-bb05-4ab8-aedf-01f5a3ca0429,tissue_culture,Philodendron V. White Princess,Philodendron erubescens 'White Princess', +02d416eb-f1ef-43fd-901e-c67b9af5d04c,tissue_culture,Philodendron V. White Princess,Philodendron erubescens 'White Princess', +b9320c61-a2df-40b4-9998-f292f814ceec,tissue_culture,Philodendron V. Pink Princess,Philodendron erubescens 'Pink Princess', +1992b27a-2b12-42cd-ae1c-a152e64b742c,tissue_culture,Philodendron V. Pink Princess,Philodendron erubescens 'Pink Princess', +e9aa3ec5-d107-4769-bfb1-180b4013c5f2,tissue_culture,Philodendron V. Pink Princess,Philodendron erubescens 'Pink Princess', +e0f4a333-ae5c-4eaf-b5fc-6275d96b37d1,tissue_culture,Philodendron V. Pink Princess,Philodendron erubescens 'Pink Princess', +cf39b897-3fcd-45db-a421-653177d45f7e,tissue_culture,Philodendron V. Pink Princess,Philodendron erubescens 'Pink Princess', +a14ea46c-49d7-440b-8e7d-25be98fad0c2,tissue_culture,Philodendron V. 
Red Anderson,Philodendron erubescens 'Anderson', +5510456a-27de-49f5-87ca-1ce4c9eaa1a8,tissue_culture,Philodendron Goeldii Mint,Thaumatophyllum spruceanum 'Mint' , +7ac40dec-747c-48ec-b9f3-cdde9b45cc4c,plant,Albo Monstera,Monstera deliciosa 'Albo Variegata' , +f3ab0c30-c50a-4fdc-b9d3-df84a416fe45,cutting,Albo Monstera ,Monstera deliciosa 'Albo Variegata' , +6f1cbaec-8753-4ba8-b208-db132eba20af,cutting,Albo Monstera ,Monstera deliciosa 'Albo Variegata' , +05e4d1d7-66b1-4b51-ac8e-f9f29a6ffea4,cutting,Albo Monstera ,Monstera deliciosa 'Albo Variegata' , +10c05f27-94ed-4834-849c-ba56a1c0648c,cutting,Albo Monstera ,Monstera deliciosa 'Albo Variegata' , +c7d5ebad-6a90-4310-b5b9-50a8dc9d53b7,plant,Thai Constellation Monstera,Monstera deliciosa 'Thai Constellation', +9accd1a0-dae5-4fbb-8a11-f01b4a5079d3,plant,Thai Constellation Monstera,Monstera deliciosa 'Thai Constellation', +f419f936-a662-4ad8-8504-49d187e1029d,plant,Mint Monstera,Monstera deliciosa 'Mint', +b55eb046-1551-4073-9958-7bc52090344c,plant,Albo Monstera ,Monstera deliciosa 'Albo Variegata', +ce681d56-e777-431f-aea3-68f7b9a86cc1,plant,Albo Monstera ,Monstera deliciosa 'Albo Variegata', +f0b88046-1262-40a2-981c-7ce8d1ac2cf6,plant,Albo Monstera ,Monstera deliciosa 'Albo Variegata', +71a6b08b-7ec4-4c94-a558-25aaceb4f4a6,plant,Philodendron V. Pink Princess ,Philodendron erubescens 'Pink Princess', +cd49f25c-a397-4d93-b127-ab7fe5fb681f,plant,White Monstera Monstera,Monstera deliciosa 'White Monster', +c138540f-aca7-4753-bd49-fef0dcd15afa,plant,Monstera Dubia,Monstera dubia, +8b1059c8-8dd3-487a-af19-1eb548788e87,cutting,Baltic Blue,Epipremnum pinnatum 'Baltic Blue',2ee2e0e7-69de-4b8f-abfe-4ed973c3d760 +5646befb-36d0-444c-b531-6cca73128c59,cutting,Baltic Blue ,Epipremnum pinnatum 'Baltic Blue',2ee2e0e7-69de-4b8f-abfe-4ed973c3d760 \ No newline at end of file diff --git a/plugins/cli/seed.py b/plugins/cli/seed.py index 23293e3..7126c3d 100644 --- a/plugins/cli/seed.py +++ b/plugins/cli/seed.py @@ -5,13 +5,8 @@ from datetime import datetime, timedelta from app import db from plugins.auth.models import User from plugins.plant.models import ( - Plant, PlantCommonName, PlantScientificName, PlantOwnershipLog, - PlantLineage + Plant, PlantCommonName, PlantScientificName ) -from plugins.growlog.models import PlantUpdate -from plugins.media.models import Media, ImageHeart, FeaturedImage -from plugins.submission.models import Submission, SubmissionImage - @click.command(name='preload-data') # 🔧 changed from preload_data @with_appcontext @@ -38,12 +33,6 @@ def preload_data(auto=False): db.session.commit() # COMMON & SCIENTIFIC NAMES - #monstera_common = PlantCommonName(name='Monstera') - #deliciosa_sci = PlantScientificName(name='Monstera deliciosa') - #aurea_sci = PlantScientificName(name='Monstera aurea') - #db.session.add_all([monstera_common, deliciosa_sci, aurea_sci]) - #db.session.commit() - joepii_common = PlantCommonName(name='Philodendron Joepii') queen_common = PlantCommonName(name='Anthurium Warocqueanum Queen') thai_common = PlantCommonName(name='Thai Constellation Monstera') @@ -60,78 +49,5 @@ def preload_data(auto=False): db.session.add_all([joepii_sci, queen_sci, thai_sci, generic_sci]) db.session.commit() - # PLANTS - """ parent_plant = Plant( - common_name_id=monstera_common.id, - scientific_name_id=deliciosa_sci.id, - created_by_user_id=admin.id - ) - child_plant = Plant( - common_name_id=monstera_common.id, - scientific_name_id=aurea_sci.id, - created_by_user_id=user.id, - parent_id=1 - ) - db.session.add_all([parent_plant, 
child_plant]) - db.session.flush() - - # LINEAGE & OWNERSHIP - db.session.add(PlantLineage(parent_plant_id=parent_plant.id, child_plant_id=child_plant.id)) - db.session.add(PlantOwnershipLog( - plant_id=child_plant.id, - user_id=user.id, - date_acquired=datetime.utcnow() - timedelta(days=20) - )) - db.session.commit() - - # UPDATE & MEDIA - update = PlantUpdate( - plant_id=child_plant.id, - update_type='Repotted', - description='Moved to a 6" pot with a new moss pole.', - ) - db.session.add(update) - db.session.flush() - - db.session.add(Media( - file_url='uploads/demo_plant_update.jpg', - update_id=update.id, - caption='Freshly repotted.' - )) - db.session.commit() - - # SUBMISSION & IMAGE - submission = Submission( - user_id=user.id, - plant_id=child_plant.id, - common_name='Monstera', - scientific_name='Monstera aurea', - price=120.00, - source='Etsy', - height=45, - width=30, - leaf_count=5, - potting_mix='2:1:1 bark:pumice:coco', - container_size='6"', - health_status='Healthy', - notes='Some minor yellowing on one leaf.' - ) - db.session.add(submission) - db.session.flush() - - image = SubmissionImage( - submission_id=submission.id, - file_path='uploads/demo_submission.jpg', - is_visible=True - ) - db.session.add(image) - db.session.flush() - - db.session.add_all([ - ImageHeart(user_id=admin.id, submission_image_id=image.id), - FeaturedImage(submission_image_id=image.id, override_text='Gorgeous coloration', is_featured=True) - ]) - db.session.commit() """ - if not auto: click.echo("🎉 Demo data seeded successfully.") diff --git a/plugins/importer/routes.py b/plugins/importer/routes.py index cb3f84f..b0b322a 100644 --- a/plugins/importer/routes.py +++ b/plugins/importer/routes.py @@ -1,16 +1,25 @@ +# plugins/importer/routes.py + import csv import io import difflib + from flask import Blueprint, request, render_template, redirect, flash, session, url_for from flask_login import login_required, current_user -from app.neo4j_utils import get_neo4j_handler -from plugins.plant.models import db, Plant, PlantCommon, PlantScientific +from flask_wtf.csrf import generate_csrf -bp = Blueprint("importer", __name__, template_folder="templates") +from app.neo4j_utils import get_neo4j_handler +from plugins.plant.models import ( + db, + Plant, PlantCommonName, PlantScientificName, PlantOwnershipLog +) + +bp = Blueprint("importer", __name__, template_folder="templates", url_prefix="/import") REQUIRED_HEADERS = {"uuid", "plant_type", "name"} -@bp.route("/import/", methods=["GET", "POST"]) + +@bp.route("/", methods=["GET", "POST"]) @login_required def upload(): if request.method == "POST": @@ -24,48 +33,58 @@ def upload(): stream = io.StringIO(decoded) reader = csv.DictReader(stream) - headers = set(reader.fieldnames) - if not REQUIRED_HEADERS.issubset(headers): - flash(f"Missing required CSV headers: {REQUIRED_HEADERS - headers}", "error") + headers = set(reader.fieldnames or []) + missing = REQUIRED_HEADERS - headers + if missing: + flash(f"Missing required CSV headers: {missing}", "error") return redirect(request.url) session["pending_rows"] = [] review_list = [] - all_common = {c.name.lower(): c for c in PlantCommon.query.all()} - all_scientific = {s.name.lower(): s for s in PlantScientific.query.all()} + # Preload existing common/scientific names + all_common = {c.name.lower(): c for c in PlantCommonName.query.all()} + all_scientific = {s.name.lower(): s for s in PlantScientificName.query.all()} for row in reader: - uuid = row.get("uuid") - name = row.get("name", "").strip() - sci_name = 
row.get("scientific_name", "").strip() - plant_type = row.get("plant_type", "plant") - mother_uuid = row.get("mother_uuid", "").strip() + uuid_raw = row.get("uuid", "") + uuid = uuid_raw.strip().strip('"') - if not all([uuid, name, plant_type]): + name_raw = row.get("name", "") + name = name_raw.strip() + + sci_raw = row.get("scientific_name", "") + sci_name = sci_raw.strip() + + plant_type = row.get("plant_type", "").strip() or "plant" + + mother_raw = row.get("mother_uuid", "") + mother_uuid = mother_raw.strip().strip('"') + + # If any required field is missing, skip + if not (uuid and name and plant_type): continue - name_lc = name.lower() - sci_lc = sci_name.lower() + # Try fuzzy‐matching scientific names if needed suggested_match = None - original_input = sci_name + original_sci = sci_name + name_lc = name.lower() + sci_lc = sci_name.lower() - # Fuzzy match scientific name if sci_lc and sci_lc not in all_scientific: close = difflib.get_close_matches(sci_lc, all_scientific.keys(), n=1, cutoff=0.85) if close: suggested_match = all_scientific[close[0]].name - # Infer from common name if not sci_lc and name_lc in all_common: - sci_obj = PlantScientific.query.filter_by(common_id=all_common[name_lc].id).first() + sci_obj = PlantScientificName.query.filter_by(common_id=all_common[name_lc].id).first() if sci_obj: sci_name = sci_obj.name elif not sci_lc: close_common = difflib.get_close_matches(name_lc, all_common.keys(), n=1, cutoff=0.85) if close_common: match_name = close_common[0] - sci_obj = PlantScientific.query.filter_by(common_id=all_common[match_name].id).first() + sci_obj = PlantScientificName.query.filter_by(common_id=all_common[match_name].id).first() if sci_obj: suggested_match = sci_obj.name sci_name = sci_obj.name @@ -74,17 +93,17 @@ def upload(): "uuid": uuid, "name": name, "sci_name": sci_name, - "original_sci_name": original_input, + "original_sci_name": original_sci, "plant_type": plant_type, "mother_uuid": mother_uuid, "suggested_scientific_name": suggested_match, }) - if suggested_match and suggested_match != original_input: + if suggested_match and suggested_match != original_sci: review_list.append({ "uuid": uuid, "common_name": name, - "user_input": original_input or "(blank)", + "user_input": original_sci or "(blank)", "suggested_name": suggested_match }) @@ -92,44 +111,60 @@ def upload(): return redirect(url_for("importer.review")) except Exception as e: - flash(f"Import failed: {str(e)}", "error") + flash(f"Import failed: {e}", "error") + return redirect(request.url) - return render_template("importer/upload.html") + return render_template("importer/upload.html", csrf_token=generate_csrf()) -@bp.route("/import/review", methods=["GET", "POST"]) +@bp.route("/review", methods=["GET", "POST"]) @login_required def review(): - rows = session.get("pending_rows", []) + rows = session.get("pending_rows", []) review_list = session.get("review_list", []) + if request.method == "POST": - neo = get_neo4j_handler() + neo = get_neo4j_handler() added = 0 + # ————————————————————————————————————————————— + # (1) CREATE MySQL records & MERGE every Neo4j node + # ————————————————————————————————————————————— for row in rows: - uuid = row["uuid"] - name = row["name"] - sci_name = row["sci_name"] - user_input = row["original_sci_name"] - plant_type = row["plant_type"] - mother_uuid = row["mother_uuid"] - suggested = row.get("suggested_scientific_name") + uuid_raw = row["uuid"] + uuid = uuid_raw.strip().strip('"') - common = PlantCommon.query.filter_by(name=name).first() + name_raw = 
row["name"] + name = name_raw.strip() + + sci_raw = row["sci_name"] + sci_name = sci_raw.strip() + + plant_type = row["plant_type"].strip() + + mother_raw = row["mother_uuid"] + mother_uuid = mother_raw.strip().strip('"') + + suggested = row.get("suggested_scientific_name") + + # ——— MySQL: PlantCommonName ——— + common = PlantCommonName.query.filter_by(name=name).first() if not common: - common = PlantCommon(name=name) + common = PlantCommonName(name=name) db.session.add(common) db.session.flush() - accepted = request.form.get(f"confirm_{uuid}") - sci_name_to_use = suggested if (suggested and accepted) else sci_name + # ——— MySQL: PlantScientificName ——— + accepted = request.form.get(f"confirm_{uuid}") + sci_to_use = suggested if (suggested and accepted) else sci_name - scientific = PlantScientific.query.filter_by(name=sci_name_to_use).first() + scientific = PlantScientificName.query.filter_by(name=sci_to_use).first() if not scientific: - scientific = PlantScientific(name=sci_name_to_use, common_id=common.id) + scientific = PlantScientificName(name=sci_to_use, common_id=common.id) db.session.add(scientific) db.session.flush() + # ——— MySQL: Plant row ——— plant = Plant.query.filter_by(uuid=uuid).first() if not plant: plant = Plant( @@ -141,18 +176,60 @@ def review(): is_verified=bool(accepted) ) db.session.add(plant) + db.session.flush() # so plant.id is available immediately added += 1 - neo.create_plant_node(uuid, name) - if mother_uuid: - neo.create_plant_node(mother_uuid, "Parent") - neo.create_lineage(uuid, mother_uuid) + # ——— MySQL: Create initial ownership log entry ——— + log = PlantOwnershipLog( + plant_id = plant.id, + user_id = current_user.id, + date_acquired = datetime.utcnow(), + transferred = False, + is_verified = bool(accepted) + ) + db.session.add(log) + # ——— Neo4j: ensure a node exists for this plant UUID ——— + neo.create_plant_node(uuid, name) + + # Commit MySQL so that all Plant/OwnershipLog rows exist db.session.commit() + + + # ————————————————————————————————————————————— + # (2) CREATE Neo4j LINEAGE relationships (child → parent). 
(Unchanged) + # ————————————————————————————————————————————— + for row in rows: + child_raw = row.get("uuid", "") + child_uuid = child_raw.strip().strip('"') + + mother_raw = row.get("mother_uuid", "") + mother_uuid = mother_raw.strip().strip('"') + + print( + f"[DEBUG] row → child_raw={child_raw!r}, child_uuid={child_uuid!r}; " + f"mother_raw={mother_raw!r}, mother_uuid={mother_uuid!r}" + ) + + if mother_uuid: + neo.create_plant_node(mother_uuid, name="Unknown") + neo.create_lineage(child_uuid, mother_uuid) + else: + print(f"[DEBUG] Skipping LINEAGE creation for child {child_uuid!r} (no mother_uuid)") + + # (Optional) Check two known UUIDs + neo.debug_check_node("8b1059c8-8dd3-487a-af19-1eb548788e87") + neo.debug_check_node("2ee2e0e7-69de-4d8f-abfe-4ed973c3d760") + neo.close() - flash(f"{added} plants added.", "success") + flash(f"{added} plants added (MySQL) + Neo4j nodes/relations created.", "success") + session.pop("pending_rows", None) session.pop("review_list", None) return redirect(url_for("importer.upload")) - return render_template("importer/review.html", review_list=review_list) + return render_template( + "importer/review.html", + review_list=review_list, + csrf_token=generate_csrf() + ) diff --git a/plugins/importer/templates/importer/review.html b/plugins/importer/templates/importer/review.html index 89caa6d..45eb29c 100644 --- a/plugins/importer/templates/importer/review.html +++ b/plugins/importer/templates/importer/review.html @@ -1,43 +1,38 @@ {% extends "core_ui/base.html" %} -{% block title %}Review Scientific Names{% endblock %} - +{% block title %}Review Matches{% endblock %} {% block content %}

 <h2>🔍 Review Suggested Matches</h2>
 
-<form method="POST">
-  <table>
-    <thead>
-      <tr>
-        <th>Common Name</th>
-        <th>Suggested Scientific Name</th>
-        <th>Confirm?</th>
-      </tr>
-    </thead>
-    <tbody>
-      {% for row in review_list %}
-      <tr>
-        <td>{{ row.common_name }}</td>
-        <td>{{ row.suggested_name }}</td>
-        <td><input type="checkbox" name="confirm_{{ row.uuid }}"></td>
-      </tr>
-      {% endfor %}
-    </tbody>
-  </table>
-  <p>No suggestions were made. You can safely continue.</p>
-  <input type="submit" value="Submit">
-</form>
+{% if review_list %}
+<p>Confirm the suggested scientific name replacements below. Only confirmed matches will override user input.</p>
+
+<form method="POST">
+  <input type="hidden" name="csrf_token" value="{{ csrf_token }}">
+  <table>
+    <thead>
+      <tr>
+        <th>Common Name</th>
+        <th>User Input</th>
+        <th>Suggested Match</th>
+        <th>Confirm</th>
+      </tr>
+    </thead>
+    <tbody>
+      {% for row in review_list %}
+      <tr>
+        <td>{{ row.common_name }}</td>
+        <td>{{ row.user_input }}</td>
+        <td>{{ row.suggested_name }}</td>
+        <td><input type="checkbox" name="confirm_{{ row.uuid }}"></td>
+      </tr>
+      {% endfor %}
+    </tbody>
+  </table>
+  <input type="submit" value="Apply Confirmed Matches">
+</form>
+{% else %}
+<p>No matches found that need confirmation.</p>
+{% endif %}
{% endblock %} diff --git a/plugins/importer/templates/importer/upload.html b/plugins/importer/templates/importer/upload.html index 6f3e62c..3ff1139 100644 --- a/plugins/importer/templates/importer/upload.html +++ b/plugins/importer/templates/importer/upload.html @@ -1,10 +1,8 @@ {% extends "core_ui/base.html" %} {% block title %}CSV Import{% endblock %} - {% block content %}

 <h2>📤 Import Plant Data</h2>
-
 {% with messages = get_flashed_messages(with_categories=true) %}
   {% if messages %}
     {% for category, message in messages %}
@@ -15,14 +13,13 @@
       <div class="alert alert-{{ category }}">{{ message }}</div>
     {% endfor %}
   {% endif %}
 {% endwith %}
-
-<form method="POST" enctype="multipart/form-data">
+<form method="POST" enctype="multipart/form-data">
+  <input type="hidden" name="csrf_token" value="{{ csrf_token }}">
   <input type="file" name="file">
   <small>
-    Must include: uuid, plant_type, name<br>
+    Required: uuid, plant_type, name<br>
     Optional: scientific_name, mother_uuid
   </small>
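
For manual testing of the importer, a minimal CSV that satisfies the REQUIRED_HEADERS check could look like the sketch below. The UUIDs, names, and plant types are illustrative placeholders; scientific_name and mother_uuid may be left blank, and a non-empty mother_uuid is what triggers the Neo4j LINEAGE creation during the review step.

uuid,plant_type,name,scientific_name,mother_uuid
11111111-1111-1111-1111-111111111111,plant,Monstera,Monstera deliciosa,
22222222-2222-2222-2222-222222222222,cutting,Monstera,,11111111-1111-1111-1111-111111111111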
diff --git a/plugins/ownership/routes.py b/plugins/ownership/routes.py new file mode 100644 index 0000000..87802b1 --- /dev/null +++ b/plugins/ownership/routes.py @@ -0,0 +1,62 @@ +# plugins/ownership/routes.py + +from datetime import datetime + +from flask import Blueprint, request, jsonify, abort +from flask_login import login_required, current_user + +from plugins.plant.models import db, Plant, PlantOwnershipLog +from plugins.auth.models import User # Adjust import path if User lives elsewhere + +bp = Blueprint("ownership", __name__, url_prefix="/ownership") + + +@bp.route("/transfer/", methods=["POST"]) +@login_required +def transfer(plant_uuid): + """ + Transfer a plant from the current owner to another user: + Required JSON or form data: { "new_owner_id": } + """ + data = request.get_json() or request.form + new_owner_id = data.get("new_owner_id", None) + if not new_owner_id: + return jsonify({"error": "new_owner_id is required"}), 400 + + # 1) Fetch the plant by UUID + plant = Plant.query.filter_by(uuid=plant_uuid).first() + if not plant: + return jsonify({"error": "Plant not found"}), 404 + + # 2) Only current owner (or some admin) can transfer + if plant.owner_id != current_user.id: + return jsonify({"error": "Only the current owner can transfer this plant"}), 403 + + # 3) Verify the new owner exists + new_owner = User.query.get(new_owner_id) + if not new_owner: + return jsonify({"error": "New owner user not found"}), 404 + + # 4) Create a log entry before changing owner + log = PlantOwnershipLog( + plant_id = plant.id, + user_id = new_owner.id, + date_acquired = datetime.utcnow(), + transferred = True, + # If you want to store a reference to Neo4j node, set graph_node_id here. + graph_node_id = None, + is_verified = False + ) + db.session.add(log) + + # 5) Update the Plant.owner_id + plant.owner_id = new_owner.id + db.session.add(plant) + + db.session.commit() + + return jsonify({ + "message": f"Plant '{plant.uuid}' transferred to user {new_owner.username}.", + "plant_uuid": plant.uuid, + "new_owner_id": new_owner.id + }), 200 diff --git a/plugins/plant/models.py b/plugins/plant/models.py index 21c6ffa..f0a94fa 100644 --- a/plugins/plant/models.py +++ b/plugins/plant/models.py @@ -1,96 +1,129 @@ -from flask_sqlalchemy import SQLAlchemy -import uuid as uuid_lib +# plugins/plant/models.py + from datetime import datetime +import uuid as uuid_lib + +# Import the central SQLAlchemy instance, not a new one from app import db -# Association table for tags +# If your User model lives in plugins/auth/models.py, import it here: +from plugins.auth.models import User + +# ----------------------------- +# (We no longer need PlantLineage) +# ----------------------------- + +# Association table for tags (unchanged) plant_tags = db.Table( 'plant_tags', db.metadata, db.Column('plant_id', db.Integer, db.ForeignKey('plant.id'), primary_key=True), - db.Column('tag_id', db.Integer, db.ForeignKey('tag.id'), primary_key=True), + db.Column('tag_id', db.Integer, db.ForeignKey('tag.id'), primary_key=True), extend_existing=True ) class Tag(db.Model): __tablename__ = 'tag' __table_args__ = {'extend_existing': True} - - id = db.Column(db.Integer, primary_key=True) - name = db.Column(db.String(255), unique=True, nullable=False) + + id = db.Column(db.Integer, primary_key=True) + name = db.Column(db.String(128), unique=True, nullable=False) + # … any other columns you had … + class PlantCommonName(db.Model): __tablename__ = 'plant_common_name' __table_args__ = {'extend_existing': True} - - id = 
db.Column(db.Integer, primary_key=True) - name = db.Column(db.String(255), unique=True, nullable=False) + + id = db.Column(db.Integer, primary_key=True) + name = db.Column(db.String(128), unique=True, nullable=False) + created_at = db.Column(db.DateTime, default=datetime.utcnow) + + scientific_names = db.relationship( + 'plugins.plant.models.PlantScientificName', + backref=db.backref('common', lazy='joined'), + lazy=True, + cascade='all, delete-orphan' + ) + class PlantScientificName(db.Model): __tablename__ = 'plant_scientific_name' __table_args__ = {'extend_existing': True} - - id = db.Column(db.Integer, primary_key=True) - name = db.Column(db.String(255), unique=True, nullable=False) - common_id = db.Column(db.Integer, db.ForeignKey('plant_common_name.id'), nullable=False) -class PlantLineage(db.Model): - __tablename__ = 'plant_lineage' - __table_args__ = {'extend_existing': True} - - id = db.Column(db.Integer, primary_key=True) - child_plant_id = db.Column(db.Integer, db.ForeignKey('plant.id'), nullable=False) - parent_plant_id = db.Column(db.Integer, db.ForeignKey('plant.id'), nullable=False) - type = db.Column(db.String(50), nullable=False) # cutting, seed, division + id = db.Column(db.Integer, primary_key=True) + name = db.Column(db.String(256), unique=True, nullable=False) + common_id = db.Column(db.Integer, db.ForeignKey('plant_common_name.id'), nullable=False) + created_at = db.Column(db.DateTime, default=datetime.utcnow) + + plants = db.relationship( + 'plugins.plant.models.Plant', + backref='scientific', + lazy='dynamic' + ) + class PlantOwnershipLog(db.Model): __tablename__ = 'plant_ownership_log' __table_args__ = {'extend_existing': True} - - id = db.Column(db.Integer, primary_key=True) - plant_id = db.Column(db.Integer, db.ForeignKey('plant.id'), nullable=False) - user_id = db.Column(db.Integer, db.ForeignKey('users.id'), nullable=False) - start_time = db.Column(db.DateTime, nullable=False) - end_time = db.Column(db.DateTime, nullable=True) - transfer_note = db.Column(db.Text, nullable=True) + + id = db.Column(db.Integer, primary_key=True) + plant_id = db.Column(db.Integer, db.ForeignKey('plant.id'), nullable=False) + user_id = db.Column(db.Integer, db.ForeignKey('users.id'), nullable=False) + date_acquired = db.Column(db.DateTime, default=datetime.utcnow) + transferred = db.Column(db.Boolean, default=False, nullable=False) + graph_node_id = db.Column(db.String(255), nullable=True) # optional + is_verified = db.Column(db.Boolean, default=False, nullable=False) + + user = db.relationship('plugins.auth.models.User', backref='ownership_logs', lazy=True) + class Plant(db.Model): __tablename__ = 'plant' __table_args__ = {'extend_existing': True} - id = db.Column(db.Integer, primary_key=True) - uuid = db.Column(db.String(36), default=lambda: str(uuid_lib.uuid4()), unique=True, nullable=False) - custom_slug = db.Column(db.String(255), unique=True, nullable=True) + id = db.Column(db.Integer, primary_key=True) + uuid = db.Column(db.String(36), default=lambda: str(uuid_lib.uuid4()), unique=True, nullable=False) + custom_slug = db.Column(db.String(255), unique=True, nullable=True) - owner_id = db.Column(db.Integer, db.ForeignKey('users.id'), nullable=False) - common_id = db.Column(db.Integer, db.ForeignKey('plant_common_name.id'), nullable=False) - scientific_id = db.Column(db.Integer, db.ForeignKey('plant_scientific_name.id'), nullable=False) + owner_id = db.Column(db.Integer, db.ForeignKey('users.id'), nullable=False) + common_id = db.Column(db.Integer, 
db.ForeignKey('plant_common_name.id'), nullable=False) + scientific_id = db.Column(db.Integer, db.ForeignKey('plant_scientific_name.id'), nullable=False) - plant_type = db.Column(db.String(50), nullable=False) - status = db.Column(db.String(50), nullable=False, default='active') - notes = db.Column(db.Text, nullable=True) + plant_type = db.Column(db.String(50), nullable=False) + created_at = db.Column(db.DateTime, default=datetime.utcnow) + updated_at = db.Column(db.DateTime, onupdate=datetime.utcnow) - created_at = db.Column(db.DateTime, default=datetime.utcnow) - transferred = db.Column(db.Boolean, default=False) - graph_node_id = db.Column(db.String(255), nullable=True) - - is_verified = db.Column(db.Boolean, nullable=False, default=False) + updates = db.relationship( + 'plugins.growlog.models.PlantUpdate', + backref='plant', + lazy=True, + cascade='all, delete-orphan' + ) + tags = db.relationship( + 'plugins.plant.models.Tag', + secondary=plant_tags, + backref='plants', + lazy='dynamic' + ) - # Relationships - updates = db.relationship('PlantUpdate', backref='growlog', lazy=True) - lineage = db.relationship('PlantLineage', backref='child', lazy=True, foreign_keys='PlantLineage.child_plant_id') - tags = db.relationship('Tag', secondary=plant_tags, backref='plants') - - common_name = db.relationship( - 'PlantCommonName', - backref=db.backref('plants', lazy='dynamic'), - lazy=True - ) + common_name = db.relationship( + 'plugins.plant.models.PlantCommonName', + backref=db.backref('plants', lazy='dynamic'), + lazy=True + ) scientific_name = db.relationship( - 'PlantScientificName', - backref=db.backref('plants', lazy='dynamic'), - lazy=True - ) - -PlantCommon = PlantCommonName -PlantScientific = PlantScientificName + 'plugins.plant.models.PlantScientificName', + backref=db.backref('plants', lazy='dynamic'), + lazy=True + ) + + ownership_logs = db.relationship( + 'plugins.plant.models.PlantOwnershipLog', + backref='plant', + lazy=True, + cascade='all, delete-orphan' + ) + + def __repr__(self): + return f""
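
Because this revision removes PlantLineage, moves ownership bookkeeping onto plant_ownership_log (date_acquired, transferred, graph_node_id, is_verified), and drops several columns from plant, an existing database needs a schema migration before the importer and ownership routes will work. Assuming Flask-Migrate is wired up as in app/__init__.py, something along these lines should apply the change; the message text is only a placeholder.

flask db migrate -m "drop plant_lineage, rework plant and ownership log"
flask db upgrade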
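
As a rough smoke test for the new ownership transfer route, the sketch below drives it through Flask's test client. It assumes the endpoint is exposed at /ownership/transfer/<plant_uuid> (blueprint prefix plus the plant UUID as a path segment), that create_app() can build the app in this environment, and that the requesting user is already authenticated; login_required will otherwise redirect to auth.login. The UUID and user id are placeholders.

# Hypothetical smoke test for the ownership transfer route.
# Assumes an authenticated session; the login step is omitted here.
from app import create_app

app = create_app()

with app.test_client() as client:
    resp = client.post(
        "/ownership/transfer/11111111-1111-1111-1111-111111111111",  # placeholder UUID
        json={"new_owner_id": 2},  # placeholder user id
    )
    print(resp.status_code, resp.get_json())
    # Per the route above: 400 if new_owner_id is missing, 404 if the plant
    # or the new owner does not exist, 403 if the caller is not the current
    # owner, and 200 with a confirmation payload on success.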