2025-07-09 01:05:45 -05:00
parent 1bbe6e2743
commit d7a610a83b
113 changed files with 1512 additions and 2348 deletions


@ -1,4 +1,4 @@
# File: plugins/utility/routes.py
# Standard library
import csv
@ -34,9 +34,8 @@ from plugins.plant.models import (
PlantOwnershipLog,
)
from plugins.media.models import Media
from plugins.utility.models import ImportBatch
from plugins.utility.tasks import import_text_data
bp = Blueprint(
'utility',
@ -52,18 +51,28 @@ def index():
return redirect(url_for("utility.upload"))
@bp.route("/imports", methods=["GET"])
@login_required
def imports():
batches = (
ImportBatch.query
.filter_by(user_id=current_user.id)
.order_by(ImportBatch.imported_at.desc())
.limit(20)
.all()
)
return render_template("utility/imports.html", batches=batches)
# ────────────────────────────────────────────────────────────────────────────────
# Required headers for your sub-app export ZIP
PLANT_HEADERS = [
"UUID","Type","Name","Scientific Name",
"Vendor Name","Price","Mother UUID","Notes",
"Short ID"
]
MEDIA_HEADERS = [
"Plant UUID","Image Path","Uploaded At","Source Type"
]
# Headers for standalone CSV review flow
REQUIRED_HEADERS = {"uuid", "plant_type", "name", "scientific_name", "mother_uuid"}
@ -84,6 +93,7 @@ def upload():
file.save(tmp_zip.name)
tmp_zip.close()
# validate ZIP
try:
z = zipfile.ZipFile(tmp_zip.name)
except zipfile.BadZipFile:
@ -97,6 +107,7 @@ def upload():
flash("ZIP must contain both plants.csv and media.csv", "danger")
return redirect(request.url)
# extract export_id from metadata
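    # metadata.txt is the "key,value" text written by export_data below,
    # e.g. a first line of: export_id,<uuid>_<unix-timestamp>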
export_id = None
if "metadata.txt" in names:
meta = z.read("metadata.txt").decode("utf-8", "ignore")
@ -109,23 +120,37 @@ def upload():
flash("metadata.txt missing or missing export_id", "danger")
return redirect(request.url)
# prevent duplicates
if ImportBatch.query.filter_by(export_id=export_id, user_id=current_user.id).first():
os.remove(tmp_zip.name)
flash("This export has already been imported.", "info")
return redirect(request.url)
# record batch
batch = ImportBatch(
        export_id   = export_id,
        user_id     = current_user.id,
        imported_at = datetime.utcnow(),
        status      = 'pending'
)
db.session.add(batch)
db.session.commit()
# hand off to Celery
try:
import_text_data.delay(tmp_zip.name, "zip", batch.id)
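        # import_text_data(path, kind, batch_id) is assumed (its body lives in
        # plugins/utility/tasks.py, not shown here) to re-open the saved ZIP,
        # run the same import as the inline fallback below, and update the batch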
flash("ZIP received; import queued in background.", "success")
return redirect(request.url)
except Exception:
current_app.logger.exception("Failed to enqueue import_text_data")
flash("Failed to queue import job; falling back to inline import", "warning")
# ── Fallback: inline import ─────────────────────────────────────────
tmpdir = tempfile.mkdtemp()
z.extractall(tmpdir)
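    # tmpdir now holds plants.csv, media.csv, and an images/ tree laid out
    # exactly as export_data (below) wrote them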
    # load and validate plants.csv
plant_path = os.path.join(tmpdir, "plants.csv")
with open(plant_path, newline="", encoding="utf-8-sig") as pf:
reader = csv.DictReader(pf)
@ -137,7 +162,7 @@ def upload():
return redirect(request.url)
plant_rows = list(reader)
    # load and validate media.csv
media_path = os.path.join(tmpdir, "media.csv")
with open(media_path, newline="", encoding="utf-8-sig") as mf:
mreader = csv.DictReader(mf)
@ -149,113 +174,129 @@ def upload():
return redirect(request.url)
media_rows = list(mreader)
    # import plants (first pass: set mother_uuid only when the parent is already imported)
    neo          = get_neo4j_handler()
    plant_map    = {}
    added_plants = 0
for row in plant_rows:
# common name
common = PlantCommonName.query.filter_by(name=row["Name"]).first()
if not common:
common = PlantCommonName(name=row["Name"])
db.session.add(common)
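            # flush (not commit) so common.id is assigned for the FKs below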
db.session.flush()
        # scientific name
        scientific = PlantScientificName.query.filter_by(name=row["Scientific Name"]).first()
if not scientific:
scientific = PlantScientificName(
name=row["Scientific Name"],
common_id=common.id
name = row["Scientific Name"],
common_id = common.id
)
db.session.add(scientific)
db.session.flush()
        raw_mu        = row.get("Mother UUID") or None
        mu_for_insert = raw_mu if raw_mu in plant_map else None
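        # set mother_uuid on insert only when the parent row was already seen;
        # forward references are filled in by the second pass below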
p = Plant(
uuid=row["UUID"],
common_id=common.id,
scientific_id=scientific.id,
plant_type=row["Type"],
owner_id=current_user.id,
vendor_name=row["Vendor Name"] or None,
price=float(row["Price"]) if row["Price"] else None,
mother_uuid=mu_for_insert,
notes=row["Notes"] or None,
short_id=(row.get("Short ID") or None),
data_verified=True
uuid = row["UUID"],
common_id = common.id,
scientific_id = scientific.id,
plant_type = row["Type"],
owner_id = current_user.id,
vendor_name = row["Vendor Name"] or None,
price = float(row["Price"]) if row["Price"] else None,
mother_uuid = mu_for_insert,
notes = row["Notes"] or None,
short_id = row.get("Short ID") or None,
data_verified = True
)
db.session.add(p)
db.session.flush()
plant_map[p.uuid] = p.id
log = PlantOwnershipLog(
            plant_id      = p.id,
            user_id       = current_user.id,
            date_acquired = datetime.utcnow(),
            transferred   = False,
            is_verified   = True
)
db.session.add(log)
neo.create_plant_node(p.uuid, row["Name"])
if raw_mu:
            neo.create_lineage(child_uuid=p.uuid, parent_uuid=raw_mu)
added_plants += 1
db.session.commit()
    # second pass: backfill mother_uuid now that every plant row exists
    for row in plant_rows:
        if row.get("Mother UUID"):
            Plant.query.filter_by(uuid=row["UUID"]).update({
                'mother_uuid': row["Mother UUID"]
            })
db.session.commit()
    # import media images
added_media = 0
for mrow in media_rows:
        puuid = mrow["Plant UUID"]
        pid   = plant_map.get(puuid)
        if not pid:
continue
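        # Image Path was recorded under the exporter's uploads/ root, so the
        # part after 'uploads/' maps onto this archive's images/ tree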
subpath = mrow["Image Path"].split('uploads/', 1)[-1]
        src = os.path.join(tmpdir, "images", subpath)
if not os.path.isfile(src):
continue
try:
# build FileStorage for convenience
with open(src, "rb") as f:
                fs = FileStorage(
                    stream       = io.BytesIO(f.read()),
                    filename     = os.path.basename(subpath),
                    content_type = 'image/jpeg'
)
# now save to our UPLOAD_FOLDER
now = datetime.utcnow()
secure_name = secure_filename(fs.filename)
storage_dir = os.path.join(
current_app.config["UPLOAD_FOLDER"],
str(current_user.id),
now.strftime("%Y/%m/%d")
)
os.makedirs(storage_dir, exist_ok=True)
unique_name = f"{uuid.uuid4().hex}_{secure_name}"
full_path = os.path.join(storage_dir, unique_name)
fs.save(full_path)
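            # file_url mirrors the direct-upload flow below:
            # /<user_id>/<Y/m/d>/<unique_name>, relative to the uploads mount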
file_url = f"/{current_user.id}/{now.strftime('%Y/%m/%d')}/{unique_name}"
media = Media(
plugin = "plant",
related_id = pid,
filename = unique_name,
uploaded_at = datetime.fromisoformat(mrow["Uploaded At"]),
uploader_id = current_user.id,
caption = mrow["Source Type"],
plant_id = pid,
created_at = datetime.fromisoformat(mrow["Uploaded At"]),
file_url = file_url
)
db.session.add(media)
added_media += 1
except Exception as e:
current_app.logger.warning(f"Failed to import media file: {subpath}{e}")
current_app.logger.debug(traceback.format_exc())
db.session.commit()
@ -282,9 +323,8 @@ def upload():
session["pending_rows"] = []
review_list = []
    all_common = {c.name.lower(): c for c in PlantCommonName.query.all()}
    all_sci    = {s.name.lower(): s for s in PlantScientificName.query.all()}
for row in reader:
uuid_val = row.get("uuid", "").strip().strip('"')
@ -297,23 +337,19 @@ def upload():
continue
suggestions = difflib.get_close_matches(
            sci_name.lower(), list(all_sci.keys()),
            n=1, cutoff=0.8
        )
        suggested = None
        if suggestions and suggestions[0] != sci_name.lower():
            suggested = all_sci[suggestions[0]].name
item = {
"uuid": uuid_val,
"name": name,
"sci_name": sci_name,
"suggested": suggested,
"plant_type": plant_type,
"uuid": uuid_val,
"name": name,
"sci_name": sci_name,
"suggested": suggested,
"plant_type": plant_type,
"mother_uuid": mother_uuid
}
review_list.append(item)
@ -321,40 +357,41 @@ def upload():
session["review_list"] = review_list
return redirect(url_for("utility.review"))
# ── Direct Media Upload Flow ───────────────────────────────────────
plugin = request.form.get("plugin", "")
related_id = request.form.get("related_id", 0)
plant_id = request.form.get("plant_id", None)
growlog_id = request.form.get("growlog_id", None)
caption = request.form.get("caption", None)
    now          = datetime.utcnow()
    unique_id    = uuid.uuid4().hex
    secure_name  = secure_filename(file.filename)
    storage_path = os.path.join(
current_app.config["UPLOAD_FOLDER"],
str(current_user.id),
now.strftime("%Y/%m/%d")
)
os.makedirs(storage_path, exist_ok=True)
    unique_name = f"{unique_id}_{secure_name}"
    full_path   = os.path.join(storage_path, unique_name)
    file.save(full_path)
file_url = f"/{current_user.id}/{now.strftime('%Y/%m/%d')}/{unique_id}_{secure_name}"
file_url = f"/{current_user.id}/{now.strftime('%Y/%m/%d')}/{unique_name}"
media = Media(
        plugin      = plugin,
        related_id  = related_id,
        filename    = unique_name,
        uploaded_at = now,
        uploader_id = current_user.id,
        caption     = caption,
        plant_id    = plant_id,
        growlog_id  = growlog_id,
        created_at  = now,
        file_url    = file_url
)
db.session.add(media)
db.session.commit()
@ -372,22 +409,21 @@ def review():
review_list = session.get("review_list", [])
if request.method == "POST":
        neo   = get_neo4j_handler()
        added = 0
all_common = {c.name.lower(): c for c in PlantCommonName.query.all()}
all_scientific = {s.name.lower(): s for s in PlantScientificName.query.all()}
for row in rows:
uuid_val = row.get("uuid")
name = row.get("name")
sci_name = row.get("sci_name")
suggested = row.get("suggested")
plant_type = row.get("plant_type")
mother_uuid = row.get("mother_uuid")
accepted = request.form.get(f"confirm_{uuid_val}")
uuid_val = row["uuid"]
name = row["name"]
sci_name = row["sci_name"]
suggested = row["suggested"]
plant_type = row["plant_type"]
mother_uuid = row["mother_uuid"]
accepted = request.form.get(f"confirm_{uuid_val}") == "yes"
# handle names
common = PlantCommonName.query.filter_by(name=name).first()
if not common:
common = PlantCommonName(name=name)
@ -395,7 +431,7 @@ def review():
db.session.flush()
all_common[common.name.lower()] = common
            use_name = suggested if (suggested and accepted) else sci_name
scientific = PlantScientificName.query.filter_by(name=use_name).first()
if not scientific:
scientific = PlantScientificName(
@ -407,7 +443,6 @@ def review():
all_scientific[scientific.name.lower()] = scientific
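            # verified: either no correction was suggested, or the user
            # explicitly accepted the suggested scientific name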
verified = not suggested or (suggested and accepted)
plant = Plant.query.filter_by(uuid=uuid_val).first()
if not plant:
plant = Plant(
@ -454,22 +489,18 @@ def review():
@bp.route('/export_data', methods=['GET'])
@login_required
def export_data():
# Unique export identifier
export_id = f"{uuid.uuid4()}_{int(datetime.utcnow().timestamp())}"
    plants = (
        Plant.query.filter_by(owner_id=current_user.id)
        .order_by(Plant.id).all()
)
    # build plants.csv
plant_io = io.StringIO()
pw = csv.writer(plant_io)
pw.writerow([
        'UUID','Type','Name','Scientific Name',
        'Vendor Name','Price','Mother UUID','Notes'
])
for p in plants:
pw.writerow([
@ -477,26 +508,23 @@ def export_data():
p.plant_type,
p.common_name.name if p.common_name else '',
p.scientific_name.name if p.scientific_name else '',
            getattr(p, 'vendor_name','') or '',
            getattr(p, 'price','') or '',
p.mother_uuid or '',
p.notes or ''
])
plants_csv = plant_io.getvalue()
    # build media.csv
    media_records = (
        Media.query.filter(
            Media.uploader_id == current_user.id,
            Media.plant_id.isnot(None)
        ).order_by(Media.id).all()
    )
media_io = io.StringIO()
mw = csv.writer(media_io)
    mw.writerow(['Plant UUID','Image Path','Uploaded At','Source Type'])
for m in media_records:
mw.writerow([
m.plant.uuid,
@ -506,9 +534,9 @@ def export_data():
])
media_csv = media_io.getvalue()
    # assemble ZIP (images pulled from UPLOAD_FOLDER)
    zip_buf = io.BytesIO()
    with zipfile.ZipFile(zip_buf, 'w', zipfile.ZIP_DEFLATED) as zf:
meta = (
f"export_id,{export_id}\n"
f"user_id,{current_user.id}\n"
@ -517,19 +545,17 @@ def export_data():
zf.writestr('metadata.txt', meta)
zf.writestr('plants.csv', plants_csv)
zf.writestr('media.csv', media_csv)
media_root = current_app.config['UPLOAD_FOLDER']
for m in media_records:
            rel = m.file_url.split('uploads/', 1)[-1]
abs_path = os.path.join(media_root, rel)
if os.path.isfile(abs_path):
arcname = os.path.join('images', rel)
zf.write(abs_path, arcname)
zip_buf.seek(0)
    safe_email = re.sub(r'\W+', '_', current_user.email)
    filename   = f"{safe_email}_export_{export_id}.zip"
return send_file(
zip_buf,
mimetype='application/zip',
@ -538,39 +564,45 @@ def export_data():
)
# ────────────────────────────────────────────────────────────────────────────────
# QR-Code Generation Helpers & Routes
# ────────────────────────────────────────────────────────────────────────────────
def generate_label_with_name(qr_url, name, filename):
from PIL import Image, ImageDraw, ImageFont
import qrcode
from qrcode.image.pil import PilImage
from qrcode.constants import ERROR_CORRECT_H
    from flask import send_file
# Generate QR code
    qr = qrcode.QRCode(
        version=2,
        error_correction=ERROR_CORRECT_H,
        box_size=10,
        border=1
    )
qr.add_data(qr_url)
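    # fit=True lets the library raise the symbol version above 2 if the URL
    # needs more room at this error-correction level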
qr.make(fit=True)
    qr_img = qr.make_image(
        image_factory=PilImage,
        fill_color="black",
        back_color="white"
    ).convert("RGB")
# Create 1.5"x1.5" canvas at 300 DPI
    dpi       = 300
    label_px  = int(1.5 * dpi)
    label_img = Image.new("RGB", (label_px, label_px), "white")
# Resize QR code
    qr_size = 350
    qr_img  = qr_img.resize((qr_size, qr_size), Image.LANCZOS)
    qr_x    = (label_px - qr_size) // 2
label_img.paste(qr_img, (qr_x, 10))
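    # layout check: 1.5 in at 300 DPI is a 450 px canvas; the 350 px QR sits
    # at x = (450 - 350) // 2 = 50 and ends at y = 360, just above the caption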
# Load font
    font_path = os.path.abspath(
        os.path.join(
            current_app.root_path, '..', 'font', 'ARIALLGT.TTF'
        )
    )
    draw      = ImageDraw.Draw(label_img)
    name      = (name or '').strip()
    font_size = 28
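    # shrink-to-fit: step the font down toward size 10; if the name is still
    # too wide, the loop below drops trailing characters and appends an ellipsis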
while font_size > 10:
try:
font = ImageFont.truetype(font_path, font_size)
@ -585,7 +617,6 @@ def generate_label_with_name(qr_url, name, filename):
name = name[:-1]
name += ""
# Draw text centered
text_x = (label_px - draw.textlength(name, font=font)) // 2
text_y = 370
draw.text((text_x, text_y), name, font=font, fill="black")
@ -605,27 +636,23 @@ def generate_label_with_name(qr_url, name, filename):
@bp.route('/download_qr/<string:uuid_val>', methods=['GET'])
@login_required
def download_qr(uuid_val):
# Private “Direct QR” → f/<short_id> on plant.cards
p = Plant.query.filter_by(uuid=uuid_val, owner_id=current_user.id).first_or_404()
    if not p.short_id:
p.short_id = Plant.generate_short_id()
db.session.commit()
    base     = current_app.config.get('PLANT_CARDS_BASE_URL', 'https://plant.cards')
    qr_url   = f"{base}/f/{p.short_id}"
filename = f"{p.short_id}.png"
return generate_label_with_name(qr_url, p.common_name.name, filename)
@bp.route('/download_qr_card/<string:uuid_val>', methods=['GET'])
def download_qr_card(uuid_val):
# Public “Card QR” → /<short_id> on plant.cards
p = Plant.query.filter_by(uuid=uuid_val).first_or_404()
    if not p.short_id:
p.short_id = Plant.generate_short_id()
db.session.commit()
    base     = current_app.config.get('PLANT_CARDS_BASE_URL', 'https://plant.cards')
    qr_url   = f"{base}/{p.short_id}"
filename = f"{p.short_id}_card.png"
return generate_label_with_name(qr_url, p.common_name.name, filename)