From 769ca5c7f9ce528fff7ada4369d80cc18b2d66d6 Mon Sep 17 00:00:00 2001
From: "(major) john (major)"
Date: Wed, 2 Jul 2025 23:59:38 -0700
Subject: [PATCH 1/7] allow duplicate s3 uri

---
 bloom_lims/bobjs.py | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/bloom_lims/bobjs.py b/bloom_lims/bobjs.py
index dbde239..aae170f 100644
--- a/bloom_lims/bobjs.py
+++ b/bloom_lims/bobjs.py
@@ -3194,8 +3194,8 @@ def add_file_data(
             b_sub_type="generic",
         )
         if len(existing_euids) > 0:
-            raise Exception(
-                f"Remote file with URI {s3_uri} already exists in the database as {existing_euids}."
+            self.logger.warning(
+                f"Remote file with URI {s3_uri} already exists in the database as {existing_euids}. Creating a new record anyway."
             )
 
         file_properties = {

From 5c078fbb25f555dfedf48d956b97183fca7316da Mon Sep 17 00:00:00 2001
From: "(major) john (major)"
Date: Thu, 3 Jul 2025 00:01:28 -0700
Subject: [PATCH 2/7] Use datestamp for fileset name and show link in report

---
 main.py                           | 17 ++++++++++++-----
 templates/create_file_report.html |  7 +++++++
 2 files changed, 19 insertions(+), 5 deletions(-)

diff --git a/main.py b/main.py
index cca032d..c08d191 100644
--- a/main.py
+++ b/main.py
@@ -1940,10 +1940,12 @@ async def delete_edge(request: Request, _auth=Depends(require_auth)):
 
 
 def generate_unique_upload_key():
-    color = random.choice(BVARS.pantone_colors)
-    invertebrate = random.choice(BVARS.marine_invertebrates)
-    number = random.randint(0, 1000000)
-    return f"{color.replace(' ','_')}_{invertebrate.replace(' ','_')}_{number}"
+    """Return a datestamp for default file set names.
+
+    The datestamp format follows ``YYYYMMDDTHHMMSS`` as required when a
+    file set name isn't explicitly provided.
+    """
+    return datetime.utcnow().strftime("%Y%m%dT%H%M%S")
 
 
 
@@ -2276,7 +2278,12 @@ async def create_file(
     user_data = request.session.get("user_data", {})
     style = {"skin_css": user_data.get("style_css", "static/skins/bloom.css")}
     content = templates.get_template("create_file_report.html").render(
-        request=request, results=results, style=style, udat=user_data
+        request=request,
+        results=results,
+        style=style,
+        udat=user_data,
+        file_set_euid=new_file_set.euid,
+        file_set_name=file_set_name,
     )
     return HTMLResponse(content=content)
 
diff --git a/templates/create_file_report.html b/templates/create_file_report.html
index 7a5d194..a2a2500 100644
--- a/templates/create_file_report.html
+++ b/templates/create_file_report.html
@@ -73,6 +73,13 @@
 {% include 'bloom_header.html' %}
 
     Create File Report
 
+    {% if file_set_euid %}
+    File Set:
+    {{ file_set_name }}
+    {% endif %}
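Patch 2 swaps the random color/invertebrate key for a plain UTC datestamp, so an unnamed file set now gets a name like "20250703T000128". One caveat worth noting: datetime.utcnow() is deprecated as of Python 3.12. A timezone-aware sketch of the same behaviour (a possible alternative, not what the patch ships):

    from datetime import datetime, timezone

    def generate_unique_upload_key():
        # Same YYYYMMDDTHHMMSS stamp, e.g. "20250703T000128", built from an
        # aware UTC datetime instead of the deprecated utcnow().
        return datetime.now(timezone.utc).strftime("%Y%m%dT%H%M%S")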
From d933df2bcdd070a0d57ef2080ab4f04189d001b5 Mon Sep 17 00:00:00 2001
From: "(major) john (major)"
Date: Thu, 3 Jul 2025 00:04:31 -0700
Subject: [PATCH 3/7] Add database backup and restore controls

---
 main.py              | 66 ++++++++++++++++++++++++++++++++++++++++++--
 templates/admin.html | 34 +++++++++++++++++++++++
 2 files changed, 98 insertions(+), 2 deletions(-)

diff --git a/main.py b/main.py
index cca032d..2cebe80 100644
--- a/main.py
+++ b/main.py
@@ -7,6 +7,7 @@
 from typing import List
 from pathlib import Path
 import random
+import asyncio
 import csv
 import os
 
@@ -122,6 +123,10 @@ def setup_logging():
 
 from auth.supabase.connection import create_supabase_client
 
+# Lock to serialize backup and restore operations
+db_lock = asyncio.Lock()
+
+
 # local udata prefernces
 UDAT_FILE = "./etc/udat.json"
 # Create if not exists
@@ -311,6 +316,30 @@ async def get_relationship_data(obj):
     return relationship_data
 
 
+def _pg_env():
+    env = os.environ.copy()
+    env.setdefault("PGHOST", "localhost")
+    env.setdefault("PGPORT", "5445")
+    env.setdefault("PGUSER", env.get("USER", "bloom"))
+    env.setdefault("PGPASSWORD", env.get("PGPASSWORD", "passw0rd"))
+    env.setdefault("PGDBNAME", env.get("PGDBNAME", "bloom"))
+    return env
+
+
+def pg_dump_file(out_path: Path):
+    env = _pg_env()
+    cmd = ["pg_dump", "-Fp", env["PGDBNAME"]]
+    with open(out_path, "w") as fh:
+        subprocess.run(cmd, stdout=fh, check=True, env=env)
+
+
+def pg_restore_file(sql_path: Path):
+    env = _pg_env()
+    cmd = ["psql", env["PGDBNAME"]]
+    with open(sql_path, "r") as fh:
+        subprocess.run(cmd, stdin=fh, check=True, env=env)
+
+
 class RequireAuthException(HTTPException):
     def __init__(self, detail: str):
         super().__init__(status_code=403, detail=detail)
@@ -804,6 +833,13 @@ async def admin(request: Request, _auth=Depends(require_auth), dest="na"):
 
     ]  # Get just the file names
     printer_info["style_css"] = csss
+
+    backup_path = user_data.get("db_backup_path", "./db_backups")
+    if os.path.isdir(backup_path):
+        backup_files = sorted([p.name for p in Path(backup_path).glob("*.sql")], reverse=True)
+    else:
+        backup_files = []
+
     style = {"skin_css": user_data.get("style_css", "static/skins/bloom.css")}
 
     # Rendering the template with the dynamic content
@@ -813,6 +849,8 @@ async def admin(request: Request, _auth=Depends(require_auth), dest="na"):
         user_data=user_data,
         printer_info=printer_info,
         dest_section=dest_section,
+        backup_path=backup_path,
+        backups=backup_files,
         udat=request.session["user_data"],
     )
 
@@ -851,6 +889,30 @@ async def update_preference(request: Request, auth: dict = Depends(require_auth)
     return {"status": "error", "message": "User not found in user data"}
 
 
+@app.post("/db_backup")
+async def db_backup(request: Request, _auth=Depends(require_auth)):
+    backup_path = request.session["user_data"].get("db_backup_path", "./db_backups")
+    os.makedirs(backup_path, exist_ok=True)
+    outfile = Path(backup_path) / f"backup_{get_clean_timestamp()}.sql"
+    async with db_lock:
+        await asyncio.to_thread(pg_dump_file, outfile)
+    return RedirectResponse(url="/admin?dest=backup", status_code=303)
+
+
+@app.post("/db_restore")
+async def db_restore(request: Request, filename: str = Form(...), _auth=Depends(require_auth)):
+    backup_path = request.session["user_data"].get("db_backup_path", "./db_backups")
+    target = Path(backup_path) / filename
+    if not target.exists():
+        raise HTTPException(status_code=404, detail="Backup not found")
+    os.makedirs(backup_path, exist_ok=True)
+    new_backup = Path(backup_path) / f"pre_restore_{get_clean_timestamp()}.sql"
+    async with db_lock:
+        await asyncio.to_thread(pg_dump_file, new_backup)
+        await asyncio.to_thread(pg_restore_file, target)
+    return RedirectResponse(url="/admin?dest=backup", status_code=303)
+
+
 @app.get("/queue_details", response_class=HTMLResponse)
 async def queue_details(
     request: Request, queue_euid, page=1, _auth=Depends(require_auth)
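The two endpoints above share one pattern: the module-level asyncio.Lock (db_lock) serializes backup and restore, while asyncio.to_thread pushes the blocking pg_dump/psql subprocess onto a worker thread so the event loop stays responsive. A minimal, self-contained sketch of that pattern (dump_to below is a stand-in, not the project's pg_dump_file):

    import asyncio
    import subprocess
    import sys
    from pathlib import Path

    lock = asyncio.Lock()  # one backup/restore at a time, like db_lock

    def dump_to(path: Path) -> None:
        # Stand-in for pg_dump_file(): any blocking subprocess call behaves
        # the same once it is wrapped in asyncio.to_thread.
        with open(path, "w") as fh:
            subprocess.run(
                [sys.executable, "-c", "print('-- fake dump --')"],
                stdout=fh,
                check=True,
            )

    async def backup(path: Path) -> None:
        async with lock:  # a concurrent restore would wait here
            await asyncio.to_thread(dump_to, path)  # blocking work off the loop

    asyncio.run(backup(Path("demo_backup.sql")))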
@@ -3115,11 +3177,11 @@ def directory_listing(directory: Path, file_path: str) -> HTMLResponse:
     for item in items:
         if item.is_dir():
             files.append(
-                f'{item.name}/'
+                f"{item.name}/"
             )
         else:
             files.append(
-                f'{item.name}'
+                f"{item.name}"
             )
     print('PPPPPP', str(parent_path))
     html_content = f"""
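Since pg_dump_file and pg_restore_file are thin wrappers over the pg_dump and psql CLIs, a backup/restore round trip can be checked outside the web app. A rough sketch, assuming main.py imports cleanly in your environment, the PostgreSQL client tools are on PATH, and a database matching the _pg_env() defaults (localhost:5445, db "bloom") is reachable:

    from pathlib import Path

    from main import pg_dump_file, pg_restore_file  # helpers added in this patch

    backup = Path("./db_backups/manual_check.sql")
    backup.parent.mkdir(parents=True, exist_ok=True)

    pg_dump_file(backup)      # plain-format dump (pg_dump -Fp)
    pg_restore_file(backup)   # replays the SQL through psql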

diff --git a/templates/admin.html b/templates/admin.html
index a500a42..0f1ddc6 100644
--- a/templates/admin.html
+++ b/templates/admin.html
@@ -49,6 +49,40 @@
 
     Original Raw Workflow View (not intended for operational use)
 
+
+    Database Backups
+
+    Backup Directory:
+
+    {% if backups %}
+    Backup File
+    Action
+    {% for b in backups %}
+    {{ b }}
+    {% endfor %}
+    {% else %}
+    No backups found in {{ backup_path }}
+    {% endif %}
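The backups listing above is fed by the backups variable built in the admin view: a reverse-sorted glob of *.sql filenames. Assuming get_clean_timestamp() yields a lexicographically sortable stamp such as 20250703T000431 (its exact format isn't shown in this patch), the reverse sort puts the newest dump first within each filename prefix:

    names = [
        "backup_20250702T235912.sql",
        "backup_20250703T000431.sql",
        "pre_restore_20250703T001010.sql",
    ]
    # Reverse lexicographic order; within each prefix the latest stamp wins.
    print(sorted(names, reverse=True))
    # ['pre_restore_20250703T001010.sql', 'backup_20250703T000431.sql',
    #  'backup_20250702T235912.sql']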

@@ -13,6 +16,11 @@
 {% include 'bloom_header.html' %}
 
     {{ num_results }} Results
 
@@ -91,6 +99,19 @@
 
     {{ num_results }} Results
 
         document.body.removeChild(downloadLink);
     }