From 520395893f4d86879320536316d0ad023a12ee4c Mon Sep 17 00:00:00 2001
From: Jeremy Zilar
Date: Wed, 18 Mar 2026 09:21:18 -0400
Subject: [PATCH] Make GCS optional for transfer elevation cache and cleanup

When GCS_BUCKET_NAME is unset, get_cached_elevations() returns an empty
dict instead of failing with 'Cannot determine path without bucket name'.
dump_cached_elevations() and cleanup_locations() skip GCS operations in
that case, allowing transfers to run locally without GCS credentials.

Made-with: Cursor
---
 transfers/well_transfer_util.py | 25 ++++++++++++++++---------
 1 file changed, 16 insertions(+), 9 deletions(-)

diff --git a/transfers/well_transfer_util.py b/transfers/well_transfer_util.py
index 40660349f..7ddeac0be 100644
--- a/transfers/well_transfer_util.py
+++ b/transfers/well_transfer_util.py
@@ -20,7 +20,7 @@
 from sqlalchemy.orm import Session
 
 from db import GeologicFormation, Location
-from services.gcs_helper import get_storage_bucket
+from services.gcs_helper import GCS_BUCKET_NAME, get_storage_bucket
 from services.util import (
     get_state_from_point,
     get_county_from_point,
@@ -156,6 +156,8 @@ def get_or_create_geologic_formation(
 
 
 def get_cached_elevations() -> dict:
+    if not GCS_BUCKET_NAME:
+        return {}
     bucket = get_storage_bucket()
     log_filename = "transfer_data/cached_elevations.json"
     blob = bucket.blob(log_filename)
@@ -163,6 +165,8 @@
 
 
 def dump_cached_elevations(lut: dict):
+    if not GCS_BUCKET_NAME:
+        return
     bucket = get_storage_bucket()
     log_filename = "transfer_data/cached_elevations.json"
     blob = bucket.blob(log_filename)
@@ -174,17 +178,19 @@ def cleanup_locations(session):
     n = len(locations)
 
     lut = {}
-    bucket = get_storage_bucket()
-    log_filename = "transfer_data/location_cleanup.json"
-    blob = bucket.blob(log_filename)
-    if blob.exists():
-        lut = download_blob_json(blob, default={})
+    if GCS_BUCKET_NAME:
+        bucket = get_storage_bucket()
+        log_filename = "transfer_data/location_cleanup.json"
+        blob = bucket.blob(log_filename)
+        if blob.exists():
+            lut = download_blob_json(blob, default={})
 
     updates = []
     for i, location in enumerate(locations):
         if i and not i % 100:
-            logger.info(f"Processing row {i} of {n}. dumping lut to {log_filename}")
-            upload_blob_json(blob, lut)
+            logger.info(f"Processing row {i} of {n}")
+            if GCS_BUCKET_NAME:
+                upload_blob_json(blob, lut)
             session.bulk_update_mappings(Location, updates)
             session.commit()
             updates = []
@@ -222,7 +228,8 @@
             f"={quad_name}"
         )
 
-    upload_blob_json(blob, lut)
+    if GCS_BUCKET_NAME:
+        upload_blob_json(blob, lut)
     if updates:
         session.bulk_update_mappings(Location, updates)
         session.commit()
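
For local testing, a minimal sketch of exercising the new no-bucket path.
It assumes, which this patch does not show, that services.gcs_helper reads
GCS_BUCKET_NAME from the environment variable of the same name at import
time; the cache key and elevation value below are hypothetical.

    # Clear the bucket variable before importing, since gcs_helper is
    # assumed to read GCS_BUCKET_NAME from the environment at import time.
    import os
    os.environ.pop("GCS_BUCKET_NAME", None)

    from transfers.well_transfer_util import (
        dump_cached_elevations,
        get_cached_elevations,
    )

    # With no bucket configured, the cache read degrades to an empty dict
    # and the cache write returns early, so neither call touches GCS.
    assert get_cached_elevations() == {}
    dump_cached_elevations({"example-location-id": 1234.5})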