Modify the geotiff upload method

supermarkion committed Nov 19, 2024
1 parent 26a51d8 commit 13b440d
Showing 1 changed file with 15 additions and 16 deletions.
31 changes: 15 additions & 16 deletions scripts/stacking_processing_script.py
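The substance of the change is how the S3 destination is built: a hand-assembled path (hard-coded product name, dataset version, and split region id) is replaced by bucket and key fields exposed by the BurnCubeProcessingTask. A minimal sketch of the two schemes, using the f-strings from the diff below; the sample values and the stand-in task class are assumptions, not the real BurnCubeProcessingTask:

```python
# Old scheme: destination assembled by hand from config values.
# All sample values below are hypothetical.
region_id = "x30y14"
pred_tif = "majority.tif"
output_folder = "s3://example-bucket/projects/stacking"  # assumed config value
output_product_name = "example_product"                  # assumed config value

old_uri = (
    f"{output_folder}/{output_product_name}/3-0-0/"
    f"{region_id[:3]}/{region_id[3:]}/{pred_tif}"
)

# New scheme: the task object owns the bucket name and object key.
class StubTask:
    """Illustrative stand-in for task.BurnCubeProcessingTask."""
    s3_bucket_name = "example-bucket"           # assumed value
    s3_object_key = "projects/stacking/x30y14"  # assumed value

processing_task = StubTask()
new_uri = f"{processing_task.s3_bucket_name}/{processing_task.s3_object_key}_{pred_tif}"

print(old_uri)  # s3://example-bucket/projects/stacking/example_product/3-0-0/x30/y14/majority.tif
print(new_uri)  # example-bucket/projects/stacking/x30y14_majority.tif
```

With the path logic centralised in the task object, the per-file name can shrink to `{condition}.tif`, since the region and product context now live in the object key.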
@@ -154,32 +154,39 @@ def stacking_processing(task_id, region_id, process_cfg_url, overwrite):
"""
logging_setup() # Initialize the logging setup

print(region_id)

# Load the process configuration from the provided YAML URL
process_cfg = helper.load_yaml_remote(process_cfg_url)

match_products = process_cfg["match_products"]
output_folder = process_cfg["output_folder"]
output_product_name = process_cfg["product"]["name"]

processing_task: task.BurnCubeProcessingTask = (
task.BurnCubeProcessingTask.from_config(
cfg_url=process_cfg_url, task_id=task_id, region_id=region_id
)
)

processing_task.validate_cfg()
processing_task.validate_data()

     # Define the conditions used for result comparison
     conditions = ["any", "majority", "all"]

     for condition in conditions:
         # Process files based on the region and products information
-        sum_summary = process_files(match_products, region_id, output_folder, condition)
+        sum_summary = process_files(
+            match_products, region_id, processing_task.output_folder, condition
+        )

         # Define the output GeoTIFF file name pattern
-        pred_tif = f"dea_nbic_stacking_{region_id}_2020_{condition}.tif"
+        pred_tif = f"{condition}.tif"

         # Write the result to a Cloud Optimized GeoTIFF (COG) file
         write_cog(geo_im=sum_summary, fname=pred_tif, overwrite=overwrite, nodata=-999)

         logger.info(f"Saved result as: {pred_tif}")

         # Construct the S3 file URI for the output file
-        s3_file_uri = f"{output_folder}/{output_product_name}/3-0-0/{region_id[:3]}/{region_id[3:]}/{pred_tif}"
+        s3_file_uri = f"{processing_task.s3_bucket_name}/{processing_task.s3_object_key}_{pred_tif}"

         # Activate AWS credentials from the service account attached
         helper.get_and_set_aws_credentials()
@@ -188,15 +195,7 @@ def stacking_processing(task_id, region_id, process_cfg_url, overwrite):
         bc_io.upload_object_to_s3(pred_tif, s3_file_uri)
         logger.info(f"Uploaded to S3: {s3_file_uri}")

-    processing_task: task.BurnCubeProcessingTask = (
-        task.BurnCubeProcessingTask.from_config(
-            cfg_url=process_cfg_url, task_id=task_id, region_id=region_id
-        )
-    )
-
-    processing_task.validate_cfg()
-    processing_task.validate_data()
-
-    # processing_task.s3_object_key
     processing_task.upload_processing_log()
     processing_task.add_metadata_files()

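Taken together with the moved task construction, the function now follows a build-once, upload-per-condition, finalize-once shape. A runnable sketch of that control flow, with every Burn Cube helper stubbed out: the stub names and values are invented, only the call order is taken from the diff, and placing the final two calls after the loop (rather than inside it) is an assumption, since the extract does not show their indentation.

```python
from types import SimpleNamespace

def from_config(cfg_url, task_id, region_id):
    """Stub standing in for task.BurnCubeProcessingTask.from_config;
    the attribute names match the diff, the values are invented."""
    return SimpleNamespace(
        output_folder="s3://example-bucket/stacking",
        s3_bucket_name="example-bucket",
        s3_object_key=f"stacking/{region_id}",
        validate_cfg=lambda: None,
        validate_data=lambda: None,
        upload_processing_log=lambda: print("uploaded processing log"),
        add_metadata_files=lambda: print("added metadata files"),
    )

def stacking_flow(task_id, region_id, cfg_url):
    # Built and validated once, before the loop -- the commit moved this
    # block up from the bottom of the function.
    processing_task = from_config(cfg_url, task_id, region_id)
    processing_task.validate_cfg()
    processing_task.validate_data()

    for condition in ("any", "majority", "all"):
        pred_tif = f"{condition}.tif"
        # process_files(...) and write_cog(...) would run here in the real script
        s3_file_uri = (
            f"{processing_task.s3_bucket_name}/"
            f"{processing_task.s3_object_key}_{pred_tif}"
        )
        print(f"would upload {pred_tif} -> {s3_file_uri}")

    # Log and metadata uploads happen once, after all per-condition uploads.
    processing_task.upload_processing_log()
    processing_task.add_metadata_files()

stacking_flow("task-001", "x30y14", "https://example.com/process_cfg.yaml")
```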
