More documentation work: ush/enspost_config.py, ush/fire_emiss_tools.py, ush/interp_tools.py #520
base: main
ush/fire_emiss_tools.py
@@ -1,16 +1,32 @@
"""
Fire emissivity tools?
"""
import os
import numpy as np
import xarray as xr
from datetime import datetime
from netCDF4 import Dataset
import interp_tools as i_tools

#Compute average FRP from raw RAVE for the previous 24 hours
def averaging_FRP(fcst_dates, cols, rows, intp_dir, rave_to_intp, veg_map, tgt_area, beta, fg_to_ug):
    # There are two situations here.
    # 1) there is only on fire detection whithin 24 hours so FRP is divided by 2
    # 2) There are more than one fire detection so the average FRP is stimated
    # ebb_smoke is always divided by the number of times a fire is detected within 24 hours window
    """
    Compute average FRP from raw RAVE for the previous 24 hours.

    There are two situations here.
    1) There is only one fire detection within 24 hours, so FRP is divided by 2.
    2) There is more than one fire detection, so the average FRP is estimated.
    ebb_smoke is always divided by the number of times a fire is detected within the 24-hour window.

    fcst_dates: ???
    cols: ???
    rows: ???
    intp_dir: ???
    rave_to_intp: ???
    veg_map: ???
    tgt_area: ???
    beta: ???
    fg_to_ug: ???
    """
    base_array = np.zeros((cols*rows))
    frp_daily = base_array
    ebb_smoke_total = []

@@ -67,9 +83,25 @@ def averaging_FRP(fcst_dates, cols, rows, intp_dir, rave_to_intp, veg_map, tgt_a
    return(frp_avg_reshaped, ebb_total_reshaped)

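The docstring above describes a branch on the number of detections. As a minimal sketch of that rule only (not the function's actual implementation, whose inner loop is not shown in this diff, and which additionally uses `veg_map`, `tgt_area`, `beta`, and `fg_to_ug`), the following illustrates the idea with a hypothetical hourly FRP stack:

```python
import numpy as np

def illustrate_frp_averaging(frp_hourly):
    """frp_hourly: hypothetical array of shape (24, cols*rows); zeros mean no detection that hour."""
    frp_hourly = np.asarray(frp_hourly, dtype=float)
    detections = np.count_nonzero(frp_hourly, axis=0)   # detections per grid cell over 24 h
    frp_sum = frp_hourly.sum(axis=0)

    frp_avg = np.zeros_like(frp_sum)
    single = detections == 1
    multi = detections > 1
    # 1) a single detection within 24 hours: FRP is divided by 2
    frp_avg[single] = frp_sum[single] / 2.0
    # 2) multiple detections: average FRP over the detection hours
    frp_avg[multi] = frp_sum[multi] / detections[multi]

    # ebb_smoke is always divided by the number of detections in the window
    ebb_total = frp_sum * 0.3                            # placeholder factor; real code uses beta, veg_map, fg_to_ug
    ebb_smoke = np.where(detections > 0, ebb_total / np.maximum(detections, 1), 0.0)
    return frp_avg, ebb_smoke
```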
def estimate_fire_duration(intp_avail_hours, intp_dir, fcst_dates, current_day, cols, rows, rave_to_intp):
    # There are two steps here.
    # 1) First day simulation no RAVE from previous 24 hours available (fire age is set to zero)
    # 2) previus files are present (estimate fire age as the difference between the date of the current cycle and the date whe the fire was last observed whiting 24 hours)
    """
    There are two steps here.

    1. First-day simulation: no RAVE from the previous 24 hours is available
       (fire age is set to zero).

    2. Previous files are present: estimate fire age as the difference
       between the date of the current cycle and the date when the fire
       was last observed within the 24-hour window.

    intp_avail_hours: ???
    intp_dir: ???
    fcst_dates: ???
    current_day: ???
    cols: ???
    rows: ???
    rave_to_intp: ???

Review comment: @haiqinli and @MatthewPyle-NOAA what do each of these parameters mean?

    """
    t_fire = np.zeros((cols, rows))

    for date_str in fcst_dates:

@@ -103,10 +135,34 @@ def estimate_fire_duration(intp_avail_hours, intp_dir, fcst_dates, current_day,
    return(te)

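As a rough illustration of the two steps named in the docstring, the sketch below computes a per-cell fire age in hours from the time a fire was last observed. The function name, the `last_detection` mapping, and its use of the `%Y%m%d%H` format (the format used elsewhere in this PR) are assumptions for illustration, not the function's actual code:

```python
from datetime import datetime
import numpy as np

def illustrate_fire_age(current_day, last_detection, cols, rows, first_day=False):
    """last_detection maps a flat grid-cell index to the 'YYYYMMDDHH' string of the
    last hour a fire was observed there within the previous 24 hours."""
    ages = np.zeros(cols * rows)
    if first_day:
        # Step 1: no RAVE from the previous 24 hours, so fire age stays zero everywhere.
        return ages.reshape(cols, rows)

    cycle = datetime.strptime(current_day, "%Y%m%d%H")
    for idx, obs_str in last_detection.items():
        # Step 2: fire age = current cycle time minus the time the fire was last observed.
        last_seen = datetime.strptime(obs_str, "%Y%m%d%H")
        ages[idx] = (cycle - last_seen).total_seconds() / 3600.0
    return ages.reshape(cols, rows)

# e.g. illustrate_fire_age("2023061512", {7: "2023061503"}, 10, 10)[0, 7] -> 9.0
```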
def save_fire_dur(cols, rows, te):
    """
    ???

    cols: ???
    rows: ???
    te: ???

Review comment: @haiqinli and @MatthewPyle-NOAA what do each of these parameters mean and what does this function do?

    """
    fire_dur = np.array(te).reshape(cols, rows)
    return(fire_dur)

def produce_emiss_file(xarr_hwp, frp_avg_reshaped, totprcp_ave_arr, xarr_totprcp, intp_dir, current_day, tgt_latt, tgt_lont, ebb_tot_reshaped, fire_age, cols, rows):
    """
    ???

    xarr_hwp: ???
    frp_avg_reshaped: ???
    totprcp_ave_arr: ???
    xarr_totprcp: ???
    intp_dir: ???
    current_day: ???
    tgt_latt: ???
    tgt_lont: ???
    ebb_tot_reshaped: ???
    fire_age: ???
    cols: ???
    rows: ???
    """

Review comment: @haiqinli and @MatthewPyle-NOAA what do each of these parameters mean and what does this function do?

    # Ensure arrays are not negative or NaN
    frp_avg_reshaped = np.clip(frp_avg_reshaped, 0, None)
    frp_avg_reshaped = np.nan_to_num(frp_avg_reshaped)

@@ -159,4 +215,4 @@ def produce_emiss_file(xarr_hwp, frp_avg_reshaped, totprcp_ave_arr, xarr_totprcp
        print(f"Error creating or writing to NetCDF file {file_path}: {e}")
        return f"Error creating or writing to NetCDF file {file_path}: {e}"

    return "Emissions file created successfully"
ush/interp_tools.py
@@ -1,3 +1,7 @@
"""
???

"""

Review comment: @haiqinli and @MatthewPyle-NOAA what does this module hold?

import datetime as dt
import pandas as pd
import os

@@ -7,8 +11,13 @@
import numpy as np
from netCDF4 import Dataset

#Create date range, this is later used to search for RAVE and HWP from previous 24 hours
def date_range(current_day):
    """
    Create a date range; this is later used to search for RAVE and HWP from the previous 24 hours.

    current_day: ???
    """
    print(f'Searching for interpolated RAVE for {current_day}')

    fcst_datetime = dt.datetime.strptime(current_day, "%Y%m%d%H")

@@ -19,8 +28,15 @@ def date_range(current_day):
    print(f'Current cycle: {fcst_datetime}')
    return(fcst_dates)

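Since the function parses `current_day` with `%Y%m%d%H`, building the 24 hourly timestamps preceding the cycle could look like the sketch below. This is a stdlib-only illustration of the idea; it is not necessarily how `date_range` actually constructs `fcst_dates`:

```python
import datetime as dt

def previous_24_hours(current_day):
    # Parse the cycle time and list the 24 hourly timestamps leading up to it.
    fcst_datetime = dt.datetime.strptime(current_day, "%Y%m%d%H")
    hours = [fcst_datetime - dt.timedelta(hours=h) for h in range(24, 0, -1)]
    return [t.strftime("%Y%m%d%H") for t in hours]

print(previous_24_hours("2023061512")[:3])   # ['2023061412', '2023061413', '2023061414']
```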
# Check if interoplated RAVE is available for the previous 24 hours
def check_for_intp_rave(intp_dir, fcst_dates, rave_to_intp):
    """
    Check if interpolated RAVE is available for the previous 24 hours.

    intp_dir: ???
    fcst_dates: ???
    rave_to_intp: ???
    """
    intp_avail_hours = []
    intp_non_avail_hours = []
    # There are four situations here.

@@ -48,8 +64,15 @@ def check_for_intp_rave(intp_dir, fcst_dates, rave_to_intp):

    return(intp_avail_hours, intp_non_avail_hours, inp_files_2use)

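A hedged sketch of the availability check described here: for each forecast hour, test whether an interpolated file already exists in `intp_dir` and sort the hours into available and non-available lists. The file-name pattern is borrowed from `create_dummy` below; the real check presumably also uses `rave_to_intp`, which is not modeled here:

```python
import os

def illustrate_intp_check(intp_dir, fcst_dates):
    intp_avail_hours, intp_non_avail_hours, inp_files_2use = [], [], []
    for date_str in fcst_dates:
        # Assumed name pattern for an already-interpolated RAVE file
        candidate = os.path.join(intp_dir, f"SMOKE_RRFS_data_{date_str}00.nc")
        if os.path.exists(candidate):
            intp_avail_hours.append(date_str)
            inp_files_2use.append(candidate)
        else:
            intp_non_avail_hours.append(date_str)
    return intp_avail_hours, intp_non_avail_hours, inp_files_2use
```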
#Check if raw RAVE in intp_non_avail_hours list is available for interpolatation
def check_for_raw_rave(RAVE, intp_non_avail_hours, intp_avail_hours):
    """
    Check if raw RAVE in the intp_non_avail_hours list is available for interpolation.

    RAVE: ???
    intp_non_avail_hours: ???
    intp_avail_hours: ???
    """
    rave_avail = []
    rave_avail_hours = []
    rave_nonavail_hours_test = []

@@ -72,9 +95,16 @@ def check_for_raw_rave(RAVE, intp_non_avail_hours, intp_avail_hours):
    print(f'FIRST DAY?: {first_day}')
    return(rave_avail, rave_avail_hours, rave_nonavail_hours_test, first_day)

#Create source and target fields
def creates_st_fields(grid_in, grid_out, intp_dir, rave_avail_hours):
    """
    Create source and target fields.

    grid_in: ???
    grid_out: ???
    intp_dir: ???
    rave_avail_hours: ???
    """
    # Open datasets with context managers
    with xr.open_dataset(grid_in) as ds_in, xr.open_dataset(grid_out) as ds_out:
        tgt_area = ds_out['area']

@@ -93,15 +123,30 @@ def creates_st_fields(grid_in, grid_out, intp_dir, rave_avail_hours):

#Define output and variable meta data
def create_emiss_file(fout, cols, rows):
    """Create necessary dimensions for the emission file."""
    """Create necessary dimensions for the emission file.

    fout: ???
    cols: ???
    rows: ???
    """
    fout.createDimension('t', None)
    fout.createDimension('lat', cols)
    fout.createDimension('lon', rows)
    setattr(fout, 'PRODUCT_ALGORITHM_VERSION', 'Beta')
    setattr(fout, 'TIME_RANGE', '1 hour')

def Store_latlon_by_Level(fout, varname, var, long_name, units, dim, fval, sfactor):
    """Store a 2D variable (latitude/longitude) in the file."""
    """Store a 2D variable (latitude/longitude) in the file.

    fout: ???
    varname: ???
    var: ???
    long_name: ???
    units: ???
    dim: ???
    fval: ???
    sfactor: ???
    """
    var_out = fout.createVariable(varname, 'f4', ('lat','lon'))
    var_out.units=units
    var_out.long_name=long_name

@@ -111,16 +156,35 @@ def Store_latlon_by_Level(fout, varname, var, long_name, units, dim, fval, sfact
    var_out.coordinates='geolat geolon'

def Store_by_Level(fout, varname, long_name, units, dim, fval, sfactor):
    """Store a 3D variable (time, latitude/longitude) in the file."""
    """Store a 3D variable (time, latitude/longitude) in the file.

    fout: ???
    varname: ???
    long_name: ???
    units: ???
    dim: ???
    fval: ???
    sfactor: ???
    """
    var_out = fout.createVariable(varname, 'f4', ('t','lat','lon'))
    var_out.units=units
    var_out.long_name = long_name
    var_out.standard_name=long_name
    var_out.FillValue=fval
    var_out.coordinates='t geolat geolon'

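Taken together, these helpers are the pieces needed to write an emissions NetCDF file: open a `netCDF4.Dataset`, create dimensions and global attributes with `create_emiss_file`, then add coordinate and data variables with `Store_latlon_by_Level` and `Store_by_Level`. A hypothetical usage sketch, using the signatures shown in this diff; the variable names, units, fill values, and grid values are illustrative placeholders, not values taken from this PR:

```python
import numpy as np
from netCDF4 import Dataset

# Assumes the helpers from this module (ush/interp_tools.py) are importable.
from interp_tools import create_emiss_file, Store_latlon_by_Level, Store_by_Level

cols, rows = 4, 5
tgt_latt = np.linspace(30.0, 40.0, cols * rows).reshape(cols, rows)
tgt_lont = np.linspace(-110.0, -100.0, cols * rows).reshape(cols, rows)

with Dataset("example_emiss.nc", "w") as fout:
    create_emiss_file(fout, cols, rows)   # dimensions 't', 'lat', 'lon' plus global attributes
    Store_latlon_by_Level(fout, "geolat", tgt_latt, "latitude", "degrees", "2D", -9999.0, 1.0)
    Store_latlon_by_Level(fout, "geolon", tgt_lont, "longitude", "degrees", "2D", -9999.0, 1.0)
    Store_by_Level(fout, "frp_avg_hr", "mean fire radiative power", "MW", "3D", 0.0, 1.0)
    # Write one time slice into the unlimited 't' dimension.
    fout.variables["frp_avg_hr"][0, :, :] = np.zeros((cols, rows))
```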
#create a dummy rave interpolated file if first day or regrider fails
def create_dummy(intp_dir, current_day, tgt_latt, tgt_lont, cols, rows):
    """
    Create a dummy RAVE interpolated file if it is the first day or the regridder fails.

    intp_dir: ???
    current_day: ???
    tgt_latt: ???
    tgt_lont: ???
    cols: ???
    rows: ???
    """
    file_path = os.path.join(intp_dir, f'SMOKE_RRFS_data_{current_day}00.nc')
    dummy_file = np.zeros((cols, rows)) # Changed to 3D to match the '3D' dimensions
    with Dataset(file_path, 'w') as fout:

@@ -143,8 +207,18 @@ def create_dummy(intp_dir, current_day, tgt_latt, tgt_lont, cols, rows):

    return "Emissions dummy file created successfully"

#generate regridder
def generate_regrider(rave_avail_hours, srcfield, tgtfield, weightfile, inp_files_2use, intp_avail_hours):
    """
    Generate the regridder.

    rave_avail_hours: ???
    srcfield: ???
    tgtfield: ???
    weightfile: ???
    inp_files_2use: ???
    intp_avail_hours: ???
    """
    print('Checking conditions for generating regridder.')
    use_dummy_emiss = len(rave_avail_hours) == 0 and len(intp_avail_hours) == 0
    regridder = None

@@ -167,9 +241,28 @@ def generate_regrider(rave_avail_hours, srcfield, tgtfield, weightfile, inp_file

    return(regridder, use_dummy_emiss)

#process RAVE available for interpolation
def interpolate_rave(RAVE, rave_avail, rave_avail_hours, use_dummy_emiss, vars_emis, regridder,
                     srcgrid, tgtgrid, rave_to_intp, intp_dir, src_latt, tgt_latt, tgt_lont, cols, rows):
    """
    Process RAVE available for interpolation.

    RAVE: ???
    rave_avail: ???
    rave_avail_hours: ???
    use_dummy_emiss: ???
    vars_emis: ???
    regridder: ???
    srcgrid: ???
    tgtgrid: ???
    rave_to_intp: ???
    intp_dir: ???
    src_latt: ???
    tgt_latt: ???
    tgt_lont: ???
    cols: ???
    rows: ???
    """
    for index, current_hour in enumerate(rave_avail_hours):
        file_name = rave_avail[index]
        rave_file_path = os.path.join(RAVE, file_name[0])

@@ -221,4 +314,4 @@ def interpolate_rave(RAVE, rave_avail, rave_avail_hours, use_dummy_emiss, vars_e
        except (OSError, IOError, RuntimeError, FileNotFoundError, TypeError, IndexError, MemoryError) as e:
            print(f"Error reading NetCDF file {rave_file_path}: {e}")
        else:
            print(f"File not found or dummy emissions required: {rave_file_path}")
Review comment: @haiqinli and @MatthewPyle-NOAA what do each of these parameters mean?

Reply: Some of these were already defined for HWP_tools.py - guessing these items mean the same here. Can @haiqinli confirm and fill out the rest?

Reply: @JohanaRomeroAlvarez Johana is the author of these scripts, but she is on vacation this week and next week. @jordanschnell Jordan, would you like to help check what these parameters mean? Thanks.