Skip to content

Commit

Permalink
write ROOT group to ptx/ group
Browse files Browse the repository at this point in the history
  • Loading branch information
suzanne64 committed Jul 13, 2020
1 parent 5f6241e commit 6ec9938
Show file tree
Hide file tree
Showing 3 changed files with 95 additions and 62 deletions.
19 changes: 18 additions & 1 deletion ATL06_to_ATL11.py
Original file line number Diff line number Diff line change
Expand Up @@ -15,6 +15,8 @@
import write_METADATA
import glob
import sys, h5py
import matplotlib.pyplot as plt


#591 10 -F /Volumes/ice2/ben/scf/AA_06/001/cycle_02/ATL06_20190205041106_05910210_001_01.h5 -b -101. -76. -90. -74.5 -o test.h5 -G "/Volumes/ice2/ben/scf/AA_06/001/cycle*/index/GeoIndex.h5"
#591 10 -F /Volumes/ice2/ben/scf/AA_06/001/cycle_02/ATL06_20190205041106_05910210_001_01.h5 -o test.h5 -G "/Volumes/ice2/ben/scf/AA_06/001/cycle*/index/GeoIndex.h5"
Expand Down Expand Up @@ -100,12 +102,27 @@ def main(argv):
setattr(D11.ROOT,'cycle_number',list(range(args.cycles[0],args.cycles[1]+1)))
# add dimensions to D11
D11.N_pts, D11.N_cycles = D11.ROOT.h_corr.shape
print(D11.ROOT.cycle_number)
# print(D11.cycle_stats.cycle_number)
# exit(-1)
# fig,ax=plt.subplots()
# plt.plot(D11.ref_surf.x_atc-D11.cycle_stats.x_atc[:,0],'b')
# plt.plot(D11.ref_surf.x_atc-D11.cycle_stats.x_atc[:,1],'r.')
# plt.plot(D11.ref_surf.x_atc-D11.cycle_stats.x_atc[:,2],'ko')
# plt.plot(D11.ref_surf.x_atc-D11.cycle_stats.x_atc[:,3],'gx')
# plt.plot(D11.ref_surf.y_atc-D11.cycle_stats.y_atc[:,0],'b')
# plt.plot(D11.ref_surf.y_atc-D11.cycle_stats.y_atc[:,1],'r.')
# plt.plot(D11.ref_surf.y_atc-D11.cycle_stats.y_atc[:,2],'ko')
# plt.plot(D11.ref_surf.y_atc-D11.cycle_stats.y_atc[:,3],'gx')
# plt.show()

if isinstance(D11.crossing_track_data.h_corr, np.ndarray):
D11.Nxo = D11.crossing_track_data.h_corr.shape[0]

if D11 is not None:
D11.write_to_file(out_file)

print('line 124')
print('line 125')
out_file = write_METADATA.write_METADATA(out_file,files)

print("ATL06_to_ATL11: done with "+out_file)
Expand Down
2 changes: 0 additions & 2 deletions ATL11_output_attrs.csv
Original file line number Diff line number Diff line change
Expand Up @@ -31,7 +31,6 @@ ref_surf,misfit_chi2r,meters,N_pts,Float64,"""misfit chi square, divided by the
ref_surf,misfit_RMS,meters,N_pts,Float64,"""RMS misfit for the surface-polynomial fit""","""misfit RMS""","""derived, ATL11 algorithm"""
ref_surf,dem_h,meters,N_pts,Float64,"""DEM elevation, derived from ATL06 /gtxx/atl06_segments/dem/dem_h""","""DEM elevation""","""ATL06"""
ref_surf,fit_quality,1,N_pts,int8,"""Indicates quality of the fit: 0: no problem identified, 1: One or more polynomial coefficients has an error of 2 or larger, 2: One or more surface slope components is greater than 0.02, 3: both 1 and 2""","""fit quality summary""","""derived, ATL11 algorithm"""
cycle_stats,cycle_number,counts,N_cycles,int8,"""cycle number""","""cycle number""","""ATL06"""
cycle_stats,atl06_summary_zero_count,counts,"N_pts, N_cycles",int8,"""Number of segments with ATL06_quality_summary==0 (0 indicates the best-quality data)""","""ATL06 best quality count""","""ATL06"""
cycle_stats,h_rms_misfit,meters,"N_pts, N_cycles",Float64,"""Weighted-average RMS misfit between PE heights and along-track land-ice segment fit""","""weighted average RMS fit error""","""derived, ATL11 algorithm"""
cycle_stats,r_eff,1,"N_pts, N_cycles",Float64,"""Weighted-average effective, uncorrected reflectance for each pass.""","""weighted average reflectance""","""derived, ATL11 algorithm"""
Expand All @@ -43,7 +42,6 @@ cycle_stats,bsnow_h,meters,"N_pts, N_cycles",Float64,"""Weighted-average blowin
cycle_stats,bsnow_conf,1,"N_pts, N_cycles",int8,"""Maximum bsnow_conf flag from ATL06: indicates the greatest (among segments) confidence flag for presence of blowing snow for each cycle""","""maximum blowing snow confidence flag""","""ATL06"""
cycle_stats,x_atc,meters,"N_pts, N_cycles",Float64,"""weighted average of pair-center RGT x coordinates for each cycle ""","""weighted average rgt x-coordinates""","""ATL06"""
cycle_stats,y_atc,meters,"N_pts, N_cycles",Float64,"""weighted mean of pair-center RGT y coordinates for each cycle""","""weighted average rgt y-coordinates"""
cycle_stats,ref_pt,counts,N_pts,int32,"""Ref point number, counted from the equator crossing along the RGT.""","""reference point number, segment_id""","""ATL06"""
cycle_stats,seg_count,counts,"N_pts, N_cycles",int32,"""Number of segments marked as valid for each cycle. Equal to 0 for those cycles not included in the reference-surface shape fit.""","""number valid segments""","""ATL06"""
cycle_stats,min_signal_selection_source,1,"N_pts, N_cycles",int8,"""Minimum of the ATL06 signal_selection_source value (indicates the highest-quality segment in the cycle)""","""minimum signal selection source""","""ATL06"""
cycle_stats,min_snr_significance,1,"N_pts, N_cycles",Float64,"""Minimum of SNR_significance (indicates the quality of the best segment in the cycle)""","""minimum signal-to-noise ratio significance""","""ATL06"""
Expand Down
136 changes: 77 additions & 59 deletions data.py
Original file line number Diff line number Diff line change
Expand Up @@ -269,7 +269,7 @@ def write_to_file(self, fileout, params_11=None):
# put default parameters as top level attributes
if params_11 is None:
params_11=ATL11.defaults()
# PRINT HERE, params_11

# write each variable in params_11 as an attribute
for param, val in vars(params_11).items():
if not isinstance(val,(dict,type(None))):
Expand All @@ -289,21 +289,53 @@ def write_to_file(self, fileout, params_11=None):
reader=list(csv.DictReader(attrfile))
group_names=set([row['group'] for row in reader])
attr_names=[x for x in reader[0].keys() if x != 'field' and x != 'group']

for group in group_names:
print('line 294',group)

# start with 'ROOT' group
list_vars=getattr(self,'ROOT').list_of_fields
list_vars.append('cycle_number')
# establish the two main dimension scales
for field in ['ref_pt','cycle_number']:
field_attrs = {row['field']: {attr_names[ii]:row[attr_names[ii]] for ii in range(len(attr_names))} for row in reader if 'ROOT' in row['group']}
dimensions = field_attrs[field]['dimensions'].split(',')
data = getattr(getattr(self,'ROOT'),field)
dset = g.create_dataset(field.encode('ASCII'),data=data) #,fillvalue=fillvalue)
dset.dims[0].label = field

for field in [item for item in list_vars if (item != 'ref_pt') and (item != 'cycle_number')]:
field_attrs = {row['field']: {attr_names[ii]:row[attr_names[ii]] for ii in range(len(attr_names))} for row in reader if 'ROOT' in row['group']}
dimensions = field_attrs[field]['dimensions'].split(',')
data = getattr(getattr(self,'ROOT'),field)
# change nans to proper invalid, depending on datatype
if field_attrs[field]['datatype'].startswith('int'):
data = np.nan_to_num(data,nan=np.iinfo(np.dtype(field_attrs[field]['datatype'])).max)
data = data.astype('int') # don't change to int before substituting nans with invalid.
fillvalue = np.iinfo(np.dtype(field_attrs[field]['datatype'])).max
elif field_attrs[field]['datatype'].startswith('Float'):
data = np.nan_to_num(data,nan=np.finfo(np.dtype(field_attrs[field]['datatype'])).max)
fillvalue = np.finfo(np.dtype(field_attrs[field]['datatype'])).max
dset = g.create_dataset(field.encode('ASCII'),data=data,fillvalue=fillvalue)
dset.dims[0].label = field

for ii,dim in enumerate(dimensions):
dim=dim.strip()
if 'N_pts' in dim:
dset.dims[ii].attach_scale(g['ref_pt'])
dset.dims[ii].label = 'ref_pt'
if 'N_cycles' in dim:
dset.dims[ii].attach_scale(g['cycle_number'])
dset.dims[ii].label = 'cycle_number'

for group in [item for item in group_names if item != 'ROOT']:
if hasattr(getattr(self,group),'list_of_fields'):

grp = g.create_group(group.encode('ASCII'))

field_attrs = {row['field']: {attr_names[ii]:row[attr_names[ii]] for ii in range(len(attr_names))} for row in reader if group in row['group']}
# get the dimensions for the group
unique_dims = []
[unique_dims.append(dim.strip()) for field in field_attrs for dim in field_attrs[field]['dimensions'].split(',')]
udims = list(set(unique_dims))
# make datasets for dimension scales ~
if 'N_pts' in udims or 'Nxo' in udims:
print(group,'in N_pts')

if 'Nxo' in udims:
this_ref_pt=getattr(getattr(self,group),'ref_pt')
if len(this_ref_pt) > 0:
dset = grp.create_dataset('ref_pt'.encode('ASCII'),data=this_ref_pt.astype(int))
Expand All @@ -312,81 +344,67 @@ def write_to_file(self, fileout, params_11=None):
dset.dims[0].label = 'ref_pt'.encode('ASCII')
for attr in attr_names:
if 'dimensions' not in attr:
# bpj dset.attrs[attr.encode('ASCII')] = field_attrs['ref_pt'][attr].encode('ASCII')
create_attribute(dset.id, attr, [], field_attrs['ref_pt'][attr])
if 'N_cycles' in udims:
dset = grp.create_dataset('cycle_number'.encode('ASCII'),data=getattr(getattr(self,group),'cycle_number'))
dset.dims[0].label = 'cycle_number'.encode('ASCII')
for attr in attr_names:
if 'dimensions' not in attr:
# bpj dset.attrs[attr.encode('ASCII')] = field_attrs['cycle_number'][attr].encode('ASCII')
create_attribute(dset.id, attr, [], field_attrs['cycle_number'][attr])

if 'N_coeffs' in udims:
dset = grp.create_dataset('poly_exponent_x'.encode('ASCII'),data=np.array([item[0] for item in params_11.poly_exponent_list], dtype=int))
dset.dims[0].label = 'poly_exponent_x'.encode('ASCII')
for attr in attr_names:
if 'dimensions' not in attr:
# bpj dset.attrs[attr.encode('ASCII')] = field_attrs['poly_exponent_x'][attr].encode('ASCII')
create_attribute(dset.id, attr, [], field_attrs['poly_exponent_x'][attr])
dset = grp.create_dataset('poly_exponent_y'.encode('ASCII'),data=np.array([item[1] for item in params_11.poly_exponent_list], dtype=int))
dset.dims[0].label = 'poly_exponent_y'.encode('ASCII')
for attr in attr_names:
if 'dimensions' not in attr:
# bpj dset.attrs[attr.encode('ASCII')] = field_attrs['poly_exponent_y'][attr].encode('ASCII')
create_attribute(dset.id, attr, [], field_attrs['poly_exponent_y'][attr])

if 'ref_surf' in group:
grp.attrs['poly_exponent_x'.encode('ASCII')]=np.array([item[0] for item in params_11.poly_exponent_list], dtype=int)
grp.attrs['poly_exponent_y'.encode('ASCII')]=np.array([item[1] for item in params_11.poly_exponent_list], dtype=int)
grp.attrs['slope_change_t0'.encode('ASCII')]=np.mean(self.slope_change_t0).astype('int')
g.attrs['N_poly_coeffs'.encode('ASCII')]=int(self.N_coeffs)

list_vars=getattr(self,group).list_of_fields
# only ROOT in the future >
if 'cycle_stats' in group or 'ROOT' in group:
list_vars.append('cycle_number')
if group == 'crossing_track_data':
list_vars.remove('ref_pt') # handled above
if list_vars is not None:
for field in list_vars:
print('line 349',group,field)
dimensions = field_attrs[field]['dimensions'].split(',')
if ('ref_pt' not in field and 'cycle_number' not in field) or ('cycle_number' in field and 'crossing_track_data' in group):
data = getattr(getattr(self,group),field)
# change nans to proper invalid, depending on datatype
if field_attrs[field]['datatype'].startswith('int'):
data = np.nan_to_num(data,nan=np.iinfo(np.dtype(field_attrs[field]['datatype'])).max)
data = data.astype('int') # don't change to int before substituting nans with invalid.
fillvalue = np.iinfo(np.dtype(field_attrs[field]['datatype'])).max
elif field_attrs[field]['datatype'].startswith('Float'):
data = np.nan_to_num(data,nan=np.finfo(np.dtype(field_attrs[field]['datatype'])).max)
fillvalue = np.finfo(np.dtype(field_attrs[field]['datatype'])).max
data = getattr(getattr(self,group),field)
# change nans to proper invalid, depending on datatype
if field_attrs[field]['datatype'].startswith('int'):
data = np.nan_to_num(data,nan=np.iinfo(np.dtype(field_attrs[field]['datatype'])).max)
data = data.astype('int') # don't change to int before substituting nans with invalid.
fillvalue = np.iinfo(np.dtype(field_attrs[field]['datatype'])).max
elif field_attrs[field]['datatype'].startswith('Float'):
data = np.nan_to_num(data,nan=np.finfo(np.dtype(field_attrs[field]['datatype'])).max)
fillvalue = np.finfo(np.dtype(field_attrs[field]['datatype'])).max

if 'ROOT' in group:
dset = g.create_dataset(field.encode('ASCII'),data=data,fillvalue=fillvalue)
print('ROOT')
else:
dset = grp.create_dataset(field.encode('ASCII'),data=data,fillvalue=fillvalue) #,dtype=dt)
for ii,dim in enumerate(dimensions):
dim=dim.strip()
if 'N_pts' in dim or 'Nxo' in dim:
dset.dims[ii].attach_scale(grp['ref_pt'])
dset.dims[ii].label = 'ref_pt'
if 'N_cycles' in dim:
dset.dims[ii].attach_scale(grp['cycle_number'])
dset.dims[ii].label = 'cycle_number'
if 'N_coeffs' in dim:
dset.dims[ii].attach_scale(grp['poly_exponent_x'])
dset.dims[ii].attach_scale(grp['poly_exponent_y'])
dset.dims[ii].label = '(poly_exponent_x, poly_exponent_y)'

for attr in attr_names:
if 'dimensions' not in attr:
dset = grp.create_dataset(field.encode('ASCII'),data=data,fillvalue=fillvalue) #,dtype=dt)
for ii,dim in enumerate(dimensions):
dim=dim.strip()
if 'N_pts' in dim:
dset.dims[ii].attach_scale(g['ref_pt'])
dset.dims[ii].label = 'ref_pt'
if 'N_cycles' in dim:
dset.dims[ii].attach_scale(g['cycle_number'])
dset.dims[ii].label = 'cycle_number'
if 'N_coeffs' in dim:
dset.dims[ii].attach_scale(grp['poly_exponent_x'])
dset.dims[ii].attach_scale(grp['poly_exponent_y'])
dset.dims[ii].label = '(poly_exponent_x, poly_exponent_y)'
if 'Nxo' in dim:
dset.dims[ii].attach_scale(grp['ref_pt'])
dset.dims[ii].label = 'ref_pt'
for attr in attr_names:
if 'dimensions' not in attr:
# bpj dset.attrs[attr.encode('ASCII')] = str(field_attrs[field][attr]).encode('ASCII')
create_attribute(dset.id, attr, [], str(field_attrs[field][attr]))
if field_attrs[field]['datatype'].startswith('int'):
dset.attrs['_FillValue'.encode('ASCII')] = np.iinfo(np.dtype(field_attrs[field]['datatype'])).max
elif field_attrs[field]['datatype'].startswith('Float'):
dset.attrs['_FillValue'.encode('ASCII')] = np.finfo(np.dtype(field_attrs[field]['datatype'])).max
f.close()
create_attribute(dset.id, attr, [], str(field_attrs[field][attr]))
if field_attrs[field]['datatype'].startswith('int'):
dset.attrs['_FillValue'.encode('ASCII')] = np.iinfo(np.dtype(field_attrs[field]['datatype'])).max
elif field_attrs[field]['datatype'].startswith('Float'):
dset.attrs['_FillValue'.encode('ASCII')] = np.finfo(np.dtype(field_attrs[field]['datatype'])).max
f.close()
return


Expand Down

0 comments on commit 6ec9938

Please sign in to comment.