Skip to content

Commit

Permalink
worked data.from_file() and ATL11_browse_plots.py for corrected_h/ROOT group change
Browse files Browse the repository at this point in the history
  • Loading branch information
suzanne64 committed Jul 14, 2020
1 parent 6ec9938 commit daabd5f
Show file tree
Hide file tree
Showing 3 changed files with 67 additions and 71 deletions.
16 changes: 1 addition & 15 deletions ATL06_to_ATL11.py
Original file line number Diff line number Diff line change
Expand Up @@ -102,27 +102,13 @@ def main(argv):
setattr(D11.ROOT,'cycle_number',list(range(args.cycles[0],args.cycles[1]+1)))
# add dimensions to D11
D11.N_pts, D11.N_cycles = D11.ROOT.h_corr.shape
print(D11.ROOT.cycle_number)
# print(D11.cycle_stats.cycle_number)
# exit(-1)
# fig,ax=plt.subplots()
# plt.plot(D11.ref_surf.x_atc-D11.cycle_stats.x_atc[:,0],'b')
# plt.plot(D11.ref_surf.x_atc-D11.cycle_stats.x_atc[:,1],'r.')
# plt.plot(D11.ref_surf.x_atc-D11.cycle_stats.x_atc[:,2],'ko')
# plt.plot(D11.ref_surf.x_atc-D11.cycle_stats.x_atc[:,3],'gx')
# plt.plot(D11.ref_surf.y_atc-D11.cycle_stats.y_atc[:,0],'b')
# plt.plot(D11.ref_surf.y_atc-D11.cycle_stats.y_atc[:,1],'r.')
# plt.plot(D11.ref_surf.y_atc-D11.cycle_stats.y_atc[:,2],'ko')
# plt.plot(D11.ref_surf.y_atc-D11.cycle_stats.y_atc[:,3],'gx')
# plt.show()

if isinstance(D11.crossing_track_data.h_corr, np.ndarray):
D11.Nxo = D11.crossing_track_data.h_corr.shape[0]

if D11 is not None:
D11.write_to_file(out_file)
print('line 124')
print('line 125')

out_file = write_METADATA.write_METADATA(out_file,files)

print("ATL06_to_ATL11: done with "+out_file)
Expand Down
43 changes: 18 additions & 25 deletions ATL11_browse_plots.py
Original file line number Diff line number Diff line change
Expand Up @@ -78,9 +78,6 @@ def ATL11_browse_plots(ATL11_file, hemisphere=1, mosaic=None, out_path=None, pdf
for pr in np.arange(3):
pair = pr+1
D = ATL11.data().from_file(ATL11_file, pair=pair, field_dict=None)
if D.no_pair:
print('you should write no pair')
fhlog.write('{}: No beam pair {} data\n'.format(ATL11_file_str,pair))

if hemisphere==1:
D.get_xy(EPSG=3413)
Expand All @@ -93,8 +90,8 @@ def ATL11_browse_plots(ATL11_file, hemisphere=1, mosaic=None, out_path=None, pdf
projection = ccrs.Stereographic(central_longitude=+0.0,
central_latitude=-90.0,
true_scale_latitude=-71.0)

if D.no_pair:
fhlog.write('{}: No beam pair {} data\n'.format(ATL11_file_str,pair))
ref_pt = np.concatenate( (ref_pt,np.full((1,),np.nan)), axis=0)
h_corr = np.concatenate( (h_corr,np.full((1,num_cycles),np.nan)), axis=0)
delta_time = np.concatenate( (delta_time,np.full((1,num_cycles),np.nan)), axis=0)
Expand All @@ -115,23 +112,20 @@ def ATL11_browse_plots(ATL11_file, hemisphere=1, mosaic=None, out_path=None, pdf
xo_cycle_number = np.concatenate( (xo_cycle_number, np.full((1,),np.nan)), axis=0)
xo_pair_number = np.concatenate( (xo_pair_number,np.full((1,),np.nan)), axis=0)
else:
# get only common reference points
ci,ri = np.intersect1d(D.corrected_h.ref_pt, D.ref_surf.ref_pt, return_indices=True)[1:]
ref_pt = np.concatenate( (ref_pt,D.corrected_h.ref_pt[ci]), axis=0)
h_corr = np.concatenate( (h_corr,D.corrected_h.h_corr[ci]), axis=0)
delta_time = np.concatenate( (delta_time,D.corrected_h.delta_time[ci]), axis=0)
lat = np.concatenate( (lat,D.corrected_h.latitude[ci]), axis=0)
lon = np.concatenate( (lon,D.corrected_h.longitude[ci]), axis=0)
x = np.concatenate( (x,D.x[ci]), axis=0)
y = np.concatenate( (y,D.y[ci]), axis=0)
pair_number = np.concatenate( (pair_number,np.full((len(ci),),pair)), axis=0)
ref_pt = np.concatenate( (ref_pt,D.ROOT.ref_pt.ravel()), axis=0)
h_corr = np.concatenate( (h_corr,D.ROOT.h_corr), axis=0)
delta_time = np.concatenate( (delta_time,D.ROOT.delta_time), axis=0)
lat = np.concatenate( (lat,D.ROOT.latitude.ravel()), axis=0)
lon = np.concatenate( (lon,D.ROOT.longitude.ravel()), axis=0)
x = np.concatenate( (x,D.x), axis=0)
y = np.concatenate( (y,D.y), axis=0)
pair_number = np.concatenate( (pair_number,np.full((len(D.ROOT.ref_pt.ravel()),),pair)), axis=0)

refsurf_pt = np.concatenate( (refsurf_pt,D.ref_surf.ref_pt[ri]), axis=0)
dem_h = np.concatenate( (dem_h,D.ref_surf.dem_h[ri]), axis=0)
if ~np.all(np.isnan(D.ref_surf.fit_quality.ravel())): #hasattr(D.ref_surf,'fit_quality'):
fit_quality= np.concatenate( (fit_quality,D.ref_surf.fit_quality[ri].ravel()), axis=0)
else:
fit_quality= np.concatenate( (fit_quality,D.ref_surf.quality_summary[ri]), axis=0)
dem_h = np.concatenate( (dem_h,D.ref_surf.dem_h), axis=0)
# if ~np.all(np.isnan(D.ref_surf.fit_quality.ravel())): #hasattr(D.ref_surf,'fit_quality'):
fit_quality= np.concatenate( (fit_quality,D.ref_surf.fit_quality.ravel()), axis=0)
# else:
# fit_quality= np.concatenate( (fit_quality,D.ref_surf.quality_summary), axis=0)

ref, xo, delta = D.get_xovers()
ref_h_corr = np.concatenate( (ref_h_corr, ref.h_corr), axis=0)
Expand All @@ -150,7 +144,6 @@ def ATL11_browse_plots(ATL11_file, hemisphere=1, mosaic=None, out_path=None, pdf
pair_number[fit_quality==0], dem_h[fit_quality==0]


# fhlog.write('{}: Percentage of good data points, {:.2f}%\n'.format(ATL11_file_str, len(fit_quality[fit_quality==0])/len(fit_quality)*100))
fhlog.write('{}: {:.1f}% data with good fit, used in figures\n'.format(ATL11_file_str, len(fit_quality[fit_quality==0])/len(fit_quality)*100))

if ~np.all(np.isnan(h_corr.ravel())):
Expand All @@ -174,7 +167,7 @@ def ATL11_browse_plots(ATL11_file, hemisphere=1, mosaic=None, out_path=None, pdf
ybuf = xwidth/4/2;
bounds = [ [np.nanmin([xctr-xbuf, np.nanmin(x)-1e4]), np.nanmax([xctr+xbuf, np.nanmax(x)+1e4])],
[np.nanmin([yctr-ybuf, np.nanmin(y)-1e4]), np.nanmax([yctr+ybuf, np.nanmax(y)+1e4])] ]

ddem = h_corr-dem_h[:,None]
ddem05 = stats.scoreatpercentile(ddem[~np.isnan(ddem)].ravel(),5)
ddem95 = stats.scoreatpercentile(ddem[~np.isnan(ddem)].ravel(),95)
Expand Down Expand Up @@ -321,9 +314,9 @@ def ATL11_browse_plots(ATL11_file, hemisphere=1, mosaic=None, out_path=None, pdf
if np.all(np.isnan(h_corr[pair_number==pair,:])):
ax4[0,pr].annotate('No Data',xy=(0.1, 0.8), xycoords='axes fraction')
ax4[1,pr].annotate('No Data',xy=(0.1, 0.8), xycoords='axes fraction')
ax4[0,1].set_title('corrected_h/h_corr', fontdict={'fontsize':10});
ax4[0,1].set_title('h_corr', fontdict={'fontsize':10});
ax4[0,pr].grid(linestyle='--')
ax4[1,1].set_title('corrected_h/h_corr minus DEM', fontdict={'fontsize':10});
ax4[1,1].set_title('h_corr minus DEM', fontdict={'fontsize':10});
ax4[1,pr].grid(linestyle='--')
ax4[0,0].set_ylim((h05,h95))
ax4[0,0].set_ylabel('meters')
Expand Down Expand Up @@ -408,7 +401,7 @@ def ATL11_browse_plots(ATL11_file, hemisphere=1, mosaic=None, out_path=None, pdf
ax8[0,1].set_title('crossing_track_data/h_corr', fontdict={'fontsize':10})
ax8[1,0].set_ylabel('meters')
ax8[1,0].set_ylim((dxo05,dxo95))
ax8[1,1].set_title('corrected_h/h_corr minus crossing_track_data/h_corr', fontdict={'fontsize':10})
ax8[1,1].set_title('h_corr minus crossing_track_data/h_corr', fontdict={'fontsize':10})
else:
ax8[0,0].annotate('No Data', xy=(0.1, 0.8), xycoords='axes fraction')
if pair == 3:
Expand Down
79 changes: 48 additions & 31 deletions data.py
Original file line number Diff line number Diff line change
Expand Up @@ -153,41 +153,57 @@ def from_file(self, filename, pair=2, index_range=[0, -1], field_dict=None, inv
# if the field dict is not specified, read it
if field_dict is None:
field_dict={}
for group in FH[pt].keys():
#print('group line 149',group)
field_dict[group]=[]
for field in FH[pt][group].keys():
#print('line 153',group, field)
field_dict[group].append(field)

# Suzanne, rework this using dim scales.
field_dict['ROOT']=[]
for key,val in FH[pt].items():
if isinstance(val, h5py.Group):
field_dict[key]=[]
for field in FH[pt][key].keys():
field_dict[key].append(field)
if isinstance(val, h5py.Dataset):
field_dict['ROOT'].append(key)

N_pts=FH[pt]['h_corr'][index_range[0]:index_range[-1],:].shape[0]
N_cycles=FH[pt]['cycle_number'].shape[0]
cycles=[FH[pt].attrs['first_cycle'], FH[pt].attrs['last_cycle']]
N_coeffs=FH[pt]['ref_surf']['poly_coeffs'].shape[1]
self.__init__(N_pts=N_pts, cycles=cycles, N_coeffs=N_coeffs)

for in_group in (field_dict):
# the root group is represented as "ROOT"
if in_group is None:
out_group=in_group
else:
out_group='ROOT'
if in_group != 'crossing_track_data':
for field in field_dict[group]:
try:
this_field = np.array(FH[pt][in_group][field]).astype('float')
# check for invalids replace with nans
if invalid_to_nan:
this_field[this_field==FH[pt][in_group][field].fillvalue.astype('float')] = np.nan
if len(this_field.shape) > 1:
setattr(getattr(self, out_group), field, this_field[index_range[0]:index_range[1],:])
else:
if 'cycle_number' in field and (out_group=='ROOT' or out_group=='cycle_stats'):
setattr(getattr(self, out_group), field, this_field[:])

for group in (field_dict):
# if in_group is None:
# out_group=in_group
# else:
# out_group='ROOT'
if group != 'crossing_track_data':
if group == 'ROOT':
for field in field_dict[group]:
try:
this_field = np.array(FH[pt][field]).astype('float')
# check for invalids replace with nans
if invalid_to_nan:
this_field[this_field==FH[pt][field].fillvalue.astype('float')] = np.nan
if len(this_field.shape) > 1:
setattr(getattr(self, group), field, this_field[index_range[0]:index_range[1],:])
else:
setattr(getattr(self, out_group), field, this_field[index_range[0]:index_range[1]])
except KeyError:
print("ATL11 file %s: missing %s/%s" % (filename, out_group, field))
if 'cycle_number' in field: # and (out_group=='ROOT' or out_group=='cycle_stats'):
setattr(getattr(self, group), field, this_field[:])
else:
setattr(getattr(self, group), field, this_field[index_range[0]:index_range[1]])
except KeyError:
print("ATL11 file %s: missing %s/%s" % (filename, out_group, field))

else:
for field in field_dict[group]:
try:
this_field = np.array(FH[pt][group][field]).astype('float')
# check for invalids replace with nans
if invalid_to_nan:
this_field[this_field==FH[pt][group][field].fillvalue.astype('float')] = np.nan
if len(this_field.shape) > 1:
setattr(getattr(self, group), field, this_field[index_range[0]:index_range[1],:])
else:
setattr(getattr(self, group), field, this_field[index_range[0]:index_range[1]])
except KeyError:
print("ATL11 file %s: missing %s/%s" % (filename, group, field))
else:
# get the indices for the crossing_track_data group:
if self.ROOT.ref_pt.size <1:
Expand All @@ -213,7 +229,8 @@ def from_file(self, filename, pair=2, index_range=[0, -1], field_dict=None, inv
self.poly_exponent={'x':np.array(FH[pt]['ref_surf']['poly_exponent_x']), 'y':np.array(FH[pt]['ref_surf']['poly_exponent_y'])}
for attr in FH[pt].attrs.keys():
self.attrs[attr]=FH[pt].attrs[attr]
self.cycle_number=np.array(FH[pt]['/cycle_number'])
# self.cycle_number=np.array(FH[pt]['/cycle_number'])
self.cycle_number=np.array(FH[pt]['cycle_number'])
return self

def get_xy(self, proj4_string=None, EPSG=None):
Expand Down

0 comments on commit daabd5f

Please sign in to comment.