
Commit 56c37ec

Last Jan CLS commit
1 parent e07cebf commit 56c37ec

3 files changed: +59 additions, -75 deletions

make_eddy_track_AVISO.py

Lines changed: 39 additions & 50 deletions
@@ -35,7 +35,7 @@
 from py_eddy_tracker_classes import plt, np, dt, Dataset, ndimage, time, \
                                     datestr2datetime, gaussian_resolution, \
                                     get_cax, collection_loop, track_eddies, \
-                                    anim_figure
+                                    anim_figure, interpolate
 import make_eddy_tracker_list_obj as eddy_tracker
 from dateutil import parser
 from mpl_toolkits.basemap import Basemap
@@ -505,8 +505,8 @@ def __init__(self, AVISO_FILE, LONMIN, LONMAX, LATMIN, LATMAX,
         self.set_u_v_eke()
         pad2 = 2 * self.pad
         self.shape = (self.f().shape[0] - pad2, self.f().shape[1] - pad2)
+
 
-
     def get_AVISO_data(self, AVISO_FILE):
         """
         Read nc data from AVISO file
@@ -517,7 +517,7 @@ def get_AVISO_data(self, AVISO_FILE):
             ssh1 = self.read_nc(AVISO_FILE, 'SLA',
                                 indices='[:, self.jp0:self.jp1, :self.ip0]')
             ssh0 = self.read_nc(AVISO_FILE, 'SLA',
-                                indices='[:,self.jp0:self.jp1, self.ip1:]')
+                                indices='[:, self.jp0:self.jp1, self.ip1:]')
             ssh0, ssh1 = ssh0.squeeze(), ssh1.squeeze()
             ssh0 *= 100. # m to cm
             ssh1 *= 100. # m to cm
@@ -526,7 +526,7 @@ def get_AVISO_data(self, AVISO_FILE):
             ssh1 = self.read_nc(AVISO_FILE, 'Grid_0001',
                                 indices='[:self.ip0, self.jp0:self.jp1]').T
             ssh0 = self.read_nc(AVISO_FILE, 'Grid_0001',
-                                indices='[self.ip1:,self.jp0:self.jp1]').T
+                                indices='[self.ip1:, self.jp0:self.jp1]').T
 
         zeta = np.ma.concatenate((ssh0, ssh1), axis=1)
 
@@ -591,19 +591,19 @@ def fillmask(self, x, mask):
 
         # Multiply the queried good points by the weight, selecting only the
         # nearest points. Divide by the number of nearest points to get average
-        xfill = weight * x[igood[:,0][iquery], igood[:,1][iquery]]
-        xfill = (xfill / weight.sum(axis=1)[:,np.newaxis]).sum(axis=1)
+        xfill = weight * x[igood[:, 0][iquery], igood[:,1][iquery]]
+        xfill = (xfill / weight.sum(axis=1)[:, np.newaxis]).sum(axis=1)
 
         # Place average of nearest good points, xfill, into bad point locations
-        x[ibad[:,0], ibad[:,1]] = xfill
+        x[ibad[:, 0], ibad[:, 1]] = xfill
         return x
 
 
     def lon(self):
         if self.ZERO_CROSSING:
             # TO DO: These concatenations are possibly expensive, they
             # shouldn't need to happen with every call to self.lon()
-            lon0 = self._lon[self.j0:self.j1, self.i1:]
+            lon0 = self._lon[self.j0:self.j1, self.i1:]
             lon1 = self._lon[self.j0:self.j1, :self.i0]
             print 'fix this so called only once'
             return np.concatenate((lon0 - 360., lon1), axis=1)
@@ -612,23 +612,23 @@ def lon(self):
 
     def lat(self):
         if self.ZERO_CROSSING:
-            lat0 = self._lat[self.j0:self.j1, self.i1:]
+            lat0 = self._lat[self.j0:self.j1, self.i1:]
             lat1 = self._lat[self.j0:self.j1, :self.i0]
             return np.concatenate((lat0, lat1), axis=1)
         else:
             return self._lat[self.j0:self.j1, self.i0:self.i1]
 
     def lonpad(self):
         if self.ZERO_CROSSING:
-            lon0 = self._lon[self.jp0:self.jp1, self.ip1:]
+            lon0 = self._lon[self.jp0:self.jp1, self.ip1:]
             lon1 = self._lon[self.jp0:self.jp1, :self.ip0]
             return np.concatenate((lon0 - 360., lon1), axis=1)
         else:
             return self._lon[self.jp0:self.jp1, self.ip0:self.ip1]
 
     def latpad(self):
         if self.ZERO_CROSSING:
-            lat0 = self._lat[self.jp0:self.jp1, self.ip1:]
+            lat0 = self._lat[self.jp0:self.jp1, self.ip1:]
             lat1 = self._lat[self.jp0:self.jp1, :self.ip0]
             return np.concatenate((lat0, lat1), axis=1)
         else:
@@ -665,7 +665,7 @@ def pn(self): # Reciprocal of dy
 
     def get_resolution(self):
         return np.sqrt(np.diff(self.lon()[1:], axis=1) *
-                       np.diff(self.lat()[:,1:], axis=0)).mean()
+                       np.diff(self.lat()[:, 1:], axis=0)).mean()
 
 
     def boundary(self):
@@ -676,10 +676,10 @@ def boundary(self):
         Returns:
           lon/lat boundary points
         """
-        lon = np.r_[(self.lon()[:,0], self.lon()[-1],
-                     self.lon()[::-1,-1], self.lon()[0,::-1])]
-        lat = np.r_[(self.lat()[:,0], self.lat()[-1],
-                     self.lat()[::-1,-1], self.lat()[0,::-1])]
+        lon = np.r_[(self.lon()[:, 0], self.lon()[-1],
+                     self.lon()[::-1, -1], self.lon()[0, ::-1])]
+        lat = np.r_[(self.lat()[:, 0], self.lat()[-1],
+                     self.lat()[::-1, -1], self.lat()[0, ::-1])]
         return lon, lat
 
 
@@ -693,32 +693,15 @@ def brypath(self, imin=0, imax=-1, jmin=0, jmax=-1):
         return path.Path(brypath)
 
 
-    #def pcol_2dxy(self, x, y):
-        #"""
-        #Function to shift x, y for subsequent use with pcolor
-        #by Jeroen Molemaker UCLA 2008
-        #"""
-        #Mp, Lp = x.shape
-        #M = Mp - 1
-        #L = Lp - 1
-        #x_pcol = np.zeros((Mp, Lp))
-        #y_pcol = np.zeros((Mp, Lp))
-        #x_tmp = self.half_interp(x[:,:L], x[:,1:Lp])
-        #x_pcol[1:Mp,1:Lp] = self.half_interp(x_tmp[0:M,:], x_tmp[1:Mp,:])
-        #x_pcol[0,:] = 2. * x_pcol[1,:] - x_pcol[2,:]
-        #x_pcol[:,0] = 2. * x_pcol[:,1] - x_pcol[:,2]
-        #y_tmp = self.half_interp(y[:,0:L], y[:,1:Lp] )
-        #y_pcol[1:Mp,1:Lp] = self.half_interp(y_tmp[0:M,:], y_tmp[1:Mp,:])
-        #y_pcol[0,:] = 2. * y_pcol[1,:] - y_pcol[2,:]
-        #y_pcol[:,0] = 2. * y_pcol[:,1] - y_pcol[:,2]
-        #return x_pcol, y_pcol
-
-
-
-
-
-
-
+
+    def set_interp_coeffs(self, sla, uspd):
+        """
+        """
+        self.sla_coeffs = interpolate.RectBivariateSpline(
+            self.lat()[:, 0], self.lon()[0], sla, kx=1, ky=1)
+        self.uspd_coeffs = interpolate.RectBivariateSpline(
+            self.lat()[:, 0], self.lon()[0], uspd, kx=1, ky=1)
+        return self
 
 
 #++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
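The set_interp_coeffs method added above is the heart of this commit: the grid fits RectBivariateSpline objects once per SLA record (kx=1, ky=1, i.e. plain bilinear interpolation) and every later lookup reuses them instead of refitting a spline for each contour. A minimal sketch of the same pattern with synthetic data, importing scipy.interpolate directly; the names lats, lons, sla, query_lat and query_lon are illustrative, not from the repository:

import numpy as np
from scipy import interpolate

# Synthetic regular grid: strictly increasing 1-D axes and a 2-D SLA field
lats = np.linspace(30., 45., 60)                    # shape (60,)
lons = np.linspace(-40., -20., 80)                  # shape (80,)
sla = np.outer(np.sin(np.radians(lats)), np.cos(np.radians(lons)))  # (60, 80)

# Fit once; kx=ky=1 keeps it bilinear, as in set_interp_coeffs above
sla_coeffs = interpolate.RectBivariateSpline(lats, lons, sla, kx=1, ky=1)

# Evaluate later at arbitrary scattered (lat, lon) points with .ev
query_lat = np.array([31.2, 37.8, 44.1])
query_lon = np.array([-39.5, -30.0, -21.3])
print(sla_coeffs.ev(query_lat, query_lon))

Because the spline object stores its coefficients at construction time, repeated .ev calls are cheap, which is what allows sla_coeffs and uspd_coeffs to live on the grid and be handed to the eddy objects further down.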
@@ -840,16 +823,16 @@ def brypath(self, imin=0, imax=-1, jmin=0, jmax=-1):
    #--------------------------------------------------------------------------
 
    assert DATE_STR < DATE_END, 'DATE_END must be larger than DATE_STR'
-   assert DIAGNOSTIC_TYPE in ('Q', 'SLA'), 'DIAGNOSTIC_TYPE not properly defined'
+   assert DIAGNOSTIC_TYPE in ('Q', 'SLA'), 'Undefined DIAGNOSTIC_TYPE'
 
    thestartdate = dt.date2num(datestr2datetime(str(DATE_STR)))
    theenddate = dt.date2num(datestr2datetime(str(DATE_END)))
 
    # Get complete AVISO file list
    AVISO_FILES = sorted(glob.glob(DATA_DIR + AVISO_FILES))
 
-   # Use this for subsampling to get identical list as old_AVISO
-   #AVISO_FILES = AVISO_FILES[5:-5:7]
+   # For subsampling to get identical list as old_AVISO use:
+   # AVISO_FILES = AVISO_FILES[5:-5:7]
    if AVISO_DT14 and AVISO_DT14_SUBSAMP:
        AVISO_FILES = AVISO_FILES[5:-5:np.int(DAYS_BTWN_RECORDS)]
 
@@ -1118,14 +1101,20 @@ def brypath(self, imin=0, imax=-1, jmin=0, jmax=-1):
            C_eddy.slacopy = sla.copy()
 
            # Get scalar speed
-           Uspd = np.sqrt(sla_grd.u**2 + sla_grd.v**2)
-           Uspd = np.ma.masked_where(
+           uspd = np.sqrt(sla_grd.u**2 + sla_grd.v**2)
+           uspd = np.ma.masked_where(
                sla_grd.mask[sla_grd.jup0:sla_grd.jup1,
                             sla_grd.iup0:sla_grd.iup1] == False,
-               Uspd)
-           A_eddy.Uspd = Uspd.copy()
-           C_eddy.Uspd = Uspd.copy()
+               uspd)
+           A_eddy.uspd = uspd.copy()
+           C_eddy.uspd = uspd.copy()
 
+           # Set interpolation coefficients
+           sla_grd.set_interp_coeffs(sla, uspd)
+           A_eddy.sla_coeffs = sla_grd.sla_coeffs
+           A_eddy.uspd_coeffs = sla_grd.uspd_coeffs
+           C_eddy.sla_coeffs = sla_grd.sla_coeffs
+           C_eddy.uspd_coeffs = sla_grd.uspd_coeffs
 
            # Get contours of Q/sla parameter
            if 'first_record' not in locals():
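The scalar-speed block in the hunk above follows the usual numpy masked-array recipe: compute speed from the velocity components, then mask everything where the grid's land/sea mask is False. A small sketch of that recipe with made-up arrays (u, v and seamask are illustrative names, not the grid attributes used in the script):

import numpy as np

u = np.array([[0.1, 0.2], [0.3, 0.4]])              # illustrative eastward velocity
v = np.array([[0.0, 0.1], [0.2, 0.3]])              # illustrative northward velocity
seamask = np.array([[True, False], [True, True]])   # False marks a land point

uspd = np.sqrt(u**2 + v**2)                          # scalar speed
uspd = np.ma.masked_where(seamask == False, uspd)    # hide land points
print(uspd)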

py_eddy_tracker_amplitude.py

Lines changed: 1 addition & 4 deletions
@@ -111,10 +111,7 @@ def __init__(self, contlon, contlat, eddy, grd):
         self.jmin, self.jmax = self.eddy.jmin, self.eddy.jmax
         self.sla = self.eddy.sla[self.jmin:self.jmax,
                                  self.imin:self.imax].copy()
-        self.rbspline = interpolate.RectBivariateSpline(
-            self.eddy.grd.lat()[self.jmin:self.jmax, 0],
-            self.eddy.grd.lon()[0, self.imin:self.imax],
-            self.sla, kx=1, ky=1)
+        self.rbspline = grd.sla_coeffs
         h0 = self.rbspline.ev(self.contlat, self.contlon)
         self.h0 = h0[np.isfinite(h0)].mean()
         self.amplitude = np.atleast_1d(0.)
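With the spline now attached to the grid, the amplitude object simply evaluates the shared grd.sla_coeffs at its contour vertices and averages the finite samples to get h0. A self-contained sketch of that evaluate-and-average step; the grid, the SLA field and the contour vertices below are all invented:

import numpy as np
from scipy import interpolate

lats = np.linspace(30., 45., 60)
lons = np.linspace(-40., -20., 80)
sla = np.outer(np.sin(np.radians(lats)), np.cos(np.radians(lons)))
sla_coeffs = interpolate.RectBivariateSpline(lats, lons, sla, kx=1, ky=1)

# Invented closed contour, given as (lat, lon) vertex arrays
contlat = np.array([36.0, 36.5, 37.0, 36.5, 36.0])
contlon = np.array([-31.0, -30.5, -30.0, -30.5, -31.0])

h0 = sla_coeffs.ev(contlat, contlon)     # SLA sampled along the contour
h0 = h0[np.isfinite(h0)].mean()          # average over finite samples only
print(h0)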

py_eddy_tracker_classes.py

Lines changed: 19 additions & 21 deletions
@@ -473,10 +473,10 @@ def get_uavg(Eddy, CS, collind, centlon_e, centlat_e, poly_eff,
 
    If save_all_uavg == True we want uavg for every contour
    """
-   def calc_uavg(rbspline, lon, lat):
-       """
-       """
-       return rbspline.ev(lon, lat).mean()
+   #def calc_uavg(rbspline, lon, lat):
+       #"""
+       #"""
+       #return rbspline.ev(lon, lat).mean()
 
    # True for debug figures
    #debug_U = False
@@ -495,20 +495,25 @@ def calc_uavg(rbspline, lon, lat):
    points = np.array([grd.lon()[jmin:jmax,imin:imax].ravel(),
                       grd.lat()[jmin:jmax,imin:imax].ravel()]).T
 
-   rbspline = interpolate.RectBivariateSpline(
-       grd.lat()[jmin:jmax,0],
-       grd.lon()[0,imin:imax],
-       Eddy.Uspd[jmin:jmax,imin:imax], kx=1, ky=1)
+   #rbspline = interpolate.RectBivariateSpline(
+       #grd.lat()[jmin:jmax,0],
+       #grd.lon()[0,imin:imax],
+       #Eddy.uspd[jmin:jmax,imin:imax], kx=1, ky=1)
 
    # First contour is the outer one (effective)
    theseglon, theseglat = poly_eff.vertices[:,0].copy(), \
                           poly_eff.vertices[:,1].copy()
    theseglon, theseglat = eddy_tracker.uniform_resample(
        theseglon, theseglat, method='akima')
-   uavg = calc_uavg(rbspline, theseglon[:-1], theseglat[:-1])
+   uavg = Eddy.uspd_coeffs.ev(theseglon[:-1], theseglat[:-1]).mean()
 
    if save_all_uavg:
        all_uavg = [uavg]
+       pixel_min = 1 # iterate until 1 pixel
+
+   else:
+       # Iterate until PIXEL_THRESHOLD[0] number of pixels
+       pixel_min = Eddy.PIXEL_THRESHOLD[0]
 
    start = True
    citer = np.nditer(CS.cvalues, flags=['f_index'])
@@ -526,16 +531,10 @@ def calc_uavg(rbspline, lon, lat):
            # 1. Ensure polygon_i is within polygon_e
            # 2. Ensure polygon_i contains point centlon_e, centlat_e
            # 3. Respect size range
-           if not save_all_uavg:
-               # Iterate until PIXEL_THRESHOLD[0] number of pixels
-               pixel_min = Eddy.PIXEL_THRESHOLD[0]
-           else:
-               pixel_min = 1 # iterate until 1 pixel
-
            if np.all([poly_eff.contains_path(poly_i),
                       poly_i.contains_point([centlon_e, centlat_e]),
-                      np.logical_and(mask_i_sum >= pixel_min,
-                                     mask_i_sum <= Eddy.PIXEL_THRESHOLD[1])]):
+                      (mask_i_sum >= pixel_min and
+                       mask_i_sum <= Eddy.PIXEL_THRESHOLD[1])]):
               proceed = True
           else:
               proceed = False
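The simplified test above combines three conditions: the inner contour must lie inside the effective contour, it must contain the eddy centre, and its pixel count must fall between pixel_min and PIXEL_THRESHOLD[1]. A standalone illustration of the same three checks using matplotlib paths; the squares, centre and thresholds are invented:

import numpy as np
from matplotlib.path import Path

poly_eff = Path([(0, 0), (4, 0), (4, 4), (0, 4), (0, 0)])  # outer (effective) contour
poly_i = Path([(1, 1), (3, 1), (3, 3), (1, 3), (1, 1)])    # inner contour
centlon_e, centlat_e = 2.0, 2.0                            # eddy centre
mask_i_sum = 25                                            # pixels inside poly_i
pixel_min, pixel_max = 8, 1000                             # invented thresholds

proceed = np.all([poly_eff.contains_path(poly_i),
                  poly_i.contains_point([centlon_e, centlat_e]),
                  (mask_i_sum >= pixel_min and mask_i_sum <= pixel_max)])
print(proceed)  # True for this configuration

Swapping np.logical_and for a plain Python and is safe here because mask_i_sum is a scalar, so the size test yields a single boolean rather than an array.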
@@ -547,9 +546,8 @@ def calc_uavg(rbspline, lon, lat):
                seglon, seglat = eddy_tracker.uniform_resample(
                    seglon, seglat, method='akima')
 
-               # Interpolate Uspd to seglon, seglat, then get mean
-               #uavgseg = calc_uavg(points, Eddy.Uspd[jmin:jmax,imin:imax], seglon[:-1], seglat[:-1])
-               uavgseg = calc_uavg(rbspline, seglon[:-1], seglat[:-1])
+               # Interpolate uspd to seglon, seglat, then get mean
+               uavgseg = Eddy.uspd_coeffs.ev(seglon[:-1], seglat[:-1]).mean()
 
                if save_all_uavg:
                    all_uavg.append(uavgseg)
@@ -765,7 +763,7 @@ def collection_loop(CS, grd, rtime, A_list_obj, C_list_obj,
 
        if 'Q' in Eddy.DIAGNOSTIC_TYPE:
            #print 'change to rectbispline'
-           #uavg = interpolate.griddata(points, Eddy.Uspd[jmin:jmax,imin:imax].ravel(),
+           #uavg = interpolate.griddata(points, Eddy.uspd[jmin:jmax,imin:imax].ravel(),
                                        #(contlon_e, contlat_e), 'linear')
            #uavg = np.nan_to_num(uavg).max()
            ##uavg = 0; print 'fix me'
