forked from AntSimi/py-eddy-tracker
-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy pathmake_eddy_track_AVISO.py
More file actions
1458 lines (1135 loc) · 52.4 KB
/
make_eddy_track_AVISO.py
File metadata and controls
1458 lines (1135 loc) · 52.4 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
360
361
362
363
364
365
366
367
368
369
370
371
372
373
374
375
376
377
378
379
380
381
382
383
384
385
386
387
388
389
390
391
392
393
394
395
396
397
398
399
400
401
402
403
404
405
406
407
408
409
410
411
412
413
414
415
416
417
418
419
420
421
422
423
424
425
426
427
428
429
430
431
432
433
434
435
436
437
438
439
440
441
442
443
444
445
446
447
448
449
450
451
452
453
454
455
456
457
458
459
460
461
462
463
464
465
466
467
468
469
470
471
472
473
474
475
476
477
478
479
480
481
482
483
484
485
486
487
488
489
490
491
492
493
494
495
496
497
498
499
500
501
502
503
504
505
506
507
508
509
510
511
512
513
514
515
516
517
518
519
520
521
522
523
524
525
526
527
528
529
530
531
532
533
534
535
536
537
538
539
540
541
542
543
544
545
546
547
548
549
550
551
552
553
554
555
556
557
558
559
560
561
562
563
564
565
566
567
568
569
570
571
572
573
574
575
576
577
578
579
580
581
582
583
584
585
586
587
588
589
590
591
592
593
594
595
596
597
598
599
600
601
602
603
604
605
606
607
608
609
610
611
612
613
614
615
616
617
618
619
620
621
622
623
624
625
626
627
628
629
630
631
632
633
634
635
636
637
638
639
640
641
642
643
644
645
646
647
648
649
650
651
652
653
654
655
656
657
658
659
660
661
662
663
664
665
666
667
668
669
670
671
672
673
674
675
676
677
678
679
680
681
682
683
684
685
686
687
688
689
690
691
692
693
694
695
696
697
698
699
700
701
702
703
704
705
706
707
708
709
710
711
712
713
714
715
716
717
718
719
720
721
722
723
724
725
726
727
728
729
730
731
732
733
734
735
736
737
738
739
740
741
742
743
744
745
746
747
748
749
750
751
752
753
754
755
756
757
758
759
760
761
762
763
764
765
766
767
768
769
770
771
772
773
774
775
776
777
778
779
780
781
782
783
784
785
786
787
788
789
790
791
792
793
794
795
796
797
798
799
800
801
802
803
804
805
806
807
808
809
810
811
812
813
814
815
816
817
818
819
820
821
822
823
824
825
826
827
828
829
830
831
832
833
834
835
836
837
838
839
840
841
842
843
844
845
846
847
848
849
850
851
852
853
854
855
856
857
858
859
860
861
862
863
864
865
866
867
868
869
870
871
872
873
874
875
876
877
878
879
880
881
882
883
884
885
886
887
888
889
890
891
892
893
894
895
896
897
898
899
900
901
902
903
904
905
906
907
908
909
910
911
912
913
914
915
916
917
918
919
920
921
922
923
924
925
926
927
928
929
930
931
932
933
934
935
936
937
938
939
940
941
942
943
944
945
946
947
948
949
950
951
952
953
954
955
956
957
958
959
960
961
962
963
964
965
966
967
968
969
970
971
972
973
974
975
976
977
978
979
980
981
982
983
984
985
986
987
988
989
990
991
992
993
994
995
996
997
998
999
1000
# -*- coding: utf-8 -*-
# %run make_eddy_track_AVISO.py
"""
===========================================================================
This file is part of py-eddy-tracker.
py-eddy-tracker is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
py-eddy-tracker is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with py-eddy-tracker. If not, see <http://www.gnu.org/licenses/>.
Copyright (c) 2014 by Evan Mason
Email: emason@imedea.uib-csic.es
===========================================================================
make_eddy_track_AVISO.py
Version 1.4.2
Scroll down to line ~640 to get started
===========================================================================
"""
"""
WARNINGS...?
--- AVISO_file: /marula/emason/data/altimetry/MSLA/GLOBAL/DT/REF/dt_ref_global_merged_msla_h_qd_19930915_19930915_20100503.nc
/usr/lib64/python2.7/site-packages/numpy/lib/arraysetops.py:197: RuntimeWarning: invalid value encountered in not_equal
flag = np.concatenate(([True], aux[1:] != aux[:-1]))
--- AVISO_file: /marula/emason/data/altimetry/MSLA/GLOBAL/DT/REF/dt_ref_global_merged_msla_h_qd_19930922_19930922_20100503.nc
"""
import glob as glob
#import matplotlib.pyplot as plt
from py_eddy_tracker_classes import *
from make_eddy_tracker_list_obj import *
from dateutil import parser
class PyEddyTracker (object):
    '''
    Base object for eddy-tracking grids.

    Methods defined here are grouped into categories based on input data source,
    i.e., AVISO, ROMS, etc. To introduce a new data source new methods can be
    introduced here.

    METHODS:
      Common: read_nc
              read_nc_att
              set_initial_indices
              set_index_padding
              haversine_dist
              half_interp
      AVISO:  get_AVISO_f_pm_pn
      ROMS:   get_ROMS_f_pm_pn

    Subclasses are expected to provide `self._lon`, `self._lat` (2d arrays)
    and the accessor methods (lonpad, latpad, gof, pm, pn, umask, vmask, ...)
    used below.
    '''

    def __init__(self):
        '''
        Set some constants.
        '''
        self.gravity = 9.81            # gravitational acceleration (m s^-2)
        self.earth_radius = 6371315.0  # mean Earth radius (m), from ROMS scalars.h
        self.zero_crossing = False     # True when the domain straddles the 0 deg meridian

    def read_nc(self, varfile, varname, indices="[:]"):
        '''
        Read data from nectdf file.
          varfile : path to the netcdf file
          varname : variable ('temp', 'mask_rho', etc) to read
          indices : string of index ranges, eg. '[0,:,0]'
        Returns the sliced variable, or None if the read fails for any reason.
        '''
        with netcdf.Dataset(varfile) as nc:
            try:
                # NOTE(review): `indices` is eval'd as Python code; only
                # trusted, hard-coded strings must ever be passed here
                # (see AvisoGrid.get_AVISO_data for the intended usage).
                var = eval(''.join(("nc.variables[varname]", indices)))
            except Exception:
                # Any failure (missing variable, bad slice) is reported as None
                return None
            else:
                return var

    def read_nc_att(self, varfile, varname, att):
        '''
        Read data attribute from nectdf file.
          varname : variable ('temp', 'mask_rho', etc) to read
          att     : string of attribute, eg. 'valid_range'
        '''
        with netcdf.Dataset(varfile) as nc:
            # NOTE(review): eval'd attribute access; same trust caveat as read_nc.
            return eval(''.join(("nc.variables[varname].", att)))

    def set_initial_indices(self, lonmin, lonmax, latmin, latmax):
        '''
        Get i/j grid indices (i0, i1, j0, j1) bracketing the desired
        lon/lat domain.  For AVISO grids crossing the zero meridian,
        i0/i1 are recomputed with a KD-tree search over the two halves
        of the wrapped grid.
        '''
        print '--- Setting initial indices to lonmin, lonmax, latmin, latmax'
        # Probe at the domain mid-latitude / mid-longitude so curvilinear
        # grids still give sensible bracketing indices.
        self.i0, junk = self.nearest_point(lonmin, latmin + 0.5 * (latmax - latmin))
        self.i1, junk = self.nearest_point(lonmax, latmin + 0.5 * (latmax - latmin))
        junk, self.j0 = self.nearest_point(lonmin + 0.5 * (lonmax - lonmin), latmin)
        junk, self.j1 = self.nearest_point(lonmin + 0.5 * (lonmax - lonmin), latmax)

        def kdt(lon, lat, limits, k=4):
            # Return the (i, j) indices of the k nearest grid points to each
            # corner point in `limits` (flattened-grid KD-tree query).
            ppoints = np.array([lon.ravel(), lat.ravel()]).T
            ptree = spatial.cKDTree(ppoints)
            pindices = ptree.query(limits, k=k)[1]
            iind, jind = np.array([], dtype=int), np.array([], dtype=int)
            for pind in pindices.ravel():
                j, i = np.unravel_index(pind, lon.shape)
                iind = np.r_[iind, i]
                jind = np.r_[jind, j]
            return iind, jind

        if 'AvisoGrid' in self.__class__.__name__:
            if self.zero_crossing is True:
                '''
                Used for a zero crossing, e.g., across Agulhas region
                '''
                def half_limits(lon, lat):
                    # Four corner points (4 x 2 array) of the lon/lat bounding box
                    return np.array([np.array([lon.min(), lon.max(),
                                               lon.max(), lon.min()]),
                                     np.array([lat.min(), lat.min(),
                                               lat.max(), lat.max()])]).T
                # Get bounds for right part of grid
                lat = self._lat[self._lon >= 360 + lonmin - 0.5]
                lon = self._lon[self._lon >= 360 + lonmin - 0.5]
                limits = half_limits(lon, lat)
                iind, jind = kdt(self._lon, self._lat, limits)
                self.i1 = iind.min()
                # Get bounds for left part of grid
                lat = self._lat[self._lon <= lonmax + 0.5]
                lon = self._lon[self._lon <= lonmax + 0.5]
                limits = half_limits(lon, lat)
                iind, jind = kdt(self._lon, self._lat, limits)
                self.i0 = iind.max()
        return self

    def set_index_padding(self, pad=2):
        '''
        Set start and end indices for temporary padding and later unpadding
        around 2d variables.
        Padded matrices are needed only for geostrophic velocity computation.

        Sets ip0/ip1/jp0/jp1 (padded slice bounds into the full grid) and
        iup0/iup1/jup0/jup1 (bounds that later strip the pad off again).
        '''
        print '--- Setting padding indices with pad=%s' %pad
        self.pad = pad

        def get_str(thestr, pad):
            '''
            Get start indices for pad
            Returns:
              pad_str   - index to add pad
              unpad_str - index for later unpadding
            '''
            # Clamp at 0 so padding never runs off the grid edge
            pad_str = np.max([0, thestr - pad])
            if pad > 0:
                unpad_str = np.max([0, np.diff([pad_str, thestr])])
                return pad_str, unpad_str
            else:
                unpad_str = np.min([0, np.diff([pad_str, thestr])])
                return pad_str, -1 * unpad_str

        def get_end(theend, shape, pad):
            '''
            Get end indices for pad
            Returns:
              pad_end   - index to add pad
              unpad_end - index for later unpadding
            '''
            if theend is None:
                pad_end = None
                unpad_end = None
            else:
                # Clamp at the grid size
                pad_end = np.minimum(shape, theend + pad)
                if shape == theend + pad:
                    unpad_end = -pad
                elif shape == theend + pad - 1:
                    unpad_end = -1
                elif shape == pad_end:
                    unpad_end = None
                else:
                    unpad_end = -pad
            if pad > 0:
                return pad_end, unpad_end
            else:
                # NOTE(review): if pad < 0 and theend is None this evaluates
                # -1 * None -> TypeError.  Presumably the negative-pad
                # (zero-crossing) path always has concrete i1 -- confirm.
                return pad_end, -1 * unpad_end

        self.jp0, self.jup0 = get_str(self.j0, pad)
        self.jp1, self.jup1 = get_end(self.j1, self._lon.shape[0], pad)
        if self.zero_crossing:
            # Sign flip: i-indices are mirrored for grids wrapped across 0 deg
            pad = -pad
        self.ip0, self.iup0 = get_str(self.i0, pad)
        self.ip1, self.iup1 = get_end(self.i1, self._lon.shape[1], pad)
        return self

    def haversine_dist(self, lon1, lat1, lon2, lat2):
        '''
        TO DO: change to use f2py version
        Haversine formula to calculate distance between two lon/lat points
        Uses mean earth radius in metres (from ROMS scalars.h) = 6371315.0
        Input:
          lon1, lat1, lon2, lat2 (arrays, degrees)
        Return:
          distance (m)
        Uses numexpr (ne) for the array arithmetic.
        '''
        # Copy so the in-place deg2rad below cannot clobber caller arrays
        lon1, lat1, lon2, lat2 = lon1.copy(), lat1.copy(), lon2.copy(), lat2.copy()
        dlat = np.deg2rad(lat2 - lat1)
        dlon = np.deg2rad(lon2 - lon1)
        np.deg2rad(lat1, out=lat1)
        np.deg2rad(lat2, out=lat2)
        a = ne.evaluate('sin(0.5 * dlon) * sin(0.5 * dlon)')
        a = ne.evaluate('a * cos(lat1) * cos(lat2)')
        a = ne.evaluate('a + (sin(0.5 * dlat) * sin(0.5 * dlat))')
        c = ne.evaluate('2 * arctan2(sqrt(a), sqrt(1 - a))')
        return ne.evaluate('6371315.0 * c') # Return the distance

    def nearest_point(self, lon, lat):
        '''
        Get (i, j) indices of the grid point nearest to (lon, lat).
        Delegates to the compiled eddy_tracker.nearest routine.
        '''
        i, j = eddy_tracker.nearest(lon, lat, self._lon, self._lat, self._lon.shape)
        return i, j

    def half_interp(self, h_one, h_two):
        '''
        Speed up frequent operations of type 0.5 * (arr[:-1] + arr[1:])
        NOTE: modifies h_one in place and returns it; callers must pass a
        throwaway array (or a fresh slice copy) as the first argument.
        '''
        h_one += h_two
        h_one *= 0.5
        return h_one
        #return ne.evaluate('0.5 * (h_one + h_two)')

    def get_AVISO_f_pm_pn(self):
        '''
        Padded matrices are used here because Coriolis (f), pm and pn
        are needed for the geostrophic velocity computation in
        method getSurfGeostrVel()
        NOTE: this should serve for ROMS too

        Sets: _f (Coriolis), _gof (gravity/Coriolis), _dx/_pm and _dy/_pn
        (grid spacings and their reciprocals), all on the padded grid.
        '''
        print '--- Computing g/f (gravity/Coriolis), pm (dx) and pn (dy) for padded grid'
        # Get gravity / Coriolis: f = 2 * Omega * sin(lat), built up in place
        # (4 * pi / 86400 == 2 * Omega for a sidereal-day approximation)
        self._gof = np.sin(np.deg2rad(self.latpad()))
        self._gof *= 4.
        self._gof *= np.pi
        self._gof /= 86400.
        self._f = np.copy(self._gof)
        self._gof = self.gravity / self._gof
        # Midpoint (u- and v-point) coordinates for spacing computations
        lonu = self.half_interp(self.lonpad()[:,:-1], self.lonpad()[:,1:])
        latu = self.half_interp(self.latpad()[:,:-1], self.latpad()[:,1:])
        lonv = self.half_interp(self.lonpad()[:-1], self.lonpad()[1:])
        latv = self.half_interp(self.latpad()[:-1], self.latpad()[1:])
        # Get pm and pn
        pm = np.zeros_like(self.lonpad())
        pm[:,1:-1] = self.haversine_dist(lonu[:,:-1], latu[:,:-1],
                                         lonu[:,1:], latu[:,1:])
        # Replicate edge columns so the padded array has no zeros
        pm[:,0] = pm[:,1]
        pm[:,-1] = pm[:,-2]
        self._dx = pm
        self._pm = np.reciprocal(pm)
        pn = np.zeros_like(self.lonpad())
        pn[1:-1] = self.haversine_dist(lonv[:-1], latv[:-1],
                                       lonv[1:], latv[1:])
        pn[0] = pn[1]
        pn[-1] = pn[-2]
        self._dy = pn
        self._pn = np.reciprocal(pn)
        return self

    def u2rho_2d(self, uu_in):
        '''
        Convert a 2D field at u points to a field at rho points
        (averages adjacent columns, replicating the edge columns).
        '''
        def uu2ur(uu_in, Mp, Lp):
            L = Lp - 1
            Lm = L - 1
            u_out = np.zeros((Mp, Lp))
            u_out[:, 1:L] = self.half_interp(uu_in[:, 0:Lm], uu_in[:, 1:L])
            u_out[:, 0] = u_out[:, 1]
            u_out[:, L] = u_out[:, Lm]
            return (u_out.squeeze())
        Mshp, Lshp = uu_in.shape
        return uu2ur(uu_in, Mshp, Lshp + 1)

    def v2rho_2d(self, vv_in):
        # Convert a 2D field at v points to a field at rho points
        # (averages adjacent rows, replicating the edge rows)
        def vv2vr(vv_in, Mp, Lp):
            M = Mp - 1
            Mm = M - 1
            v_out = np.zeros((Mp, Lp))
            v_out[1:M] = self.half_interp(vv_in[:Mm], vv_in[1:M])
            v_out[0] = v_out[1]
            v_out[M] = v_out[Mm]
            return (v_out.squeeze())
        Mshp, Lshp = vv_in.shape
        return vv2vr(vv_in, Mshp + 1, Lshp)

    def rho2u_2d(self, rho_in):
        '''
        Convert a 2D field at rho points to a field at u points.
        NOTE(review): `rho_in[:, :Lp - 1]` is a view, so the in-place
        += / *= below mutate the caller's array -- confirm callers pass
        throwaway data.
        '''
        def _r2u(rho_in, Lp):
            u_out = rho_in[:, :Lp - 1]
            u_out += rho_in[:, 1:Lp]
            u_out *= 0.5
            return np.squeeze(u_out)
        assert rho_in.ndim == 2, 'rho_in must be 2d'
        Mshp, Lshp = rho_in.shape
        return _r2u(rho_in, Lshp)

    def rho2v_2d(self, rho_in):
        '''
        Convert a 2D field at rho points to a field at v points.
        NOTE(review): same in-place mutation caveat as rho2u_2d.
        '''
        def _r2v(rho_in, Mp):
            v_out = rho_in[:Mp - 1]
            v_out += rho_in[1:Mp]
            v_out *= 0.5
            return np.squeeze(v_out)
        assert rho_in.ndim == 2, 'rho_in must be 2d'
        Mshp, Lshp = rho_in.shape
        return _r2v(rho_in, Mshp)

    def uvmask(self):
        '''
        Get mask at U and V points (product of adjacent rho-point masks,
        so a u/v point is masked when either neighbour is masked).
        '''
        print '--- Computing umask and vmask for padded grid'
        Mp, Lp = self.mask.shape
        M = Mp - 1
        L = Lp - 1
        self._umask = self.mask[:,:L] * self.mask[:,1:Lp]
        self._vmask = self.mask[:M] * self.mask[1:Mp]
        return self

    def make_gridmask(self, with_pad=True, use_maskoceans=False):
        '''
        Use Basemap to make a landmask (stored as self.mask, int-valued),
        plus projected grid coordinates self.Mx, self.My.
        '''
        print '--- Computing Basemap'
        # Create Basemap instance for Mercator projection.
        # NOTE(review): lat_ts reads module-level latmin/latmax, not
        # self.latmin/self.latmax -- this only works when the script globals
        # are defined; looks like a bug, confirm before reuse as a library.
        self.M = Basemap(projection='merc', llcrnrlon = self.lonmin - 1, \
                         urcrnrlon = self.lonmax + 1, \
                         llcrnrlat = self.latmin - 1, \
                         urcrnrlat = self.latmax + 1, \
                         lat_ts = 0.5 * (latmin + latmax), \
                         resolution = 'h') # 'h'-high, 'l'-low
        if with_pad:
            x, y = self.M(self.lonpad(), self.latpad())
        else:
            x, y = self.M(self.lon(), self.lat())
        print '--- Computing Basemap mask'
        self.mask = np.ones_like(x, dtype=bool)
        if use_maskoceans:
            print "------ using Basemap *maskoceans*: this is fast but may be"
            print "------ marginally less accurate than Basemap's *is_land* method..."
            from mpl_toolkits.basemap import maskoceans
            if with_pad:
                self.mask = maskoceans(self.lonpad(), self.latpad(), self.mask,
                                       inlands=False, resolution='f', grid=1.25)
            else:
                # NOTE(review): maskoceans(lonsin, latsin, datain, ...) needs a
                # third (data) argument; this call would raise TypeError if
                # reached -- confirm this branch is never exercised.
                self.mask = maskoceans(self.lon(), self.lat())
            self.mask = self.mask.mask.astype(int)
        else:
            print "------ using Basemap *is_land*: this is slow for larger domains"
            print "------ but can be speeded up once Basemap's *maskoceans* method is introduced"
            print "------ (currently longitude wrapping behaviour is unclear...)"
            # Point-by-point land test over the whole grid
            it = np.nditer([x, y], flags=['multi_index'])
            while not it.finished:
                self.mask[it.multi_index] = self.M.is_land(x[it.multi_index],
                                                           y[it.multi_index])
                it.iternext()
            # Invert: is_land True -> 0 (land masked out), water -> 1...
            # NOTE(review): `-bool` negation yields -1/0 before astype(int);
            # downstream code appears to compare against 1 -- verify intent.
            self.mask = np.atleast_2d(-self.mask).astype(int)
        self.Mx, self.My = x, y
        return self

    def get_geostrophic_velocity(self, zeta):
        '''
        Returns u and v geostrophic velocity at
        surface from variables f, zeta, pm, pn...
        Note: output at rho points

        u = -(g/f) * dzeta/dy, v = (g/f) * dzeta/dx, with the differences
        evaluated via numexpr on masked u/v-point stencils then interpolated
        back to rho points.  Results are written in place into upad/vpad.
        '''
        gof = self.gof().view()
        vmask = self.vmask().view()
        zeta1, zeta2 = zeta.data[1:].view(), zeta.data[:-1].view()
        pn1, pn2 = self.pn()[1:].view(), self.pn()[:-1].view()
        self.upad[:] = self.v2rho_2d(ne.evaluate('vmask * (zeta1 - zeta2) * 0.5 * (pn1 + pn2)'))
        self.upad *= -gof
        #self.upad[:] = -self.gof() * self.v2rho_2d(self.vmask() * (zeta.data[1:] - zeta.data[:-1]) \
        #* 0.5 * (self.pn()[1:] + self.pn()[:-1]))
        umask = self.umask().view()
        zeta1, zeta2 = zeta.data[:,1:].view(), zeta.data[:,:-1].view()
        pm1, pm2 = self.pm()[:,1:].view(), self.pm()[:,:-1].view()
        #self.vpad[:] = ne.evaluate('gof * (umask * (zeta1 - zeta2) * 0.5 * (pm1 + pm2))')
        self.vpad[:] = self.u2rho_2d(ne.evaluate('umask * (zeta1 - zeta2) * 0.5 * (pm1 + pm2)'))
        self.vpad *= gof
        #self.vpad[:] = self.gof() * self.u2rho_2d(self.umask() * (zeta.data[:, 1:] - zeta.data[:, :-1]) \
        #* 0.5 * (self.pm()[:, 1:] + self.pm()[:, :-1]))
        return self

    def set_u_v_eke(self, pad=2):
        '''
        Allocate the working arrays upad, vpad (padded velocities), and
        eke, u, v (unpadded).  For zero-crossing grids upad is assembled
        from the two wrapped halves of the grid.
        '''
        #double_pad = pad * 2
        if self.zero_crossing:
            u1 = np.empty((self.jp1 - self.jp0, self.ip0))
            u0 = np.empty((self.jp1 - self.jp0, self._lon.shape[1] - self.ip1))
            self.upad = np.ma.concatenate((u0, u1), axis=1)
        else:
            self.upad = np.empty((self.jp1 - self.jp0, self.ip1 - self.ip0))
        self.vpad = np.empty_like(self.upad)
        # Unpadded views define the shapes of the diagnostic arrays
        self.eke = np.empty_like(self.upad[self.jup0:self.jup1, self.iup0:self.iup1])
        self.u = np.empty_like(self.eke)
        self.v = np.empty_like(self.eke)
        return self

    def getEKE(self):
        '''
        Compute eddy kinetic energy 0.5 * (u^2 + v^2) from the padded
        velocities, writing into the preallocated u, v and eke arrays.
        '''
        self.u[:] = self.upad[self.jup0:self.jup1, self.iup0:self.iup1]
        self.v[:] = self.vpad[self.jup0:self.jup1, self.iup0:self.iup1]
        u, v = self.u.view(), self.v.view()
        self.eke[:] = ne.evaluate('0.5 * (u**2 + v**2)')
        #np.add(self.u**2, self.v**2, out=self.eke)
        #self.eke *= 0.5
        return self
class AvisoGrid (PyEddyTracker):
    '''
    Class to satisfy the need of the eddy tracker
    to have a grid class.

    Wraps an AVISO SSH/SLA netCDF file (new 2014 or old layout) and
    exposes lon/lat accessors, land masks, grid metrics and data reads.
    '''

    def __init__(self, AVISO_file, lonmin, lonmax, latmin, latmax, with_pad=True, use_maskoceans=False):
        '''
        Initialise the grid object.
          AVISO_file : path to one AVISO netCDF file (used for coordinates/attrs)
          lonmin..latmax : domain bounds in degrees
          with_pad, use_maskoceans : forwarded to make_gridmask
        '''
        super(AvisoGrid, self).__init__()
        print '\nInitialising the AVISO_grid'
        self.i0, self.j0 = 0, 0
        self.i1, self.j1 = None, None
        self.lonmin = lonmin
        self.lonmax = lonmax
        self.latmin = latmin
        self.latmax = latmax
        try: # new AVISO (2014)
            self._lon = self.read_nc(AVISO_file, 'lon')
            self._lat = self.read_nc(AVISO_file, 'lat')
            self.fillval = self.read_nc_att(AVISO_file, 'sla', '_FillValue')
            base_date = self.read_nc_att(AVISO_file, 'time', 'units')
            # units string like 'days since 1950-01-01 ...': parse the epoch
            self.base_date = dt.date2num(parser.parse(base_date.split(' ')[2:4][0]))
        except Exception: # old AVISO
            self._lon = self.read_nc(AVISO_file, 'NbLongitudes')
            self._lat = self.read_nc(AVISO_file, 'NbLatitudes')
            self.fillval = self.read_nc_att(AVISO_file, 'Grid_0001', '_FillValue')
        # Shift wholly-western domains to negative longitudes
        if np.logical_and(lonmin < 0, lonmax <=0):
            self._lon -= 360.
        self._lon, self._lat = np.meshgrid(self._lon, self._lat)
        self._angle = np.zeros_like(self._lon)
        # To be used for handling a longitude range that crosses 0 degree meridian
        if np.logical_and(lonmin < 0, lonmax >= 0):
            self.zero_crossing = True
        self.set_initial_indices(lonmin, lonmax, latmin, latmax)
        self.set_index_padding()
        self.make_gridmask(with_pad, use_maskoceans).uvmask()
        self.get_AVISO_f_pm_pn()
        self.set_u_v_eke()
        pad2 = 2 * self.pad
        # Unpadded grid shape seen by the tracker
        self.shape = (self.f().shape[0] - pad2, self.f().shape[1] - pad2)

    def get_AVISO_data(self, AVISO_file):
        '''
        Read nc data from AVISO file; returns SLA (zeta) in cm on the
        padded domain as a float64 masked array, with land points filled
        by nearest-neighbour extrapolation.
        '''
        if self.zero_crossing:
            try: # new AVISO (2014)
                # Read the two wrapped halves separately, then concatenate
                ssh1 = self.read_nc(AVISO_file, 'sla',
                                    indices='[:, self.jp0:self.jp1, :self.ip0]')
                ssh0 = self.read_nc(AVISO_file, 'sla',
                                    indices='[:,self.jp0:self.jp1, self.ip1:]')
                ssh0, ssh1 = ssh0.squeeze(), ssh1.squeeze()
                ssh0 *= 100. # m to cm
                ssh1 *= 100. # m to cm
            except Exception: # old AVISO
                # Old layout stores (lon, lat); transpose to (lat, lon)
                ssh1 = self.read_nc(AVISO_file, 'Grid_0001',
                                    indices='[:self.ip0, self.jp0:self.jp1]').T
                ssh0 = self.read_nc(AVISO_file, 'Grid_0001',
                                    indices='[self.ip1:,self.jp0:self.jp1]').T
            zeta = np.ma.concatenate((ssh0, ssh1), axis=1)
        else:
            try: # new AVISO (2014)
                zeta = self.read_nc(AVISO_file, 'sla',
                                    indices='[:, self.jp0:self.jp1, self.ip0:self.ip1]')
                zeta = zeta.squeeze()
                zeta *= 100. # m to cm
            except Exception: # old AVISO
                zeta = self.read_nc(AVISO_file, 'Grid_0001',
                                    indices='[self.ip0:self.ip1, self.jp0:self.jp1]').T
                #date = self.read_nc_att(AVISO_file, 'Grid_0001', 'date') # cm
        try: # Extrapolate over land points with fillmask
            # NOTE(review): this resolves to the module-level fillmask from
            # the star import, NOT the self.fillmask method below -- confirm
            # which one is intended.
            zeta = fillmask(zeta, self.mask == 1)
            #zeta = fillmask(zeta, 1 + (-1 * zeta.mask))
        except Exception: # In case no landpoints
            zeta = np.ma.masked_array(zeta)
        return zeta.astype(np.float64)

    def fillmask(self, x, mask):
        '''
        Fill missing values in an array with an average of nearest
        neighbours
        From http://permalink.gmane.org/gmane.comp.python.scientific.user/19610
          x    : 2d array to fill in place
          mask : array where 0 marks points to be filled
        '''
        assert x.ndim == 2, 'x must be a 2D array.'
        fill_value = 9999.99
        x[mask == 0] = fill_value
        # Create (i, j) point arrays for good and bad data.
        # Bad data are marked by the fill_value, good data elsewhere.
        igood = np.vstack(np.where(x != fill_value)).T
        ibad = np.vstack(np.where(x == fill_value)).T
        # Create a tree for the bad points, the points to be filled
        tree = spatial.cKDTree(igood)
        # Get the four closest points to the bad points
        # here, distance is squared
        dist, iquery = tree.query(ibad, k=4, p=2)
        # Create a normalised weight, the nearest points are weighted as 1.
        # Points greater than one are then set to zero
        # NOTE(review): the effect is that only the nearest neighbour(s)
        # (those tied at minimum distance) contribute to the fill value.
        weight = dist / (dist.min(axis=1)[:,np.newaxis])
        weight *= np.ones_like(dist)
        np.place(weight, weight > 1., 0.)
        # Multiply the queried good points by the weight, selecting only the
        # nearest points. Divide by the number of nearest points to get average
        xfill = weight * x[igood[:,0][iquery], igood[:,1][iquery]]
        xfill = (xfill / weight.sum(axis=1)[:,np.newaxis]).sum(axis=1)
        # Place average of nearest good points, xfill, into bad point locations
        x[ibad[:,0], ibad[:,1]] = xfill
        return x

    def lon(self):
        # 2d longitudes of the (unpadded) domain
        if self.zero_crossing:
            # TO DO: These concatenations are possibly expensive, they shouldn't need
            # to happen with every call to self.lon()
            lon0 = self._lon[self.j0:self.j1, self.i1:]
            lon1 = self._lon[self.j0:self.j1, :self.i0]
            return np.concatenate((lon0 - 360., lon1), axis=1)
        else:
            return self._lon[self.j0:self.j1, self.i0:self.i1]

    def lat(self):
        # 2d latitudes of the (unpadded) domain
        if self.zero_crossing:
            lat0 = self._lat[self.j0:self.j1, self.i1:]
            lat1 = self._lat[self.j0:self.j1, :self.i0]
            return np.concatenate((lat0, lat1), axis=1)
        else:
            return self._lat[self.j0:self.j1, self.i0:self.i1]

    def lonpad(self):
        # 2d longitudes of the padded domain
        if self.zero_crossing:
            lon0 = self._lon[self.jp0:self.jp1, self.ip1:]
            lon1 = self._lon[self.jp0:self.jp1, :self.ip0]
            return np.concatenate((lon0 - 360., lon1), axis=1)
        else:
            return self._lon[self.jp0:self.jp1, self.ip0:self.ip1]

    def latpad(self):
        # 2d latitudes of the padded domain
        if self.zero_crossing:
            lat0 = self._lat[self.jp0:self.jp1, self.ip1:]
            lat1 = self._lat[self.jp0:self.jp1, :self.ip0]
            return np.concatenate((lat0, lat1), axis=1)
        else:
            return self._lat[self.jp0:self.jp1, self.ip0:self.ip1]

    def angle(self):
        # Grid rotation angle (always zero for AVISO lat/lon grids)
        return self._angle[self.j0:self.j1, self.i0:self.i1]

    #def mask(self, mask):
        #return -mask.mask

    def umask(self): # Mask at U points
        return self._umask

    def vmask(self): # Mask at V points
        return self._vmask

    def f(self): # Coriolis
        return self._f

    def gof(self): # Gravity / Coriolis
        return self._gof

    def dx(self): # Grid spacing along X direction
        return self._dx

    def dy(self): # Grid spacing along Y direction
        return self._dy

    def pm(self): # Reciprocal of dx
        return self._pm

    def pn(self): # Reciprocal of dy
        return self._pn

    def get_resolution(self):
        # Mean grid spacing (degrees): geometric mean of dlon and dlat
        return np.sqrt(np.diff(self.lon()[1:], axis=1) *
                       np.diff(self.lat()[:,1:], axis=0)).mean()

    def boundary(self):
        '''
        Return lon, lat of perimeter around a ROMS grid
        Input:
          indices to get boundary of specified subgrid
        Returns:
          lon/lat boundary points (west edge, north edge, reversed east
          edge, reversed south edge -- a closed anticlockwise path)
        '''
        lon = np.r_[(self.lon()[:,0], self.lon()[-1],
                     self.lon()[::-1,-1], self.lon()[0,::-1])]
        lat = np.r_[(self.lat()[:,0], self.lat()[-1],
                     self.lat()[::-1,-1], self.lat()[0,::-1])]
        return lon, lat

    def brypath(self, imin=0, imax=-1, jmin=0, jmax=-1):
        '''
        Return Path object of perimeter around a ROMS grid
        Indices to get boundary of specified subgrid
        NOTE(review): imin/imax/jmin/jmax are currently unused -- boundary()
        always works on the full domain.
        '''
        lon, lat = self.boundary()
        brypath = np.array([lon, lat]).T
        return path.Path(brypath)

    def pcol_2dxy(self, x, y):
        '''
        Function to shift x, y for subsequent use with pcolor
        by Jeroen Molemaker UCLA 2008
        (cell-corner coordinates from cell-centre coordinates, with
        linear extrapolation at the first row/column)
        '''
        Mp, Lp = x.shape
        M = Mp - 1
        L = Lp - 1
        x_pcol = np.zeros((Mp, Lp))
        y_pcol = np.zeros((Mp, Lp))
        x_tmp = self.half_interp(x[:,:L], x[:,1:Lp])
        x_pcol[1:Mp,1:Lp] = self.half_interp(x_tmp[0:M,:], x_tmp[1:Mp,:])
        x_pcol[0,:] = 2. * x_pcol[1,:] - x_pcol[2,:]
        x_pcol[:,0] = 2. * x_pcol[:,1] - x_pcol[:,2]
        y_tmp = self.half_interp(y[:,0:L], y[:,1:Lp] )
        y_pcol[1:Mp,1:Lp] = self.half_interp(y_tmp[0:M,:], y_tmp[1:Mp,:])
        y_pcol[0,:] = 2. * y_pcol[1,:] - y_pcol[2,:]
        y_pcol[:,0] = 2. * y_pcol[:,1] - y_pcol[:,2]
        return x_pcol, y_pcol
#++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
if __name__ == '__main__':
plt.close('all')
#----------------------------------------------------------------------------
# Some user-defined input...
# Specify the AVISO domain
the_domain = 'Global'
#the_domain = 'BlackSea'
#the_domain = 'MedSea' # not yet implemented
# Specify use of new AVISO 2014 data
new_AVISO = True
# Specify subsampling new AVISO 2014 data; i.e. you may
# prefer to use every second day rather than every day
if new_AVISO:
new_AVISO_SUBSAMP = False
if new_AVISO_SUBSAMP:
days_btwn_recs = 3. # put sampling rate (days) here
else:
days_btwn_recs = 1.
else: # old seven day AVISO
days_btwn_recs = 7.
# Save only tracks longer than...
track_duration_min = 28. # days
# Set path(s) to directory where SSH data are stored...
if 'Global' in the_domain:
if new_AVISO:
#directory = '/path/to/your/aviso_data/'
#directory = '/shared/Altimetry/global/delayed-time/grids/msla/all-sat-merged/h/'
#AVISO_files = 'dt_global_allsat_msla_h_????????_20140106.nc'
directory = '/marula/emason/data/altimetry/global/delayed-time/grids/msla/two-sat-merged/h/'
AVISO_files = 'dt_global_twosat_msla_h_????????_20140106.nc'
else:
#directory = '/path/to/your/aviso_data/'
directory = '/marula/emason/data/altimetry/MSLA/GLOBAL/DT/REF/'
AVISO_files = 'dt_ref_global_merged_msla_h_qd_????????_*.nc'
elif 'MedSea' in the_domain:
if new_AVISO:
#directory = '/path/to/your/aviso_data/'
directory = '/shared/Altimetry/regional-mediterranean/delayed-time/grids/msla/all-sat-merged/h/'
AVISO_files = 'dt_blacksea_allsat_msla_h_????????_*.nc'
else:
pass
elif 'BlackSea' in the_domain:
if new_AVISO:
#directory = '/path/to/your/aviso_data/'
directory = '/shared/Altimetry/regional-blacksea/delayed-time/grids/msla/all-sat-merged/h/'
AVISO_files = 'dt_blacksea_allsat_msla_h_????????_*.nc'
else:
Exception #no_domain_specified
# Set date range (YYYYMMDD)
#date_str, date_end = 19980107, 19991110 #
#date_str, date_end = 20081107, 20100110 #
#date_str, date_end = 19921014, 20120718 #
#date_str, date_end = 20020101, 20020131 #
date_str, date_end = 20020101, 20020630 #
# Choose type of diagnostic: either q-parameter ('Q') or sea level anomaly ('sla')
#diag_type = 'Q' #<<< not implemented in 1.2.0
diag_type = 'sla'
# Path to directory where outputs are to be saved...
#savedir = directory
#savedir = '/marula/emason/aviso_eddy_tracking/pablo_exp/'
#savedir = '/marula/emason/aviso_eddy_tracking/new_AVISO_test/'
#savedir = '/marula/emason/aviso_eddy_tracking/new_AVISO_SUBSAMP-3days/'
#savedir = '/marula/emason/aviso_eddy_tracking/junk/'
savedir = '/marula/emason/aviso_eddy_tracking/junk2/'
#savedir = '/marula/emason/aviso_eddy_tracking/new_AVISO_test/BlackSea/'
#savedir = '/marula/emason/aviso_eddy_tracking/Corridor_V3_Dec2014/'
#savedir = '/marula/emason/aviso_eddy_tracking/CLS_test_1_acd66ba338a9/'
#savedir = '/marula/emason/aviso_eddy_tracking/CLS_test_2/'
#savedir = '/marula/emason/aviso_eddy_tracking/CLS_test_3/'
#savedir = '/marula/emason/aviso_eddy_tracking/CLS_test_4/'
#savedir = '/path/to/save/your/outputs/'
# True to save outputs in same style as Chelton
chelton_style_nc = True
print '\nOutputs saved to', savedir
# Reference Julian day (Julian date at Jan 1, 1992)
jday_ref = 2448623
# Min and max permitted eddy radii [degrees]
radmin = 0.35 # degrees (Chelton recommends ~50 km minimum)
radmax = 4.461 # degrees
if 'Q' in diag_type:
ampmin = 0.02 # max(abs(xi/f)) within the eddy
ampmax = 100.
elif 'sla' in diag_type:
ampmin = 1. # cm
ampmax = 150.
# Obtain this file from:
# http://www-po.coas.oregonstate.edu/research/po/research/rossby_radius/
rw_path = '/home/emason/data_tmp/chelton_eddies/rossrad.dat'
# Make figures for animations
anim_figs = True
# Define contours
# Set Q contour spacing
if 'Q' in diag_type:
qparameter = np.linspace(0, 5*10**-11, 100)
# Set SLA contour spacing
elif 'sla' in diag_type:
#slaparameter = np.arange(-100., 101., 1.0) # cm
slaparameter = np.arange(-100., 100.25, 0.25) # cm
# Apply a filter to the Q parameter
#smoothing = 'Hanning'
smoothing = 'Gaussian'
if 'smoothing' not in locals():
smoothing = False
smooth_fac = False
elif 'Hanning' in smoothing: # apply Hanning filter
smooth_fac = 5 # number of passes
elif 'Gaussian' in smoothing: # apply Gaussian filter
smooth_fac = 'Deprecated'
zwl = 20. # degrees, zonal wavelength (see Chelton etal 2011)
mwl = 10. # degrees, meridional wavelength
else:
Exception
subdomain = True
if the_domain in 'Global':
#lonmin = -40. # Canary
#lonmax = -5.5
#latmin = 16.
#latmax = 36.5
#lonmin = -30. # BENCS
#lonmax = 22.
#latmin = -35.
#latmax = -10.
lonmin = -65. # Corridor
lonmax = -5.5
latmin = 11.5
latmax = 38.5
#lonmin = -179. # SEP
#lonmax = -65
#latmin = -40.
#latmax = -5.
#lonmin = -70. # Souza
#lonmax = 30.
#latmin = -50.
#latmax = -15.
#lonmin = -1.5 # Charlotte
#lonmax = 51.
#latmin = -47.
#latmax = -24.
lonmin = -38. # small test
lonmax = -25.
latmin = 30.
latmax = 38.
lonmin = .15 # big test
lonmax = 359.15
latmin = -70.
latmax = 70.
elif the_domain in 'MedSea':
lonmin = 354. # SEP
lonmax = 396
latmin = 30.
latmax = 46.
elif the_domain in 'BlackSea':
lonmin = 27. # Black Sea
lonmax = 42.
latmin = 40.
latmax = 47.
# Typical parameters
dist0 = 25000. # m separation distance after ~7 days (see CSS11 fig 22)
if 'Q' in diag_type:
amp0 = 0.02 # vort/f
elif 'sla' in diag_type:
amp0 = 2. # cm
area0 = np.pi * 60000.**2
temp0 = 15.
salt0 = 35.
# Parameters used by Chelton etal and Kurian etal (Sec. 3.2) to ensure the slow evolution
# of the eddies over time; they use min and max values of 0.25 and 2.5
evolve_ammin = 0.0005# 0.25 # min change in amplitude
evolve_ammax = 500#2.5 # max change in amplitude
evolve_armin = 0.0005# 0.25 # min change in area
evolve_armax = 500 # max change in area
separation_method = 'ellipse' # see CSS11
#separation_method = 'sum_radii' # see Kurian etal (2011)
#if 'sum_radii' in separation_method:
## Separation distance factor. Adjust according to number of days between records
## For 7 days, Chelton uses 150 km search ellipse
## So, given typical eddy radius of r=50 km, 1.5 * (r1 + r2) = 150 km.
##sep_dist_fac = 1.0
#sep_dist_fac = 1.15 # Seems ok for AVISO 7-day
##sep_dist_fac = 1.5 # Causes tracks to jump for AVISO 7-day
#Define track_extra_variables to track and save:
# - effective contour points
# - speed-based contour points
# - shape test values
# - profiles of swirl velocity from effective contour inwards
# Useful for working with ARGO data
track_extra_variables = False
cmap = plt.cm.RdBu
verbose = False
# End user defined options (edit below at own risk)
#----------------------------------------------------------------------------
assert date_str < date_end, 'date_end must be larger than date_str'
assert diag_type in ('Q','sla'), 'diag_type not properly defined'
thestartdate = dt.date2num(datestr2datetime(str(date_str)))
theenddate = dt.date2num(datestr2datetime(str(date_end)))
# Get complete AVISO file list
AVISO_files = sorted(glob.glob(directory + AVISO_files))