 from py_eddy_tracker import EddyParser
 from glob import glob
 from yaml import load as yaml_load
-from py_eddy_tracker.py_eddy_tracker_property_classes import EddiesObservations, TrackEddiesObservations, VirtualEddiesObservations
-#~ from py_eddy_tracker.make_eddy_tracker_list_obj import RossbyWaveSpeed
+from py_eddy_tracker.py_eddy_tracker_property_classes import \
+    EddiesObservations, TrackEddiesObservations, \
+    VirtualEddiesObservations
 from py_eddy_tracker.tools import distance_matrix
 
 import logging
 import numpy as np
 import datetime as dt
 
+
 D2R = 0.017453292519943295
-#~ def distance_matrix(lon0, lat0, lon1, lat1):
-def distance_matrix2(lon0, lat0, lon1, lat1):
-    """Why it's quicker than cython version?
-    """
-    nb_elt0 = lon0.shape[0]
-    nb_elt1 = lon1.shape[0]
-    lon1, lon0 = np.meshgrid(lon1, lon0)
-    lat1, lat0 = np.meshgrid(lat1, lat0)
-    sin_dlat = np.sin((lat1 - lat0) * 0.5 * D2R)
-    sin_dlon = np.sin((lon1 - lon0) * 0.5 * D2R)
-    cos_lat1 = np.cos(lat0 * D2R)
-    cos_lat2 = np.cos(lat1 * D2R)
-    a_val = sin_dlon ** 2 * cos_lat1 * cos_lat2 + sin_dlat ** 2
-    return 6371.315 * 2 * np.arctan2(a_val ** 0.5, (1 - a_val) ** 0.5)
 UINT32_MAX = 4294967295
 UINT16_MAX = 65535
 
+
 if __name__ == '__main__':
     # Run using:
     PARSER = EddyParser(
@@ -43,7 +32,7 @@ if __name__ == '__main__':
     # Read yaml configuration file
     with open(YAML_FILE, 'r') as stream:
         CONFIG = yaml_load(stream)
-
+
     NB_OBS_MIN = int(CONFIG['TRACK_DURATION_MIN'])
     NB_VIRTUAL_OBS_MAX_BY_SEGMENT = int(CONFIG['VIRTUAL_LEGNTH_MAX'])
 
@@ -60,7 +49,7 @@ if __name__ == '__main__':
     CORRESPONDANCES = []
     START = True
     FLG_VIRTUAL = False
-
+
     for file_name in FILENAMES[1:]:
         logging.debug('%s match with previous state', file_name)
         e_current = EddiesObservations.load_from_netcdf(file_name)
@@ -69,9 +58,12 @@ if __name__ == '__main__':
         if FLG_VIRTUAL:
             nb_real_obs = len(e_previous)
             # If you comment this, the virtual functionality will be disabled
-            logging.debug('%d virtual obs will be add to previous', len(virtual_obs))
+            logging.debug('%d virtual obs will be add to previous',
+                          len(virtual_obs))
             e_previous = e_previous.merge(virtual_obs)
-        dist_result = np.empty((len(e_previous), len(e_current)), dtype='f8') + np.nan
+        dist_result = np.empty((len(e_previous),
+                                len(e_current)),
+                               dtype='f8') + np.nan
         distance_matrix(
             e_previous.obs['lon'], e_previous.obs['lat'],
             e_current.obs['lon'], e_current.obs['lat'],
@@ -80,17 +72,23 @@ if __name__ == '__main__':
         nb_match = i_previous.shape[0]
 
         logging.debug('%d match with previous', nb_match)
-        correspondance = np.array(i_previous, dtype=[('in', 'u2'), ('out', 'u2'), ('id', 'u4'), ('virtual', np.bool_), ('virtual_length', 'u1')])
+        correspondance = np.array(
+            i_previous,
+            dtype=[
+                ('in', 'u2'),
+                ('out', 'u2'),
+                ('id', 'u4'),
+                ('virtual', np.bool_),
+                ('virtual_length', 'u1')])
         correspondance['out'] = i_current
-
+
         if FLG_VIRTUAL:
             correspondance['virtual'] = i_previous >= nb_real_obs
-
         else:
             correspondance['virtual'] = False
-
+
         if START:
-            START = False
+            START = False
             # Set an id for each match
             correspondance['id'] = np.arange(nb_match)
             # Set counter
@@ -101,38 +99,42 @@ if __name__ == '__main__':
             # We get old ids for previously tracked eddies
             previous_id = CORRESPONDANCES[-1]['id']
             id_previous[CORRESPONDANCES[-1]['out']] = previous_id
-            correspondance['id'] = id_previous[correspondance['in']]
+            correspondance['id'] = id_previous[correspondance['in']]
             if FLG_VIRTUAL:
                 nb_rebirth = correspondance['virtual'].sum()
                 if nb_rebirth != 0:
-                    logging.debug('%d re-birth due to prolongation with virtual observations', nb_rebirth)
+                    logging.debug('%d re-birth due to prolongation with'
+                                  ' virtual observations', nb_rebirth)
                 # Set id for virtual
+                i_virtual = correspondance['in'][correspondance['virtual']] - nb_real_obs
                 correspondance['id'][correspondance['virtual']] = \
-                    virtual_obs.obs['track'][correspondance['in'][correspondance['virtual']] - nb_real_obs]
+                    virtual_obs.obs['track'][i_virtual]
                 correspondance['virtual_length'][correspondance['virtual']] = \
-                    virtual_obs.obs['segment_size'][correspondance['in'][correspondance['virtual']] - nb_real_obs]
-                #~ correspondance['in'][correspondance['virtual']] = UINT16_MAX
-
+                    virtual_obs.obs['segment_size'][i_virtual]
 
         # SECTION for virtual observation
         nb_virtual_prolongate = 0
         if FLG_VIRTUAL:
             # Save previous state to count virtual obs
             previous_virtual_obs = virtual_obs
-            virtual_dead_id = np.setdiff1d(virtual_obs.obs['track'], correspondance['id'])
+            virtual_dead_id = np.setdiff1d(virtual_obs.obs['track'],
+                                           correspondance['id'])
             list_previous_virtual_id = virtual_obs.obs['track'].tolist()
-            i_virtual_dead_id = [list_previous_virtual_id.index(i) for i in virtual_dead_id]
+            i_virtual_dead_id = [
+                list_previous_virtual_id.index(i) for i in virtual_dead_id]
             # Virtual obs which can be prolonged
             alive_virtual_obs = virtual_obs.obs['segment_size'][i_virtual_dead_id] < NB_VIRTUAL_OBS_MAX_BY_SEGMENT
             nb_virtual_prolongate = alive_virtual_obs.sum()
-            logging.debug('%d virtual obs will be prolongate on the next step', nb_virtual_prolongate)
+            logging.debug('%d virtual obs will be prolongate on the '
+                          'next step', nb_virtual_prolongate)
 
         # List previous ids which are not used in the next step
         dead_id = np.setdiff1d(previous_id, correspondance['id'])
         nb_dead_track = len(dead_id)
         logging.debug('%d death of real obs in this step', nb_dead_track)
         # Creation of a virtual step for dead ones
-        virtual_obs = VirtualEddiesObservations(size=nb_dead_track + nb_virtual_prolongate)
+        virtual_obs = VirtualEddiesObservations(
+            size=nb_dead_track + nb_virtual_prolongate)
         # Find mask/index on previous correspondance to extrapolate
         # position
         list_previous_id = previous_id.tolist()
@@ -145,36 +147,44 @@ if __name__ == '__main__':
         # Position N-1 : B
         # Virtual Position : C
         # New position C = B + AB
-        virtual_obs.obs['dlon'][:nb_dead_track] = obs_b['lon'] - obs_a['lon']
-        virtual_obs.obs['dlat'][:nb_dead_track] = obs_b['lat'] - obs_a['lat']
-        virtual_obs.obs['lon'][:nb_dead_track] = obs_b['lon'] + virtual_obs.obs['dlon'][:nb_dead_track]
-        virtual_obs.obs['lat'][:nb_dead_track] = obs_b['lat'] + virtual_obs.obs['dlat'][:nb_dead_track]
+        virtual_obs.obs['dlon'][:nb_dead_track
+                                ] = obs_b['lon'] - obs_a['lon']
+        virtual_obs.obs['dlat'][:nb_dead_track
+                                ] = obs_b['lat'] - obs_a['lat']
+        virtual_obs.obs['lon'][:nb_dead_track
+                               ] = obs_b['lon'] + virtual_obs.obs['dlon'][:nb_dead_track]
+        virtual_obs.obs['lat'][:nb_dead_track
+                               ] = obs_b['lat'] + virtual_obs.obs['dlat'][:nb_dead_track]
         # Ids which are prolonged
-        virtual_obs.obs['track'][:nb_dead_track] = dead_id
+        virtual_obs.obs['track'][:nb_dead_track] = dead_id
         # Add previous virtual
         if nb_virtual_prolongate > 0:
             obs_to_prolongate = previous_virtual_obs.obs[i_virtual_dead_id][alive_virtual_obs]
-            virtual_obs.obs['lon'][nb_dead_track:] = obs_to_prolongate['lon'] + obs_to_prolongate['dlon']
-            virtual_obs.obs['lat'][nb_dead_track:] = obs_to_prolongate['lat'] + obs_to_prolongate['dlat']
-            virtual_obs.obs['track'][nb_dead_track:] = obs_to_prolongate['track']
-            virtual_obs.obs['segment_size'][nb_dead_track:] = obs_to_prolongate['segment_size']
-        # Count
+            virtual_obs.obs['lon'][nb_dead_track:
+                                   ] = obs_to_prolongate['lon'] + obs_to_prolongate['dlon']
+            virtual_obs.obs['lat'][nb_dead_track:
+                                   ] = obs_to_prolongate['lat'] + obs_to_prolongate['dlat']
+            virtual_obs.obs['track'][nb_dead_track:
+                                     ] = obs_to_prolongate['track']
+            virtual_obs.obs['segment_size'][nb_dead_track:
+                                            ] = obs_to_prolongate['segment_size']
+        # Count
         virtual_obs.obs['segment_size'] += 1
         if NB_VIRTUAL_OBS_MAX_BY_SEGMENT > 0:
             FLG_VIRTUAL = True
         # END
 
-
        # Where id is equal to UINT32_MAX, we must add new ones;
        # we count the number of new tracks
         mask_new_id = correspondance['id'] == UINT32_MAX
         nb_new_tracks = mask_new_id.sum()
         logging.debug('%d birth in this step', nb_new_tracks)
         # Set new id
-        correspondance['id'][mask_new_id] = np.arange(CURRENT_ID, CURRENT_ID + nb_new_tracks)
+        correspondance['id'][mask_new_id] = np.arange(
+            CURRENT_ID, CURRENT_ID + nb_new_tracks)
         # Set counter
         CURRENT_ID += nb_new_tracks
-
+
         CORRESPONDANCES.append(correspondance)
 
         e_previous2 = e_previous
@@ -187,21 +197,23 @@ if __name__ == '__main__':
     for correspondance in CORRESPONDANCES:
         nb_obs_by_tracks[correspondance['id']] += 1
         # When the start is virtual, we don't have a previous correspondance
-        nb_obs_by_tracks[correspondance['id'][correspondance['virtual']]] += correspondance['virtual_length'][correspondance['virtual']]
+        nb_obs_by_tracks[correspondance['id'][correspondance['virtual']]
+                         ] += correspondance['virtual_length'][correspondance['virtual']]
 
     # Compute index of each track
     i_current_by_tracks = nb_obs_by_tracks.cumsum() - nb_obs_by_tracks
     # Number of global obs
     nb_obs = nb_obs_by_tracks.sum()
     logging.info('%d tracks will be create', CURRENT_ID)
     logging.info('%d observations will be join', nb_obs)
-    # Start create netcdf to agglomerate all eddy
+    # Start create netcdf to agglomerate all eddy
     FINAL_EDDIES = TrackEddiesObservations(size=nb_obs)
 
     # Calculate the index in each track; we compute it in u4 and translate
     # to u2 (which is limited to 65535)
     logging.debug('Compute global index array (N)')
-    n = np.arange(nb_obs, dtype='u4') - i_current_by_tracks.repeat(nb_obs_by_tracks)
+    n = np.arange(nb_obs,
+                  dtype='u4') - i_current_by_tracks.repeat(nb_obs_by_tracks)
     FINAL_EDDIES.obs['n'] = np.uint16(n)
     logging.debug('Compute global track array')
     FINAL_EDDIES.obs['track'] = np.arange(CURRENT_ID).repeat(nb_obs_by_tracks)
@@ -213,7 +225,8 @@ if __name__ == '__main__':
     FINAL_EDDIES.sign_type = eddies_previous.sign_type
 
     # To know if the track has started
-    first_obs_save_in_tracks = np.zeros(i_current_by_tracks.shape, dtype=np.bool_)
+    first_obs_save_in_tracks = np.zeros(i_current_by_tracks.shape,
+                                        dtype=np.bool_)
 
     for i, file_name in enumerate(FILENAMES[1:]):
         # Load current file (we begin with second one)
@@ -222,36 +235,39 @@ if __name__ == '__main__':
         i_id = CORRESPONDANCES[i]['id']
         # Index where we will write in the final object
         index_final = i_current_by_tracks[i_id]
-
+
         # First obs of eddies
         m_first_obs = -first_obs_save_in_tracks[i_id]
         if m_first_obs.any():
             # Index in the current file
             index_in = CORRESPONDANCES[i]['in'][m_first_obs]
             # Copy all variables
             for var, _ in eddies_current.obs.dtype.descr:
-                FINAL_EDDIES.obs[var][index_final[m_first_obs]] = eddies_previous.obs[var][index_in]
+                FINAL_EDDIES.obs[var][index_final[m_first_obs]
+                                      ] = eddies_previous.obs[var][index_in]
             # Increment
             i_current_by_tracks[i_id[m_first_obs]] += 1
             # Activate this flag, we have only one first obs by track
             first_obs_save_in_tracks[i_id] = True
             index_final = i_current_by_tracks[i_id]
-
+
         # Index in the current file
         index_current = CORRESPONDANCES[i]['out']
-        # If the flag virtual in correspondance is active, the previous is virtual
+        # If the flag virtual in correspondance is active,
+        # the previous is virtual
         m_virtual = CORRESPONDANCES[i]['virtual']
         if m_virtual.any():
             index_virtual = index_final[m_virtual]
             # Incrementing index
-            i_current_by_tracks[i_id[m_virtual]] += CORRESPONDANCES[i]['virtual_length'][m_virtual]
+            i_current_by_tracks[i_id[m_virtual]
+                                ] += CORRESPONDANCES[i]['virtual_length'][m_virtual]
             # Get new index
             index_final = i_current_by_tracks[i_id]
 
         # Copy all variables
         for var, _ in eddies_current.obs.dtype.descr:
-            FINAL_EDDIES.obs[var][index_final] = eddies_current.obs[var][index_current]
-
+            FINAL_EDDIES.obs[var][index_final
+                                  ] = eddies_current.obs[var][index_current]
 
         # Add increment for each index used
         i_current_by_tracks[i_id] += 1
@@ -270,23 +286,29 @@ if __name__ == '__main__':
     nb_obs = len(FINAL_EDDIES)
     index = np.arange(nb_obs)
     for var, _ in eddies_current.obs.dtype.descr:
-        FINAL_EDDIES.obs[var][m_i] = np.interp(index[m_i], index[-m_i], FINAL_EDDIES.obs[var][-m_i])
+        FINAL_EDDIES.obs[var][m_i] = np.interp(
+            index[m_i],
+            index[-m_i],
+            FINAL_EDDIES.obs[var][-m_i])
 
     # Total running time
     logging.info('Mean duration by loop : %s',
                  (dt.datetime.now() - START_TIME) / (len(FILENAMES) - 1))
     logging.info('Duration : %s', dt.datetime.now() - START_TIME)
-
-    logging.info('The longest tracks have %d observations', nb_obs_by_tracks.max())
-
-    FINAL_EDDIES.extract_longer_eddies(NB_OBS_MIN, nb_obs_by_tracks.repeat(nb_obs_by_tracks)).write_netcdf()
+
+    logging.info('The longest tracks have %d observations',
+                 nb_obs_by_tracks.max())
+
+    FINAL_EDDIES.extract_longer_eddies(
+        NB_OBS_MIN, nb_obs_by_tracks.repeat(nb_obs_by_tracks)).write_netcdf()
 
 
 
-    #~ previous_amp, current_amp = np.meshgrid(
-        #~ e_current.obs['amplitude'], e_previous.obs['amplitude'])
-    #~ delta_amp = abs(current_amp - previous_amp) / previous_amp
+    #~ previous_amp, current_amp = np.meshgrid(
+        #~ e_current.obs['amplitude'], e_previous.obs['amplitude'])
+    #~ delta_amp = abs(current_amp - previous_amp) / previous_amp
 
-    #~ previous_radius, current_radius = np.meshgrid(
-        #~ e_current.obs['radius_e'], e_previous.obs['radius_e'])
-    #~ delta_radius = abs(current_radius ** 2 - previous_radius ** 2) ** .5 / previous_radius
+    #~ previous_radius, current_radius = np.meshgrid(
+        #~ e_current.obs['radius_e'], e_previous.obs['radius_e'])
+    #~ delta_radius = abs(current_radius ** 2 - previous_radius ** 2) ** .5 \
+        #~ / previous_radius
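
Editor's note: the hunks above are almost entirely line-wrapping, so the association step never appears in one readable piece. The sketch below illustrates the idea under stated assumptions: build a great-circle distance matrix between previous and current eddy centres (mirroring the pure-numpy distance_matrix2 that this commit deletes), then keep mutual nearest neighbours under a distance cut-off. The names haversine_matrix and associate and the 25 km cut-off are illustrative only; the real script calls the compiled distance_matrix from py_eddy_tracker.tools, and its exact selection rule is not shown in this diff.

# Minimal sketch, not the py-eddy-tracker API: associate eddies of two
# consecutive maps by mutual nearest neighbour on great-circle distance.
import numpy as np

D2R = 0.017453292519943295  # degrees to radians, same constant as in the script


def haversine_matrix(lon0, lat0, lon1, lat1):
    """Distance in km between every previous (axis 0) and current (axis 1) point."""
    lon1, lon0 = np.meshgrid(lon1, lon0)
    lat1, lat0 = np.meshgrid(lat1, lat0)
    sin_dlat = np.sin((lat1 - lat0) * 0.5 * D2R)
    sin_dlon = np.sin((lon1 - lon0) * 0.5 * D2R)
    a_val = sin_dlon ** 2 * np.cos(lat0 * D2R) * np.cos(lat1 * D2R) + sin_dlat ** 2
    return 6371.315 * 2 * np.arctan2(a_val ** 0.5, (1 - a_val) ** 0.5)


def associate(dist, cutoff=25.):
    """Mutual nearest neighbours closer than `cutoff` km (assumed rule)."""
    i_nearest = dist.argmin(axis=1)   # best current eddy for each previous one
    j_nearest = dist.argmin(axis=0)   # best previous eddy for each current one
    i_previous = np.arange(dist.shape[0])
    keep = (j_nearest[i_nearest] == i_previous) & (dist[i_previous, i_nearest] < cutoff)
    return i_previous[keep], i_nearest[keep]


lon_prev, lat_prev = np.array([5.00, 6.20]), np.array([38.00, 39.10])
lon_cur, lat_cur = np.array([6.21, 5.05]), np.array([39.12, 38.02])
i_previous, i_current = associate(haversine_matrix(lon_prev, lat_prev, lon_cur, lat_cur))
print(i_previous, i_current)   # [0 1] [1 0]: each previous eddy is found one step later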
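
Editor's note: the virtual-observation block above keeps a track alive when its eddy is missed for a few maps: from the last two real positions A and B it extrapolates C = B + AB, repeats that for at most the number of steps allowed by the VIRTUAL_LEGNTH_MAX setting, and counts the steps in segment_size. Below is a minimal sketch of that rule; the scalar variables and the value 3 are hypothetical, the real script does this on numpy record arrays.

# Minimal sketch of the C = B + AB extrapolation behind virtual observations.
NB_VIRTUAL_OBS_MAX_BY_SEGMENT = 3          # assumed limit on consecutive misses

lon_a, lat_a = 5.00, 38.00                 # position N-2 (A), last but one real obs
lon_b, lat_b = 5.10, 38.05                 # position N-1 (B), last real obs
dlon, dlat = lon_b - lon_a, lat_b - lat_a  # displacement AB, stored with the virtual obs

lon_c, lat_c, segment_size = lon_b, lat_b, 0
while segment_size < NB_VIRTUAL_OBS_MAX_BY_SEGMENT:
    lon_c, lat_c = lon_c + dlon, lat_c + dlat   # C = B + AB, then keep drifting
    segment_size += 1
    print('virtual obs %d at lon=%.2f lat=%.2f' % (segment_size, lon_c, lat_c))
# A real eddy matched against one of these virtual positions "re-births" the
# track, and its virtual_length is added back into the track's observation
# count; otherwise the track dies once segment_size reaches the maximum.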