@@ -113,8 +113,8 @@ def longer_than(self, nb_day_min=-1, nb_day_max=-1):
        """
        Select network on time duration

-        :param int nb_day_min: Minimal number of day which must be covered by one network, if negative -> not used
-        :param int nb_day_max: Maximal number of day which must be covered by one network, if negative -> not used
+        :param int nb_day_min: Minimal number of days covered by one network, if negative -> not used
+        :param int nb_day_max: Maximal number of days covered by one network, if negative -> not used
        """
        if nb_day_max < 0:
            nb_day_max = 1000000000000
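
A minimal usage sketch based on the signature above (assuming `n` is a NetworkObservations instance and that the method returns the selected observations, like the other extract-style methods in this diff; the thresholds are illustrative):

    # Keep only networks that span at least 30 days
    long_networks = n.longer_than(nb_day_min=30)

    # Keep networks spanning between 10 and 60 days
    medium_networks = n.longer_than(nb_day_min=10, nb_day_max=60)
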
@@ -132,7 +132,7 @@ def longer_than(self, nb_day_min=-1, nb_day_max=-1):
    @classmethod
    def from_split_network(cls, group_dataset, indexs, **kwargs):
        """
-        Build a NetworkObservations object with Group dataset and indexs
+        Build a NetworkObservations object with Group dataset and indexes

        :param TrackEddiesObservations group_dataset: Group dataset
        :param indexs: result from split_network
@@ -204,6 +204,9 @@ def __close_segment(cls, father, shift, connexions, distance):
                cls.__close_segment(son, shift, connexions, distance)

    def segment_relative_order(self, seg_origine):
+        """
+        Compute the relative order of each segment to the chosen segment
+        """
        i_s, i_e, i_ref = build_index(self.segment)
        segment_connexions = self.connexions()
        relative_tr = -ones(i_s.shape, dtype="i4")
@@ -216,6 +219,9 @@ def segment_relative_order(self, seg_origine):
        return d

    def relative(self, i_obs, order=2, direct=True, only_past=False, only_future=False):
+        """
+        Extract the segments at a certain order.
+        """
        d = self.segment_relative_order(self.segment[i_obs])
        m = (d <= order) * (d != -1)
        return self.extract_with_mask(m)
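
Read together, the two methods above can be used like this (a sketch, assuming `n` is a NetworkObservations instance and `i_obs` any observation index):

    # Number of connexions separating each segment from the segment of observation i_obs
    d = n.segment_relative_order(n.segment[i_obs])

    # Keep only observations whose segment is at most 2 connexions away
    neighbourhood = n.relative(i_obs, order=2)
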
@@ -234,7 +240,7 @@ def only_one_network(self):
        """
        _, i_start, _ = self.index_network
        if len(i_start) > 1:
-            raise Exception("Several network")
+            raise Exception("Several networks")

    def position_filter(self, median_half_window, loess_half_window):
        self.median_filter(median_half_window, "time", "lon").loess_filter(
@@ -266,7 +272,15 @@ def display_timeline(
        self, ax, event=True, field=None, method=None, factor=1, **kwargs
    ):
        """
-        Must be call on only one network
+        Plot a timeline of a network.
+        Must be called on only one network.
+
+        :param matplotlib.axes.Axes ax: matplotlib axes used to draw
+        :param bool event: if True, draw the splitting and merging events
+        :param str,array field: y-axis values; if None, segments are used
+        :param str method: if None, mean values are used
+        :param float factor: factor applied to field
+        :return: plot mappable
        """
        self.only_one_network()
        j = 0
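
A usage sketch for the documented parameters (assuming `n` is a NetworkObservations instance holding a single network; the field name is illustrative):

    import matplotlib.pyplot as plt

    fig, ax = plt.subplots(figsize=(15, 5))
    # One line per segment, with splitting/merging events drawn
    n.display_timeline(ax, event=True)
    # Alternatively, use a field (scaled by factor) as y-axis values
    # n.display_timeline(ax, field="radius_s", factor=1e-3)
    plt.show()
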
@@ -516,6 +530,7 @@ def extract_event(self, indices):

    @property
    def segment_track_array(self):
+        """Return a unique segment id when multiple networks are considered"""
        return build_unique_array(self.segment, self.track)

    def birth_event(self):
@@ -542,27 +557,65 @@ def death_event(self):
                indices.append(i.stop - 1)
        return self.extract_event(list(set(indices)))

-    def merging_event(self):
-        indices = list()
+    def merging_event(self, triplet=False):
+        """Return observations after a merging event.
+
+        If `triplet=True`, return the eddy after the merging event, the eddy before the merging event,
+        and the eddy stopped due to merging.
+        """
+        idx_m1 = list()
+        if triplet:
+            idx_m0_stop = list()
+            idx_m0 = list()
+
        for i, _, _ in self.iter_on(self.segment_track_array):
            nb = i.stop - i.start
            if nb == 0:
                continue
            i_n = self.next_obs[i.stop - 1]
            if i_n != -1:
-                indices.append(i.stop - 1)
-        return self.extract_event(list(set(indices)))
+                if triplet:
+                    idx_m0_stop.append(i.stop - 1)
+                    idx_m0.append(self.previous_obs[i_n])
+                idx_m1.append(i_n)
+
+        if triplet:
+            return (
+                self.extract_event(list(idx_m1)),
+                self.extract_event(list(idx_m0)),
+                self.extract_event(list(idx_m0_stop)),
+            )
+        else:
+            return self.extract_event(list(set(idx_m1)))

-    def spliting_event(self):
-        indices = list()
+    def spliting_event(self, triplet=False):
+        """Return observations before a splitting event.
+
+        If `triplet=True`, return the eddy before the splitting event, the eddy after the splitting event,
+        and the eddy started due to splitting.
+        """
+        idx_s0 = list()
+        if triplet:
+            idx_s1_start = list()
+            idx_s1 = list()
        for i, _, _ in self.iter_on(self.segment_track_array):
            nb = i.stop - i.start
            if nb == 0:
                continue
            i_p = self.previous_obs[i.start]
            if i_p != -1:
-                indices.append(i.start)
-        return self.extract_event(list(set(indices)))
+                if triplet:
+                    idx_s1_start.append(i.start)
+                    idx_s1.append(self.next_obs[i_p])
+                idx_s0.append(i_p)
+        if triplet:
+            return (
+                self.extract_event(list(idx_s0)),
+                self.extract_event(list(idx_s1)),
+                self.extract_event(list(idx_s1_start)),
+            )
+        else:
+            return self.extract_event(list(set(idx_s0)))

    def dissociate_network(self):
        """
@@ -655,7 +708,7 @@ def remove_dead_end(self, nobs=3, recursive=0, mask=None):
        self.only_one_network()
        segments_keep = list()
        connexions = self.connexions()
-        for i, b0, b1 in self.iter_on("segment"):
+        for i, b0, _ in self.iter_on("segment"):
            nb = i.stop - i.start
            if mask and mask[i].any():
                segments_keep.append(b0)
@@ -852,6 +905,7 @@ def apply_replace(x, x0, x1):

@njit(cache=True)
def build_unique_array(id1, id2):
+    """Give a unique id for each (id1, id2) with id1 and id2 increasing monotonically"""
    k = 0
    new_id = empty(id1.shape, dtype=id1.dtype)
    id1_previous = id1[0]
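
For illustration, the behaviour the new docstring describes, with made-up input values (a sketch, not a test from the repository):

    from numpy import array

    segment = array([0, 0, 1, 1, 1, 2, 2])
    track = array([5, 5, 5, 5, 5, 8, 8])
    # A new id is issued each time the (segment, track) pair changes:
    # build_unique_array(segment, track) -> [0, 0, 1, 1, 1, 2, 2]
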