Skip to content

Commit af8e443

Browse files
- correction coherence forward & backward, when time needed is shorter than time available - bug when extracting zarr network which has the same number of observations and number of contours - expose underlying parameters to users (min_overlap, minimal_area) - correction of bug in zarr nb_obs & track_array_variables, if no vars with 2 dimensions were selected - bug when loading EddiesObservation, rotation_type was not loaded - bug in tracking, previous_virtual_obs was not loaded from VirtualEddiesObservations - warning when loading data with different py-eddy-tracker versions - changes of extract_light_with_mask - possibility to select extra variables to extract
1 parent 0e2bb64 commit af8e443

File tree

12 files changed

+175
-109
lines changed

12 files changed

+175
-109
lines changed

CHANGELOG.rst

Lines changed: 7 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -15,13 +15,20 @@ Changed
1515
New identifications are produced with this type, old files could still be loaded.
1616
If you use old identifications for tracking use the `--unraw` option to unpack old times and store data with the new format.
1717
- Now amplitude is stored with .1 mm of precision (instead of 1 mm), same advice as for time.
18+
- expose more parameters to users for bash tools build_network & divide_network
19+
- add warning when loading a file created from a previous version of py-eddy-tracker.
20+
21+
1822

1923
Fixed
2024
^^^^^
2125

2226
- Fix bug in convolution(filter), lowest rows were replaced by zeros in convolution computation.
2327
Important impact for tiny kernel
2428
- Fix method of sampling before contour fitting
29+
- Fix bug when loading dataset in zarr format, not all variables were correctly loaded
30+
- Fix bug when zarr dataset has same size for number of observations and contour size
31+
- Fix bug when tracking, previous_virtual_obs was not always loaded
2532

2633
Added
2734
^^^^^

examples/02_eddy_identification/pet_eddy_detection_ACC.py

Lines changed: 3 additions & 10 deletions
Original file line numberDiff line numberDiff line change
@@ -65,8 +65,7 @@ def set_fancy_labels(fig, ticklabelsize=14, labelsize=14, labelweight="semibold"
6565
y_name="latitude",
6666
# Manual area subset
6767
indexs=dict(
68-
latitude=slice(100 - margin, 220 + margin),
69-
longitude=slice(0, 230 + margin),
68+
latitude=slice(100 - margin, 220 + margin), longitude=slice(0, 230 + margin),
7069
),
7170
)
7271
g_raw = RegularGridDataset(**kw_data)
@@ -188,16 +187,10 @@ def set_fancy_labels(fig, ticklabelsize=14, labelsize=14, labelweight="semibold"
188187
ax.set_ylabel("With filter")
189188

190189
ax.plot(
191-
a_[field][i_a] * factor,
192-
a[field][j_a] * factor,
193-
"r.",
194-
label="Anticyclonic",
190+
a_[field][i_a] * factor, a[field][j_a] * factor, "r.", label="Anticyclonic",
195191
)
196192
ax.plot(
197-
c_[field][i_c] * factor,
198-
c[field][j_c] * factor,
199-
"b.",
200-
label="Cyclonic",
193+
c_[field][i_c] * factor, c[field][j_c] * factor, "b.", label="Cyclonic",
201194
)
202195
ax.set_aspect("equal"), ax.grid()
203196
ax.plot((0, 1000), (0, 1000), "g")

examples/16_network/pet_replay_segmentation.py

Lines changed: 1 addition & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -149,13 +149,7 @@ def get_obs(dataset):
149149
n_.median_filter(15, "time", "latitude")
150150
kw["s"] = (n_.radius_e * 1e-3) ** 2 / 30 ** 2 * 20
151151
m = n_.scatter_timeline(
152-
ax,
153-
"shape_error_e",
154-
vmin=14,
155-
vmax=70,
156-
**kw,
157-
yfield="lon",
158-
method="all",
152+
ax, "shape_error_e", vmin=14, vmax=70, **kw, yfield="lon", method="all",
159153
)
160154
ax.set_ylabel("Longitude")
161155
cb = update_axes(ax, m["scatter"])

src/py_eddy_tracker/__init__.py

Lines changed: 3 additions & 10 deletions
Original file line numberDiff line numberDiff line change
@@ -422,20 +422,14 @@ def identify_time(str_date):
422422
nc_name="previous_cost",
423423
nc_type="float32",
424424
nc_dims=("obs",),
425-
nc_attr=dict(
426-
long_name="Previous cost for previous observation",
427-
comment="",
428-
),
425+
nc_attr=dict(long_name="Previous cost for previous observation", comment="",),
429426
),
430427
next_cost=dict(
431428
attr_name=None,
432429
nc_name="next_cost",
433430
nc_type="float32",
434431
nc_dims=("obs",),
435-
nc_attr=dict(
436-
long_name="Next cost for next observation",
437-
comment="",
438-
),
432+
nc_attr=dict(long_name="Next cost for next observation", comment="",),
439433
),
440434
n=dict(
441435
attr_name=None,
@@ -646,8 +640,7 @@ def identify_time(str_date):
646640
nc_type="f4",
647641
nc_dims=("obs",),
648642
nc_attr=dict(
649-
long_name="Log base 10 background chlorophyll",
650-
units="Log(Chl/[mg/m^3])",
643+
long_name="Log base 10 background chlorophyll", units="Log(Chl/[mg/m^3])",
651644
),
652645
),
653646
year=dict(

src/py_eddy_tracker/appli/eddies.py

Lines changed: 4 additions & 14 deletions
Original file line numberDiff line numberDiff line change
@@ -243,8 +243,7 @@ def browse_dataset_in(
243243
filenames = bytes_(glob(full_path))
244244

245245
dataset_list = empty(
246-
len(filenames),
247-
dtype=[("filename", "S500"), ("date", "datetime64[s]")],
246+
len(filenames), dtype=[("filename", "S500"), ("date", "datetime64[s]")],
248247
)
249248
dataset_list["filename"] = filenames
250249

@@ -372,8 +371,7 @@ def track(
372371

373372
logger.info("Longer track saved have %d obs", c.nb_obs_by_tracks.max())
374373
logger.info(
375-
"The mean length is %d observations for long track",
376-
c.nb_obs_by_tracks.mean(),
374+
"The mean length is %d observations for long track", c.nb_obs_by_tracks.mean(),
377375
)
378376

379377
long_track.write_file(**kw_write)
@@ -383,14 +381,7 @@ def track(
383381

384382

385383
def get_group(
386-
dataset1,
387-
dataset2,
388-
index1,
389-
index2,
390-
score,
391-
invalid=2,
392-
low=10,
393-
high=60,
384+
dataset1, dataset2, index1, index2, score, invalid=2, low=10, high=60,
394385
):
395386
group1, group2 = dict(), dict()
396387
m_valid = (score * 100) >= invalid
@@ -499,8 +490,7 @@ def get_values(v, dataset):
499490
]
500491

501492
labels = dict(
502-
high=f"{high:0.0f} <= high",
503-
low=f"{invalid:0.0f} <= low < {low:0.0f}",
493+
high=f"{high:0.0f} <= high", low=f"{invalid:0.0f} <= low < {low:0.0f}",
504494
)
505495

506496
keys = [labels.get(key, key) for key in list(gr_ref.values())[0].keys()]

src/py_eddy_tracker/appli/network.py

Lines changed: 36 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -21,6 +21,20 @@ def build_network():
2121
parser.add_argument(
2222
"--window", "-w", type=int, help="Half time window to search eddy", default=1
2323
)
24+
25+
parser.add_argument(
26+
"--min-overlap",
27+
"-p",
28+
type=float,
29+
help="minimum overlap area to associate observations",
30+
default=0.2,
31+
)
32+
parser.add_argument(
33+
"--minimal-area",
34+
action="store_true",
35+
help="If True, use intersection/little polygon, else intersection/union",
36+
)
37+
2438
parser.contour_intern_arg()
2539

2640
parser.memory_arg()
@@ -32,7 +46,9 @@ def build_network():
3246
intern=args.intern,
3347
memory=args.memory,
3448
)
35-
group = n.group_observations(minimal_area=True)
49+
group = n.group_observations(
50+
min_overlap=args.min_overlap, minimal_area=args.minimal_area
51+
)
3652
n.build_dataset(group).write_file(filename=args.out)
3753

3854

@@ -44,6 +60,18 @@ def divide_network():
4460
parser.add_argument(
4561
"--window", "-w", type=int, help="Half time window to search eddy", default=1
4662
)
63+
parser.add_argument(
64+
"--min-overlap",
65+
"-p",
66+
type=float,
67+
help="minimum overlap area to associate observations",
68+
default=0.2,
69+
)
70+
parser.add_argument(
71+
"--minimal-area",
72+
action="store_true",
73+
help="If True, use intersection/little polygon, else intersection/union",
74+
)
4775
args = parser.parse_args()
4876
contour_name = TrackEddiesObservations.intern(args.intern, public_label=True)
4977
e = TrackEddiesObservations.load_file(
@@ -52,7 +80,12 @@ def divide_network():
5280
)
5381
n = NetworkObservations.from_split_network(
5482
TrackEddiesObservations.load_file(args.input, raw_data=True),
55-
e.split_network(intern=args.intern, window=args.window),
83+
e.split_network(
84+
intern=args.intern,
85+
window=args.window,
86+
min_overlap=args.min_overlap,
87+
minimal_area=args.minimal_area,
88+
),
5689
)
5790
n.write_file(filename=args.out)
5891

@@ -76,9 +109,7 @@ def subset_network():
76109
help="Remove short dead end, first is for minimal obs number and second for minimal segment time to keep",
77110
)
78111
parser.add_argument(
79-
"--remove_trash",
80-
action="store_true",
81-
help="Remove trash (network id == 0)",
112+
"--remove_trash", action="store_true", help="Remove trash (network id == 0)",
82113
)
83114
parser.add_argument(
84115
"-p",

src/py_eddy_tracker/dataset/grid.py

Lines changed: 3 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -858,13 +858,11 @@ def eddy_identification(
858858
xy_i = uniform_resample(
859859
inner_contour.lon,
860860
inner_contour.lat,
861-
num_fac=presampling_multiplier
862-
)
863-
xy_e = uniform_resample(
864-
contour.lon,
865-
contour.lat,
866861
num_fac=presampling_multiplier,
867862
)
863+
xy_e = uniform_resample(
864+
contour.lon, contour.lat, num_fac=presampling_multiplier,
865+
)
868866
xy_s = uniform_resample(
869867
speed_contour.lon,
870868
speed_contour.lat,

src/py_eddy_tracker/eddy_feature.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -433,8 +433,8 @@ def __init__(self, x, y, z, levels, wrap_x=False, keep_unclose=False):
433433
closed_contours = 0
434434
# Count level and contour
435435
for i, collection in enumerate(self.contours.collections):
436-
collection.get_nearest_path_bbox_contain_pt = (
437-
lambda x, y, i=i: self.get_index_nearest_path_bbox_contain_pt(i, x, y)
436+
collection.get_nearest_path_bbox_contain_pt = lambda x, y, i=i: self.get_index_nearest_path_bbox_contain_pt(
437+
i, x, y
438438
)
439439
nb_level += 1
440440

src/py_eddy_tracker/observations/network.py

Lines changed: 42 additions & 11 deletions
Original file line numberDiff line numberDiff line change
@@ -1301,7 +1301,7 @@ def extract_with_period(self, period):
13011301

13021302
return self.extract_with_mask(self.get_mask_with_period(period))
13031303

1304-
def extract_light_with_mask(self, mask):
1304+
def extract_light_with_mask(self, mask, track_extra_variables=[]):
13051305
"""extract data with mask, but only with variables used for coherence, aka self.array_variables
13061306
13071307
:param mask: mask used to extract
@@ -1319,7 +1319,7 @@ def extract_light_with_mask(self, mask):
13191319
variables = ["time"] + self.array_variables
13201320
new = self.__class__(
13211321
size=nb_obs,
1322-
track_extra_variables=[],
1322+
track_extra_variables=track_extra_variables,
13231323
track_array_variables=self.track_array_variables,
13241324
array_variables=self.array_variables,
13251325
only_variables=variables,
@@ -1333,9 +1333,22 @@ def extract_light_with_mask(self, mask):
13331333
f"{nb_obs} observations will be extracted ({nb_obs / self.shape[0]:.3%})"
13341334
)
13351335

1336-
for field in variables:
1336+
for field in variables + track_extra_variables:
13371337
logger.debug("Copy of field %s ...", field)
13381338
new.obs[field] = self.obs[field][mask]
1339+
1340+
if (
1341+
"previous_obs" in track_extra_variables
1342+
and "next_obs" in track_extra_variables
1343+
):
1344+
# n & p must be re-index
1345+
n, p = self.next_obs[mask], self.previous_obs[mask]
1346+
# we add 2 for -1 index return index -1
1347+
translate = -ones(len(self) + 1, dtype="i4")
1348+
translate[:-1][mask] = arange(nb_obs)
1349+
new.next_obs[:] = translate[n]
1350+
new.previous_obs[:] = translate[p]
1351+
13391352
return new
13401353

13411354
def extract_with_mask(self, mask):
@@ -1495,7 +1508,8 @@ def date2file(julian_day):
14951508

14961509
t_start, t_end = int(self.period[0]), int(self.period[1])
14971510

1498-
dates = arange(t_start, t_start + n_days + 1)
1511+
# dates = arange(t_start, t_start + n_days + 1)
1512+
dates = arange(t_start, min(t_start + n_days + 1, t_end + 1))
14991513
first_files = [date_function(x) for x in dates]
15001514

15011515
c = GridCollection.from_netcdf_list(first_files, dates, **uv_params)
@@ -1570,12 +1584,8 @@ def date2file(julian_day):
15701584
ptf_final = zeros((self.obs.size, 2), dtype="i1")
15711585

15721586
t_start, t_end = int(self.period[0]), int(self.period[1])
1573-
# if begin is not None and begin > t_start:
1574-
# t_start = begin
1575-
# if end is not None and end < t_end:
1576-
# t_end = end
15771587

1578-
dates = arange(t_start, t_start + n_days + 1)
1588+
dates = arange(t_start, min(t_start + n_days + 1, t_end + 1))
15791589
first_files = [date_function(x) for x in dates]
15801590

15811591
c = GridCollection.from_netcdf_list(first_files, dates, **uv_params)
@@ -1699,7 +1709,23 @@ def group_translator(nb, duos):
16991709
apply_replace(translate, gr_i, gr_j)
17001710
return translate
17011711

1702-
def group_observations(self, **kwargs):
1712+
def group_observations(self, min_overlap=0.2, minimal_area=False):
1713+
"""Store every interaction between identifications
1714+
1715+
Parameters
1716+
----------
1717+
minimal_area : bool, optional
1718+
If True, function will compute intersection/little polygon, else intersection/union, by default False
1719+
1720+
min_overlap : float, optional
1721+
minimum overlap area to associate observations, by default 0.2
1722+
1723+
Returns
1724+
-------
1725+
TrackEddiesObservations
1726+
netcdf with interactions
1727+
"""
1728+
17031729
results, nb_obs = list(), list()
17041730
# To display print only in INFO
17051731
display_iteration = logger.getEffectiveLevel() == logging.INFO
@@ -1713,7 +1739,12 @@ def group_observations(self, **kwargs):
17131739
for j in range(i + 1, min(self.window + i + 1, self.nb_input)):
17141740
xj, yj = self.buffer.load_contour(self.filenames[j])
17151741
ii, ij = bbox_intersection(xi, yi, xj, yj)
1716-
m = vertice_overlap(xi[ii], yi[ii], xj[ij], yj[ij], **kwargs) > 0.2
1742+
m = (
1743+
vertice_overlap(
1744+
xi[ii], yi[ii], xj[ij], yj[ij], minimal_area=minimal_area
1745+
)
1746+
> min_overlap
1747+
)
17171748
results.append((i, j, ii[m], ij[m]))
17181749
if display_iteration:
17191750
print()

0 commit comments

Comments
 (0)