From 81eaf1574cf0170527367cd21e887b3789b236c2 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Cl=C3=A9ment?= <49512274+ludwigVonKoopa@users.noreply.github.com>
Date: Wed, 12 May 2021 14:30:40 +0200
Subject: [PATCH 1/5] add coherence function, and minor corrections

add shift_files to GridCollection
change heigth to height
correct FIXME in function documentation
correct tests
---
 src/py_eddy_tracker/dataset/grid.py         |  10 +
 src/py_eddy_tracker/observations/network.py | 311 +++++++++++++++++-
 .../observations/observation.py             |   2 +-
 3 files changed, 308 insertions(+), 15 deletions(-)

diff --git a/src/py_eddy_tracker/dataset/grid.py b/src/py_eddy_tracker/dataset/grid.py
index 11227475..ea602cfa 100644
--- a/src/py_eddy_tracker/dataset/grid.py
+++ b/src/py_eddy_tracker/dataset/grid.py
@@ -2264,6 +2264,16 @@ def from_netcdf_list(cls, filenames, t, x_name, y_name, indexs=None, heigth=None
             new.datasets.append((t, d))
         return new
 
+    def shift_files(self, t, filename, x_name, y_name, indexs, heigth):
+        """Add next file to the list and remove the oldest"""
+
+        self.datasets = self.datasets[1:]
+
+        d = RegularGridDataset(filename, x_name, y_name, indexs=indexs)
+        if heigth is not None:
+            d.add_uv(heigth)
+        self.datasets.append((t, d))
+
     def interp(self, grid_name, t, lons, lats, method="bilinear"):
         """
         Compute z over lons, lats
diff --git a/src/py_eddy_tracker/observations/network.py b/src/py_eddy_tracker/observations/network.py
index 58f926a1..5fe0727a 100644
--- a/src/py_eddy_tracker/observations/network.py
+++ b/src/py_eddy_tracker/observations/network.py
@@ -6,6 +6,7 @@
 from glob import glob
 
 from numba import njit
+from numba import types as nb_types
 from numpy import (
     arange,
     array,
@@ -20,13 +21,16 @@
     unique,
     where,
     zeros,
+    meshgrid,
 )
+import zarr
 
 from ..generic import build_index, wrap_longitude
-from ..poly import bbox_intersection, vertice_overlap
+from ..poly import bbox_intersection, vertice_overlap, group_obs
 from .groups import GroupEddiesObservations, get_missing_indices
 from .observation import EddiesObservations
 from .tracking import TrackEddiesObservations, track_loess_filter, track_median_filter
+from ..dataset.grid import GridCollection
 
 logger = logging.getLogger("pet")
 
@@ -97,6 +101,109 @@ def fix_next_previous_obs(next_obs, previous_obs, flag_virtual):
         previous_obs[i_o + 1] = i_o
 
 
+def advect(x, y, c, t0, delta_t):
+    """
+    Advect particle from t0 to t0 + delta_t, with data cube.
+ + :param np.array(float) x: longitude of particles + :param np.array(float) y: latitude of particles + :param `~py_eddy_tracker.dataset.grid.GridCollection` c: GridCollection with speed for particles + :param int t0: julian day of advection start + :param int delta_t: number of days to advect + """ + + kw = dict(nb_step=6, time_step=86400 / 6) + if delta_t < 0: + kw["backward"] = True + delta_t = -delta_t + p = c.advect(x, y, "u", "v", t_init=t0, **kw) + for _ in range(delta_t): + t, x, y = p.__next__() + return t, x, y + + +def particle_candidate(x, y, c, eddies, t_start, i_target, pct, **kwargs): + """Select particles within eddies, advect them, return target observation and associated percentages + + :param np.array(float) x: longitude of particles + :param np.array(float) y: latitude of particles + :param `~py_eddy_tracker.dataset.grid.GridCollection` c: GridCollection with speed for particles + :param NetworkObservations eddies: NetworkObservations considered + :param int t_start: julian day of the advection + :param np.array(int) i_target: corresponding obs where particles are advected + :param np.array(int) pct: corresponding percentage of avected particles + :params dict kwargs: dict of params given to `advect` + """ + + # Obs from initial time + m_start = eddies.time == t_start + + e = eddies.extract_with_mask(m_start) + # to be able to get global index + translate_start = where(m_start)[0] + # Identify particle in eddies (only in core) + i_start = e.contains(x, y, intern=True) + m = i_start != -1 + + x, y, i_start = x[m], y[m], i_start[m] + # Advect + t_end, x, y = advect(x, y, c, t_start, **kwargs) + # eddies at last date + m_end = eddies.time == t_end / 86400 + e_end = eddies.extract_with_mask(m_end) + # to be able to get global index + translate_end = where(m_end)[0] + # Id eddies for each alive particle (in core and extern) + i_end = e_end.contains(x, y) + # compute matrix and fill target array + get_matrix(i_start, i_end, translate_start, translate_end, i_target, pct) + + +@njit(cache=True) +def get_matrix(i_start, i_end, translate_start, translate_end, i_target, pct): + """Compute target observation and associated percentages + + :param np.array(int) i_start: indices of associated contours at starting advection day + :param np.array(int) i_end: indices of associated contours after advection + :param np.array(int) translate_start: corresponding global indices at starting advection day + :param np.array(int) translate_end: corresponding global indices after advection + :param np.array(int) i_target: corresponding obs where particles are advected + :param np.array(int) pct: corresponding percentage of avected particles + """ + + nb_start, nb_end = translate_start.size, translate_end.size + # Matrix which will store count for every couple + count = zeros((nb_start, nb_end), dtype=nb_types.int32) + # Number of particles in each origin observation + ref = zeros(nb_start, dtype=nb_types.int32) + # For each particle + for i in range(i_start.size): + i_end_ = i_end[i] + i_start_ = i_start[i] + if i_end_ != -1: + count[i_start_, i_end_] += 1 + ref[i_start_] += 1 + for i in range(nb_start): + for j in range(nb_end): + pct_ = count[i, j] + # If there are particles from i to j + if pct_ != 0: + # Get percent + pct_ = pct_ / ref[i] * 100.0 + # Get indices in full dataset + i_, j_ = translate_start[i], translate_end[j] + pct_0 = pct[i_, 0] + if pct_ > pct_0: + pct[i_, 1] = pct_0 + pct[i_, 0] = pct_ + i_target[i_, 1] = i_target[i_, 0] + i_target[i_, 0] = j_ + elif pct_ > pct[i_, 1]: + 
pct[i_, 1] = pct_ + i_target[i_, 1] = j_ + return i_target, pct + + class NetworkObservations(GroupEddiesObservations): __slots__ = ("_index_network",) @@ -109,17 +216,16 @@ def __init__(self, *args, **kwargs): def find_segments_relative(self, obs, stopped=None, order=1): """ - Find all relative segments linked with merging/splitting events at a specific order. + Find all relative segments from obs linked with merging/splitting events at a specific order. - :param int obs: index of event after the event - :param int stopped: index of event before the event + :param int obs: index of observation after the event + :param int stopped: index of observation before the event :param int order: order of relatives accepted :return: all relative segments :rtype: EddiesObservations """ - # FIXME : double "event" in the description, please clarify (event = chosen obs?) # extraction of network where the event is network_id = self.tracks[obs] @@ -247,23 +353,17 @@ def infos(self, label=""): def correct_close_events(self, nb_days_max=20): """ Transform event where - segment A split to B, then A merge into B + segment A splits from segment B, then x days after segment B merges with A to - segment A split to B, then B merge to A + segment A splits from segment B then x days after segment A merges with B (B will be longer) - these events are filtered with `nb_days_max`, which the event have to take place in less than `nb_days_max` + These events have to last less than `nb_days_max` to be changed. :param float nb_days_max: maximum time to search for splitting-merging event """ - # FIXME : we want to change - # segment A splits from segment B, then x days after segment B merges with A - # to - # segment A splits from segment B then x days after segement A merges with B (B will be longer) - # comments are in the wrong way but the example works as wanted - _time = self.time # segment used to correct and track changes segment = self.segment_track_array.copy() @@ -1340,6 +1440,189 @@ def extract_with_mask(self, mask): new.previous_obs[:] = translate[p] return new + def analysis_coherence( + self, + date_function, + uv_params, + advection_mode="both", + dt_advect=14, + step_mesh=1.0 / 50, + output_name=None, + dissociate_network=False, + correct_close_events=0, + remove_dead_end=0, + ): + + """Global function to analyse segments coherence, with network preprocessing""" + + if dissociate_network: + self.dissociate_network() + + if correct_close_events > 0: + self.correct_close_events(nb_days_max=correct_close_events) + + if remove_dead_end > 0: + network_clean = self.remove_dead_end(nobs=0, ndays=remove_dead_end) + else: + network_clean = self + + res = network_clean.segment_coherence( + date_function=date_function, + uv_params=uv_params, + advection_mode=advection_mode, + output_name=output_name, + dt_advect=dt_advect, + step_mesh=step_mesh, + ) + + return network_clean, res + + def segment_coherence( + self, + date_function, + uv_params, + advection_mode="both", + dt_advect=14, + step_mesh=1.0 / 50, + output_name=None, + ): + + """ + Percentage of particules and their targets after forward or/and backward advection from a specific eddy. 
+ + :param callable date_function: python function, takes as param `int` (julian day) and return + data filename associated to the date + ex: + def date2file(julian_day): + date = datetime.timedelta(days=julian_day) + datetime.datetime(1950, 1, 1) + + return f"/tmp/dt_global_allsat_phy_l4_{date.strftime('%Y%m%d')}.nc" + + :param dict uv_params: dict of parameters used by + :py:meth:`~py_eddy_tracker.dataset.grid.GridCollection.from_netcdf_list` + :param str advection_mode: "backward", "forward" or "both" + :param int dt_advect: days for advection + :param float step_mesh: step for particule mesh in degrees + :param str output_name: if not None, name of file saved in zarr. Else, data will not be saved + """ + + if advection_mode in ["both", "forward"]: + itf_final = -ones((self.obs.size, 2), dtype="i4") + ptf_final = zeros((self.obs.size, 2), dtype="i1") + + if advection_mode in ["both", "backward"]: + itb_final = -ones((self.obs.size, 2), dtype="i4") + ptb_final = zeros((self.obs.size, 2), dtype="i1") + + for slice_track, b0, _ in self.iter_on(self.track): + if b0 == 0: + continue + + sub_networks = self.network(b0) + + # find extremum to create a mesh of particles + lon = sub_networks.contour_lon_s + lonMin = lon.min() - 0.1 + lonMax = lon.max() + 0.1 + + lat = sub_networks.contour_lat_s + latMin = lat.min() - 0.1 + latMax = lat.max() + 0.1 + + x0, y0 = meshgrid( + arange(lonMin, lonMax, step_mesh), arange(latMin, latMax, step_mesh) + ) + x0, y0 = x0.reshape(-1), y0.reshape(-1) + _, i = group_obs(x0, y0, 1, 360) + x0, y0 = x0[i], y0[i] + + t_start, t_end = sub_networks.period + shape = (sub_networks.obs.size, 2) + + if advection_mode in ["both", "forward"]: + + # first dates to load. + dates = arange(t_start - 1, t_start + dt_advect + 2) + # files associated with dates + first_files = [date_function(x) for x in dates] + + c = GridCollection.from_netcdf_list(first_files, dates, **uv_params) + + i_target_f = -ones(shape, dtype="i4") + pct_target_f = zeros(shape, dtype="i1") + + for _t in range(t_start, t_end - dt_advect + 1): + t_shift = _t + dt_advect + 2 + + # add next date to GridCollection and delete last date + c.shift_files(t_shift, date_function(int(t_shift)), **uv_params) + particle_candidate( + x0, + y0, + c, + sub_networks, + _t, + i_target_f, + pct_target_f, + delta_t=dt_advect, + ) + + itf_final[slice_track] = i_target_f + ptf_final[slice_track] = pct_target_f + + if advection_mode in ["both", "backward"]: + + # first dates to load. 
+ dates = arange(t_start - 1, t_start + dt_advect + 2) + # files associated with dates + first_files = [date_function(x) for x in dates] + + c = GridCollection.from_netcdf_list(first_files, dates, **uv_params) + + i_target_b = -ones(shape, dtype="i4") + pct_target_b = zeros(shape, dtype="i1") + + for _t in range(t_start + dt_advect + 1, t_end + 1): + t_shift = _t + 1 + + # add next date to GridCollection and delete last date + c.shift_files(t_shift, date_function(int(t_shift)), **uv_params) + particle_candidate( + x0, + y0, + c, + self, + _t, + i_target_b, + pct_target_b, + delta_t=-dt_advect, + ) + + itb_final[slice_track] = i_target_b + ptb_final[slice_track] = pct_target_b + + if output_name is not None: + zg = zarr.open(output_name, "w") + + # zarr compression parameters + params_seg = dict() + params_pct = dict() + + res = [] + if advection_mode in ["forward", "both"]: + res = res + [itf_final, ptf_final] + if output_name is not None: + zg.array("i_target_forward", itf_final, **params_seg) + zg.array("pct_target_forward", ptf_final, **params_pct) + + if advection_mode in ["backward", "both"]: + res = res + [itb_final, ptb_final] + if output_name is not None: + zg.array("i_target_backward", itb_final, **params_seg) + zg.array("pct_target_backward", ptb_final, **params_pct) + + return res + class Network: __slots__ = ( diff --git a/src/py_eddy_tracker/observations/observation.py b/src/py_eddy_tracker/observations/observation.py index 173f6c56..3d91ad42 100644 --- a/src/py_eddy_tracker/observations/observation.py +++ b/src/py_eddy_tracker/observations/observation.py @@ -2045,7 +2045,7 @@ def is_convex(self, intern=False): def contains(self, x, y, intern=False): """ - Return index of contour which contain (x,y) + Return index of contour containing (x,y) :param array x: longitude :param array y: latitude From 13177b657e3f37b88e734a0aac1c8254d5547fca Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Cl=C3=A9ment?= <49512274+ludwigVonKoopa@users.noreply.github.com> Date: Wed, 12 May 2021 14:31:29 +0200 Subject: [PATCH 2/5] correction bug problem when indices is on edge of datas --- src/py_eddy_tracker/dataset/grid.py | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/src/py_eddy_tracker/dataset/grid.py b/src/py_eddy_tracker/dataset/grid.py index ea602cfa..6337e136 100644 --- a/src/py_eddy_tracker/dataset/grid.py +++ b/src/py_eddy_tracker/dataset/grid.py @@ -2551,6 +2551,11 @@ def get_uv_quad(i0, j0, u, v, m, nb_x=0): i1, j1 = i0 + 1, j0 + 1 if nb_x != 0: i1 %= nb_x + i_max, j_max = m.shape + + # if i1 >= i_max or j1 >= j_max: + # return True, nan, nan, nan, nan, nan, nan, nan, nan + if m[i0, j0] or m[i0, j1] or m[i1, j0] or m[i1, j1]: return True, nan, nan, nan, nan, nan, nan, nan, nan # Extract value for u and v From b5c31016a085bd47c7e281cdff5c2a7e0bc7e392 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Cl=C3=A9ment?= <49512274+ludwigVonKoopa@users.noreply.github.com> Date: Fri, 14 May 2021 13:48:26 +0200 Subject: [PATCH 3/5] corrections for merge request move particle_candidate in groups add default values for shift_files mistake with get_uv_quad correction still in comments change delta_t to n_days correct whitespaces --- examples/16_network/pet_follow_particle.py | 80 +----------- src/py_eddy_tracker/dataset/grid.py | 6 +- src/py_eddy_tracker/observations/groups.py | 109 ++++++++++++++++- src/py_eddy_tracker/observations/network.py | 128 +++----------------- 4 files changed, 127 insertions(+), 196 deletions(-) diff --git a/examples/16_network/pet_follow_particle.py 
b/examples/16_network/pet_follow_particle.py index 0c4be55d..b4dfe343 100644 --- a/examples/16_network/pet_follow_particle.py +++ b/examples/16_network/pet_follow_particle.py @@ -17,6 +17,7 @@ from py_eddy_tracker.data import get_demo_path from py_eddy_tracker.dataset.grid import GridCollection from py_eddy_tracker.observations.network import NetworkObservations +from py_eddy_tracker.observations.groups import particle_candidate from py_eddy_tracker.poly import group_obs start_logger().setLevel("ERROR") @@ -124,81 +125,6 @@ def update(frame): ani = VideoAnimation(a.fig, update, frames=arange(20200, 20269, step), interval=200) -# %% -# In which observations are the particle -# -------------------------------------- -def advect(x, y, c, t0, delta_t): - """ - Advect particle from t0 to t0 + delta_t, with data cube. - """ - kw = dict(nb_step=6, time_step=86400 / 6) - if delta_t < 0: - kw["backward"] = True - delta_t = -delta_t - p = c.advect(x, y, "u", "v", t_init=t0, **kw) - for _ in range(delta_t): - t, x, y = p.__next__() - return t, x, y - - -def particle_candidate(x, y, c, eddies, t_start, i_target, pct, **kwargs): - # Obs from initial time - m_start = eddies.time == t_start - e = eddies.extract_with_mask(m_start) - # to be able to get global index - translate_start = where(m_start)[0] - # Identify particle in eddies (only in core) - i_start = e.contains(x, y, intern=True) - m = i_start != -1 - x, y, i_start = x[m], y[m], i_start[m] - # Advect - t_end, x, y = advect(x, y, c, t_start, **kwargs) - # eddies at last date - m_end = eddies.time == t_end / 86400 - e_end = eddies.extract_with_mask(m_end) - # to be able to get global index - translate_end = where(m_end)[0] - # Id eddies for each alive particle (in core and extern) - i_end = e_end.contains(x, y) - # compute matrix and fill target array - get_matrix(i_start, i_end, translate_start, translate_end, i_target, pct) - - -@njit(cache=True) -def get_matrix(i_start, i_end, translate_start, translate_end, i_target, pct): - nb_start, nb_end = translate_start.size, translate_end.size - # Matrix which will store count for every couple - count = zeros((nb_start, nb_end), dtype=nb_types.int32) - # Number of particles in each origin observation - ref = zeros(nb_start, dtype=nb_types.int32) - # For each particle - for i in range(i_start.size): - i_end_ = i_end[i] - i_start_ = i_start[i] - if i_end_ != -1: - count[i_start_, i_end_] += 1 - ref[i_start_] += 1 - for i in range(nb_start): - for j in range(nb_end): - pct_ = count[i, j] - # If there are particles from i to j - if pct_ != 0: - # Get percent - pct_ = pct_ / ref[i] * 100.0 - # Get indices in full dataset - i_, j_ = translate_start[i], translate_end[j] - pct_0 = pct[i_, 0] - if pct_ > pct_0: - pct[i_, 1] = pct_0 - pct[i_, 0] = pct_ - i_target[i_, 1] = i_target[i_, 0] - i_target[i_, 0] = j_ - elif pct_ > pct[i_, 1]: - pct[i_, 1] = pct_ - i_target[i_, 1] = j_ - return i_target, pct - - # %% # Particle advection # ^^^^^^^^^^^^^^^^^^ @@ -217,12 +143,12 @@ def get_matrix(i_start, i_end, translate_start, translate_end, i_target, pct): # Forward run i_target_f, pct_target_f = -ones(shape, dtype="i4"), zeros(shape, dtype="i1") for t in range(t_start, t_end - dt): - particle_candidate(x0, y0, c, n, t, i_target_f, pct_target_f, delta_t=dt) + particle_candidate(x0, y0, c, n, t, i_target_f, pct_target_f, n_days=dt) # Backward run i_target_b, pct_target_b = -ones(shape, dtype="i4"), zeros(shape, dtype="i1") for t in range(t_start + dt, t_end): - particle_candidate(x0, y0, c, n, t, i_target_b, 
pct_target_b, delta_t=-dt) + particle_candidate(x0, y0, c, n, t, i_target_b, pct_target_b, n_days=-dt) # %% fig = plt.figure(figsize=(10, 10)) diff --git a/src/py_eddy_tracker/dataset/grid.py b/src/py_eddy_tracker/dataset/grid.py index 6337e136..28fa8526 100644 --- a/src/py_eddy_tracker/dataset/grid.py +++ b/src/py_eddy_tracker/dataset/grid.py @@ -2264,7 +2264,7 @@ def from_netcdf_list(cls, filenames, t, x_name, y_name, indexs=None, heigth=None new.datasets.append((t, d)) return new - def shift_files(self, t, filename, x_name, y_name, indexs, heigth): + def shift_files(self, t, filename, x_name, y_name, indexs=None, heigth=None): """Add next file to the list and remove the oldest""" self.datasets = self.datasets[1:] @@ -2553,8 +2553,8 @@ def get_uv_quad(i0, j0, u, v, m, nb_x=0): i1 %= nb_x i_max, j_max = m.shape - # if i1 >= i_max or j1 >= j_max: - # return True, nan, nan, nan, nan, nan, nan, nan, nan + if i1 >= i_max or j1 >= j_max: + return True, nan, nan, nan, nan, nan, nan, nan, nan if m[i0, j0] or m[i0, j1] or m[i1, j0] or m[i1, j1]: return True, nan, nan, nan, nan, nan, nan, nan, nan diff --git a/src/py_eddy_tracker/observations/groups.py b/src/py_eddy_tracker/observations/groups.py index bd8ac81d..835101ff 100644 --- a/src/py_eddy_tracker/observations/groups.py +++ b/src/py_eddy_tracker/observations/groups.py @@ -1,8 +1,8 @@ import logging from abc import ABC, abstractmethod -from numba import njit -from numpy import arange, int32, interp, median, zeros +from numba import njit, types as nb_types +from numpy import arange, int32, interp, median, zeros, where from .observation import EddiesObservations @@ -65,6 +65,111 @@ def get_missing_indices( return indices + +def advect(x, y, c, t0, n_days): + """ + Advect particle from t0 to t0 + n_days, with data cube. 
+
+    :param np.array(float) x: longitude of particles
+    :param np.array(float) y: latitude of particles
+    :param `~py_eddy_tracker.dataset.grid.GridCollection` c: GridCollection with speed for particles
+    :param int t0: julian day of advection start
+    :param int n_days: number of days to advect
+    """
+
+    kw = dict(nb_step=6, time_step=86400 / 6)
+    if n_days < 0:
+        kw["backward"] = True
+        n_days = -n_days
+    p = c.advect(x, y, "u", "v", t_init=t0, **kw)
+    for _ in range(n_days):
+        t, x, y = p.__next__()
+    return t, x, y
+
+
+def particle_candidate(x, y, c, eddies, t_start, i_target, pct, **kwargs):
+    """Select particles within eddies, advect them, return target observation and associated percentages
+
+    :param np.array(float) x: longitude of particles
+    :param np.array(float) y: latitude of particles
+    :param `~py_eddy_tracker.dataset.grid.GridCollection` c: GridCollection with speed for particles
+    :param GroupEddiesObservations eddies: GroupEddiesObservations considered
+    :param int t_start: julian day of the advection
+    :param np.array(int) i_target: corresponding obs where particles are advected
+    :param np.array(int) pct: corresponding percentage of advected particles
+    :param dict kwargs: dict of params given to `advect`
+
+    """
+
+    # Obs from initial time
+    m_start = eddies.time == t_start
+
+    e = eddies.extract_with_mask(m_start)
+    # to be able to get global index
+    translate_start = where(m_start)[0]
+    # Identify particle in eddies (only in core)
+    i_start = e.contains(x, y, intern=True)
+    m = i_start != -1
+
+    x, y, i_start = x[m], y[m], i_start[m]
+    # Advect
+    t_end, x, y = advect(x, y, c, t_start, **kwargs)
+    # eddies at last date
+    m_end = eddies.time == t_end / 86400
+    e_end = eddies.extract_with_mask(m_end)
+    # to be able to get global index
+    translate_end = where(m_end)[0]
+    # Id eddies for each alive particle (in core and extern)
+    i_end = e_end.contains(x, y)
+    # compute matrix and fill target array
+    get_matrix(i_start, i_end, translate_start, translate_end, i_target, pct)
+
+
+@njit(cache=True)
+def get_matrix(i_start, i_end, translate_start, translate_end, i_target, pct):
+    """Compute target observation and associated percentages
+
+    :param np.array(int) i_start: indices of associated contours at starting advection day
+    :param np.array(int) i_end: indices of associated contours after advection
+    :param np.array(int) translate_start: corresponding global indices at starting advection day
+    :param np.array(int) translate_end: corresponding global indices after advection
+    :param np.array(int) i_target: corresponding obs where particles are advected
+    :param np.array(int) pct: corresponding percentage of advected particles
+    """
+
+    nb_start, nb_end = translate_start.size, translate_end.size
+    # Matrix which will store count for every couple
+    count = zeros((nb_start, nb_end), dtype=nb_types.int32)
+    # Number of particles in each origin observation
+    ref = zeros(nb_start, dtype=nb_types.int32)
+    # For each particle
+    for i in range(i_start.size):
+        i_end_ = i_end[i]
+        i_start_ = i_start[i]
+        if i_end_ != -1:
+            count[i_start_, i_end_] += 1
+        ref[i_start_] += 1
+    for i in range(nb_start):
+        for j in range(nb_end):
+            pct_ = count[i, j]
+            # If there are particles from i to j
+            if pct_ != 0:
+                # Get percent
+                pct_ = pct_ / ref[i] * 100.0
+                # Get indices in full dataset
+                i_, j_ = translate_start[i], translate_end[j]
+                pct_0 = pct[i_, 0]
+                if pct_ > pct_0:
+                    pct[i_, 1] = pct_0
+                    pct[i_, 0] = pct_
+                    i_target[i_, 1] = i_target[i_, 0]
+                    i_target[i_, 0] = j_
+                elif pct_ > pct[i_, 1]:
+ pct[i_, 1] = pct_ + i_target[i_, 1] = j_ + return i_target, pct + + class GroupEddiesObservations(EddiesObservations, ABC): @abstractmethod def fix_next_previous_obs(self): diff --git a/src/py_eddy_tracker/observations/network.py b/src/py_eddy_tracker/observations/network.py index 5fe0727a..7292e7c2 100644 --- a/src/py_eddy_tracker/observations/network.py +++ b/src/py_eddy_tracker/observations/network.py @@ -27,7 +27,7 @@ from ..generic import build_index, wrap_longitude from ..poly import bbox_intersection, vertice_overlap, group_obs -from .groups import GroupEddiesObservations, get_missing_indices +from .groups import GroupEddiesObservations, get_missing_indices, particle_candidate from .observation import EddiesObservations from .tracking import TrackEddiesObservations, track_loess_filter, track_median_filter from ..dataset.grid import GridCollection @@ -101,109 +101,6 @@ def fix_next_previous_obs(next_obs, previous_obs, flag_virtual): previous_obs[i_o + 1] = i_o -def advect(x, y, c, t0, delta_t): - """ - Advect particle from t0 to t0 + delta_t, with data cube. - - :param np.array(float) x: longitude of particles - :param np.array(float) y: latitude of particles - :param `~py_eddy_tracker.dataset.grid.GridCollection` c: GridCollection with speed for particles - :param int t0: julian day of advection start - :param int delta_t: number of days to advect - """ - - kw = dict(nb_step=6, time_step=86400 / 6) - if delta_t < 0: - kw["backward"] = True - delta_t = -delta_t - p = c.advect(x, y, "u", "v", t_init=t0, **kw) - for _ in range(delta_t): - t, x, y = p.__next__() - return t, x, y - - -def particle_candidate(x, y, c, eddies, t_start, i_target, pct, **kwargs): - """Select particles within eddies, advect them, return target observation and associated percentages - - :param np.array(float) x: longitude of particles - :param np.array(float) y: latitude of particles - :param `~py_eddy_tracker.dataset.grid.GridCollection` c: GridCollection with speed for particles - :param NetworkObservations eddies: NetworkObservations considered - :param int t_start: julian day of the advection - :param np.array(int) i_target: corresponding obs where particles are advected - :param np.array(int) pct: corresponding percentage of avected particles - :params dict kwargs: dict of params given to `advect` - """ - - # Obs from initial time - m_start = eddies.time == t_start - - e = eddies.extract_with_mask(m_start) - # to be able to get global index - translate_start = where(m_start)[0] - # Identify particle in eddies (only in core) - i_start = e.contains(x, y, intern=True) - m = i_start != -1 - - x, y, i_start = x[m], y[m], i_start[m] - # Advect - t_end, x, y = advect(x, y, c, t_start, **kwargs) - # eddies at last date - m_end = eddies.time == t_end / 86400 - e_end = eddies.extract_with_mask(m_end) - # to be able to get global index - translate_end = where(m_end)[0] - # Id eddies for each alive particle (in core and extern) - i_end = e_end.contains(x, y) - # compute matrix and fill target array - get_matrix(i_start, i_end, translate_start, translate_end, i_target, pct) - - -@njit(cache=True) -def get_matrix(i_start, i_end, translate_start, translate_end, i_target, pct): - """Compute target observation and associated percentages - - :param np.array(int) i_start: indices of associated contours at starting advection day - :param np.array(int) i_end: indices of associated contours after advection - :param np.array(int) translate_start: corresponding global indices at starting advection day - :param np.array(int) 
translate_end: corresponding global indices after advection - :param np.array(int) i_target: corresponding obs where particles are advected - :param np.array(int) pct: corresponding percentage of avected particles - """ - - nb_start, nb_end = translate_start.size, translate_end.size - # Matrix which will store count for every couple - count = zeros((nb_start, nb_end), dtype=nb_types.int32) - # Number of particles in each origin observation - ref = zeros(nb_start, dtype=nb_types.int32) - # For each particle - for i in range(i_start.size): - i_end_ = i_end[i] - i_start_ = i_start[i] - if i_end_ != -1: - count[i_start_, i_end_] += 1 - ref[i_start_] += 1 - for i in range(nb_start): - for j in range(nb_end): - pct_ = count[i, j] - # If there are particles from i to j - if pct_ != 0: - # Get percent - pct_ = pct_ / ref[i] * 100.0 - # Get indices in full dataset - i_, j_ = translate_start[i], translate_end[j] - pct_0 = pct[i_, 0] - if pct_ > pct_0: - pct[i_, 1] = pct_0 - pct[i_, 0] = pct_ - i_target[i_, 1] = i_target[i_, 0] - i_target[i_, 0] = j_ - elif pct_ > pct[i_, 1]: - pct[i_, 1] = pct_ - i_target[i_, 1] = j_ - return i_target, pct - - class NetworkObservations(GroupEddiesObservations): __slots__ = ("_index_network",) @@ -221,12 +118,10 @@ def find_segments_relative(self, obs, stopped=None, order=1): :param int obs: index of observation after the event :param int stopped: index of observation before the event :param int order: order of relatives accepted - :return: all relative segments :rtype: EddiesObservations """ - # extraction of network where the event is network_id = self.tracks[obs] nw = self.network(network_id) @@ -1491,19 +1386,24 @@ def segment_coherence( Percentage of particules and their targets after forward or/and backward advection from a specific eddy. :param callable date_function: python function, takes as param `int` (julian day) and return - data filename associated to the date - ex: - def date2file(julian_day): - date = datetime.timedelta(days=julian_day) + datetime.datetime(1950, 1, 1) - - return f"/tmp/dt_global_allsat_phy_l4_{date.strftime('%Y%m%d')}.nc" - + data filename associated to the date (see note) :param dict uv_params: dict of parameters used by :py:meth:`~py_eddy_tracker.dataset.grid.GridCollection.from_netcdf_list` :param str advection_mode: "backward", "forward" or "both" :param int dt_advect: days for advection :param float step_mesh: step for particule mesh in degrees :param str output_name: if not None, name of file saved in zarr. Else, data will not be saved + :return: list of 2 or 4 array (depending if forward, backward or both) with segment matchs, and percents + + .. note:: the param `date_function` should be something like : + + .. 
code-block:: python + + def date2file(julian_day): + date = datetime.timedelta(days=julian_day) + datetime.datetime(1950, 1, 1) + + return f"/tmp/dt_global_allsat_phy_l4_{date.strftime('%Y%m%d')}.nc" + """ if advection_mode in ["both", "forward"]: @@ -1591,7 +1491,7 @@ def date2file(julian_day): x0, y0, c, - self, + sub_networks, _t, i_target_b, pct_target_b, From 006adc5f0b300e93a84333198945f91d4fb2cb0a Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Cl=C3=A9ment?= <49512274+ludwigVonKoopa@users.noreply.github.com> Date: Fri, 14 May 2021 13:48:53 +0200 Subject: [PATCH 4/5] correction for documentation --- src/py_eddy_tracker/generic.py | 5 +++-- src/py_eddy_tracker/observations/network.py | 6 +++--- src/py_eddy_tracker/poly.py | 1 + 3 files changed, 7 insertions(+), 5 deletions(-) diff --git a/src/py_eddy_tracker/generic.py b/src/py_eddy_tracker/generic.py index 530c2136..283b4b9e 100644 --- a/src/py_eddy_tracker/generic.py +++ b/src/py_eddy_tracker/generic.py @@ -70,8 +70,9 @@ def build_index(groups): :param array groups: array that contains groups to be separated :return: (first_index of each group, last_index of each group, value to shift groups) :rtype: (array, array, int) - Examples - -------- + + :Example: + >>> build_index(array((1, 1, 3, 4, 4))) (array([0, 2, 2, 3]), array([2, 2, 3, 5]), 1) """ diff --git a/src/py_eddy_tracker/observations/network.py b/src/py_eddy_tracker/observations/network.py index 7292e7c2..685b3e42 100644 --- a/src/py_eddy_tracker/observations/network.py +++ b/src/py_eddy_tracker/observations/network.py @@ -221,7 +221,7 @@ def from_split_network(cls, group_dataset, indexs, **kwargs): :param TrackEddiesObservations group_dataset: Group dataset :param indexs: result from split_network - return NetworkObservations + :return: NetworkObservations """ index_order = indexs.argsort(order=("group", "track", "time")) network = cls.new_like(group_dataset, len(group_dataset), **kwargs) @@ -1598,8 +1598,8 @@ def group_translator(nb, duos): :param int nb: size of translator :param set((int, int)) duos: set of all groups that must be joined - Examples - -------- + :Example: + >>> NetworkObservations.group_translator(5, ((0, 1), (0, 2), (1, 3))) [3, 3, 3, 3, 5] """ diff --git a/src/py_eddy_tracker/poly.py b/src/py_eddy_tracker/poly.py index fd4ae9c4..fc36185b 100644 --- a/src/py_eddy_tracker/poly.py +++ b/src/py_eddy_tracker/poly.py @@ -717,6 +717,7 @@ def visvalingam(x, y, fixed_size=18): """Polygon simplification with visvalingam algorithm X, Y are considered like a polygon, the next point after the last one is the first one + :param array x: :param array y: :param int fixed_size: array size of out From b3f66bcf5faf2379e62c72cdfb5e07a62ddbf56e Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Cl=C3=A9ment?= <49512274+ludwigVonKoopa@users.noreply.github.com> Date: Sun, 16 May 2021 15:34:43 +0200 Subject: [PATCH 5/5] minor corrections sorting imports adding notebooks for binder adding kwargs to shift_files correction of new warnings done --- examples/16_network/pet_follow_particle.py | 6 +- examples/16_network/pet_ioannou_2017_case.py | 5 +- .../01_general_things/pet_storage.ipynb | 2 +- .../pet_contour_circle.ipynb | 2 +- .../pet_display_id.ipynb | 2 +- .../pet_eddy_detection.ipynb | 2 +- .../pet_eddy_detection_ACC.ipynb | 2 +- .../pet_eddy_detection_gulf_stream.ipynb | 2 +- .../pet_filter_and_detection.ipynb | 2 +- .../pet_interp_grid_on_dataset.ipynb | 2 +- .../pet_radius_vs_area.ipynb | 2 +- .../pet_shape_gallery.ipynb | 2 +- .../pet_sla_and_adt.ipynb | 2 +- 
.../06_grid_manipulation/pet_advect.ipynb | 2 +- .../06_grid_manipulation/pet_filter.ipynb | 2 +- .../pet_hide_pixel_out_eddies.ipynb | 2 +- .../06_grid_manipulation/pet_lavd.ipynb | 2 +- .../pet_okubo_weiss.ipynb | 2 +- .../07_cube_manipulation/pet_cube.ipynb | 2 +- .../07_cube_manipulation/pet_fsle_med.ipynb | 4 +- .../pet_lavd_detection.ipynb | 2 +- .../pet_display_field.ipynb | 2 +- .../pet_display_track.ipynb | 2 +- .../pet_one_track.ipynb | 2 +- .../pet_run_a_tracking.ipynb | 2 +- .../pet_select_track_across_area.ipynb | 2 +- .../pet_track_anim.ipynb | 2 +- .../pet_track_anim_matplotlib_animation.ipynb | 2 +- .../pet_birth_and_death.ipynb | 2 +- .../pet_center_count.ipynb | 2 +- .../pet_geographic_stats.ipynb | 2 +- .../10_tracking_diagnostics/pet_groups.ipynb | 2 +- .../10_tracking_diagnostics/pet_histo.ipynb | 2 +- .../pet_lifetime.ipynb | 2 +- .../pet_normalised_lifetime.ipynb | 2 +- .../pet_pixel_used.ipynb | 2 +- .../pet_propagation.ipynb | 2 +- .../pet_SST_collocation.ipynb | 2 +- .../14_generic_tools/pet_fit_contour.ipynb | 2 +- .../14_generic_tools/pet_visvalingam.ipynb | 2 +- .../python_module/16_network/pet_atlas.ipynb | 2 +- .../16_network/pet_follow_particle.ipynb | 24 +------ .../16_network/pet_group_anim.ipynb | 2 +- .../16_network/pet_ioannou_2017_case.ipynb | 4 +- .../16_network/pet_relative.ipynb | 2 +- .../16_network/pet_replay_segmentation.ipynb | 2 +- .../16_network/pet_segmentation_anim.ipynb | 2 +- .../16_network/pet_something_cool.ipynb | 65 +++++++++++++++++++ src/py_eddy_tracker/dataset/grid.py | 4 +- src/py_eddy_tracker/observations/groups.py | 6 +- src/py_eddy_tracker/observations/network.py | 8 +-- 51 files changed, 127 insertions(+), 83 deletions(-) create mode 100644 notebooks/python_module/16_network/pet_something_cool.ipynb diff --git a/examples/16_network/pet_follow_particle.py b/examples/16_network/pet_follow_particle.py index b4dfe343..e5451daa 100644 --- a/examples/16_network/pet_follow_particle.py +++ b/examples/16_network/pet_follow_particle.py @@ -8,16 +8,14 @@ from matplotlib import colors from matplotlib import pyplot as plt from matplotlib.animation import FuncAnimation -from numba import njit -from numba import types as nb_types -from numpy import arange, meshgrid, ones, unique, where, zeros +from numpy import arange, meshgrid, ones, unique, zeros from py_eddy_tracker import start_logger from py_eddy_tracker.appli.gui import Anim from py_eddy_tracker.data import get_demo_path from py_eddy_tracker.dataset.grid import GridCollection -from py_eddy_tracker.observations.network import NetworkObservations from py_eddy_tracker.observations.groups import particle_candidate +from py_eddy_tracker.observations.network import NetworkObservations from py_eddy_tracker.poly import group_obs start_logger().setLevel("ERROR") diff --git a/examples/16_network/pet_ioannou_2017_case.py b/examples/16_network/pet_ioannou_2017_case.py index 768f0c88..bbe26e3f 100644 --- a/examples/16_network/pet_ioannou_2017_case.py +++ b/examples/16_network/pet_ioannou_2017_case.py @@ -14,14 +14,13 @@ from matplotlib import pyplot as plt from matplotlib.animation import FuncAnimation from matplotlib.ticker import FuncFormatter -from numpy import arange, where, array, pi +from numpy import arange, array, pi, where from py_eddy_tracker.appli.gui import Anim from py_eddy_tracker.data import get_demo_path +from py_eddy_tracker.generic import coordinates_to_local from py_eddy_tracker.gui import GUI_AXES from py_eddy_tracker.observations.network import NetworkObservations - -from 
py_eddy_tracker.generic import coordinates_to_local from py_eddy_tracker.poly import fit_ellipse # %% diff --git a/notebooks/python_module/01_general_things/pet_storage.ipynb b/notebooks/python_module/01_general_things/pet_storage.ipynb index fa8d1a55..a56e4def 100644 --- a/notebooks/python_module/01_general_things/pet_storage.ipynb +++ b/notebooks/python_module/01_general_things/pet_storage.ipynb @@ -230,7 +230,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.9.2" + "version": "3.7.9" } }, "nbformat": 4, diff --git a/notebooks/python_module/02_eddy_identification/pet_contour_circle.ipynb b/notebooks/python_module/02_eddy_identification/pet_contour_circle.ipynb index 36989357..2d924387 100644 --- a/notebooks/python_module/02_eddy_identification/pet_contour_circle.ipynb +++ b/notebooks/python_module/02_eddy_identification/pet_contour_circle.ipynb @@ -82,7 +82,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.7.7" + "version": "3.7.9" } }, "nbformat": 4, diff --git a/notebooks/python_module/02_eddy_identification/pet_display_id.ipynb b/notebooks/python_module/02_eddy_identification/pet_display_id.ipynb index 6d40974f..d59f9e15 100644 --- a/notebooks/python_module/02_eddy_identification/pet_display_id.ipynb +++ b/notebooks/python_module/02_eddy_identification/pet_display_id.ipynb @@ -129,7 +129,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.7.7" + "version": "3.7.9" } }, "nbformat": 4, diff --git a/notebooks/python_module/02_eddy_identification/pet_eddy_detection.ipynb b/notebooks/python_module/02_eddy_identification/pet_eddy_detection.ipynb index fb6c17f8..7469b034 100644 --- a/notebooks/python_module/02_eddy_identification/pet_eddy_detection.ipynb +++ b/notebooks/python_module/02_eddy_identification/pet_eddy_detection.ipynb @@ -291,7 +291,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.7.7" + "version": "3.7.9" } }, "nbformat": 4, diff --git a/notebooks/python_module/02_eddy_identification/pet_eddy_detection_ACC.ipynb b/notebooks/python_module/02_eddy_identification/pet_eddy_detection_ACC.ipynb index c2a3648d..6ac75cee 100644 --- a/notebooks/python_module/02_eddy_identification/pet_eddy_detection_ACC.ipynb +++ b/notebooks/python_module/02_eddy_identification/pet_eddy_detection_ACC.ipynb @@ -161,7 +161,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.7.7" + "version": "3.7.9" } }, "nbformat": 4, diff --git a/notebooks/python_module/02_eddy_identification/pet_eddy_detection_gulf_stream.ipynb b/notebooks/python_module/02_eddy_identification/pet_eddy_detection_gulf_stream.ipynb index c39bc011..49024327 100644 --- a/notebooks/python_module/02_eddy_identification/pet_eddy_detection_gulf_stream.ipynb +++ b/notebooks/python_module/02_eddy_identification/pet_eddy_detection_gulf_stream.ipynb @@ -273,7 +273,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.7.7" + "version": "3.7.9" } }, "nbformat": 4, diff --git a/notebooks/python_module/02_eddy_identification/pet_filter_and_detection.ipynb b/notebooks/python_module/02_eddy_identification/pet_filter_and_detection.ipynb index 63e763ff..381aa8f6 100644 --- a/notebooks/python_module/02_eddy_identification/pet_filter_and_detection.ipynb +++ b/notebooks/python_module/02_eddy_identification/pet_filter_and_detection.ipynb @@ -176,7 +176,7 @@ "name": 
"python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.7.7" + "version": "3.7.9" } }, "nbformat": 4, diff --git a/notebooks/python_module/02_eddy_identification/pet_interp_grid_on_dataset.ipynb b/notebooks/python_module/02_eddy_identification/pet_interp_grid_on_dataset.ipynb index 94e61b30..0cfdc9a8 100644 --- a/notebooks/python_module/02_eddy_identification/pet_interp_grid_on_dataset.ipynb +++ b/notebooks/python_module/02_eddy_identification/pet_interp_grid_on_dataset.ipynb @@ -111,7 +111,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.7.7" + "version": "3.7.9" } }, "nbformat": 4, diff --git a/notebooks/python_module/02_eddy_identification/pet_radius_vs_area.ipynb b/notebooks/python_module/02_eddy_identification/pet_radius_vs_area.ipynb index c70f7dd6..03eba8bf 100644 --- a/notebooks/python_module/02_eddy_identification/pet_radius_vs_area.ipynb +++ b/notebooks/python_module/02_eddy_identification/pet_radius_vs_area.ipynb @@ -107,7 +107,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.7.7" + "version": "3.7.9" } }, "nbformat": 4, diff --git a/notebooks/python_module/02_eddy_identification/pet_shape_gallery.ipynb b/notebooks/python_module/02_eddy_identification/pet_shape_gallery.ipynb index ffa58c1f..0ef03f6f 100644 --- a/notebooks/python_module/02_eddy_identification/pet_shape_gallery.ipynb +++ b/notebooks/python_module/02_eddy_identification/pet_shape_gallery.ipynb @@ -100,7 +100,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.7.7" + "version": "3.7.9" } }, "nbformat": 4, diff --git a/notebooks/python_module/02_eddy_identification/pet_sla_and_adt.ipynb b/notebooks/python_module/02_eddy_identification/pet_sla_and_adt.ipynb index efbfcc76..9b8b3951 100644 --- a/notebooks/python_module/02_eddy_identification/pet_sla_and_adt.ipynb +++ b/notebooks/python_module/02_eddy_identification/pet_sla_and_adt.ipynb @@ -223,7 +223,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.7.7" + "version": "3.7.9" } }, "nbformat": 4, diff --git a/notebooks/python_module/06_grid_manipulation/pet_advect.ipynb b/notebooks/python_module/06_grid_manipulation/pet_advect.ipynb index b660df52..bceed074 100644 --- a/notebooks/python_module/06_grid_manipulation/pet_advect.ipynb +++ b/notebooks/python_module/06_grid_manipulation/pet_advect.ipynb @@ -262,7 +262,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.9.2" + "version": "3.7.9" } }, "nbformat": 4, diff --git a/notebooks/python_module/06_grid_manipulation/pet_filter.ipynb b/notebooks/python_module/06_grid_manipulation/pet_filter.ipynb index 74a266c2..2d6a7d3a 100644 --- a/notebooks/python_module/06_grid_manipulation/pet_filter.ipynb +++ b/notebooks/python_module/06_grid_manipulation/pet_filter.ipynb @@ -215,7 +215,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.7.7" + "version": "3.7.9" } }, "nbformat": 4, diff --git a/notebooks/python_module/06_grid_manipulation/pet_hide_pixel_out_eddies.ipynb b/notebooks/python_module/06_grid_manipulation/pet_hide_pixel_out_eddies.ipynb index c9bca31e..f30076fa 100644 --- a/notebooks/python_module/06_grid_manipulation/pet_hide_pixel_out_eddies.ipynb +++ b/notebooks/python_module/06_grid_manipulation/pet_hide_pixel_out_eddies.ipynb @@ -111,7 +111,7 @@ "name": "python", "nbconvert_exporter": 
"python", "pygments_lexer": "ipython3", - "version": "3.7.7" + "version": "3.7.9" } }, "nbformat": 4, diff --git a/notebooks/python_module/06_grid_manipulation/pet_lavd.ipynb b/notebooks/python_module/06_grid_manipulation/pet_lavd.ipynb index 67983cec..a5ca088c 100644 --- a/notebooks/python_module/06_grid_manipulation/pet_lavd.ipynb +++ b/notebooks/python_module/06_grid_manipulation/pet_lavd.ipynb @@ -201,7 +201,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.9.2" + "version": "3.7.9" } }, "nbformat": 4, diff --git a/notebooks/python_module/06_grid_manipulation/pet_okubo_weiss.ipynb b/notebooks/python_module/06_grid_manipulation/pet_okubo_weiss.ipynb index b410be0a..ca4998ee 100644 --- a/notebooks/python_module/06_grid_manipulation/pet_okubo_weiss.ipynb +++ b/notebooks/python_module/06_grid_manipulation/pet_okubo_weiss.ipynb @@ -201,7 +201,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.7.7" + "version": "3.7.9" } }, "nbformat": 4, diff --git a/notebooks/python_module/07_cube_manipulation/pet_cube.ipynb b/notebooks/python_module/07_cube_manipulation/pet_cube.ipynb index a8ed7f1b..22cf3158 100644 --- a/notebooks/python_module/07_cube_manipulation/pet_cube.ipynb +++ b/notebooks/python_module/07_cube_manipulation/pet_cube.ipynb @@ -158,7 +158,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.9.2" + "version": "3.7.9" } }, "nbformat": 4, diff --git a/notebooks/python_module/07_cube_manipulation/pet_fsle_med.ipynb b/notebooks/python_module/07_cube_manipulation/pet_fsle_med.ipynb index 4f2e1467..a90c3b9f 100644 --- a/notebooks/python_module/07_cube_manipulation/pet_fsle_med.ipynb +++ b/notebooks/python_module/07_cube_manipulation/pet_fsle_med.ipynb @@ -33,7 +33,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "## ADT in med\n:py:meth:`~py_eddy_tracker.dataset.grid.GridCollection.from_netcdf_cube` method is\nmade for data stores in time cube, you could use also \n:py:meth:`~py_eddy_tracker.dataset.grid.GridCollection.from_netcdf_list` method to\nload data-cube from multiple file.\n\n" + "## ADT in med\n:py:meth:`~py_eddy_tracker.dataset.grid.GridCollection.from_netcdf_cube` method is\nmade for data stores in time cube, you could use also\n:py:meth:`~py_eddy_tracker.dataset.grid.GridCollection.from_netcdf_list` method to\nload data-cube from multiple file.\n\n" ] }, { @@ -172,7 +172,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.9.2" + "version": "3.7.9" } }, "nbformat": 4, diff --git a/notebooks/python_module/07_cube_manipulation/pet_lavd_detection.ipynb b/notebooks/python_module/07_cube_manipulation/pet_lavd_detection.ipynb index f4e5f77e..bd197c57 100644 --- a/notebooks/python_module/07_cube_manipulation/pet_lavd_detection.ipynb +++ b/notebooks/python_module/07_cube_manipulation/pet_lavd_detection.ipynb @@ -194,7 +194,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.9.2" + "version": "3.7.9" } }, "nbformat": 4, diff --git a/notebooks/python_module/08_tracking_manipulation/pet_display_field.ipynb b/notebooks/python_module/08_tracking_manipulation/pet_display_field.ipynb index bf924b36..6e43e9a4 100644 --- a/notebooks/python_module/08_tracking_manipulation/pet_display_field.ipynb +++ b/notebooks/python_module/08_tracking_manipulation/pet_display_field.ipynb @@ -82,7 +82,7 @@ "name": "python", "nbconvert_exporter": "python", 
"pygments_lexer": "ipython3", - "version": "3.7.7" + "version": "3.7.9" } }, "nbformat": 4, diff --git a/notebooks/python_module/08_tracking_manipulation/pet_display_track.ipynb b/notebooks/python_module/08_tracking_manipulation/pet_display_track.ipynb index 1af7b49a..c98e53f0 100644 --- a/notebooks/python_module/08_tracking_manipulation/pet_display_track.ipynb +++ b/notebooks/python_module/08_tracking_manipulation/pet_display_track.ipynb @@ -118,7 +118,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.7.7" + "version": "3.7.9" } }, "nbformat": 4, diff --git a/notebooks/python_module/08_tracking_manipulation/pet_one_track.ipynb b/notebooks/python_module/08_tracking_manipulation/pet_one_track.ipynb index 2749f7e9..95595a7a 100644 --- a/notebooks/python_module/08_tracking_manipulation/pet_one_track.ipynb +++ b/notebooks/python_module/08_tracking_manipulation/pet_one_track.ipynb @@ -93,7 +93,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.7.7" + "version": "3.7.9" } }, "nbformat": 4, diff --git a/notebooks/python_module/08_tracking_manipulation/pet_run_a_tracking.ipynb b/notebooks/python_module/08_tracking_manipulation/pet_run_a_tracking.ipynb index e8871283..d0a2e5b0 100644 --- a/notebooks/python_module/08_tracking_manipulation/pet_run_a_tracking.ipynb +++ b/notebooks/python_module/08_tracking_manipulation/pet_run_a_tracking.ipynb @@ -154,7 +154,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.7.7" + "version": "3.7.9" } }, "nbformat": 4, diff --git a/notebooks/python_module/08_tracking_manipulation/pet_select_track_across_area.ipynb b/notebooks/python_module/08_tracking_manipulation/pet_select_track_across_area.ipynb index 5ba0d481..8e64b680 100644 --- a/notebooks/python_module/08_tracking_manipulation/pet_select_track_across_area.ipynb +++ b/notebooks/python_module/08_tracking_manipulation/pet_select_track_across_area.ipynb @@ -100,7 +100,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.7.7" + "version": "3.7.9" } }, "nbformat": 4, diff --git a/notebooks/python_module/08_tracking_manipulation/pet_track_anim.ipynb b/notebooks/python_module/08_tracking_manipulation/pet_track_anim.ipynb index 041c8987..65768145 100644 --- a/notebooks/python_module/08_tracking_manipulation/pet_track_anim.ipynb +++ b/notebooks/python_module/08_tracking_manipulation/pet_track_anim.ipynb @@ -82,7 +82,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.7.7" + "version": "3.7.9" } }, "nbformat": 4, diff --git a/notebooks/python_module/08_tracking_manipulation/pet_track_anim_matplotlib_animation.ipynb b/notebooks/python_module/08_tracking_manipulation/pet_track_anim_matplotlib_animation.ipynb index 9f77dbae..6d7fcc2e 100644 --- a/notebooks/python_module/08_tracking_manipulation/pet_track_anim_matplotlib_animation.ipynb +++ b/notebooks/python_module/08_tracking_manipulation/pet_track_anim_matplotlib_animation.ipynb @@ -93,7 +93,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.7.7" + "version": "3.7.9" } }, "nbformat": 4, diff --git a/notebooks/python_module/10_tracking_diagnostics/pet_birth_and_death.ipynb b/notebooks/python_module/10_tracking_diagnostics/pet_birth_and_death.ipynb index d9a2ef2b..635c6b5a 100644 --- a/notebooks/python_module/10_tracking_diagnostics/pet_birth_and_death.ipynb +++ 
b/notebooks/python_module/10_tracking_diagnostics/pet_birth_and_death.ipynb @@ -144,7 +144,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.7.7" + "version": "3.7.9" } }, "nbformat": 4, diff --git a/notebooks/python_module/10_tracking_diagnostics/pet_center_count.ipynb b/notebooks/python_module/10_tracking_diagnostics/pet_center_count.ipynb index b6bb15bd..753cd625 100644 --- a/notebooks/python_module/10_tracking_diagnostics/pet_center_count.ipynb +++ b/notebooks/python_module/10_tracking_diagnostics/pet_center_count.ipynb @@ -118,7 +118,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.7.7" + "version": "3.7.9" } }, "nbformat": 4, diff --git a/notebooks/python_module/10_tracking_diagnostics/pet_geographic_stats.ipynb b/notebooks/python_module/10_tracking_diagnostics/pet_geographic_stats.ipynb index 3e884552..df495703 100644 --- a/notebooks/python_module/10_tracking_diagnostics/pet_geographic_stats.ipynb +++ b/notebooks/python_module/10_tracking_diagnostics/pet_geographic_stats.ipynb @@ -118,7 +118,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.7.7" + "version": "3.7.9" } }, "nbformat": 4, diff --git a/notebooks/python_module/10_tracking_diagnostics/pet_groups.ipynb b/notebooks/python_module/10_tracking_diagnostics/pet_groups.ipynb index 85e32c6a..9f06e010 100644 --- a/notebooks/python_module/10_tracking_diagnostics/pet_groups.ipynb +++ b/notebooks/python_module/10_tracking_diagnostics/pet_groups.ipynb @@ -136,7 +136,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.7.7" + "version": "3.7.9" } }, "nbformat": 4, diff --git a/notebooks/python_module/10_tracking_diagnostics/pet_histo.ipynb b/notebooks/python_module/10_tracking_diagnostics/pet_histo.ipynb index 851c6ca4..81809d8b 100644 --- a/notebooks/python_module/10_tracking_diagnostics/pet_histo.ipynb +++ b/notebooks/python_module/10_tracking_diagnostics/pet_histo.ipynb @@ -82,7 +82,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.7.7" + "version": "3.7.9" } }, "nbformat": 4, diff --git a/notebooks/python_module/10_tracking_diagnostics/pet_lifetime.ipynb b/notebooks/python_module/10_tracking_diagnostics/pet_lifetime.ipynb index 4a3ff0af..ed8c0295 100644 --- a/notebooks/python_module/10_tracking_diagnostics/pet_lifetime.ipynb +++ b/notebooks/python_module/10_tracking_diagnostics/pet_lifetime.ipynb @@ -82,7 +82,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.7.7" + "version": "3.7.9" } }, "nbformat": 4, diff --git a/notebooks/python_module/10_tracking_diagnostics/pet_normalised_lifetime.ipynb b/notebooks/python_module/10_tracking_diagnostics/pet_normalised_lifetime.ipynb index 867e081f..a53f2d3a 100644 --- a/notebooks/python_module/10_tracking_diagnostics/pet_normalised_lifetime.ipynb +++ b/notebooks/python_module/10_tracking_diagnostics/pet_normalised_lifetime.ipynb @@ -111,7 +111,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.7.7" + "version": "3.7.9" } }, "nbformat": 4, diff --git a/notebooks/python_module/10_tracking_diagnostics/pet_pixel_used.ipynb b/notebooks/python_module/10_tracking_diagnostics/pet_pixel_used.ipynb index 81bed372..23f830d6 100644 --- a/notebooks/python_module/10_tracking_diagnostics/pet_pixel_used.ipynb +++ 
b/notebooks/python_module/10_tracking_diagnostics/pet_pixel_used.ipynb @@ -118,7 +118,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.7.7" + "version": "3.7.9" } }, "nbformat": 4, diff --git a/notebooks/python_module/10_tracking_diagnostics/pet_propagation.ipynb b/notebooks/python_module/10_tracking_diagnostics/pet_propagation.ipynb index e0d1f2d2..9792f8f4 100644 --- a/notebooks/python_module/10_tracking_diagnostics/pet_propagation.ipynb +++ b/notebooks/python_module/10_tracking_diagnostics/pet_propagation.ipynb @@ -118,7 +118,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.7.7" + "version": "3.7.9" } }, "nbformat": 4, diff --git a/notebooks/python_module/12_external_data/pet_SST_collocation.ipynb b/notebooks/python_module/12_external_data/pet_SST_collocation.ipynb index 05b0413c..b30682a1 100644 --- a/notebooks/python_module/12_external_data/pet_SST_collocation.ipynb +++ b/notebooks/python_module/12_external_data/pet_SST_collocation.ipynb @@ -226,7 +226,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.7.7" + "version": "3.7.9" } }, "nbformat": 4, diff --git a/notebooks/python_module/14_generic_tools/pet_fit_contour.ipynb b/notebooks/python_module/14_generic_tools/pet_fit_contour.ipynb index 5306fa0c..a46a7e22 100644 --- a/notebooks/python_module/14_generic_tools/pet_fit_contour.ipynb +++ b/notebooks/python_module/14_generic_tools/pet_fit_contour.ipynb @@ -100,7 +100,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.9.2" + "version": "3.7.9" } }, "nbformat": 4, diff --git a/notebooks/python_module/14_generic_tools/pet_visvalingam.ipynb b/notebooks/python_module/14_generic_tools/pet_visvalingam.ipynb index 0183abde..69e49b57 100644 --- a/notebooks/python_module/14_generic_tools/pet_visvalingam.ipynb +++ b/notebooks/python_module/14_generic_tools/pet_visvalingam.ipynb @@ -75,7 +75,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.7.7" + "version": "3.7.9" } }, "nbformat": 4, diff --git a/notebooks/python_module/16_network/pet_atlas.ipynb b/notebooks/python_module/16_network/pet_atlas.ipynb index ee8f1934..31e3580f 100644 --- a/notebooks/python_module/16_network/pet_atlas.ipynb +++ b/notebooks/python_module/16_network/pet_atlas.ipynb @@ -363,7 +363,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.9.2" + "version": "3.7.9" } }, "nbformat": 4, diff --git a/notebooks/python_module/16_network/pet_follow_particle.ipynb b/notebooks/python_module/16_network/pet_follow_particle.ipynb index 28d0048d..6be13adf 100644 --- a/notebooks/python_module/16_network/pet_follow_particle.ipynb +++ b/notebooks/python_module/16_network/pet_follow_particle.ipynb @@ -26,7 +26,7 @@ }, "outputs": [], "source": [ - "import re\n\nfrom matplotlib import colors\nfrom matplotlib import pyplot as plt\nfrom matplotlib.animation import FuncAnimation\nfrom numba import njit\nfrom numba import types as nb_types\nfrom numpy import arange, meshgrid, ones, unique, where, zeros\n\nfrom py_eddy_tracker import start_logger\nfrom py_eddy_tracker.appli.gui import Anim\nfrom py_eddy_tracker.data import get_demo_path\nfrom py_eddy_tracker.dataset.grid import GridCollection\nfrom py_eddy_tracker.observations.network import NetworkObservations\nfrom py_eddy_tracker.poly import group_obs\n\nstart_logger().setLevel(\"ERROR\")" + 
"import re\n\nfrom matplotlib import colors\nfrom matplotlib import pyplot as plt\nfrom matplotlib.animation import FuncAnimation\nfrom numpy import arange, meshgrid, ones, unique, zeros\n\nfrom py_eddy_tracker import start_logger\nfrom py_eddy_tracker.appli.gui import Anim\nfrom py_eddy_tracker.data import get_demo_path\nfrom py_eddy_tracker.dataset.grid import GridCollection\nfrom py_eddy_tracker.observations.groups import particle_candidate\nfrom py_eddy_tracker.observations.network import NetworkObservations\nfrom py_eddy_tracker.poly import group_obs\n\nstart_logger().setLevel(\"ERROR\")" ] }, { @@ -105,24 +105,6 @@ "cmap = colors.ListedColormap(list(n.COLORS), name=\"from_list\", N=n.segment.max() + 1)\na = Anim(\n n,\n intern=False,\n figsize=(12, 6),\n nb_step=1,\n dpi=60,\n field_color=\"segment\",\n field_txt=\"segment\",\n cmap=cmap,\n)\na.fig.suptitle(\"\"), a.ax.set_xlim(24, 36), a.ax.set_ylim(30, 36)\na.txt.set_position((25, 31))\n\nstep = 0.25\nkw_p = dict(nb_step=2, time_step=86400 * step * 0.5, t_init=t_snapshot - 2 * step)\n\nmappables = dict()\nparticules = c.advect(x, y, \"u\", \"v\", **kw_p)\nfilament = c.filament(x_f, y_f, \"u\", \"v\", **kw_p, filament_size=3)\nkw = dict(ls=\"\", marker=\".\", markersize=0.25)\nfor k in index_:\n m = k == index\n mappables[k] = a.ax.plot([], [], color=cmap(k), **kw)[0]\nm_filament = a.ax.plot([], [], lw=0.25, color=\"gray\")[0]\n\n\ndef update(frame):\n tt, xt, yt = particules.__next__()\n for k, mappable in mappables.items():\n m = index == k\n mappable.set_data(xt[m], yt[m])\n tt, xt, yt = filament.__next__()\n m_filament.set_data(xt, yt)\n if frame % 1 == 0:\n a.func_animation(frame)\n\n\nani = VideoAnimation(a.fig, update, frames=arange(20200, 20269, step), interval=200)" ] }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## In which observations are the particle\n\n" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": false - }, - "outputs": [], - "source": [ - "def advect(x, y, c, t0, delta_t):\n \"\"\"\n Advect particle from t0 to t0 + delta_t, with data cube.\n \"\"\"\n kw = dict(nb_step=6, time_step=86400 / 6)\n if delta_t < 0:\n kw[\"backward\"] = True\n delta_t = -delta_t\n p = c.advect(x, y, \"u\", \"v\", t_init=t0, **kw)\n for _ in range(delta_t):\n t, x, y = p.__next__()\n return t, x, y\n\n\ndef particle_candidate(x, y, c, eddies, t_start, i_target, pct, **kwargs):\n # Obs from initial time\n m_start = eddies.time == t_start\n e = eddies.extract_with_mask(m_start)\n # to be able to get global index\n translate_start = where(m_start)[0]\n # Identify particle in eddies (only in core)\n i_start = e.contains(x, y, intern=True)\n m = i_start != -1\n x, y, i_start = x[m], y[m], i_start[m]\n # Advect\n t_end, x, y = advect(x, y, c, t_start, **kwargs)\n # eddies at last date\n m_end = eddies.time == t_end / 86400\n e_end = eddies.extract_with_mask(m_end)\n # to be able to get global index\n translate_end = where(m_end)[0]\n # Id eddies for each alive particle (in core and extern)\n i_end = e_end.contains(x, y)\n # compute matrix and fill target array\n get_matrix(i_start, i_end, translate_start, translate_end, i_target, pct)\n\n\n@njit(cache=True)\ndef get_matrix(i_start, i_end, translate_start, translate_end, i_target, pct):\n nb_start, nb_end = translate_start.size, translate_end.size\n # Matrix which will store count for every couple\n count = zeros((nb_start, nb_end), dtype=nb_types.int32)\n # Number of particles in each origin observation\n ref = 
zeros(nb_start, dtype=nb_types.int32)\n # For each particle\n for i in range(i_start.size):\n i_end_ = i_end[i]\n i_start_ = i_start[i]\n if i_end_ != -1:\n count[i_start_, i_end_] += 1\n ref[i_start_] += 1\n for i in range(nb_start):\n for j in range(nb_end):\n pct_ = count[i, j]\n # If there are particles from i to j\n if pct_ != 0:\n # Get percent\n pct_ = pct_ / ref[i] * 100.0\n # Get indices in full dataset\n i_, j_ = translate_start[i], translate_end[j]\n pct_0 = pct[i_, 0]\n if pct_ > pct_0:\n pct[i_, 1] = pct_0\n pct[i_, 0] = pct_\n i_target[i_, 1] = i_target[i_, 0]\n i_target[i_, 0] = j_\n elif pct_ > pct[i_, 1]:\n pct[i_, 1] = pct_\n i_target[i_, 1] = j_\n return i_target, pct" - ] - }, { "cell_type": "markdown", "metadata": {}, @@ -138,7 +120,7 @@ }, "outputs": [], "source": [ - "step = 1 / 60.0\n\nx, y = meshgrid(arange(24, 36, step), arange(31, 36, step))\nx0, y0 = x.reshape(-1), y.reshape(-1)\n# Pre-order to speed up\n_, i = group_obs(x0, y0, 1, 360)\nx0, y0 = x0[i], y0[i]\n\nt_start, t_end = n.period\ndt = 14\n\nshape = (n.obs.size, 2)\n# Forward run\ni_target_f, pct_target_f = -ones(shape, dtype=\"i4\"), zeros(shape, dtype=\"i1\")\nfor t in range(t_start, t_end - dt):\n particle_candidate(x0, y0, c, n, t, i_target_f, pct_target_f, delta_t=dt)\n\n# Backward run\ni_target_b, pct_target_b = -ones(shape, dtype=\"i4\"), zeros(shape, dtype=\"i1\")\nfor t in range(t_start + dt, t_end):\n particle_candidate(x0, y0, c, n, t, i_target_b, pct_target_b, delta_t=-dt)" + "step = 1 / 60.0\n\nx, y = meshgrid(arange(24, 36, step), arange(31, 36, step))\nx0, y0 = x.reshape(-1), y.reshape(-1)\n# Pre-order to speed up\n_, i = group_obs(x0, y0, 1, 360)\nx0, y0 = x0[i], y0[i]\n\nt_start, t_end = n.period\ndt = 14\n\nshape = (n.obs.size, 2)\n# Forward run\ni_target_f, pct_target_f = -ones(shape, dtype=\"i4\"), zeros(shape, dtype=\"i1\")\nfor t in range(t_start, t_end - dt):\n particle_candidate(x0, y0, c, n, t, i_target_f, pct_target_f, n_days=dt)\n\n# Backward run\ni_target_b, pct_target_b = -ones(shape, dtype=\"i4\"), zeros(shape, dtype=\"i1\")\nfor t in range(t_start + dt, t_end):\n particle_candidate(x0, y0, c, n, t, i_target_b, pct_target_b, n_days=-dt)" ] }, { @@ -169,7 +151,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.9.2" + "version": "3.7.9" } }, "nbformat": 4, diff --git a/notebooks/python_module/16_network/pet_group_anim.ipynb b/notebooks/python_module/16_network/pet_group_anim.ipynb index ffb9dd17..7129259c 100644 --- a/notebooks/python_module/16_network/pet_group_anim.ipynb +++ b/notebooks/python_module/16_network/pet_group_anim.ipynb @@ -205,7 +205,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.7.7" + "version": "3.7.9" } }, "nbformat": 4, diff --git a/notebooks/python_module/16_network/pet_ioannou_2017_case.ipynb b/notebooks/python_module/16_network/pet_ioannou_2017_case.ipynb index 9b3d40d6..788e94ca 100644 --- a/notebooks/python_module/16_network/pet_ioannou_2017_case.ipynb +++ b/notebooks/python_module/16_network/pet_ioannou_2017_case.ipynb @@ -26,7 +26,7 @@ }, "outputs": [], "source": [ - "import re\nfrom datetime import datetime, timedelta\n\nfrom matplotlib import colors\nfrom matplotlib import pyplot as plt\nfrom matplotlib.animation import FuncAnimation\nfrom matplotlib.ticker import FuncFormatter\nfrom numpy import arange, where, array, pi\n\nfrom py_eddy_tracker.appli.gui import Anim\nfrom py_eddy_tracker.data import get_demo_path\nfrom py_eddy_tracker.gui import 
GUI_AXES\nfrom py_eddy_tracker.observations.network import NetworkObservations\n\nfrom py_eddy_tracker.generic import coordinates_to_local\nfrom py_eddy_tracker.poly import fit_ellipse" + "import re\nfrom datetime import datetime, timedelta\n\nfrom matplotlib import colors\nfrom matplotlib import pyplot as plt\nfrom matplotlib.animation import FuncAnimation\nfrom matplotlib.ticker import FuncFormatter\nfrom numpy import arange, array, pi, where\n\nfrom py_eddy_tracker.appli.gui import Anim\nfrom py_eddy_tracker.data import get_demo_path\nfrom py_eddy_tracker.generic import coordinates_to_local\nfrom py_eddy_tracker.gui import GUI_AXES\nfrom py_eddy_tracker.observations.network import NetworkObservations\nfrom py_eddy_tracker.poly import fit_ellipse" ] }, { @@ -338,7 +338,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.9.2" + "version": "3.7.9" } }, "nbformat": 4, diff --git a/notebooks/python_module/16_network/pet_relative.ipynb b/notebooks/python_module/16_network/pet_relative.ipynb index cee4010a..9f3fd3d9 100644 --- a/notebooks/python_module/16_network/pet_relative.ipynb +++ b/notebooks/python_module/16_network/pet_relative.ipynb @@ -539,7 +539,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.9.2" + "version": "3.7.9" } }, "nbformat": 4, diff --git a/notebooks/python_module/16_network/pet_replay_segmentation.ipynb b/notebooks/python_module/16_network/pet_replay_segmentation.ipynb index 48f4955b..7c632138 100644 --- a/notebooks/python_module/16_network/pet_replay_segmentation.ipynb +++ b/notebooks/python_module/16_network/pet_replay_segmentation.ipynb @@ -172,7 +172,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.9.2" + "version": "3.7.9" } }, "nbformat": 4, diff --git a/notebooks/python_module/16_network/pet_segmentation_anim.ipynb b/notebooks/python_module/16_network/pet_segmentation_anim.ipynb index ae36381c..05c68873 100644 --- a/notebooks/python_module/16_network/pet_segmentation_anim.ipynb +++ b/notebooks/python_module/16_network/pet_segmentation_anim.ipynb @@ -147,7 +147,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.9.2" + "version": "3.7.9" } }, "nbformat": 4, diff --git a/notebooks/python_module/16_network/pet_something_cool.ipynb b/notebooks/python_module/16_network/pet_something_cool.ipynb new file mode 100644 index 00000000..158852f9 --- /dev/null +++ b/notebooks/python_module/16_network/pet_something_cool.ipynb @@ -0,0 +1,65 @@ +{ + "cells": [ + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": false + }, + "outputs": [], + "source": [ + "%matplotlib inline" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "\n# essai\n\non tente des trucs\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": false + }, + "outputs": [], + "source": [ + "import cartopy.crs as ccrs\nimport cartopy.feature as cfeature\nimport numpy as np\nfrom matplotlib import pyplot as plt\n\nfrom py_eddy_tracker.observations.network import NetworkObservations\n\n\ndef rect_from_extent(extent):\n rect_lon = [extent[0], extent[1], extent[1], extent[0], extent[0]]\n rect_lat = [extent[2], extent[2], extent[3], extent[3], extent[2]]\n return rect_lon, rect_lat\n\n\ndef indice_from_extent(lon, lat, extent):\n mask = (lon > extent[0]) * (lon < extent[1]) * (lat > extent[2]) * (lat < extent[3])\n return 
np.where(mask)[0]\n\n\nfichier = \"/data/adelepoulle/work/Eddies/20201217_network_build/big_network.nc\"\nnetwork = NetworkObservations.load_file(fichier)\nsub_network = network.network(1078566)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": false + }, + "outputs": [], + "source": [ + "# extent_begin = [0, 2, -50, -15]\n# extent_end = [-42, -35, -40, -10]\n\nextent_begin = [2, 22, -50, -30]\ni_obs_begin = indice_from_extent(\n sub_network.longitude, sub_network.latitude, extent_begin\n)\nnetwork_begin = sub_network.find_link(i_obs_begin)\ntime_mini = network_begin.time.min()\ntime_maxi = network_begin.time.max()\n\nextent_end = [-52, -45, -37, -33]\ni_obs_end = indice_from_extent(\n (network_begin.longitude + 180) % 360 - 180, network_begin.latitude, extent_end\n)\nnetwork_end = network_begin.find_link(i_obs_end, forward=False, backward=True)\n\n\ndatasets = [network_begin, network_end]\nextents = [extent_begin, extent_end]\nfig, (ax1, ax2) = plt.subplots(\n 2, 1, figsize=(10, 9), dpi=140, subplot_kw={\"projection\": ccrs.PlateCarree()}\n)\n\nfor ax, dataset, extent in zip([ax1, ax2], datasets, extents):\n sca = dataset.scatter(\n ax,\n name=\"time\",\n cmap=\"Spectral_r\",\n label=\"observation dans le temps\",\n vmin=time_mini,\n vmax=time_maxi,\n )\n\n x, y = rect_from_extent(extent)\n ax.fill(x, y, color=\"grey\", alpha=0.3, label=\"observations choisies\")\n # ax.plot(x, y, marker='o')\n\n ax.legend()\n\n gridlines = ax.gridlines(\n alpha=0.2, color=\"black\", linestyle=\"dotted\", draw_labels=True, dms=True\n )\n\n gridlines.left_labels = False\n gridlines.top_labels = False\n\n ax.coastlines()\n ax.add_feature(cfeature.LAND)\n ax.add_feature(cfeature.LAKES, zorder=10)\n ax.add_feature(cfeature.BORDERS, lw=0.25)\n ax.add_feature(cfeature.OCEAN, alpha=0.2)\n\n\nax1.set_title(\n \"Recherche du d\u00e9placement de l'eau dans les eddies \u00e0 travers les observations choisies\"\n)\nax2.set_title(\"Recherche de la provenance de l'eau \u00e0 travers les observations choisies\")\nax2.set_extent(ax1.get_extent(), ccrs.PlateCarree())\n\nfig.subplots_adjust(right=0.87, left=0.02)\ncbar_ax = fig.add_axes([0.90, 0.1, 0.02, 0.8])\ncbar = fig.colorbar(sca[\"scatter\"], cax=cbar_ax, orientation=\"vertical\")\n_ = cbar.set_label(\"time (jj)\", rotation=270, labelpad=-65)" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.7.9" + } + }, + "nbformat": 4, + "nbformat_minor": 0 +} \ No newline at end of file diff --git a/src/py_eddy_tracker/dataset/grid.py b/src/py_eddy_tracker/dataset/grid.py index 28fa8526..bd9e70d3 100644 --- a/src/py_eddy_tracker/dataset/grid.py +++ b/src/py_eddy_tracker/dataset/grid.py @@ -2264,12 +2264,12 @@ def from_netcdf_list(cls, filenames, t, x_name, y_name, indexs=None, heigth=None new.datasets.append((t, d)) return new - def shift_files(self, t, filename, x_name, y_name, indexs=None, heigth=None): + def shift_files(self, t, filename, heigth=None, **rgd_kwargs): """Add next file to the list and remove the oldest""" self.datasets = self.datasets[1:] - d = RegularGridDataset(filename, x_name, y_name, indexs=indexs) + d = RegularGridDataset(filename, **rgd_kwargs) if heigth is not None: d.add_uv(heigth) 
self.datasets.append((t, d)) diff --git a/src/py_eddy_tracker/observations/groups.py b/src/py_eddy_tracker/observations/groups.py index 835101ff..c0924cb3 100644 --- a/src/py_eddy_tracker/observations/groups.py +++ b/src/py_eddy_tracker/observations/groups.py @@ -1,8 +1,9 @@ import logging from abc import ABC, abstractmethod -from numba import njit, types as nb_types -from numpy import arange, int32, interp, median, zeros, where +from numba import njit +from numba import types as nb_types +from numpy import arange, int32, interp, median, where, zeros from .observation import EddiesObservations @@ -65,7 +66,6 @@ def get_missing_indices( return indices - def advect(x, y, c, t0, n_days): """ Advect particle from t0 to t0 + n_days, with data cube. diff --git a/src/py_eddy_tracker/observations/network.py b/src/py_eddy_tracker/observations/network.py index 685b3e42..0e5b9576 100644 --- a/src/py_eddy_tracker/observations/network.py +++ b/src/py_eddy_tracker/observations/network.py @@ -5,6 +5,7 @@ import logging from glob import glob +import zarr from numba import njit from numba import types as nb_types from numpy import ( @@ -15,22 +16,21 @@ concatenate, empty, in1d, + meshgrid, ones, uint16, uint32, unique, where, zeros, - meshgrid, ) -import zarr +from ..dataset.grid import GridCollection from ..generic import build_index, wrap_longitude -from ..poly import bbox_intersection, vertice_overlap, group_obs +from ..poly import bbox_intersection, group_obs, vertice_overlap from .groups import GroupEddiesObservations, get_missing_indices, particle_candidate from .observation import EddiesObservations from .tracking import TrackEddiesObservations, track_loess_filter, track_median_filter -from ..dataset.grid import GridCollection logger = logging.getLogger("pet")
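
The follow-particle notebook now imports the shared particle_candidate from observations.groups and passes the advection horizon as n_days instead of the removed local delta_t. For every origin observation the (n_obs, 2) arrays keep the best destination in column 0 and the runner-up in column 1, with the matching percentage alongside. A short sketch of reading the forward run, reusing the notebook's i_target_f, pct_target_f and dt names:

    # i_target_f[k, 0]  : observation that received the largest share of particles
    #                     advected from observation k (-1 when no particle matched)
    # pct_target_f[k, 0]: that share in percent; column 1 holds the second-best target
    best = i_target_f[:, 0]
    matched = best != -1
    print(f"{matched.sum()} observations have a forward match over {dt} days")
    k = pct_target_f[matched, 0].argmax()
    print(
        f"strongest link: {pct_target_f[matched, 0][k]} % of particles "
        f"end in observation {best[matched][k]}"
    )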
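
The new pet_something_cool.ipynb traces water through a network: observations are picked inside a lon/lat box, then find_link keeps the part of the network connected to them, forward to see where the selected water ends up and backward to see where it came from. A condensed English sketch of that pattern (the file name and network id below are the notebook's illustrative values, not shipped data):

    import numpy as np

    from py_eddy_tracker.observations.network import NetworkObservations


    def indice_from_extent(lon, lat, extent):
        """Indices of observations inside [lon_min, lon_max, lat_min, lat_max]."""
        lon_min, lon_max, lat_min, lat_max = extent
        m = (lon > lon_min) & (lon < lon_max) & (lat > lat_min) & (lat < lat_max)
        return np.where(m)[0]


    network = NetworkObservations.load_file("big_network.nc")  # illustrative path
    sub_network = network.network(1078566)

    # Where does the water selected in this box go? (forward links, the default call)
    i_begin = indice_from_extent(
        sub_network.longitude, sub_network.latitude, [2, 22, -50, -30]
    )
    network_begin = sub_network.find_link(i_begin)

    # Where does the water observed in this box come from? (backward links only)
    lon_180 = (network_begin.longitude + 180) % 360 - 180
    i_end = indice_from_extent(lon_180, network_begin.latitude, [-52, -45, -37, -33])
    network_end = network_begin.find_link(i_end, forward=False, backward=True)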
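
The grid.py hunk loosens GridCollection.shift_files so that, beyond the time and file name, every keyword except heigth is forwarded untouched to RegularGridDataset (x_name, y_name, indexs, ...). A minimal sketch of rolling a time window with the new signature, assuming hypothetical daily ADT files and variable names:

    from py_eddy_tracker.dataset.grid import GridCollection

    # Hypothetical daily files and grid variable names
    files = ["adt_0001.nc", "adt_0002.nc", "adt_0003.nc"]
    c = GridCollection.from_netcdf_list(
        files, t=[1, 2, 3], x_name="longitude", y_name="latitude", heigth="adt"
    )

    # Slide the window one day: drop the oldest dataset, load day 4.
    # x_name/y_name (and indexs if needed) now travel through **rgd_kwargs.
    c.shift_files(4, "adt_0004.nc", heigth="adt", x_name="longitude", y_name="latitude")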