corrections for merge request
move particle_candidate into groups
add default values for shift_files
uncomment the get_uv_quad correction that was mistakenly left in comments
change delta_t to n_days
correct whitespace
ludwigVonKoopa committed May 14, 2021
commit b5c31016a085bd47c7e281cdff5c2a7e0bc7e392
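The user-facing changes in this commit: particle_candidate moves from the example script into py_eddy_tracker.observations.groups, and its delta_t keyword becomes n_days. A minimal before/after sketch of a caller (variable names taken from the example below, not new API):

# Before: helper defined locally in the example, keyword delta_t
# particle_candidate(x0, y0, c, n, t, i_target_f, pct_target_f, delta_t=dt)

# After: imported from the library, keyword n_days
from py_eddy_tracker.observations.groups import particle_candidate
particle_candidate(x0, y0, c, n, t, i_target_f, pct_target_f, n_days=dt)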
80 changes: 3 additions & 77 deletions examples/16_network/pet_follow_particle.py
@@ -17,6 +17,7 @@
from py_eddy_tracker.data import get_demo_path
from py_eddy_tracker.dataset.grid import GridCollection
from py_eddy_tracker.observations.network import NetworkObservations
from py_eddy_tracker.observations.groups import particle_candidate
from py_eddy_tracker.poly import group_obs

start_logger().setLevel("ERROR")
@@ -124,81 +125,6 @@ def update(frame):
ani = VideoAnimation(a.fig, update, frames=arange(20200, 20269, step), interval=200)


# %%
# In which observations are the particles
# --------------------------------------
def advect(x, y, c, t0, delta_t):
    """
    Advect particle from t0 to t0 + delta_t, with data cube.
    """
    kw = dict(nb_step=6, time_step=86400 / 6)
    if delta_t < 0:
        kw["backward"] = True
        delta_t = -delta_t
    p = c.advect(x, y, "u", "v", t_init=t0, **kw)
    for _ in range(delta_t):
        t, x, y = p.__next__()
    return t, x, y


def particle_candidate(x, y, c, eddies, t_start, i_target, pct, **kwargs):
    # Obs from initial time
    m_start = eddies.time == t_start
    e = eddies.extract_with_mask(m_start)
    # to be able to get global index
    translate_start = where(m_start)[0]
    # Identify particle in eddies (only in core)
    i_start = e.contains(x, y, intern=True)
    m = i_start != -1
    x, y, i_start = x[m], y[m], i_start[m]
    # Advect
    t_end, x, y = advect(x, y, c, t_start, **kwargs)
    # eddies at last date
    m_end = eddies.time == t_end / 86400
    e_end = eddies.extract_with_mask(m_end)
    # to be able to get global index
    translate_end = where(m_end)[0]
    # Id eddies for each alive particle (in core and extern)
    i_end = e_end.contains(x, y)
    # compute matrix and fill target array
    get_matrix(i_start, i_end, translate_start, translate_end, i_target, pct)


@njit(cache=True)
def get_matrix(i_start, i_end, translate_start, translate_end, i_target, pct):
    nb_start, nb_end = translate_start.size, translate_end.size
    # Matrix which will store count for every couple
    count = zeros((nb_start, nb_end), dtype=nb_types.int32)
    # Number of particles in each origin observation
    ref = zeros(nb_start, dtype=nb_types.int32)
    # For each particle
    for i in range(i_start.size):
        i_end_ = i_end[i]
        i_start_ = i_start[i]
        if i_end_ != -1:
            count[i_start_, i_end_] += 1
        ref[i_start_] += 1
    for i in range(nb_start):
        for j in range(nb_end):
            pct_ = count[i, j]
            # If there are particles from i to j
            if pct_ != 0:
                # Get percent
                pct_ = pct_ / ref[i] * 100.0
                # Get indices in full dataset
                i_, j_ = translate_start[i], translate_end[j]
                pct_0 = pct[i_, 0]
                if pct_ > pct_0:
                    pct[i_, 1] = pct_0
                    pct[i_, 0] = pct_
                    i_target[i_, 1] = i_target[i_, 0]
                    i_target[i_, 0] = j_
                elif pct_ > pct[i_, 1]:
                    pct[i_, 1] = pct_
                    i_target[i_, 1] = j_
    return i_target, pct


# %%
# Particle advection
# ^^^^^^^^^^^^^^^^^^
@@ -217,12 +143,12 @@ def get_matrix(i_start, i_end, translate_start, translate_end, i_target, pct):
# Forward run
i_target_f, pct_target_f = -ones(shape, dtype="i4"), zeros(shape, dtype="i1")
for t in range(t_start, t_end - dt):
particle_candidate(x0, y0, c, n, t, i_target_f, pct_target_f, delta_t=dt)
particle_candidate(x0, y0, c, n, t, i_target_f, pct_target_f, n_days=dt)

# Backward run
i_target_b, pct_target_b = -ones(shape, dtype="i4"), zeros(shape, dtype="i1")
for t in range(t_start + dt, t_end):
particle_candidate(x0, y0, c, n, t, i_target_b, pct_target_b, delta_t=-dt)
particle_candidate(x0, y0, c, n, t, i_target_b, pct_target_b, n_days=-dt)

# %%
fig = plt.figure(figsize=(10, 10))
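For reading the result arrays filled above, a hedged sketch (this loop is illustrative and not part of the diff; i_target_f and pct_target_f are the forward-run arrays from the example):

# For each origin observation, column 0 holds the best target's global index and
# its percentage of particles; column 1 holds the runner-up. -1 means no target.
for i_obs, (targets, pcts) in enumerate(zip(i_target_f, pct_target_f)):
    if targets[0] != -1:
        print(f"obs {i_obs}: best target {targets[0]} ({pcts[0]} % of particles)")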
6 changes: 3 additions & 3 deletions src/py_eddy_tracker/dataset/grid.py
@@ -2264,7 +2264,7 @@ def from_netcdf_list(cls, filenames, t, x_name, y_name, indexs=None, heigth=None
new.datasets.append((t, d))
return new

def shift_files(self, t, filename, x_name, y_name, indexs, heigth):
def shift_files(self, t, filename, x_name, y_name, indexs=None, heigth=None):
"""Add next file to the list and remove the oldest"""

self.datasets = self.datasets[1:]
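With indexs and heigth now defaulting to None, the rolling-window update no longer has to restate them. A minimal sketch, assuming c is a GridCollection and using a hypothetical file name and date variable:

# only the new date, path and coordinate names are required
c.shift_files(t_new, "nrt_global_allsat_phy_l4_20190102.nc", x_name="longitude", y_name="latitude")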
@@ -2553,8 +2553,8 @@ def get_uv_quad(i0, j0, u, v, m, nb_x=0):
i1 %= nb_x
i_max, j_max = m.shape

# if i1 >= i_max or j1 >= j_max:
# return True, nan, nan, nan, nan, nan, nan, nan, nan
if i1 >= i_max or j1 >= j_max:
return True, nan, nan, nan, nan, nan, nan, nan, nan

if m[i0, j0] or m[i0, j1] or m[i1, j0] or m[i1, j1]:
return True, nan, nan, nan, nan, nan, nan, nan, nan
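The uncommented guard above makes get_uv_quad bail out before indexing past the grid. A standalone sketch of that logic (illustrative helper, not the library function):

def corner_is_invalid(i0, j0, i1, j1, m):
    # reject the quad when a corner falls outside the grid or on a masked cell
    i_max, j_max = m.shape
    if i1 >= i_max or j1 >= j_max:
        return True
    return bool(m[i0, j0] or m[i0, j1] or m[i1, j0] or m[i1, j1])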
109 changes: 107 additions & 2 deletions src/py_eddy_tracker/observations/groups.py
@@ -1,8 +1,8 @@
import logging
from abc import ABC, abstractmethod

from numba import njit
from numpy import arange, int32, interp, median, zeros
from numba import njit, types as nb_types
from numpy import arange, int32, interp, median, zeros, where

from .observation import EddiesObservations

@@ -65,6 +65,111 @@ def get_missing_indices(
return indices



def advect(x, y, c, t0, n_days):
    """
    Advect particle from t0 to t0 + n_days, with data cube.

    :param np.array(float) x: longitude of particles
    :param np.array(float) y: latitude of particles
    :param `~py_eddy_tracker.dataset.grid.GridCollection` c: GridCollection with speed for particles
    :param int t0: julian day of advection start
    :param int n_days: number of days to advect
    """

    kw = dict(nb_step=6, time_step=86400 / 6)
    if n_days < 0:
        kw["backward"] = True
        n_days = -n_days
    p = c.advect(x, y, "u", "v", t_init=t0, **kw)
    for _ in range(n_days):
        t, x, y = p.__next__()
    return t, x, y
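An illustrative call pattern for the relocated helper (not part of the diff; c, the particle positions and the Julian days are assumed from the example script). A negative n_days takes the backward branch above:

# hedged sketch: forward for 14 days, then back from the arrival date
t_fwd, x_fwd, y_fwd = advect(x, y, c, 20200, n_days=14)
t_bwd, x_bwd, y_bwd = advect(x_fwd, y_fwd, c, 20214, n_days=-14)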


def particle_candidate(x, y, c, eddies, t_start, i_target, pct, **kwargs):
    """Select particles within eddies, advect them, return target observation and associated percentages

    :param np.array(float) x: longitude of particles
    :param np.array(float) y: latitude of particles
    :param `~py_eddy_tracker.dataset.grid.GridCollection` c: GridCollection with speed for particles
    :param GroupEddiesObservations eddies: GroupEddiesObservations considered
    :param int t_start: julian day of the advection
    :param np.array(int) i_target: corresponding obs where particles are advected
    :param np.array(int) pct: corresponding percentage of advected particles
    :param dict kwargs: dict of parameters given to `advect`

    """

    # Obs from initial time
    m_start = eddies.time == t_start

    e = eddies.extract_with_mask(m_start)
    # to be able to get global index
    translate_start = where(m_start)[0]
    # Identify particle in eddies (only in core)
    i_start = e.contains(x, y, intern=True)
    m = i_start != -1

    x, y, i_start = x[m], y[m], i_start[m]
    # Advect
    t_end, x, y = advect(x, y, c, t_start, **kwargs)
    # eddies at last date
    m_end = eddies.time == t_end / 86400
    e_end = eddies.extract_with_mask(m_end)
    # to be able to get global index
    translate_end = where(m_end)[0]
    # Identify eddies for each alive particle (in core and exterior)
    i_end = e_end.contains(x, y)
    # compute matrix and fill target array
    get_matrix(i_start, i_end, translate_start, translate_end, i_target, pct)
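A hedged note on the output buffers particle_candidate fills, mirroring how the example script allocates them (n_obs is a hypothetical size standing for the total number of eddy observations):

from numpy import ones, zeros

n_obs = 1000                           # hypothetical number of observations
shape = (n_obs, 2)                     # column 0: best candidate, column 1: runner-up
i_target = -ones(shape, dtype="i4")    # -1 means "no target yet"
pct = zeros(shape, dtype="i1")         # percentages start at 0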


@njit(cache=True)
def get_matrix(i_start, i_end, translate_start, translate_end, i_target, pct):
    """Compute target observation and associated percentages

    :param np.array(int) i_start: indices of associated contours at starting advection day
    :param np.array(int) i_end: indices of associated contours after advection
    :param np.array(int) translate_start: corresponding global indices at starting advection day
    :param np.array(int) translate_end: corresponding global indices after advection
    :param np.array(int) i_target: corresponding obs where particles are advected
    :param np.array(int) pct: corresponding percentage of advected particles
    """

    nb_start, nb_end = translate_start.size, translate_end.size
    # Matrix which will store count for every couple
    count = zeros((nb_start, nb_end), dtype=nb_types.int32)
    # Number of particles in each origin observation
    ref = zeros(nb_start, dtype=nb_types.int32)
    # For each particle
    for i in range(i_start.size):
        i_end_ = i_end[i]
        i_start_ = i_start[i]
        if i_end_ != -1:
            count[i_start_, i_end_] += 1
        ref[i_start_] += 1
    for i in range(nb_start):
        for j in range(nb_end):
            pct_ = count[i, j]
            # If there are particles from i to j
            if pct_ != 0:
                # Get percent
                pct_ = pct_ / ref[i] * 100.0
                # Get indices in full dataset
                i_, j_ = translate_start[i], translate_end[j]
                pct_0 = pct[i_, 0]
                if pct_ > pct_0:
                    pct[i_, 1] = pct_0
                    pct[i_, 0] = pct_
                    i_target[i_, 1] = i_target[i_, 0]
                    i_target[i_, 0] = j_
                elif pct_ > pct[i_, 1]:
                    pct[i_, 1] = pct_
                    i_target[i_, 1] = j_
    return i_target, pct
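As a quick sanity check on the ranking logic, a tiny hand-built case (values are illustrative, not from the dataset): three particles leave origin contour 0, two end in contour 1 and one in contour 0.

import numpy as np

i_start = np.array([0, 0, 0], dtype=np.int32)
i_end = np.array([1, 1, 0], dtype=np.int32)
translate_start = np.array([10], dtype=np.int32)    # global index of the origin obs
translate_end = np.array([20, 21], dtype=np.int32)  # global indices of the end obs
i_target = -np.ones((30, 2), dtype="i4")
pct = np.zeros((30, 2), dtype="i1")
get_matrix(i_start, i_end, translate_start, translate_end, i_target, pct)
# expected: i_target[10] == [21, 20] (best match first) and pct[10] close to [66, 33]
# after the cast into the int8 pct array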


class GroupEddiesObservations(EddiesObservations, ABC):
    @abstractmethod
    def fix_next_previous_obs(self):