3 changes: 3 additions & 0 deletions CHANGELOG.rst
@@ -12,9 +12,12 @@ Changed
 ^^^^^^^
 Fixed
 ^^^^^
+- GridCollection get_next_time_step & get_previous_time_step needed extra files in the dataset list to work:
+  the loop explicitly yielded self.datasets[i - 1] (or self.datasets[i + 1]) even when i == 0, so the index went out of range.
 Added
 ^^^^^
 
 
 [3.4.0] - 2021-03-29
 --------------------
 Changed
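For context on the fix above: with Python lists, datasets[i - 1] silently wraps to the last element when i == 0, while datasets[i + 1] raises IndexError at the far end, so the old first-iteration yield needed files outside the requested range. A minimal sketch of both failure modes, using a placeholder list in place of the real self.datasets:

# placeholder (time, grid) pairs standing in for GridCollection.datasets
datasets = [(0, "g0"), (1, "g1"), (2, "g2")]

print(datasets[0 - 1])  # (2, 'g2'): wraps to the last element instead of failing

try:
    datasets[(len(datasets) - 1) + 1]
except IndexError as err:
    print(err)  # list index out of range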
10 changes: 2 additions & 8 deletions examples/16_network/pet_follow_particle.py
@@ -128,25 +128,19 @@ def update(frame):
 # ^^^^^^^^^^^^^^^^^^
 step = 1 / 60.0
 
-x, y = meshgrid(arange(24, 36, step), arange(31, 36, step))
-x0, y0 = x.reshape(-1), y.reshape(-1)
-# Pre-order to speed up
-_, i = group_obs(x0, y0, 1, 360)
-x0, y0 = x0[i], y0[i]
-
 t_start, t_end = n.period
 dt = 14
 
 shape = (n.obs.size, 2)
 # Forward run
 i_target_f, pct_target_f = -ones(shape, dtype="i4"), zeros(shape, dtype="i1")
 for t in range(t_start, t_end - dt):
-    particle_candidate(x0, y0, c, n, t, i_target_f, pct_target_f, n_days=dt)
+    particle_candidate(c, n, step, t, i_target_f, pct_target_f, n_days=dt)
 
 # Backward run
 i_target_b, pct_target_b = -ones(shape, dtype="i4"), zeros(shape, dtype="i1")
 for t in range(t_start + dt, t_end):
-    particle_candidate(x0, y0, c, n, t, i_target_b, pct_target_b, n_days=-dt)
+    particle_candidate(c, n, step, t, i_target_b, pct_target_b, n_days=-dt)
 
 # %%
 fig = plt.figure(figsize=(10, 10))
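For readers of the example: both runs fill per-observation target arrays of shape (n.obs.size, 2); each row appears to hold up to two candidate target observations and, in pct, the percentage of advected particles that reached each one. A small sketch of allocating and reading them back (n_obs is a placeholder; the real arrays are filled in place by particle_candidate):

from numpy import ones, zeros

n_obs = 4  # placeholder for n.obs.size
i_target = -ones((n_obs, 2), dtype="i4")  # -1 means "no target found"
pct = zeros((n_obs, 2), dtype="i1")       # particle percentages per candidate

# after the forward/backward runs, keep observations with at least one target
for obs in range(n_obs):
    if i_target[obs, 0] != -1:
        print(obs, i_target[obs], pct[obs])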
4 changes: 1 addition & 3 deletions src/py_eddy_tracker/__init__.py
@@ -32,9 +32,7 @@


 def start_logger():
-    FORMAT_LOG = (
-        "%(levelname)-8s %(asctime)s %(module)s.%(funcName)s :\n\t\t\t\t\t%(message)s"
-    )
+    FORMAT_LOG = "%(levelname)-8s %(asctime)s %(module)s.%(funcName)s :\n\t%(message)s"
     logger = logging.getLogger("pet")
     if len(logger.handlers) == 0:
         # set up logging to CONSOLE
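The only behavioral change here is the continuation indent of multi-line log records (one tab instead of five). A quick standalone way to preview the rendered format, assuming plain basicConfig wiring rather than the package's own console handler:

import logging

FORMAT_LOG = "%(levelname)-8s %(asctime)s %(module)s.%(funcName)s :\n\t%(message)s"
logging.basicConfig(level=logging.DEBUG, format=FORMAT_LOG)
logging.getLogger("pet").debug("message printed on its own tab-indented line")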
22 changes: 11 additions & 11 deletions src/py_eddy_tracker/dataset/grid.py
@@ -2260,11 +2260,13 @@ def from_netcdf_cube(cls, filename, x_name, y_name, t_name, heigth=None):
     @classmethod
     def from_netcdf_list(cls, filenames, t, x_name, y_name, indexs=None, heigth=None):
         new = cls()
-        for i, t in enumerate(t):
-            d = RegularGridDataset(filenames[i], x_name, y_name, indexs=indexs)
+        for i, _t in enumerate(t):
+            filename = filenames[i]
+            logger.debug(f"load file {i:02d}/{len(t)} t={_t} : {filename}")
+            d = RegularGridDataset(filename, x_name, y_name, indexs=indexs)
             if heigth is not None:
                 d.add_uv(heigth)
-            new.datasets.append((t, d))
+            new.datasets.append((_t, d))
         return new
 
     def shift_files(self, t, filename, heigth=None, **rgd_kwargs):
@@ -2276,6 +2278,7 @@ def shift_files(self, t, filename, heigth=None, **rgd_kwargs):
         if heigth is not None:
             d.add_uv(heigth)
         self.datasets.append((t, d))
+        logger.debug(f"shift and adding i={len(self.datasets)} t={t} : {filename}")
 
     def interp(self, grid_name, t, lons, lats, method="bilinear"):
         """
@@ -2436,6 +2439,7 @@ def advect(
         else:
             mask_particule += isnan(x) + isnan(y)
         while True:
+            logger.debug(f"advect : t={t}")
             if (backward and t <= t1) or (not backward and t >= t1):
                 t0, u0, v0, m0 = t1, u1, v1, m1
                 t1, d1 = generator.__next__()
@@ -2462,25 +2466,21 @@
             yield t, x, y
 
     def get_next_time_step(self, t_init):
-        first = True
         for i, (t, dataset) in enumerate(self.datasets):
             if t < t_init:
                 continue
-            if first:
-                first = False
-                yield self.datasets[i - 1]
 
+            logger.debug(f"i={i}, t={t}, dataset={dataset}")
             yield t, dataset
 
     def get_previous_time_step(self, t_init):
-        first = True
+        i = len(self.datasets)
         for t, dataset in reversed(self.datasets):
+            i -= 1
             if t > t_init:
                 continue
-            if first:
-                first = False
-                yield self.datasets[i + 1]
 
+            logger.debug(f"i={i}, t={t}, dataset={dataset}")
             yield t, dataset
 
 
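With the first-iteration special case removed, both generators simply yield every (time, dataset) pair on the requested side of t_init. A minimal usage sketch, assuming a GridCollection built from a file list (filenames, times, and the variable names below are placeholders):

from py_eddy_tracker.dataset.grid import GridCollection

# hypothetical setup; adapt the file list, times and variable names
c = GridCollection.from_netcdf_list(filenames, times, "longitude", "latitude")

for t, dataset in c.get_next_time_step(20181):
    ...  # steps with t >= 20181, in increasing time order

for t, dataset in c.get_previous_time_step(20181):
    ...  # steps with t <= 20181, in decreasing time order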
68 changes: 57 additions & 11 deletions src/py_eddy_tracker/observations/groups.py
@@ -3,9 +3,10 @@

 from numba import njit
 from numba import types as nb_types
-from numpy import arange, int32, interp, median, where, zeros
+from numpy import arange, int32, interp, median, where, zeros, meshgrid, concatenate
 
 from .observation import EddiesObservations
+from ..poly import group_obs
 
 logger = logging.getLogger("pet")

@@ -87,11 +88,55 @@ def advect(x, y, c, t0, n_days):
     return t, x, y
 
 
-def particle_candidate(x, y, c, eddies, t_start, i_target, pct, **kwargs):
+def create_particles(eddies, step):
+    """Create particles only inside the speed contour. Avoid creating too large numpy arrays, only to be masked.
+
+    :param eddies: network where eddies are
+    :type eddies: network
+    :param step: step for particles
+    :type step: float
+    :return: lon, lat and indices of particles in the speed contour
+    :rtype: tuple(np.array)
+    """
+    lon = eddies.contour_lon_s
+    lat = eddies.contour_lat_s
+
+    # compute the bounding box of each eddy
+    lonMins = lon.min(axis=1)
+    lonMins = lonMins - (lonMins % step)
+    lonMaxs = lon.max(axis=1)
+    lonMaxs = lonMaxs - (lonMaxs % step) + step * 2
+
+    latMins = lat.min(axis=1)
+    latMins = latMins - (latMins % step)
+    latMaxs = lat.max(axis=1)
+    latMaxs = latMaxs - (latMaxs % step) + step * 2
+
+    lon = []
+    lat = []
+    # for each eddy, create a mesh of particles, then concatenate
+    for lonMin, lonMax, latMin, latMax in zip(lonMins, lonMaxs, latMins, latMaxs):
+        x0, y0 = meshgrid(arange(lonMin, lonMax, step), arange(latMin, latMax, step))
+        x0, y0 = x0.reshape(-1), y0.reshape(-1)
+        lon.append(x0)
+        lat.append(y0)
+
+    x = concatenate(lon)
+    y = concatenate(lat)
+
+    # pre-order particles to speed up later lookups
+    _, i = group_obs(x, y, 1, 360)
+    x, y = x[i], y[i]
+
+    # keep only particles born inside a speed contour
+    i_start = eddies.contains(x, y, intern=True)
+    m = i_start != -1
+    return x[m], y[m], i_start[m]
+
+
+def particle_candidate(c, eddies, step_mesh, t_start, i_target, pct, **kwargs):
     """Select particles within eddies, advect them, return target observation and associated percentages
 
-    :param np.array(float) x: longitude of particles
-    :param np.array(float) y: latitude of particles
     :param `~py_eddy_tracker.dataset.grid.GridCollection` c: GridCollection with speed for particles
     :param GroupEddiesObservations eddies: GroupEddiesObservations considered
     :param int t_start: julian day of the advection
@@ -100,27 +145,28 @@ def particle_candidate(x, y, c, eddies, t_start, i_target, pct, **kwargs):
     :params dict kwargs: dict of params given to `advect`
 
     """
 
     # Obs from initial time
     m_start = eddies.time == t_start
 
     e = eddies.extract_with_mask(m_start)
 
     # to be able to get global index
     translate_start = where(m_start)[0]
-    # Identify particle in eddies (only in core)
-    i_start = e.contains(x, y, intern=True)
-    m = i_start != -1
-
-    x, y, i_start = x[m], y[m], i_start[m]
-    # Advect
+
+    x, y, i_start = create_particles(e, step_mesh)
+
+    # Advection
     t_end, x, y = advect(x, y, c, t_start, **kwargs)
 
     # eddies at last date
     m_end = eddies.time == t_end / 86400
     e_end = eddies.extract_with_mask(m_end)
 
     # to be able to get global index
     translate_end = where(m_end)[0]
 
     # Id eddies for each alive particle (in core and extern)
     i_end = e_end.contains(x, y)
 
     # compute matrix and fill target array
     get_matrix(i_start, i_end, translate_start, translate_end, i_target, pct)
 
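A closing note on create_particles: the bounding-box arithmetic snaps each eddy's lon/lat extent outward onto a shared lattice of spacing step, so the per-eddy meshes stay aligned and can be concatenated safely. A self-contained sketch of that rounding for a single box (pure numpy; the group_obs ordering and the contains mask are left out):

from numpy import arange, meshgrid

step = 1 / 50.0  # placeholder mesh step
lon_min, lon_max = 24.13, 25.87  # placeholder eddy extent
lat_min, lat_max = 31.02, 32.49

x0 = lon_min - (lon_min % step)             # snap down onto the lattice
x1 = lon_max - (lon_max % step) + step * 2  # snap up, one extra cell of margin
y0 = lat_min - (lat_min % step)
y1 = lat_max - (lat_max % step) + step * 2

x, y = meshgrid(arange(x0, x1, step), arange(y0, y1, step))
x, y = x.reshape(-1), y.reshape(-1)  # flat particle positions, one per node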