- Capitalization + spelling fixes
- Cast n.period to int for particle advection
- Layout / formatting
CoriPegliasco committed Aug 17, 2021
commit 08c2393a053ee9bffcf77957fd369ddc120a0d1a
13 changes: 10 additions & 3 deletions examples/02_eddy_identification/pet_eddy_detection_ACC.py
@@ -65,7 +65,8 @@ def set_fancy_labels(fig, ticklabelsize=14, labelsize=14, labelweight="semibold"
     y_name="latitude",
     # Manual area subset
     indexs=dict(
-        latitude=slice(100 - margin, 220 + margin), longitude=slice(0, 230 + margin),
+        latitude=slice(100 - margin, 220 + margin),
+        longitude=slice(0, 230 + margin),
     ),
 )
 g_raw = RegularGridDataset(**kw_data)
@@ -187,10 +188,16 @@ def set_fancy_labels(fig, ticklabelsize=14, labelsize=14, labelweight="semibold"
     ax.set_ylabel("With filter")
 
     ax.plot(
-        a_[field][i_a] * factor, a[field][j_a] * factor, "r.", label="Anticyclonic",
+        a_[field][i_a] * factor,
+        a[field][j_a] * factor,
+        "r.",
+        label="Anticyclonic",
     )
     ax.plot(
-        c_[field][i_c] * factor, c[field][j_c] * factor, "b.", label="Cyclonic",
+        c_[field][i_c] * factor,
+        c[field][j_c] * factor,
+        "b.",
+        label="Cyclonic",
     )
     ax.set_aspect("equal"), ax.grid()
     ax.plot((0, 1000), (0, 1000), "g")
6 changes: 5 additions & 1 deletion examples/06_grid_manipulation/pet_lavd.py
@@ -159,7 +159,11 @@ def update(i_frame):
 # Format LAVD data
 lavd = RegularGridDataset.with_array(
     coordinates=("lon", "lat"),
-    datas=dict(lavd=lavd.T, lon=x_g, lat=y_g,),
+    datas=dict(
+        lavd=lavd.T,
+        lon=x_g,
+        lat=y_g,
+    ),
     centered=True,
 )
 
28 changes: 14 additions & 14 deletions examples/16_network/pet_atlas.py
@@ -153,33 +153,33 @@ def update_axes(ax, mappable=None):
 update_axes(ax, m).set_label("Pixel used in % all atlas")
 
 # %%
-# All Spliting
-# ------------
-# Display the occurence of spliting events
+# All splitting
+# -------------
+# Display the occurence of splitting events
 ax = start_axes("")
-g_all_spliting = n.spliting_event().grid_count(bins)
-m = g_all_spliting.display(ax, **kw_time, vmin=0, vmax=1)
+g_all_splitting = n.splitting_event().grid_count(bins)
+m = g_all_splitting.display(ax, **kw_time, vmin=0, vmax=1)
 update_axes(ax, m).set_label("Pixel used in % of time")
 
 # %%
-# Ratio spliting events / eddy presence
+# Ratio splitting events / eddy presence
 ax = start_axes("")
-g_ = g_all_spliting.vars["count"] * 100.0 / g_all.vars["count"]
-m = g_all_spliting.display(ax, **kw_ratio, vmin=0, vmax=5, name=g_)
+g_ = g_all_splitting.vars["count"] * 100.0 / g_all.vars["count"]
+m = g_all_splitting.display(ax, **kw_ratio, vmin=0, vmax=5, name=g_)
 update_axes(ax, m).set_label("Pixel used in % all atlas")
 
 # %%
-# Spliting in networks longer than 10 days
-# ----------------------------------------
+# splitting in networks longer than 10 days
+# -----------------------------------------
 ax = start_axes("")
-g_10_spliting = n10.spliting_event().grid_count(bins)
-m = g_10_spliting.display(ax, **kw_time, vmin=0, vmax=1)
+g_10_splitting = n10.splitting_event().grid_count(bins)
+m = g_10_splitting.display(ax, **kw_time, vmin=0, vmax=1)
 update_axes(ax, m).set_label("Pixel used in % of time")
 # %%
 ax = start_axes("")
 g_ = ma.array(
-    g_10_spliting.vars["count"] * 100.0 / g_10.vars["count"],
+    g_10_splitting.vars["count"] * 100.0 / g_10.vars["count"],
     mask=g_10.vars["count"] < 365,
 )
-m = g_10_spliting.display(ax, **kw_ratio, vmin=0, vmax=5, name=g_)
+m = g_10_splitting.display(ax, **kw_ratio, vmin=0, vmax=5, name=g_)
 update_axes(ax, m).set_label("Pixel used in % all atlas")
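Note: the ratio panels above divide the splitting-event count by the eddy-presence count and hide pixels that were sampled too rarely. A minimal standalone sketch of that masking pattern (synthetic counts; the 365-sample threshold mirrors the example above):

import numpy as np
import numpy.ma as ma

# Synthetic pixel counts: splitting events and total eddy presence (hypothetical values)
count_splitting = np.array([[2.0, 0.0], [5.0, 1.0]])
count_all = np.array([[400.0, 120.0], [800.0, 365.0]])

# Percentage of splitting events per pixel, masked where sampling is insufficient
ratio = ma.array(count_splitting * 100.0 / count_all, mask=count_all < 365)
print(ratio)  # entries with count_all < 365 are masked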
4 changes: 3 additions & 1 deletion examples/16_network/pet_follow_particle.py
@@ -125,9 +125,11 @@ def update(frame):
 # %%
 # Particle advection
 # ^^^^^^^^^^^^^^^^^^
+# Advection from speed contour to speed contour (default)
+
 step = 1 / 60.0
 
-t_start, t_end = n.period
+t_start, t_end = int(n.period[0]), int(n.period[1])
 dt = 14
 
 shape = (n.obs.size, 2)
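Note: the commit casts n.period to built-in int before setting up the advection. One plausible reason (an assumption, not stated in the diff) is that constructs such as range() accept integers but reject floating-point day values; a minimal sketch with hypothetical numbers:

import numpy as np

period = (np.float64(22000.0), np.float64(22100.0))  # hypothetical stand-in for n.period
t_start, t_end = int(period[0]), int(period[1])
dt = 14

# range() requires integers; passing np.float64 values here would raise a TypeError
for t0 in range(t_start, t_end - dt):
    pass  # release particles at day t0 and advect them for dt days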
6 changes: 3 additions & 3 deletions examples/16_network/pet_relative.py
@@ -292,13 +292,13 @@
 m1
 
 # %%
-# Get spliting event
-# ------------------
+# Get splitting event
+# -------------------
 # Display the position of the eddies before a splitting
 fig = plt.figure(figsize=(15, 8))
 ax = fig.add_axes([0.04, 0.06, 0.90, 0.88], projection=GUI_AXES)
 n.plot(ax, color_cycle=n.COLORS)
-s0, s1, s1_start = n.spliting_event(triplet=True)
+s0, s1, s1_start = n.splitting_event(triplet=True)
 s0.display(ax, color="violet", lw=2, label="Eddies before splitting")
 s1.display(ax, color="blueviolet", lw=2, label="Eddies after splitting")
 s1_start.display(ax, color="black", lw=2, label="Eddies starting by splitting")
8 changes: 7 additions & 1 deletion examples/16_network/pet_replay_segmentation.py
@@ -149,7 +149,13 @@ def get_obs(dataset):
 n_.median_filter(15, "time", "latitude")
 kw["s"] = (n_.radius_e * 1e-3) ** 2 / 30 ** 2 * 20
 m = n_.scatter_timeline(
-    ax, "shape_error_e", vmin=14, vmax=70, **kw, yfield="lon", method="all",
+    ax,
+    "shape_error_e",
+    vmin=14,
+    vmax=70,
+    **kw,
+    yfield="lon",
+    method="all",
 )
 ax.set_ylabel("Longitude")
 cb = update_axes(ax, m["scatter"])
3 changes: 2 additions & 1 deletion examples/16_network/pet_segmentation_anim.py
@@ -96,7 +96,8 @@ def update(i_frame):
 
     indices_frames = INDICES[i_frame]
     mappable_CONTOUR.set_data(
-        e.contour_lon_e[indices_frames], e.contour_lat_e[indices_frames],
+        e.contour_lon_e[indices_frames],
+        e.contour_lat_e[indices_frames],
     )
     mappable_CONTOUR.set_color(cmap.colors[tr[indices_frames] % len(cmap.colors)])
     return (mappable_tracks,)
15 changes: 11 additions & 4 deletions src/py_eddy_tracker/__init__.py
@@ -404,7 +404,7 @@ def identify_time(str_date):
         nc_dims=("obs",),
         nc_attr=dict(
             long_name="Previous observation index",
-            comment="Index of previous observation in a spliting case",
+            comment="Index of previous observation in a splitting case",
         ),
     ),
     next_obs=dict(
@@ -422,14 +422,20 @@ def identify_time(str_date):
         nc_name="previous_cost",
         nc_type="float32",
         nc_dims=("obs",),
-        nc_attr=dict(long_name="Previous cost for previous observation", comment="",),
+        nc_attr=dict(
+            long_name="Previous cost for previous observation",
+            comment="",
+        ),
     ),
     next_cost=dict(
         attr_name=None,
         nc_name="next_cost",
         nc_type="float32",
         nc_dims=("obs",),
-        nc_attr=dict(long_name="Next cost for next observation", comment="",),
+        nc_attr=dict(
+            long_name="Next cost for next observation",
+            comment="",
+        ),
     ),
     n=dict(
         attr_name=None,
@@ -640,7 +646,8 @@ def identify_time(str_date):
         nc_type="f4",
         nc_dims=("obs",),
         nc_attr=dict(
-            long_name="Log base 10 background chlorophyll", units="Log(Chl/[mg/m^3])",
+            long_name="Log base 10 background chlorophyll",
+            units="Log(Chl/[mg/m^3])",
         ),
     ),
     year=dict(
18 changes: 14 additions & 4 deletions src/py_eddy_tracker/appli/eddies.py
@@ -243,7 +243,8 @@ def browse_dataset_in(
     filenames = bytes_(glob(full_path))
 
     dataset_list = empty(
-        len(filenames), dtype=[("filename", "S500"), ("date", "datetime64[s]")],
+        len(filenames),
+        dtype=[("filename", "S500"), ("date", "datetime64[s]")],
     )
     dataset_list["filename"] = filenames
 
@@ -371,7 +372,8 @@ def track(
 
     logger.info("Longer track saved have %d obs", c.nb_obs_by_tracks.max())
     logger.info(
-        "The mean length is %d observations for long track", c.nb_obs_by_tracks.mean(),
+        "The mean length is %d observations for long track",
+        c.nb_obs_by_tracks.mean(),
     )
 
     long_track.write_file(**kw_write)
@@ -381,7 +383,14 @@
 
 
 def get_group(
-    dataset1, dataset2, index1, index2, score, invalid=2, low=10, high=60,
+    dataset1,
+    dataset2,
+    index1,
+    index2,
+    score,
+    invalid=2,
+    low=10,
+    high=60,
 ):
     group1, group2 = dict(), dict()
     m_valid = (score * 100) >= invalid
@@ -490,7 +499,8 @@ def get_values(v, dataset):
     ]
 
     labels = dict(
-        high=f"{high:0.0f} <= high", low=f"{invalid:0.0f} <= low < {low:0.0f}",
+        high=f"{high:0.0f} <= high",
+        low=f"{invalid:0.0f} <= low < {low:0.0f}",
     )
 
     keys = [labels.get(key, key) for key in list(gr_ref.values())[0].keys()]
4 changes: 3 additions & 1 deletion src/py_eddy_tracker/appli/network.py
@@ -76,7 +76,9 @@ def subset_network():
         help="Remove short dead end, first is for minimal obs number and second for minimal segment time to keep",
     )
     parser.add_argument(
-        "--remove_trash", action="store_true", help="Remove trash (network id == 0)",
+        "--remove_trash",
+        action="store_true",
+        help="Remove trash (network id == 0)",
     )
     parser.add_argument(
         "-p",
19 changes: 9 additions & 10 deletions src/py_eddy_tracker/eddy_feature.py
@@ -61,13 +61,13 @@ def __init__(
         """
         Create amplitude object
 
-        :param Contours contour:
-        :param float contour_height:
-        :param array data:
-        :param float interval:
+        :param Contours contour: usefull class defined below
+        :param float contour_height: field value of the contour
+        :param array data: grid
+        :param float interval: step between two contours
         :param int mle: maximum number of local extrema in contour
-        :param int nb_step_min: number of intervals to consider an eddy
-        :param int nb_step_to_be_mle: number of intervals to be considered as an another maxima
+        :param int nb_step_min: minimum number of intervals to consider the contour as an eddy
+        :param int nb_step_to_be_mle: number of intervals to be considered as another extrema
         """
 
         # Height of the contour
@@ -116,8 +116,7 @@ def within_amplitude_limits(self):
     def all_pixels_below_h0(self, level):
         """
         Check CSS11 criterion 1: The SSH values of all of the pixels
-        are below (above) a given SSH threshold for cyclonic (anticyclonic)
-        eddies.
+        are below a given SSH threshold for cyclonic eddies.
         """
         # In some cases pixel value may be very close to the contour bounds
         if self.sla.mask.any() or ((self.sla.data - self.h_0) > self.EPSILON).any():
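Note: a toy illustration of the criterion tested above, for a cyclonic eddy (synthetic SLA pixel values; the EPSILON tolerance value is hypothetical):

import numpy as np

# SLA pixels inside a candidate contour and the contour height h_0
sla = np.ma.array([[-0.12, -0.10], [-0.08, -0.11]], mask=[[False, False], [False, False]])
h_0 = -0.05
EPSILON = 1e-3  # tolerance, since pixel values can sit very close to the contour

# Criterion 1: no masked pixel and every pixel below h_0 (within EPSILON)
criterion_ok = not (sla.mask.any() or ((sla.data - h_0) > EPSILON).any())
print(criterion_ok)  # True for this toy field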
@@ -602,8 +601,8 @@ def display(
             4. - Amplitude criterion (yellow)
         :param str field:
             Must be 'shape_error', 'x', 'y' or 'radius'.
-            If define display_criterion is not use.
-            bins argument must be define
+            If defined display_criterion is not use.
+            bins argument must be defined
         :param array bins: bins used to colorize contour
         :param str cmap: Name of cmap for field display
         :param dict kwargs: look at :py:meth:`matplotlib.collections.LineCollection`
37 changes: 31 additions & 6 deletions src/py_eddy_tracker/observations/groups.py
@@ -68,7 +68,7 @@ def get_missing_indices(
 
 def advect(x, y, c, t0, n_days):
     """
-    Advect particle from t0 to t0 + n_days, with data cube.
+    Advect particles from t0 to t0 + n_days, with data cube.
 
     :param np.array(float) x: longitude of particles
     :param np.array(float) y: latitude of particles
@@ -87,14 +87,26 @@ def advect(x, y, c, t0, n_days):
     return t, x, y
 
 
-def particle_candidate(c, eddies, step_mesh, t_start, i_target, pct, **kwargs):
+def particle_candidate(
+    c,
+    eddies,
+    step_mesh,
+    t_start,
+    i_target,
+    pct,
+    contour_start="speed",
+    contour_end="effective",
+    **kwargs
+):
     """Select particles within eddies, advect them, return target observation and associated percentages
 
     :param `~py_eddy_tracker.dataset.grid.GridCollection` c: GridCollection with speed for particles
     :param GroupEddiesObservations eddies: GroupEddiesObservations considered
     :param int t_start: julian day of the advection
     :param np.array(int) i_target: corresponding obs where particles are advected
     :param np.array(int) pct: corresponding percentage of avected particles
+    :param str contour_start: contour where particles are injected
+    :param str contour_end: contour where particles are counted after advection
     :params dict kwargs: dict of params given to `advect`
 
     """
@@ -105,7 +117,14 @@ def particle_candidate(c, eddies, step_mesh, t_start, i_target, pct, **kwargs):
     # to be able to get global index
     translate_start = where(m_start)[0]
 
-    x, y, i_start = e.create_particles(step_mesh)
+    # Create particles in specified contour
+    if contour_start == "speed":
+        x, y, i_start = e.create_particles(step_mesh, intern=True)
+    elif contour_start == "effective":
+        x, y, i_start = e.create_particles(step_mesh, intern=False)
+    else:
+        x, y, i_start = e.create_particles(step_mesh, intern=True)
+        print("The contour_start was not correct, speed contour is used")
 
     # Advection
     t_end, x, y = advect(x, y, c, t_start, **kwargs)
@@ -117,8 +136,14 @@ def particle_candidate(c, eddies, step_mesh, t_start, i_target, pct, **kwargs):
     # to be able to get global index
     translate_end = where(m_end)[0]
 
-    # Id eddies for each alive particle (in core and extern)
-    i_end = e_end.contains(x, y)
+    # Id eddies for each alive particle in specified contour
+    if contour_end == "speed":
+        i_end = e_end.contains(x, y, intern=True)
+    elif contour_end == "effective":
+        i_end = e_end.contains(x, y, intern=False)
+    else:
+        i_end = e_end.contains(x, y, intern=True)
+        print("The contour_end was not correct, speed contour is used")
 
     # compute matrix and fill target array
     get_matrix(i_start, i_end, translate_start, translate_end, i_target, pct)
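Note: a hedged usage sketch of the new contour_start / contour_end keywords (not taken from the commit; n, c, step and t_start reuse the names from pet_follow_particle.py above, the (n.obs.size, 2) shape follows that example, and n_days is forwarded to advect through **kwargs):

from numpy import ones, zeros

# Output arrays filled in place: target observation index and percentage of
# particles reaching it (the two columns follow the shape used in the example above)
shape = (n.obs.size, 2)
i_target, pct = -ones(shape, dtype="i4"), zeros(shape, dtype="i1")

# Inject particles in the effective contour, advect 14 days, count them in the speed contour
particle_candidate(
    c, n, step, t_start, i_target, pct,
    contour_start="effective", contour_end="speed", n_days=14,
)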
@@ -206,7 +231,7 @@ def filled_by_interpolation(self, mask):
         )
 
     def insert_virtual(self):
-        """insert virtual observations on segments where observations are missing"""
+        """Insert virtual observations on segments where observations are missing"""
 
         dt_theorical = median(self.time[1:] - self.time[:-1])
         indices = self.get_missing_indices(dt_theorical)