Skip to content

Commit ab3020f

Browse files
Merge pull request AntSimi#95 from AntSimi/time_float
Time float, increase of amplitude precision
2 parents 8e26230 + 43c2616 commit ab3020f

File tree

17 files changed

+136
-79
lines changed

17 files changed

+136
-79
lines changed

CHANGELOG.rst

Lines changed: 14 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -10,13 +10,25 @@ and this project adheres to `Semantic Versioning <https://semver.org/spec/v2.0.0
1010
------------
1111
Changed
1212
^^^^^^^
13+
14+
- Now time allows second precision (instead of daily precision) in storage on uint32 from 01/01/1950 to 01/01/2086
15+
New identifications are produced with this type, old files could still be loaded.
16+
If you use old identifications for tracking use the `--unraw` option to unpack old times and store data with the new format.
17+
- Now amplitude is stored with .1 mm of precision (instead of 1 mm), same advice as for time.
18+
1319
Fixed
1420
^^^^^
15-
- GridCollection get_next_time_step & get_previous_time_step needed more files to work in the dataset list.
16-
The loop needed explicitly self.dataset[i+-1] even when i==0, therefore the index went out of range
21+
1722
Added
1823
^^^^^
1924

25+
[3.5.0] - 2021-06-22
26+
--------------------
27+
28+
Fixed
29+
^^^^^
30+
- GridCollection get_next_time_step & get_previous_time_step needed more files to work in the dataset list.
31+
The loop needed explicitly self.dataset[i+-1] even when i==0, therefore the index went out of range
2032

2133
[3.4.0] - 2021-03-29
2234
--------------------

doc/run_tracking.rst

Lines changed: 7 additions & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -5,9 +5,9 @@ Tracking
55
Requirements
66
************
77

8-
Before to run tracking, you will need to run identification on every time step of the period (period of your study).
8+
Before tracking, you will need to run identification on every time step of the period (period of your study).
99

10-
**Advice** : Before to run tracking, displaying some identification file allows to learn a lot
10+
**Advice** : Before tracking, display some identification files. You will learn a lot
1111

1212
Default method
1313
**************
@@ -24,9 +24,9 @@ Example of conf.yaml
2424
FILES_PATTERN: MY_IDENTIFICATION_PATH/Anticyclonic*.nc
2525
SAVE_DIR: MY_OUTPUT_PATH
2626
27-
# Number of timestep for missing detection
27+
# Number of consecutive timesteps with missing detection allowed
2828
VIRTUAL_LENGTH_MAX: 3
29-
# Minimal time to consider as a full track
29+
# Minimal number of timesteps to be considered as a long trajectory
3030
TRACK_DURATION_MIN: 10
3131
3232
To run:
@@ -63,13 +63,13 @@ With yaml you could also select another tracker:
6363
.. code-block:: yaml
6464
6565
PATHS:
66-
# Files produces with EddyIdentification
66+
# Files produced with EddyIdentification
6767
FILES_PATTERN: MY/IDENTIFICATION_PATH/Anticyclonic*.nc
6868
SAVE_DIR: MY_OUTPUT_PATH
6969
70-
# Number of timesteps for missing detection
70+
# Number of consecutive timesteps with missing detection allowed
7171
VIRTUAL_LENGTH_MAX: 3
72-
# Minimal time to consider as a full track
72+
# Minimal number of timesteps to be considered as a long trajectory
7373
TRACK_DURATION_MIN: 10
7474
7575
CLASS:

examples/06_grid_manipulation/pet_okubo_weiss.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,6 @@
11
r"""
22
Get Okubo Weis
3-
=====================
3+
==============
44
55
.. math:: OW = S_n^2 + S_s^2 + \omega^2
66

examples/16_network/pet_follow_particle.py

Lines changed: 7 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -133,12 +133,12 @@ def update(frame):
133133
shape = (n.obs.size, 2)
134134
# Forward run
135135
i_target_f, pct_target_f = -ones(shape, dtype="i4"), zeros(shape, dtype="i1")
136-
for t in range(t_start, t_end - dt):
136+
for t in arange(t_start, t_end - dt):
137137
particle_candidate(c, n, step, t, i_target_f, pct_target_f, n_days=dt)
138138

139139
# Backward run
140140
i_target_b, pct_target_b = -ones(shape, dtype="i4"), zeros(shape, dtype="i1")
141-
for t in range(t_start + dt, t_end):
141+
for t in arange(t_start + dt, t_end):
142142
particle_candidate(c, n, step, t, i_target_b, pct_target_b, n_days=-dt)
143143

144144
# %%
@@ -149,6 +149,11 @@ def update(frame):
149149
ax_2nd_f = fig.add_axes([0.52, 0.05, 0.45, 0.45])
150150
ax_1st_b.set_title("Backward advection for each time step")
151151
ax_1st_f.set_title("Forward advection for each time step")
152+
ax_1st_b.set_ylabel("Color -> First target\nLatitude")
153+
ax_2nd_b.set_ylabel("Color -> Secondary target\nLatitude")
154+
ax_2nd_b.set_xlabel("Julian days"), ax_2nd_f.set_xlabel("Julian days")
155+
ax_1st_f.set_yticks([]), ax_2nd_f.set_yticks([])
156+
ax_1st_f.set_xticks([]), ax_1st_b.set_xticks([])
152157

153158

154159
def color_alpha(target, pct, vmin=5, vmax=80):

notebooks/python_module/16_network/pet_follow_particle.ipynb

Lines changed: 7 additions & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -15,7 +15,7 @@
1515
"cell_type": "markdown",
1616
"metadata": {},
1717
"source": [
18-
"\n# Follow particle\n"
18+
"\nFollow particle\n===============\n"
1919
]
2020
},
2121
{
@@ -55,7 +55,7 @@
5555
"cell_type": "markdown",
5656
"metadata": {},
5757
"source": [
58-
"## Schema\n\n"
58+
"Schema\n------\n\n"
5959
]
6060
},
6161
{
@@ -73,7 +73,7 @@
7373
"cell_type": "markdown",
7474
"metadata": {},
7575
"source": [
76-
"## Animation\nParticle settings\n\n"
76+
"Animation\n---------\nParticle settings\n\n"
7777
]
7878
},
7979
{
@@ -109,7 +109,7 @@
109109
"cell_type": "markdown",
110110
"metadata": {},
111111
"source": [
112-
"### Particle advection\n\n"
112+
"Particle advection\n^^^^^^^^^^^^^^^^^^\n\n"
113113
]
114114
},
115115
{
@@ -120,7 +120,7 @@
120120
},
121121
"outputs": [],
122122
"source": [
123-
"step = 1 / 60.0\n\nx, y = meshgrid(arange(24, 36, step), arange(31, 36, step))\nx0, y0 = x.reshape(-1), y.reshape(-1)\n# Pre-order to speed up\n_, i = group_obs(x0, y0, 1, 360)\nx0, y0 = x0[i], y0[i]\n\nt_start, t_end = n.period\ndt = 14\n\nshape = (n.obs.size, 2)\n# Forward run\ni_target_f, pct_target_f = -ones(shape, dtype=\"i4\"), zeros(shape, dtype=\"i1\")\nfor t in range(t_start, t_end - dt):\n particle_candidate(x0, y0, c, n, t, i_target_f, pct_target_f, n_days=dt)\n\n# Backward run\ni_target_b, pct_target_b = -ones(shape, dtype=\"i4\"), zeros(shape, dtype=\"i1\")\nfor t in range(t_start + dt, t_end):\n particle_candidate(x0, y0, c, n, t, i_target_b, pct_target_b, n_days=-dt)"
123+
"step = 1 / 60.0\n\nx, y = meshgrid(arange(24, 36, step), arange(31, 36, step))\nx0, y0 = x.reshape(-1), y.reshape(-1)\n# Pre-order to speed up\n_, i = group_obs(x0, y0, 1, 360)\nx0, y0 = x0[i], y0[i]\n\nt_start, t_end = n.period\ndt = 14\n\nshape = (n.obs.size, 2)\n# Forward run\ni_target_f, pct_target_f = -ones(shape, dtype=\"i4\"), zeros(shape, dtype=\"i1\")\nfor t in arange(t_start, t_end - dt):\n particle_candidate(x0, y0, c, n, t, i_target_f, pct_target_f, n_days=dt)\n\n# Backward run\ni_target_b, pct_target_b = -ones(shape, dtype=\"i4\"), zeros(shape, dtype=\"i1\")\nfor t in arange(t_start + dt, t_end):\n particle_candidate(x0, y0, c, n, t, i_target_b, pct_target_b, n_days=-dt)"
124124
]
125125
},
126126
{
@@ -131,7 +131,7 @@
131131
},
132132
"outputs": [],
133133
"source": [
134-
"fig = plt.figure(figsize=(10, 10))\nax_1st_b = fig.add_axes([0.05, 0.52, 0.45, 0.45])\nax_2nd_b = fig.add_axes([0.05, 0.05, 0.45, 0.45])\nax_1st_f = fig.add_axes([0.52, 0.52, 0.45, 0.45])\nax_2nd_f = fig.add_axes([0.52, 0.05, 0.45, 0.45])\nax_1st_b.set_title(\"Backward advection for each time step\")\nax_1st_f.set_title(\"Forward advection for each time step\")\n\n\ndef color_alpha(target, pct, vmin=5, vmax=80):\n color = cmap(n.segment[target])\n # We will hide under 5 % and from 80% to 100 % it will be 1\n alpha = (pct - vmin) / (vmax - vmin)\n alpha[alpha < 0] = 0\n alpha[alpha > 1] = 1\n color[:, 3] = alpha\n return color\n\n\nkw = dict(\n name=None, yfield=\"longitude\", event=False, zorder=-100, s=(n.speed_area / 20e6)\n)\nn.scatter_timeline(ax_1st_b, c=color_alpha(i_target_b.T[0], pct_target_b.T[0]), **kw)\nn.scatter_timeline(ax_2nd_b, c=color_alpha(i_target_b.T[1], pct_target_b.T[1]), **kw)\nn.scatter_timeline(ax_1st_f, c=color_alpha(i_target_f.T[0], pct_target_f.T[0]), **kw)\nn.scatter_timeline(ax_2nd_f, c=color_alpha(i_target_f.T[1], pct_target_f.T[1]), **kw)\nfor ax in (ax_1st_b, ax_2nd_b, ax_1st_f, ax_2nd_f):\n n.display_timeline(ax, field=\"longitude\", marker=\"+\", lw=2, markersize=5)\n ax.grid()"
134+
"fig = plt.figure(figsize=(10, 10))\nax_1st_b = fig.add_axes([0.05, 0.52, 0.45, 0.45])\nax_2nd_b = fig.add_axes([0.05, 0.05, 0.45, 0.45])\nax_1st_f = fig.add_axes([0.52, 0.52, 0.45, 0.45])\nax_2nd_f = fig.add_axes([0.52, 0.05, 0.45, 0.45])\nax_1st_b.set_title(\"Backward advection for each time step\")\nax_1st_f.set_title(\"Forward advection for each time step\")\nax_1st_b.set_ylabel(\"Color -> First target\\nLatitude\")\nax_2nd_b.set_ylabel(\"Color -> Secondary target\\nLatitude\")\nax_2nd_b.set_xlabel(\"Julian days\"), ax_2nd_f.set_xlabel(\"Julian days\")\nax_1st_f.set_yticks([]), ax_2nd_f.set_yticks([])\nax_1st_f.set_xticks([]), ax_1st_b.set_xticks([])\n\n\ndef color_alpha(target, pct, vmin=5, vmax=80):\n color = cmap(n.segment[target])\n # We will hide under 5 % and from 80% to 100 % it will be 1\n alpha = (pct - vmin) / (vmax - vmin)\n alpha[alpha < 0] = 0\n alpha[alpha > 1] = 1\n color[:, 3] = alpha\n return color\n\n\nkw = dict(\n name=None, yfield=\"longitude\", event=False, zorder=-100, s=(n.speed_area / 20e6)\n)\nn.scatter_timeline(ax_1st_b, c=color_alpha(i_target_b.T[0], pct_target_b.T[0]), **kw)\nn.scatter_timeline(ax_2nd_b, c=color_alpha(i_target_b.T[1], pct_target_b.T[1]), **kw)\nn.scatter_timeline(ax_1st_f, c=color_alpha(i_target_f.T[0], pct_target_f.T[0]), **kw)\nn.scatter_timeline(ax_2nd_f, c=color_alpha(i_target_f.T[1], pct_target_f.T[1]), **kw)\nfor ax in (ax_1st_b, ax_2nd_b, ax_1st_f, ax_2nd_f):\n n.display_timeline(ax, field=\"longitude\", marker=\"+\", lw=2, markersize=5)\n ax.grid()"
135135
]
136136
}
137137
],
@@ -151,7 +151,7 @@
151151
"name": "python",
152152
"nbconvert_exporter": "python",
153153
"pygments_lexer": "ipython3",
154-
"version": "3.7.9"
154+
"version": "3.7.7"
155155
}
156156
},
157157
"nbformat": 4,

notebooks/python_module/16_network/pet_segmentation_anim.ipynb

Lines changed: 7 additions & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -15,7 +15,7 @@
1515
"cell_type": "markdown",
1616
"metadata": {},
1717
"source": [
18-
"\n# Network segmentation process\n"
18+
"\nNetwork segmentation process\n============================\n"
1919
]
2020
},
2121
{
@@ -62,7 +62,7 @@
6262
"cell_type": "markdown",
6363
"metadata": {},
6464
"source": [
65-
"## Load data\nLoad data where observations are put in same network but no segmentation\n\n"
65+
"Load data\n---------\nLoad data where observations are put in same network but no segmentation\n\n"
6666
]
6767
},
6868
{
@@ -80,7 +80,7 @@
8080
"cell_type": "markdown",
8181
"metadata": {},
8282
"source": [
83-
"## Do segmentation\nSegmentation based on maximum overlap, temporal window for candidates = 5 days\n\n"
83+
"Do segmentation\n---------------\nSegmentation based on maximum overlap, temporal window for candidates = 5 days\n\n"
8484
]
8585
},
8686
{
@@ -98,7 +98,7 @@
9898
"cell_type": "markdown",
9999
"metadata": {},
100100
"source": [
101-
"## Anim\n\n"
101+
"Anim\n----\n\n"
102102
]
103103
},
104104
{
@@ -109,14 +109,14 @@
109109
},
110110
"outputs": [],
111111
"source": [
112-
"def update(i_frame):\n tr = TRACKS[i_frame]\n mappable_tracks.set_array(tr)\n s = 40 * ones(tr.shape)\n s[tr == 0] = 4\n mappable_tracks.set_sizes(s)\n\n indices_frames = INDICES[i_frame]\n mappable_CONTOUR.set_data(\n e.contour_lon_e[indices_frames],\n e.contour_lat_e[indices_frames],\n )\n mappable_CONTOUR.set_color(cmap.colors[tr[indices_frames] % len(cmap.colors)])\n return (mappable_tracks,)\n\n\nfig = plt.figure(figsize=(16, 9), dpi=60)\nax = fig.add_axes([0.04, 0.06, 0.94, 0.88], projection=GUI_AXES)\nax.set_title(f\"{len(e)} observations to segment\")\nax.set_xlim(19, 29), ax.set_ylim(31, 35.5), ax.grid()\nvmax = TRACKS[-1].max()\ncmap = ListedColormap([\"gray\", *e.COLORS[:-1]], name=\"from_list\", N=vmax)\nmappable_tracks = ax.scatter(\n e.lon, e.lat, c=TRACKS[0], cmap=cmap, vmin=0, vmax=vmax, s=20\n)\nmappable_CONTOUR = ax.plot(\n e.contour_lon_e[INDICES[0]], e.contour_lat_e[INDICES[0]], color=cmap.colors[0]\n)[0]\nani = VideoAnimation(fig, update, frames=range(1, len(TRACKS), 4), interval=125)"
112+
"def update(i_frame):\n tr = TRACKS[i_frame]\n mappable_tracks.set_array(tr)\n s = 40 * ones(tr.shape)\n s[tr == 0] = 4\n mappable_tracks.set_sizes(s)\n\n indices_frames = INDICES[i_frame]\n mappable_CONTOUR.set_data(\n e.contour_lon_e[indices_frames], e.contour_lat_e[indices_frames],\n )\n mappable_CONTOUR.set_color(cmap.colors[tr[indices_frames] % len(cmap.colors)])\n return (mappable_tracks,)\n\n\nfig = plt.figure(figsize=(16, 9), dpi=60)\nax = fig.add_axes([0.04, 0.06, 0.94, 0.88], projection=GUI_AXES)\nax.set_title(f\"{len(e)} observations to segment\")\nax.set_xlim(19, 29), ax.set_ylim(31, 35.5), ax.grid()\nvmax = TRACKS[-1].max()\ncmap = ListedColormap([\"gray\", *e.COLORS[:-1]], name=\"from_list\", N=vmax)\nmappable_tracks = ax.scatter(\n e.lon, e.lat, c=TRACKS[0], cmap=cmap, vmin=0, vmax=vmax, s=20\n)\nmappable_CONTOUR = ax.plot(\n e.contour_lon_e[INDICES[0]], e.contour_lat_e[INDICES[0]], color=cmap.colors[0]\n)[0]\nani = VideoAnimation(fig, update, frames=range(1, len(TRACKS), 4), interval=125)"
113113
]
114114
},
115115
{
116116
"cell_type": "markdown",
117117
"metadata": {},
118118
"source": [
119-
"## Final Result\n\n"
119+
"Final Result\n------------\n\n"
120120
]
121121
},
122122
{
@@ -147,7 +147,7 @@
147147
"name": "python",
148148
"nbconvert_exporter": "python",
149149
"pygments_lexer": "ipython3",
150-
"version": "3.7.9"
150+
"version": "3.7.7"
151151
}
152152
},
153153
"nbformat": 4,

share/tracking.yaml

Lines changed: 5 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -1,12 +1,13 @@
11
PATHS:
2-
# Files produces with EddyIdentification
2+
# Files produced with EddyIdentification
33
FILES_PATTERN: /home/emason/toto/Anticyclonic_*.nc
4-
# Path for saving of outputs
4+
# Path to save outputs
55
SAVE_DIR: '/home/emason/toto/'
66

7-
# Minimum number of observations to store eddy
8-
TRACK_DURATION_MIN: 4
7+
# Number of consecutive timesteps with missing detection allowed
98
VIRTUAL_LENGTH_MAX: 0
9+
# Minimal number of timesteps to be considered as a long trajectory
10+
TRACK_DURATION_MIN: 4
1011

1112
CLASS:
1213
MODULE: py_eddy_tracker.featured_tracking.area_tracker

src/py_eddy_tracker/__init__.py

Lines changed: 17 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -22,6 +22,7 @@
2222

2323
import logging
2424
from argparse import ArgumentParser
25+
from datetime import datetime
2526

2627
import zarr
2728

@@ -106,12 +107,26 @@ def parse_args(self, *args, **kwargs):
106107
return opts
107108

108109

110+
TIME_MODELS = ["%Y%m%d", "%Y%m%d%H%M%S", "%Y%m%dT%H%M%S"]
111+
112+
113+
def identify_time(str_date):
114+
for model in TIME_MODELS:
115+
try:
116+
return datetime.strptime(str_date, model)
117+
except ValueError:
118+
pass
119+
raise Exception("No time model found")
120+
121+
109122
VAR_DESCR = dict(
110123
time=dict(
111124
attr_name="time",
112125
nc_name="time",
113126
old_nc_name=["j1"],
114-
nc_type="int32",
127+
nc_type="float64",
128+
output_type="uint32",
129+
scale_factor=1 / 86400.0,
115130
nc_dims=("obs",),
116131
nc_attr=dict(
117132
standard_name="time",
@@ -251,7 +266,7 @@ def parse_args(self, *args, **kwargs):
251266
old_nc_name=["A"],
252267
nc_type="float32",
253268
output_type="uint16",
254-
scale_factor=0.001,
269+
scale_factor=0.0001,
255270
nc_dims=("obs",),
256271
nc_attr=dict(
257272
long_name="Amplitude",

src/py_eddy_tracker/appli/eddies.py

Lines changed: 22 additions & 18 deletions
Original file line numberDiff line numberDiff line change
@@ -15,7 +15,7 @@
1515
from numpy import bincount, bytes_, empty, in1d, unique
1616
from yaml import safe_load
1717

18-
from .. import EddyParser
18+
from .. import EddyParser, identify_time
1919
from ..observations.observation import EddiesObservations, reverse_index
2020
from ..observations.tracking import TrackEddiesObservations
2121
from ..tracking import Correspondances
@@ -163,7 +163,12 @@ def eddies_tracking():
163163
parser.add_argument(
164164
"--zarr", action="store_true", help="Output will be wrote in zarr"
165165
)
166-
parser.add_argument("--unraw", action="store_true", help="Load unraw data")
166+
parser.add_argument(
167+
"--unraw",
168+
action="store_true",
169+
help="Load unraw data, use only for netcdf."
170+
"If unraw is active, netcdf is loaded without apply scalefactor and add_offset.",
171+
)
167172
parser.add_argument(
168173
"--blank_period",
169174
type=int,
@@ -223,7 +228,7 @@ def browse_dataset_in(
223228
data_dir,
224229
files_model,
225230
date_regexp,
226-
date_model,
231+
date_model=None,
227232
start_date=None,
228233
end_date=None,
229234
sub_sampling_step=1,
@@ -238,11 +243,7 @@ def browse_dataset_in(
238243
filenames = bytes_(glob(full_path))
239244

240245
dataset_list = empty(
241-
len(filenames),
242-
dtype=[
243-
("filename", "S500"),
244-
("date", "datetime64[D]"),
245-
],
246+
len(filenames), dtype=[("filename", "S500"), ("date", "datetime64[s]")],
246247
)
247248
dataset_list["filename"] = filenames
248249

@@ -268,13 +269,15 @@ def browse_dataset_in(
268269
str_date = result.groups()[0]
269270

270271
if str_date is not None:
271-
item["date"] = datetime.strptime(str_date, date_model).date()
272+
if date_model is None:
273+
item["date"] = identify_time(str_date)
274+
else:
275+
item["date"] = datetime.strptime(str_date, date_model)
272276

273277
dataset_list.sort(order=["date", "filename"])
274-
275278
steps = unique(dataset_list["date"][1:] - dataset_list["date"][:-1])
276279
if len(steps) > 1:
277-
raise Exception("Several days steps in grid dataset %s" % steps)
280+
raise Exception("Several timesteps in grid dataset %s" % steps)
278281

279282
if sub_sampling_step != 1:
280283
logger.info("Grid subsampling %d", sub_sampling_step)
@@ -304,7 +307,7 @@ def track(
304307
correspondances_only=False,
305308
**kw_c,
306309
):
307-
kw = dict(date_regexp=".*_([0-9]*?).[nz].*", date_model="%Y%m%d")
310+
kw = dict(date_regexp=".*_([0-9]*?).[nz].*")
308311
if isinstance(pattern, list):
309312
kw.update(dict(data_dir=None, files_model=None, files=pattern))
310313
else:
@@ -323,10 +326,9 @@ def track(
323326
c = Correspondances(datasets=datasets["filename"], **kw_c)
324327
c.track()
325328
logger.info("Track finish")
326-
t0, t1 = c.period
327329
kw_save = dict(
328-
date_start=t0,
329-
date_stop=t1,
330+
date_start=datasets["date"][0],
331+
date_stop=datasets["date"][-1],
330332
date_prod=datetime.now(),
331333
path=output_dir,
332334
sign_type=c.current_obs.sign_legend,
@@ -351,11 +353,13 @@ def track(
351353

352354
short_c = c._copy()
353355
short_c.shorter_than(size_max=nb_obs_min)
354-
c.longer_than(size_min=nb_obs_min)
355-
356-
long_track = c.merge(raw_data=raw)
357356
short_track = short_c.merge(raw_data=raw)
358357

358+
if c.longer_than(size_min=nb_obs_min) is False:
359+
long_track = short_track.empty_dataset()
360+
else:
361+
long_track = c.merge(raw_data=raw)
362+
359363
# We flag obs
360364
if c.virtual:
361365
long_track["virtual"][:] = long_track["time"] == 0

0 commit comments

Comments
 (0)