Skip to content
Merged
Show file tree
Hide file tree
Changes from 1 commit
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
4 changes: 4 additions & 0 deletions CHANGELOG.rst
Original file line number Diff line number Diff line change
Expand Up @@ -10,6 +10,10 @@ and this project adheres to `Semantic Versioning <https://semver.org/spec/v2.0.0
------------
Changed
^^^^^^^
- Time is now stored with second precision on uint32, covering 01/01/1950 to 01/01/2086.
  New identifications will be produced with this type; old files can still be loaded.
  If you use old identifications for tracking, use the `--unraw` option to unpack the old time values and store them in the new format.

Fixed
^^^^^
- GridCollection get_next_time_step & get_previous_time_step needed more files to work in the dataset list.
Expand Down
4 changes: 2 additions & 2 deletions doc/run_tracking.rst
Original file line number Diff line number Diff line change
Expand Up @@ -26,7 +26,7 @@ Example of conf.yaml

# Number of timestep for missing detection
VIRTUAL_LENGTH_MAX: 3
# Minimal time to consider as a full track
# Minimal number of timesteps to consider as a long track
TRACK_DURATION_MIN: 10

To run:
Expand Down Expand Up @@ -69,7 +69,7 @@ With yaml you could also select another tracker:

# Number of timesteps for missing detection
VIRTUAL_LENGTH_MAX: 3
# Minimal time to consider as a full track
# Minimal number of timesteps to consider as a long track
TRACK_DURATION_MIN: 10

CLASS:
Expand Down
2 changes: 1 addition & 1 deletion examples/06_grid_manipulation/pet_okubo_weiss.py
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
r"""
Get Okubo Weis
=====================
==============

.. math:: OW = S_n^2 + S_s^2 + \omega^2

Expand Down
5 changes: 5 additions & 0 deletions examples/16_network/pet_follow_particle.py
Original file line number Diff line number Diff line change
Expand Up @@ -149,6 +149,11 @@ def update(frame):
ax_2nd_f = fig.add_axes([0.52, 0.05, 0.45, 0.45])
ax_1st_b.set_title("Backward advection for each time step")
ax_1st_f.set_title("Forward advection for each time step")
ax_1st_b.set_ylabel("Color -> First target\nLatitude")
ax_2nd_b.set_ylabel("Color -> Secondary target\nLatitude")
ax_2nd_b.set_xlabel("Julian days"), ax_2nd_f.set_xlabel("Julian days")
ax_1st_f.set_yticks([]), ax_2nd_f.set_yticks([])
ax_1st_f.set_xticks([]), ax_1st_b.set_xticks([])


def color_alpha(target, pct, vmin=5, vmax=80):
Expand Down
12 changes: 6 additions & 6 deletions notebooks/python_module/16_network/pet_follow_particle.ipynb
Original file line number Diff line number Diff line change
Expand Up @@ -15,7 +15,7 @@
"cell_type": "markdown",
"metadata": {},
"source": [
"\n# Follow particle\n"
"\nFollow particle\n===============\n"
]
},
{
Expand Down Expand Up @@ -55,7 +55,7 @@
"cell_type": "markdown",
"metadata": {},
"source": [
"## Schema\n\n"
"Schema\n------\n\n"
]
},
{
Expand All @@ -73,7 +73,7 @@
"cell_type": "markdown",
"metadata": {},
"source": [
"## Animation\nParticle settings\n\n"
"Animation\n---------\nParticle settings\n\n"
]
},
{
Expand Down Expand Up @@ -109,7 +109,7 @@
"cell_type": "markdown",
"metadata": {},
"source": [
"### Particle advection\n\n"
"Particle advection\n^^^^^^^^^^^^^^^^^^\n\n"
]
},
{
Expand All @@ -131,7 +131,7 @@
},
"outputs": [],
"source": [
"fig = plt.figure(figsize=(10, 10))\nax_1st_b = fig.add_axes([0.05, 0.52, 0.45, 0.45])\nax_2nd_b = fig.add_axes([0.05, 0.05, 0.45, 0.45])\nax_1st_f = fig.add_axes([0.52, 0.52, 0.45, 0.45])\nax_2nd_f = fig.add_axes([0.52, 0.05, 0.45, 0.45])\nax_1st_b.set_title(\"Backward advection for each time step\")\nax_1st_f.set_title(\"Forward advection for each time step\")\n\n\ndef color_alpha(target, pct, vmin=5, vmax=80):\n color = cmap(n.segment[target])\n # We will hide under 5 % and from 80% to 100 % it will be 1\n alpha = (pct - vmin) / (vmax - vmin)\n alpha[alpha < 0] = 0\n alpha[alpha > 1] = 1\n color[:, 3] = alpha\n return color\n\n\nkw = dict(\n name=None, yfield=\"longitude\", event=False, zorder=-100, s=(n.speed_area / 20e6)\n)\nn.scatter_timeline(ax_1st_b, c=color_alpha(i_target_b.T[0], pct_target_b.T[0]), **kw)\nn.scatter_timeline(ax_2nd_b, c=color_alpha(i_target_b.T[1], pct_target_b.T[1]), **kw)\nn.scatter_timeline(ax_1st_f, c=color_alpha(i_target_f.T[0], pct_target_f.T[0]), **kw)\nn.scatter_timeline(ax_2nd_f, c=color_alpha(i_target_f.T[1], pct_target_f.T[1]), **kw)\nfor ax in (ax_1st_b, ax_2nd_b, ax_1st_f, ax_2nd_f):\n n.display_timeline(ax, field=\"longitude\", marker=\"+\", lw=2, markersize=5)\n ax.grid()"
"fig = plt.figure(figsize=(10, 10))\nax_1st_b = fig.add_axes([0.05, 0.52, 0.45, 0.45])\nax_2nd_b = fig.add_axes([0.05, 0.05, 0.45, 0.45])\nax_1st_f = fig.add_axes([0.52, 0.52, 0.45, 0.45])\nax_2nd_f = fig.add_axes([0.52, 0.05, 0.45, 0.45])\nax_1st_b.set_title(\"Backward advection for each time step\")\nax_1st_f.set_title(\"Forward advection for each time step\")\nax_1st_b.set_ylabel(\"Color -> First target\\nLatitude\")\nax_2nd_b.set_ylabel(\"Color -> Secondary target\\nLatitude\")\nax_2nd_b.set_xlabel(\"Julian days\"), ax_2nd_f.set_xlabel(\"Julian days\")\nax_1st_f.set_yticks([]), ax_2nd_f.set_yticks([])\nax_1st_f.set_xticks([]), ax_1st_b.set_xticks([])\n\n\ndef color_alpha(target, pct, vmin=5, vmax=80):\n color = cmap(n.segment[target])\n # We will hide under 5 % and from 80% to 100 % it will be 1\n alpha = (pct - vmin) / (vmax - vmin)\n alpha[alpha < 0] = 0\n alpha[alpha > 1] = 1\n color[:, 3] = alpha\n return color\n\n\nkw = dict(\n name=None, yfield=\"longitude\", event=False, zorder=-100, s=(n.speed_area / 20e6)\n)\nn.scatter_timeline(ax_1st_b, c=color_alpha(i_target_b.T[0], pct_target_b.T[0]), **kw)\nn.scatter_timeline(ax_2nd_b, c=color_alpha(i_target_b.T[1], pct_target_b.T[1]), **kw)\nn.scatter_timeline(ax_1st_f, c=color_alpha(i_target_f.T[0], pct_target_f.T[0]), **kw)\nn.scatter_timeline(ax_2nd_f, c=color_alpha(i_target_f.T[1], pct_target_f.T[1]), **kw)\nfor ax in (ax_1st_b, ax_2nd_b, ax_1st_f, ax_2nd_f):\n n.display_timeline(ax, field=\"longitude\", marker=\"+\", lw=2, markersize=5)\n ax.grid()"
]
}
],
Expand All @@ -151,7 +151,7 @@
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.7.9"
"version": "3.7.7"
}
},
"nbformat": 4,
Expand Down
7 changes: 6 additions & 1 deletion src/py_eddy_tracker/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -106,12 +106,17 @@ def parse_args(self, *args, **kwargs):
return opts


TIME_MODELS = ["%Y%m%d", "%Y%m%d%H%M%S", "%Y%m%dT%H%M%S"]


VAR_DESCR = dict(
time=dict(
attr_name="time",
nc_name="time",
old_nc_name=["j1"],
nc_type="int32",
nc_type="float64",
output_type="uint32",
scale_factor=1 / 86400.0,
nc_dims=("obs",),
nc_attr=dict(
standard_name="time",
Expand Down
40 changes: 24 additions & 16 deletions src/py_eddy_tracker/appli/eddies.py
Original file line number Diff line number Diff line change
Expand Up @@ -15,7 +15,7 @@
from numpy import bincount, bytes_, empty, in1d, unique
from yaml import safe_load

from .. import EddyParser
from .. import TIME_MODELS, EddyParser
from ..observations.observation import EddiesObservations, reverse_index
from ..observations.tracking import TrackEddiesObservations
from ..tracking import Correspondances
Expand Down Expand Up @@ -223,7 +223,7 @@ def browse_dataset_in(
data_dir,
files_model,
date_regexp,
date_model,
date_model=None,
start_date=None,
end_date=None,
sub_sampling_step=1,
Expand All @@ -238,11 +238,7 @@ def browse_dataset_in(
filenames = bytes_(glob(full_path))

dataset_list = empty(
len(filenames),
dtype=[
("filename", "S500"),
("date", "datetime64[D]"),
],
len(filenames), dtype=[("filename", "S500"), ("date", "datetime64[s]")],
)
dataset_list["filename"] = filenames

Expand All @@ -268,10 +264,21 @@ def browse_dataset_in(
str_date = result.groups()[0]

if str_date is not None:
item["date"] = datetime.strptime(str_date, date_model).date()
if date_model is None:
model_found = False
for model in TIME_MODELS:
try:
item["date"] = datetime.strptime(str_date, model)
model_found = True
break
except ValueError:
pass
if not model_found:
raise Exception("No time model found")
else:
item["date"] = datetime.strptime(str_date, date_model)

dataset_list.sort(order=["date", "filename"])

steps = unique(dataset_list["date"][1:] - dataset_list["date"][:-1])
if len(steps) > 1:
raise Exception("Several days steps in grid dataset %s" % steps)
Expand Down Expand Up @@ -304,7 +311,7 @@ def track(
correspondances_only=False,
**kw_c,
):
kw = dict(date_regexp=".*_([0-9]*?).[nz].*", date_model="%Y%m%d")
kw = dict(date_regexp=".*_([0-9]*?).[nz].*")
if isinstance(pattern, list):
kw.update(dict(data_dir=None, files_model=None, files=pattern))
else:
Expand All @@ -323,10 +330,9 @@ def track(
c = Correspondances(datasets=datasets["filename"], **kw_c)
c.track()
logger.info("Track finish")
t0, t1 = c.period
kw_save = dict(
date_start=t0,
date_stop=t1,
date_start=datasets["date"][0],
date_stop=datasets["date"][-1],
date_prod=datetime.now(),
path=output_dir,
sign_type=c.current_obs.sign_legend,
Expand All @@ -351,11 +357,13 @@ def track(

short_c = c._copy()
short_c.shorter_than(size_max=nb_obs_min)
c.longer_than(size_min=nb_obs_min)

long_track = c.merge(raw_data=raw)
short_track = short_c.merge(raw_data=raw)

if c.longer_than(size_min=nb_obs_min) is False:
long_track = short_track.empty_dataset()
else:
long_track = c.merge(raw_data=raw)

# We flag obs
if c.virtual:
long_track["virtual"][:] = long_track["time"] == 0
Expand Down
15 changes: 12 additions & 3 deletions src/py_eddy_tracker/appli/grid.py
Original file line number Diff line number Diff line change
Expand Up @@ -5,7 +5,7 @@
from argparse import Action
from datetime import datetime

from .. import EddyParser
from .. import TIME_MODELS, EddyParser
from ..dataset.grid import RegularGridDataset, UnRegularGridDataset


Expand Down Expand Up @@ -121,7 +121,16 @@ def eddy_id(args=None):
cut_wavelength = [0, *cut_wavelength]
inf_bnds, upper_bnds = cut_wavelength

date = datetime.strptime(args.datetime, "%Y%m%d")
model_found = False
for model in TIME_MODELS:
try:
date = datetime.strptime(args.datetime, model)
model_found = True
break
except ValueError:
pass
if not model_found:
raise Exception("No time model found")
kwargs = dict(
step=args.isoline_step,
shape_error=args.fit_errmax,
Expand Down Expand Up @@ -150,7 +159,7 @@ def eddy_id(args=None):
sampling_method=args.sampling_method,
**kwargs,
)
out_name = date.strftime("%(path)s/%(sign_type)s_%Y%m%d.nc")
out_name = date.strftime("%(path)s/%(sign_type)s_%Y%m%dT%H%M%S.nc")
a.write_file(path=args.path_out, filename=out_name, zarr_flag=args.zarr)
c.write_file(path=args.path_out, filename=out_name, zarr_flag=args.zarr)

Expand Down
Binary file modified src/py_eddy_tracker/data/Anticyclonic_20190223.nc
Binary file not shown.
4 changes: 2 additions & 2 deletions src/py_eddy_tracker/gui.py
Original file line number Diff line number Diff line change
Expand Up @@ -291,8 +291,8 @@ def get_infos(self, name, index):
i_first = d.index_from_track[tr]
track = d.obs[i_first : i_first + nb]
nb -= 1
t0 = timedelta(days=int(track[0]["time"])) + datetime(1950, 1, 1)
t1 = timedelta(days=int(track[-1]["time"])) + datetime(1950, 1, 1)
t0 = timedelta(days=track[0]["time"]) + datetime(1950, 1, 1)
t1 = timedelta(days=track[-1]["time"]) + datetime(1950, 1, 1)
txt = f"--{name}--\n"
txt += f" {t0} -> {t1}\n"
txt += f" Tracks : {tr} {now['n']}/{nb} ({now['n'] / nb * 100:.2f} %)\n"
Expand Down
4 changes: 2 additions & 2 deletions src/py_eddy_tracker/observations/groups.py
Original file line number Diff line number Diff line change
Expand Up @@ -186,15 +186,15 @@ def filled_by_interpolation(self, mask):

.. minigallery:: py_eddy_tracker.TrackEddiesObservations.filled_by_interpolation
"""

if self.track.size == 0:
return
nb_filled = mask.sum()
logger.info("%d obs will be filled (unobserved)", nb_filled)

nb_obs = len(self)
index = arange(nb_obs)

for field in self.obs.dtype.descr:
# print(f"field : {field}")
var = field[0]
if (
var in ["n", "virtual", "track", "cost_association"]
Expand Down
2 changes: 2 additions & 0 deletions src/py_eddy_tracker/observations/observation.py
Original file line number Diff line number Diff line change
Expand Up @@ -1328,6 +1328,8 @@ def solve_conflict(cost):
def solve_simultaneous(cost):
"""Write something (TODO)"""
mask = ~cost.mask
if mask.size == 0:
return mask
# Count number of links by self obs and other obs
self_links, other_links = sum_row_column(mask)
max_links = max(self_links.max(), other_links.max())
Expand Down
15 changes: 9 additions & 6 deletions src/py_eddy_tracker/observations/tracking.py
Original file line number Diff line number Diff line change
Expand Up @@ -173,6 +173,8 @@ def normalize_longitude(self):
- contour_lon_e (how to do if in raw)
- contour_lon_s (how to do if in raw)
"""
if self.lon.size == 0:
return
lon0 = (self.lon[self.index_from_track] - 180).repeat(self.nb_obs_by_track)
logger.debug("Normalize longitude")
self.lon[:] = (self.lon - lon0) % 360 + lon0
Expand Down Expand Up @@ -228,12 +230,13 @@ def set_global_attr_netcdf(self, h_nc):
)
h_nc.date_created = datetime.now().strftime("%Y-%m-%dT%H:%M:%SZ")
t = h_nc.variables[VAR_DESCR_inv["j1"]]
delta = t.max - t.min + 1
h_nc.time_coverage_duration = "P%dD" % delta
d_start = datetime(1950, 1, 1) + timedelta(int(t.min))
d_end = datetime(1950, 1, 1) + timedelta(int(t.max))
h_nc.time_coverage_start = d_start.strftime("%Y-%m-%dT00:00:00Z")
h_nc.time_coverage_end = d_end.strftime("%Y-%m-%dT00:00:00Z")
if t.size:
delta = t.max - t.min + 1
h_nc.time_coverage_duration = "P%dD" % delta
d_start = datetime(1950, 1, 1) + timedelta(int(t.min))
d_end = datetime(1950, 1, 1) + timedelta(int(t.max))
h_nc.time_coverage_start = d_start.strftime("%Y-%m-%dT00:00:00Z")
h_nc.time_coverage_end = d_end.strftime("%Y-%m-%dT00:00:00Z")

def extract_with_period(self, period, **kwargs):
"""
Expand Down
9 changes: 6 additions & 3 deletions src/py_eddy_tracker/tracking.py
Original file line number Diff line number Diff line change
Expand Up @@ -161,10 +161,10 @@ def period(self):

"""
date_start = datetime(1950, 1, 1) + timedelta(
int(self.class_method.load_file(self.datasets[0]).time[0])
self.class_method.load_file(self.datasets[0]).time[0]
)
date_stop = datetime(1950, 1, 1) + timedelta(
int(self.class_method.load_file(self.datasets[-1]).time[0])
self.class_method.load_file(self.datasets[-1]).time[0]
)
return date_start, date_stop

Expand Down Expand Up @@ -584,7 +584,10 @@ def prepare_merging(self):
def longer_than(self, size_min):
"""Remove from correspondance table all association for shorter eddies than size_min"""
# Identify eddies longer than
i_keep_track = where(self.nb_obs_by_tracks >= size_min)[0]
mask = self.nb_obs_by_tracks >= size_min
if not mask.any():
return False
i_keep_track = where(mask)[0]
# Reduce array
self.nb_obs_by_tracks = self.nb_obs_by_tracks[i_keep_track]
self.i_current_by_tracks = (
Expand Down
9 changes: 5 additions & 4 deletions src/scripts/EddyTranslate
Original file line number Diff line number Diff line change
Expand Up @@ -16,6 +16,7 @@ def id_parser():
)
parser.add_argument("filename_in")
parser.add_argument("filename_out")
parser.add_argument("--unraw", action="store_true", help="Load unraw data")
return parser


Expand All @@ -32,10 +33,10 @@ def get_variable_name(filename):
return list(h.keys())


def get_variable(filename, varname):
def get_variable(filename, varname, raw=True):
if is_nc(filename):
dataset = EddiesObservations.load_from_netcdf(
filename, raw_data=True, include_vars=(varname,)
filename, raw_data=raw, include_vars=(varname,)
)
else:
dataset = EddiesObservations.load_from_zarr(filename, include_vars=(varname,))
Expand All @@ -49,8 +50,8 @@ if __name__ == "__main__":
if not is_nc(args.filename_out):
h = zarr.open(args.filename_out, "w")
for varname in variables:
get_variable(args.filename_in, varname).to_zarr(h)
get_variable(args.filename_in, varname, raw=not args.unraw).to_zarr(h)
else:
with Dataset(args.filename_out, "w") as h:
for varname in variables:
get_variable(args.filename_in, varname).to_netcdf(h)
get_variable(args.filename_in, varname, raw=not args.unraw).to_netcdf(h)