diff --git a/CHANGELOG.rst b/CHANGELOG.rst
index 87b5d870..f33f15dd 100644
--- a/CHANGELOG.rst
+++ b/CHANGELOG.rst
@@ -10,13 +10,25 @@ and this project adheres to `Semantic Versioning
[...]
+ax_1st_b.set_ylabel("Color -> First target\nLatitude")
+ax_2nd_b.set_ylabel("Color -> Secondary target\nLatitude")
+ax_2nd_b.set_xlabel("Julian days"), ax_2nd_f.set_xlabel("Julian days")
+ax_1st_f.set_yticks([]), ax_2nd_f.set_yticks([])
+ax_1st_f.set_xticks([]), ax_1st_b.set_xticks([])
 
 
 def color_alpha(target, pct, vmin=5, vmax=80):
diff --git a/notebooks/python_module/16_network/pet_follow_particle.ipynb b/notebooks/python_module/16_network/pet_follow_particle.ipynb
index 6be13adf..15820ad3 100644
--- a/notebooks/python_module/16_network/pet_follow_particle.ipynb
+++ b/notebooks/python_module/16_network/pet_follow_particle.ipynb
@@ -15,7 +15,7 @@
    "cell_type": "markdown",
    "metadata": {},
    "source": [
-    "\n# Follow particle\n"
+    "\nFollow particle\n===============\n"
    ]
   },
   {
@@ -55,7 +55,7 @@
    "cell_type": "markdown",
    "metadata": {},
    "source": [
-    "## Schema\n\n"
+    "Schema\n------\n\n"
    ]
   },
   {
@@ -73,7 +73,7 @@
    "cell_type": "markdown",
    "metadata": {},
    "source": [
-    "## Animation\nParticle settings\n\n"
+    "Animation\n---------\nParticle settings\n\n"
    ]
   },
   {
@@ -109,7 +109,7 @@
    "cell_type": "markdown",
    "metadata": {},
    "source": [
-    "### Particle advection\n\n"
+    "Particle advection\n^^^^^^^^^^^^^^^^^^\n\n"
    ]
   },
   {
@@ -120,7 +120,7 @@
    },
    "outputs": [],
    "source": [
-    "step = 1 / 60.0\n\nx, y = meshgrid(arange(24, 36, step), arange(31, 36, step))\nx0, y0 = x.reshape(-1), y.reshape(-1)\n# Pre-order to speed up\n_, i = group_obs(x0, y0, 1, 360)\nx0, y0 = x0[i], y0[i]\n\nt_start, t_end = n.period\ndt = 14\n\nshape = (n.obs.size, 2)\n# Forward run\ni_target_f, pct_target_f = -ones(shape, dtype=\"i4\"), zeros(shape, dtype=\"i1\")\nfor t in range(t_start, t_end - dt):\n    particle_candidate(x0, y0, c, n, t, i_target_f, pct_target_f, n_days=dt)\n\n# Backward run\ni_target_b, pct_target_b = -ones(shape, dtype=\"i4\"), zeros(shape, dtype=\"i1\")\nfor t in range(t_start + dt, t_end):\n    particle_candidate(x0, y0, c, n, t, i_target_b, pct_target_b, n_days=-dt)"
+    "step = 1 / 60.0\n\nx, y = meshgrid(arange(24, 36, step), arange(31, 36, step))\nx0, y0 = x.reshape(-1), y.reshape(-1)\n# Pre-order to speed up\n_, i = group_obs(x0, y0, 1, 360)\nx0, y0 = x0[i], y0[i]\n\nt_start, t_end = n.period\ndt = 14\n\nshape = (n.obs.size, 2)\n# Forward run\ni_target_f, pct_target_f = -ones(shape, dtype=\"i4\"), zeros(shape, dtype=\"i1\")\nfor t in arange(t_start, t_end - dt):\n    particle_candidate(x0, y0, c, n, t, i_target_f, pct_target_f, n_days=dt)\n\n# Backward run\ni_target_b, pct_target_b = -ones(shape, dtype=\"i4\"), zeros(shape, dtype=\"i1\")\nfor t in arange(t_start + dt, t_end):\n    particle_candidate(x0, y0, c, n, t, i_target_b, pct_target_b, n_days=-dt)"
    ]
   },
   {
@@ -131,7 +131,7 @@
    },
    "outputs": [],
    "source": [
-    "fig = plt.figure(figsize=(10, 10))\nax_1st_b = fig.add_axes([0.05, 0.52, 0.45, 0.45])\nax_2nd_b = fig.add_axes([0.05, 0.05, 0.45, 0.45])\nax_1st_f = fig.add_axes([0.52, 0.52, 0.45, 0.45])\nax_2nd_f = fig.add_axes([0.52, 0.05, 0.45, 0.45])\nax_1st_b.set_title(\"Backward advection for each time step\")\nax_1st_f.set_title(\"Forward advection for each time step\")\n\n\ndef color_alpha(target, pct, vmin=5, vmax=80):\n    color = cmap(n.segment[target])\n    # We will hide under 5 % and from 80% to 100 % it will be 1\n    alpha = (pct - vmin) / (vmax - vmin)\n    alpha[alpha < 0] = 0\n    alpha[alpha > 1] = 1\n    color[:, 3] = alpha\n    return color\n\n\nkw = dict(\n    name=None, yfield=\"longitude\", event=False, zorder=-100, s=(n.speed_area / 20e6)\n)\nn.scatter_timeline(ax_1st_b, c=color_alpha(i_target_b.T[0], pct_target_b.T[0]), **kw)\nn.scatter_timeline(ax_2nd_b, c=color_alpha(i_target_b.T[1], pct_target_b.T[1]), **kw)\nn.scatter_timeline(ax_1st_f, c=color_alpha(i_target_f.T[0], pct_target_f.T[0]), **kw)\nn.scatter_timeline(ax_2nd_f, c=color_alpha(i_target_f.T[1], pct_target_f.T[1]), **kw)\nfor ax in (ax_1st_b, ax_2nd_b, ax_1st_f, ax_2nd_f):\n    n.display_timeline(ax, field=\"longitude\", marker=\"+\", lw=2, markersize=5)\n    ax.grid()"
+    "fig = plt.figure(figsize=(10, 10))\nax_1st_b = fig.add_axes([0.05, 0.52, 0.45, 0.45])\nax_2nd_b = fig.add_axes([0.05, 0.05, 0.45, 0.45])\nax_1st_f = fig.add_axes([0.52, 0.52, 0.45, 0.45])\nax_2nd_f = fig.add_axes([0.52, 0.05, 0.45, 0.45])\nax_1st_b.set_title(\"Backward advection for each time step\")\nax_1st_f.set_title(\"Forward advection for each time step\")\nax_1st_b.set_ylabel(\"Color -> First target\\nLatitude\")\nax_2nd_b.set_ylabel(\"Color -> Secondary target\\nLatitude\")\nax_2nd_b.set_xlabel(\"Julian days\"), ax_2nd_f.set_xlabel(\"Julian days\")\nax_1st_f.set_yticks([]), ax_2nd_f.set_yticks([])\nax_1st_f.set_xticks([]), ax_1st_b.set_xticks([])\n\n\ndef color_alpha(target, pct, vmin=5, vmax=80):\n    color = cmap(n.segment[target])\n    # We will hide under 5 % and from 80% to 100 % it will be 1\n    alpha = (pct - vmin) / (vmax - vmin)\n    alpha[alpha < 0] = 0\n    alpha[alpha > 1] = 1\n    color[:, 3] = alpha\n    return color\n\n\nkw = dict(\n    name=None, yfield=\"longitude\", event=False, zorder=-100, s=(n.speed_area / 20e6)\n)\nn.scatter_timeline(ax_1st_b, c=color_alpha(i_target_b.T[0], pct_target_b.T[0]), **kw)\nn.scatter_timeline(ax_2nd_b, c=color_alpha(i_target_b.T[1], pct_target_b.T[1]), **kw)\nn.scatter_timeline(ax_1st_f, c=color_alpha(i_target_f.T[0], pct_target_f.T[0]), **kw)\nn.scatter_timeline(ax_2nd_f, c=color_alpha(i_target_f.T[1], pct_target_f.T[1]), **kw)\nfor ax in (ax_1st_b, ax_2nd_b, ax_1st_f, ax_2nd_f):\n    n.display_timeline(ax, field=\"longitude\", marker=\"+\", lw=2, markersize=5)\n    ax.grid()"
    ]
   }
  ],
@@ -151,7 +151,7 @@
    "name": "python",
    "nbconvert_exporter": "python",
    "pygments_lexer": "ipython3",
-   "version": "3.7.9"
+   "version": "3.7.7"
   }
  },
 "nbformat": 4,
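A minimal sketch (not part of the patch) of the loop-window arithmetic used by the forward and backward runs in the advection cell above; the t_start, t_end, and dt values here are hypothetical:

# Hypothetical period: a particle seeded at t and advected n_days must stay
# inside [t_start, t_end], hence the offset loop bounds in the notebook cell.
t_start, t_end, dt = 100, 200, 14

forward_starts = range(t_start, t_end - dt)   # last seed 185 reaches 199 < t_end
backward_starts = range(t_start + dt, t_end)  # first seed 114 reaches back to 100
assert max(forward_starts) + dt <= t_end - 1
assert min(backward_starts) - dt >= t_start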
yfield=\"longitude\", event=False, zorder=-100, s=(n.speed_area / 20e6)\n)\nn.scatter_timeline(ax_1st_b, c=color_alpha(i_target_b.T[0], pct_target_b.T[0]), **kw)\nn.scatter_timeline(ax_2nd_b, c=color_alpha(i_target_b.T[1], pct_target_b.T[1]), **kw)\nn.scatter_timeline(ax_1st_f, c=color_alpha(i_target_f.T[0], pct_target_f.T[0]), **kw)\nn.scatter_timeline(ax_2nd_f, c=color_alpha(i_target_f.T[1], pct_target_f.T[1]), **kw)\nfor ax in (ax_1st_b, ax_2nd_b, ax_1st_f, ax_2nd_f):\n n.display_timeline(ax, field=\"longitude\", marker=\"+\", lw=2, markersize=5)\n ax.grid()" + "fig = plt.figure(figsize=(10, 10))\nax_1st_b = fig.add_axes([0.05, 0.52, 0.45, 0.45])\nax_2nd_b = fig.add_axes([0.05, 0.05, 0.45, 0.45])\nax_1st_f = fig.add_axes([0.52, 0.52, 0.45, 0.45])\nax_2nd_f = fig.add_axes([0.52, 0.05, 0.45, 0.45])\nax_1st_b.set_title(\"Backward advection for each time step\")\nax_1st_f.set_title(\"Forward advection for each time step\")\nax_1st_b.set_ylabel(\"Color -> First target\\nLatitude\")\nax_2nd_b.set_ylabel(\"Color -> Secondary target\\nLatitude\")\nax_2nd_b.set_xlabel(\"Julian days\"), ax_2nd_f.set_xlabel(\"Julian days\")\nax_1st_f.set_yticks([]), ax_2nd_f.set_yticks([])\nax_1st_f.set_xticks([]), ax_1st_b.set_xticks([])\n\n\ndef color_alpha(target, pct, vmin=5, vmax=80):\n color = cmap(n.segment[target])\n # We will hide under 5 % and from 80% to 100 % it will be 1\n alpha = (pct - vmin) / (vmax - vmin)\n alpha[alpha < 0] = 0\n alpha[alpha > 1] = 1\n color[:, 3] = alpha\n return color\n\n\nkw = dict(\n name=None, yfield=\"longitude\", event=False, zorder=-100, s=(n.speed_area / 20e6)\n)\nn.scatter_timeline(ax_1st_b, c=color_alpha(i_target_b.T[0], pct_target_b.T[0]), **kw)\nn.scatter_timeline(ax_2nd_b, c=color_alpha(i_target_b.T[1], pct_target_b.T[1]), **kw)\nn.scatter_timeline(ax_1st_f, c=color_alpha(i_target_f.T[0], pct_target_f.T[0]), **kw)\nn.scatter_timeline(ax_2nd_f, c=color_alpha(i_target_f.T[1], pct_target_f.T[1]), **kw)\nfor ax in (ax_1st_b, ax_2nd_b, ax_1st_f, ax_2nd_f):\n n.display_timeline(ax, field=\"longitude\", marker=\"+\", lw=2, markersize=5)\n ax.grid()" ] } ], @@ -151,7 +151,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.7.9" + "version": "3.7.7" } }, "nbformat": 4, diff --git a/notebooks/python_module/16_network/pet_segmentation_anim.ipynb b/notebooks/python_module/16_network/pet_segmentation_anim.ipynb index 05c68873..34047da4 100644 --- a/notebooks/python_module/16_network/pet_segmentation_anim.ipynb +++ b/notebooks/python_module/16_network/pet_segmentation_anim.ipynb @@ -15,7 +15,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "\n# Network segmentation process\n" + "\nNetwork segmentation process\n============================\n" ] }, { @@ -62,7 +62,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "## Load data\nLoad data where observations are put in same network but no segmentation\n\n" + "Load data\n---------\nLoad data where observations are put in same network but no segmentation\n\n" ] }, { @@ -80,7 +80,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "## Do segmentation\nSegmentation based on maximum overlap, temporal window for candidates = 5 days\n\n" + "Do segmentation\n---------------\nSegmentation based on maximum overlap, temporal window for candidates = 5 days\n\n" ] }, { @@ -98,7 +98,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "## Anim\n\n" + "Anim\n----\n\n" ] }, { @@ -109,14 +109,14 @@ }, "outputs": [], "source": [ - "def update(i_frame):\n tr = 
diff --git a/share/tracking.yaml b/share/tracking.yaml
index b9c98488..d6264104 100644
--- a/share/tracking.yaml
+++ b/share/tracking.yaml
@@ -1,12 +1,13 @@
 PATHS:
-  # Files produces with EddyIdentification
+  # Files produced with EddyIdentification
   FILES_PATTERN: /home/emason/toto/Anticyclonic_*.nc
-  # Path for saving of outputs
+  # Path to save outputs
   SAVE_DIR: '/home/emason/toto/'
 
-# Minimum number of observations to store eddy
-TRACK_DURATION_MIN: 4
+# Number of consecutive timesteps with missing detection allowed
 VIRTUAL_LENGTH_MAX: 0
+# Minimal number of timesteps to be considered as a long trajectory
+TRACK_DURATION_MIN: 4
 
 CLASS:
   MODULE: py_eddy_tracker.featured_tracking.area_tracker
diff --git a/src/py_eddy_tracker/__init__.py b/src/py_eddy_tracker/__init__.py
index 5cf0d59a..fbeb1450 100644
--- a/src/py_eddy_tracker/__init__.py
+++ b/src/py_eddy_tracker/__init__.py
@@ -22,6 +22,7 @@
 import logging
 from argparse import ArgumentParser
+from datetime import datetime
 
 import zarr
 
@@ -106,12 +107,26 @@ def parse_args(self, *args, **kwargs):
         return opts
 
 
+TIME_MODELS = ["%Y%m%d", "%Y%m%d%H%M%S", "%Y%m%dT%H%M%S"]
+
+
+def identify_time(str_date):
+    for model in TIME_MODELS:
+        try:
+            return datetime.strptime(str_date, model)
+        except ValueError:
+            pass
+    raise Exception("No time model found")
+
+
 VAR_DESCR = dict(
     time=dict(
         attr_name="time",
         nc_name="time",
         old_nc_name=["j1"],
-        nc_type="int32",
+        nc_type="float64",
+        output_type="uint32",
+        scale_factor=1 / 86400.0,
         nc_dims=("obs",),
         nc_attr=dict(
             standard_name="time",
@@ -251,7 +266,7 @@ def parse_args(self, *args, **kwargs):
         old_nc_name=["A"],
         nc_type="float32",
         output_type="uint16",
-        scale_factor=0.001,
+        scale_factor=0.0001,
         nc_dims=("obs",),
         nc_attr=dict(
             long_name="Amplitude",
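A usage sketch for the new helper above: identify_time() tries each pattern in TIME_MODELS in order, so daily and sub-daily date strings both resolve without a caller-supplied model (input values below are illustrative). The companion VAR_DESCR change stores time as float64 days and writes it as uint32 with a 1/86400 scale factor, i.e. one-second resolution.

from py_eddy_tracker import identify_time

identify_time("20190223")         # datetime(2019, 2, 23, 0, 0), via "%Y%m%d"
identify_time("20190223T123000")  # datetime(2019, 2, 23, 12, 30), via "%Y%m%dT%H%M%S"
identify_time("2019-02-23")       # raises Exception("No time model found")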
diff --git a/src/py_eddy_tracker/appli/eddies.py b/src/py_eddy_tracker/appli/eddies.py
index d30ef259..4809fddf 100644
--- a/src/py_eddy_tracker/appli/eddies.py
+++ b/src/py_eddy_tracker/appli/eddies.py
@@ -15,7 +15,7 @@
 from numpy import bincount, bytes_, empty, in1d, unique
 from yaml import safe_load
 
-from .. import EddyParser
+from .. import EddyParser, identify_time
 from ..observations.observation import EddiesObservations, reverse_index
 from ..observations.tracking import TrackEddiesObservations
 from ..tracking import Correspondances
@@ -163,7 +163,12 @@ def eddies_tracking():
     parser.add_argument(
         "--zarr", action="store_true", help="Output will be wrote in zarr"
     )
-    parser.add_argument("--unraw", action="store_true", help="Load unraw data")
+    parser.add_argument(
+        "--unraw",
+        action="store_true",
+        help="Load unraw data, use only for netcdf. "
+        "If unraw is active, netcdf is loaded without applying scale_factor and add_offset.",
+    )
     parser.add_argument(
         "--blank_period",
         type=int,
@@ -223,7 +228,7 @@ def browse_dataset_in(
     data_dir,
     files_model,
     date_regexp,
-    date_model,
+    date_model=None,
     start_date=None,
     end_date=None,
     sub_sampling_step=1,
@@ -238,11 +243,7 @@ def browse_dataset_in(
     filenames = bytes_(glob(full_path))
 
     dataset_list = empty(
-        len(filenames),
-        dtype=[
-            ("filename", "S500"),
-            ("date", "datetime64[D]"),
-        ],
+        len(filenames), dtype=[("filename", "S500"), ("date", "datetime64[s]")],
     )
     dataset_list["filename"] = filenames
 
@@ -268,13 +269,15 @@ def browse_dataset_in(
             str_date = result.groups()[0]
 
         if str_date is not None:
-            item["date"] = datetime.strptime(str_date, date_model).date()
+            if date_model is None:
+                item["date"] = identify_time(str_date)
+            else:
+                item["date"] = datetime.strptime(str_date, date_model)
 
     dataset_list.sort(order=["date", "filename"])
-
     steps = unique(dataset_list["date"][1:] - dataset_list["date"][:-1])
     if len(steps) > 1:
-        raise Exception("Several days steps in grid dataset %s" % steps)
+        raise Exception("Several timesteps in grid dataset %s" % steps)
 
     if sub_sampling_step != 1:
         logger.info("Grid subsampling %d", sub_sampling_step)
@@ -304,7 +307,7 @@ def track(
     correspondances_only=False,
     **kw_c,
 ):
-    kw = dict(date_regexp=".*_([0-9]*?).[nz].*", date_model="%Y%m%d")
+    kw = dict(date_regexp=".*_([0-9]*?).[nz].*")
     if isinstance(pattern, list):
         kw.update(dict(data_dir=None, files_model=None, files=pattern))
     else:
@@ -323,10 +326,9 @@ def track(
     c = Correspondances(datasets=datasets["filename"], **kw_c)
     c.track()
     logger.info("Track finish")
-    t0, t1 = c.period
     kw_save = dict(
-        date_start=t0,
-        date_stop=t1,
+        date_start=datasets["date"][0],
+        date_stop=datasets["date"][-1],
         date_prod=datetime.now(),
         path=output_dir,
         sign_type=c.current_obs.sign_legend,
@@ -351,11 +353,13 @@ def track(
 
     short_c = c._copy()
     short_c.shorter_than(size_max=nb_obs_min)
-    c.longer_than(size_min=nb_obs_min)
-
-    long_track = c.merge(raw_data=raw)
     short_track = short_c.merge(raw_data=raw)
 
+    if c.longer_than(size_min=nb_obs_min) is False:
+        long_track = short_track.empty_dataset()
+    else:
+        long_track = c.merge(raw_data=raw)
+
     # We flag obs
     if c.virtual:
         long_track["virtual"][:] = long_track["time"] == 0
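A rough sketch (not part of the patch) of what the new browse_dataset_in defaults do: the default date_regexp pulls the digit block out of each filename, and identify_time guesses its format, so date_model is only needed for unusual naming schemes. The filename below is hypothetical:

import re

from py_eddy_tracker import identify_time

date_regexp = ".*_([0-9]*?).[nz].*"
filename = "/data/Anticyclonic_20190223.nc"  # hypothetical path
str_date = re.match(date_regexp, filename).groups()[0]  # "20190223"
date = identify_time(str_date)  # datetime(2019, 2, 23, 0, 0)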
diff --git a/src/py_eddy_tracker/appli/grid.py b/src/py_eddy_tracker/appli/grid.py
index 7f2b9610..099465ee 100644
--- a/src/py_eddy_tracker/appli/grid.py
+++ b/src/py_eddy_tracker/appli/grid.py
@@ -3,9 +3,8 @@
 All entry point to manipulate grid
 """
 from argparse import Action
-from datetime import datetime
 
-from .. import EddyParser
+from .. import EddyParser, identify_time
 from ..dataset.grid import RegularGridDataset, UnRegularGridDataset
 
 
@@ -121,7 +120,7 @@ def eddy_id(args=None):
         cut_wavelength = [0, *cut_wavelength]
     inf_bnds, upper_bnds = cut_wavelength
 
-    date = datetime.strptime(args.datetime, "%Y%m%d")
+    date = identify_time(args.datetime)
     kwargs = dict(
         step=args.isoline_step,
         shape_error=args.fit_errmax,
@@ -150,7 +149,7 @@ def eddy_id(args=None):
         sampling_method=args.sampling_method,
         **kwargs,
     )
-    out_name = date.strftime("%(path)s/%(sign_type)s_%Y%m%d.nc")
+    out_name = date.strftime("%(path)s/%(sign_type)s_%Y%m%dT%H%M%S.nc")
     a.write_file(path=args.path_out, filename=out_name, zarr_flag=args.zarr)
     c.write_file(path=args.path_out, filename=out_name, zarr_flag=args.zarr)
 
diff --git a/src/py_eddy_tracker/data/Anticyclonic_20190223.nc b/src/py_eddy_tracker/data/Anticyclonic_20190223.nc
index ce48c8d6..4ab8f226 100644
Binary files a/src/py_eddy_tracker/data/Anticyclonic_20190223.nc and b/src/py_eddy_tracker/data/Anticyclonic_20190223.nc differ
diff --git a/src/py_eddy_tracker/gui.py b/src/py_eddy_tracker/gui.py
index fadd4947..deeb6660 100644
--- a/src/py_eddy_tracker/gui.py
+++ b/src/py_eddy_tracker/gui.py
@@ -291,8 +291,8 @@ def get_infos(self, name, index):
         i_first = d.index_from_track[tr]
         track = d.obs[i_first : i_first + nb]
         nb -= 1
-        t0 = timedelta(days=int(track[0]["time"])) + datetime(1950, 1, 1)
-        t1 = timedelta(days=int(track[-1]["time"])) + datetime(1950, 1, 1)
+        t0 = timedelta(days=track[0]["time"]) + datetime(1950, 1, 1)
+        t1 = timedelta(days=track[-1]["time"]) + datetime(1950, 1, 1)
         txt = f"--{name}--\n"
         txt += f"  {t0} -> {t1}\n"
         txt += f"  Tracks : {tr} {now['n']}/{nb} ({now['n'] / nb * 100:.2f} %)\n"
diff --git a/src/py_eddy_tracker/observations/groups.py b/src/py_eddy_tracker/observations/groups.py
index 64a81a36..544fd5f5 100644
--- a/src/py_eddy_tracker/observations/groups.py
+++ b/src/py_eddy_tracker/observations/groups.py
@@ -186,7 +186,8 @@ def filled_by_interpolation(self, mask):
 
         .. minigallery:: py_eddy_tracker.TrackEddiesObservations.filled_by_interpolation
         """
-
+        if self.track.size == 0:
+            return
         nb_filled = mask.sum()
         logger.info("%d obs will be filled (unobserved)", nb_filled)
 
@@ -194,7 +195,6 @@ def filled_by_interpolation(self, mask):
         index = arange(nb_obs)
 
         for field in self.obs.dtype.descr:
-            # print(f"field : {field}")
             var = field[0]
             if (
                 var in ["n", "virtual", "track", "cost_association"]
diff --git a/src/py_eddy_tracker/observations/observation.py b/src/py_eddy_tracker/observations/observation.py
index d969f800..db0c2a45 100644
--- a/src/py_eddy_tracker/observations/observation.py
+++ b/src/py_eddy_tracker/observations/observation.py
@@ -69,7 +69,7 @@
     poly_indexs,
     reduce_size,
     vertice_overlap,
-    winding_number_poly
+    winding_number_poly,
 )
 
 logger = logging.getLogger("pet")
@@ -1326,8 +1326,15 @@ def solve_conflict(cost):
 
     @staticmethod
     def solve_simultaneous(cost):
-        """Write something (TODO)"""
+        """Deduce link from cost matrix.
+
+        :param array(float) cost: Cost for each available link
+        :return: a boolean mask array, True for each valid couple
+        :rtype: array(bool)
+        """
         mask = ~cost.mask
+        if mask.size == 0:
+            return mask
         # Count number of links by self obs and other obs
         self_links, other_links = sum_row_column(mask)
         max_links = max(self_links.max(), other_links.max())
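A toy illustration (assumed, not from the test suite) of why the size guard added to solve_simultaneous() matters: with no candidate links the cost matrix is empty, and the method can now return the empty mask instead of reaching sum_row_column() and .max() on empty arrays:

from numpy import ma, zeros

# An empty masked cost matrix: no eddy pair is a link candidate.
cost = ma.masked_array(zeros((0, 0)), mask=zeros((0, 0), dtype=bool))
mask = ~cost.mask
print(mask.size)  # 0 -> the early-return path added by the patch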
diff --git a/src/py_eddy_tracker/observations/tracking.py b/src/py_eddy_tracker/observations/tracking.py
index 58514eb2..492842c7 100644
--- a/src/py_eddy_tracker/observations/tracking.py
+++ b/src/py_eddy_tracker/observations/tracking.py
@@ -16,6 +16,7 @@
     degrees,
     empty,
     histogram,
+    int_,
     median,
     nan,
     ones,
@@ -173,6 +174,8 @@ def normalize_longitude(self):
         - contour_lon_e (how to do if in raw)
         - contour_lon_s (how to do if in raw)
         """
+        if self.lon.size == 0:
+            return
         lon0 = (self.lon[self.index_from_track] - 180).repeat(self.nb_obs_by_track)
         logger.debug("Normalize longitude")
         self.lon[:] = (self.lon - lon0) % 360 + lon0
@@ -228,12 +231,13 @@ def set_global_attr_netcdf(self, h_nc):
         )
         h_nc.date_created = datetime.now().strftime("%Y-%m-%dT%H:%M:%SZ")
         t = h_nc.variables[VAR_DESCR_inv["j1"]]
-        delta = t.max - t.min + 1
-        h_nc.time_coverage_duration = "P%dD" % delta
-        d_start = datetime(1950, 1, 1) + timedelta(int(t.min))
-        d_end = datetime(1950, 1, 1) + timedelta(int(t.max))
-        h_nc.time_coverage_start = d_start.strftime("%Y-%m-%dT00:00:00Z")
-        h_nc.time_coverage_end = d_end.strftime("%Y-%m-%dT00:00:00Z")
+        if t.size:
+            delta = t.max - t.min + 1
+            h_nc.time_coverage_duration = "P%dD" % delta
+            d_start = datetime(1950, 1, 1) + timedelta(int(t.min))
+            d_end = datetime(1950, 1, 1) + timedelta(int(t.max))
+            h_nc.time_coverage_start = d_start.strftime("%Y-%m-%dT00:00:00Z")
+            h_nc.time_coverage_end = d_end.strftime("%Y-%m-%dT00:00:00Z")
 
     def extract_with_period(self, period, **kwargs):
         """
@@ -598,6 +602,12 @@ def plot(self, ax, ref=None, **kwargs):
 
     def split_network(self, intern=True, **kwargs):
         """Return each group (network) divided in segments"""
+        # Find timestep of dataset
+        # FIXME : how to know exact time sampling
+        t = unique(self.time)
+        dts = t[1:] - t[:-1]
+        timestep = median(dts)
+
         track_s, track_e, track_ref = build_index(self.tracks)
         ids = empty(
             len(self),
@@ -611,7 +621,7 @@ def split_network(self, intern=True, **kwargs):
                 ("next_obs", "i4"),
             ],
         )
-        ids["group"], ids["time"] = self.tracks, self.time
+        ids["group"], ids["time"] = self.tracks, int_(self.time / timestep)
         # Initialisation
         # To store the id of the segments, the backward and forward cost associations
         ids["track"], ids["previous_cost"], ids["next_cost"] = 0, 0, 0
@@ -638,6 +648,7 @@ def split_network(self, intern=True, **kwargs):
                 local_ids["next_obs"][m] += i_s
         if display_iteration:
             print()
+        ids["time"] *= timestep
         return ids
 
     def set_tracks(self, x, y, ids, window, **kwargs):
@@ -649,8 +660,7 @@ def set_tracks(self, x, y, ids, window, **kwargs):
         :param ndarray ids: several fields like time, group, ...
         :param int windows: number of days where observations could missed
         """
-
-        time_index = build_index(ids["time"])
+        time_index = build_index((ids["time"]).astype("i4"))
         nb = x.shape[0]
         used = zeros(nb, dtype="bool")
         track_id = 1
@@ -695,8 +705,7 @@ def get_previous_obs(
         i_current, ids, x, y, time_s, time_e, time_ref, window, **kwargs
     ):
         """Backward association of observations to the segments"""
-
-        time_cur = ids["time"][i_current]
+        time_cur = int_(ids["time"][i_current])
         t0, t1 = time_cur - 1 - time_ref, max(time_cur - window - time_ref, 0)
         for t_step in range(t0, t1 - 1, -1):
             i0, i1 = time_s[t_step], time_e[t_step]
@@ -726,7 +735,7 @@ def get_previous_obs(
 def get_next_obs(i_current, ids, x, y, time_s, time_e, time_ref, window, **kwargs):
     """Forward association of observations to the segments"""
     time_max = time_e.shape[0] - 1
-    time_cur = ids["time"][i_current]
+    time_cur = int_(ids["time"][i_current])
     t0, t1 = time_cur + 1 - time_ref, min(time_cur + window - time_ref, time_max)
     if t0 > time_max:
         return -1
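The split_network() changes above infer the dataset's sampling step from the data, convert time to whole-step indices so build_index works on sub-daily sampling, then restore the original units on the way out. A small numeric sketch with a hypothetical 5-day sampling:

from numpy import array, int_, median, unique

time = array([0.0, 5.0, 10.0, 20.0, 10.0, 15.0])  # hypothetical, in days
t = unique(time)               # [ 0.  5. 10. 15. 20.]
dts = t[1:] - t[:-1]           # [5. 5. 5. 5.]
timestep = median(dts)         # 5.0
steps = int_(time / timestep)  # [0 1 2 4 2 3], contiguous index for build_index
time_back = steps * timestep   # original units restored, as done before return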
diff --git a/src/py_eddy_tracker/tracking.py b/src/py_eddy_tracker/tracking.py
index cba9985e..577496ff 100644
--- a/src/py_eddy_tracker/tracking.py
+++ b/src/py_eddy_tracker/tracking.py
@@ -161,10 +161,10 @@ def period(self):
         """
         date_start = datetime(1950, 1, 1) + timedelta(
-            int(self.class_method.load_file(self.datasets[0]).time[0])
+            self.class_method.load_file(self.datasets[0]).time[0]
         )
         date_stop = datetime(1950, 1, 1) + timedelta(
-            int(self.class_method.load_file(self.datasets[-1]).time[0])
+            self.class_method.load_file(self.datasets[-1]).time[0]
         )
         return date_start, date_stop
 
@@ -584,7 +584,10 @@ def prepare_merging(self):
     def longer_than(self, size_min):
         """Remove from correspondance table all association for shorter eddies than size_min"""
         # Identify eddies longer than
-        i_keep_track = where(self.nb_obs_by_tracks >= size_min)[0]
+        mask = self.nb_obs_by_tracks >= size_min
+        if not mask.any():
+            return False
+        i_keep_track = where(mask)[0]
         # Reduce array
         self.nb_obs_by_tracks = self.nb_obs_by_tracks[i_keep_track]
         self.i_current_by_tracks = (
diff --git a/src/scripts/EddyTranslate b/src/scripts/EddyTranslate
index 94142132..26ab3a7b 100644
--- a/src/scripts/EddyTranslate
+++ b/src/scripts/EddyTranslate
@@ -16,6 +16,8 @@ def id_parser():
     )
     parser.add_argument("filename_in")
     parser.add_argument("filename_out")
+    parser.add_argument("--unraw", action="store_true", help="Load unraw data, use only for netcdf. "
+                        "If unraw is active, netcdf is loaded without applying scale_factor and add_offset.")
     return parser
 
 
@@ -32,10 +34,10 @@ def get_variable_name(filename):
     return list(h.keys())
 
 
-def get_variable(filename, varname):
+def get_variable(filename, varname, raw=True):
     if is_nc(filename):
         dataset = EddiesObservations.load_from_netcdf(
-            filename, raw_data=True, include_vars=(varname,)
+            filename, raw_data=raw, include_vars=(varname,)
         )
     else:
         dataset = EddiesObservations.load_from_zarr(filename, include_vars=(varname,))
@@ -49,8 +51,8 @@ if __name__ == "__main__":
     if not is_nc(args.filename_out):
         h = zarr.open(args.filename_out, "w")
         for varname in variables:
-            get_variable(args.filename_in, varname).to_zarr(h)
+            get_variable(args.filename_in, varname, raw=not args.unraw).to_zarr(h)
     else:
         with Dataset(args.filename_out, "w") as h:
             for varname in variables:
-                get_variable(args.filename_in, varname).to_netcdf(h)
+                get_variable(args.filename_in, varname, raw=not args.unraw).to_netcdf(h)
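Underlying several hunks above (gui.py, tracking.py, the j1 handling): time is now float days rather than int days, and the datetime arithmetic still works after dropping the int() casts because timedelta accepts fractional days. A one-line check with a made-up value:

from datetime import datetime, timedelta

# 25255.5 days after the 1950-01-01 reference lands mid-day, not at midnight.
print(datetime(1950, 1, 1) + timedelta(days=25255.5))  # 2019-02-23 12:00:00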